mirror of
https://github.com/Kong/insomnia.git
synced 2026-04-21 14:47:46 -04:00
[TypeScript] Phase 1 & 2 (#3370)
Co-authored-by: Opender Singh <opender.singh@konghq.com>
This commit is contained in:
committed by
GitHub
parent
bdb4b4e661
commit
5f4c19da35
@@ -5,3 +5,4 @@ indent_size = 2
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
end_of_line = lf
|
||||
quote_type = single
|
||||
|
||||
@@ -8,13 +8,12 @@ screenshots/
|
||||
**/.cache/
|
||||
**/coverage/
|
||||
**/node_modules/
|
||||
**/webpack/
|
||||
**/bin/
|
||||
**/__fixtures__/
|
||||
**/fixtures
|
||||
**/__snapshots__/
|
||||
**/flow-typed/
|
||||
**/dist/
|
||||
**/.cache/
|
||||
**/svgr/
|
||||
**/storybook-static/
|
||||
*.md
|
||||
*.md
|
||||
101
.eslintrc.js
Normal file
101
.eslintrc.js
Normal file
@@ -0,0 +1,101 @@
|
||||
/** @type { import('eslint').Linter.Config } */
|
||||
module.exports = {
|
||||
parser: '@typescript-eslint/parser',
|
||||
parserOptions: {
|
||||
project: [
|
||||
'./tsconfig.eslint.json',
|
||||
'./packages/*/tsconfig.json',
|
||||
'./plugins/*/tsconfig.json',
|
||||
],
|
||||
tsconfigRootDir: __dirname,
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
},
|
||||
extends: [
|
||||
'plugin:@typescript-eslint/eslint-recommended',
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'semistandard',
|
||||
'plugin:react-hooks/recommended',
|
||||
],
|
||||
plugins: [
|
||||
'@typescript-eslint',
|
||||
'react',
|
||||
'jest',
|
||||
'html',
|
||||
'json',
|
||||
'filenames',
|
||||
'react-hooks',
|
||||
],
|
||||
globals: {
|
||||
__DEV__: true,
|
||||
fail: true,
|
||||
NodeJS: true,
|
||||
HTMLDivElement: true,
|
||||
HTMLElement: true,
|
||||
HTMLInputElement: true,
|
||||
HTMLSelectElement: true,
|
||||
JSX: true,
|
||||
},
|
||||
env: {
|
||||
browser: true,
|
||||
commonjs: true,
|
||||
es6: true,
|
||||
'jest/globals': true,
|
||||
node: true,
|
||||
},
|
||||
overrides: [
|
||||
{
|
||||
files: ['*.js'],
|
||||
rules: {
|
||||
'@typescript-eslint/no-var-requires': 'off',
|
||||
},
|
||||
},
|
||||
],
|
||||
rules: {
|
||||
'comma-dangle': ['error', 'always-multiline'],
|
||||
indent: 'off',
|
||||
'no-var': 'error',
|
||||
'no-async-promise-executor': 'off',
|
||||
'no-case-declarations': 'off',
|
||||
'no-prototype-builtins': 'off',
|
||||
'no-duplicate-imports': 'off',
|
||||
'react/jsx-uses-react': 'error',
|
||||
'react/jsx-uses-vars': 'error',
|
||||
'space-in-parens': 'off',
|
||||
'react-hooks/rules-of-hooks': 'error',
|
||||
'react-hooks/exhaustive-deps': 'error',
|
||||
camelcase: ['error', { allow: ['__export_format', '__export_date', '__export_source'] }],
|
||||
'space-before-function-paren': [
|
||||
'error',
|
||||
{
|
||||
anonymous: 'never',
|
||||
named: 'never',
|
||||
asyncArrow: 'always',
|
||||
},
|
||||
],
|
||||
'filenames/match-exported': [
|
||||
'error',
|
||||
'kebab',
|
||||
],
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'no-unused-vars': 'off',
|
||||
'@typescript-eslint/no-unused-vars': ['error', { ignoreRestSiblings: true }],
|
||||
'@typescript-eslint/ban-types': 'off',
|
||||
'@typescript-eslint/no-empty-function': 'off',
|
||||
'@typescript-eslint/no-namespace': ['error', { allowDeclarations: true }],
|
||||
'spaced-comment': ['error', 'always', {
|
||||
exceptions: ['/', '*', '-', '* '], // for ASCII art :)
|
||||
markers: [
|
||||
'/', // for TypeScript directives, doxygen, vsdoc, etc. (which use `///`)
|
||||
'?', // for Quokka
|
||||
],
|
||||
}],
|
||||
'@typescript-eslint/array-type': ['error', { default: 'array', readonly: 'array' }],
|
||||
'@typescript-eslint/consistent-type-definitions': ['error', 'interface'],
|
||||
quotes: 'off',
|
||||
'@typescript-eslint/quotes': ['error', 'single', { avoidEscape: true }],
|
||||
'no-use-before-define': 'off',
|
||||
'@typescript-eslint/no-use-before-define': 'error',
|
||||
},
|
||||
};
|
||||
@@ -1,52 +0,0 @@
|
||||
{
|
||||
"parser": "babel-eslint",
|
||||
"extends": ["semistandard", "plugin:flowtype/recommended", "plugin:prettier/recommended", "plugin:react-hooks/recommended"],
|
||||
"plugins": ["react", "jest", "html", "json", "filenames", "flowtype", "react-hooks"],
|
||||
"parserOptions": {
|
||||
"ecmaFeatures": {
|
||||
"jsx": true
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"__DEV__": true,
|
||||
"fail": true,
|
||||
"HTMLDivElement": true,
|
||||
"HTMLElement": true,
|
||||
"HTMLInputElement": true,
|
||||
"HTMLSelectElement": true
|
||||
},
|
||||
"env": {
|
||||
"jest/globals": true
|
||||
},
|
||||
"rules": {
|
||||
"comma-dangle": ["error", "always-multiline"],
|
||||
"indent": "off",
|
||||
"no-var": "error",
|
||||
"no-async-promise-executor": "off",
|
||||
"no-case-declarations": "off",
|
||||
"no-prototype-builtins": "off",
|
||||
"no-duplicate-imports": "off",
|
||||
"flowtype/space-after-type-colon": "off",
|
||||
"react/jsx-uses-react": "error",
|
||||
"react/jsx-uses-vars": "error",
|
||||
"space-in-parens": "off",
|
||||
"react-hooks/rules-of-hooks": "error",
|
||||
"react-hooks/exhaustive-deps": "error",
|
||||
"space-before-function-paren": [
|
||||
"error",
|
||||
{
|
||||
"anonymous": "never",
|
||||
"named": "never",
|
||||
"asyncArrow": "always"
|
||||
}
|
||||
],
|
||||
"filenames/match-exported": ["error", "kebab"],
|
||||
"flowtype/array-style-simple-type": "error",
|
||||
"flowtype/array-style-complex-type": "error"
|
||||
},
|
||||
"settings": {
|
||||
"flowtype": {
|
||||
"onlyFilesWithFlowAnnotation": true
|
||||
}
|
||||
}
|
||||
}
|
||||
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
@@ -28,6 +28,8 @@ jobs:
|
||||
node-version: ${{ steps.nvm.outputs.NVMRC }}
|
||||
- name: Bootstrap packages
|
||||
run: npm run bootstrap
|
||||
- name: Lint
|
||||
run: npm run lint
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
- name: Build for smoke tests
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -29,3 +29,5 @@ graphql.config.json
|
||||
.graphqlconfig
|
||||
schema.graphql
|
||||
packages/insomnia-smoke-test/screenshots
|
||||
*.tsbuildinfo
|
||||
dist
|
||||
@@ -1,7 +0,0 @@
|
||||
**/bin/*
|
||||
**/build/*
|
||||
**/dist/*
|
||||
**/__fixtures__/*
|
||||
**/flow-typed/*
|
||||
*.md
|
||||
**/__snapshots__/
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"bracketSpacing": true,
|
||||
"jsxBracketSameLine": true,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"tabWidth": 2,
|
||||
"trailingComma": "all",
|
||||
"printWidth": 100
|
||||
}
|
||||
20
jest.config.js
Normal file
20
jest.config.js
Normal file
@@ -0,0 +1,20 @@
|
||||
/** @type { import('@jest/types').Config.InitialOptions } */
|
||||
module.exports = {
|
||||
globals: {
|
||||
'ts-jest': {
|
||||
isolatedModules: true,
|
||||
},
|
||||
},
|
||||
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
|
||||
testEnvironment: 'node',
|
||||
transform: {
|
||||
'^.+\\.tsx?$': 'ts-jest',
|
||||
},
|
||||
testMatch: [
|
||||
'**/*.test.ts',
|
||||
],
|
||||
verbose: false,
|
||||
resetMocks: true,
|
||||
resetModules: true,
|
||||
collectCoverage: false,
|
||||
};
|
||||
5309
package-lock.json
generated
5309
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
34
package.json
34
package.json
@@ -10,15 +10,15 @@
|
||||
},
|
||||
"homepage": "https://github.com/kong/insomnia#readme",
|
||||
"scripts": {
|
||||
"lint": "eslint . --ext .js,.json",
|
||||
"lint:fix": "npm run lint -- --fix",
|
||||
"build": "lerna run build --parallel",
|
||||
"lint": "lerna run lint --parallel --stream --no-bail",
|
||||
"lint:fix": "lerna run lint:fix --parallel --stream --no-bail",
|
||||
"bootstrap": "npm install && lerna bootstrap && lerna run --parallel --stream bootstrap",
|
||||
"version": "lerna version --exact --preid beta --force-publish",
|
||||
"version:dry": "npm run version -- --no-git-tag-version",
|
||||
"publish": "lerna publish from-git --pre-dist-tag beta",
|
||||
"clean": "lerna clean --yes && rimraf node_modules",
|
||||
"typecheck": "lerna run --parallel --stream typecheck",
|
||||
"test": "npm run lint && npm run typecheck && lerna run --stream --parallel test",
|
||||
"test": "lerna run --stream --parallel test",
|
||||
"test:pre-release": "npm run test --prefix packages/insomnia-app",
|
||||
"inso-start": "npm start --prefix packages/insomnia-inso",
|
||||
"app-start": "npm start --prefix packages/insomnia-app",
|
||||
@@ -35,7 +35,7 @@
|
||||
"test:smoke:cli": "npm run test:cli --prefix packages/insomnia-smoke-test"
|
||||
},
|
||||
"lint-staged": {
|
||||
"{packages,plugins}/**/*.{js,json}": [
|
||||
"{packages,plugins}/**/*.{js,json,ts,tsx}": [
|
||||
"eslint --fix"
|
||||
]
|
||||
},
|
||||
@@ -51,14 +51,19 @@
|
||||
"@babel/preset-env": "^7.4.3",
|
||||
"@babel/preset-flow": "^7.9.0",
|
||||
"@babel/preset-react": "^7.9.4",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^25.3.0",
|
||||
"@jest/types": "^26.6.2",
|
||||
"@types/chai": "^4.2.15",
|
||||
"@types/eslint": "4.16.1",
|
||||
"@types/jest": "^26.0.23",
|
||||
"@types/node": "^14.14.32",
|
||||
"@types/rimraf": "^3.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^4.16.1",
|
||||
"@typescript-eslint/parser": "^4.16.1",
|
||||
"babel-loader": "^8.0.5",
|
||||
"babel-plugin-inline-react-svg": "^1.1.0",
|
||||
"babel-plugin-styled-components": "^1.10.6",
|
||||
"babel-plugin-styled-components": "^1.12.0",
|
||||
"cross-env": "^7.0.2",
|
||||
"eslint": "^7.2.0",
|
||||
"eslint-config-prettier": "^6.11.0",
|
||||
"eslint-config-semistandard": "^15.0.0",
|
||||
"eslint-config-standard": "^14.1.1",
|
||||
"eslint-plugin-filenames": "^1.2.0",
|
||||
@@ -66,19 +71,20 @@
|
||||
"eslint-plugin-html": "^6.0.1",
|
||||
"eslint-plugin-import": "^2.20.2",
|
||||
"eslint-plugin-jest": "^21.15.1",
|
||||
"eslint-plugin-jest-formatting": "^2.0.1",
|
||||
"eslint-plugin-json": "^1.2.0",
|
||||
"eslint-plugin-node": "^6.0.1",
|
||||
"eslint-plugin-prettier": "^3.1.3",
|
||||
"eslint-plugin-promise": "^3.7.0",
|
||||
"eslint-plugin-react": "^7.19.0",
|
||||
"eslint-plugin-react-hooks": "^4.0.4",
|
||||
"eslint-plugin-standard": "^4.0.1",
|
||||
"flow-bin": "^0.122.0",
|
||||
"husky": "^4.2.5",
|
||||
"jest": "^25.3.0",
|
||||
"jest": "^26.6.3",
|
||||
"lerna": "^3.22.0",
|
||||
"lint-staged": "^10.2.2",
|
||||
"prettier": "^1.16.4",
|
||||
"rimraf": "^2.6.3"
|
||||
"rimraf": "^3.0.2",
|
||||
"ts-jest": "^26.5.6",
|
||||
"type-fest": "^1.0.2",
|
||||
"typescript": "^4.2.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
{
|
||||
"presets": [
|
||||
"@babel/preset-react",
|
||||
"@babel/preset-flow"
|
||||
],
|
||||
"plugins": [
|
||||
"babel-plugin-styled-components",
|
||||
[
|
||||
"@babel/plugin-proposal-decorators",
|
||||
{
|
||||
"legacy": true
|
||||
}
|
||||
],
|
||||
["@babel/plugin-proposal-class-properties", { "loose" : true }],
|
||||
"@babel/plugin-proposal-optional-chaining"
|
||||
],
|
||||
"env": {
|
||||
"development": {
|
||||
"plugins": [
|
||||
"react-hot-loader/babel"
|
||||
]
|
||||
},
|
||||
"test": {
|
||||
"presets": [
|
||||
[
|
||||
"@babel/preset-env",
|
||||
{
|
||||
"targets": {
|
||||
"node": "12"
|
||||
}
|
||||
}
|
||||
],
|
||||
// We need to add this again because it has to run before es2015
|
||||
"@babel/preset-flow"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
7
packages/insomnia-app/.eslintignore
Normal file
7
packages/insomnia-app/.eslintignore
Normal file
@@ -0,0 +1,7 @@
|
||||
build
|
||||
bin
|
||||
send-request
|
||||
**/main.min.js
|
||||
webpack.config.*.js
|
||||
webpack.config.*.js.map
|
||||
webpack.config.*.d.ts
|
||||
12
packages/insomnia-app/.eslintrc.js
Normal file
12
packages/insomnia-app/.eslintrc.js
Normal file
@@ -0,0 +1,12 @@
|
||||
/** @type { import('eslint').Linter.Config } */
|
||||
module.exports = {
|
||||
extends: '../../.eslintrc.js',
|
||||
rules: {
|
||||
'filenames/match-exported': 'off',
|
||||
camelcase: 'off',
|
||||
'@typescript-eslint/array-type': ['error', { default: 'generic', readonly: 'generic' }],
|
||||
'@typescript-eslint/no-use-before-define': 'off', // TSCONVERSION
|
||||
'@typescript-eslint/no-explicit-any': 'off', // TSCONVERSION
|
||||
// 'padding-line-between-statements': ['error', { blankLine: "always", prev: ["*"], next: "export"}],
|
||||
},
|
||||
};
|
||||
@@ -1,20 +0,0 @@
|
||||
[ignore]
|
||||
.*/node_modules/.*
|
||||
.*/__fixtures__/.*
|
||||
!<PROJECT_ROOT>/node_modules/graphql
|
||||
!<PROJECT_ROOT>/node_modules/iterall
|
||||
|
||||
[include]
|
||||
|
||||
[libs]
|
||||
|
||||
[options]
|
||||
esproposal.decorators=ignore
|
||||
module.file_ext=.css
|
||||
esproposal.optional_chaining=enable
|
||||
|
||||
[lints]
|
||||
|
||||
[untyped]
|
||||
<PROJECT_ROOT>/node_modules/graphql
|
||||
<PROJECT_ROOT>/node_modules/iterall
|
||||
3
packages/insomnia-app/.gitignore
vendored
3
packages/insomnia-app/.gitignore
vendored
@@ -2,5 +2,4 @@ dist
|
||||
build
|
||||
|
||||
# Generated
|
||||
app/main.min.js
|
||||
|
||||
app/main.min.js
|
||||
@@ -1,10 +1,6 @@
|
||||
import { configure, addDecorator } from '@storybook/react';
|
||||
import { withInfo } from '@storybook/addon-info';
|
||||
|
||||
addDecorator(
|
||||
withInfo({
|
||||
inline: true,
|
||||
}),
|
||||
);
|
||||
addDecorator(withInfo({ inline: true }));
|
||||
|
||||
configure(require.context('../app/ui', true, /\.stories\.js$/), module);
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
module.exports = async function({ config, mode }) {
|
||||
/** @type { import('webpack').Configuration } */
|
||||
module.exports = ({ config }) => {
|
||||
config.module.rules.push({
|
||||
test: /\.less$/,
|
||||
use: [
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
import * as db from '../common/database';
|
||||
import { database as db } from '../common/database';
|
||||
import * as models from '../models';
|
||||
import * as fetch from '../account/fetch';
|
||||
|
||||
export async function globalBeforeEach() {
|
||||
// Setup the local database in case it's used
|
||||
|
||||
fetch.setup('insomnia-tests', 'http://localhost:8000');
|
||||
await db.init(models.types(), { inMemoryOnly: true }, true, () => {});
|
||||
await db.init(
|
||||
models.types(),
|
||||
{
|
||||
inMemoryOnly: true,
|
||||
},
|
||||
true,
|
||||
() => {},
|
||||
);
|
||||
}
|
||||
@@ -1,10 +1,10 @@
|
||||
// @flow
|
||||
|
||||
import * as entities from '../ui/redux/modules/entities';
|
||||
|
||||
const reduxStateForTest = async (activeWorkspaceId: string): Promise<Object> => ({
|
||||
const reduxStateForTest = async (activeWorkspaceId: string): Promise<Record<string, any>> => ({
|
||||
entities: entities.reducer({}, entities.initializeWith(await entities.allDocs())),
|
||||
global: { activeWorkspaceId },
|
||||
global: {
|
||||
activeWorkspaceId,
|
||||
},
|
||||
});
|
||||
|
||||
export default reduxStateForTest;
|
||||
@@ -1 +1,2 @@
|
||||
import '@testing-library/jest-dom';
|
||||
import '@testing-library/jest-dom/extend-expect';
|
||||
@@ -1,30 +0,0 @@
|
||||
import 'whatwg-fetch';
|
||||
|
||||
const localStorageMock = (function() {
|
||||
let store = {};
|
||||
|
||||
return {
|
||||
getItem(key) {
|
||||
return store[key];
|
||||
},
|
||||
setItem(key, value) {
|
||||
store[key] = value.toString();
|
||||
},
|
||||
clear() {
|
||||
store = {};
|
||||
},
|
||||
};
|
||||
})();
|
||||
|
||||
global.__DEV__ = false;
|
||||
global.localStorage = localStorageMock;
|
||||
global.requestAnimationFrame = cb => process.nextTick(cb);
|
||||
global.require = require;
|
||||
|
||||
// Don't console log real logs that start with a tag (eg. [db] ...). It's annoying
|
||||
const log = console.log;
|
||||
global.console.log = (...args) => {
|
||||
if (!(typeof args[0] === 'string' && args[0][0] === '[')) {
|
||||
log(...args);
|
||||
}
|
||||
};
|
||||
49
packages/insomnia-app/app/__jest__/setup.ts
Normal file
49
packages/insomnia-app/app/__jest__/setup.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import 'whatwg-fetch';
|
||||
|
||||
const localStorageMock: Storage = (function() {
|
||||
let store: Record<string, string> = {};
|
||||
return {
|
||||
get length() {
|
||||
return Object.keys(store).length;
|
||||
},
|
||||
|
||||
clear() {
|
||||
store = {};
|
||||
},
|
||||
|
||||
getItem(key: string) {
|
||||
return store[key];
|
||||
},
|
||||
|
||||
key() {
|
||||
return null;
|
||||
},
|
||||
|
||||
removeItem(key: string) {
|
||||
delete store[key];
|
||||
},
|
||||
|
||||
setItem(key: string, value: string) {
|
||||
store[key] = value.toString();
|
||||
},
|
||||
};
|
||||
})();
|
||||
|
||||
global.__DEV__ = false;
|
||||
global.localStorage = localStorageMock;
|
||||
|
||||
global.requestAnimationFrame = (callback: FrameRequestCallback) => {
|
||||
process.nextTick(callback);
|
||||
// note: the spec indicates that the return of this function (the request id) is a non-zero number. hopefully returning 0 here will indicate that this is a mock if the return is ever to be used accidentally.
|
||||
return 0;
|
||||
};
|
||||
|
||||
global.require = require;
|
||||
// Don't console log real logs that start with a tag (eg. [db] ...). It's annoying
|
||||
const log = console.log;
|
||||
|
||||
global.console.log = (...args) => {
|
||||
if (!(typeof args[0] === 'string' && args[0][0] === '[')) {
|
||||
log(...args);
|
||||
}
|
||||
};
|
||||
@@ -1,9 +1,12 @@
|
||||
import { EventEmitter } from 'events';
|
||||
const grpcJs = jest.requireActual('@grpc/grpc-js');
|
||||
|
||||
const mockCallWrite = jest.fn();
|
||||
const mockCallEnd = jest.fn();
|
||||
const mockCallCancel = jest.fn();
|
||||
|
||||
export const status = grpcJs.status;
|
||||
|
||||
class MockCall extends EventEmitter {
|
||||
write(...args) {
|
||||
mockCallWrite(...args);
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = {};
|
||||
2
packages/insomnia-app/app/__mocks__/dummy.ts
Normal file
2
packages/insomnia-app/app/__mocks__/dummy.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = {};
|
||||
@@ -1,11 +1,9 @@
|
||||
import mkdirp from 'mkdirp';
|
||||
import events from 'events';
|
||||
import { EventEmitter } from 'events';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
|
||||
const RANDOM_STRING = Math.random()
|
||||
.toString()
|
||||
.replace('.', '');
|
||||
const RANDOM_STRING = Math.random().toString().replace('.', '');
|
||||
|
||||
const remote = {
|
||||
app: {
|
||||
@@ -14,14 +12,18 @@ const remote = {
|
||||
mkdirp.sync(dir);
|
||||
return dir;
|
||||
},
|
||||
|
||||
getLocale() {
|
||||
return 'en-US';
|
||||
},
|
||||
},
|
||||
net: {
|
||||
request(url) {
|
||||
const req = new events.EventEmitter();
|
||||
request() {
|
||||
const req = new EventEmitter();
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
req.end = function() {};
|
||||
|
||||
return req;
|
||||
},
|
||||
},
|
||||
@@ -29,32 +31,47 @@ const remote = {
|
||||
getAllWindows() {
|
||||
return [];
|
||||
},
|
||||
|
||||
getFocusedWindow() {
|
||||
return {
|
||||
getContentBounds() {
|
||||
return { width: 1900, height: 1060 };
|
||||
return {
|
||||
width: 1900,
|
||||
height: 1060,
|
||||
};
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
screen: {
|
||||
getPrimaryDisplay() {
|
||||
return { workAreaSize: { width: 1920, height: 1080 } };
|
||||
return {
|
||||
workAreaSize: {
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
const electron = {
|
||||
...remote,
|
||||
remote: remote,
|
||||
remote,
|
||||
ipcMain: {
|
||||
on: jest.fn(),
|
||||
|
||||
once() {},
|
||||
},
|
||||
ipcRenderer: {
|
||||
on: jest.fn(),
|
||||
removeAllListeners: jest.fn(),
|
||||
|
||||
once() {},
|
||||
|
||||
send: jest.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = electron;
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = {};
|
||||
2
packages/insomnia-app/app/__mocks__/font-scanner.ts
Normal file
2
packages/insomnia-app/app/__mocks__/font-scanner.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = {};
|
||||
@@ -2,6 +2,11 @@
|
||||
const git = jest.requireActual('isomorphic-git');
|
||||
const mock = jest.genMockFromModule('isomorphic-git');
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
git.push = mock.push;
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
git.clone = mock.clone;
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = git;
|
||||
@@ -3,8 +3,9 @@
|
||||
* The reason it is needed is because the Forge module loader doesn't
|
||||
* play along with Jest.
|
||||
*/
|
||||
const forge = require('../../node_modules/node-forge/lib/index');
|
||||
import forge from '../../node_modules/node-forge/lib/index';
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = {
|
||||
jsbn: forge.jsbn,
|
||||
util: forge.util,
|
||||
@@ -17,25 +18,28 @@ module.exports = {
|
||||
rsa: {
|
||||
setPublicKey() {
|
||||
return {
|
||||
encrypt(str) {
|
||||
encrypt(str: string) {
|
||||
return str;
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
setPrivateKey() {
|
||||
return {
|
||||
decrypt(str) {
|
||||
decrypt(str: string) {
|
||||
return str;
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
random: {
|
||||
getBytesSync(n) {
|
||||
getBytesSync(n: number) {
|
||||
let s = '';
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
s += 'a';
|
||||
}
|
||||
|
||||
return s;
|
||||
},
|
||||
},
|
||||
@@ -62,28 +66,35 @@ module.exports = {
|
||||
},
|
||||
},
|
||||
cipher: {
|
||||
createCipher(alg, key) {
|
||||
createCipher() {
|
||||
return {
|
||||
start(config) {
|
||||
this._config = config;
|
||||
},
|
||||
|
||||
update(buffer) {
|
||||
this._data = buffer;
|
||||
},
|
||||
|
||||
finish() {
|
||||
this.mode = { tag: 'tag' };
|
||||
this.mode = {
|
||||
tag: 'tag',
|
||||
};
|
||||
this.output = this._data;
|
||||
},
|
||||
};
|
||||
},
|
||||
createDecipher(alg, key) {
|
||||
|
||||
createDecipher() {
|
||||
return {
|
||||
start(config) {
|
||||
this._config = config;
|
||||
},
|
||||
|
||||
update(buffer) {
|
||||
this.output = buffer;
|
||||
},
|
||||
|
||||
finish() {
|
||||
return true;
|
||||
},
|
||||
@@ -1,6 +1,5 @@
|
||||
import { EventEmitter } from 'events';
|
||||
import fs from 'fs';
|
||||
|
||||
import { CurlAuth } from 'node-libcurl/dist/enum/CurlAuth';
|
||||
import { CurlCode } from 'node-libcurl/dist/enum/CurlCode';
|
||||
import { CurlInfoDebug } from 'node-libcurl/dist/enum/CurlInfoDebug';
|
||||
@@ -9,11 +8,60 @@ import { CurlNetrc } from 'node-libcurl/dist/enum/CurlNetrc';
|
||||
import { CurlHttpVersion } from 'node-libcurl/dist/enum/CurlHttpVersion';
|
||||
|
||||
class Curl extends EventEmitter {
|
||||
constructor() {
|
||||
super();
|
||||
this._options = {};
|
||||
this._meta = {};
|
||||
this._features = {};
|
||||
_options = {};
|
||||
_meta = {};
|
||||
_features = {};
|
||||
|
||||
// cannot include these from node-libcurl because they come from the native library
|
||||
// and it's not possible to load it while testing (as it was built to run with Electron)
|
||||
static info = {
|
||||
COOKIELIST: 'COOKIELIST',
|
||||
EFFECTIVE_URL: 'EFFECTIVE_URL',
|
||||
SIZE_DOWNLOAD: 'SIZE_DOWNLOAD',
|
||||
TOTAL_TIME: 'TOTAL_TIME',
|
||||
}
|
||||
|
||||
static option = {
|
||||
ACCEPT_ENCODING: 'ACCEPT_ENCODING',
|
||||
CAINFO: 'CAINFO',
|
||||
COOKIEFILE: 'COOKIEFILE',
|
||||
COOKIELIST: 'COOKIELIST',
|
||||
CUSTOMREQUEST: 'CUSTOMREQUEST',
|
||||
DEBUGFUNCTION: 'DEBUGFUNCTION',
|
||||
FOLLOWLOCATION: 'FOLLOWLOCATION',
|
||||
HTTPAUTH: 'HTTPAUTH',
|
||||
HTTPGET: 'HTTPGET',
|
||||
HTTPHEADER: 'HTTPHEADER',
|
||||
HTTPPOST: 'HTTPPOST',
|
||||
HTTP_VERSION: 'HTTP_VERSION',
|
||||
INFILESIZE_LARGE: 'INFILESIZE_LARGE',
|
||||
KEYPASSWD: 'KEYPASSWD',
|
||||
MAXREDIRS: 'MAXREDIRS',
|
||||
NETRC: 'NETRC',
|
||||
NOBODY: 'NOBODY',
|
||||
NOPROGRESS: 'NOPROGRESS',
|
||||
NOPROXY: 'NOPROXY',
|
||||
PASSWORD: 'PASSWORD',
|
||||
POST: 'POST',
|
||||
POSTFIELDS: 'POSTFIELDS',
|
||||
PROXY: 'PROXY',
|
||||
PROXYAUTH: 'PROXYAUTH',
|
||||
READDATA: 'READDATA',
|
||||
READFUNCTION: 'READFUNCTION',
|
||||
SSLCERT: 'SSLCERT',
|
||||
SSLCERTTYPE: 'SSLCERTTYPE',
|
||||
SSLKEY: 'SSLKEY',
|
||||
SSL_VERIFYHOST: 'SSL_VERIFYHOST',
|
||||
SSL_VERIFYPEER: 'SSL_VERIFYPEER',
|
||||
TIMEOUT_MS: 'TIMEOUT_MS',
|
||||
UNIX_SOCKET_PATH: 'UNIX_SOCKET_PATH',
|
||||
UPLOAD: 'UPLOAD',
|
||||
URL: 'URL',
|
||||
USERAGENT: 'USERAGENT',
|
||||
USERNAME: 'USERNAME',
|
||||
VERBOSE: 'VERBOSE',
|
||||
WRITEFUNCTION: 'WRITEFUNCTION',
|
||||
XFERINFOFUNCTION: 'XFERINFOFUNCTION',
|
||||
}
|
||||
|
||||
static getVersion() {
|
||||
@@ -36,13 +84,16 @@ class Curl extends EventEmitter {
|
||||
|
||||
if (name === Curl.option.READFUNCTION) {
|
||||
let body = '';
|
||||
|
||||
// Only limiting this to prevent infinite loops
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
const buffer = Buffer.alloc(23);
|
||||
const bytes = value(buffer);
|
||||
|
||||
if (bytes === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
body += buffer.slice(0, bytes);
|
||||
}
|
||||
|
||||
@@ -52,6 +103,7 @@ class Curl extends EventEmitter {
|
||||
if (name === Curl.option.COOKIELIST) {
|
||||
// This can be set multiple times
|
||||
this._options[name] = this._options[name] || [];
|
||||
|
||||
this._options[name].push(value);
|
||||
} else if (name === Curl.option.READDATA) {
|
||||
const { size } = fs.fstatSync(value);
|
||||
@@ -67,12 +119,16 @@ class Curl extends EventEmitter {
|
||||
switch (name) {
|
||||
case Curl.info.COOKIELIST:
|
||||
return [`#HttpOnly_.insomnia.rest\tTRUE\t/url/path\tTRUE\t${Date.now() / 1000}\tfoo\tbar`];
|
||||
|
||||
case Curl.info.EFFECTIVE_URL:
|
||||
return this._options[Curl.option.URL];
|
||||
|
||||
case Curl.info.TOTAL_TIME:
|
||||
return 700;
|
||||
|
||||
case Curl.info.SIZE_DOWNLOAD:
|
||||
return 800;
|
||||
|
||||
default:
|
||||
throw new Error(`Invalid info ${name}`);
|
||||
}
|
||||
@@ -87,8 +143,9 @@ class Curl extends EventEmitter {
|
||||
features: this._features,
|
||||
}),
|
||||
);
|
||||
|
||||
this.emit('data', data);
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
this._options.WRITEFUNCTION(data);
|
||||
|
||||
process.nextTick(() => {
|
||||
@@ -110,78 +167,32 @@ class Curl extends EventEmitter {
|
||||
close() {}
|
||||
}
|
||||
|
||||
// cannot include these from node-libcurl because they come from the native library
|
||||
// and it's not possible to load it while testing (as it was built to run with Electron)
|
||||
Curl.info = {
|
||||
COOKIELIST: 'COOKIELIST',
|
||||
EFFECTIVE_URL: 'EFFECTIVE_URL',
|
||||
SIZE_DOWNLOAD: 'SIZE_DOWNLOAD',
|
||||
TOTAL_TIME: 'TOTAL_TIME',
|
||||
};
|
||||
|
||||
Curl.option = {
|
||||
ACCEPT_ENCODING: 'ACCEPT_ENCODING',
|
||||
CAINFO: 'CAINFO',
|
||||
COOKIEFILE: 'COOKIEFILE',
|
||||
COOKIELIST: 'COOKIELIST',
|
||||
CUSTOMREQUEST: 'CUSTOMREQUEST',
|
||||
DEBUGFUNCTION: 'DEBUGFUNCTION',
|
||||
FOLLOWLOCATION: 'FOLLOWLOCATION',
|
||||
HTTPAUTH: 'HTTPAUTH',
|
||||
HTTPGET: 'HTTPGET',
|
||||
HTTPHEADER: 'HTTPHEADER',
|
||||
HTTPPOST: 'HTTPPOST',
|
||||
HTTP_VERSION: 'HTTP_VERSION',
|
||||
INFILESIZE_LARGE: 'INFILESIZE_LARGE',
|
||||
KEYPASSWD: 'KEYPASSWD',
|
||||
MAXREDIRS: 'MAXREDIRS',
|
||||
NETRC: 'NETRC',
|
||||
NOBODY: 'NOBODY',
|
||||
NOPROGRESS: 'NOPROGRESS',
|
||||
NOPROXY: 'NOPROXY',
|
||||
PASSWORD: 'PASSWORD',
|
||||
POST: 'POST',
|
||||
POSTFIELDS: 'POSTFIELDS',
|
||||
PROXY: 'PROXY',
|
||||
PROXYAUTH: 'PROXYAUTH',
|
||||
READDATA: 'READDATA',
|
||||
READFUNCTION: 'READFUNCTION',
|
||||
SSLCERT: 'SSLCERT',
|
||||
SSLCERTTYPE: 'SSLCERTTYPE',
|
||||
SSLKEY: 'SSLKEY',
|
||||
SSL_VERIFYHOST: 'SSL_VERIFYHOST',
|
||||
SSL_VERIFYPEER: 'SSL_VERIFYPEER',
|
||||
TIMEOUT_MS: 'TIMEOUT_MS',
|
||||
UNIX_SOCKET_PATH: 'UNIX_SOCKET_PATH',
|
||||
UPLOAD: 'UPLOAD',
|
||||
URL: 'URL',
|
||||
USERAGENT: 'USERAGENT',
|
||||
USERNAME: 'USERNAME',
|
||||
VERBOSE: 'VERBOSE',
|
||||
WRITEFUNCTION: 'WRITEFUNCTION',
|
||||
XFERINFOFUNCTION: 'XFERINFOFUNCTION',
|
||||
};
|
||||
|
||||
// This is just to make it easier to test
|
||||
// node-libcurl Enum exports (CurlAuth, CurlCode, etc) are TypeScript enums, which are
|
||||
// converted to an object with format:
|
||||
// { EnumKey: 0, 0: EnumKey }
|
||||
// We only want the named members (non-number ones)
|
||||
/**
|
||||
* This is just to make it easier to test
|
||||
* node-libcurl Enum exports (CurlAuth, CurlCode, etc) are TypeScript enums, which are converted to an object with format:
|
||||
* ```ts
|
||||
* const myEnum = {
|
||||
* EnumKey: 0,
|
||||
* 0: EnumKey,
|
||||
* }
|
||||
* ```
|
||||
* We only want the named members (non-number ones)
|
||||
*/
|
||||
const getTsEnumOnlyWithNamedMembers = enumObj => {
|
||||
let obj = {};
|
||||
|
||||
for (const member in enumObj) {
|
||||
if (typeof enumObj[member] === 'number') {
|
||||
obj = {
|
||||
...obj,
|
||||
[member]: member,
|
||||
};
|
||||
obj = { ...obj, [member]: member };
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
};
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = {
|
||||
Curl: Curl,
|
||||
Curl,
|
||||
CurlAuth: getTsEnumOnlyWithNamedMembers(CurlAuth),
|
||||
CurlCode: getTsEnumOnlyWithNamedMembers(CurlCode),
|
||||
CurlInfoDebug: getTsEnumOnlyWithNamedMembers(CurlInfoDebug),
|
||||
@@ -4,36 +4,29 @@ describe('install.js', () => {
|
||||
describe('containsOnlyDeprecationWarning', () => {
|
||||
it('should return true when all lines in stderr are deprecation warnings', () => {
|
||||
const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
const stderr =
|
||||
// Warning #1
|
||||
const stderr = // Warning #1
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
'xyz is no longer maintained and not recommended for usage due to the number of issues. ' +
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\r\n' +
|
||||
// Warning #2
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\r\n' + // Warning #2
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
'xyz is no longer maintained and not recommended for usage due to the number of issues. ' +
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\n' +
|
||||
// Warning #3
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\n' + // Warning #3
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
'xyz is no longer maintained and not recommended for usage due to the number of issues. ' +
|
||||
'Please, upgrade your dependencies to the actual version of xyz.';
|
||||
expect(containsOnlyDeprecationWarnings(stderr)).toBe(true);
|
||||
expect(consoleWarnSpy).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should return false when stderr contains a deprecation warning and an error', () => {
|
||||
const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
|
||||
const stderr =
|
||||
// Warning #1
|
||||
const stderr = // Warning #1
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
'xyz is no longer maintained and not recommended for usage due to the number of issues. ' +
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\r\n' +
|
||||
// Error #1
|
||||
'Please, upgrade your dependencies to the actual version of xyz.\r\n' + // Error #1
|
||||
'error https://npm.example.net/@types%example/-/nello-1.3.5.tgz:' +
|
||||
'Integrity check failed for "@types/example"' +
|
||||
'(computed integrity doesn\'t match our records, got "sha512-z4kkSfaPg==")\n' +
|
||||
// Warning #2
|
||||
'(computed integrity doesn\'t match our records, got "sha512-z4kkSfaPg==")\n' + // Warning #2
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
'xyz is no longer maintained and not recommended for usage due to the number of issues. ' +
|
||||
'Please, upgrade your dependencies to the actual version of xyz.';
|
||||
@@ -41,12 +34,14 @@ describe('install.js', () => {
|
||||
expect(consoleWarnSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isDeprecatedDependencies', () => {
|
||||
it('should not match when the message is falsy', () => {
|
||||
expect(isDeprecatedDependencies('')).toBe(false);
|
||||
expect(isDeprecatedDependencies(null)).toBe(false);
|
||||
expect(isDeprecatedDependencies(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
it('should match with multiple nested dependencies', () => {
|
||||
const msg =
|
||||
'warning insomnia-plugin-xxx-yyy > xyz > xyz > xyz > xyz > xyz: ' +
|
||||
@@ -54,6 +49,7 @@ describe('install.js', () => {
|
||||
'Please, upgrade your dependencies to the actual version of xyz.';
|
||||
expect(isDeprecatedDependencies(msg)).toBe(true);
|
||||
});
|
||||
|
||||
it('should match with one nested dependency', () => {
|
||||
const msg =
|
||||
'warning insomnia-plugin-xxx-yyy > xyz: ' +
|
||||
@@ -61,12 +57,14 @@ describe('install.js', () => {
|
||||
'Please, upgrade your dependencies to the actual version of xyz.';
|
||||
expect(isDeprecatedDependencies(msg)).toBe(true);
|
||||
});
|
||||
|
||||
it('should not match with an unrelated warning', () => {
|
||||
const msg =
|
||||
'warning You are using Node "6.0.0" which is not supported and may encounter bugs or unexpected behaviour. ' +
|
||||
'Yarn supports the following server range: "^4.8.0 || ^5.7.0 || ^6.2.2 || >=8.0.0"';
|
||||
expect(isDeprecatedDependencies(msg)).toBe(false);
|
||||
});
|
||||
|
||||
it('should not match with an unrelated error', () => {
|
||||
const msg =
|
||||
'error https://npm.example.net/@types%example/-/nello-1.3.5.tgz: ' +
|
||||
@@ -3,6 +3,7 @@ import { globalBeforeEach } from '../__jest__/before-each';
|
||||
|
||||
describe('package.json', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('all packed dependencies should exist', () => {
|
||||
for (const name of globalPackage.packedDependencies) {
|
||||
const version = globalPackage.dependencies[name];
|
||||
@@ -1,7 +1,6 @@
|
||||
// Broken for now...
|
||||
// import * as renderer from '../renderer';
|
||||
// import { globalBeforeEach } from '../__jest__/before-each';
|
||||
|
||||
// describe('imports', () => {
|
||||
// beforeEach(globalBeforeEach);
|
||||
// it('ui module should import successfully', () => {
|
||||
@@ -5,7 +5,6 @@ describe('crypt', () => {
|
||||
it('derives a key properly', async () => {
|
||||
const result = await crypt.deriveKey('Password', 'email', 'salt');
|
||||
const expected = 'fb058595c02ae9660ed7098273bf50e49407942ecc437bf317638d76c4578eae';
|
||||
|
||||
expect(result).toBe(expected);
|
||||
});
|
||||
});
|
||||
@@ -19,10 +18,8 @@ describe('crypt', () => {
|
||||
key_ops: ['encrypt', 'decrypt'],
|
||||
k: '5hs1f2xuiNPHUp11i6SWlsqYpWe_hWPcEKucZlwBfFE',
|
||||
};
|
||||
|
||||
const resultEncrypted = crypt.encryptAES(key, 'Hello World!', 'additional data');
|
||||
const resultDecrypted = crypt.decryptAES(key, resultEncrypted);
|
||||
|
||||
expect(resultDecrypted).toEqual('Hello World!');
|
||||
});
|
||||
});
|
||||
@@ -36,10 +33,8 @@ describe('crypt', () => {
|
||||
key_ops: ['encrypt', 'decrypt'],
|
||||
k: '5hs1f2xuiNPHUp11i6SWlsqYpWe_hWPcEKucZlwBfFE',
|
||||
};
|
||||
|
||||
const resultEncrypted = crypt.encryptAESBuffer(key, Buffer.from('Hello World!', 'utf8'));
|
||||
const resultDecrypted = crypt.decryptAESToBuffer(key, resultEncrypted);
|
||||
|
||||
expect(resultDecrypted).toEqual(Buffer.from('Hello World!', 'utf8'));
|
||||
});
|
||||
});
|
||||
@@ -78,7 +73,6 @@ describe('crypt', () => {
|
||||
'FMVx12_ioueu052xgFxWdIS_lImUGTrw8Iiw_kp-KKsONtofs91A5GVRtyg_wdXpG2qyomaet1hTlHhLnoI23L2a' +
|
||||
'EkQ87SokIpoR9lR8jfIRwLwKKXMc33_bRRQXvWop0yvTzmSGaC0gULcqj0OHiUR1u9Ver1ZvgGz2jh4mP_E',
|
||||
};
|
||||
|
||||
const publicKey = {
|
||||
alg: 'RSA-OAEP-256',
|
||||
kty: 'RSA',
|
||||
@@ -90,12 +84,9 @@ describe('crypt', () => {
|
||||
'Om4kAhLVfmdzYB1nZmq9xH1O8_acIUoWDbAWX6fIXhPhn7jCuCO4WZQVDxZ5_bc27UVhR4VYe2Our7aESUQ5Zy' +
|
||||
'MtYNymo9Oy0y_m3OS6W_JR_feXBbxRCBuGf7fjnvV9ohx1ZqLpJFx9_xL7naoVCQhBDfVE31iYz3L6KTIhFQ',
|
||||
};
|
||||
|
||||
const resultEncrypted = crypt.encryptRSAWithJWK(publicKey, 'aaaaaaaaa');
|
||||
const resultDecrypted = crypt.decryptRSAWithJWK(privateKey, resultEncrypted);
|
||||
|
||||
const expectedDecrypted = 'aaaaaaaaa';
|
||||
|
||||
expect(resultDecrypted.toString()).toEqual(expectedDecrypted);
|
||||
});
|
||||
});
|
||||
@@ -36,9 +36,11 @@ export function encryptRSAWithJWK(publicKeyJWK, plaintext) {
|
||||
const encodedPlaintext = encodeURIComponent(plaintext);
|
||||
|
||||
const n = _b64UrlToBigInt(publicKeyJWK.n);
|
||||
const e = _b64UrlToBigInt(publicKeyJWK.e);
|
||||
const publicKey = forge.rsa.setPublicKey(n, e);
|
||||
|
||||
const e = _b64UrlToBigInt(publicKeyJWK.e);
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION appears not to be exported for some reason
|
||||
const publicKey = forge.rsa.setPublicKey(n, e);
|
||||
const encrypted = publicKey.encrypt(encodedPlaintext, 'RSA-OAEP', {
|
||||
md: forge.md.sha256.create(),
|
||||
});
|
||||
@@ -47,20 +49,27 @@ export function encryptRSAWithJWK(publicKeyJWK, plaintext) {
|
||||
|
||||
export function decryptRSAWithJWK(privateJWK, encryptedBlob) {
|
||||
const n = _b64UrlToBigInt(privateJWK.n);
|
||||
|
||||
const e = _b64UrlToBigInt(privateJWK.e);
|
||||
|
||||
const d = _b64UrlToBigInt(privateJWK.d);
|
||||
|
||||
const p = _b64UrlToBigInt(privateJWK.p);
|
||||
|
||||
const q = _b64UrlToBigInt(privateJWK.q);
|
||||
|
||||
const dP = _b64UrlToBigInt(privateJWK.dp);
|
||||
|
||||
const dQ = _b64UrlToBigInt(privateJWK.dq);
|
||||
|
||||
const qInv = _b64UrlToBigInt(privateJWK.qi);
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION appears not to be exported for some reason
|
||||
const privateKey = forge.rsa.setPrivateKey(n, e, d, p, q, dP, dQ, qInv);
|
||||
const bytes = forge.util.hexToBytes(encryptedBlob);
|
||||
const decrypted = privateKey.decrypt(bytes, 'RSA-OAEP', {
|
||||
md: forge.md.sha256.create(),
|
||||
});
|
||||
|
||||
return decodeURIComponent(decrypted);
|
||||
}
|
||||
|
||||
@@ -76,18 +85,21 @@ export function encryptAESBuffer(jwkOrKey, buff, additionalData = '') {
|
||||
// TODO: Add assertion checks for JWK
|
||||
const rawKey = typeof jwkOrKey === 'string' ? jwkOrKey : _b64UrlToHex(jwkOrKey.k);
|
||||
const key = forge.util.hexToBytes(rawKey);
|
||||
|
||||
const iv = forge.random.getBytesSync(12);
|
||||
const cipher = forge.cipher.createCipher('AES-GCM', key);
|
||||
|
||||
cipher.start({ additionalData, iv, tagLength: 128 });
|
||||
cipher.start({
|
||||
additionalData,
|
||||
iv,
|
||||
tagLength: 128,
|
||||
});
|
||||
cipher.update(forge.util.createBuffer(buff));
|
||||
cipher.finish();
|
||||
|
||||
return {
|
||||
iv: forge.util.bytesToHex(iv),
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
t: forge.util.bytesToHex(cipher.mode.tag),
|
||||
ad: forge.util.bytesToHex(additionalData),
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
d: forge.util.bytesToHex(cipher.output),
|
||||
};
|
||||
}
|
||||
@@ -104,21 +116,23 @@ export function encryptAES(jwkOrKey, plaintext, additionalData = '') {
|
||||
// TODO: Add assertion checks for JWK
|
||||
const rawKey = typeof jwkOrKey === 'string' ? jwkOrKey : _b64UrlToHex(jwkOrKey.k);
|
||||
const key = forge.util.hexToBytes(rawKey);
|
||||
|
||||
const iv = forge.random.getBytesSync(12);
|
||||
const cipher = forge.cipher.createCipher('AES-GCM', key);
|
||||
|
||||
// Plaintext could contain weird unicode, so we have to encode that
|
||||
const encodedPlaintext = encodeURIComponent(plaintext);
|
||||
|
||||
cipher.start({ additionalData, iv, tagLength: 128 });
|
||||
cipher.start({
|
||||
additionalData,
|
||||
iv,
|
||||
tagLength: 128,
|
||||
});
|
||||
cipher.update(forge.util.createBuffer(encodedPlaintext));
|
||||
cipher.finish();
|
||||
|
||||
return {
|
||||
iv: forge.util.bytesToHex(iv),
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
t: forge.util.bytesToHex(cipher.mode.tag),
|
||||
ad: forge.util.bytesToHex(additionalData),
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
d: forge.util.bytesToHex(cipher.output),
|
||||
};
|
||||
}
|
||||
@@ -134,19 +148,17 @@ export function decryptAES(jwkOrKey, encryptedResult) {
|
||||
// TODO: Add assertion checks for JWK
|
||||
const rawKey = typeof jwkOrKey === 'string' ? jwkOrKey : _b64UrlToHex(jwkOrKey.k);
|
||||
const key = forge.util.hexToBytes(rawKey);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Decrypt with AES-GCM //
|
||||
// ~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
const decipher = forge.cipher.createDecipher('AES-GCM', key);
|
||||
decipher.start({
|
||||
iv: forge.util.hexToBytes(encryptedResult.iv),
|
||||
tagLength: encryptedResult.t.length * 4,
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
tag: forge.util.hexToBytes(encryptedResult.t),
|
||||
additionalData: forge.util.hexToBytes(encryptedResult.ad),
|
||||
});
|
||||
|
||||
decipher.update(forge.util.createBuffer(forge.util.hexToBytes(encryptedResult.d)));
|
||||
|
||||
if (decipher.finish()) {
|
||||
@@ -166,22 +178,21 @@ export function decryptAESToBuffer(jwkOrKey, encryptedResult) {
|
||||
// TODO: Add assertion checks for JWK
|
||||
const rawKey = typeof jwkOrKey === 'string' ? jwkOrKey : _b64UrlToHex(jwkOrKey.k);
|
||||
const key = forge.util.hexToBytes(rawKey);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Decrypt with AES-GCM //
|
||||
// ~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
const decipher = forge.cipher.createDecipher('AES-GCM', key);
|
||||
decipher.start({
|
||||
iv: forge.util.hexToBytes(encryptedResult.iv),
|
||||
tagLength: encryptedResult.t.length * 4,
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
tag: forge.util.hexToBytes(encryptedResult.t),
|
||||
additionalData: forge.util.hexToBytes(encryptedResult.ad),
|
||||
});
|
||||
|
||||
decipher.update(forge.util.createBuffer(forge.util.hexToBytes(encryptedResult.d)));
|
||||
|
||||
if (decipher.finish()) {
|
||||
// @ts-expect-error -- TSCONVERSION needs to be converted to string
|
||||
return Buffer.from(forge.util.bytesToHex(decipher.output), 'hex');
|
||||
} else {
|
||||
throw new Error('Failed to decrypt data');
|
||||
@@ -210,14 +221,19 @@ export function srpGenKey() {
|
||||
*/
|
||||
export async function generateAES256Key() {
|
||||
const c = window.crypto;
|
||||
// @ts-expect-error -- TSCONVERSION: likely needs a module augmentation for webkit
|
||||
const subtle = c ? c.subtle || c.webkitSubtle : null;
|
||||
|
||||
if (subtle) {
|
||||
console.log('[crypt] Using Native AES Key Generation');
|
||||
const key = await subtle.generateKey({ name: 'AES-GCM', length: 256 }, true, [
|
||||
'encrypt',
|
||||
'decrypt',
|
||||
]);
|
||||
const key = await subtle.generateKey(
|
||||
{
|
||||
name: 'AES-GCM',
|
||||
length: 256,
|
||||
},
|
||||
true,
|
||||
['encrypt', 'decrypt'],
|
||||
);
|
||||
return subtle.exportKey('jwk', key);
|
||||
} else {
|
||||
console.log('[crypt] Using Fallback Forge AES Key Generation');
|
||||
@@ -243,7 +259,6 @@ export async function generateKeyPairJWK() {
|
||||
|
||||
if (subtle) {
|
||||
console.log('[crypt] Using Native RSA Generation');
|
||||
|
||||
const pair = await subtle.generateKey(
|
||||
{
|
||||
name: 'RSA-OAEP',
|
||||
@@ -254,15 +269,16 @@ export async function generateKeyPairJWK() {
|
||||
true,
|
||||
['encrypt', 'decrypt'],
|
||||
);
|
||||
|
||||
return {
|
||||
publicKey: await subtle.exportKey('jwk', pair.publicKey),
|
||||
privateKey: await subtle.exportKey('jwk', pair.privateKey),
|
||||
};
|
||||
} else {
|
||||
console.log('[crypt] Using Forge RSA Generation');
|
||||
|
||||
const pair = forge.pki.rsa.generateKeyPair({ bits: 2048, e: 0x10001 });
|
||||
const pair = forge.pki.rsa.generateKeyPair({
|
||||
bits: 2048,
|
||||
e: 0x10001,
|
||||
});
|
||||
const privateKey = {
|
||||
alg: 'RSA-OAEP-256',
|
||||
kty: 'RSA',
|
||||
@@ -277,7 +293,6 @@ export async function generateKeyPairJWK() {
|
||||
q: _bigIntToB64Url(pair.privateKey.q),
|
||||
qi: _bigIntToB64Url(pair.privateKey.qInv),
|
||||
};
|
||||
|
||||
const publicKey = {
|
||||
alg: 'RSA-OAEP-256',
|
||||
kty: 'RSA',
|
||||
@@ -285,8 +300,10 @@ export async function generateKeyPairJWK() {
|
||||
e: _bigIntToB64Url(pair.publicKey.e),
|
||||
n: _bigIntToB64Url(pair.publicKey.n),
|
||||
};
|
||||
|
||||
return { privateKey, publicKey };
|
||||
return {
|
||||
privateKey,
|
||||
publicKey,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -302,7 +319,7 @@ export async function generateKeyPairJWK() {
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function _hkdfSalt(rawSalt, rawEmail) {
|
||||
return new Promise(resolve => {
|
||||
return new Promise<string>(resolve => {
|
||||
const hkdf = new HKDF('sha256', rawSalt, rawEmail);
|
||||
hkdf.derive('', DEFAULT_BYTE_LENGTH, buffer => resolve(buffer.toString('hex')));
|
||||
});
|
||||
@@ -321,18 +338,15 @@ function _bigIntToB64Url(n) {
|
||||
|
||||
function _hexToB64Url(h) {
|
||||
const bytes = forge.util.hexToBytes(h);
|
||||
return window
|
||||
.btoa(bytes)
|
||||
.replace(/=/g, '')
|
||||
.replace(/\+/g, '-')
|
||||
.replace(/\//g, '_');
|
||||
return window.btoa(bytes).replace(/=/g, '').replace(/\+/g, '-').replace(/\//g, '_');
|
||||
}
|
||||
|
||||
function _b64UrlToBigInt(s) {
|
||||
function _b64UrlToBigInt(s: string) {
|
||||
// @ts-expect-error -- TSCONVERSION needs investigation in forge types
|
||||
return new forge.jsbn.BigInteger(_b64UrlToHex(s), 16);
|
||||
}
|
||||
|
||||
function _b64UrlToHex(s) {
|
||||
function _b64UrlToHex(s: string) {
|
||||
const b64 = s.replace(/-/g, '+').replace(/_/g, '/');
|
||||
return forge.util.bytesToHex(window.atob(b64));
|
||||
}
|
||||
@@ -346,22 +360,21 @@ function _b64UrlToHex(s) {
|
||||
async function _pbkdf2Passphrase(passphrase, salt) {
|
||||
if (window.crypto && window.crypto.subtle) {
|
||||
console.log('[crypt] Using native PBKDF2');
|
||||
|
||||
const k = await window.crypto.subtle.importKey(
|
||||
'raw',
|
||||
Buffer.from(passphrase, 'utf8'),
|
||||
{ name: 'PBKDF2' },
|
||||
{
|
||||
name: 'PBKDF2',
|
||||
},
|
||||
false,
|
||||
['deriveBits'],
|
||||
);
|
||||
|
||||
const algo = {
|
||||
name: 'PBKDF2',
|
||||
salt: Buffer.from(salt, 'hex'),
|
||||
iterations: DEFAULT_PBKDF2_ITERATIONS,
|
||||
hash: 'SHA-256',
|
||||
};
|
||||
|
||||
const derivedKeyRaw = await window.crypto.subtle.deriveBits(algo, k, DEFAULT_BYTE_LENGTH * 8);
|
||||
return Buffer.from(derivedKeyRaw).toString('hex');
|
||||
} else {
|
||||
@@ -373,7 +386,6 @@ async function _pbkdf2Passphrase(passphrase, salt) {
|
||||
DEFAULT_BYTE_LENGTH,
|
||||
forge.md.sha256.create(),
|
||||
);
|
||||
|
||||
return forge.util.bytesToHex(derivedKeyRaw);
|
||||
}
|
||||
}
|
||||
@@ -1,17 +1,16 @@
|
||||
import { delay } from '../common/misc';
|
||||
import { parse as urlParse } from 'url';
|
||||
import zlib from 'zlib';
|
||||
|
||||
let _userAgent = '';
|
||||
let _baseUrl = '';
|
||||
const _commandListeners = [];
|
||||
const _commandListeners: Array<Function> = [];
|
||||
|
||||
export function setup(userAgent, baseUrl) {
|
||||
_userAgent = userAgent;
|
||||
_baseUrl = baseUrl;
|
||||
}
|
||||
|
||||
export function onCommand(callback) {
|
||||
export function onCommand(callback: Function) {
|
||||
_commandListeners.push(callback);
|
||||
}
|
||||
|
||||
@@ -32,7 +31,11 @@ async function _fetch(method, path, obj, sessionId, compressBody = false, retrie
|
||||
throw new Error(`No session ID provided to ${method}:${path}`);
|
||||
}
|
||||
|
||||
const config = {
|
||||
const config: {
|
||||
method: string;
|
||||
headers: HeadersInit;
|
||||
body?: string | Buffer;
|
||||
} = {
|
||||
method: method,
|
||||
headers: {},
|
||||
};
|
||||
@@ -56,7 +59,9 @@ async function _fetch(method, path, obj, sessionId, compressBody = false, retrie
|
||||
}
|
||||
|
||||
let response;
|
||||
|
||||
const url = _getUrl(path);
|
||||
|
||||
try {
|
||||
response = await window.fetch(url, config);
|
||||
|
||||
@@ -69,12 +74,14 @@ async function _fetch(method, path, obj, sessionId, compressBody = false, retrie
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to fetch '${url}'`);
|
||||
}
|
||||
|
||||
const uri = response.headers.get('x-insomnia-command');
|
||||
uri && _notifyCommandListeners(uri);
|
||||
|
||||
if (!response.ok) {
|
||||
const err = new Error(`Response ${response.status} for ${path}`);
|
||||
err.message = await response.text();
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
err.statusCode = response.status;
|
||||
throw err;
|
||||
}
|
||||
@@ -96,7 +103,6 @@ function _getUrl(path) {
|
||||
|
||||
function _notifyCommandListeners(uri) {
|
||||
const parsed = urlParse(uri, true);
|
||||
|
||||
const command = `${parsed.hostname}${parsed.pathname}`;
|
||||
const args = JSON.parse(JSON.stringify(parsed.query));
|
||||
|
||||
@@ -2,11 +2,12 @@ import * as srp from 'srp-js';
|
||||
import * as crypt from './crypt';
|
||||
import * as fetch from './fetch';
|
||||
|
||||
const loginCallbacks = [];
|
||||
const loginCallbacks: Array<Function> = [];
|
||||
|
||||
function _callCallbacks() {
|
||||
const loggedIn = isLoggedIn();
|
||||
console.log('[session] Sync state changed loggedIn=' + loggedIn);
|
||||
|
||||
for (const cb of loginCallbacks) {
|
||||
if (typeof cb === 'function') {
|
||||
cb(loggedIn);
|
||||
@@ -14,7 +15,7 @@ function _callCallbacks() {
|
||||
}
|
||||
}
|
||||
|
||||
export function onLoginLogout(callback) {
|
||||
export function onLoginLogout(callback: Function) {
|
||||
loginCallbacks.push(callback);
|
||||
}
|
||||
|
||||
@@ -23,14 +24,13 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
// ~~~~~~~~~~~~~~~ //
|
||||
// Sanitize Inputs //
|
||||
// ~~~~~~~~~~~~~~~ //
|
||||
|
||||
const email = _sanitizeEmail(rawEmail);
|
||||
|
||||
const passphrase = _sanitizePassphrase(rawPassphrase);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Fetch Salt and Submit A To Server //
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
const { saltKey, saltAuth } = await _getAuthSalts(email);
|
||||
const authSecret = await crypt.deriveKey(passphrase, email, saltKey);
|
||||
const secret1 = await crypt.srpGenKey();
|
||||
@@ -39,6 +39,7 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
Buffer.from(saltAuth, 'hex'),
|
||||
Buffer.from(email, 'utf8'),
|
||||
Buffer.from(authSecret, 'hex'),
|
||||
// @ts-expect-error -- TSCONVERSION missing type from srpGenKey
|
||||
Buffer.from(secret1, 'hex'),
|
||||
);
|
||||
const srpA = c.computeA().toString('hex');
|
||||
@@ -50,11 +51,9 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
},
|
||||
null,
|
||||
);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Compute and Submit M1 //
|
||||
// ~~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
c.setB(Buffer.from(srpB, 'hex'));
|
||||
const srpM1 = c.computeM1().toString('hex');
|
||||
const { srpM2 } = await fetch.post(
|
||||
@@ -65,20 +64,15 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
},
|
||||
null,
|
||||
);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Verify Server Identity M2 //
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
c.checkM2(Buffer.from(srpM2, 'hex'));
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
// Initialize the Session //
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
// Compute K (used for session ID)
|
||||
const sessionId = c.computeK().toString('hex');
|
||||
|
||||
// Get and store some extra info (salts and keys)
|
||||
const {
|
||||
publicKey,
|
||||
@@ -89,10 +83,8 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
firstName,
|
||||
lastName,
|
||||
} = await _whoami(sessionId);
|
||||
|
||||
const derivedSymmetricKey = await crypt.deriveKey(passphrase, email, saltEnc);
|
||||
const symmetricKeyStr = await crypt.decryptAES(derivedSymmetricKey, JSON.parse(encSymmetricKey));
|
||||
|
||||
// Store the information for later
|
||||
setSessionData(
|
||||
sessionId,
|
||||
@@ -107,20 +99,18 @@ export async function login(rawEmail, rawPassphrase) {
|
||||
|
||||
_callCallbacks();
|
||||
}
|
||||
|
||||
export async function changePasswordWithToken(rawNewPassphrase, confirmationCode) {
|
||||
// Sanitize inputs
|
||||
const newPassphrase = _sanitizePassphrase(rawNewPassphrase);
|
||||
|
||||
const newEmail = getEmail(); // Use the same one
|
||||
|
||||
// Fetch some things
|
||||
const { saltEnc, encSymmetricKey } = await _whoami();
|
||||
const { saltKey, saltAuth } = await _getAuthSalts(newEmail);
|
||||
|
||||
// Generate some secrets for the user base'd on password
|
||||
const newSecret = await crypt.deriveKey(newPassphrase, newEmail, saltEnc);
|
||||
const newAuthSecret = await crypt.deriveKey(newPassphrase, newEmail, saltKey);
|
||||
|
||||
const newVerifier = srp
|
||||
.computeVerifier(
|
||||
_getSrpParams(),
|
||||
@@ -129,12 +119,10 @@ export async function changePasswordWithToken(rawNewPassphrase, confirmationCode
|
||||
Buffer.from(newAuthSecret, 'hex'),
|
||||
)
|
||||
.toString('hex');
|
||||
|
||||
// Re-encrypt existing keys with new secret
|
||||
const symmetricKey = JSON.stringify(_getSymmetricKey());
|
||||
const newEncSymmetricKeyJSON = crypt.encryptAES(newSecret, symmetricKey);
|
||||
const newEncSymmetricKey = JSON.stringify(newEncSymmetricKeyJSON);
|
||||
|
||||
return fetch.post(
|
||||
'/auth/change-password',
|
||||
{
|
||||
@@ -147,21 +135,18 @@ export async function changePasswordWithToken(rawNewPassphrase, confirmationCode
|
||||
getCurrentSessionId(),
|
||||
);
|
||||
}
|
||||
|
||||
export function sendPasswordChangeCode() {
|
||||
return fetch.post('/auth/send-password-code', null, getCurrentSessionId());
|
||||
}
|
||||
|
||||
export function getPublicKey() {
|
||||
return _getSessionData().publicKey;
|
||||
}
|
||||
|
||||
export function getPrivateKey() {
|
||||
const { symmetricKey, encPrivateKey } = _getSessionData();
|
||||
|
||||
const privateKeyStr = crypt.decryptAES(symmetricKey, encPrivateKey);
|
||||
return JSON.parse(privateKeyStr);
|
||||
}
|
||||
|
||||
export function getCurrentSessionId() {
|
||||
if (window) {
|
||||
return window.localStorage.getItem('currentSessionId');
|
||||
@@ -169,23 +154,18 @@ export function getCurrentSessionId() {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
export function getAccountId() {
|
||||
return _getSessionData().accountId;
|
||||
}
|
||||
|
||||
export function getEmail() {
|
||||
return _getSessionData().email;
|
||||
}
|
||||
|
||||
export function getFirstName() {
|
||||
return _getSessionData().firstName;
|
||||
}
|
||||
|
||||
export function getLastName() {
|
||||
return _getSessionData().lastName;
|
||||
}
|
||||
|
||||
export function getFullName() {
|
||||
return `${getFirstName()} ${getLastName()}`.trim();
|
||||
}
|
||||
@@ -206,6 +186,7 @@ export async function logout() {
|
||||
}
|
||||
|
||||
_unsetSessionData();
|
||||
|
||||
_callCallbacks();
|
||||
}
|
||||
|
||||
@@ -230,17 +211,13 @@ export function setSessionData(
|
||||
firstName: firstName,
|
||||
lastName: lastName,
|
||||
});
|
||||
|
||||
window.localStorage.setItem(_getSessionKey(sessionId), dataStr);
|
||||
|
||||
// NOTE: We're setting this last because the stuff above might fail
|
||||
window.localStorage.setItem('currentSessionId', sessionId);
|
||||
}
|
||||
|
||||
export async function listTeams() {
|
||||
return fetch.get('/api/teams', getCurrentSessionId());
|
||||
}
|
||||
|
||||
export async function endTrial() {
|
||||
await fetch.put('/api/billing/end-trial', null, getCurrentSessionId());
|
||||
}
|
||||
@@ -248,9 +225,9 @@ export async function endTrial() {
|
||||
// ~~~~~~~~~~~~~~~~ //
|
||||
// Helper Functions //
|
||||
// ~~~~~~~~~~~~~~~~ //
|
||||
|
||||
function _getSymmetricKey() {
|
||||
const sessionData = _getSessionData();
|
||||
|
||||
return sessionData.symmetricKey;
|
||||
}
|
||||
|
||||
@@ -259,16 +236,26 @@ function _whoami(sessionId = null) {
|
||||
}
|
||||
|
||||
function _getAuthSalts(email) {
|
||||
return fetch.post('/auth/login-s', { email }, getCurrentSessionId());
|
||||
return fetch.post(
|
||||
'/auth/login-s',
|
||||
{
|
||||
email,
|
||||
},
|
||||
getCurrentSessionId(),
|
||||
);
|
||||
}
|
||||
|
||||
function _getSessionData() {
|
||||
const sessionId = getCurrentSessionId();
|
||||
|
||||
if (!sessionId || !window) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const dataStr = window.localStorage.getItem(_getSessionKey(sessionId));
|
||||
if (dataStr === null) {
|
||||
return null;
|
||||
}
|
||||
return JSON.parse(dataStr);
|
||||
}
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
import * as models from '../../models';
|
||||
import { workspace, requestGroup, request, BaseModel } from '../../models';
|
||||
|
||||
export const data = {
|
||||
[models.workspace.type]: [
|
||||
export const data: Record<string, Array<Partial<BaseModel>>> = {
|
||||
[workspace.type]: [
|
||||
{
|
||||
_id: 'wrk_1',
|
||||
name: 'Wrk 1',
|
||||
},
|
||||
],
|
||||
|
||||
[models.requestGroup.type]: [
|
||||
[requestGroup.type]: [
|
||||
{
|
||||
_id: 'fld_1',
|
||||
parentId: 'wrk_1',
|
||||
@@ -25,8 +24,7 @@ export const data = {
|
||||
name: 'Fld 3',
|
||||
},
|
||||
],
|
||||
|
||||
[models.request.type]: [
|
||||
[request.type]: [
|
||||
{
|
||||
_id: 'req_1',
|
||||
parentId: 'fld_1',
|
||||
@@ -1,3 +1,4 @@
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
const trackEvent = jest.fn();
|
||||
const trackSegmentEvent = jest.fn();
|
||||
module.exports = { trackEvent, trackSegmentEvent };
|
||||
@@ -1,6 +0,0 @@
|
||||
const render = jest.requireActual('../render');
|
||||
|
||||
render.getRenderedGrpcRequest = jest.fn();
|
||||
render.getRenderedGrpcRequestMessage = jest.fn();
|
||||
|
||||
module.exports = render;
|
||||
6
packages/insomnia-app/app/common/__mocks__/render.ts
Normal file
6
packages/insomnia-app/app/common/__mocks__/render.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
const _render = jest.requireActual('../render');
|
||||
_render.getRenderedGrpcRequest = jest.fn();
|
||||
_render.getRenderedGrpcRequestMessage = jest.fn();
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = _render;
|
||||
@@ -16,9 +16,11 @@ import {
|
||||
describe('init()', () => {
|
||||
beforeEach(async () => {
|
||||
await globalBeforeEach();
|
||||
electron.net.request = jest.fn(url => {
|
||||
electron.net.request = jest.fn(() => {
|
||||
const req = new EventEmitter();
|
||||
|
||||
req.end = function() {};
|
||||
|
||||
return req;
|
||||
});
|
||||
jest.useFakeTimers();
|
||||
@@ -31,7 +33,6 @@ describe('init()', () => {
|
||||
});
|
||||
expect(settings.enableAnalytics).toBe(false);
|
||||
expect(electron.net.request.mock.calls).toEqual([]);
|
||||
|
||||
await _trackEvent(true, 'Foo', 'Bar');
|
||||
jest.runAllTimers();
|
||||
expect(electron.net.request.mock.calls).toEqual([]);
|
||||
@@ -44,7 +45,6 @@ describe('init()', () => {
|
||||
});
|
||||
expect(settings.enableAnalytics).toBe(true);
|
||||
expect(electron.net.request.mock.calls).toEqual([]);
|
||||
|
||||
await _trackEvent(true, 'Foo', 'Bar');
|
||||
jest.runAllTimers();
|
||||
expect(electron.net.request.mock.calls).toEqual([
|
||||
@@ -78,7 +78,6 @@ describe('init()', () => {
|
||||
deviceId: 'device',
|
||||
enableAnalytics: true,
|
||||
});
|
||||
|
||||
await _trackEvent(false, 'Foo', 'Bar');
|
||||
jest.runAllTimers();
|
||||
expect(electron.net.request.mock.calls).toEqual([
|
||||
@@ -113,7 +112,6 @@ describe('init()', () => {
|
||||
deviceId: 'device',
|
||||
enableAnalytics: true,
|
||||
});
|
||||
|
||||
await _trackPageView('/my/path');
|
||||
jest.runAllTimers();
|
||||
expect(electron.net.request.mock.calls).toEqual([
|
||||
@@ -145,7 +143,6 @@ describe('init()', () => {
|
||||
deviceId: 'device',
|
||||
enableAnalytics: true,
|
||||
});
|
||||
|
||||
await _trackPageView('/my/path');
|
||||
jest.runAllTimers();
|
||||
await _trackEvent(true, 'cat', 'act', 'lab', 'val');
|
||||
@@ -8,18 +8,17 @@ describe('parseApiSpec()', () => {
|
||||
it('parses YAML and JSON OpenAPI specs', () => {
|
||||
const objSpec = {
|
||||
openapi: '3.0.0',
|
||||
info: { title: 'My API' },
|
||||
info: {
|
||||
title: 'My API',
|
||||
},
|
||||
};
|
||||
|
||||
const yamlSpec = YAML.stringify(objSpec);
|
||||
const jsonSpec = JSON.stringify(objSpec);
|
||||
|
||||
const expected = {
|
||||
format: 'openapi',
|
||||
formatVersion: '3.0.0',
|
||||
contents: objSpec,
|
||||
};
|
||||
|
||||
expect(parseApiSpec(yamlSpec)).toEqual({ ...expected, rawContents: yamlSpec });
|
||||
expect(parseApiSpec(jsonSpec)).toEqual({ ...expected, rawContents: jsonSpec });
|
||||
});
|
||||
@@ -27,18 +26,17 @@ describe('parseApiSpec()', () => {
|
||||
it('parses YAML and JSON Swagger specs', () => {
|
||||
const objSpec = {
|
||||
swagger: '2.0.0',
|
||||
info: { title: 'My API' },
|
||||
info: {
|
||||
title: 'My API',
|
||||
},
|
||||
};
|
||||
|
||||
const expected = {
|
||||
format: 'swagger',
|
||||
formatVersion: '2.0.0',
|
||||
contents: objSpec,
|
||||
};
|
||||
|
||||
const yamlSpec = YAML.stringify(objSpec);
|
||||
const jsonSpec = JSON.stringify(objSpec);
|
||||
|
||||
expect(parseApiSpec(yamlSpec)).toEqual({ ...expected, rawContents: yamlSpec });
|
||||
expect(parseApiSpec(jsonSpec)).toEqual({ ...expected, rawContents: jsonSpec });
|
||||
});
|
||||
@@ -46,18 +44,17 @@ describe('parseApiSpec()', () => {
|
||||
it('parses YAML and JSON Unknown specs', () => {
|
||||
const objSpec = {
|
||||
funnyBusiness: '2.0.0',
|
||||
info: { title: 'My API' },
|
||||
info: {
|
||||
title: 'My API',
|
||||
},
|
||||
};
|
||||
|
||||
const expected = {
|
||||
format: null,
|
||||
formatVersion: null,
|
||||
contents: objSpec,
|
||||
};
|
||||
|
||||
const yamlSpec = YAML.stringify(objSpec);
|
||||
const jsonSpec = JSON.stringify(objSpec);
|
||||
|
||||
expect(parseApiSpec(yamlSpec)).toEqual({ ...expected, rawContents: yamlSpec });
|
||||
expect(parseApiSpec(jsonSpec)).toEqual({ ...expected, rawContents: jsonSpec });
|
||||
});
|
||||
@@ -69,13 +66,11 @@ describe('parseApiSpec()', () => {
|
||||
contents: null,
|
||||
rawContents: '',
|
||||
};
|
||||
|
||||
expect(parseApiSpec('')).toEqual(expected);
|
||||
});
|
||||
|
||||
it('Fails on malformed JSON/YAML', () => {
|
||||
const rawSpec = ['openapi: 3.0.0', 'info: {{{'].join('\n');
|
||||
|
||||
expect(() => parseApiSpec(rawSpec)).toThrowError('Failed to parse API spec');
|
||||
});
|
||||
});
|
||||
@@ -1,52 +1,58 @@
|
||||
import * as models from '../../models';
|
||||
import * as db from '../database';
|
||||
import { database as db, _repairDatabase } from '../database';
|
||||
import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
import { data as fixtures } from '../__fixtures__/nestedfolders';
|
||||
|
||||
function loadFixture(name) {
|
||||
const fixtures = require(`../__fixtures__/${name}`).data;
|
||||
const promises = [];
|
||||
function loadFixture() {
|
||||
const promises: Array<Promise<models.BaseModel>> = [];
|
||||
for (const type of Object.keys(fixtures)) {
|
||||
for (const doc of fixtures[type]) {
|
||||
promises.push(db.insert(Object.assign({}, doc, { type })));
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
promises.push(db.insert<models.BaseModel>({ ...doc, type }));
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.all(promises);
|
||||
}
|
||||
|
||||
describe('init()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('handles being initialized twice', async () => {
|
||||
await db.init(models.types(), { inMemoryOnly: true });
|
||||
await db.init(models.types(), { inMemoryOnly: true });
|
||||
await db.init(models.types(), {
|
||||
inMemoryOnly: true,
|
||||
});
|
||||
await db.init(models.types(), {
|
||||
inMemoryOnly: true,
|
||||
});
|
||||
expect((await db.all(models.request.type)).length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onChange()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('handles change listeners', async () => {
|
||||
const doc = {
|
||||
type: models.request.type,
|
||||
parentId: 'nothing',
|
||||
name: 'foo',
|
||||
};
|
||||
const changesSeen: Array<Function> = [];
|
||||
|
||||
const changesSeen = [];
|
||||
const callback = change => {
|
||||
changesSeen.push(change);
|
||||
};
|
||||
|
||||
db.onChange(callback);
|
||||
|
||||
const newDoc = await models.request.create(doc);
|
||||
const updatedDoc = await models.request.update(newDoc, { name: 'bar' });
|
||||
const updatedDoc = await models.request.update(newDoc, {
|
||||
name: 'bar',
|
||||
});
|
||||
expect(changesSeen.length).toBe(2);
|
||||
|
||||
expect(changesSeen).toEqual([
|
||||
[[db.CHANGE_INSERT, newDoc, false]],
|
||||
[[db.CHANGE_UPDATE, updatedDoc, false]],
|
||||
]);
|
||||
|
||||
db.offChange(callback);
|
||||
await models.request.create(doc);
|
||||
expect(changesSeen.length).toBe(2);
|
||||
@@ -55,26 +61,26 @@ describe('onChange()', () => {
|
||||
|
||||
describe('bufferChanges()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('properly buffers changes', async () => {
|
||||
const doc = {
|
||||
type: models.request.type,
|
||||
parentId: 'n/a',
|
||||
name: 'foo',
|
||||
};
|
||||
const changesSeen: Array<Function> = [];
|
||||
|
||||
const changesSeen = [];
|
||||
const callback = change => {
|
||||
changesSeen.push(change);
|
||||
};
|
||||
db.onChange(callback);
|
||||
|
||||
db.onChange(callback);
|
||||
await db.bufferChanges();
|
||||
const newDoc = await models.request.create(doc);
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
const updatedDoc = await models.request.update(newDoc, true);
|
||||
|
||||
// Assert no change seen before flush
|
||||
expect(changesSeen.length).toBe(0);
|
||||
|
||||
// Assert changes seen after flush
|
||||
await db.flushChanges();
|
||||
expect(changesSeen).toEqual([
|
||||
@@ -83,7 +89,6 @@ describe('bufferChanges()', () => {
|
||||
[db.CHANGE_UPDATE, updatedDoc, false],
|
||||
],
|
||||
]);
|
||||
|
||||
// Assert no more changes seen after flush again
|
||||
await db.flushChanges();
|
||||
expect(changesSeen).toEqual([
|
||||
@@ -100,20 +105,19 @@ describe('bufferChanges()', () => {
|
||||
parentId: 'n/a',
|
||||
name: 'foo',
|
||||
};
|
||||
const changesSeen: Array<Function> = [];
|
||||
|
||||
const changesSeen = [];
|
||||
const callback = change => {
|
||||
changesSeen.push(change);
|
||||
};
|
||||
db.onChange(callback);
|
||||
|
||||
db.onChange(callback);
|
||||
await db.bufferChanges();
|
||||
const newDoc = await models.request.create(doc);
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
const updatedDoc = await models.request.update(newDoc, true);
|
||||
|
||||
// Default flush timeout is 1000ms after starting buffering
|
||||
await new Promise(resolve => setTimeout(resolve, 1500));
|
||||
|
||||
expect(changesSeen).toEqual([
|
||||
[
|
||||
[db.CHANGE_INSERT, newDoc, false],
|
||||
@@ -128,19 +132,18 @@ describe('bufferChanges()', () => {
|
||||
parentId: 'n/a',
|
||||
name: 'foo',
|
||||
};
|
||||
const changesSeen: Array<Function> = [];
|
||||
|
||||
const changesSeen = [];
|
||||
const callback = change => {
|
||||
changesSeen.push(change);
|
||||
};
|
||||
db.onChange(callback);
|
||||
|
||||
db.onChange(callback);
|
||||
await db.bufferChanges(500);
|
||||
const newDoc = await models.request.create(doc);
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
const updatedDoc = await models.request.update(newDoc, true);
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
expect(changesSeen).toEqual([
|
||||
[
|
||||
[db.CHANGE_INSERT, newDoc, false],
|
||||
@@ -152,29 +155,28 @@ describe('bufferChanges()', () => {
|
||||
|
||||
describe('bufferChangesIndefinitely()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('should not auto flush', async () => {
|
||||
const doc = {
|
||||
type: models.request.type,
|
||||
parentId: 'n/a',
|
||||
name: 'foo',
|
||||
};
|
||||
const changesSeen: Array<Function> = [];
|
||||
|
||||
const changesSeen = [];
|
||||
const callback = change => {
|
||||
changesSeen.push(change);
|
||||
};
|
||||
db.onChange(callback);
|
||||
|
||||
db.onChange(callback);
|
||||
await db.bufferChangesIndefinitely();
|
||||
const newDoc = await models.request.create(doc);
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
const updatedDoc = await models.request.update(newDoc, true);
|
||||
|
||||
// Default flush timeout is 1000ms after starting buffering
|
||||
await new Promise(resolve => setTimeout(resolve, 1500));
|
||||
|
||||
// Assert no change seen before flush
|
||||
expect(changesSeen.length).toBe(0);
|
||||
|
||||
// Assert changes seen after flush
|
||||
await db.flushChanges();
|
||||
expect(changesSeen).toEqual([
|
||||
@@ -188,17 +190,15 @@ describe('bufferChangesIndefinitely()', () => {
|
||||
|
||||
describe('requestCreate()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('creates a valid request', async () => {
|
||||
const now = Date.now();
|
||||
|
||||
const patch = {
|
||||
name: 'My Request',
|
||||
parentId: 'wrk_123',
|
||||
};
|
||||
|
||||
const r = await models.request.create(patch);
|
||||
expect(Object.keys(r).length).toBe(21);
|
||||
|
||||
expect(r._id).toMatch(/^req_[a-zA-Z0-9]{32}$/);
|
||||
expect(r.created).toBeGreaterThanOrEqual(now);
|
||||
expect(r.modified).toBeGreaterThanOrEqual(now);
|
||||
@@ -215,7 +215,11 @@ describe('requestCreate()', () => {
|
||||
});
|
||||
|
||||
it('throws when missing parentID', () => {
|
||||
const fn = () => models.request.create({ name: 'My Request' });
|
||||
const fn = () =>
|
||||
models.request.create({
|
||||
name: 'My Request',
|
||||
});
|
||||
|
||||
expect(fn).toThrowError('New Requests missing `parentId`');
|
||||
});
|
||||
});
|
||||
@@ -223,17 +227,20 @@ describe('requestCreate()', () => {
|
||||
describe('requestGroupDuplicate()', () => {
|
||||
beforeEach(async () => {
|
||||
await globalBeforeEach();
|
||||
await loadFixture('nestedfolders');
|
||||
await loadFixture();
|
||||
});
|
||||
|
||||
it('duplicates a RequestGroup', async () => {
|
||||
const requestGroup = await models.requestGroup.getById('fld_1');
|
||||
expect(requestGroup.name).toBe('Fld 1');
|
||||
expect(requestGroup).not.toEqual(null);
|
||||
if (requestGroup === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
expect(requestGroup.name).toBe('Fld 1');
|
||||
const newRequestGroup = await models.requestGroup.duplicate(requestGroup);
|
||||
expect(newRequestGroup._id).not.toBe(requestGroup._id);
|
||||
expect(newRequestGroup.name).toBe('Fld 1 (Copy)');
|
||||
|
||||
const allRequests = await models.request.all();
|
||||
const allRequestGroups = await models.requestGroup.all();
|
||||
const childRequests = await models.request.findByParentId(requestGroup._id);
|
||||
@@ -244,10 +251,8 @@ describe('requestGroupDuplicate()', () => {
|
||||
// to see that the recursion worked (for the most part)
|
||||
expect(allRequests.length).toBe(8);
|
||||
expect(allRequestGroups.length).toBe(5);
|
||||
|
||||
expect(childRequests.length).toBe(2);
|
||||
expect(childRequestGroups.length).toBe(1);
|
||||
|
||||
expect(newChildRequests.length).toBe(2);
|
||||
expect(newChildRequestGroups.length).toBe(1);
|
||||
});
|
||||
@@ -258,190 +263,354 @@ describe('_repairDatabase()', () => {
|
||||
|
||||
it('fixes duplicate environments', async () => {
|
||||
// Create Workspace with no children
|
||||
const workspace = await models.workspace.create({ _id: 'w1' });
|
||||
const workspace = await models.workspace.create({
|
||||
_id: 'w1',
|
||||
});
|
||||
const spec = await models.apiSpec.getByParentId(workspace._id);
|
||||
|
||||
expect((await db.withDescendants(workspace)).length).toBe(2);
|
||||
|
||||
// Create one set of sub environments
|
||||
await models.environment.create({
|
||||
_id: 'b1',
|
||||
parentId: 'w1',
|
||||
data: { foo: 'b1', b1: true },
|
||||
data: {
|
||||
foo: 'b1',
|
||||
b1: true,
|
||||
},
|
||||
});
|
||||
await models.environment.create({
|
||||
_id: 'b1_sub1',
|
||||
parentId: 'b1',
|
||||
data: { foo: '1' },
|
||||
data: {
|
||||
foo: '1',
|
||||
},
|
||||
});
|
||||
await models.environment.create({
|
||||
_id: 'b1_sub2',
|
||||
parentId: 'b1',
|
||||
data: { foo: '2' },
|
||||
data: {
|
||||
foo: '2',
|
||||
},
|
||||
});
|
||||
|
||||
// Create second set of sub environments
|
||||
await models.environment.create({
|
||||
_id: 'b2',
|
||||
parentId: 'w1',
|
||||
data: { foo: 'b2', b2: true },
|
||||
data: {
|
||||
foo: 'b2',
|
||||
b2: true,
|
||||
},
|
||||
});
|
||||
await models.environment.create({
|
||||
_id: 'b2_sub1',
|
||||
parentId: 'b2',
|
||||
data: { foo: '3' },
|
||||
data: {
|
||||
foo: '3',
|
||||
},
|
||||
});
|
||||
await models.environment.create({
|
||||
_id: 'b2_sub2',
|
||||
parentId: 'b2',
|
||||
data: { foo: '4' },
|
||||
data: {
|
||||
foo: '4',
|
||||
},
|
||||
});
|
||||
|
||||
// Make sure we have everything
|
||||
expect((await db.withDescendants(workspace)).length).toBe(8);
|
||||
const descendants = (await db.withDescendants(workspace)).map(d => ({
|
||||
_id: d._id,
|
||||
parentId: d.parentId,
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
data: d.data || null,
|
||||
}));
|
||||
expect(descendants).toEqual([
|
||||
{ _id: 'w1', data: null, parentId: null },
|
||||
{ _id: 'b1', data: { foo: 'b1', b1: true }, parentId: 'w1' },
|
||||
{ _id: 'b2', data: { foo: 'b2', b2: true }, parentId: 'w1' },
|
||||
expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
|
||||
{ _id: 'b1_sub1', data: { foo: '1' }, parentId: 'b1' },
|
||||
{ _id: 'b1_sub2', data: { foo: '2' }, parentId: 'b1' },
|
||||
{ _id: 'b2_sub1', data: { foo: '3' }, parentId: 'b2' },
|
||||
{ _id: 'b2_sub2', data: { foo: '4' }, parentId: 'b2' },
|
||||
{
|
||||
_id: 'w1',
|
||||
data: null,
|
||||
parentId: null,
|
||||
},
|
||||
{
|
||||
_id: 'b1',
|
||||
data: {
|
||||
foo: 'b1',
|
||||
b1: true,
|
||||
},
|
||||
parentId: 'w1',
|
||||
},
|
||||
{
|
||||
_id: 'b2',
|
||||
data: {
|
||||
foo: 'b2',
|
||||
b2: true,
|
||||
},
|
||||
parentId: 'w1',
|
||||
},
|
||||
expect.objectContaining({
|
||||
_id: spec?._id,
|
||||
parentId: 'w1',
|
||||
}),
|
||||
{
|
||||
_id: 'b1_sub1',
|
||||
data: {
|
||||
foo: '1',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
{
|
||||
_id: 'b1_sub2',
|
||||
data: {
|
||||
foo: '2',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
{
|
||||
_id: 'b2_sub1',
|
||||
data: {
|
||||
foo: '3',
|
||||
},
|
||||
parentId: 'b2',
|
||||
},
|
||||
{
|
||||
_id: 'b2_sub2',
|
||||
data: {
|
||||
foo: '4',
|
||||
},
|
||||
parentId: 'b2',
|
||||
},
|
||||
]);
|
||||
|
||||
// Run the fix algorithm
|
||||
await db._repairDatabase();
|
||||
await _repairDatabase();
|
||||
|
||||
// Make sure things get adjusted
|
||||
const descendants2 = (await db.withDescendants(workspace)).map(d => ({
|
||||
_id: d._id,
|
||||
parentId: d.parentId,
|
||||
// @ts-expect-error -- TSCONVERSION appears to be genuine
|
||||
data: d.data || null,
|
||||
}));
|
||||
expect(descendants2).toEqual([
|
||||
{ _id: 'w1', data: null, parentId: null },
|
||||
{ _id: 'b1', data: { foo: 'b1', b1: true, b2: true }, parentId: 'w1' },
|
||||
expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
|
||||
|
||||
// Extra base environments should have been deleted
|
||||
{
|
||||
_id: 'w1',
|
||||
data: null,
|
||||
parentId: null,
|
||||
},
|
||||
{
|
||||
_id: 'b1',
|
||||
data: {
|
||||
foo: 'b1',
|
||||
b1: true,
|
||||
b2: true,
|
||||
},
|
||||
parentId: 'w1',
|
||||
},
|
||||
expect.objectContaining({
|
||||
_id: spec?._id,
|
||||
parentId: 'w1',
|
||||
}), // Extra base environments should have been deleted
|
||||
// {_id: 'b2', data: {foo: 'bar'}, parentId: 'w1'},
|
||||
|
||||
// Sub environments should have been moved to new "master" base environment
|
||||
{ _id: 'b1_sub1', data: { foo: '1' }, parentId: 'b1' },
|
||||
{ _id: 'b1_sub2', data: { foo: '2' }, parentId: 'b1' },
|
||||
{ _id: 'b2_sub1', data: { foo: '3' }, parentId: 'b1' },
|
||||
{ _id: 'b2_sub2', data: { foo: '4' }, parentId: 'b1' },
|
||||
{
|
||||
_id: 'b1_sub1',
|
||||
data: {
|
||||
foo: '1',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
{
|
||||
_id: 'b1_sub2',
|
||||
data: {
|
||||
foo: '2',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
{
|
||||
_id: 'b2_sub1',
|
||||
data: {
|
||||
foo: '3',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
{
|
||||
_id: 'b2_sub2',
|
||||
data: {
|
||||
foo: '4',
|
||||
},
|
||||
parentId: 'b1',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('fixes duplicate cookie jars', async () => {
|
||||
// Create Workspace with no children
|
||||
const workspace = await models.workspace.create({ _id: 'w1' });
|
||||
const workspace = await models.workspace.create({
|
||||
_id: 'w1',
|
||||
});
|
||||
const spec = await models.apiSpec.getByParentId(workspace._id);
|
||||
|
||||
expect((await db.withDescendants(workspace)).length).toBe(2);
|
||||
|
||||
// Create one set of sub environments
|
||||
await models.cookieJar.create({
|
||||
_id: 'j1',
|
||||
parentId: 'w1',
|
||||
cookies: [
|
||||
{ id: '1', key: 'foo', value: '1' },
|
||||
{ id: 'j1_1', key: 'j1', value: '1' },
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
{
|
||||
id: '1',
|
||||
key: 'foo',
|
||||
value: '1',
|
||||
},
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
{
|
||||
id: 'j1_1',
|
||||
key: 'j1',
|
||||
value: '1',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
await models.cookieJar.create({
|
||||
_id: 'j2',
|
||||
parentId: 'w1',
|
||||
cookies: [
|
||||
{ id: '1', key: 'foo', value: '2' },
|
||||
{ id: 'j2_1', key: 'j2', value: '2' },
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
{
|
||||
id: '1',
|
||||
key: 'foo',
|
||||
value: '2',
|
||||
},
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
{
|
||||
id: 'j2_1',
|
||||
key: 'j2',
|
||||
value: '2',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Make sure we have everything
|
||||
expect((await db.withDescendants(workspace)).length).toBe(4);
|
||||
const descendants = (await db.withDescendants(workspace)).map(d => ({
|
||||
_id: d._id,
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
cookies: d.cookies || null,
|
||||
parentId: d.parentId,
|
||||
}));
|
||||
expect(descendants).toEqual([
|
||||
{ _id: 'w1', cookies: null, parentId: null },
|
||||
{
|
||||
_id: 'w1',
|
||||
cookies: null,
|
||||
parentId: null,
|
||||
},
|
||||
{
|
||||
_id: 'j1',
|
||||
parentId: 'w1',
|
||||
cookies: [
|
||||
{ id: '1', key: 'foo', value: '1' },
|
||||
{ id: 'j1_1', key: 'j1', value: '1' },
|
||||
{
|
||||
id: '1',
|
||||
key: 'foo',
|
||||
value: '1',
|
||||
},
|
||||
{
|
||||
id: 'j1_1',
|
||||
key: 'j1',
|
||||
value: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
_id: 'j2',
|
||||
parentId: 'w1',
|
||||
cookies: [
|
||||
{ id: '1', key: 'foo', value: '2' },
|
||||
{ id: 'j2_1', key: 'j2', value: '2' },
|
||||
{
|
||||
id: '1',
|
||||
key: 'foo',
|
||||
value: '2',
|
||||
},
|
||||
{
|
||||
id: 'j2_1',
|
||||
key: 'j2',
|
||||
value: '2',
|
||||
},
|
||||
],
|
||||
},
|
||||
expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
|
||||
expect.objectContaining({
|
||||
_id: spec?._id,
|
||||
parentId: 'w1',
|
||||
}),
|
||||
]);
|
||||
|
||||
// Run the fix algorithm
|
||||
await db._repairDatabase();
|
||||
|
||||
await _repairDatabase();
|
||||
// Make sure things get adjusted
|
||||
const descendants2 = (await db.withDescendants(workspace)).map(d => ({
|
||||
_id: d._id,
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
cookies: d.cookies || null,
|
||||
parentId: d.parentId,
|
||||
}));
|
||||
expect(descendants2).toEqual([
|
||||
{ _id: 'w1', cookies: null, parentId: null },
|
||||
{
|
||||
_id: 'w1',
|
||||
cookies: null,
|
||||
parentId: null,
|
||||
},
|
||||
{
|
||||
_id: 'j1',
|
||||
parentId: 'w1',
|
||||
cookies: [
|
||||
{ id: '1', key: 'foo', value: '1' },
|
||||
{ id: 'j1_1', key: 'j1', value: '1' },
|
||||
{ id: 'j2_1', key: 'j2', value: '2' },
|
||||
{
|
||||
id: '1',
|
||||
key: 'foo',
|
||||
value: '1',
|
||||
},
|
||||
{
|
||||
id: 'j1_1',
|
||||
key: 'j1',
|
||||
value: '1',
|
||||
},
|
||||
{
|
||||
id: 'j2_1',
|
||||
key: 'j2',
|
||||
value: '2',
|
||||
},
|
||||
],
|
||||
},
|
||||
expect.objectContaining({ _id: spec._id, parentId: 'w1' }),
|
||||
expect.objectContaining({
|
||||
_id: spec?._id,
|
||||
parentId: 'w1',
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
it('fixes the filename on an apiSpec', async () => {
|
||||
// Create Workspace with apiSpec child (migration in workspace will automatically create this as it is not mocked)
|
||||
const w1 = await models.workspace.create({ _id: 'w1', name: 'Workspace 1' });
|
||||
const w2 = await models.workspace.create({ _id: 'w2', name: 'Workspace 2' });
|
||||
const w3 = await models.workspace.create({ _id: 'w3', name: 'Workspace 3' });
|
||||
|
||||
await models.apiSpec.updateOrCreateForParentId(w1._id, { fileName: '' });
|
||||
const w1 = await models.workspace.create({
|
||||
_id: 'w1',
|
||||
name: 'Workspace 1',
|
||||
});
|
||||
const w2 = await models.workspace.create({
|
||||
_id: 'w2',
|
||||
name: 'Workspace 2',
|
||||
});
|
||||
const w3 = await models.workspace.create({
|
||||
_id: 'w3',
|
||||
name: 'Workspace 3',
|
||||
});
|
||||
await models.apiSpec.updateOrCreateForParentId(w1._id, {
|
||||
fileName: '',
|
||||
});
|
||||
await models.apiSpec.updateOrCreateForParentId(w2._id, {
|
||||
fileName: models.apiSpec.init().fileName,
|
||||
});
|
||||
await models.apiSpec.updateOrCreateForParentId(w3._id, { fileName: 'Unique name' });
|
||||
|
||||
await models.apiSpec.updateOrCreateForParentId(w3._id, {
|
||||
fileName: 'Unique name',
|
||||
});
|
||||
// Make sure we have everything
|
||||
expect((await models.apiSpec.getByParentId(w1._id)).fileName).toBe('');
|
||||
expect((await models.apiSpec.getByParentId(w2._id)).fileName).toBe('New Document');
|
||||
expect((await models.apiSpec.getByParentId(w3._id)).fileName).toBe('Unique name');
|
||||
|
||||
expect((await models.apiSpec.getByParentId(w1._id))?.fileName).toBe('');
|
||||
expect((await models.apiSpec.getByParentId(w2._id))?.fileName).toBe('New Document');
|
||||
expect((await models.apiSpec.getByParentId(w3._id))?.fileName).toBe('Unique name');
|
||||
// Run the fix algorithm
|
||||
await db._repairDatabase();
|
||||
|
||||
await _repairDatabase();
|
||||
// Make sure things get adjusted
|
||||
expect((await models.apiSpec.getByParentId(w1._id)).fileName).toBe('Workspace 1'); // Should fix
|
||||
expect((await models.apiSpec.getByParentId(w2._id)).fileName).toBe('Workspace 2'); // Should fix
|
||||
expect((await models.apiSpec.getByParentId(w3._id)).fileName).toBe('Unique name'); // should not fix
|
||||
expect((await models.apiSpec.getByParentId(w1._id))?.fileName).toBe('Workspace 1'); // Should fix
|
||||
expect((await models.apiSpec.getByParentId(w2._id))?.fileName).toBe('Workspace 2'); // Should fix
|
||||
expect((await models.apiSpec.getByParentId(w3._id))?.fileName).toBe('Unique name'); // should not fix
|
||||
});
|
||||
|
||||
it('fixes old git uris', async () => {
|
||||
@@ -459,9 +628,7 @@ describe('_repairDatabase()', () => {
|
||||
const newRepoWithoutSuffix = await models.gitRepository.create({
|
||||
uri: 'https://github.com/foo/bar',
|
||||
});
|
||||
|
||||
await db._repairDatabase();
|
||||
|
||||
await _repairDatabase();
|
||||
expect(await db.get(models.gitRepository.type, oldRepoWithSuffix._id)).toEqual(
|
||||
expect.objectContaining({
|
||||
uri: 'https://github.com/foo/bar.git',
|
||||
@@ -496,18 +663,18 @@ describe('duplicate()', () => {
|
||||
it('should overwrite appropriate fields on the parent when duplicating', async () => {
|
||||
const date = 1478795580200;
|
||||
Date.now = jest.fn().mockReturnValue(date);
|
||||
|
||||
const workspace = await models.workspace.create({
|
||||
name: 'Test Workspace',
|
||||
});
|
||||
|
||||
const newDescription = 'test';
|
||||
const duplicated = await db.duplicate(workspace, { description: newDescription });
|
||||
|
||||
const duplicated = await db.duplicate(workspace, {
|
||||
description: newDescription,
|
||||
});
|
||||
expect(duplicated._id).not.toEqual(workspace._id);
|
||||
expect(duplicated._id).toMatch(/^wrk_[a-z0-9]{32}$/);
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
delete workspace._id;
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
delete duplicated._id;
|
||||
expect(duplicated).toEqual({
|
||||
...workspace,
|
||||
@@ -517,11 +684,11 @@ describe('duplicate()', () => {
|
||||
type: models.workspace.type,
|
||||
});
|
||||
});
|
||||
|
||||
it('should should not call migrate when duplicating', async () => {
|
||||
const workspace = await models.workspace.create({
|
||||
name: 'Test Workspace',
|
||||
});
|
||||
|
||||
const spy = jest.spyOn(models.workspace, 'migrate');
|
||||
await db.duplicate(workspace);
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
@@ -534,11 +701,9 @@ describe('docCreate()', () => {
|
||||
|
||||
it('should call migrate when creating', async () => {
|
||||
const spy = jest.spyOn(models.workspace, 'migrate');
|
||||
|
||||
await db.docCreate(models.workspace.type, {
|
||||
name: 'Test Workspace',
|
||||
});
|
||||
|
||||
// TODO: This is actually called twice, not once - we should avoid the double model.init() call.
|
||||
expect(spy).toHaveBeenCalled();
|
||||
});
|
||||
@@ -549,24 +714,30 @@ describe('withAncestors()', () => {
|
||||
|
||||
it('should return itself and all parents but exclude siblings', async () => {
|
||||
const wrk = await models.workspace.create();
|
||||
const wrkReq = await models.request.create({ parentId: wrk._id });
|
||||
const wrkGrpcReq = await models.grpcRequest.create({ parentId: wrk._id });
|
||||
const grp = await models.requestGroup.create({ parentId: wrk._id });
|
||||
const grpReq = await models.request.create({ parentId: grp._id });
|
||||
const grpGrpcReq = await models.grpcRequest.create({ parentId: grp._id });
|
||||
|
||||
const wrkReq = await models.request.create({
|
||||
parentId: wrk._id,
|
||||
});
|
||||
const wrkGrpcReq = await models.grpcRequest.create({
|
||||
parentId: wrk._id,
|
||||
});
|
||||
const grp = await models.requestGroup.create({
|
||||
parentId: wrk._id,
|
||||
});
|
||||
const grpReq = await models.request.create({
|
||||
parentId: grp._id,
|
||||
});
|
||||
const grpGrpcReq = await models.grpcRequest.create({
|
||||
parentId: grp._id,
|
||||
});
|
||||
// Workspace child searching for ancestors
|
||||
await expect(db.withAncestors(wrk)).resolves.toStrictEqual([wrk]);
|
||||
await expect(db.withAncestors(wrkReq)).resolves.toStrictEqual([wrkReq, wrk]);
|
||||
await expect(db.withAncestors(wrkGrpcReq)).resolves.toStrictEqual([wrkGrpcReq, wrk]);
|
||||
|
||||
// Group searching for ancestors
|
||||
await expect(db.withAncestors(grp)).resolves.toStrictEqual([grp, wrk]);
|
||||
|
||||
// Group child searching for ancestors
|
||||
await expect(db.withAncestors(grpReq)).resolves.toStrictEqual([grpReq, grp, wrk]);
|
||||
await expect(db.withAncestors(grpGrpcReq)).resolves.toStrictEqual([grpGrpcReq, grp, wrk]);
|
||||
|
||||
// Group child searching for ancestors with filters
|
||||
await expect(db.withAncestors(grpGrpcReq, [models.requestGroup.type])).resolves.toStrictEqual([
|
||||
grpGrpcReq,
|
||||
@@ -575,7 +746,6 @@ describe('withAncestors()', () => {
|
||||
await expect(
|
||||
db.withAncestors(grpGrpcReq, [models.requestGroup.type, models.workspace.type]),
|
||||
).resolves.toStrictEqual([grpGrpcReq, grp, wrk]);
|
||||
|
||||
// Group child searching for ancestors but excluding groups will not find the workspace
|
||||
await expect(db.withAncestors(grpGrpcReq, [models.workspace.type])).resolves.toStrictEqual([
|
||||
grpGrpcReq,
|
||||
@@ -1,5 +1,3 @@
|
||||
// @flow
|
||||
|
||||
import {
|
||||
getGrpcPathSegments,
|
||||
getShortGrpcPath,
|
||||
@@ -45,9 +43,14 @@ describe('getShortGrpcPath', () => {
|
||||
const serviceName = 'service';
|
||||
const methodName = 'method';
|
||||
const fullPath = '/package.service/method';
|
||||
|
||||
const shortPath = getShortGrpcPath({ packageName, serviceName, methodName }, fullPath);
|
||||
|
||||
const shortPath = getShortGrpcPath(
|
||||
{
|
||||
packageName,
|
||||
serviceName,
|
||||
methodName,
|
||||
},
|
||||
fullPath,
|
||||
);
|
||||
expect(shortPath).toBe('/service/method');
|
||||
});
|
||||
|
||||
@@ -56,13 +59,17 @@ describe('getShortGrpcPath', () => {
|
||||
const serviceName = 'service';
|
||||
const methodName = 'method';
|
||||
const fullPath = '/service/method';
|
||||
|
||||
const shortPath = getShortGrpcPath({ packageName, serviceName, methodName }, fullPath);
|
||||
|
||||
const shortPath = getShortGrpcPath(
|
||||
{
|
||||
packageName,
|
||||
serviceName,
|
||||
methodName,
|
||||
},
|
||||
fullPath,
|
||||
);
|
||||
expect(shortPath).toBe(fullPath);
|
||||
});
|
||||
});
|
||||
|
||||
const methodBuilder = createBuilder(grpcMethodDefinitionSchema);
|
||||
|
||||
describe('groupGrpcMethodsByPackage', () => {
|
||||
@@ -87,16 +94,13 @@ describe('groupGrpcMethodsByPackage', () => {
|
||||
.requestStream(true)
|
||||
.responseStream(true)
|
||||
.build();
|
||||
|
||||
const grouped = groupGrpcMethodsByPackage([
|
||||
packageMethod1,
|
||||
packageMethod2,
|
||||
newPackage,
|
||||
noPackage,
|
||||
]);
|
||||
|
||||
expect(Object.keys(grouped).length).toBe(3);
|
||||
|
||||
expect(grouped[NO_PACKAGE_KEY]).toStrictEqual([
|
||||
{
|
||||
segments: {
|
||||
@@ -108,7 +112,6 @@ describe('groupGrpcMethodsByPackage', () => {
|
||||
fullPath: noPackage.path,
|
||||
},
|
||||
]);
|
||||
|
||||
expect(grouped.package1).toStrictEqual([
|
||||
{
|
||||
segments: {
|
||||
@@ -129,7 +132,6 @@ describe('groupGrpcMethodsByPackage', () => {
|
||||
fullPath: packageMethod2.path,
|
||||
},
|
||||
]);
|
||||
|
||||
expect(grouped.package2).toStrictEqual([
|
||||
{
|
||||
segments: {
|
||||
@@ -7,6 +7,7 @@ import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
|
||||
describe('exportHar()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('exports single requests', async () => {
|
||||
const wrk = await models.workspace.create({
|
||||
_id: 'wrk_1',
|
||||
@@ -16,7 +17,6 @@ describe('exportHar()', () => {
|
||||
_id: 'req_1',
|
||||
name: 'Request 1',
|
||||
parentId: wrk._id,
|
||||
|
||||
url: 'https://httpstat.us/200',
|
||||
method: 'POST',
|
||||
body: {
|
||||
@@ -24,26 +24,44 @@ describe('exportHar()', () => {
|
||||
text: '{}',
|
||||
},
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Accept', value: 'application/json', disabled: false },
|
||||
{ name: 'X-Disabled', value: 'X-Disabled', disabled: true },
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
{
|
||||
name: 'Accept',
|
||||
value: 'application/json',
|
||||
disabled: false,
|
||||
},
|
||||
{
|
||||
name: 'X-Disabled',
|
||||
value: 'X-Disabled',
|
||||
disabled: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
await models.response.create({
|
||||
parentId: req1._id,
|
||||
|
||||
statusCode: 200,
|
||||
statusMessage: 'OK',
|
||||
elapsedTime: 999,
|
||||
headers: [{ name: 'Content-Type', value: 'application/json' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
],
|
||||
contentType: 'application/json',
|
||||
bodyPath: path.join(__dirname, '../__fixtures__/har/test-response.json'),
|
||||
bodyCompression: null,
|
||||
});
|
||||
|
||||
const exportRequests = [{ requestId: req1._id, environmentId: 'n/a' }];
|
||||
const exportRequests = [
|
||||
{
|
||||
requestId: req1._id,
|
||||
environmentId: 'n/a',
|
||||
},
|
||||
];
|
||||
const harExport = await harUtils.exportHar(exportRequests);
|
||||
|
||||
expect(harExport).toMatchObject({
|
||||
log: {
|
||||
version: '1.2',
|
||||
@@ -60,8 +78,14 @@ describe('exportHar()', () => {
|
||||
httpVersion: 'HTTP/1.1',
|
||||
cookies: [],
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Accept', value: 'application/json' },
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
{
|
||||
name: 'Accept',
|
||||
value: 'application/json',
|
||||
},
|
||||
],
|
||||
queryString: [],
|
||||
postData: {
|
||||
@@ -77,7 +101,12 @@ describe('exportHar()', () => {
|
||||
statusText: 'OK',
|
||||
httpVersion: 'HTTP/1.1',
|
||||
cookies: [],
|
||||
headers: [{ name: 'Content-Type', value: 'application/json' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
],
|
||||
content: {
|
||||
size: 15,
|
||||
mimeType: 'application/json',
|
||||
@@ -109,34 +138,45 @@ describe('exportHar()', () => {
|
||||
_id: 'wrk_1',
|
||||
name: 'Workspace',
|
||||
});
|
||||
|
||||
const baseReq = await models.request.create({
|
||||
_id: 'req_0',
|
||||
type: models.request.type,
|
||||
name: 'Request',
|
||||
parentId: workspace._id,
|
||||
|
||||
url: 'http://localhost',
|
||||
method: 'GET',
|
||||
body: {},
|
||||
headers: [{ name: 'X-Environment', value: '{{ envvalue }}' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: '{{ envvalue }}',
|
||||
},
|
||||
],
|
||||
});
|
||||
const req1 = await models.request.duplicate(baseReq);
|
||||
req1._id = 'req_1';
|
||||
req1.name = 'Request 1';
|
||||
req1.headers.push({ name: 'X-Request', value: '1' });
|
||||
req1.headers.push({
|
||||
name: 'X-Request',
|
||||
value: '1',
|
||||
});
|
||||
await models.request.create(req1);
|
||||
const req2 = await models.request.duplicate(baseReq);
|
||||
req2._id = 'req_2';
|
||||
req2.name = 'Request 2';
|
||||
req2.headers.push({ name: 'X-Request', value: '2' });
|
||||
req2.headers.push({
|
||||
name: 'X-Request',
|
||||
value: '2',
|
||||
});
|
||||
await models.request.create(req2);
|
||||
const req3 = await models.request.duplicate(baseReq);
|
||||
req3._id = 'req_3';
|
||||
req3.name = 'Request 3';
|
||||
req3.headers.push({ name: 'X-Request', value: '3' });
|
||||
req3.headers.push({
|
||||
name: 'X-Request',
|
||||
value: '3',
|
||||
});
|
||||
await models.request.create(req3);
|
||||
|
||||
const envBase = await models.environment.getOrCreateForWorkspace(workspace);
|
||||
await models.environment.update(envBase, {
|
||||
data: {
|
||||
@@ -160,7 +200,6 @@ describe('exportHar()', () => {
|
||||
envvalue: 'private',
|
||||
},
|
||||
});
|
||||
|
||||
await models.response.create({
|
||||
_id: 'res_1',
|
||||
parentId: req1._id,
|
||||
@@ -176,14 +215,21 @@ describe('exportHar()', () => {
|
||||
parentId: req3._id,
|
||||
statusCode: 500,
|
||||
});
|
||||
|
||||
const exportRequests = [
|
||||
{ requestId: req1._id, environmentId: null },
|
||||
{ requestId: req2._id, environmentId: envPublic._id },
|
||||
{ requestId: req3._id, environmentId: envPrivate._id },
|
||||
{
|
||||
requestId: req1._id,
|
||||
environmentId: null,
|
||||
},
|
||||
{
|
||||
requestId: req2._id,
|
||||
environmentId: envPublic._id,
|
||||
},
|
||||
{
|
||||
requestId: req3._id,
|
||||
environmentId: envPrivate._id,
|
||||
},
|
||||
];
|
||||
const harExport = await harUtils.exportHar(exportRequests);
|
||||
|
||||
expect(harExport).toMatchObject({
|
||||
log: {
|
||||
version: '1.2',
|
||||
@@ -194,8 +240,14 @@ describe('exportHar()', () => {
|
||||
{
|
||||
request: {
|
||||
headers: [
|
||||
{ name: 'X-Environment', value: '' },
|
||||
{ name: 'X-Request', value: '1' },
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
name: 'X-Request',
|
||||
value: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
response: {
|
||||
@@ -206,8 +258,14 @@ describe('exportHar()', () => {
|
||||
{
|
||||
request: {
|
||||
headers: [
|
||||
{ name: 'X-Environment', value: 'public' },
|
||||
{ name: 'X-Request', value: '2' },
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'public',
|
||||
},
|
||||
{
|
||||
name: 'X-Request',
|
||||
value: '2',
|
||||
},
|
||||
],
|
||||
},
|
||||
response: {
|
||||
@@ -218,8 +276,14 @@ describe('exportHar()', () => {
|
||||
{
|
||||
request: {
|
||||
headers: [
|
||||
{ name: 'X-Environment', value: 'private' },
|
||||
{ name: 'X-Request', value: '3' },
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'private',
|
||||
},
|
||||
{
|
||||
name: 'X-Request',
|
||||
value: '3',
|
||||
},
|
||||
],
|
||||
},
|
||||
response: {
|
||||
@@ -235,11 +299,10 @@ describe('exportHar()', () => {
|
||||
|
||||
describe('exportHarResponse()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('exports a default har response for an empty response', async () => {
|
||||
const notFoundResponse = null;
|
||||
|
||||
const harResponse = await harUtils.exportHarResponse(notFoundResponse);
|
||||
|
||||
expect(harResponse).toMatchObject({
|
||||
status: 0,
|
||||
statusText: '',
|
||||
@@ -250,6 +313,7 @@ describe('exportHarResponse()', () => {
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('exports a valid har response for a non empty response', async () => {
|
||||
const response = Object.assign(models.response.init(), {
|
||||
_id: 'res_123',
|
||||
@@ -257,20 +321,26 @@ describe('exportHarResponse()', () => {
|
||||
parentId: 'req_123',
|
||||
modified: 0,
|
||||
created: 0,
|
||||
|
||||
statusCode: 200,
|
||||
statusMessage: 'OK',
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Content-Length', value: '2' },
|
||||
{ name: 'Set-Cookie', value: 'sessionid=12345; HttpOnly; Path=/' },
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
{
|
||||
name: 'Content-Length',
|
||||
value: '2',
|
||||
},
|
||||
{
|
||||
name: 'Set-Cookie',
|
||||
value: 'sessionid=12345; HttpOnly; Path=/',
|
||||
},
|
||||
],
|
||||
contentType: 'application/json',
|
||||
bodyPath: path.join(__dirname, '../__fixtures__/har/test-response.json'),
|
||||
});
|
||||
|
||||
const harResponse = await harUtils.exportHarResponse(response);
|
||||
|
||||
expect(harResponse).toMatchObject({
|
||||
status: 200,
|
||||
statusText: 'OK',
|
||||
@@ -284,9 +354,18 @@ describe('exportHarResponse()', () => {
|
||||
},
|
||||
],
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Content-Length', value: '2' },
|
||||
{ name: 'Set-Cookie', value: 'sessionid=12345; HttpOnly; Path=/' },
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
{
|
||||
name: 'Content-Length',
|
||||
value: '2',
|
||||
},
|
||||
{
|
||||
name: 'Set-Cookie',
|
||||
value: 'sessionid=12345; HttpOnly; Path=/',
|
||||
},
|
||||
],
|
||||
content: {
|
||||
size: 15,
|
||||
@@ -302,6 +381,7 @@ describe('exportHarResponse()', () => {
|
||||
|
||||
describe('exportHarWithRequest()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('renders does it correctly', async () => {
|
||||
const workspace = await models.workspace.create();
|
||||
const cookies = [
|
||||
@@ -316,18 +396,26 @@ describe('exportHarWithRequest()', () => {
|
||||
lastAccessed: new Date('2096-10-05T04:40:49.505Z'),
|
||||
},
|
||||
];
|
||||
|
||||
const cookieJar = await models.cookieJar.getOrCreateForParentId(workspace._id);
|
||||
await models.cookieJar.update(cookieJar, {
|
||||
parentId: workspace._id,
|
||||
cookies,
|
||||
});
|
||||
|
||||
const request = Object.assign(models.request.init(), {
|
||||
_id: 'req_123',
|
||||
parentId: workspace._id,
|
||||
headers: [{ name: 'Content-Type', value: 'application/json' }],
|
||||
parameters: [{ name: 'foo bar', value: 'hello&world' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
],
|
||||
parameters: [
|
||||
{
|
||||
name: 'foo bar',
|
||||
value: 'hello&world',
|
||||
},
|
||||
],
|
||||
method: 'POST',
|
||||
body: {
|
||||
text: 'foo bar',
|
||||
@@ -339,10 +427,8 @@ describe('exportHarWithRequest()', () => {
|
||||
password: 'pass',
|
||||
},
|
||||
});
|
||||
|
||||
const { request: renderedRequest } = await render.getRenderedRequestAndContext(request);
|
||||
const har = await harUtils.exportHarWithRequest(renderedRequest);
|
||||
|
||||
expect(har.cookies.length).toBe(1);
|
||||
expect(har).toEqual({
|
||||
bodySize: -1,
|
||||
@@ -356,8 +442,14 @@ describe('exportHarWithRequest()', () => {
|
||||
},
|
||||
],
|
||||
headers: [
|
||||
{ name: 'Content-Type', value: 'application/json' },
|
||||
{ name: 'Authorization', value: 'Basic dXNlcjpwYXNz' },
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
},
|
||||
{
|
||||
name: 'Authorization',
|
||||
value: 'Basic dXNlcjpwYXNz',
|
||||
},
|
||||
],
|
||||
headersSize: -1,
|
||||
httpVersion: 'HTTP/1.1',
|
||||
@@ -367,7 +459,12 @@ describe('exportHarWithRequest()', () => {
|
||||
params: [],
|
||||
text: 'foo bar',
|
||||
},
|
||||
queryString: [{ name: 'foo bar', value: 'hello&world' }],
|
||||
queryString: [
|
||||
{
|
||||
name: 'foo bar',
|
||||
value: 'hello&world',
|
||||
},
|
||||
],
|
||||
url: 'http://google.com/',
|
||||
settingEncodeUrl: true,
|
||||
});
|
||||
@@ -378,28 +475,49 @@ describe('exportHarWithRequest()', () => {
|
||||
const request = Object.assign(models.request.init(), {
|
||||
_id: 'req_123',
|
||||
parentId: workspace._id,
|
||||
headers: [{ name: 'Content-Type', value: 'multipart/form-data' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'multipart/form-data',
|
||||
},
|
||||
],
|
||||
parameters: [],
|
||||
method: 'POST',
|
||||
body: {
|
||||
mimeType: 'multipart/form-data',
|
||||
params: [
|
||||
{ name: 'a_file', value: '', fileName: '/tmp/my_file', type: 'file' },
|
||||
{ name: 'a_simple_field', value: 'a_simple_value' },
|
||||
{ name: 'a_second_file', value: '', fileName: '/tmp/my_file_2', type: 'file' },
|
||||
{
|
||||
name: 'a_file',
|
||||
value: '',
|
||||
fileName: '/tmp/my_file',
|
||||
type: 'file',
|
||||
},
|
||||
{
|
||||
name: 'a_simple_field',
|
||||
value: 'a_simple_value',
|
||||
},
|
||||
{
|
||||
name: 'a_second_file',
|
||||
value: '',
|
||||
fileName: '/tmp/my_file_2',
|
||||
type: 'file',
|
||||
},
|
||||
],
|
||||
},
|
||||
url: 'http://example.com/post',
|
||||
authentication: {},
|
||||
});
|
||||
|
||||
const { request: renderedRequest } = await render.getRenderedRequestAndContext(request);
|
||||
const har = await harUtils.exportHarWithRequest(renderedRequest);
|
||||
|
||||
expect(har).toEqual({
|
||||
bodySize: -1,
|
||||
cookies: [],
|
||||
headers: [{ name: 'Content-Type', value: 'multipart/form-data' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'Content-Type',
|
||||
value: 'multipart/form-data',
|
||||
},
|
||||
],
|
||||
headersSize: -1,
|
||||
httpVersion: 'HTTP/1.1',
|
||||
method: 'POST',
|
||||
@@ -1,5 +1,3 @@
|
||||
// @flow
|
||||
|
||||
import * as models from '../../models';
|
||||
import * as importUtil from '../import';
|
||||
import { getAppVersion } from '../constants';
|
||||
@@ -8,6 +6,7 @@ import YAML from 'yaml';
|
||||
|
||||
describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('exports a single workspace and some requests only as an HTTP Archive', async () => {
|
||||
const wrk1 = await models.workspace.create({
|
||||
_id: 'wrk_1',
|
||||
@@ -17,7 +16,12 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
_id: 'req_1',
|
||||
name: 'Request 1',
|
||||
parentId: wrk1._id,
|
||||
headers: [{ name: 'X-Environment', value: '{{ envvalue }}' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: '{{ envvalue }}',
|
||||
},
|
||||
],
|
||||
metaSortKey: 0,
|
||||
});
|
||||
const req2 = await models.request.create({
|
||||
@@ -43,7 +47,6 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
parentId: wrk1._id,
|
||||
activeEnvironmentId: env1Private._id,
|
||||
});
|
||||
|
||||
const wrk2 = await models.workspace.create({
|
||||
_id: 'wrk_2',
|
||||
name: 'Workspace 2',
|
||||
@@ -53,9 +56,7 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
name: 'Request 3',
|
||||
parentId: wrk2._id,
|
||||
});
|
||||
|
||||
const includePrivateDocs = true;
|
||||
|
||||
// Test export whole workspace.
|
||||
const exportWorkspacesJson = await importUtil.exportWorkspacesHAR(wrk1, includePrivateDocs);
|
||||
const exportWorkspacesData = JSON.parse(exportWorkspacesJson);
|
||||
@@ -64,7 +65,12 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
entries: [
|
||||
{
|
||||
request: {
|
||||
headers: [{ name: 'X-Environment', value: 'private1' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'private1',
|
||||
},
|
||||
],
|
||||
},
|
||||
comment: req1.name,
|
||||
},
|
||||
@@ -75,7 +81,6 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
},
|
||||
});
|
||||
expect(exportWorkspacesData.log.entries.length).toBe(2);
|
||||
|
||||
// Test export some requests only.
|
||||
const exportRequestsJson = await importUtil.exportRequestsHAR([req1], includePrivateDocs);
|
||||
const exportRequestsData = JSON.parse(exportRequestsJson);
|
||||
@@ -84,7 +89,12 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
entries: [
|
||||
{
|
||||
request: {
|
||||
headers: [{ name: 'X-Environment', value: 'private1' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'private1',
|
||||
},
|
||||
],
|
||||
},
|
||||
comment: req1.name,
|
||||
},
|
||||
@@ -93,6 +103,7 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
});
|
||||
expect(exportRequestsData.log.entries.length).toBe(1);
|
||||
});
|
||||
|
||||
it('exports all workspaces as an HTTP Archive', async () => {
|
||||
const wrk1 = await models.workspace.create({
|
||||
_id: 'wrk_1',
|
||||
@@ -106,15 +117,24 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
_id: 'req_1',
|
||||
name: 'Request 1',
|
||||
parentId: wrk1._id,
|
||||
headers: [{ name: 'X-Environment', value: '{{ envvalue }}' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: '{{ envvalue }}',
|
||||
},
|
||||
],
|
||||
});
|
||||
await models.request.create({
|
||||
_id: 'req_2',
|
||||
name: 'Request 2',
|
||||
parentId: wrk2._id,
|
||||
headers: [{ name: 'X-Environment', value: '{{ envvalue }}' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: '{{ envvalue }}',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
let env1Base = await models.environment.getOrCreateForWorkspace(wrk1);
|
||||
env1Base = await models.environment.update(env1Base, {
|
||||
data: {
|
||||
@@ -142,7 +162,6 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
envvalue: 'private2',
|
||||
},
|
||||
});
|
||||
|
||||
await models.workspaceMeta.create({
|
||||
parentId: wrk1._id,
|
||||
activeEnvironmentId: env1Public._id,
|
||||
@@ -151,23 +170,31 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
parentId: wrk2._id,
|
||||
activeEnvironmentId: env2Private._id,
|
||||
});
|
||||
|
||||
const includePrivateDocs = false;
|
||||
const json = await importUtil.exportWorkspacesHAR(null, includePrivateDocs);
|
||||
const data = JSON.parse(json);
|
||||
|
||||
expect(data).toMatchObject({
|
||||
log: {
|
||||
entries: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
request: expect.objectContaining({
|
||||
headers: [{ name: 'X-Environment', value: 'public1' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'public1',
|
||||
},
|
||||
],
|
||||
}),
|
||||
comment: 'Request 1',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
request: expect.objectContaining({
|
||||
headers: [{ name: 'X-Environment', value: 'base2' }],
|
||||
headers: [
|
||||
{
|
||||
name: 'X-Environment',
|
||||
value: 'base2',
|
||||
},
|
||||
],
|
||||
}),
|
||||
comment: 'Request 2',
|
||||
}),
|
||||
@@ -179,9 +206,13 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
|
||||
|
||||
describe('export', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('exports all workspaces and some requests only', async () => {
|
||||
const w = await models.workspace.create({ name: 'Workspace' });
|
||||
const w = await models.workspace.create({
|
||||
name: 'Workspace',
|
||||
});
|
||||
const spec = await models.apiSpec.getByParentId(w._id); // Created by workspace migration
|
||||
|
||||
const jar = await models.cookieJar.getOrCreateForParentId(w._id);
|
||||
const r1 = await models.request.create({
|
||||
name: 'Request 1',
|
||||
@@ -227,70 +258,108 @@ describe('export', () => {
|
||||
isPrivate: true,
|
||||
parentId: eBase._id,
|
||||
});
|
||||
|
||||
// Test export whole workspace.
|
||||
const exportedWorkspacesJson = await importUtil.exportWorkspacesData(null, false, 'json');
|
||||
const exportedWorkspacesYaml = await importUtil.exportWorkspacesData(null, false, 'yaml');
|
||||
const exportWorkspacesDataJson = JSON.parse(exportedWorkspacesJson);
|
||||
const exportWorkspacesDataYaml = YAML.parse(exportedWorkspacesYaml);
|
||||
|
||||
// Ensure JSON is the same as YAML
|
||||
expect(exportWorkspacesDataJson.resources).toEqual(exportWorkspacesDataYaml.resources);
|
||||
|
||||
expect(exportWorkspacesDataJson).toMatchObject({
|
||||
_type: 'export',
|
||||
__export_format: 4,
|
||||
__export_date: expect.stringMatching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/),
|
||||
__export_source: `insomnia.desktop.app:v${getAppVersion()}`,
|
||||
resources: expect.arrayContaining([
|
||||
expect.objectContaining({ _id: w._id }),
|
||||
expect.objectContaining({ _id: spec._id }),
|
||||
expect.objectContaining({ _id: eBase._id }),
|
||||
expect.objectContaining({ _id: jar._id }),
|
||||
expect.objectContaining({ _id: r1._id }),
|
||||
expect.objectContaining({ _id: f2._id }),
|
||||
expect.objectContaining({ _id: r2._id }),
|
||||
expect.objectContaining({ _id: ePub._id }),
|
||||
expect.objectContaining({ _id: gr1._id }),
|
||||
expect.objectContaining({ _id: pd._id }),
|
||||
expect.objectContaining({ _id: pf1._id }),
|
||||
expect.objectContaining({ _id: gr2._id }),
|
||||
expect.objectContaining({ _id: pf2._id }),
|
||||
expect.objectContaining({
|
||||
_id: w._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: spec._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: eBase._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: jar._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: r1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: f2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: r2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: ePub._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: gr1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pd._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: gr2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf2._id,
|
||||
}),
|
||||
]),
|
||||
});
|
||||
expect(exportWorkspacesDataJson.resources.length).toBe(13);
|
||||
|
||||
// Test export some requests only.
|
||||
const exportRequestsJson = await importUtil.exportRequestsData([r1, gr1], false, 'json');
|
||||
const exportRequestsYaml = await importUtil.exportRequestsData([r1, gr1], false, 'yaml');
|
||||
const exportRequestsDataJSON = JSON.parse(exportRequestsJson);
|
||||
const exportRequestsDataYAML = YAML.parse(exportRequestsYaml);
|
||||
|
||||
expect(exportRequestsDataJSON).toMatchObject({
|
||||
_type: 'export',
|
||||
__export_format: 4,
|
||||
__export_date: expect.stringMatching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/),
|
||||
__export_source: `insomnia.desktop.app:v${getAppVersion()}`,
|
||||
resources: expect.arrayContaining([
|
||||
expect.objectContaining({ _id: w._id }),
|
||||
expect.objectContaining({ _id: eBase._id }),
|
||||
expect.objectContaining({ _id: jar._id }),
|
||||
expect.objectContaining({ _id: r1._id }),
|
||||
expect.objectContaining({ _id: ePub._id }),
|
||||
expect.objectContaining({ _id: gr1._id }),
|
||||
expect.objectContaining({ _id: pf1._id }),
|
||||
expect.objectContaining({ _id: pf2._id }),
|
||||
expect.objectContaining({
|
||||
_id: w._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: eBase._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: jar._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: r1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: ePub._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: gr1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf2._id,
|
||||
}),
|
||||
]),
|
||||
});
|
||||
|
||||
expect(exportRequestsDataJSON.resources.length).toBe(10);
|
||||
expect(exportRequestsDataYAML.resources.length).toBe(10);
|
||||
|
||||
// Ensure JSON and YAML are the same
|
||||
expect(exportRequestsDataJSON.resources).toEqual(exportRequestsDataYAML.resources);
|
||||
});
|
||||
|
||||
it('exports correct models', async () => {
|
||||
const w = await models.workspace.create({ name: 'Workspace' });
|
||||
const w = await models.workspace.create({
|
||||
name: 'Workspace',
|
||||
});
|
||||
const spec = await models.apiSpec.getOrCreateForParentId(w._id, {
|
||||
type: 'yaml',
|
||||
contents: 'openapi: "3.0.0"',
|
||||
@@ -348,29 +417,55 @@ describe('export', () => {
|
||||
isPrivate: true,
|
||||
parentId: eBase._id,
|
||||
});
|
||||
|
||||
const result = await importUtil.exportWorkspacesData(w, false, 'json');
|
||||
|
||||
expect(JSON.parse(result)).toEqual({
|
||||
_type: 'export',
|
||||
__export_format: 4,
|
||||
__export_date: expect.stringMatching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/),
|
||||
__export_source: `insomnia.desktop.app:v${getAppVersion()}`,
|
||||
resources: expect.arrayContaining([
|
||||
expect.objectContaining({ _id: w._id }),
|
||||
expect.objectContaining({ _id: eBase._id }),
|
||||
expect.objectContaining({ _id: jar._id }),
|
||||
expect.objectContaining({ _id: pd._id }),
|
||||
expect.objectContaining({ _id: pf1._id }),
|
||||
expect.objectContaining({ _id: pf2._id }),
|
||||
expect.objectContaining({ _id: r1._id }),
|
||||
expect.objectContaining({ _id: r2._id }),
|
||||
expect.objectContaining({ _id: gr1._id }),
|
||||
expect.objectContaining({ _id: gr2._id }),
|
||||
expect.objectContaining({ _id: uts1._id }),
|
||||
expect.objectContaining({ _id: ut1._id }),
|
||||
expect.objectContaining({ _id: ePub._id }),
|
||||
expect.objectContaining({ _id: spec._id }),
|
||||
expect.objectContaining({
|
||||
_id: w._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: eBase._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: jar._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pd._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: pf2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: r1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: r2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: gr1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: gr2._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: uts1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: ut1._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: ePub._id,
|
||||
}),
|
||||
expect.objectContaining({
|
||||
_id: spec._id,
|
||||
}),
|
||||
]),
|
||||
});
|
||||
});
|
||||
@@ -378,24 +473,38 @@ describe('export', () => {
|
||||
|
||||
describe('isApiSpecImport()', () => {
|
||||
it.each(['swagger2', 'openapi3'])('should return true if spec id is %o', (id: string) => {
|
||||
expect(importUtil.isApiSpecImport({ id })).toBe(true);
|
||||
expect(
|
||||
importUtil.isApiSpecImport({
|
||||
id,
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if spec id is not valid', () => {
|
||||
const id = 'invalid-id';
|
||||
|
||||
expect(importUtil.isApiSpecImport({ id })).toBe(false);
|
||||
expect(
|
||||
importUtil.isApiSpecImport({
|
||||
id,
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isInsomniaV4Import()', () => {
|
||||
it.each(['insomnia-4'])('should return true if spec id is %o', (id: string) => {
|
||||
expect(importUtil.isInsomniaV4Import({ id })).toBe(true);
|
||||
expect(
|
||||
importUtil.isInsomniaV4Import({
|
||||
id,
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false if spec id is not valid', () => {
|
||||
const id = 'invalid-id';
|
||||
|
||||
expect(importUtil.isInsomniaV4Import({ id })).toBe(false);
|
||||
expect(
|
||||
importUtil.isInsomniaV4Import({
|
||||
id,
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -7,7 +7,6 @@ describe('LocalStorage()', () => {
|
||||
beforeEach(async () => {
|
||||
await globalBeforeEach();
|
||||
jest.useFakeTimers();
|
||||
|
||||
// There has to be a better way to reset this...
|
||||
setTimeout.mock.calls = [];
|
||||
});
|
||||
@@ -27,16 +26,19 @@ describe('LocalStorage()', () => {
|
||||
it('does basic operations', () => {
|
||||
const basePath = `/tmp/insomnia-localstorage-${Math.random()}`;
|
||||
const localStorage = new LocalStorage(basePath);
|
||||
|
||||
// Test get and set
|
||||
localStorage.setItem('foo', 'bar 1');
|
||||
localStorage.setItem('foo', 'bar');
|
||||
expect(localStorage.getItem('foo', 'BAD')).toBe('bar');
|
||||
|
||||
// Test Object storage
|
||||
localStorage.setItem('obj', { foo: 'bar', arr: [1, 2, 3] });
|
||||
expect(localStorage.getItem('obj')).toEqual({ foo: 'bar', arr: [1, 2, 3] });
|
||||
|
||||
localStorage.setItem('obj', {
|
||||
foo: 'bar',
|
||||
arr: [1, 2, 3],
|
||||
});
|
||||
expect(localStorage.getItem('obj')).toEqual({
|
||||
foo: 'bar',
|
||||
arr: [1, 2, 3],
|
||||
});
|
||||
// Test default values
|
||||
expect(localStorage.getItem('dne', 'default')).toEqual('default');
|
||||
expect(localStorage.getItem('dne')).toEqual('default');
|
||||
@@ -44,30 +46,26 @@ describe('LocalStorage()', () => {
|
||||
|
||||
it('does handles malformed files', () => {
|
||||
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
|
||||
const basePath = `/tmp/insomnia-localstorage-${Math.random()}`;
|
||||
const localStorage = new LocalStorage(basePath);
|
||||
|
||||
// Assert default is returned on bad JSON
|
||||
fs.writeFileSync(path.join(basePath, 'key'), '{bad JSON');
|
||||
expect(localStorage.getItem('key', 'default')).toBe('default');
|
||||
|
||||
// Assert that writing our file actually works
|
||||
fs.writeFileSync(path.join(basePath, 'key'), '{"good": "JSON"}');
|
||||
expect(localStorage.getItem('key', 'default')).toEqual({ good: 'JSON' });
|
||||
expect(localStorage.getItem('key', 'default')).toEqual({
|
||||
good: 'JSON',
|
||||
});
|
||||
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('does handles failing to write file', () => {
|
||||
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
|
||||
const basePath = `/tmp/insomnia-localstorage-${Math.random()}`;
|
||||
const localStorage = new LocalStorage(basePath);
|
||||
fs.rmdirSync(basePath);
|
||||
localStorage.setItem('key', 'value');
|
||||
|
||||
jest.runAllTimers();
|
||||
|
||||
// Since the above operation failed to write, we should now get back
|
||||
// the default value
|
||||
expect(localStorage.getItem('key', 'different')).toBe('different');
|
||||
@@ -78,17 +76,13 @@ describe('LocalStorage()', () => {
|
||||
const basePath = `/tmp/insomnia-localstorage-${Math.random()}`;
|
||||
const localStorage = new LocalStorage(basePath);
|
||||
localStorage.setItem('foo', 'bar');
|
||||
|
||||
// Assert timeouts are called
|
||||
expect(setTimeout.mock.calls.length).toBe(1);
|
||||
expect(setTimeout.mock.calls[0][1]).toBe(100);
|
||||
|
||||
// Force debouncer to flush
|
||||
jest.runAllTimers();
|
||||
|
||||
// Assert there is one item stored
|
||||
expect(fs.readdirSync(basePath).length).toEqual(1);
|
||||
|
||||
// Assert the contents are correct
|
||||
const contents = fs.readFileSync(path.join(basePath, 'foo'), 'utf8');
|
||||
expect(contents).toEqual('"bar"');
|
||||
@@ -100,18 +94,14 @@ describe('LocalStorage()', () => {
|
||||
localStorage.setItem('foo', 'bar1');
|
||||
localStorage.setItem('another', 10);
|
||||
localStorage.setItem('foo', 'bar3');
|
||||
|
||||
// Assert timeouts are called
|
||||
expect(setTimeout.mock.calls.length).toBe(3);
|
||||
expect(setTimeout.mock.calls[0][1]).toBe(100);
|
||||
expect(setTimeout.mock.calls[1][1]).toBe(100);
|
||||
expect(setTimeout.mock.calls[2][1]).toBe(100);
|
||||
|
||||
expect(fs.readdirSync(basePath).length).toEqual(0);
|
||||
|
||||
// Force flush
|
||||
jest.runAllTimers();
|
||||
|
||||
// Make sure only one item exists
|
||||
expect(fs.readdirSync(basePath).length).toEqual(2);
|
||||
expect(fs.readFileSync(path.join(basePath, 'foo'), 'utf8')).toEqual('"bar3"');
|
||||
@@ -10,27 +10,39 @@ import {
|
||||
|
||||
describe('hasAuthHeader()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('finds valid header', () => {
|
||||
const yes = misc.hasAuthHeader([
|
||||
{ name: 'foo', value: 'bar' },
|
||||
{ name: 'authorization', value: 'foo' },
|
||||
{
|
||||
name: 'foo',
|
||||
value: 'bar',
|
||||
},
|
||||
{
|
||||
name: 'authorization',
|
||||
value: 'foo',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(yes).toEqual(true);
|
||||
});
|
||||
|
||||
it('finds valid header case insensitive', () => {
|
||||
const yes = misc.hasAuthHeader([
|
||||
{ name: 'foo', value: 'bar' },
|
||||
{ name: 'AuthOrizAtiOn', value: 'foo' },
|
||||
{
|
||||
name: 'foo',
|
||||
value: 'bar',
|
||||
},
|
||||
{
|
||||
name: 'AuthOrizAtiOn',
|
||||
value: 'foo',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(yes).toEqual(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('generateId()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('generates a valid ID', () => {
|
||||
const id = misc.generateId('foo');
|
||||
expect(id).toMatch(/^foo_[a-z0-9]{32}$/);
|
||||
@@ -44,18 +56,72 @@ describe('generateId()', () => {
|
||||
|
||||
describe('filterHeaders()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('handles bad headers', () => {
|
||||
expect(misc.filterHeaders(null, null)).toEqual([]);
|
||||
expect(misc.filterHeaders([], null)).toEqual([]);
|
||||
expect(misc.filterHeaders(['bad'], null)).toEqual([]);
|
||||
expect(misc.filterHeaders(['bad'], 'good')).toEqual([]);
|
||||
expect(misc.filterHeaders(null, 'good')).toEqual([]);
|
||||
expect(misc.filterHeaders([{ name: '', value: 'valid' }], '')).toEqual([]);
|
||||
expect(misc.filterHeaders([{ name: 123, value: 123 }], 123)).toEqual([]);
|
||||
expect(misc.filterHeaders([{ name: 'good', value: 'valid' }], 123)).toEqual([]);
|
||||
expect(misc.filterHeaders([{ name: 'good', value: 'valid' }], null)).toEqual([]);
|
||||
expect(misc.filterHeaders([{ name: 'good', value: 'valid' }], 'good')).toEqual([
|
||||
{ name: 'good', value: 'valid' },
|
||||
expect(
|
||||
misc.filterHeaders(
|
||||
[
|
||||
{
|
||||
name: '',
|
||||
value: 'valid',
|
||||
},
|
||||
],
|
||||
'',
|
||||
),
|
||||
).toEqual([]);
|
||||
expect(
|
||||
misc.filterHeaders(
|
||||
[
|
||||
{
|
||||
name: 123,
|
||||
value: 123,
|
||||
},
|
||||
],
|
||||
123,
|
||||
),
|
||||
).toEqual([]);
|
||||
expect(
|
||||
misc.filterHeaders(
|
||||
[
|
||||
{
|
||||
name: 'good',
|
||||
value: 'valid',
|
||||
},
|
||||
],
|
||||
123,
|
||||
),
|
||||
).toEqual([]);
|
||||
expect(
|
||||
misc.filterHeaders(
|
||||
[
|
||||
{
|
||||
name: 'good',
|
||||
value: 'valid',
|
||||
},
|
||||
],
|
||||
null,
|
||||
),
|
||||
).toEqual([]);
|
||||
expect(
|
||||
misc.filterHeaders(
|
||||
[
|
||||
{
|
||||
name: 'good',
|
||||
value: 'valid',
|
||||
},
|
||||
],
|
||||
'good',
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
name: 'good',
|
||||
value: 'valid',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -71,18 +137,14 @@ describe('keyedDebounce()', () => {
|
||||
const fn = misc.keyedDebounce(results => {
|
||||
resultsList.push(results);
|
||||
}, 100);
|
||||
|
||||
fn('foo', 'bar');
|
||||
fn('baz', 'bar');
|
||||
fn('foo', 'bar2');
|
||||
fn('foo', 'bar3');
|
||||
fn('multi', 'foo', 'bar', 'baz');
|
||||
|
||||
expect(setTimeout.mock.calls.length).toBe(5);
|
||||
expect(resultsList).toEqual([]);
|
||||
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(resultsList).toEqual([
|
||||
{
|
||||
foo: ['bar3'],
|
||||
@@ -104,31 +166,27 @@ describe('debounce()', () => {
|
||||
const fn = misc.debounce((...args) => {
|
||||
resultList.push(args);
|
||||
}, 100);
|
||||
|
||||
fn('foo');
|
||||
fn('foo');
|
||||
fn('multi', 'foo', 'bar', 'baz');
|
||||
fn('baz', 'bar');
|
||||
fn('foo', 'bar3');
|
||||
|
||||
expect(setTimeout.mock.calls.length).toBe(5);
|
||||
expect(resultList).toEqual([]);
|
||||
|
||||
jest.runAllTimers();
|
||||
|
||||
expect(resultList).toEqual([['foo', 'bar3']]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fuzzyMatch()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('can get a positive fuzzy match on a single field', () => {
|
||||
expect(misc.fuzzyMatch('test', 'testing')).toEqual({
|
||||
score: -3,
|
||||
indexes: [0, 1, 2, 3],
|
||||
target: 'testing',
|
||||
});
|
||||
|
||||
expect(misc.fuzzyMatch('tst', 'testing')).toEqual({
|
||||
score: -2004,
|
||||
indexes: [0, 2, 3],
|
||||
@@ -144,6 +202,7 @@ describe('fuzzyMatch()', () => {
|
||||
|
||||
describe('fuzzyMatchAll()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('can get a positive fuzzy match on multiple fields', () => {
|
||||
expect(misc.fuzzyMatchAll('', [undefined])).toEqual(null);
|
||||
expect(misc.fuzzyMatchAll('', ['testing'])).toEqual(null);
|
||||
@@ -153,7 +212,11 @@ describe('fuzzyMatchAll()', () => {
|
||||
indexes: [0, 1, 2, 3],
|
||||
target: 'testing foo',
|
||||
});
|
||||
expect(misc.fuzzyMatchAll('test foo', ['testing', 'foo'], { splitSpace: true })).toEqual({
|
||||
expect(
|
||||
misc.fuzzyMatchAll('test foo', ['testing', 'foo'], {
|
||||
splitSpace: true,
|
||||
}),
|
||||
).toEqual({
|
||||
score: 0,
|
||||
indexes: [0, 1, 2, 3, 0, 1, 2],
|
||||
target: 'testing foo',
|
||||
@@ -225,32 +288,65 @@ describe('pluralize()', () => {
|
||||
});
|
||||
|
||||
describe('diffPatchObj()', () => {
|
||||
const a = { x: 1 };
|
||||
const b = { x: 2, y: 3 };
|
||||
const c = { x: 4, y: { z: 5 } };
|
||||
const a = {
|
||||
x: 1,
|
||||
};
|
||||
const b = {
|
||||
x: 2,
|
||||
y: 3,
|
||||
};
|
||||
const c = {
|
||||
x: 4,
|
||||
y: {
|
||||
z: 5,
|
||||
},
|
||||
};
|
||||
|
||||
it('does a basic merge', () => {
|
||||
expect(diffPatchObj(a, b)).toEqual({ x: 2, y: 3 });
|
||||
|
||||
expect(diffPatchObj(b, a)).toEqual({ x: 1, y: 3 });
|
||||
expect(diffPatchObj(a, b)).toEqual({
|
||||
x: 2,
|
||||
y: 3,
|
||||
});
|
||||
expect(diffPatchObj(b, a)).toEqual({
|
||||
x: 1,
|
||||
y: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it.skip('does a basic merge, deep', () => {
|
||||
expect(diffPatchObj(a, c, true)).toEqual({ x: 2, y: 3 });
|
||||
|
||||
expect(diffPatchObj(c, a, true)).toEqual({ x: 1 });
|
||||
expect(diffPatchObj(a, c, true)).toEqual({
|
||||
x: 2,
|
||||
y: 3,
|
||||
});
|
||||
expect(diffPatchObj(c, a, true)).toEqual({
|
||||
x: 1,
|
||||
});
|
||||
});
|
||||
|
||||
it.skip('does a basic nested merge', () => {
|
||||
expect(diffPatchObj(a, b)).toEqual({ x: 2, y: 3 });
|
||||
|
||||
expect(diffPatchObj(b, a)).toEqual({ x: 1, y: { z: 5 } });
|
||||
expect(diffPatchObj(a, b)).toEqual({
|
||||
x: 2,
|
||||
y: 3,
|
||||
});
|
||||
expect(diffPatchObj(b, a)).toEqual({
|
||||
x: 1,
|
||||
y: {
|
||||
z: 5,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it.skip('does a basic nested merge, deep', () => {
|
||||
expect(diffPatchObj(a, c, true)).toEqual({ x: 2, y: 3 });
|
||||
|
||||
expect(diffPatchObj(c, a, true)).toEqual({ x: 1, y: { z: 5 } });
|
||||
expect(diffPatchObj(a, c, true)).toEqual({
|
||||
x: 2,
|
||||
y: 3,
|
||||
});
|
||||
expect(diffPatchObj(c, a, true)).toEqual({
|
||||
x: 1,
|
||||
y: {
|
||||
z: 5,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -297,7 +393,6 @@ describe('isNotNullOrUndefined', () => {
|
||||
expect(isNotNullOrUndefined(0)).toBe(true);
|
||||
expect(isNotNullOrUndefined('')).toBe(true);
|
||||
expect(isNotNullOrUndefined(false)).toBe(true);
|
||||
|
||||
expect(isNotNullOrUndefined(null)).toBe(false);
|
||||
expect(isNotNullOrUndefined(undefined)).toBe(false);
|
||||
});
|
||||
@@ -6,6 +6,7 @@ jest.mock('electron');
|
||||
|
||||
describe('render()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('renders hello world', async () => {
|
||||
const rendered = await renderUtils.render('Hello {{ msg }}!', {
|
||||
msg: 'World',
|
||||
@@ -25,14 +26,20 @@ describe('render()', () => {
|
||||
|
||||
it('renders nested object', async () => {
|
||||
const rendered = await renderUtils.render('Hello {{ users[0].name }}!', {
|
||||
users: [{ name: 'Niji' }],
|
||||
users: [
|
||||
{
|
||||
name: 'Niji',
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(rendered).toBe('Hello Niji!');
|
||||
});
|
||||
|
||||
it('fails on invalid template', async () => {
|
||||
try {
|
||||
await renderUtils.render('Hello {{ msg }!', { msg: 'World' });
|
||||
await renderUtils.render('Hello {{ msg }!', {
|
||||
msg: 'World',
|
||||
});
|
||||
fail('Render should have failed');
|
||||
} catch (err) {
|
||||
expect(err.message).toContain('expected variable end');
|
||||
@@ -52,16 +59,13 @@ describe('render()', () => {
|
||||
'&': ['value', 'replaced', 'hashed', 'consume'],
|
||||
},
|
||||
};
|
||||
|
||||
const context = await renderUtils.buildRenderContext([], rootEnvironment);
|
||||
|
||||
expect(context).toEqual({
|
||||
value: 'ThisIsATopSecretValue',
|
||||
hashed: 'f67565de946a899a534fd908e7eef872',
|
||||
replaced: 'f67565de946a899a534fd908e7eef872',
|
||||
consume: 'f67565de946a899a534fd908e7eef872',
|
||||
});
|
||||
|
||||
// In runtime, this context is used to render, which re-evaluates the expression for replaced in the rootEnvironment by using the built context
|
||||
// Regression test from issue 1917 - https://github.com/Kong/insomnia/issues/1917
|
||||
const renderExpression = await renderUtils.render(rootEnvironment.data.replaced, context);
|
||||
@@ -71,6 +75,7 @@ describe('render()', () => {
|
||||
|
||||
describe('buildRenderContext()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('cascades properly', async () => {
|
||||
const ancestors = [
|
||||
{
|
||||
@@ -88,7 +93,6 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const rootEnvironment = {
|
||||
type: models.environment.type,
|
||||
data: {
|
||||
@@ -96,7 +100,6 @@ describe('buildRenderContext()', () => {
|
||||
root: true,
|
||||
},
|
||||
};
|
||||
|
||||
const subEnvironment = {
|
||||
type: models.environment.type,
|
||||
data: {
|
||||
@@ -104,13 +107,11 @@ describe('buildRenderContext()', () => {
|
||||
sub: true,
|
||||
},
|
||||
};
|
||||
|
||||
const context = await renderUtils.buildRenderContext(
|
||||
ancestors,
|
||||
rootEnvironment,
|
||||
subEnvironment,
|
||||
);
|
||||
|
||||
expect(context).toEqual({
|
||||
foo: 'parent',
|
||||
ancestor: true,
|
||||
@@ -124,12 +125,12 @@ describe('buildRenderContext()', () => {
|
||||
{
|
||||
// Sub Environment
|
||||
type: models.requestGroup.type,
|
||||
environment: { recursive: '{{ recursive }}/hello' },
|
||||
environment: {
|
||||
recursive: '{{ recursive }}/hello',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
// This is longer than 3 because it multiplies every time (1 -> 2 -> 4 -> 8)
|
||||
expect(context).toEqual({
|
||||
recursive: '{{ recursive }}/hello/hello/hello/hello/hello/hello/hello/hello',
|
||||
@@ -145,7 +146,6 @@ describe('buildRenderContext()', () => {
|
||||
url: '{{ proto }}://{{ domain }}',
|
||||
},
|
||||
};
|
||||
|
||||
const sub = {
|
||||
type: models.environment.type,
|
||||
data: {
|
||||
@@ -155,7 +155,6 @@ describe('buildRenderContext()', () => {
|
||||
url: '{{ proto }}://{{ domain }}:{{ port }}',
|
||||
},
|
||||
};
|
||||
|
||||
const ancestors = [
|
||||
{
|
||||
// Folder Environment
|
||||
@@ -167,9 +166,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors, root, sub);
|
||||
|
||||
expect(context).toEqual({
|
||||
proto: 'https',
|
||||
domain: 'folder.com',
|
||||
@@ -181,14 +178,16 @@ describe('buildRenderContext()', () => {
|
||||
it('does the thing', async () => {
|
||||
const root = {
|
||||
type: models.environment.type,
|
||||
data: { url: 'insomnia.rest' },
|
||||
data: {
|
||||
url: 'insomnia.rest',
|
||||
},
|
||||
};
|
||||
|
||||
const sub = {
|
||||
type: models.environment.type,
|
||||
data: { url: '{{ url }}/sub' },
|
||||
data: {
|
||||
url: '{{ url }}/sub',
|
||||
},
|
||||
};
|
||||
|
||||
const ancestors = [
|
||||
{
|
||||
// Folder Environment
|
||||
@@ -199,9 +198,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors, root, sub);
|
||||
|
||||
expect(context).toEqual({
|
||||
url: 'insomnia.rest/sub/folder',
|
||||
name: 'folder',
|
||||
@@ -222,9 +219,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
d: '/d',
|
||||
c: '/c/d',
|
||||
@@ -245,9 +240,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
sibling: 'sibling',
|
||||
test: 'sibling/hello',
|
||||
@@ -271,9 +264,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
grandparent: 'grandparent',
|
||||
test: 'grandparent parent',
|
||||
@@ -297,10 +288,10 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({ base_url: 'https://insomnia.rest/resource' });
|
||||
expect(context).toEqual({
|
||||
base_url: 'https://insomnia.rest/resource',
|
||||
});
|
||||
});
|
||||
|
||||
it('rendered parent, ignoring sibling environment variables', async () => {
|
||||
@@ -329,7 +320,6 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
expect(await renderUtils.render('{{ urls.admin }}/foo', context)).toBe(
|
||||
'https://parent.com/admin/foo',
|
||||
@@ -356,9 +346,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
parent: 'parent',
|
||||
test: 'parent grandparent',
|
||||
@@ -376,23 +364,41 @@ describe('buildRenderContext()', () => {
|
||||
name: 'Grandparent',
|
||||
type: models.requestGroup.type,
|
||||
environment: {
|
||||
users: [{ name: 'Mike' }, { name: 'Opender' }],
|
||||
users: [
|
||||
{
|
||||
name: 'Mike',
|
||||
},
|
||||
{
|
||||
name: 'Opender',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
users: [{ name: 'Mike' }, { name: 'Opender' }],
|
||||
users: [
|
||||
{
|
||||
name: 'Mike',
|
||||
},
|
||||
{
|
||||
name: 'Opender',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('works with ordered objects', async () => {
|
||||
const obj = {
|
||||
users: [
|
||||
{ name: 'Mike', id: 1 },
|
||||
{ name: 'Opender', id: 2 },
|
||||
{
|
||||
name: 'Mike',
|
||||
id: 1,
|
||||
},
|
||||
{
|
||||
name: 'Opender',
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
};
|
||||
const order = {
|
||||
@@ -400,39 +406,39 @@ describe('buildRenderContext()', () => {
|
||||
'&~|users~|0': ['id', 'name'],
|
||||
'&~|users~|1': ['id', 'name'],
|
||||
};
|
||||
|
||||
const requestGroup = {
|
||||
name: 'Parent',
|
||||
type: models.requestGroup.type,
|
||||
environment: obj,
|
||||
environmentPropertyOrder: order,
|
||||
};
|
||||
|
||||
const rootEnvironment = {
|
||||
name: 'Parent',
|
||||
type: models.environment.type,
|
||||
data: obj,
|
||||
dataPropertyOrder: order,
|
||||
};
|
||||
|
||||
const subEnvironment = {
|
||||
name: 'Sub',
|
||||
type: models.environment.type,
|
||||
data: obj,
|
||||
dataPropertyOrder: order,
|
||||
};
|
||||
|
||||
const groupCtx = await renderUtils.buildRenderContext([requestGroup]);
|
||||
const rootCtx = await renderUtils.buildRenderContext([], rootEnvironment);
|
||||
const subCtx = await renderUtils.buildRenderContext([], null, subEnvironment);
|
||||
|
||||
const expected = {
|
||||
users: [
|
||||
{ id: 1, name: 'Mike' },
|
||||
{ id: 2, name: 'Opender' },
|
||||
{
|
||||
id: 1,
|
||||
name: 'Mike',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'Opender',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
expect(groupCtx).toEqual(expected);
|
||||
expect(rootCtx).toEqual(expected);
|
||||
expect(subCtx).toEqual(expected);
|
||||
@@ -465,9 +471,7 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const context = await renderUtils.buildRenderContext(ancestors);
|
||||
|
||||
expect(context).toEqual({
|
||||
parent: 'parent',
|
||||
test: 'parent grandparent',
|
||||
@@ -499,7 +503,6 @@ describe('buildRenderContext()', () => {
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const subEnvironment = {
|
||||
type: models.environment.type,
|
||||
data: {
|
||||
@@ -508,7 +511,6 @@ describe('buildRenderContext()', () => {
|
||||
base_url: 'https://insomnia.rest',
|
||||
},
|
||||
};
|
||||
|
||||
const rootEnvironment = {
|
||||
type: models.environment.type,
|
||||
data: {
|
||||
@@ -517,13 +519,11 @@ describe('buildRenderContext()', () => {
|
||||
base_url: 'ignore this',
|
||||
},
|
||||
};
|
||||
|
||||
const context = await renderUtils.buildRenderContext(
|
||||
ancestors,
|
||||
rootEnvironment,
|
||||
subEnvironment,
|
||||
);
|
||||
|
||||
expect(context).toEqual({
|
||||
base_url: 'https://insomnia.rest',
|
||||
url: 'https://insomnia.rest/resource',
|
||||
@@ -547,9 +547,7 @@ describe('buildRenderContext()', () => {
|
||||
'&': ['value', 'hashed', 'replaced', 'consume'],
|
||||
},
|
||||
};
|
||||
|
||||
const context = await renderUtils.buildRenderContext([], rootEnvironment);
|
||||
|
||||
expect(context).toEqual({
|
||||
value: 'ThisIsATopSecretValue',
|
||||
hashed: 'f67565de946a899a534fd908e7eef872',
|
||||
@@ -570,9 +568,7 @@ describe('buildRenderContext()', () => {
|
||||
secret: 'ThisIsATopSecretValue',
|
||||
},
|
||||
};
|
||||
|
||||
const context = await renderUtils.buildRenderContext([], rootEnvironment);
|
||||
|
||||
expect(context).toEqual({
|
||||
hash_input: '123456789012345ThisIsATopSecretValue',
|
||||
hash_input_expected: '123456789012345ThisIsATopSecretValue',
|
||||
@@ -589,19 +585,18 @@ describe('buildRenderContext()', () => {
|
||||
const ancestors = null;
|
||||
const rootEnvironment = null;
|
||||
const subEnvironment = null;
|
||||
|
||||
const context = await renderUtils.buildRenderContext(
|
||||
ancestors,
|
||||
rootEnvironment,
|
||||
subEnvironment,
|
||||
);
|
||||
|
||||
expect(context).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('render()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('correctly renders simple Object', async () => {
|
||||
const newObj = await renderUtils.render(
|
||||
{
|
||||
@@ -614,7 +609,6 @@ describe('render()', () => {
|
||||
bad: 'hi',
|
||||
},
|
||||
);
|
||||
|
||||
expect(newObj).toEqual({
|
||||
foo: 'bar',
|
||||
bar: 'bar',
|
||||
@@ -636,9 +630,9 @@ describe('render()', () => {
|
||||
arr: [1, 2, '{{ foo }}'],
|
||||
},
|
||||
};
|
||||
|
||||
const newObj = await renderUtils.render(obj, { foo: 'bar' });
|
||||
|
||||
const newObj = await renderUtils.render(obj, {
|
||||
foo: 'bar',
|
||||
});
|
||||
expect(newObj).toEqual({
|
||||
foo: 'bar',
|
||||
null: null,
|
||||
@@ -651,7 +645,6 @@ describe('render()', () => {
|
||||
arr: [1, 2, 'bar'],
|
||||
},
|
||||
});
|
||||
|
||||
// Make sure original request isn't changed
|
||||
expect(obj.foo).toBe('{{ foo }}');
|
||||
expect(obj.nested.foo).toBe('{{ foo }}');
|
||||
@@ -666,7 +659,9 @@ describe('render()', () => {
|
||||
bar: 'bar',
|
||||
baz: '{{ bad }}',
|
||||
},
|
||||
{ foo: 'bar' },
|
||||
{
|
||||
foo: 'bar',
|
||||
},
|
||||
);
|
||||
fail('Render should have failed');
|
||||
} catch (err) {
|
||||
@@ -676,16 +671,17 @@ describe('render()', () => {
|
||||
|
||||
it('keep on error setting', async () => {
|
||||
const template = '{{ foo }} {% invalid "hi" %}';
|
||||
const context = { foo: 'bar' };
|
||||
|
||||
const context = {
|
||||
foo: 'bar',
|
||||
};
|
||||
const resultOnlyVars = await renderUtils.render(
|
||||
template,
|
||||
context,
|
||||
null,
|
||||
renderUtils.KEEP_ON_ERROR,
|
||||
);
|
||||
|
||||
expect(resultOnlyVars).toBe('{{ foo }} {% invalid "hi" %}');
|
||||
|
||||
try {
|
||||
await renderUtils.render(template, context, null);
|
||||
fail('Render should not have succeeded');
|
||||
@@ -696,7 +692,11 @@ describe('render()', () => {
|
||||
|
||||
it('outputs correct error path', async () => {
|
||||
const template = {
|
||||
foo: [{ bar: '{% foo %}' }],
|
||||
foo: [
|
||||
{
|
||||
bar: '{% foo %}',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -709,7 +709,11 @@ describe('render()', () => {
|
||||
|
||||
it('outputs correct error path when private first node', async () => {
|
||||
const template = {
|
||||
_foo: { _bar: { baz: '{% foo %}' } },
|
||||
_foo: {
|
||||
_bar: {
|
||||
baz: '{% foo %}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -733,7 +737,6 @@ describe('getRenderedGrpcRequestMessage()', () => {
|
||||
host: 'testb.in:9000',
|
||||
},
|
||||
});
|
||||
|
||||
const grpcRequest = await models.grpcRequest.create({
|
||||
parentId: w1._id,
|
||||
name: 'hi {{ foo }}',
|
||||
@@ -1,360 +0,0 @@
|
||||
import {
|
||||
ascendingNumberSort,
|
||||
descendingNumberSort,
|
||||
metaSortKeySort,
|
||||
sortMethodMap,
|
||||
} from '../sorting';
|
||||
import { request, requestGroup, grpcRequest } from '../../models';
|
||||
import {
|
||||
METHOD_DELETE,
|
||||
METHOD_GET,
|
||||
METHOD_HEAD,
|
||||
METHOD_OPTIONS,
|
||||
METHOD_PATCH,
|
||||
METHOD_POST,
|
||||
METHOD_PUT,
|
||||
SORT_CREATED_ASC,
|
||||
SORT_CREATED_DESC,
|
||||
SORT_HTTP_METHOD,
|
||||
SORT_NAME_ASC,
|
||||
SORT_NAME_DESC,
|
||||
SORT_TYPE_ASC,
|
||||
SORT_TYPE_DESC,
|
||||
} from '../constants';
|
||||
|
||||
describe('Sorting methods', () => {
|
||||
it('sorts by name', () => {
|
||||
const ascendingNameSort = sortMethodMap[SORT_NAME_ASC];
|
||||
expect(ascendingNameSort({ name: 'a' }, { name: 'b' })).toBe(-1);
|
||||
expect(ascendingNameSort({ name: 'b' }, { name: 'a' })).toBe(1);
|
||||
expect(ascendingNameSort({ name: 'ab' }, { name: 'abb' })).toBe(-1);
|
||||
expect(ascendingNameSort({ name: 'abb' }, { name: 'ab' })).toBe(1);
|
||||
expect(ascendingNameSort({ name: 'Abb' }, { name: 'bbb' })).toBe(-1);
|
||||
expect(ascendingNameSort({ name: 'bbb' }, { name: 'Abb' })).toBe(1);
|
||||
expect(ascendingNameSort({ name: 'abb' }, { name: 'Bbb' })).toBe(-1);
|
||||
expect(ascendingNameSort({ name: 'Bbb' }, { name: 'abb' })).toBe(1);
|
||||
expect(ascendingNameSort({ name: 'åbb' }, { name: 'bbb' })).toBe(-1);
|
||||
expect(ascendingNameSort({ name: 'bbb' }, { name: 'åbb' })).toBe(1);
|
||||
expect(ascendingNameSort({ name: 'abcdef' }, { name: 'abcdef' })).toBe(0);
|
||||
|
||||
const descendingNameSort = sortMethodMap[SORT_NAME_DESC];
|
||||
expect(descendingNameSort({ name: 'a' }, { name: 'b' })).toBe(1);
|
||||
expect(descendingNameSort({ name: 'b' }, { name: 'a' })).toBe(-1);
|
||||
expect(descendingNameSort({ name: 'ab' }, { name: 'abb' })).toBe(1);
|
||||
expect(descendingNameSort({ name: 'abb' }, { name: 'ab' })).toBe(-1);
|
||||
expect(descendingNameSort({ name: 'Abb' }, { name: 'bbb' })).toBe(1);
|
||||
expect(descendingNameSort({ name: 'bbb' }, { name: 'Abb' })).toBe(-1);
|
||||
expect(descendingNameSort({ name: 'abb' }, { name: 'Bbb' })).toBe(1);
|
||||
expect(descendingNameSort({ name: 'Bbb' }, { name: 'abb' })).toBe(-1);
|
||||
expect(descendingNameSort({ name: 'åbb' }, { name: 'bbb' })).toBe(1);
|
||||
expect(descendingNameSort({ name: 'bbb' }, { name: 'åbb' })).toBe(-1);
|
||||
expect(descendingNameSort({ name: 'abcdef' }, { name: 'abcdef' })).toBe(0);
|
||||
});
|
||||
|
||||
it('sorts by timestamp', () => {
|
||||
const createdFirstSort = sortMethodMap[SORT_CREATED_ASC];
|
||||
expect(createdFirstSort({ created: 1000 }, { created: 1100 })).toBe(-1);
|
||||
expect(createdFirstSort({ created: 1100 }, { created: 1000 })).toBe(1);
|
||||
expect(createdFirstSort({ created: 0 }, { created: 1 })).toBe(-1);
|
||||
expect(createdFirstSort({ created: 1 }, { created: 0 })).toBe(1);
|
||||
expect(createdFirstSort({ created: 123456789 }, { created: 123456789 })).toBe(0);
|
||||
|
||||
const createdLastSort = sortMethodMap[SORT_CREATED_DESC];
|
||||
expect(createdLastSort({ created: 1000 }, { created: 1100 })).toBe(1);
|
||||
expect(createdLastSort({ created: 1100 }, { created: 1000 })).toBe(-1);
|
||||
expect(createdLastSort({ created: 0 }, { created: 1 })).toBe(1);
|
||||
expect(createdLastSort({ created: 1 }, { created: 0 })).toBe(-1);
|
||||
expect(createdLastSort({ created: 123456789 }, { created: 123456789 })).toBe(0);
|
||||
});
|
||||
|
||||
it('sorts by type', () => {
|
||||
const ascendingTypeSort = sortMethodMap[SORT_TYPE_ASC];
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: request.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
ascendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
|
||||
const descendingTypeSort = sortMethodMap[SORT_TYPE_DESC];
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 1 },
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: request.type, metaSortKey: 2 },
|
||||
{ type: request.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
descendingTypeSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
});
|
||||
|
||||
it('sorts by HTTP method', () => {
|
||||
const httpMethodSort = sortMethodMap[SORT_HTTP_METHOD];
|
||||
expect(httpMethodSort({ type: request.type }, { type: requestGroup.type })).toBe(-1);
|
||||
expect(httpMethodSort({ type: requestGroup.type }, { type: request.type })).toBe(1);
|
||||
expect(httpMethodSort({ type: request.type }, { type: grpcRequest.type })).toBe(-1);
|
||||
expect(httpMethodSort({ type: grpcRequest.type }, { type: request.type })).toBe(1);
|
||||
expect(httpMethodSort({ type: requestGroup.type }, { type: grpcRequest.type })).toBe(1);
|
||||
expect(httpMethodSort({ type: grpcRequest.type }, { type: requestGroup.type })).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: requestGroup.type, metaSortKey: 2 },
|
||||
{ type: requestGroup.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: grpcRequest.type, metaSortKey: 2 },
|
||||
{ type: grpcRequest.type, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: 'CUSTOM_A' },
|
||||
{ type: request.type, method: 'CUSTOM_B' },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: 'CUSTOM' },
|
||||
{ type: request.type, method: METHOD_GET },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_GET },
|
||||
{ type: request.type, method: METHOD_POST },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_POST },
|
||||
{ type: request.type, method: METHOD_PUT },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_PUT },
|
||||
{ type: request.type, method: METHOD_PATCH },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_PATCH },
|
||||
{ type: request.type, method: METHOD_DELETE },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_DELETE },
|
||||
{ type: request.type, method: METHOD_OPTIONS },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_OPTIONS },
|
||||
{ type: request.type, method: METHOD_HEAD },
|
||||
),
|
||||
).toBe(-1);
|
||||
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: 'CUSTOM', metaSortKey: 1 },
|
||||
{ type: request.type, method: 'CUSTOM', metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: 'CUSTOM', metaSortKey: 2 },
|
||||
{ type: request.type, method: 'CUSTOM', metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_GET, metaSortKey: 1 },
|
||||
{ type: request.type, method: METHOD_GET, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_GET, metaSortKey: 2 },
|
||||
{ type: request.type, method: METHOD_GET, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_DELETE, metaSortKey: 1 },
|
||||
{ type: request.type, method: METHOD_DELETE, metaSortKey: 2 },
|
||||
),
|
||||
).toBe(-1);
|
||||
expect(
|
||||
httpMethodSort(
|
||||
{ type: request.type, method: METHOD_DELETE, metaSortKey: 2 },
|
||||
{ type: request.type, method: METHOD_DELETE, metaSortKey: 1 },
|
||||
),
|
||||
).toBe(1);
|
||||
});
|
||||
|
||||
it('sorts by metaSortKey', () => {
|
||||
expect(metaSortKeySort({ metaSortKey: 1 }, { metaSortKey: 2 })).toBe(-1);
|
||||
expect(metaSortKeySort({ metaSortKey: 2 }, { metaSortKey: 1 })).toBe(1);
|
||||
expect(metaSortKeySort({ metaSortKey: -2 }, { metaSortKey: 1 })).toBe(-1);
|
||||
expect(metaSortKeySort({ metaSortKey: 1 }, { metaSortKey: -2 })).toBe(1);
|
||||
expect(metaSortKeySort({ metaSortKey: 1, _id: 2 }, { metaSortKey: 1, _id: 1 })).toBe(-1);
|
||||
expect(metaSortKeySort({ metaSortKey: 1, _id: 1 }, { metaSortKey: 1, _id: 2 })).toBe(1);
|
||||
});
|
||||
|
||||
it('sorts by number', () => {
|
||||
expect(ascendingNumberSort(1, 2)).toBe(-1);
|
||||
expect(ascendingNumberSort(-2, 1)).toBe(-1);
|
||||
expect(ascendingNumberSort(2, 1)).toBe(1);
|
||||
expect(ascendingNumberSort(1, -2)).toBe(1);
|
||||
|
||||
expect(descendingNumberSort(1, 2)).toBe(1);
|
||||
expect(descendingNumberSort(-2, 1)).toBe(1);
|
||||
expect(descendingNumberSort(2, 1)).toBe(-1);
|
||||
expect(descendingNumberSort(1, -2)).toBe(-1);
|
||||
});
|
||||
});
|
||||
1018
packages/insomnia-app/app/common/__tests__/sorting.test.ts
Normal file
1018
packages/insomnia-app/app/common/__tests__/sorting.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import * as models from '../../models';
|
||||
import { strings } from '../strings';
|
||||
import { getWorkspaceLabel } from '../get-workspace-label';
|
||||
@@ -1,8 +1,7 @@
|
||||
// @flow
|
||||
import { buildQueryStringFromParams, joinUrlAndQueryString } from 'insomnia-url';
|
||||
import * as electron from 'electron';
|
||||
import * as models from '../models/index';
|
||||
import * as db from '../common/database';
|
||||
import { database as db } from '../common/database';
|
||||
import * as uuid from 'uuid';
|
||||
import {
|
||||
getAppId,
|
||||
@@ -15,13 +14,12 @@ import {
|
||||
isDevelopment,
|
||||
} from './constants';
|
||||
import type { RequestParameter } from '../models/request';
|
||||
import { getScreenResolution, getUserLanguage, getViewportSize } from './misc';
|
||||
import { getScreenResolution, getUserLanguage, getViewportSize } from './electron-helpers';
|
||||
import Analytics from 'analytics-node';
|
||||
import { getAccountId } from '../account/session';
|
||||
|
||||
const DIMENSION_PLATFORM = 1;
|
||||
const DIMENSION_VERSION = 2;
|
||||
|
||||
const KEY_TRACKING_ID = 'tid';
|
||||
const KEY_VERSION = 'v';
|
||||
const KEY_CLIENT_ID = 'cid';
|
||||
@@ -42,12 +40,15 @@ const KEY_ANONYMIZE_IP = 'aip';
|
||||
const KEY_APPLICATION_NAME = 'an';
|
||||
const KEY_APPLICATION_ID = 'aid';
|
||||
const KEY_APPLICATION_VERSION = 'av';
|
||||
|
||||
const KEY_CUSTOM_DIMENSION_PREFIX = 'cd';
|
||||
|
||||
let _currentLocationPath = '/';
|
||||
|
||||
export function trackEvent(category: string, action: string, label: ?string, value: ?string) {
|
||||
export function trackEvent(
|
||||
category: string,
|
||||
action: string,
|
||||
label?: string | null,
|
||||
value?: string | null,
|
||||
) {
|
||||
process.nextTick(async () => {
|
||||
await _trackEvent(true, category, action, label, value);
|
||||
});
|
||||
@@ -56,8 +57,8 @@ export function trackEvent(category: string, action: string, label: ?string, val
|
||||
export function trackNonInteractiveEvent(
|
||||
category: string,
|
||||
action: string,
|
||||
label: ?string,
|
||||
value: ?string,
|
||||
label?: string | null,
|
||||
value?: string | null,
|
||||
) {
|
||||
process.nextTick(async () => {
|
||||
await _trackEvent(false, category, action, label, value, false);
|
||||
@@ -79,8 +80,8 @@ export function trackNonInteractiveEvent(
|
||||
export function trackNonInteractiveEventQueueable(
|
||||
category: string,
|
||||
action: string,
|
||||
label: ?string,
|
||||
value: ?string,
|
||||
label?: string | null,
|
||||
value?: string | null,
|
||||
) {
|
||||
process.nextTick(async () => {
|
||||
await _trackEvent(false, category, action, label, value, true);
|
||||
@@ -93,25 +94,27 @@ export function trackPageView(path: string) {
|
||||
});
|
||||
}
|
||||
|
||||
export async function getDeviceId(): Promise<string> {
|
||||
export async function getDeviceId() {
|
||||
const settings = await models.settings.getOrCreate();
|
||||
|
||||
let { deviceId } = settings;
|
||||
|
||||
if (!deviceId) {
|
||||
// Migrate old GA ID into settings model if needed
|
||||
const oldId = (window && window.localStorage.getItem('gaClientId')) || null;
|
||||
deviceId = oldId || uuid.v4();
|
||||
|
||||
await models.settings.update(settings, { deviceId });
|
||||
await models.settings.update(settings, {
|
||||
deviceId,
|
||||
});
|
||||
}
|
||||
|
||||
return deviceId;
|
||||
}
|
||||
|
||||
let segmentClient = null;
|
||||
let segmentClient: Analytics | null = null;
|
||||
|
||||
export async function trackSegmentEvent(event: String, properties?: Object) {
|
||||
export async function trackSegmentEvent(event: string, properties?: Record<string, any>) {
|
||||
const settings = await models.settings.getOrCreate();
|
||||
|
||||
if (!settings.enableAnalytics) {
|
||||
return;
|
||||
}
|
||||
@@ -119,19 +122,20 @@ export async function trackSegmentEvent(event: String, properties?: Object) {
|
||||
try {
|
||||
if (!segmentClient) {
|
||||
segmentClient = new Analytics(getSegmentWriteKey(), {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
axiosConfig: {
|
||||
// This is needed to ensure that we use the NodeJS adapter in the render process
|
||||
...(global?.require && { adapter: global.require('axios/lib/adapters/http') }),
|
||||
...(global?.require && {
|
||||
adapter: global.require('axios/lib/adapters/http'),
|
||||
}),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const anonymousId = await getDeviceId();
|
||||
|
||||
// TODO: This currently always returns an empty string in the main process
|
||||
// This is due to the session data being stored in localStorage
|
||||
const userId = getAccountId();
|
||||
|
||||
segmentClient.track({
|
||||
anonymousId,
|
||||
userId,
|
||||
@@ -156,14 +160,16 @@ export async function trackSegmentEvent(event: String, properties?: Object) {
|
||||
// ~~~~~~~~~~~~~~~~~ //
|
||||
// Private Functions //
|
||||
// ~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
function _getOsName(): string {
|
||||
function _getOsName() {
|
||||
const platform = getAppPlatform();
|
||||
|
||||
switch (platform) {
|
||||
case 'darwin':
|
||||
return 'mac';
|
||||
|
||||
case 'win32':
|
||||
return 'windows';
|
||||
|
||||
default:
|
||||
return platform;
|
||||
}
|
||||
@@ -174,69 +180,133 @@ export async function _trackEvent(
|
||||
interactive: boolean,
|
||||
category: string,
|
||||
action: string,
|
||||
label: ?string,
|
||||
value: ?string,
|
||||
queuable: ?boolean,
|
||||
label?: string | null,
|
||||
value?: string | null,
|
||||
queuable?: boolean | null,
|
||||
) {
|
||||
const prefix = interactive ? '[ga] Event' : '[ga] Non-interactive';
|
||||
console.log(prefix, [category, action, label, value].filter(Boolean).join(', '));
|
||||
|
||||
const params = [
|
||||
{ name: KEY_HIT_TYPE, value: 'event' },
|
||||
{ name: KEY_EVENT_CATEGORY, value: category },
|
||||
{ name: KEY_EVENT_ACTION, value: action },
|
||||
{
|
||||
name: KEY_HIT_TYPE,
|
||||
value: 'event',
|
||||
},
|
||||
{
|
||||
name: KEY_EVENT_CATEGORY,
|
||||
value: category,
|
||||
},
|
||||
{
|
||||
name: KEY_EVENT_ACTION,
|
||||
value: action,
|
||||
},
|
||||
];
|
||||
|
||||
!interactive && params.push({ name: KEY_NON_INTERACTION, value: '1' });
|
||||
label && params.push({ name: KEY_EVENT_LABEL, value: label });
|
||||
value && params.push({ name: KEY_EVENT_VALUE, value: value });
|
||||
|
||||
!interactive &&
|
||||
params.push({
|
||||
name: KEY_NON_INTERACTION,
|
||||
value: '1',
|
||||
});
|
||||
label &&
|
||||
params.push({
|
||||
name: KEY_EVENT_LABEL,
|
||||
value: label,
|
||||
});
|
||||
value &&
|
||||
params.push({
|
||||
name: KEY_EVENT_VALUE,
|
||||
value: value,
|
||||
});
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
await _sendToGoogle(params, !!queuable);
|
||||
}
|
||||
|
||||
export async function _trackPageView(location: string) {
|
||||
_currentLocationPath = location;
|
||||
console.log('[ga] Page', _currentLocationPath);
|
||||
|
||||
const params = [{ name: KEY_HIT_TYPE, value: 'pageview' }];
|
||||
|
||||
const params = [
|
||||
{
|
||||
name: KEY_HIT_TYPE,
|
||||
value: 'pageview',
|
||||
},
|
||||
];
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
await _sendToGoogle(params, false);
|
||||
}
|
||||
|
||||
async function _getDefaultParams(): Promise<Array<RequestParameter>> {
|
||||
const deviceId = await getDeviceId();
|
||||
|
||||
// Prepping user agent string prior to sending to GA due to Electron base UA not being GA friendly.
|
||||
const ua = String(window?.navigator?.userAgent)
|
||||
.replace(new RegExp(`${getAppId()}\\/\\d+\\.\\d+\\.\\d+ `), '')
|
||||
.replace(/Electron\/\d+\.\d+\.\d+ /, '');
|
||||
|
||||
const params = [
|
||||
{ name: KEY_VERSION, value: '1' },
|
||||
{ name: KEY_TRACKING_ID, value: getGoogleAnalyticsId() },
|
||||
{ name: KEY_CLIENT_ID, value: deviceId },
|
||||
{ name: KEY_USER_AGENT, value: ua },
|
||||
{ name: KEY_LOCATION, value: getGoogleAnalyticsLocation() + _currentLocationPath },
|
||||
{ name: KEY_SCREEN_RESOLUTION, value: getScreenResolution() },
|
||||
{ name: KEY_USER_LANGUAGE, value: getUserLanguage() },
|
||||
{ name: KEY_TITLE, value: `${getAppId()}:${getAppVersion()}` },
|
||||
{ name: KEY_CUSTOM_DIMENSION_PREFIX + DIMENSION_PLATFORM, value: getAppPlatform() },
|
||||
{ name: KEY_CUSTOM_DIMENSION_PREFIX + DIMENSION_VERSION, value: getAppVersion() },
|
||||
{ name: KEY_ANONYMIZE_IP, value: '1' },
|
||||
{ name: KEY_APPLICATION_NAME, value: getAppName() },
|
||||
{ name: KEY_APPLICATION_ID, value: getAppId() },
|
||||
{ name: KEY_APPLICATION_VERSION, value: getAppVersion() },
|
||||
{
|
||||
name: KEY_VERSION,
|
||||
value: '1',
|
||||
},
|
||||
{
|
||||
name: KEY_TRACKING_ID,
|
||||
value: getGoogleAnalyticsId(),
|
||||
},
|
||||
{
|
||||
name: KEY_CLIENT_ID,
|
||||
value: deviceId,
|
||||
},
|
||||
{
|
||||
name: KEY_USER_AGENT,
|
||||
value: ua,
|
||||
},
|
||||
{
|
||||
name: KEY_LOCATION,
|
||||
value: getGoogleAnalyticsLocation() + _currentLocationPath,
|
||||
},
|
||||
{
|
||||
name: KEY_SCREEN_RESOLUTION,
|
||||
value: getScreenResolution(),
|
||||
},
|
||||
{
|
||||
name: KEY_USER_LANGUAGE,
|
||||
value: getUserLanguage(),
|
||||
},
|
||||
{
|
||||
name: KEY_TITLE,
|
||||
value: `${getAppId()}:${getAppVersion()}`,
|
||||
},
|
||||
{
|
||||
name: KEY_CUSTOM_DIMENSION_PREFIX + DIMENSION_PLATFORM,
|
||||
value: getAppPlatform(),
|
||||
},
|
||||
{
|
||||
name: KEY_CUSTOM_DIMENSION_PREFIX + DIMENSION_VERSION,
|
||||
value: getAppVersion(),
|
||||
},
|
||||
{
|
||||
name: KEY_ANONYMIZE_IP,
|
||||
value: '1',
|
||||
},
|
||||
{
|
||||
name: KEY_APPLICATION_NAME,
|
||||
value: getAppName(),
|
||||
},
|
||||
{
|
||||
name: KEY_APPLICATION_ID,
|
||||
value: getAppId(),
|
||||
},
|
||||
{
|
||||
name: KEY_APPLICATION_VERSION,
|
||||
value: getAppVersion(),
|
||||
},
|
||||
];
|
||||
|
||||
const viewport = getViewportSize();
|
||||
viewport && params.push({ name: KEY_VIEWPORT_SIZE, value: viewport });
|
||||
|
||||
viewport &&
|
||||
params.push({
|
||||
name: KEY_VIEWPORT_SIZE,
|
||||
value: viewport,
|
||||
});
|
||||
global.document &&
|
||||
params.push({
|
||||
name: KEY_DOCUMENT_ENCODING,
|
||||
value: global.document.inputEncoding,
|
||||
});
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
@@ -245,6 +315,7 @@ async function _getDefaultParams(): Promise<Array<RequestParameter>> {
|
||||
db.onChange(async changes => {
|
||||
for (const change of changes) {
|
||||
const [event, doc] = change;
|
||||
|
||||
if (doc.type === models.settings.type && event === 'update') {
|
||||
if (doc.enableAnalytics) {
|
||||
await _flushQueuedEvents();
|
||||
@@ -253,38 +324,40 @@ db.onChange(async changes => {
|
||||
}
|
||||
});
|
||||
|
||||
async function _sendToGoogle(params: Array<RequestParameter>, queueable: boolean) {
|
||||
async function _sendToGoogle(params: RequestParameter, queueable: boolean) {
|
||||
const settings = await models.settings.getOrCreate();
|
||||
|
||||
if (!settings.enableAnalytics) {
|
||||
if (queueable) {
|
||||
console.log('[ga] Queued event', params);
|
||||
|
||||
_queuedEvents.push(params);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const baseParams = await _getDefaultParams();
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
const allParams = [...baseParams, ...params];
|
||||
const qs = buildQueryStringFromParams(allParams);
|
||||
const baseUrl = isDevelopment()
|
||||
? 'https://www.google-analytics.com/debug/collect'
|
||||
: 'https://www.google-analytics.com/collect';
|
||||
const url = joinUrlAndQueryString(baseUrl, qs);
|
||||
|
||||
const net = (electron.remote || electron).net;
|
||||
const request = net.request(url);
|
||||
|
||||
request.once('error', err => {
|
||||
console.warn('[ga] Network error', err);
|
||||
});
|
||||
|
||||
request.once('response', response => {
|
||||
const { statusCode } = response;
|
||||
|
||||
if (statusCode < 200 && statusCode >= 300) {
|
||||
console.warn('[ga] Bad status code ' + statusCode);
|
||||
}
|
||||
|
||||
const chunks = [];
|
||||
const chunks: Array<Buffer> = [];
|
||||
const [contentType] = response.headers['content-type'] || [];
|
||||
|
||||
if (contentType !== 'application/json') {
|
||||
@@ -294,9 +367,11 @@ async function _sendToGoogle(params: Array<RequestParameter>, queueable: boolean
|
||||
|
||||
response.on('end', () => {
|
||||
const jsonStr = Buffer.concat(chunks).toString('utf8');
|
||||
|
||||
try {
|
||||
const data = JSON.parse(jsonStr);
|
||||
const { hitParsingResult } = data;
|
||||
|
||||
if (hitParsingResult.valid) {
|
||||
return;
|
||||
}
|
||||
@@ -310,12 +385,10 @@ async function _sendToGoogle(params: Array<RequestParameter>, queueable: boolean
|
||||
console.warn('[ga] Failed to parse response', err);
|
||||
}
|
||||
});
|
||||
|
||||
response.on('data', chunk => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
});
|
||||
|
||||
request.end();
|
||||
}
|
||||
|
||||
@@ -325,13 +398,11 @@ async function _sendToGoogle(params: Array<RequestParameter>, queueable: boolean
|
||||
* @returns {Promise<void>}
|
||||
* @private
|
||||
*/
|
||||
let _queuedEvents = [];
|
||||
let _queuedEvents: Array<RequestParameter> = [];
|
||||
|
||||
async function _flushQueuedEvents() {
|
||||
console.log(`[ga] Flushing ${_queuedEvents.length} queued events`);
|
||||
|
||||
const tmp = [..._queuedEvents];
|
||||
|
||||
// Clear queue before we even start sending to prevent races
|
||||
_queuedEvents = [];
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
// @flow
|
||||
|
||||
import YAML from 'yaml';
|
||||
|
||||
export interface ParsedApiSpec {
|
||||
contents: Record<string, any> | null;
|
||||
rawContents: string;
|
||||
format: 'openapi' | 'swagger' | null;
|
||||
formatVersion: string | null;
|
||||
}
|
||||
|
||||
export function parseApiSpec(
|
||||
rawDocument: string,
|
||||
): {
|
||||
contents: Object | null,
|
||||
rawContents: string,
|
||||
format: 'openapi' | 'swagger' | null,
|
||||
formatVersion: string | null,
|
||||
} {
|
||||
const result = {
|
||||
) {
|
||||
const result: ParsedApiSpec = {
|
||||
contents: null,
|
||||
rawContents: rawDocument,
|
||||
format: null,
|
||||
@@ -1,96 +1,33 @@
|
||||
// @flow
|
||||
import { appConfig } from '../../config';
|
||||
import * as electron from 'electron';
|
||||
import appConfig from '../../config/config.json';
|
||||
import path from 'path';
|
||||
import mkdirp from 'mkdirp';
|
||||
import { getDataDirectory } from './misc';
|
||||
import { ValueOf } from 'type-fest';
|
||||
import { getDataDirectory } from './electron-helpers';
|
||||
|
||||
// App Stuff
|
||||
export const getAppVersion = () => appConfig.version;
|
||||
export const getAppLongName = () => appConfig.longName;
|
||||
export const getAppName = () => appConfig.productName;
|
||||
export const getAppDefaultTheme = () => appConfig.theme;
|
||||
export const getAppDefaultLightTheme = () => appConfig.lightTheme;
|
||||
export const getAppDefaultDarkTheme = () => appConfig.darkTheme;
|
||||
export const getAppSynopsis = () => appConfig.synopsis;
|
||||
export const getAppId = () => appConfig.appId;
|
||||
export const getGoogleAnalyticsId = () => appConfig.gaId;
|
||||
export const getGoogleAnalyticsLocation = () => appConfig.gaLocation;
|
||||
export const getAppPlatform = () => process.platform;
|
||||
export const isMac = () => getAppPlatform() === 'darwin';
|
||||
export const isLinux = () => getAppPlatform() === 'linux';
|
||||
export const isWindows = () => getAppPlatform() === 'win32';
|
||||
export const getAppEnvironment = () => process.env.INSOMNIA_ENV || 'production';
|
||||
export const isDevelopment = () => getAppEnvironment() === 'development';
|
||||
export const getSegmentWriteKey = () => appConfig.segmentWriteKeys[isDevelopment() ? 'development' : 'production'];
|
||||
export const getAppReleaseDate = () => new Date(process.env.RELEASE_DATE ?? '').toLocaleDateString();
|
||||
|
||||
export function getAppVersion() {
|
||||
return appConfig().version;
|
||||
}
|
||||
|
||||
export function getAppLongName() {
|
||||
return appConfig().longName;
|
||||
}
|
||||
|
||||
export function getAppName() {
|
||||
return appConfig().productName;
|
||||
}
|
||||
|
||||
export function getAppDefaultTheme() {
|
||||
return appConfig().theme;
|
||||
}
|
||||
|
||||
export function getAppDefaultLightTheme() {
|
||||
return appConfig().lightTheme;
|
||||
}
|
||||
|
||||
export function getAppDefaultDarkTheme() {
|
||||
return appConfig().darkTheme;
|
||||
}
|
||||
|
||||
export function getAppSynopsis() {
|
||||
return appConfig().synopsis;
|
||||
}
|
||||
|
||||
export function getAppId() {
|
||||
return appConfig().appId;
|
||||
}
|
||||
|
||||
export function getGoogleAnalyticsId() {
|
||||
return appConfig().gaId;
|
||||
}
|
||||
|
||||
export function getGoogleAnalyticsLocation() {
|
||||
return appConfig().gaLocation;
|
||||
}
|
||||
|
||||
export function getSegmentWriteKey() {
|
||||
if (isDevelopment()) {
|
||||
return appConfig().segmentWriteKeys.development;
|
||||
}
|
||||
|
||||
return appConfig().segmentWriteKeys.production;
|
||||
}
|
||||
|
||||
export function getAppPlatform() {
|
||||
return process.platform;
|
||||
}
|
||||
|
||||
export function getAppEnvironment() {
|
||||
return process.env.INSOMNIA_ENV || 'production';
|
||||
}
|
||||
|
||||
export function getAppReleaseDate() {
|
||||
return new Date(process.env.RELEASE_DATE).toLocaleDateString();
|
||||
}
|
||||
|
||||
export function getBrowserUserAgent() {
|
||||
const ua = encodeURIComponent(
|
||||
String(window.navigator.userAgent)
|
||||
.replace(new RegExp(`${getAppId()}\\/\\d+\\.\\d+\\.\\d+ `), '')
|
||||
.replace(/Electron\/\d+\.\d+\.\d+ /, ''),
|
||||
).replace('%2C', ',');
|
||||
return ua;
|
||||
}
|
||||
|
||||
export function getTempDir() {
|
||||
// NOTE: Using a fairly unique name here because "insomnia" is a common word
|
||||
const { app } = electron.remote || electron;
|
||||
const dir = path.join(app.getPath('temp'), `insomnia_${getAppVersion()}`);
|
||||
mkdirp.sync(dir);
|
||||
return dir;
|
||||
}
|
||||
|
||||
export function isMac() {
|
||||
return getAppPlatform() === 'darwin';
|
||||
}
|
||||
|
||||
export function isLinux() {
|
||||
return getAppPlatform() === 'linux';
|
||||
}
|
||||
export const getBrowserUserAgent = () => encodeURIComponent(
|
||||
String(window.navigator.userAgent)
|
||||
.replace(new RegExp(`${getAppId()}\\/\\d+\\.\\d+\\.\\d+ `), '')
|
||||
.replace(/Electron\/\d+\.\d+\.\d+ /, ''),
|
||||
).replace('%2C', ',');
|
||||
|
||||
export function updatesSupported() {
|
||||
// Updates are not supported on Linux
|
||||
@@ -106,21 +43,8 @@ export function updatesSupported() {
|
||||
return true;
|
||||
}
|
||||
|
||||
export function isWindows() {
|
||||
return getAppPlatform() === 'win32';
|
||||
}
|
||||
|
||||
export function isDevelopment() {
|
||||
return getAppEnvironment() === 'development';
|
||||
}
|
||||
|
||||
export function getClientString() {
|
||||
return `${getAppEnvironment()}::${getAppPlatform()}::${getAppVersion()}`;
|
||||
}
|
||||
|
||||
export function changelogUrl(): string {
|
||||
return appConfig().changelogUrl;
|
||||
}
|
||||
export const getClientString = () => `${getAppEnvironment()}::${getAppPlatform()}::${getAppVersion()}`;
|
||||
export const changelogUrl = () => appConfig.changelogUrl;
|
||||
|
||||
// Global Stuff
|
||||
export const DB_PERSIST_INTERVAL = 1000 * 60 * 30; // Compact every once in a while
|
||||
@@ -140,33 +64,33 @@ export const AUTOBIND_CFG = {
|
||||
],
|
||||
};
|
||||
|
||||
// Available editor key maps
|
||||
// Available editor key map
|
||||
export const EDITOR_KEY_MAP_DEFAULT = 'default';
|
||||
export const EDITOR_KEY_MAP_EMACS = 'emacs';
|
||||
export const EDITOR_KEY_MAP_SUBLIME = 'sublime';
|
||||
export const EDITOR_KEY_MAP_VIM = 'vim';
|
||||
|
||||
// Hotkeys
|
||||
// Hotkey
|
||||
export const MNEMONIC_SYM = isMac() ? '' : '&';
|
||||
export const CTRL_SYM = isMac() ? '⌃' : 'Ctrl';
|
||||
export const ALT_SYM = isMac() ? '⌥' : 'Alt';
|
||||
export const SHIFT_SYM = isMac() ? '⇧' : 'Shift';
|
||||
export const META_SYM = isMac() ? '⌘' : 'Super';
|
||||
|
||||
// Updates
|
||||
// Update
|
||||
export const UPDATE_CHANNEL_STABLE = 'stable';
|
||||
export const UPDATE_CHANNEL_BETA = 'beta';
|
||||
export const UPDATE_URL_MAC = 'https://updates.insomnia.rest/builds/check/mac';
|
||||
export const UPDATE_URL_WINDOWS = 'https://updates.insomnia.rest/updates/win';
|
||||
|
||||
// API
|
||||
// AP
|
||||
export const API_BASE_URL = 'https://api.insomnia.rest';
|
||||
|
||||
// PLUGINS
|
||||
// PLUGIN
|
||||
export const PLUGIN_HUB_BASE = 'https://insomnia.rest/plugins';
|
||||
export const NPM_PACKAGE_BASE = 'https://www.npmjs.com/package';
|
||||
|
||||
// UI Stuff
|
||||
// UI Stuf
|
||||
export const MAX_SIDEBAR_REMS = 45;
|
||||
export const MIN_SIDEBAR_REMS = 0.75;
|
||||
export const COLLAPSE_SIDEBAR_REMS = 3;
|
||||
@@ -201,12 +125,13 @@ export const ACTIVITY_MIGRATION: GlobalActivity = 'migration';
|
||||
export const ACTIVITY_ANALYTICS: GlobalActivity = 'analytics';
|
||||
export const DEPRECATED_ACTIVITY_INSOMNIA = 'insomnia';
|
||||
|
||||
export const isWorkspaceActivity = (activity: GlobalActivity): boolean => {
|
||||
export const isWorkspaceActivity = (activity: string): activity is GlobalActivity => {
|
||||
switch (activity) {
|
||||
case ACTIVITY_SPEC:
|
||||
case ACTIVITY_DEBUG:
|
||||
case ACTIVITY_UNIT_TEST:
|
||||
return true;
|
||||
|
||||
case ACTIVITY_HOME:
|
||||
case ACTIVITY_ONBOARDING:
|
||||
case ACTIVITY_MIGRATION:
|
||||
@@ -216,7 +141,7 @@ export const isWorkspaceActivity = (activity: GlobalActivity): boolean => {
|
||||
}
|
||||
};
|
||||
|
||||
export const isValidActivity = (activity: GlobalActivity): boolean => {
|
||||
export const isValidActivity = (activity: string): activity is GlobalActivity => {
|
||||
switch (activity) {
|
||||
case ACTIVITY_SPEC:
|
||||
case ACTIVITY_DEBUG:
|
||||
@@ -226,6 +151,7 @@ export const isValidActivity = (activity: GlobalActivity): boolean => {
|
||||
case ACTIVITY_MIGRATION:
|
||||
case ACTIVITY_ANALYTICS:
|
||||
return true;
|
||||
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
@@ -256,13 +182,11 @@ export const METHOD_GRPC = 'GRPC';
|
||||
export const PREVIEW_MODE_FRIENDLY = 'friendly';
|
||||
export const PREVIEW_MODE_SOURCE = 'source';
|
||||
export const PREVIEW_MODE_RAW = 'raw';
|
||||
|
||||
const previewModeMap = {
|
||||
[PREVIEW_MODE_FRIENDLY]: ['Preview', 'Visual Preview'],
|
||||
[PREVIEW_MODE_SOURCE]: ['Source', 'Source Code'],
|
||||
[PREVIEW_MODE_RAW]: ['Raw', 'Raw Data'],
|
||||
};
|
||||
|
||||
export const PREVIEW_MODES = Object.keys(previewModeMap);
|
||||
|
||||
// Content Types
|
||||
@@ -275,7 +199,6 @@ export const CONTENT_TYPE_FORM_DATA = 'multipart/form-data';
|
||||
export const CONTENT_TYPE_FILE = 'application/octet-stream';
|
||||
export const CONTENT_TYPE_GRAPHQL = 'application/graphql';
|
||||
export const CONTENT_TYPE_OTHER = '';
|
||||
|
||||
const contentTypesMap = {
|
||||
[CONTENT_TYPE_EDN]: ['EDN', 'EDN'],
|
||||
[CONTENT_TYPE_FILE]: ['File', 'Binary File'],
|
||||
@@ -300,7 +223,6 @@ export const AUTH_HAWK = 'hawk';
|
||||
export const AUTH_AWS_IAM = 'iam';
|
||||
export const AUTH_NETRC = 'netrc';
|
||||
export const AUTH_ASAP = 'asap';
|
||||
|
||||
export const HAWK_ALGORITHM_SHA256 = 'sha256';
|
||||
export const HAWK_ALGORITHM_SHA1 = 'sha1';
|
||||
|
||||
@@ -315,9 +237,8 @@ export const HttpVersions = {
|
||||
V2_0: 'V2_0',
|
||||
v3: 'v3',
|
||||
default: 'default',
|
||||
};
|
||||
|
||||
export type HttpVersion = $Keys<typeof HttpVersions>;
|
||||
} as const;
|
||||
export type HttpVersion = ValueOf<typeof HttpVersions>;
|
||||
|
||||
const authTypesMap = {
|
||||
[AUTH_BASIC]: ['Basic', 'Basic Auth'],
|
||||
@@ -330,15 +251,15 @@ const authTypesMap = {
|
||||
[AUTH_AWS_IAM]: ['AWS', 'AWS IAM v4'],
|
||||
[AUTH_ASAP]: ['ASAP', 'Atlassian ASAP'],
|
||||
[AUTH_NETRC]: ['Netrc', 'Netrc File'],
|
||||
};
|
||||
} as const;
|
||||
|
||||
// Sort Orders
|
||||
export type SortOrder =
|
||||
| 'name-asc'
|
||||
| 'name-desc'
|
||||
| 'created-first'
|
||||
| 'created-last'
|
||||
| 'method'
|
||||
| 'created-asc'
|
||||
| 'created-desc'
|
||||
| 'http-method'
|
||||
| 'type-desc'
|
||||
| 'type-asc';
|
||||
export const SORT_NAME_ASC: SortOrder = 'name-asc';
|
||||
@@ -357,8 +278,7 @@ export const SORT_ORDERS = [
|
||||
SORT_TYPE_DESC,
|
||||
SORT_TYPE_ASC,
|
||||
];
|
||||
|
||||
export const sortOrderName: { [SortOrder]: string } = {
|
||||
export const sortOrderName: Record<SortOrder, string> = {
|
||||
[SORT_NAME_ASC]: 'Name Ascending',
|
||||
[SORT_NAME_DESC]: 'Name Descending',
|
||||
[SORT_CREATED_ASC]: 'Oldest First',
|
||||
@@ -376,7 +296,7 @@ export function getPreviewModeName(previewMode, useLong = false) {
|
||||
}
|
||||
}
|
||||
|
||||
export function getContentTypeName(contentType, useLong = false) {
|
||||
export function getContentTypeName(contentType?: string | null, useLong = false) {
|
||||
if (typeof contentType !== 'string') {
|
||||
return '';
|
||||
}
|
||||
@@ -396,7 +316,7 @@ export function getAuthTypeName(authType, useLong = false) {
|
||||
}
|
||||
}
|
||||
|
||||
export function getContentTypeFromHeaders(headers, defaultValue = null) {
|
||||
export function getContentTypeFromHeaders(headers, defaultValue: string | null = null) {
|
||||
if (!Array.isArray(headers)) {
|
||||
return null;
|
||||
}
|
||||
@@ -410,14 +330,10 @@ export const RESPONSE_CODE_DESCRIPTIONS = {
|
||||
// Special
|
||||
[STATUS_CODE_PLUGIN_ERROR]:
|
||||
'An Insomnia plugin threw an error which prevented the request from sending',
|
||||
|
||||
// 100s
|
||||
|
||||
100: 'This interim response indicates that everything so far is OK and that the client should continue with the request or ignore it if it is already finished.',
|
||||
101: 'This code is sent in response to an Upgrade: request header by the client and indicates the protocol the server is switching to. It was introduced to allow migration to an incompatible protocol version, and it is not in common use.',
|
||||
|
||||
// 200s
|
||||
|
||||
200: 'The request has succeeded.',
|
||||
201: 'The request has succeeded and a new resource has been created as a result. This is typically the response sent after POST requests, or some PUT requests.',
|
||||
202: 'The request has been received but not yet acted upon. It is non-committal, meaning that there is no way in HTTP to later send an asynchronous response indicating the outcome of processing the request. It is intended for cases where another process or server handles the request, or for batch processing.',
|
||||
@@ -428,9 +344,7 @@ export const RESPONSE_CODE_DESCRIPTIONS = {
|
||||
207: 'A Multi-Status response conveys information about multiple resources in situations where multiple status codes might be appropriate.',
|
||||
208: 'Used inside a DAV: propstat response element to avoid enumerating the internal members of multiple bindings to the same collection repeatedly.',
|
||||
226: 'The server has fulfilled a GET request for the resource, and the response is a representation of the result of one or more instance-manipulations applied to the current instance.',
|
||||
|
||||
// 300s
|
||||
|
||||
300: 'The request has more than one possible responses. User-agent or user should choose one of them. There is no standardized way to choose one of the responses.',
|
||||
301: 'This response code means that URI of requested resource has been changed. Probably, new URI would be given in the response.',
|
||||
302: 'This response code means that URI of requested resource has been changed temporarily. New changes in the URI might be made in the future. Therefore, this same URI should be used by the client in future requests.',
|
||||
@@ -440,9 +354,7 @@ export const RESPONSE_CODE_DESCRIPTIONS = {
|
||||
306: 'This response code is no longer used and is just reserved currently. It was used in a previous version of the HTTP 1.1 specification.',
|
||||
307: 'Server sent this response to directing client to get requested resource to another URI with same method that used prior request. This has the same semantic than the 302 Found HTTP response code, with the exception that the user agent must not change the HTTP method used: if a POST was used in the first request, a POST must be used in the second request.',
|
||||
308: 'This means that the resource is now permanently located at another URI, specified by the Location: HTTP Response header. This has the same semantics as the 301 Moved Permanently HTTP response code, with the exception that the user agent must not change the HTTP method used: if a POST was used in the first request, a POST must be used in the second request.',
|
||||
|
||||
// 400s
|
||||
|
||||
400: 'This response means that the server could not understand the request due to invalid syntax.',
|
||||
401: 'Authentication is needed to get the requested response. This is similar to 403, but is different in that authentication is possible.',
|
||||
402: 'This response code is reserved for future use. Initial aim for creating this code was using it for digital payment systems, but it is not used currently.',
|
||||
@@ -471,9 +383,7 @@ export const RESPONSE_CODE_DESCRIPTIONS = {
|
||||
429: 'The user has sent too many requests in a given amount of time ("rate limiting").',
|
||||
431: 'The server is unwilling to process the request because its header fields are too large. The request MAY be resubmitted after reducing the size of the request header fields.',
|
||||
451: 'The user requests an illegal resource, such as a web page censored by a government.',
|
||||
|
||||
// 500s
|
||||
|
||||
500: "The server has encountered a situation it doesn't know how to handle.",
|
||||
501: 'The request method is not supported by the server and cannot be handled. The only methods that servers are required to support (and therefore that must not return this code) are GET and HEAD.',
|
||||
502: 'This error response means that the server, while working as a gateway to get a response needed to handle the request, got an invalid response.',
|
||||
@@ -490,14 +400,10 @@ export const RESPONSE_CODE_DESCRIPTIONS = {
|
||||
export const RESPONSE_CODE_REASONS = {
|
||||
// Special
|
||||
[STATUS_CODE_PLUGIN_ERROR]: 'Plugin Error',
|
||||
|
||||
// 100s
|
||||
|
||||
100: 'Continue',
|
||||
101: 'Switching Protocols',
|
||||
|
||||
// 200s
|
||||
|
||||
200: 'OK',
|
||||
201: 'Created',
|
||||
202: 'Accepted',
|
||||
@@ -508,9 +414,7 @@ export const RESPONSE_CODE_REASONS = {
|
||||
207: 'Multi-Status',
|
||||
208: 'Already Reported',
|
||||
226: 'IM Used',
|
||||
|
||||
// 300s
|
||||
|
||||
300: 'Multiple Choices',
|
||||
301: 'Moved Permanently',
|
||||
302: 'Found',
|
||||
@@ -520,9 +424,7 @@ export const RESPONSE_CODE_REASONS = {
|
||||
306: 'Switch Proxy',
|
||||
307: 'Temporary Redirect',
|
||||
308: 'Permanent Redirect',
|
||||
|
||||
// 400s
|
||||
|
||||
400: 'Bad Request',
|
||||
401: 'Unauthorized',
|
||||
402: 'Payment Required',
|
||||
@@ -552,9 +454,7 @@ export const RESPONSE_CODE_REASONS = {
|
||||
429: 'Too Many Requests',
|
||||
431: 'Request Header Fields Too Large',
|
||||
451: 'Unavailable For Legal Reasons',
|
||||
|
||||
// 500s
|
||||
|
||||
500: 'Internal Server Error',
|
||||
501: 'Not Implemented',
|
||||
502: 'Bad Gateway',
|
||||
@@ -1,812 +0,0 @@
|
||||
// @flow
|
||||
import type { BaseModel } from '../models/index';
|
||||
import * as models from '../models/index';
|
||||
import electron from 'electron';
|
||||
import NeDB from 'nedb';
|
||||
import fsPath from 'path';
|
||||
import { DB_PERSIST_INTERVAL } from './constants';
|
||||
import * as uuid from 'uuid';
|
||||
import { generateId, getDataDirectory } from './misc';
|
||||
import { mustGetModel } from '../models';
|
||||
import type { Workspace } from '../models/workspace';
|
||||
|
||||
export const CHANGE_INSERT = 'insert';
|
||||
export const CHANGE_UPDATE = 'update';
|
||||
export const CHANGE_REMOVE = 'remove';
|
||||
|
||||
const database = {};
|
||||
const db = ({
|
||||
_empty: true,
|
||||
}: Object);
|
||||
|
||||
// ~~~~~~~ //
|
||||
// HELPERS //
|
||||
// ~~~~~~~ //
|
||||
|
||||
function allTypes() {
|
||||
return Object.keys(db);
|
||||
}
|
||||
|
||||
function getDBFilePath(modelType) {
|
||||
// NOTE: Do not EVER change this. EVER!
|
||||
return fsPath.join(getDataDirectory(), `insomnia.${modelType}.db`);
|
||||
}
|
||||
|
||||
export async function initClient() {
|
||||
electron.ipcRenderer.on('db.changes', async (e, changes) => {
|
||||
for (const fn of changeListeners) {
|
||||
await fn(changes);
|
||||
}
|
||||
});
|
||||
console.log('[db] Initialized DB client');
|
||||
}
|
||||
|
||||
export async function init(
|
||||
types: Array<string>,
|
||||
config: Object = {},
|
||||
forceReset: boolean = false,
|
||||
consoleLog: () => void = console.log,
|
||||
) {
|
||||
if (forceReset) {
|
||||
changeListeners = [];
|
||||
for (const attr of Object.keys(db)) {
|
||||
if (attr === '_empty') {
|
||||
continue;
|
||||
}
|
||||
|
||||
delete db[attr];
|
||||
}
|
||||
}
|
||||
|
||||
// Fill in the defaults
|
||||
for (const modelType of types) {
|
||||
if (db[modelType]) {
|
||||
consoleLog(`[db] Already initialized DB.${modelType}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = getDBFilePath(modelType);
|
||||
const collection = new NeDB(
|
||||
Object.assign(
|
||||
{
|
||||
autoload: true,
|
||||
filename: filePath,
|
||||
corruptAlertThreshold: 0.9,
|
||||
},
|
||||
config,
|
||||
),
|
||||
);
|
||||
|
||||
collection.persistence.setAutocompactionInterval(DB_PERSIST_INTERVAL);
|
||||
|
||||
db[modelType] = collection;
|
||||
}
|
||||
|
||||
delete db._empty;
|
||||
|
||||
electron.ipcMain.on('db.fn', async (e, fnName, replyChannel, ...args) => {
|
||||
try {
|
||||
const result = await database[fnName](...args);
|
||||
e.sender.send(replyChannel, null, result);
|
||||
} catch (err) {
|
||||
e.sender.send(replyChannel, { message: err.message, stack: err.stack });
|
||||
}
|
||||
});
|
||||
|
||||
// NOTE: Only repair the DB if we're not running in memory. Repairing here causes tests to
|
||||
// hang indefinitely for some reason.
|
||||
// TODO: Figure out why this makes tests hang
|
||||
if (!config.inMemoryOnly) {
|
||||
await _repairDatabase();
|
||||
}
|
||||
|
||||
if (!config.inMemoryOnly) {
|
||||
consoleLog(`[db] Initialized DB at ${getDBFilePath('$TYPE')}`);
|
||||
}
|
||||
|
||||
// This isn't the best place for this but w/e
|
||||
// Listen for response deletions and delete corresponding response body files
|
||||
onChange(async changes => {
|
||||
for (const [type, doc] of changes) {
|
||||
const m: Object | null = models.getModel(doc.type);
|
||||
|
||||
if (!m) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (type === CHANGE_REMOVE && typeof m.hookRemove === 'function') {
|
||||
try {
|
||||
await m.hookRemove(doc, consoleLog);
|
||||
} catch (err) {
|
||||
consoleLog(`[db] Delete hook failed for ${type} ${doc._id}: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (type === CHANGE_INSERT && typeof m.hookInsert === 'function') {
|
||||
try {
|
||||
await m.hookInsert(doc, consoleLog);
|
||||
} catch (err) {
|
||||
consoleLog(`[db] Insert hook failed for ${type} ${doc._id}: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (type === CHANGE_UPDATE && typeof m.hookUpdate === 'function') {
|
||||
try {
|
||||
await m.hookUpdate(doc, consoleLog);
|
||||
} catch (err) {
|
||||
consoleLog(`[db] Update hook failed for ${type} ${doc._id}: ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
for (const model of models.all()) {
|
||||
if (typeof model.hookDatabaseInit === 'function') {
|
||||
await model.hookDatabaseInit(consoleLog);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ~~~~~~~~~~~~~~~~ //
// Change Listeners //
// ~~~~~~~~~~~~~~~~ //

// True while a bufferChanges() window is open; notifyOfChange() queues instead of flushing.
let bufferingChanges = false;
// Monotonic id handed out by bufferChanges(); flushChanges(id) only flushes the matching window.
let bufferChangesId = 1;
// Queued [event, doc, fromSync] tuples awaiting the next flush.
let changeBuffer = [];
// Callbacks registered via onChange(), invoked with each flushed change batch.
let changeListeners = [];
|
||||
|
||||
export function onChange(callback: Function): void {
|
||||
changeListeners.push(callback);
|
||||
}
|
||||
|
||||
export function offChange(callback: Function): void {
|
||||
changeListeners = changeListeners.filter(l => l !== callback);
|
||||
}
|
||||
|
||||
/** buffers database changes and returns a buffer id */
|
||||
export const bufferChanges = (database.bufferChanges = async function(
|
||||
millis: number = 1000,
|
||||
): Promise<number> {
|
||||
if (db._empty) return _send('bufferChanges', ...arguments);
|
||||
|
||||
bufferingChanges = true;
|
||||
setTimeout(database.flushChanges, millis);
|
||||
|
||||
return ++bufferChangesId;
|
||||
});
|
||||
|
||||
/** buffers database changes and returns a buffer id */
|
||||
export const bufferChangesIndefinitely = (database.bufferChangesIndefinitely = async function(): Promise<number> {
|
||||
if (db._empty) return _send('bufferChangesIndefinitely', ...arguments);
|
||||
|
||||
bufferingChanges = true;
|
||||
|
||||
return ++bufferChangesId;
|
||||
});
|
||||
|
||||
export const flushChangesAsync = (database.flushChangesAsync = async function(
|
||||
fake: boolean = false,
|
||||
) {
|
||||
process.nextTick(async () => {
|
||||
await flushChanges(0, fake);
|
||||
});
|
||||
});
|
||||
|
||||
/**
 * Deliver (or drop) the buffered change events to all listeners.
 *
 * @param id - buffer id returned by bufferChanges(); the flush is a no-op
 *             unless id is 0 or matches the most recent buffer id.
 * @param fake - when true, buffered changes are discarded instead of delivered.
 */
export const flushChanges = (database.flushChanges = async function(
  id: number = 0,
  fake: boolean = false,
) {
  if (db._empty) return _send('flushChanges', ...arguments);

  // Only flush if ID is 0 or the current flush ID is the same as passed
  if (id !== 0 && bufferChangesId !== id) {
    return;
  }

  bufferingChanges = false;
  // Snapshot and clear the buffer BEFORE notifying, so listeners that trigger
  // new changes don't see (or re-deliver) this batch.
  const changes = [...changeBuffer];
  changeBuffer = [];

  if (changes.length === 0) {
    // No work to do
    return;
  }

  if (fake) {
    console.log(`[db] Dropped ${changes.length} changes.`);
    return;
  }

  // Notify local listeners too
  for (const fn of changeListeners) {
    await fn(changes);
  }

  // Notify remote listeners
  const windows = electron.BrowserWindow.getAllWindows();
  for (const window of windows) {
    window.webContents.send('db.changes', changes);
  }
});
|
||||
|
||||
async function notifyOfChange(event: string, doc: BaseModel, fromSync: boolean): Promise<void> {
|
||||
changeBuffer.push([event, doc, fromSync]);
|
||||
|
||||
// Flush right away if we're not buffering
|
||||
if (!bufferingChanges) {
|
||||
await database.flushChanges();
|
||||
}
|
||||
}
|
||||
|
||||
// ~~~~~~~ //
|
||||
// Helpers //
|
||||
// ~~~~~~~ //
|
||||
|
||||
export const getMostRecentlyModified = (database.getMostRecentlyModified = async function<T>(
|
||||
type: string,
|
||||
query: Object = {},
|
||||
): Promise<T | null> {
|
||||
if (db._empty) return _send('getMostRecentlyModified', ...arguments);
|
||||
|
||||
const docs = await database.findMostRecentlyModified(type, query, 1);
|
||||
return docs.length ? docs[0] : null;
|
||||
});
|
||||
|
||||
/**
 * Find documents of `type` matching `query`, newest-modified first, up to
 * `limit` (null means no limit per NeDB cursor semantics — TODO confirm).
 *
 * NOTE: lookup errors are swallowed (logged and resolved as []) rather than
 * rejected — callers always receive an array.
 */
export const findMostRecentlyModified = (database.findMostRecentlyModified = async function<T>(
  type: string,
  query: Object = {},
  limit: number | null = null,
): Promise<Array<T>> {
  if (db._empty) return _send('findMostRecentlyModified', ...arguments);

  return new Promise(resolve => {
    db[type]
      .find(query)
      .sort({ modified: -1 })
      .limit(limit)
      .exec(async (err, rawDocs) => {
        if (err) {
          console.warn('[db] Failed to find docs', err);
          resolve([]);
          return;
        }

        // Run every raw doc through the model initializer so defaults apply.
        const docs = [];
        for (const rawDoc of rawDocs) {
          docs.push(await models.initModel(type, rawDoc));
        }

        resolve(docs);
      });
  });
});
|
||||
|
||||
/**
 * Find documents of `type` matching `query`, sorted by `sort` (default:
 * oldest-created first). Each raw doc is passed through models.initModel so
 * model defaults are applied. Rejects on NeDB errors.
 */
export const find = (database.find = async function<T: BaseModel>(
  type: string,
  query: Object = {},
  sort: Object = { created: 1 },
): Promise<Array<T>> {
  if (db._empty) return _send('find', ...arguments);

  return new Promise((resolve, reject) => {
    db[type]
      .find(query)
      .sort(sort)
      .exec(async (err, rawDocs) => {
        if (err) {
          return reject(err);
        }

        // Apply model defaults to every raw doc before returning.
        const docs = [];
        for (const rawDoc of rawDocs) {
          docs.push(await models.initModel(type, rawDoc));
        }

        resolve(docs);
      });
  });
});
|
||||
|
||||
export const all = (database.all = async function<T: BaseModel>(type: string): Promise<Array<T>> {
|
||||
if (db._empty) return _send('all', ...arguments);
|
||||
|
||||
return database.find(type);
|
||||
});
|
||||
|
||||
export const getWhere = (database.getWhere = async function<T: BaseModel>(
|
||||
type: string,
|
||||
query: Object,
|
||||
): Promise<T | null> {
|
||||
if (db._empty) return _send('getWhere', ...arguments);
|
||||
|
||||
const docs = await database.find(type, query);
|
||||
return docs.length ? docs[0] : null;
|
||||
});
|
||||
|
||||
export const get = (database.get = async function<T: BaseModel>(
|
||||
type: string,
|
||||
id: string,
|
||||
): Promise<T | null> {
|
||||
if (db._empty) return _send('get', ...arguments);
|
||||
|
||||
// Short circuit IDs used to represent nothing
|
||||
if (!id || id === 'n/a') {
|
||||
return null;
|
||||
} else {
|
||||
return database.getWhere(type, { _id: id });
|
||||
}
|
||||
});
|
||||
|
||||
export const count = (database.count = async function(
|
||||
type: string,
|
||||
query: Object = {},
|
||||
): Promise<number> {
|
||||
if (db._empty) return _send('count', ...arguments);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
db[type].count(query, (err, count) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
resolve(count);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
export const upsert = (database.upsert = async function(
|
||||
doc: BaseModel,
|
||||
fromSync: boolean = false,
|
||||
): Promise<BaseModel> {
|
||||
if (db._empty) return _send('upsert', ...arguments);
|
||||
|
||||
const existingDoc = await database.get(doc.type, doc._id);
|
||||
if (existingDoc) {
|
||||
return database.update(doc, fromSync);
|
||||
} else {
|
||||
return database.insert(doc, fromSync);
|
||||
}
|
||||
});
|
||||
|
||||
export const insert = (database.insert = async function<T: BaseModel>(
|
||||
doc: T,
|
||||
fromSync: boolean = false,
|
||||
initializeModel: boolean = true,
|
||||
): Promise<T> {
|
||||
if (db._empty) return _send('insert', ...arguments);
|
||||
|
||||
return new Promise(async (resolve, reject) => {
|
||||
let docWithDefaults;
|
||||
try {
|
||||
if (initializeModel) {
|
||||
docWithDefaults = await models.initModel(doc.type, doc);
|
||||
} else {
|
||||
docWithDefaults = doc;
|
||||
}
|
||||
} catch (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
db[doc.type].insert(docWithDefaults, (err, newDoc) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
resolve(newDoc);
|
||||
|
||||
// NOTE: This needs to be after we resolve
|
||||
notifyOfChange(CHANGE_INSERT, newDoc, fromSync);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
export const update = (database.update = async function<T: BaseModel>(
|
||||
doc: T,
|
||||
fromSync: boolean = false,
|
||||
): Promise<T> {
|
||||
if (db._empty) return _send('update', ...arguments);
|
||||
|
||||
return new Promise(async (resolve, reject) => {
|
||||
let docWithDefaults;
|
||||
try {
|
||||
docWithDefaults = await models.initModel(doc.type, doc);
|
||||
} catch (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
db[doc.type].update({ _id: docWithDefaults._id }, docWithDefaults, err => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
resolve(docWithDefaults);
|
||||
|
||||
// NOTE: This needs to be after we resolve
|
||||
notifyOfChange(CHANGE_UPDATE, docWithDefaults, fromSync);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
export const remove = (database.remove = async function<T: BaseModel>(
|
||||
doc: T,
|
||||
fromSync: boolean = false,
|
||||
): Promise<void> {
|
||||
if (db._empty) return _send('remove', ...arguments);
|
||||
|
||||
const flushId = await database.bufferChanges();
|
||||
|
||||
const docs = await database.withDescendants(doc);
|
||||
const docIds = docs.map(d => d._id);
|
||||
const types = [...new Set(docs.map(d => d.type))];
|
||||
|
||||
// Don't really need to wait for this to be over;
|
||||
types.map(t => db[t].remove({ _id: { $in: docIds } }, { multi: true }));
|
||||
|
||||
docs.map(d => notifyOfChange(CHANGE_REMOVE, d, fromSync));
|
||||
|
||||
await database.flushChanges(flushId);
|
||||
});
|
||||
|
||||
/** Removes entries without removing their children */
export const unsafeRemove = (database.unsafeRemove = async function<T: BaseModel>(
  doc: T,
  fromSync: boolean = false,
): Promise<void> {
  if (db._empty) return _send('unsafeRemove', ...arguments);

  // Fire-and-forget delete of just this document; descendants are
  // intentionally left in place (hence "unsafe").
  db[doc.type].remove({ _id: doc._id });
  notifyOfChange(CHANGE_REMOVE, doc, fromSync);
});
|
||||
|
||||
export const removeWhere = (database.removeWhere = async function(
|
||||
type: string,
|
||||
query: Object,
|
||||
): Promise<void> {
|
||||
if (db._empty) return _send('removeWhere', ...arguments);
|
||||
|
||||
const flushId = await database.bufferChanges();
|
||||
|
||||
for (const doc of await database.find(type, query)) {
|
||||
const docs = await database.withDescendants(doc);
|
||||
const docIds = docs.map(d => d._id);
|
||||
const types = [...new Set(docs.map(d => d.type))];
|
||||
|
||||
// Don't really need to wait for this to be over;
|
||||
types.map(t => db[t].remove({ _id: { $in: docIds } }, { multi: true }));
|
||||
|
||||
docs.map(d => notifyOfChange(CHANGE_REMOVE, d, false));
|
||||
}
|
||||
|
||||
await database.flushChanges(flushId);
|
||||
});
|
||||
|
||||
/**
 * Apply a batch of upserts and removals inside a single buffered-change
 * window so listeners receive one consolidated change batch. All operations
 * are tagged fromSync=true.
 */
export const batchModifyDocs = (database.batchModifyDocs = async function(operations: {
  upsert: Array<Object>,
  remove: Array<Object>,
}): Promise<void> {
  if (db._empty) return _send('batchModifyDocs', ...arguments);

  const flushId = await bufferChanges();

  const promisesUpserted = [];
  const promisesDeleted = [];
  for (const doc: BaseModel of operations.upsert) {
    promisesUpserted.push(upsert(doc, true));
  }

  // unsafeRemove: the operation is expected to include descendants itself.
  for (const doc: BaseModel of operations.remove) {
    promisesDeleted.push(unsafeRemove(doc, true));
  }

  // Perform from least to most dangerous
  await Promise.all(promisesUpserted);
  await Promise.all(promisesDeleted);

  await flushChanges(flushId);
});
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~ //
|
||||
// DEFAULT MODEL STUFF //
|
||||
// ~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
/**
 * Apply `patches` to `originalDoc` (with a fresh `modified` timestamp that
 * any patch may override) and persist the result via database.update().
 */
export async function docUpdate<T: BaseModel>(
  originalDoc: T,
  ...patches: Array<Object>
): Promise<T> {
  // No need to re-initialize the model during update; originalDoc will be in a valid state by virtue of loading
  const doc = await models.initModel(
    originalDoc.type,
    originalDoc,

    // NOTE: This is before `patch` because we want `patch.modified` to win if it has it
    { modified: Date.now() },

    ...patches,
  );

  return database.update(doc);
}
|
||||
|
||||
/**
 * Create and insert a new document of `type`, built from `patches` with model
 * defaults applied. The `type` field itself cannot be overridden by a patch
 * (it is appended last, so it wins).
 */
export async function docCreate<T: BaseModel>(type: string, ...patches: Array<Object>): Promise<T> {
  const doc = await models.initModel(
    type,
    ...patches,

    // Fields that the user can't touch
    { type: type },
  );

  return database.insert(doc);
}
|
||||
|
||||
// ~~~~~~~ //
|
||||
// GENERAL //
|
||||
// ~~~~~~~ //
|
||||
|
||||
/**
 * Return `doc` plus every document reachable from it via parentId links,
 * breadth-first. Children of a doc whose type equals `stopType` are not
 * traversed. A null `doc` searches for root docs (parentId === null).
 */
export const withDescendants = (database.withDescendants = async function(
  doc: BaseModel | null,
  stopType: string | null = null,
): Promise<Array<BaseModel>> {
  if (db._empty) return _send('withDescendants', ...arguments);

  let docsToReturn = doc ? [doc] : [];

  // One BFS level: collect the direct children of every doc in `docs`,
  // then recurse on what was found.
  async function next(docs: Array<BaseModel | null>): Promise<Array<BaseModel>> {
    let foundDocs = [];

    for (const d of docs) {
      if (stopType && d && d.type === stopType) {
        continue;
      }

      // Query every model type for children of `d` in parallel.
      const promises = [];
      for (const type of allTypes()) {
        // If the doc is null, we want to search for parentId === null
        const parentId = d ? d._id : null;
        promises.push(database.find(type, { parentId }));
      }

      for (const more of await Promise.all(promises)) {
        foundDocs = [...foundDocs, ...more];
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for children
    docsToReturn = [...docsToReturn, ...foundDocs];
    return next(foundDocs);
  }

  return next([doc]);
});
|
||||
|
||||
export const withAncestors = (database.withAncestors = async function(
|
||||
doc: BaseModel | null,
|
||||
types: Array<string> = allTypes(),
|
||||
): Promise<Array<BaseModel>> {
|
||||
if (db._empty) return _send('withAncestors', ...arguments);
|
||||
|
||||
if (!doc) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let docsToReturn = doc ? [doc] : [];
|
||||
|
||||
async function next(docs: Array<BaseModel>): Promise<Array<BaseModel>> {
|
||||
const foundDocs = [];
|
||||
for (const d: BaseModel of docs) {
|
||||
for (const type of types) {
|
||||
// If the doc is null, we want to search for parentId === null
|
||||
const another = await database.get(type, d.parentId);
|
||||
another && foundDocs.push(another);
|
||||
}
|
||||
}
|
||||
|
||||
if (foundDocs.length === 0) {
|
||||
// Didn't find anything. We're done
|
||||
return docsToReturn;
|
||||
}
|
||||
|
||||
// Continue searching for children
|
||||
docsToReturn = [...docsToReturn, ...foundDocs];
|
||||
return next(foundDocs);
|
||||
}
|
||||
|
||||
return next([doc]);
|
||||
});
|
||||
|
||||
/**
 * Deep-copy `originalDoc` and all duplicable descendants under new ids.
 * `patch` is applied to the top-level copy only; children just get a
 * re-pointed parentId. All change events are flushed as one batch.
 */
export const duplicate = (database.duplicate = async function<T: BaseModel>(
  originalDoc: T,
  patch: Object = {},
): Promise<T> {
  if (db._empty) return _send('duplicate', ...arguments);

  const flushId = await database.bufferChanges();

  // Recursively copy one doc, then each of its duplicable children.
  async function next<T: BaseModel>(docToCopy: T, patch: Object): Promise<T> {
    const model = mustGetModel(docToCopy.type);

    // Fresh identity/timestamps; applied last so the patch cannot override them.
    const overrides = {
      _id: generateId(model.prefix),
      modified: Date.now(),
      created: Date.now(),
      type: docToCopy.type, // Ensure this is not overwritten by the patch
    };

    // 1. Copy the doc
    const newDoc = Object.assign({}, docToCopy, patch, overrides);

    // Don't initialize the model during insert, and simply duplicate
    const createdDoc = await database.insert(newDoc, false, false);

    // 2. Get all the children
    for (const type of allTypes()) {
      // Note: We never want to duplicate a response
      if (!models.canDuplicate(type)) {
        continue;
      }

      const parentId = docToCopy._id;
      const children = await database.find(type, { parentId });
      for (const doc of children) {
        await next(doc, { parentId: createdDoc._id });
      }
    }

    return createdDoc;
  }

  const createdDoc = await next(originalDoc, patch);

  await database.flushChanges(flushId);

  return createdDoc;
});
|
||||
|
||||
// ~~~~~~~ //
|
||||
// Helpers //
|
||||
// ~~~~~~~ //
|
||||
|
||||
async function _send<T>(fnName: string, ...args: Array<any>): Promise<T> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const replyChannel = `db.fn.reply:${uuid.v4()}`;
|
||||
electron.ipcRenderer.send('db.fn', fnName, replyChannel, ...args);
|
||||
electron.ipcRenderer.once(replyChannel, (e, err, result) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Run various database repair scripts
 */
export async function _repairDatabase() {
  console.log('[fix] Running database repairs');

  // Workspace-level fixups: single base environment, single cookie jar,
  // and an apiSpec fileName mirroring the workspace name.
  for (const workspace of await find(models.workspace.type)) {
    await _repairBaseEnvironments(workspace);
    await _fixMultipleCookieJars(workspace);
    await _applyApiSpecName(workspace);
  }

  // Migrate old git repository URIs to the `.git`-suffixed form.
  for (const gitRepository of await find(models.gitRepository.type)) {
    await _fixOldGitURIs(gitRepository);
  }
}
|
||||
|
||||
/**
 * This function ensures that apiSpec exists for each workspace
 * If the filename on the apiSpec is not set or is the default initialized name
 * It will apply the workspace name to it
 */
async function _applyApiSpecName(workspace: Workspace) {
  // NOTE(review): assumes getByParentId always returns a spec for a
  // workspace — a null here would throw below; confirm against the model.
  const apiSpec = await models.apiSpec.getByParentId(workspace._id);

  // Only overwrite the fileName when it was never customized by the user.
  if (!apiSpec.fileName || apiSpec.fileName === models.apiSpec.init().fileName) {
    await models.apiSpec.update(apiSpec, { fileName: workspace.name });
  }
}
|
||||
|
||||
/**
 * This function repairs workspaces that have multiple base environments. Since a workspace
 * can only have one, this function walks over all base environments, merges the data, and
 * moves all children as well.
 */
async function _repairBaseEnvironments(workspace) {
  const baseEnvironments = await find(models.environment.type, {
    parentId: workspace._id,
  });

  // Nothing to do here
  if (baseEnvironments.length <= 1) {
    return;
  }

  // Keep the first base env; fold every duplicate into it.
  const chosenBase = baseEnvironments[0];
  for (const baseEnvironment of baseEnvironments) {
    if (baseEnvironment._id === chosenBase._id) {
      continue;
    }

    // Merge data; keys already on the chosen base win over the duplicate's.
    chosenBase.data = Object.assign(baseEnvironment.data, chosenBase.data);
    const subEnvironments = await find(models.environment.type, {
      parentId: baseEnvironment._id,
    });

    // Re-parent the duplicate's sub-environments onto the chosen base.
    for (const subEnvironment of subEnvironments) {
      await docUpdate(subEnvironment, { parentId: chosenBase._id });
    }

    // Remove unnecessary base env
    await remove(baseEnvironment);
  }

  // Update remaining base env
  await update(chosenBase);

  console.log(`[fix] Merged ${baseEnvironments.length} base environments under ${workspace.name}`);
}
|
||||
|
||||
/**
 * This function repairs workspaces that have multiple cookie jars. Since a workspace
 * can only have one, this function walks over all jars and merges them and their cookies
 * together.
 */
async function _fixMultipleCookieJars(workspace) {
  const cookieJars = await find(models.cookieJar.type, {
    parentId: workspace._id,
  });

  // Nothing to do here
  if (cookieJars.length <= 1) {
    return;
  }

  // Keep the first jar; fold every duplicate into it.
  const chosenJar = cookieJars[0];

  // Track cookie ids already present in the chosen jar so the merge is O(n)
  // instead of re-scanning chosenJar.cookies for every candidate cookie.
  const seenCookieIds = new Set(chosenJar.cookies.map(c => c.id));

  for (const cookieJar of cookieJars) {
    if (cookieJar._id === chosenJar._id) {
      continue;
    }

    for (const cookie of cookieJar.cookies) {
      if (seenCookieIds.has(cookie.id)) {
        continue;
      }

      seenCookieIds.add(cookie.id);
      chosenJar.cookies.push(cookie);
    }

    // Remove unnecessary jar
    await remove(cookieJar);
  }

  // Update remaining jar
  await update(chosenJar);

  console.log(`[fix] Merged ${cookieJars.length} cookie jars under ${workspace.name}`);
}
|
||||
|
||||
// Append .git to old git URIs to mimic previous isomorphic-git behaviour
async function _fixOldGitURIs(doc: GitRepository) {
  if (!doc.uriNeedsMigration) {
    return;
  }

  if (!doc.uri.endsWith('.git')) {
    doc.uri += '.git';
  }

  // Clear the flag so this migration is one-shot per repository.
  doc.uriNeedsMigration = false;

  await update(doc);

  console.log(`[fix] Fixed git URI for ${doc._id}`);
}
|
||||
830
packages/insomnia-app/app/common/database.ts
Normal file
830
packages/insomnia-app/app/common/database.ts
Normal file
@@ -0,0 +1,830 @@
|
||||
/* eslint-disable prefer-rest-params -- don't want to change ...arguments usage for these sensitive functions without more testing */
|
||||
import type { BaseModel } from '../models/index';
|
||||
import * as models from '../models/index';
|
||||
import electron from 'electron';
|
||||
import NeDB from 'nedb';
|
||||
import fsPath from 'path';
|
||||
import { DB_PERSIST_INTERVAL } from './constants';
|
||||
import * as uuid from 'uuid';
|
||||
import { generateId } from './misc';
|
||||
import { getDataDirectory } from './electron-helpers';
|
||||
import { mustGetModel } from '../models';
|
||||
import type { Workspace } from '../models/workspace';
|
||||
import { GitRepository } from '../models/git-repository';
|
||||
import { CookieJar } from '../models/cookie-jar';
|
||||
import { Environment } from '../models/environment';
|
||||
|
||||
/** Subset of NeDB query fields used by the app's model lookups. */
export interface Query {
  _id?: string | SpecificQuery;
  parentId?: string | null;
  plugin?: string;
  key?: string;
  environmentId?: string | null;
  protoFileId?: string;
}

/** NeDB sort specification, e.g. `{ created: 1 }`. */
type Sort = Record<string, any>;

/** Batch payload for batchModifyDocs(). */
interface Operation {
  upsert?: Array<BaseModel>;
  remove?: Array<BaseModel>;
}

/** NeDB comparison operators supported by these queries. */
export interface SpecificQuery {
  $gt?: number;
  $in?: Array<string>;
  $nin?: Array<string>;
}

/** Query typed against a specific model's own field names. */
export type ModelQuery<T extends BaseModel> = Partial<Record<keyof T, SpecificQuery>>;
|
||||
|
||||
export const database = {
|
||||
all: async function<T extends BaseModel>(type: string) {
|
||||
if (db._empty) return _send<Array<T>>('all', ...arguments);
|
||||
return database.find<T>(type);
|
||||
},
|
||||
|
||||
batchModifyDocs: async function(operation: Operation) {
|
||||
if (db._empty) return _send<void>('batchModifyDocs', ...arguments);
|
||||
const flushId = await database.bufferChanges();
|
||||
const promisesUpserted: Array<Promise<BaseModel>> = [];
|
||||
const promisesDeleted: Array<Promise<void>> = [];
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION upsert operations are optional
|
||||
for (const doc of operation.upsert) {
|
||||
promisesUpserted.push(database.upsert(doc, true));
|
||||
}
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION remove operations are optional
|
||||
for (const doc of operation.remove) {
|
||||
promisesDeleted.push(database.unsafeRemove(doc, true));
|
||||
}
|
||||
|
||||
// Perform from least to most dangerous
|
||||
await Promise.all(promisesUpserted);
|
||||
await Promise.all(promisesDeleted);
|
||||
|
||||
await database.flushChanges(flushId);
|
||||
},
|
||||
|
||||
/** buffers database changes and returns a buffer id */
|
||||
bufferChanges: async function(millis = 1000) {
|
||||
if (db._empty) return _send<number>('bufferChanges', ...arguments);
|
||||
bufferingChanges = true;
|
||||
setTimeout(database.flushChanges, millis);
|
||||
return ++bufferChangesId;
|
||||
},
|
||||
|
||||
/** buffers database changes and returns a buffer id */
|
||||
bufferChangesIndefinitely: async function() {
|
||||
if (db._empty) return _send<number>('bufferChangesIndefinitely', ...arguments);
|
||||
bufferingChanges = true;
|
||||
return ++bufferChangesId;
|
||||
},
|
||||
|
||||
CHANGE_INSERT: 'insert',
|
||||
|
||||
CHANGE_UPDATE: 'update',
|
||||
|
||||
CHANGE_REMOVE: 'remove',
|
||||
|
||||
count: async function<T extends BaseModel>(type: string, query: Query = {}) {
|
||||
if (db._empty) return _send<number>('count', ...arguments);
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
(db[type] as NeDB<T>).count(query, (err, count) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
resolve(count);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
docCreate: async <T extends BaseModel>(type: string, ...patches: Array<Patch<T>>) => {
|
||||
const doc = await models.initModel<T>(
|
||||
type,
|
||||
...patches,
|
||||
// Fields that the user can't touch
|
||||
{
|
||||
type: type,
|
||||
},
|
||||
);
|
||||
return database.insert<T>(doc);
|
||||
},
|
||||
|
||||
docUpdate: async <T extends BaseModel>(originalDoc: T, ...patches: Array<Patch<T>>) => {
|
||||
// No need to re-initialize the model during update; originalDoc will be in a valid state by virtue of loading
|
||||
const doc = await models.initModel<T>(
|
||||
originalDoc.type,
|
||||
originalDoc,
|
||||
|
||||
// NOTE: This is before `patches` because we want `patch.modified` to win if it has it
|
||||
{
|
||||
modified: Date.now(),
|
||||
},
|
||||
...patches,
|
||||
);
|
||||
return database.update<T>(doc);
|
||||
},
|
||||
|
||||
duplicate: async function<T extends BaseModel>(originalDoc: T, patch: Patch<T> = {}) {
|
||||
if (db._empty) return _send<T>('duplicate', ...arguments);
|
||||
const flushId = await database.bufferChanges();
|
||||
|
||||
async function next<T extends BaseModel>(docToCopy: T, patch: Patch<T>) {
|
||||
const model = mustGetModel(docToCopy.type);
|
||||
const overrides = {
|
||||
_id: generateId(model.prefix),
|
||||
modified: Date.now(),
|
||||
created: Date.now(),
|
||||
type: docToCopy.type, // Ensure this is not overwritten by the patch
|
||||
};
|
||||
|
||||
// 1. Copy the doc
|
||||
const newDoc = Object.assign({}, docToCopy, patch, overrides);
|
||||
|
||||
// Don't initialize the model during insert, and simply duplicate
|
||||
const createdDoc = await database.insert(newDoc, false, false);
|
||||
|
||||
// 2. Get all the children
|
||||
for (const type of allTypes()) {
|
||||
// Note: We never want to duplicate a response
|
||||
if (!models.canDuplicate(type)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const parentId = docToCopy._id;
|
||||
const children = await database.find(type, { parentId });
|
||||
|
||||
for (const doc of children) {
|
||||
await next(doc, { parentId: createdDoc._id });
|
||||
}
|
||||
}
|
||||
|
||||
return createdDoc;
|
||||
}
|
||||
|
||||
const createdDoc = await next(originalDoc, patch);
|
||||
await database.flushChanges(flushId);
|
||||
return createdDoc;
|
||||
},
|
||||
|
||||
find: async function<T extends BaseModel>(
|
||||
type: string,
|
||||
query: Query | string = {},
|
||||
sort: Sort = { created: 1 },
|
||||
) {
|
||||
if (db._empty) return _send<Array<T>>('find', ...arguments);
|
||||
return new Promise<Array<T>>((resolve, reject) => {
|
||||
(db[type] as NeDB<T>)
|
||||
.find(query)
|
||||
.sort(sort)
|
||||
.exec(async (err, rawDocs) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const docs: Array<T> = [];
|
||||
|
||||
for (const rawDoc of rawDocs) {
|
||||
docs.push(await models.initModel(type, rawDoc));
|
||||
}
|
||||
|
||||
resolve(docs);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
findMostRecentlyModified: async function<T extends BaseModel>(
|
||||
type: string,
|
||||
query: Query = {},
|
||||
limit: number | null = null,
|
||||
) {
|
||||
if (db._empty) return _send<Array<T>>('findMostRecentlyModified', ...arguments);
|
||||
return new Promise<Array<T>>(resolve => {
|
||||
(db[type] as NeDB<T>)
|
||||
.find(query)
|
||||
.sort({
|
||||
modified: -1,
|
||||
})
|
||||
// @ts-expect-error -- TSCONVERSION limit shouldn't be applied if it's null, or default to something that means no-limit
|
||||
.limit(limit)
|
||||
.exec(async (err, rawDocs) => {
|
||||
if (err) {
|
||||
console.warn('[db] Failed to find docs', err);
|
||||
resolve([]);
|
||||
return;
|
||||
}
|
||||
|
||||
const docs: Array<T> = [];
|
||||
|
||||
for (const rawDoc of rawDocs) {
|
||||
docs.push(await models.initModel(type, rawDoc));
|
||||
}
|
||||
|
||||
resolve(docs);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
flushChanges: async function(id = 0, fake = false) {
|
||||
if (db._empty) return _send<void>('flushChanges', ...arguments);
|
||||
|
||||
// Only flush if ID is 0 or the current flush ID is the same as passed
|
||||
if (id !== 0 && bufferChangesId !== id) {
|
||||
return;
|
||||
}
|
||||
|
||||
bufferingChanges = false;
|
||||
const changes = [...changeBuffer];
|
||||
changeBuffer = [];
|
||||
|
||||
if (changes.length === 0) {
|
||||
// No work to do
|
||||
return;
|
||||
}
|
||||
|
||||
if (fake) {
|
||||
console.log(`[db] Dropped ${changes.length} changes.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Notify local listeners too
|
||||
for (const fn of changeListeners) {
|
||||
await fn(changes);
|
||||
}
|
||||
|
||||
// Notify remote listeners
|
||||
const windows = electron.BrowserWindow.getAllWindows();
|
||||
|
||||
for (const window of windows) {
|
||||
window.webContents.send('db.changes', changes);
|
||||
}
|
||||
},
|
||||
|
||||
flushChangesAsync: async (fake = false) => {
|
||||
process.nextTick(async () => {
|
||||
await database.flushChanges(0, fake);
|
||||
});
|
||||
},
|
||||
|
||||
get: async function<T extends BaseModel>(type: string, id?: string) {
|
||||
if (db._empty) return _send<T>('get', ...arguments);
|
||||
|
||||
// Short circuit IDs used to represent nothing
|
||||
if (!id || id === 'n/a') {
|
||||
return null;
|
||||
} else {
|
||||
return database.getWhere<T>(type, { _id: id });
|
||||
}
|
||||
},
|
||||
|
||||
getMostRecentlyModified: async function<T extends BaseModel>(type: string, query: Query = {}) {
|
||||
if (db._empty) return _send<T>('getMostRecentlyModified', ...arguments);
|
||||
const docs = await database.findMostRecentlyModified<T>(type, query, 1);
|
||||
return docs.length ? docs[0] : null;
|
||||
},
|
||||
|
||||
getWhere: async function<T extends BaseModel>(type: string, query: ModelQuery<T> | Query) {
|
||||
if (db._empty) return _send<T>('getWhere', ...arguments);
|
||||
// @ts-expect-error -- TSCONVERSION type narrowing needed
|
||||
const docs = await database.find<T>(type, query);
|
||||
return docs.length ? docs[0] : null;
|
||||
},
|
||||
|
||||
// Initialize the database in the main process: create/load one NeDB collection
// per model type, expose an IPC bridge ('db.fn') so renderer windows can call
// database methods, run repair scripts, and register model lifecycle hooks.
init: async (
  types: Array<string>,
  config: NeDB.DataStoreOptions = {},
  forceReset = false,
  consoleLog: typeof console.log = console.log,
) => {
  if (forceReset) {
    changeListeners = [];

    // Drop every collection except the `_empty` marker so it can be re-created below
    for (const attr of Object.keys(db)) {
      if (attr === '_empty') {
        continue;
      }

      delete db[attr];
    }
  }

  // Fill in the defaults
  for (const modelType of types) {
    if (db[modelType]) {
      consoleLog(`[db] Already initialized DB.${modelType}`);
      continue;
    }

    const filePath = getDBFilePath(modelType);
    // `config` can override the defaults (e.g. inMemoryOnly for tests)
    const collection = new NeDB(
      Object.assign(
        {
          autoload: true,
          filename: filePath,
          corruptAlertThreshold: 0.9,
        },
        config,
      ),
    );
    collection.persistence.setAutocompactionInterval(DB_PERSIST_INTERVAL);
    db[modelType] = collection;
  }

  // Clearing `_empty` flips all database methods from IPC-proxy mode to direct mode
  delete db._empty;
  // IPC bridge: renderer sends ('db.fn', fnName, replyChannel, ...args) via _send();
  // reply is (err, result) on the per-call replyChannel
  electron.ipcMain.on('db.fn', async (e, fnName, replyChannel, ...args) => {
    try {
      const result = await database[fnName](...args);
      e.sender.send(replyChannel, null, result);
    } catch (err) {
      // Errors don't structured-clone across IPC; send a plain object instead
      e.sender.send(replyChannel, {
        message: err.message,
        stack: err.stack,
      });
    }
  });

  // NOTE: Only repair the DB if we're not running in memory. Repairing here causes tests to hang indefinitely for some reason.
  // TODO: Figure out why this makes tests hang
  if (!config.inMemoryOnly) {
    await _repairDatabase();
    consoleLog(`[db] Initialized DB at ${getDBFilePath('$TYPE')}`);
  }

  // This isn't the best place for this but w/e
  // Listen for response deletions and delete corresponding response body files
  database.onChange(async changes => {
    for (const [type, doc] of changes) {
      // TODO(TSCONVERSION) what's returned here is the entire model implementation, not just a model
      // The type definition will be a little confusing
      const m: Record<string, any> | null = models.getModel(doc.type);

      if (!m) {
        continue;
      }

      // Hook failures are logged but never allowed to break change propagation
      if (type === database.CHANGE_REMOVE && typeof m.hookRemove === 'function') {
        try {
          await m.hookRemove(doc, consoleLog);
        } catch (err) {
          consoleLog(`[db] Delete hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }

      if (type === database.CHANGE_INSERT && typeof m.hookInsert === 'function') {
        try {
          await m.hookInsert(doc, consoleLog);
        } catch (err) {
          consoleLog(`[db] Insert hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }

      if (type === database.CHANGE_UPDATE && typeof m.hookUpdate === 'function') {
        try {
          await m.hookUpdate(doc, consoleLog);
        } catch (err) {
          consoleLog(`[db] Update hook failed for ${type} ${doc._id}: ${err.message}`);
        }
      }
    }
  });

  // Give each model a chance to run one-time init work against the fresh DB
  for (const model of models.all()) {
    // @ts-expect-error -- TSCONVERSION optional type on response
    if (typeof model.hookDatabaseInit === 'function') {
      // @ts-expect-error -- TSCONVERSION optional type on response
      await model.hookDatabaseInit?.(consoleLog);
    }
  }
},
|
||||
|
||||
// Renderer-side init: subscribe to change batches broadcast by the main process
// and fan them out to locally registered change listeners.
initClient: async () => {
  electron.ipcRenderer.on('db.changes', async (_e, changes) => {
    for (const listener of changeListeners) {
      await listener(changes);
    }
  });
  console.log('[db] Initialized DB client');
},
|
||||
|
||||
// Insert a document into its type's collection and notify change listeners.
// When `initializeModel` is true (default), model defaults are applied first.
// Resolves with the inserted document; rejects on model-init or NeDB errors.
insert: async function<T extends BaseModel>(doc: T, fromSync = false, initializeModel = true) {
  if (db._empty) return _send<T>('insert', ...arguments);

  // Apply defaults before entering the Promise executor. This avoids the
  // async-executor anti-pattern of the previous implementation: an error here
  // now rejects this function's promise directly, with identical observable
  // behavior for callers.
  const docWithDefaults: T = initializeModel
    ? await models.initModel<T>(doc.type, doc)
    : doc;

  return new Promise<T>((resolve, reject) => {
    (db[doc.type] as NeDB<T>).insert(docWithDefaults, (err, newDoc: T) => {
      if (err) {
        return reject(err);
      }

      resolve(newDoc);
      // NOTE: This needs to be after we resolve
      notifyOfChange(database.CHANGE_INSERT, newDoc, fromSync);
    });
  });
},
|
||||
|
||||
// Register a listener invoked with every flushed batch of changes
onChange: (callback: ChangeListener) => {
  changeListeners = [...changeListeners, callback];
},

// Deregister a previously registered change listener
offChange: (callback: ChangeListener) => {
  changeListeners = changeListeners.filter(listener => listener !== callback);
},
|
||||
|
||||
// Remove a document AND all of its descendants, buffering change events so
// listeners receive one consolidated batch at the end.
remove: async function<T extends BaseModel>(doc: T, fromSync = false) {
  if (db._empty) return _send<void>('remove', ...arguments);

  // Buffer notifications until flushChanges(flushId) below
  const flushId = await database.bufferChanges();

  const docs = await database.withDescendants(doc);
  const docIds = docs.map(d => d._id);
  const types = [...new Set(docs.map(d => d.type))];

  // Don't really need to wait for this to be over;
  // NOTE(review): the NeDB remove results are intentionally not awaited, so
  // failures here are silent — confirm this best-effort behavior is desired.
  types.map(t =>
    db[t].remove(
      {
        _id: {
          $in: docIds,
        },
      },
      {
        multi: true,
      },
    ),
  );

  // Queue a CHANGE_REMOVE for every affected doc, then release the buffer
  docs.map(d => notifyOfChange(database.CHANGE_REMOVE, d, fromSync));
  await database.flushChanges(flushId);
},
|
||||
|
||||
// Remove every document of `type` matching `query`, each together with its
// descendants. Change events are buffered and flushed as a single batch.
// NOTE: notifications here always pass fromSync = false (unlike remove()).
removeWhere: async function<T extends BaseModel>(type: string, query: Query) {
  if (db._empty) return _send<void>('removeWhere', ...arguments);
  const flushId = await database.bufferChanges();

  for (const doc of await database.find<T>(type, query)) {
    const docs = await database.withDescendants(doc);
    const docIds = docs.map(d => d._id);
    const types = [...new Set(docs.map(d => d.type))];

    // Don't really need to wait for this to be over;
    // NOTE(review): NeDB remove results are not awaited; failures are silent.
    types.map(t =>
      db[t].remove(
        {
          _id: {
            $in: docIds,
          },
        },
        {
          multi: true,
        },
      ),
    );
    docs.map(d => notifyOfChange(database.CHANGE_REMOVE, d, false));
  }

  await database.flushChanges(flushId);
},
|
||||
|
||||
/** Removes entries without removing their children */
unsafeRemove: async function<T extends BaseModel>(doc: T, fromSync = false) {
  if (db._empty) return _send<void>('unsafeRemove', ...arguments);

  // NOTE(review): neither the NeDB remove nor the async notifyOfChange is
  // awaited, so callers cannot observe failures here — confirm intentional.
  (db[doc.type] as NeDB<T>).remove({ _id: doc._id });
  notifyOfChange(database.CHANGE_REMOVE, doc, fromSync);
},
|
||||
|
||||
// Update a document in place (matched by _id) after re-applying model defaults.
// Resolves with the defaults-applied document; rejects on init or NeDB errors.
update: async function<T extends BaseModel>(doc: T, fromSync = false) {
  if (db._empty) return _send<T>('update', ...arguments);

  return new Promise<T>(async (resolve, reject) => {
    let docWithDefaults: T;

    try {
      docWithDefaults = await models.initModel<T>(doc.type, doc);
    } catch (err) {
      return reject(err);
    }

    (db[doc.type] as NeDB<T>).update(
      { _id: docWithDefaults._id },
      docWithDefaults,
      // TODO(TSCONVERSION) see comment below, upsert can happen automatically as part of the update
      // @ts-expect-error -- TSCONVERSION expects 4 args but only sent 3. Need to validate what UpdateOptions should be.
      err => {
        if (err) {
          return reject(err);
        }

        resolve(docWithDefaults);
        // NOTE: This needs to be after we resolve
        notifyOfChange(database.CHANGE_UPDATE, docWithDefaults, fromSync);
      },
    );
  });
},
|
||||
|
||||
// TODO(TSCONVERSION) the update method above can now take an upsert property
// Insert the document, or update it when one with the same _id already exists.
upsert: async function<T extends BaseModel>(doc: T, fromSync = false) {
  if (db._empty) return _send<T>('upsert', ...arguments);

  const existing = await database.get<T>(doc.type, doc._id);
  return existing
    ? database.update<T>(doc, fromSync)
    : database.insert<T>(doc, fromSync);
},
|
||||
|
||||
// Walk up the parent chain of `doc`, collecting every ancestor whose type is
// in `types` (defaults to all registered types). Returns `doc` first, then
// ancestors in discovery order; [] when doc is null.
withAncestors: async function<T extends BaseModel>(doc: T | null, types: Array<string> = allTypes()) {
  if (db._empty) return _send<Array<T>>('withAncestors', ...arguments);

  if (!doc) {
    return [];
  }

  // `doc` is known non-null past the guard, so the previous
  // `doc ? [doc] : []` ternary was redundant.
  let docsToReturn: Array<T> = [doc];

  // Recursively look up the parents of the docs found in the previous round
  async function next(docs: Array<T>) {
    const foundDocs: Array<T> = [];

    for (const d of docs) {
      for (const type of types) {
        const ancestor = await database.get<T>(type, d.parentId);

        if (ancestor) {
          foundDocs.push(ancestor);
        }
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Keep climbing the parent chain
    docsToReturn = [
      ...docsToReturn,
      ...foundDocs,
    ];
    return next(foundDocs);
  }

  return next([doc]);
},
|
||||
|
||||
// Collect `doc` and, recursively, every document whose parentId chain leads to
// it. A null `doc` searches for top-level docs (parentId === null). Descent
// stops below any doc whose type equals `stopType`.
withDescendants: async function<T extends BaseModel>(doc: T | null, stopType: string | null = null) {
  if (db._empty) return _send<Array<T>>('withDescendants', ...arguments);
  let docsToReturn = doc ? [doc] : [];

  // One round of the breadth-first walk: find children of all docs in `docs`
  async function next(docs: Array<T | null>): Promise<Array<T>> {
    let foundDocs: Array<T> = [];

    for (const doc of docs) {
      // Don't recurse past the stop type (its own doc was already collected)
      if (stopType && doc && doc.type === stopType) {
        continue;
      }

      // Query every type's collection for children of this doc, in parallel
      const promises: Array<Promise<Array<T>>> = [];

      for (const type of allTypes()) {
        // If the doc is null, we want to search for parentId === null
        const parentId = doc ? doc._id : null;
        const promise = database.find<T>(type, { parentId });
        promises.push(promise);
      }

      for (const more of await Promise.all(promises)) {
        foundDocs = [
          ...foundDocs,
          ...more,
        ];
      }
    }

    if (foundDocs.length === 0) {
      // Didn't find anything. We're done
      return docsToReturn;
    }

    // Continue searching for children
    docsToReturn = [...docsToReturn, ...foundDocs];
    return next(foundDocs);
  }

  return next([doc]);
},
|
||||
};
|
||||
|
||||
// Map of model type name -> the NeDB datastore holding documents of that type
interface DB {
  [index: string]: NeDB;
}

// The singleton collection map. It starts "empty": database.init() deletes the
// `_empty` marker once collections exist; methods check `db._empty` to decide
// whether to proxy the call to the main process over IPC (via _send) instead.
// @ts-expect-error -- TSCONVERSION _empty doesn't match the index signature, use something other than _empty in future
const db: DB = {
  _empty: true,
} as DB;
|
||||
|
||||
// ~~~~~~~ //
|
||||
// HELPERS //
|
||||
// ~~~~~~~ //
|
||||
// Every registered model type — the keys of the collection map `db`
const allTypes = () => Object.keys(db);
|
||||
|
||||
// Build the on-disk path for a model type's NeDB file,
// e.g. <dataDir>/insomnia.request.db
function getDBFilePath(modelType: string) {
  // NOTE: Do not EVER change this. EVER!
  return fsPath.join(getDataDirectory(), `insomnia.${modelType}.db`);
}
|
||||
|
||||
// ~~~~~~~~~~~~~~~~ //
|
||||
// Change Listeners //
|
||||
// ~~~~~~~~~~~~~~~~ //
|
||||
// True while change events are being buffered instead of flushed immediately
let bufferingChanges = false;
// Counter for buffer ids — presumably incremented/handed out by
// bufferChanges() (defined outside this chunk); confirm against that code
let bufferChangesId = 1;

// One buffered DB event: what happened, to which doc, and whether it came from sync
type ChangeBufferEvent = [
  event: string,
  doc: BaseModel,
  fromSync: boolean
];

// Events accumulated since the last flush
let changeBuffer: Array<ChangeBufferEvent> = [];

// Callback invoked with a batch of ChangeBufferEvents on each flush
type ChangeListener = Function;

let changeListeners: Array<ChangeListener> = [];
|
||||
|
||||
async function notifyOfChange<T extends BaseModel>(event: string, doc: T, fromSync: boolean) {
|
||||
changeBuffer.push([event, doc, fromSync]);
|
||||
|
||||
// Flush right away if we're not buffering
|
||||
if (!bufferingChanges) {
|
||||
await database.flushChanges();
|
||||
}
|
||||
}
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~ //
|
||||
// DEFAULT MODEL STUFF //
|
||||
// ~~~~~~~~~~~~~~~~~~~ //
|
||||
|
||||
type Patch<T> = Partial<T>;
|
||||
|
||||
// ~~~~~~~ //
|
||||
// Helpers //
|
||||
// ~~~~~~~ //
|
||||
async function _send<T>(fnName: string, ...args: Array<any>) {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
const replyChannel = `db.fn.reply:${uuid.v4()}`;
|
||||
electron.ipcRenderer.send('db.fn', fnName, replyChannel, ...args);
|
||||
electron.ipcRenderer.once(replyChannel, (_e, err, result: T) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Run various database repair scripts
|
||||
*/
|
||||
export async function _repairDatabase() {
|
||||
console.log('[fix] Running database repairs');
|
||||
|
||||
for (const workspace of await database.find<Workspace>(models.workspace.type)) {
|
||||
await _repairBaseEnvironments(workspace);
|
||||
await _fixMultipleCookieJars(workspace);
|
||||
await _applyApiSpecName(workspace);
|
||||
}
|
||||
|
||||
for (const gitRepository of await database.find<GitRepository>(models.gitRepository.type)) {
|
||||
await _fixOldGitURIs(gitRepository);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function ensures that apiSpec exists for each workspace
|
||||
* If the filename on the apiSpec is not set or is the default initialized name
|
||||
* It will apply the workspace name to it
|
||||
*/
|
||||
async function _applyApiSpecName(workspace: Workspace) {
|
||||
const apiSpec = await models.apiSpec.getByParentId(workspace._id);
|
||||
if (apiSpec === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!apiSpec.fileName || apiSpec.fileName === models.apiSpec.init().fileName) {
|
||||
await models.apiSpec.update(apiSpec, {
|
||||
fileName: workspace.name,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * This function repairs workspaces that have multiple base environments. Since a workspace
 * can only have one, this function walks over all base environments, merges the data, and
 * moves all children as well.
 */
async function _repairBaseEnvironments(workspace: Workspace) {
  const baseEnvironments = await database.find<Environment>(models.environment.type, {
    parentId: workspace._id,
  });

  // Nothing to do here
  if (baseEnvironments.length <= 1) {
    return;
  }

  // The first base environment survives; all others are merged into it
  const chosenBase = baseEnvironments[0];

  for (const baseEnvironment of baseEnvironments) {
    if (baseEnvironment._id === chosenBase._id) {
      continue;
    }

    // NOTE: Object.assign mutates baseEnvironment.data; chosenBase's keys win on conflict
    chosenBase.data = Object.assign(baseEnvironment.data, chosenBase.data);
    const subEnvironments = await database.find<Environment>(models.environment.type, {
      parentId: baseEnvironment._id,
    });

    // Re-parent sub environments onto the surviving base environment
    for (const subEnvironment of subEnvironments) {
      await database.docUpdate(subEnvironment, {
        parentId: chosenBase._id,
      });
    }

    // Remove unnecessary base env
    await database.remove(baseEnvironment);
  }

  // Update remaining base env
  await database.update(chosenBase);
  console.log(`[fix] Merged ${baseEnvironments.length} base environments under ${workspace.name}`);
}
|
||||
|
||||
/**
|
||||
* This function repairs workspaces that have multiple cookie jars. Since a workspace
|
||||
* can only have one, this function walks over all jars and merges them and their cookies
|
||||
* together.
|
||||
*/
|
||||
async function _fixMultipleCookieJars(workspace: Workspace) {
|
||||
const cookieJars = await database.find<CookieJar>(models.cookieJar.type, {
|
||||
parentId: workspace._id,
|
||||
});
|
||||
|
||||
// Nothing to do here
|
||||
if (cookieJars.length <= 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
const chosenJar = cookieJars[0];
|
||||
|
||||
for (const cookieJar of cookieJars) {
|
||||
if (cookieJar._id === chosenJar._id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const cookie of cookieJar.cookies) {
|
||||
if (chosenJar.cookies.find(c => c.id === cookie.id)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
chosenJar.cookies.push(cookie);
|
||||
}
|
||||
|
||||
// Remove unnecessary jar
|
||||
await database.remove(cookieJar);
|
||||
}
|
||||
|
||||
// Update remaining jar
|
||||
await database.update(chosenJar);
|
||||
console.log(`[fix] Merged ${cookieJars.length} cookie jars under ${workspace.name}`);
|
||||
}
|
||||
|
||||
// Append .git to old git URIs to mimic previous isomorphic-git behaviour
|
||||
async function _fixOldGitURIs(doc: GitRepository) {
|
||||
if (!doc.uriNeedsMigration) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!doc.uri.endsWith('.git')) {
|
||||
doc.uri += '.git';
|
||||
}
|
||||
|
||||
doc.uriNeedsMigration = false;
|
||||
await database.update(doc);
|
||||
console.log(`[fix] Fixed git URI for ${doc._id}`);
|
||||
}
|
||||
@@ -1,7 +1,4 @@
|
||||
// @flow
|
||||
function insomniaDocs(slug: string): string {
|
||||
return `https://support.insomnia.rest${slug}`;
|
||||
}
|
||||
const insomniaDocs = (slug: string) => `https://support.insomnia.rest${slug}`;
|
||||
|
||||
export const docsBase = insomniaDocs('/');
|
||||
export const docsGitSync = insomniaDocs('/article/193-git-sync');
|
||||
@@ -9,12 +6,9 @@ export const docsTemplateTags = insomniaDocs('/article/171-template-tags');
|
||||
export const docsVersionControl = insomniaDocs('/article/165-version-control-sync');
|
||||
export const docsPlugins = insomniaDocs('/article/173-plugins');
|
||||
export const docsImportExport = insomniaDocs('/article/172-importing-and-exporting-data');
|
||||
|
||||
export const docsGitAccessToken = {
|
||||
github:
|
||||
'https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token',
|
||||
github: 'https://docs.github.com/github/authenticating-to-github/creating-a-personal-access-token',
|
||||
gitlab: 'https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html',
|
||||
bitbucket: 'https://support.atlassian.com/bitbucket-cloud/docs/app-passwords/',
|
||||
bitbucketServer:
|
||||
'https://confluence.atlassian.com/bitbucketserver/personal-access-tokens-939515499.html',
|
||||
bitbucketServer: 'https://confluence.atlassian.com/bitbucketserver/personal-access-tokens-939515499.html',
|
||||
};
|
||||
50
packages/insomnia-app/app/common/electron-helpers.ts
Normal file
50
packages/insomnia-app/app/common/electron-helpers.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import * as electron from 'electron';
|
||||
import { join } from 'path';
|
||||
import appConfig from '../../config/config.json';
|
||||
import mkdirp from 'mkdirp';
|
||||
|
||||
// Open a URL in the user's default external browser (outside the app window)
export function clickLink(href: string) {
  electron.shell.openExternal(href);
}
|
||||
|
||||
export function getDesignerDataDir() {
|
||||
const { app } = electron.remote || electron;
|
||||
return process.env.DESIGNER_DATA_PATH || join(app.getPath('appData'), 'Insomnia Designer');
|
||||
}
|
||||
|
||||
export function getDataDirectory() {
|
||||
const { app } = electron.remote || electron;
|
||||
return process.env.INSOMNIA_DATA_PATH || app.getPath('userData');
|
||||
}
|
||||
|
||||
export function getViewportSize(): string | null {
|
||||
const { BrowserWindow } = electron.remote || electron;
|
||||
const browserWindow = BrowserWindow.getFocusedWindow() || BrowserWindow.getAllWindows()[0];
|
||||
|
||||
if (browserWindow) {
|
||||
const { width, height } = browserWindow.getContentBounds();
|
||||
return `${width}x${height}`;
|
||||
} else {
|
||||
// No windows open
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function getScreenResolution() {
|
||||
const { screen } = electron.remote || electron;
|
||||
const { width, height } = screen.getPrimaryDisplay().workAreaSize;
|
||||
return `${width}x${height}`;
|
||||
}
|
||||
|
||||
export function getUserLanguage() {
|
||||
const { app } = electron.remote || electron;
|
||||
return app.getLocale();
|
||||
}
|
||||
|
||||
export function getTempDir() {
|
||||
// NOTE: Using a fairly unique name here because "insomnia" is a common word
|
||||
const { app } = electron.remote || electron;
|
||||
const dir = join(app.getPath('temp'), `insomnia_${appConfig.version}`);
|
||||
mkdirp.sync(dir);
|
||||
return dir;
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import type { Workspace } from '../models/workspace';
|
||||
import { isDesign } from '../models/helpers/is-model';
|
||||
import { strings } from './strings';
|
||||
@@ -1,4 +1,4 @@
|
||||
// @flow
|
||||
import { ValueOf } from 'type-fest';
|
||||
|
||||
export const GrpcRequestEventEnum = {
|
||||
start: 'GRPC_START',
|
||||
@@ -6,8 +6,9 @@ export const GrpcRequestEventEnum = {
|
||||
commit: 'GRPC_COMMIT',
|
||||
cancel: 'GRPC_CANCEL',
|
||||
cancelMultiple: 'GRPC_CANCEL_MULTIPLE',
|
||||
};
|
||||
export type GrpcRequestEvent = $Values<typeof GrpcRequestEventEnum>;
|
||||
} as const;
|
||||
|
||||
export type GrpcRequestEvent = ValueOf<typeof GrpcRequestEventEnum>;
|
||||
|
||||
export const GrpcResponseEventEnum = {
|
||||
start: 'GRPC_START',
|
||||
@@ -15,5 +16,6 @@ export const GrpcResponseEventEnum = {
|
||||
error: 'GRPC_ERROR',
|
||||
end: 'GRPC_END',
|
||||
status: 'GRPC_STATUS',
|
||||
};
|
||||
export type GrpcResponseEvent = $Values<typeof GrpcResponseEventEnum>;
|
||||
} as const;
|
||||
|
||||
export type GrpcResponseEvent = ValueOf<typeof GrpcResponseEventEnum>;
|
||||
@@ -1,26 +1,25 @@
|
||||
// @flow
|
||||
|
||||
import type { GrpcMethodDefinition, GrpcMethodType } from '../network/grpc/method';
|
||||
import { groupBy } from 'lodash';
|
||||
import { getMethodType } from '../network/grpc/method';
|
||||
|
||||
const PROTO_PATH_REGEX = /^\/(?:(?<package>[\w.]+)\.)?(?<service>\w+)\/(?<method>\w+)$/;
|
||||
|
||||
type GrpcPathSegments = {
|
||||
packageName?: string,
|
||||
serviceName?: string,
|
||||
methodName?: string,
|
||||
};
|
||||
interface GrpcPathSegments {
|
||||
packageName?: string;
|
||||
serviceName?: string;
|
||||
methodName?: string;
|
||||
}
|
||||
|
||||
// Split a full gRPC path into it's segments
|
||||
export const getGrpcPathSegments = (path: string): GrpcPathSegments => {
|
||||
const result = PROTO_PATH_REGEX.exec(path);
|
||||
|
||||
const packageName = result?.groups?.package;
|
||||
const serviceName = result?.groups?.service;
|
||||
const methodName = result?.groups?.method;
|
||||
|
||||
return { packageName, serviceName, methodName };
|
||||
return {
|
||||
packageName,
|
||||
serviceName,
|
||||
methodName,
|
||||
};
|
||||
};
|
||||
|
||||
// If all segments are found, return a shorter path, otherwise the original path
|
||||
@@ -31,16 +30,12 @@ export const getShortGrpcPath = (
|
||||
return packageName && serviceName && methodName ? `/${serviceName}/${methodName}` : fullPath;
|
||||
};
|
||||
|
||||
export type GrpcMethodInfo = {
|
||||
segments: GrpcPathSegments,
|
||||
type: GrpcMethodType,
|
||||
fullPath: string,
|
||||
};
|
||||
|
||||
type GroupedGrpcMethodInfo = {
|
||||
[packageName: string]: Array<GrpcMethodInfo>,
|
||||
};
|
||||
|
||||
export interface GrpcMethodInfo {
|
||||
segments: GrpcPathSegments;
|
||||
type: GrpcMethodType;
|
||||
fullPath: string;
|
||||
}
|
||||
type GroupedGrpcMethodInfo = Record<string, Array<GrpcMethodInfo>>;
|
||||
export const NO_PACKAGE_KEY = 'no-package';
|
||||
|
||||
const getMethodInfo = (method: GrpcMethodDefinition): GrpcMethodInfo => ({
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import fs from 'fs';
|
||||
import clone from 'clone';
|
||||
import { Cookie as toughCookie } from 'tough-cookie';
|
||||
@@ -7,7 +6,7 @@ import type { RenderedRequest } from './render';
|
||||
import { getRenderedRequestAndContext } from './render';
|
||||
import { jarFromCookies } from 'insomnia-cookies';
|
||||
import * as pluginContexts from '../plugins/context/index';
|
||||
import * as misc from './misc';
|
||||
import { getSetCookieHeaders, filterHeaders, hasAuthHeader } from './misc';
|
||||
import type { Cookie } from '../models/cookie-jar';
|
||||
import type { Request } from '../models/request';
|
||||
import { newBodyRaw } from '../models/request';
|
||||
@@ -18,173 +17,176 @@ import { RenderError } from '../templating/index';
|
||||
import { smartEncodeUrl } from 'insomnia-url';
|
||||
import * as plugins from '../plugins';
|
||||
|
||||
export type HarCookie = {
|
||||
name: string,
|
||||
value: string,
|
||||
path?: string,
|
||||
domain?: string,
|
||||
expires?: string,
|
||||
httpOnly?: boolean,
|
||||
secure?: boolean,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarCookie {
|
||||
name: string;
|
||||
value: string;
|
||||
path?: string;
|
||||
domain?: string;
|
||||
expires?: string;
|
||||
httpOnly?: boolean;
|
||||
secure?: boolean;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarHeader = {
|
||||
name: string,
|
||||
value: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarHeader {
|
||||
name: string;
|
||||
value: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarQueryString = {
|
||||
name: string,
|
||||
value: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarQueryString {
|
||||
name: string;
|
||||
value: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarPostParam = {
|
||||
name: string,
|
||||
value?: string,
|
||||
fileName?: string,
|
||||
contentType?: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarPostParam {
|
||||
name: string;
|
||||
value?: string;
|
||||
fileName?: string;
|
||||
contentType?: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarPostData = {
|
||||
mimeType: string,
|
||||
params: Array<HarPostParam>,
|
||||
text: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarPostData {
|
||||
mimeType: string;
|
||||
params: Array<HarPostParam>;
|
||||
text: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarRequest = {
|
||||
method: string,
|
||||
url: string,
|
||||
httpVersion: string,
|
||||
cookies: Array<HarCookie>,
|
||||
headers: Array<HarHeader>,
|
||||
queryString: Array<HarQueryString>,
|
||||
postData?: HarPostData,
|
||||
headersSize: number,
|
||||
bodySize: number,
|
||||
comment?: string,
|
||||
settingEncodeUrl: boolean,
|
||||
};
|
||||
export interface HarRequest {
|
||||
method: string;
|
||||
url: string;
|
||||
httpVersion: string;
|
||||
cookies: Array<HarCookie>;
|
||||
headers: Array<HarHeader>;
|
||||
queryString: Array<HarQueryString>;
|
||||
postData?: HarPostData;
|
||||
headersSize: number;
|
||||
bodySize: number;
|
||||
comment?: string;
|
||||
settingEncodeUrl: boolean;
|
||||
}
|
||||
|
||||
export type HarContent = {
|
||||
size: number,
|
||||
compression?: number,
|
||||
mimeType: string,
|
||||
text?: string,
|
||||
encoding?: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarContent {
|
||||
size: number;
|
||||
compression?: number;
|
||||
mimeType: string;
|
||||
text?: string;
|
||||
encoding?: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarResponse = {
|
||||
status: number,
|
||||
statusText: string,
|
||||
httpVersion: string,
|
||||
cookies: Array<HarCookie>,
|
||||
headers: Array<HarHeader>,
|
||||
content: HarContent,
|
||||
redirectURL: string,
|
||||
headersSize: number,
|
||||
bodySize: number,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarResponse {
|
||||
status: number;
|
||||
statusText: string;
|
||||
httpVersion: string;
|
||||
cookies: Array<HarCookie>;
|
||||
headers: Array<HarHeader>;
|
||||
content: HarContent;
|
||||
redirectURL: string;
|
||||
headersSize: number;
|
||||
bodySize: number;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarRequestCache = {
|
||||
expires?: string,
|
||||
lastAccess: string,
|
||||
eTag: string,
|
||||
hitCount: number,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarRequestCache {
|
||||
expires?: string;
|
||||
lastAccess: string;
|
||||
eTag: string;
|
||||
hitCount: number;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarCache = {
|
||||
beforeRequest?: HarRequestCache,
|
||||
afterRequest?: HarRequestCache,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarCache {
|
||||
beforeRequest?: HarRequestCache;
|
||||
afterRequest?: HarRequestCache;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarEntryTimings = {
|
||||
blocked?: number,
|
||||
dns?: number,
|
||||
connect?: number,
|
||||
send: number,
|
||||
wait: number,
|
||||
receive: number,
|
||||
ssl?: number,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarEntryTimings {
|
||||
blocked?: number;
|
||||
dns?: number;
|
||||
connect?: number;
|
||||
send: number;
|
||||
wait: number;
|
||||
receive: number;
|
||||
ssl?: number;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarEntry = {
|
||||
pageref?: string,
|
||||
startedDateTime: string,
|
||||
time: number,
|
||||
request: HarRequest,
|
||||
response: HarResponse,
|
||||
cache: HarCache,
|
||||
timings: HarEntryTimings,
|
||||
serverIPAddress?: string,
|
||||
connection?: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarEntry {
|
||||
pageref?: string;
|
||||
startedDateTime: string;
|
||||
time: number;
|
||||
request: HarRequest;
|
||||
response: HarResponse;
|
||||
cache: HarCache;
|
||||
timings: HarEntryTimings;
|
||||
serverIPAddress?: string;
|
||||
connection?: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarPageTimings = {
|
||||
onContentLoad?: number,
|
||||
onLoad?: number,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarPageTimings {
|
||||
onContentLoad?: number;
|
||||
onLoad?: number;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarPage = {
|
||||
startedDateTime: string,
|
||||
id: string,
|
||||
title: string,
|
||||
pageTimings: HarPageTimings,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarPage {
|
||||
startedDateTime: string;
|
||||
id: string;
|
||||
title: string;
|
||||
pageTimings: HarPageTimings;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarCreator = {
|
||||
name: string,
|
||||
version: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarCreator {
|
||||
name: string;
|
||||
version: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarBrowser = {
|
||||
name: string,
|
||||
version: string,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarBrowser {
|
||||
name: string;
|
||||
version: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type HarLog = {
|
||||
version: string,
|
||||
creator: HarCreator,
|
||||
browser?: HarBrowser,
|
||||
pages?: Array<HarPage>,
|
||||
entries: Array<HarEntry>,
|
||||
comment?: string,
|
||||
};
|
||||
export interface HarLog {
|
||||
version: string;
|
||||
creator: HarCreator;
|
||||
browser?: HarBrowser;
|
||||
pages?: Array<HarPage>;
|
||||
entries: Array<HarEntry>;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export type Har = {
|
||||
log: HarLog,
|
||||
};
|
||||
export interface Har {
|
||||
log: HarLog;
|
||||
}
|
||||
|
||||
export type ExportRequest = {
|
||||
requestId: string,
|
||||
environmentId: string | null,
|
||||
};
|
||||
export interface ExportRequest {
|
||||
requestId: string;
|
||||
environmentId: string | null;
|
||||
}
|
||||
|
||||
export async function exportHar(exportRequests: Array<ExportRequest>): Promise<Har> {
|
||||
export async function exportHar(exportRequests: Array<ExportRequest>) {
|
||||
// Export HAR entries with the same start time in order to keep their workspace sort order.
|
||||
const startedDateTime = new Date().toISOString();
|
||||
const entries: Array<HarEntry> = [];
|
||||
|
||||
for (const exportRequest of exportRequests) {
|
||||
const request: Request | null = await models.request.getById(exportRequest.requestId);
|
||||
|
||||
if (!request) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const harRequest = await exportHarWithRequest(request, exportRequest.environmentId);
|
||||
|
||||
if (!harRequest) {
|
||||
continue;
|
||||
}
|
||||
@@ -194,6 +196,7 @@ export async function exportHar(exportRequests: Array<ExportRequest>): Promise<H
|
||||
exportRequest.environmentId || null,
|
||||
);
|
||||
const harResponse = await exportHarResponse(response);
|
||||
|
||||
if (!harResponse) {
|
||||
continue;
|
||||
}
|
||||
@@ -215,11 +218,10 @@ export async function exportHar(exportRequests: Array<ExportRequest>): Promise<H
|
||||
},
|
||||
comment: request.name,
|
||||
};
|
||||
|
||||
entries.push(entry);
|
||||
}
|
||||
|
||||
return {
|
||||
const har: Har = {
|
||||
log: {
|
||||
version: '1.2',
|
||||
creator: {
|
||||
@@ -229,9 +231,10 @@ export async function exportHar(exportRequests: Array<ExportRequest>): Promise<H
|
||||
entries: entries,
|
||||
},
|
||||
};
|
||||
return har;
|
||||
}
|
||||
|
||||
export async function exportHarResponse(response: ResponseModel | null): Promise<HarResponse> {
|
||||
export async function exportHarResponse(response: ResponseModel | null) {
|
||||
if (!response) {
|
||||
return {
|
||||
status: 0,
|
||||
@@ -249,7 +252,7 @@ export async function exportHarResponse(response: ResponseModel | null): Promise
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
const harResponse: HarResponse = {
|
||||
status: response.statusCode,
|
||||
statusText: response.statusMessage,
|
||||
httpVersion: 'HTTP/1.1',
|
||||
@@ -260,14 +263,16 @@ export async function exportHarResponse(response: ResponseModel | null): Promise
|
||||
headersSize: -1,
|
||||
bodySize: -1,
|
||||
};
|
||||
return harResponse;
|
||||
}
|
||||
|
||||
export async function exportHarRequest(
|
||||
requestId: string,
|
||||
environmentId: string,
|
||||
addContentLength: boolean = false,
|
||||
): Promise<HarRequest | null> {
|
||||
addContentLength = false,
|
||||
) {
|
||||
const request = await models.request.getById(requestId);
|
||||
|
||||
if (!request) {
|
||||
return null;
|
||||
}
|
||||
@@ -278,8 +283,8 @@ export async function exportHarRequest(
|
||||
export async function exportHarWithRequest(
|
||||
request: Request,
|
||||
environmentId: string | null,
|
||||
addContentLength: boolean = false,
|
||||
): Promise<HarRequest | null> {
|
||||
addContentLength = false,
|
||||
) {
|
||||
try {
|
||||
const renderResult = await getRenderedRequestAndContext(request, environmentId);
|
||||
const renderedRequest = await _applyRequestPluginHooks(
|
||||
@@ -298,16 +303,16 @@ export async function exportHarWithRequest(
|
||||
|
||||
async function _applyRequestPluginHooks(
|
||||
renderedRequest: RenderedRequest,
|
||||
renderedContext: Object,
|
||||
): Promise<RenderedRequest> {
|
||||
renderedContext: Record<string, any>,
|
||||
) {
|
||||
let newRenderedRequest = renderedRequest;
|
||||
|
||||
for (const { plugin, hook } of await plugins.getRequestHooks()) {
|
||||
newRenderedRequest = clone(newRenderedRequest);
|
||||
|
||||
const context = {
|
||||
...(pluginContexts.app.init(): Object),
|
||||
...(pluginContexts.request.init(newRenderedRequest, renderedContext): Object),
|
||||
...(pluginContexts.store.init(plugin): Object),
|
||||
...(pluginContexts.app.init() as Record<string, any>),
|
||||
...(pluginContexts.request.init(newRenderedRequest, renderedContext) as Record<string, any>),
|
||||
...(pluginContexts.store.init(plugin) as Record<string, any>),
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -323,24 +328,28 @@ async function _applyRequestPluginHooks(
|
||||
|
||||
export async function exportHarWithRenderedRequest(
|
||||
renderedRequest: RenderedRequest,
|
||||
addContentLength: boolean = false,
|
||||
): Promise<HarRequest> {
|
||||
addContentLength = false,
|
||||
) {
|
||||
const url = smartEncodeUrl(renderedRequest.url, renderedRequest.settingEncodeUrl);
|
||||
|
||||
if (addContentLength) {
|
||||
const hasContentLengthHeader =
|
||||
misc.filterHeaders(renderedRequest.headers, 'Content-Length').length > 0;
|
||||
filterHeaders(renderedRequest.headers, 'Content-Length').length > 0;
|
||||
|
||||
if (!hasContentLengthHeader) {
|
||||
const name = 'Content-Length';
|
||||
const value = Buffer.byteLength((renderedRequest.body || {}).text || '').toString();
|
||||
renderedRequest.headers.push({ name, value });
|
||||
renderedRequest.headers.push({
|
||||
name,
|
||||
value,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Set auth header if we have it
|
||||
if (!misc.hasAuthHeader(renderedRequest.headers)) {
|
||||
if (!hasAuthHeader(renderedRequest.headers)) {
|
||||
const header = await getAuthHeader(renderedRequest, url);
|
||||
|
||||
if (header) {
|
||||
renderedRequest.headers.push({
|
||||
name: header.name,
|
||||
@@ -349,7 +358,7 @@ export async function exportHarWithRenderedRequest(
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
const harRequest: HarRequest = {
|
||||
method: renderedRequest.method,
|
||||
url,
|
||||
httpVersion: 'HTTP/1.1',
|
||||
@@ -361,32 +370,39 @@ export async function exportHarWithRenderedRequest(
|
||||
bodySize: -1,
|
||||
settingEncodeUrl: renderedRequest.settingEncodeUrl,
|
||||
};
|
||||
return harRequest;
|
||||
}
|
||||
|
||||
function getRequestCookies(renderedRequest: RenderedRequest): Array<HarCookie> {
|
||||
function getRequestCookies(renderedRequest: RenderedRequest) {
|
||||
const jar = jarFromCookies(renderedRequest.cookieJar.cookies);
|
||||
const domainCookies = jar.getCookiesSync(renderedRequest.url);
|
||||
return domainCookies.map(mapCookie);
|
||||
const harCookies: Array<HarCookie> = domainCookies.map(mapCookie);
|
||||
return harCookies;
|
||||
}
|
||||
|
||||
function getReponseCookies(response: ResponseModel): Array<HarCookie> {
|
||||
return misc
|
||||
.getSetCookieHeaders(response.headers)
|
||||
.map(h => {
|
||||
let cookie;
|
||||
try {
|
||||
cookie = toughCookie.parse(h.value || '');
|
||||
} catch (error) {}
|
||||
if (!cookie) {
|
||||
return null;
|
||||
}
|
||||
function getReponseCookies(response: ResponseModel) {
|
||||
const headers = response.headers.filter(Boolean) as Array<HarCookie>;
|
||||
const responseCookies = getSetCookieHeaders(headers)
|
||||
.reduce((accumulator, harCookie) => {
|
||||
let cookie: null | undefined | toughCookie = null;
|
||||
|
||||
return mapCookie(cookie);
|
||||
})
|
||||
.filter(Boolean);
|
||||
try {
|
||||
cookie = toughCookie.parse(harCookie.value || '');
|
||||
} catch (error) {}
|
||||
|
||||
if (cookie === null || cookie === undefined) {
|
||||
return accumulator;
|
||||
}
|
||||
|
||||
return [
|
||||
...accumulator,
|
||||
mapCookie(cookie as unknown as Cookie),
|
||||
];
|
||||
}, [] as Array<HarCookie>);
|
||||
return responseCookies;
|
||||
}
|
||||
|
||||
function mapCookie(cookie: Cookie): HarCookie {
|
||||
function mapCookie(cookie: Cookie) {
|
||||
const harCookie: HarCookie = {
|
||||
name: cookie.key,
|
||||
value: cookie.value,
|
||||
@@ -401,7 +417,8 @@ function mapCookie(cookie: Cookie): HarCookie {
|
||||
}
|
||||
|
||||
if (cookie.expires) {
|
||||
let expires = null;
|
||||
let expires: Date | null = null;
|
||||
|
||||
if (cookie.expires instanceof Date) {
|
||||
expires = cookie.expires;
|
||||
} else if (typeof cookie.expires === 'string') {
|
||||
@@ -427,59 +444,55 @@ function mapCookie(cookie: Cookie): HarCookie {
|
||||
return harCookie;
|
||||
}
|
||||
|
||||
function getResponseContent(response: ResponseModel): HarContent {
|
||||
let body: Buffer | null = models.response.getBodyBuffer(response);
|
||||
function getResponseContent(response: ResponseModel) {
|
||||
let body = models.response.getBodyBuffer(response);
|
||||
|
||||
if (body === null) {
|
||||
body = Buffer.alloc(0);
|
||||
}
|
||||
|
||||
return {
|
||||
const harContent: HarContent = {
|
||||
size: body.byteLength,
|
||||
mimeType: response.contentType,
|
||||
text: body.toString('utf8'),
|
||||
};
|
||||
return harContent;
|
||||
}
|
||||
|
||||
function getResponseHeaders(response: ResponseModel): Array<HarHeader> {
|
||||
function getResponseHeaders(response: ResponseModel) {
|
||||
return response.headers
|
||||
.filter(header => header.name)
|
||||
.map(h => {
|
||||
return {
|
||||
name: h.name,
|
||||
value: h.value,
|
||||
};
|
||||
});
|
||||
.map<HarHeader>(header => ({
|
||||
name: header.name,
|
||||
value: header.value,
|
||||
}));
|
||||
}
|
||||
|
||||
function getRequestHeaders(renderedRequest: RenderedRequest): Array<HarHeader> {
|
||||
function getRequestHeaders(renderedRequest: RenderedRequest) {
|
||||
return renderedRequest.headers
|
||||
.filter(header => header.name)
|
||||
.map(header => {
|
||||
return {
|
||||
name: header.name,
|
||||
value: header.value,
|
||||
};
|
||||
});
|
||||
.map<HarHeader>(header => ({
|
||||
name: header.name,
|
||||
value: header.value,
|
||||
}));
|
||||
}
|
||||
|
||||
function getRequestQueryString(renderedRequest: RenderedRequest): Array<HarQueryString> {
|
||||
return renderedRequest.parameters.map(parameter => {
|
||||
return {
|
||||
name: parameter.name,
|
||||
value: parameter.value,
|
||||
};
|
||||
});
|
||||
return renderedRequest.parameters.map<HarQueryString>(parameter => ({
|
||||
name: parameter.name,
|
||||
value: parameter.value,
|
||||
}));
|
||||
}
|
||||
|
||||
function getRequestPostData(renderedRequest: RenderedRequest): HarPostData | void {
|
||||
function getRequestPostData(renderedRequest: RenderedRequest): HarPostData | undefined {
|
||||
let body;
|
||||
|
||||
if (renderedRequest.body.fileName) {
|
||||
try {
|
||||
body = newBodyRaw(fs.readFileSync(renderedRequest.body.fileName, 'base64'));
|
||||
} catch (e) {
|
||||
console.warn('[code gen] Failed to read file', e);
|
||||
return undefined;
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
// For every other type, Insomnia uses the same body format as HAR
|
||||
@@ -487,6 +500,7 @@ function getRequestPostData(renderedRequest: RenderedRequest): HarPostData | voi
|
||||
}
|
||||
|
||||
let params = [];
|
||||
|
||||
if (body.params) {
|
||||
params = body.params.map(param => {
|
||||
if (param.type === 'file') {
|
||||
@@ -1,15 +1,14 @@
|
||||
// @flow
|
||||
import type { HotKeyDefinition, KeyBindings, KeyCombination } from './hotkeys';
|
||||
import { areSameKeyCombinations, getPlatformKeyCombinations } from './hotkeys';
|
||||
import * as models from '../models';
|
||||
|
||||
function _pressedHotKey(e: KeyboardEvent, bindings: KeyBindings): boolean {
|
||||
const _pressedHotKey = (event: KeyboardEvent, bindings: KeyBindings) => {
|
||||
const pressedKeyComb: KeyCombination = {
|
||||
ctrl: e.ctrlKey,
|
||||
alt: e.altKey,
|
||||
shift: e.shiftKey,
|
||||
meta: e.metaKey,
|
||||
keyCode: e.keyCode,
|
||||
ctrl: event.ctrlKey,
|
||||
alt: event.altKey,
|
||||
shift: event.shiftKey,
|
||||
meta: event.metaKey,
|
||||
keyCode: event.keyCode,
|
||||
};
|
||||
const keyCombList = getPlatformKeyCombinations(bindings);
|
||||
|
||||
@@ -20,35 +19,33 @@ function _pressedHotKey(e: KeyboardEvent, bindings: KeyBindings): boolean {
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Check whether a hotkey has been pressed.
|
||||
* @param e the activated keyboard event.
|
||||
* @param event the activated keyboard event.
|
||||
* @param definition the hotkey definition being checked.
|
||||
* @returns {Promise<boolean>}
|
||||
*/
|
||||
export async function pressedHotKey(
|
||||
e: KeyboardEvent,
|
||||
export const pressedHotKey = async (
|
||||
event: KeyboardEvent,
|
||||
definition: HotKeyDefinition,
|
||||
): Promise<boolean> {
|
||||
) => {
|
||||
const settings = await models.settings.getOrCreate();
|
||||
return _pressedHotKey(e, settings.hotKeyRegistry[definition.id]);
|
||||
}
|
||||
return _pressedHotKey(event, settings.hotKeyRegistry[definition.id]);
|
||||
};
|
||||
|
||||
/**
|
||||
* Call callback if the hotkey has been pressed.
|
||||
* @param e the activated keyboard event.
|
||||
* @param event the activated keyboard event.
|
||||
* @param definition the hotkey definition being checked.
|
||||
* @param callback to be called if the hotkey has been activated.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
export async function executeHotKey(
|
||||
e: KeyboardEvent,
|
||||
export const executeHotKey = async <T extends Function>(
|
||||
event: KeyboardEvent,
|
||||
definition: HotKeyDefinition,
|
||||
callback: Function,
|
||||
): Promise<void> {
|
||||
if (await pressedHotKey(e, definition)) {
|
||||
callback: T,
|
||||
) => {
|
||||
if (await pressedHotKey(event, definition)) {
|
||||
callback();
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import { keyboardKeys } from './keyboard-keys';
|
||||
import { ALT_SYM, CTRL_SYM, isMac, META_SYM, SHIFT_SYM } from './constants';
|
||||
import { strings } from './strings';
|
||||
@@ -7,38 +6,36 @@ import { strings } from './strings';
|
||||
* The readable definition of a hotkey.
|
||||
* The {@code id} the hotkey's reference id.
|
||||
*/
|
||||
export type HotKeyDefinition = {
|
||||
id: string,
|
||||
description: string,
|
||||
};
|
||||
export interface HotKeyDefinition {
|
||||
id: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The combination of key presses that will activate a hotkey if pressed.
|
||||
*/
|
||||
export type KeyCombination = {
|
||||
ctrl: boolean,
|
||||
alt: boolean,
|
||||
shift: boolean,
|
||||
meta: boolean,
|
||||
keyCode: number,
|
||||
};
|
||||
export interface KeyCombination {
|
||||
ctrl: boolean;
|
||||
alt: boolean;
|
||||
shift: boolean;
|
||||
meta: boolean;
|
||||
keyCode: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* The collection of a hotkey's key combinations for each platforms.
|
||||
*/
|
||||
export type KeyBindings = {
|
||||
macKeys: Array<KeyCombination>,
|
||||
export interface KeyBindings {
|
||||
macKeys: Array<KeyCombination>;
|
||||
// The key combinations for both Windows and Linux.
|
||||
winLinuxKeys: Array<KeyCombination>,
|
||||
};
|
||||
winLinuxKeys: Array<KeyCombination>;
|
||||
}
|
||||
|
||||
/**
|
||||
* The collection of defined hotkeys.
|
||||
* The registry maps a hotkey by its reference id to its key bindings.
|
||||
*/
|
||||
export type HotKeyRegistry = {
|
||||
[refId: string]: KeyBindings,
|
||||
};
|
||||
export type HotKeyRegistry = Record<string, KeyBindings>;
|
||||
|
||||
function defineHotKey(id: string, description: string): HotKeyDefinition {
|
||||
return {
|
||||
@@ -85,84 +82,53 @@ function keyBinds(
|
||||
* The collection of available hotkeys' and their definitions.
|
||||
*/
|
||||
// Not using dot, because NeDB prohibits field names to contain dots.
|
||||
export const hotKeyRefs: { [string]: HotKeyDefinition } = {
|
||||
export const hotKeyRefs: Record<string, HotKeyDefinition> = {
|
||||
WORKSPACE_SHOW_SETTINGS: defineHotKey(
|
||||
'workspace_showSettings',
|
||||
`Show ${strings.document} / ${strings.collection} Settings`,
|
||||
),
|
||||
|
||||
REQUEST_SHOW_SETTINGS: defineHotKey('request_showSettings', 'Show Request Settings'),
|
||||
|
||||
PREFERENCES_SHOW_KEYBOARD_SHORTCUTS: defineHotKey(
|
||||
'preferences_showKeyboardShortcuts',
|
||||
'Show Keyboard Shortcuts',
|
||||
),
|
||||
|
||||
PREFERENCES_SHOW_GENERAL: defineHotKey('preferences_showGeneral', 'Show App Preferences'),
|
||||
|
||||
TOGGLE_MAIN_MENU: defineHotKey('toggleMainMenu', 'Toggle Main Menu'),
|
||||
|
||||
REQUEST_QUICK_SWITCH: defineHotKey('request_quickSwitch', 'Switch Requests'),
|
||||
|
||||
SHOW_RECENT_REQUESTS: defineHotKey('request_showRecent', 'Show Recent Requests'),
|
||||
|
||||
SHOW_RECENT_REQUESTS_PREVIOUS: defineHotKey(
|
||||
'request_showRecentPrevious',
|
||||
'Show Recent Requests (Previous)',
|
||||
),
|
||||
|
||||
PLUGIN_RELOAD: defineHotKey('plugin_reload', 'Reload Plugins'),
|
||||
|
||||
SHOW_AUTOCOMPLETE: defineHotKey('showAutocomplete', 'Show Autocomplete'),
|
||||
|
||||
REQUEST_SEND: defineHotKey('request_send', 'Send Request'),
|
||||
|
||||
REQUEST_SHOW_OPTIONS: defineHotKey('request_showOptions', 'Send Request (Options)'),
|
||||
|
||||
ENVIRONMENT_SHOW_EDITOR: defineHotKey('environment_showEditor', 'Show Environment Editor'),
|
||||
|
||||
ENVIRONMENT_SHOW_SWITCH_MENU: defineHotKey('environment_showSwitchMenu', 'Switch Environments'),
|
||||
|
||||
REQUEST_TOGGLE_HTTP_METHOD_MENU: defineHotKey(
|
||||
'request_toggleHttpMethodMenu',
|
||||
'Change HTTP Method',
|
||||
),
|
||||
|
||||
REQUEST_TOGGLE_HISTORY: defineHotKey('request_toggleHistory', 'Show Request History'),
|
||||
|
||||
REQUEST_FOCUS_URL: defineHotKey('request_focusUrl', 'Focus URL'),
|
||||
|
||||
REQUEST_SHOW_GENERATE_CODE_EDITOR: defineHotKey(
|
||||
'request_showGenerateCodeEditor',
|
||||
'Generate Code',
|
||||
),
|
||||
|
||||
SIDEBAR_FOCUS_FILTER: defineHotKey('sidebar_focusFilter', 'Filter Sidebar'),
|
||||
|
||||
SIDEBAR_TOGGLE: defineHotKey('sidebar_toggle', 'Toggle Sidebar'),
|
||||
|
||||
RESPONSE_FOCUS: defineHotKey('response_focus', 'Focus Response'),
|
||||
|
||||
SHOW_COOKIES_EDITOR: defineHotKey('showCookiesEditor', 'Edit Cookies'),
|
||||
|
||||
REQUEST_SHOW_CREATE: defineHotKey('request_showCreate', 'Create Request'),
|
||||
|
||||
REQUEST_QUICK_CREATE: defineHotKey('request_quickCreate', 'Create Request (Quick)'),
|
||||
|
||||
REQUEST_SHOW_DELETE: defineHotKey('request_showDelete', 'Delete Request'),
|
||||
|
||||
REQUEST_SHOW_CREATE_FOLDER: defineHotKey('request_showCreateFolder', 'Create Folder'),
|
||||
|
||||
REQUEST_SHOW_DUPLICATE: defineHotKey('request_showDuplicate', 'Duplicate Request'),
|
||||
|
||||
REQUEST_TOGGLE_PIN: defineHotKey('request_togglePin', 'Pin/Unpin Request'),
|
||||
|
||||
CLOSE_DROPDOWN: defineHotKey('closeDropdown', 'Close Dropdown'),
|
||||
|
||||
CLOSE_MODAL: defineHotKey('closeModal', 'Close Modal'),
|
||||
|
||||
ENVIRONMENT_UNCOVER_VARIABLES: defineHotKey('environment_uncoverVariables', 'Uncover Variables'),
|
||||
|
||||
// Designer-specific
|
||||
SHOW_SPEC_EDITOR: defineHotKey('activity_specEditor', 'Show Spec Activity'),
|
||||
SHOW_TEST: defineHotKey('activity_test', 'Show Test Activity'),
|
||||
@@ -179,52 +145,42 @@ const defaultRegistry: HotKeyRegistry = {
|
||||
keyComb(false, false, true, true, keyboardKeys.comma.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.comma.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_SETTINGS.id]: keyBinds(
|
||||
keyComb(false, true, true, true, keyboardKeys.comma.keyCode),
|
||||
keyComb(true, true, true, false, keyboardKeys.comma.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.PREFERENCES_SHOW_KEYBOARD_SHORTCUTS.id]: keyBinds(
|
||||
keyComb(true, false, true, true, keyboardKeys.forwardslash.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.forwardslash.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.PREFERENCES_SHOW_GENERAL.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.comma.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.comma.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.TOGGLE_MAIN_MENU.id]: keyBinds(
|
||||
keyComb(false, true, false, true, keyboardKeys.comma.keyCode),
|
||||
keyComb(true, true, false, false, keyboardKeys.comma.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_QUICK_SWITCH.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.p.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.p.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_RECENT_REQUESTS.id]: keyBinds(
|
||||
keyComb(true, false, false, false, keyboardKeys.tab.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.tab.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_RECENT_REQUESTS_PREVIOUS.id]: keyBinds(
|
||||
keyComb(true, false, true, false, keyboardKeys.tab.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.tab.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.PLUGIN_RELOAD.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.r.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.r.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_AUTOCOMPLETE.id]: keyBinds(
|
||||
keyComb(true, false, false, false, keyboardKeys.space.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.space.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SEND.id]: keyBinds(
|
||||
[
|
||||
keyComb(false, false, false, true, keyboardKeys.enter.keyCode),
|
||||
@@ -237,127 +193,102 @@ const defaultRegistry: HotKeyRegistry = {
|
||||
keyComb(false, false, false, false, keyboardKeys.f5.keyCode),
|
||||
],
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_OPTIONS.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.enter.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.enter.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.ENVIRONMENT_SHOW_EDITOR.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.e.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.e.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.ENVIRONMENT_SHOW_SWITCH_MENU.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.e.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.e.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_TOGGLE_HTTP_METHOD_MENU.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.l.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.l.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_TOGGLE_HISTORY.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.h.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.h.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_FOCUS_URL.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.l.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.l.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_GENERATE_CODE_EDITOR.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.g.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.g.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SIDEBAR_FOCUS_FILTER.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.f.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.f.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SIDEBAR_TOGGLE.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.backslash.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.backslash.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.RESPONSE_FOCUS.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.singlequote.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.singlequote.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_COOKIES_EDITOR.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.k.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.k.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_CREATE.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.n.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.n.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_QUICK_CREATE.id]: keyBinds(
|
||||
keyComb(false, true, false, true, keyboardKeys.n.keyCode),
|
||||
keyComb(true, true, false, false, keyboardKeys.n.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_DELETE.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.delete.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.delete.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_CREATE_FOLDER.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.n.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.n.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_SHOW_DUPLICATE.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.d.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.d.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.REQUEST_TOGGLE_PIN.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.p.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.p.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.CLOSE_DROPDOWN.id]: keyBinds(
|
||||
keyComb(false, false, false, false, keyboardKeys.esc.keyCode),
|
||||
keyComb(false, false, false, false, keyboardKeys.esc.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.CLOSE_MODAL.id]: keyBinds(
|
||||
keyComb(false, false, false, false, keyboardKeys.esc.keyCode),
|
||||
keyComb(false, false, false, false, keyboardKeys.esc.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.ENVIRONMENT_UNCOVER_VARIABLES.id]: keyBinds(
|
||||
keyComb(false, true, true, false, keyboardKeys.u.keyCode),
|
||||
keyComb(false, true, true, false, keyboardKeys.u.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_SPEC_EDITOR.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.s.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.s.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_TEST.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.t.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.t.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_MONITOR.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.m.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.m.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.SHOW_HOME.id]: keyBinds(
|
||||
keyComb(false, false, true, true, keyboardKeys.h.keyCode),
|
||||
keyComb(true, false, true, false, keyboardKeys.h.keyCode),
|
||||
),
|
||||
|
||||
[hotKeyRefs.FILTER_DOCUMENTS.id]: keyBinds(
|
||||
keyComb(false, false, false, true, keyboardKeys.f.keyCode),
|
||||
keyComb(true, false, false, false, keyboardKeys.f.keyCode),
|
||||
@@ -391,12 +322,15 @@ export function newDefaultKeyBindings(hotKeyRefId: string): KeyBindings {
|
||||
*/
|
||||
export function newDefaultRegistry(): HotKeyRegistry {
|
||||
const newDefaults: HotKeyRegistry = {};
|
||||
|
||||
for (const refId in defaultRegistry) {
|
||||
if (!defaultRegistry.hasOwnProperty(refId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
newDefaults[refId] = newDefaultKeyBindings(refId);
|
||||
}
|
||||
|
||||
return newDefaults;
|
||||
}
|
||||
|
||||
@@ -409,6 +343,7 @@ export function getPlatformKeyCombinations(bindings: KeyBindings): Array<KeyComb
|
||||
if (isMac()) {
|
||||
return bindings.macKeys;
|
||||
}
|
||||
|
||||
return bindings.winLinuxKeys;
|
||||
}
|
||||
|
||||
@@ -421,7 +356,7 @@ export function getPlatformKeyCombinations(bindings: KeyBindings): Array<KeyComb
|
||||
export function areSameKeyCombinations(
|
||||
keyComb1: KeyCombination,
|
||||
keyComb2: KeyCombination,
|
||||
): boolean {
|
||||
) {
|
||||
return (
|
||||
keyComb1.alt === keyComb2.alt &&
|
||||
keyComb1.shift === keyComb2.shift &&
|
||||
@@ -438,22 +373,27 @@ export function areSameKeyCombinations(
|
||||
* @param keyBinds to check with the default ones.
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function areKeyBindingsSameAsDefault(hotKeyRefId: string, keyBinds: KeyBindings): boolean {
|
||||
export function areKeyBindingsSameAsDefault(hotKeyRefId: string, keyBinds: KeyBindings) {
|
||||
const keyCombs = getPlatformKeyCombinations(keyBinds);
|
||||
const defaultKeyCombs = getPlatformKeyCombinations(defaultRegistry[hotKeyRefId]);
|
||||
|
||||
if (keyCombs.length !== defaultKeyCombs.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const keyComb of keyCombs) {
|
||||
const found = defaultKeyCombs.find(defKeyComb => {
|
||||
if (areSameKeyCombinations(keyComb, defKeyComb)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (found == null) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -462,7 +402,7 @@ export function areKeyBindingsSameAsDefault(hotKeyRefId: string, keyBinds: KeyBi
|
||||
* @param keyCode
|
||||
* @returns {string}
|
||||
*/
|
||||
export function getChar(keyCode: number): string {
|
||||
export function getChar(keyCode: number) {
|
||||
let char;
|
||||
const key = Object.keys(keyboardKeys).find(k => keyboardKeys[k].keyCode === keyCode);
|
||||
|
||||
@@ -475,10 +415,11 @@ export function getChar(keyCode: number): string {
|
||||
return char || 'unknown';
|
||||
}
|
||||
|
||||
function joinHotKeys(mustUsePlus: boolean, keys: Array<string>): string {
|
||||
function joinHotKeys(mustUsePlus: boolean, keys: Array<string>) {
|
||||
if (!mustUsePlus && isMac()) {
|
||||
return keys.join('');
|
||||
}
|
||||
|
||||
return keys.join('+');
|
||||
}
|
||||
|
||||
@@ -487,12 +428,11 @@ function joinHotKeys(mustUsePlus: boolean, keys: Array<string>): string {
|
||||
* @param keyCode
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function isModifierKeyCode(keyCode: number): boolean {
|
||||
export function isModifierKeyCode(keyCode: number) {
|
||||
return (
|
||||
keyCode === keyboardKeys.alt.keyCode ||
|
||||
keyCode === keyboardKeys.shift.keyCode ||
|
||||
keyCode === keyboardKeys.ctrl.keyCode ||
|
||||
// Meta keys.
|
||||
keyCode === keyboardKeys.ctrl.keyCode || // Meta keys.
|
||||
keyCode === keyboardKeys.leftwindowkey.keyCode ||
|
||||
keyCode === keyboardKeys.rightwindowkey.keyCode ||
|
||||
keyCode === keyboardKeys.selectkey.keyCode
|
||||
@@ -512,22 +452,24 @@ export function isModifierKeyCode(keyCode: number): boolean {
|
||||
export function constructKeyCombinationDisplay(
|
||||
keyComb: KeyCombination,
|
||||
mustUsePlus: boolean,
|
||||
): string {
|
||||
) {
|
||||
const { ctrl, alt, shift, meta, keyCode } = keyComb;
|
||||
const chars = [];
|
||||
|
||||
const chars: Array<string> = [];
|
||||
alt && chars.push(ALT_SYM);
|
||||
shift && chars.push(SHIFT_SYM);
|
||||
ctrl && chars.push(CTRL_SYM);
|
||||
meta && chars.push(META_SYM);
|
||||
|
||||
if (keyCode != null && !isModifierKeyCode(keyCode)) {
|
||||
chars.push(getChar(keyCode));
|
||||
}
|
||||
|
||||
let joint = joinHotKeys(mustUsePlus, chars);
|
||||
|
||||
if (mustUsePlus && isModifierKeyCode(keyCode)) {
|
||||
joint += '+';
|
||||
}
|
||||
|
||||
return joint;
|
||||
}
|
||||
|
||||
@@ -544,12 +486,14 @@ export function getHotKeyDisplay(
|
||||
hotKeyRegistry: HotKeyRegistry,
|
||||
mustUsePlus: boolean,
|
||||
) {
|
||||
const hotKey: ?KeyBindings = hotKeyRegistry[hotKeyDef.id];
|
||||
const hotKey: KeyBindings | null | undefined = hotKeyRegistry[hotKeyDef.id];
|
||||
|
||||
if (!hotKey) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const keyCombs: Array<KeyCombination> = getPlatformKeyCombinations(hotKey);
|
||||
|
||||
if (keyCombs.length === 0) {
|
||||
return '';
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
// @flow
|
||||
import { convert } from 'insomnia-importers';
|
||||
import { convert, Insomnia4Data } from 'insomnia-importers';
|
||||
import clone from 'clone';
|
||||
import * as db from './database';
|
||||
import { database as db } from './database';
|
||||
import * as har from './har';
|
||||
import type { BaseModel } from '../models/index';
|
||||
import * as models from '../models/index';
|
||||
@@ -20,14 +19,13 @@ import {
|
||||
isRequestGroup,
|
||||
isWorkspace,
|
||||
} from '../models/helpers/is-model';
|
||||
import type { Workspace, WorkspaceScope } from '../models/workspace';
|
||||
import type { Workspace } from '../models/workspace';
|
||||
import type { ApiSpec } from '../models/api-spec';
|
||||
import { ImportToWorkspacePrompt, SetWorkspaceScopePrompt } from '../ui/redux/modules/helpers';
|
||||
|
||||
const WORKSPACE_ID_KEY = '__WORKSPACE_ID__';
|
||||
const BASE_ENVIRONMENT_ID_KEY = '__BASE_ENVIRONMENT_ID__';
|
||||
|
||||
const EXPORT_FORMAT = 4;
|
||||
|
||||
const EXPORT_TYPE_REQUEST = 'request';
|
||||
const EXPORT_TYPE_GRPC_REQUEST = 'grpc_request';
|
||||
const EXPORT_TYPE_REQUEST_GROUP = 'request_group';
|
||||
@@ -39,10 +37,8 @@ const EXPORT_TYPE_ENVIRONMENT = 'environment';
|
||||
const EXPORT_TYPE_API_SPEC = 'api_spec';
|
||||
const EXPORT_TYPE_PROTO_FILE = 'proto_file';
|
||||
const EXPORT_TYPE_PROTO_DIRECTORY = 'proto_directory';
|
||||
|
||||
// If we come across an ID of this form, we will replace it with a new one
|
||||
const REPLACE_ID_REGEX = /__\w+_\d+__/g;
|
||||
|
||||
const MODELS = {
|
||||
[EXPORT_TYPE_REQUEST]: models.request,
|
||||
[EXPORT_TYPE_GRPC_REQUEST]: models.grpcRequest,
|
||||
@@ -57,40 +53,40 @@ const MODELS = {
|
||||
[EXPORT_TYPE_PROTO_DIRECTORY]: models.protoDirectory,
|
||||
};
|
||||
|
||||
export type ImportResult = {
|
||||
source: string,
|
||||
error: Error | null,
|
||||
summary: { [string]: Array<BaseModel> },
|
||||
};
|
||||
export interface ImportResult {
|
||||
source: string;
|
||||
error: Error | null;
|
||||
summary: Record<string, Array<BaseModel>>;
|
||||
}
|
||||
|
||||
type ConvertResultType = {
|
||||
id: string,
|
||||
name: string,
|
||||
description: string,
|
||||
};
|
||||
interface ConvertResultType {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
type ConvertResult = {
|
||||
type: ConvertResultType,
|
||||
interface ConvertResult {
|
||||
type: ConvertResultType;
|
||||
data: {
|
||||
resources: Array<Object>,
|
||||
},
|
||||
};
|
||||
resources: Array<Record<string, any>>;
|
||||
};
|
||||
}
|
||||
|
||||
export type ImportRawConfig = {
|
||||
getWorkspaceId: () => Promise<string | null>,
|
||||
getWorkspaceScope?: string => Promise<WorkspaceScope>,
|
||||
enableDiffBasedPatching?: boolean,
|
||||
enableDiffDeep?: boolean,
|
||||
export interface ImportRawConfig {
|
||||
getWorkspaceId: ImportToWorkspacePrompt;
|
||||
getWorkspaceScope?: SetWorkspaceScopePrompt;
|
||||
enableDiffBasedPatching?: boolean;
|
||||
enableDiffDeep?: boolean;
|
||||
bypassDiffProps?: {
|
||||
url: string,
|
||||
},
|
||||
};
|
||||
url: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export async function importUri(uri: string, importConfig: ImportRawConfig): Promise<ImportResult> {
|
||||
export async function importUri(uri: string, importConfig: ImportRawConfig) {
|
||||
let rawText;
|
||||
|
||||
// If GH preview, force raw
|
||||
const url = new URL(uri);
|
||||
|
||||
if (url.origin === 'https://github.com') {
|
||||
uri = uri
|
||||
.replace('https://github.com', 'https://raw.githubusercontent.com')
|
||||
@@ -114,6 +110,7 @@ export async function importUri(uri: string, importConfig: ImportRawConfig): Pro
|
||||
if (error) {
|
||||
showError({
|
||||
title: 'Failed to import',
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
error: error.message,
|
||||
message: 'Import failed',
|
||||
});
|
||||
@@ -127,15 +124,18 @@ export async function importUri(uri: string, importConfig: ImportRawConfig): Pro
|
||||
return count === 0 ? null : `${count} ${name}`;
|
||||
})
|
||||
.filter(s => s !== null);
|
||||
|
||||
let message;
|
||||
|
||||
if (statements.length === 0) {
|
||||
message = 'Nothing was found to import.';
|
||||
} else {
|
||||
message = `You imported ${statements.join(', ')}!`;
|
||||
}
|
||||
showModal(AlertModal, { title: 'Import Succeeded', message });
|
||||
|
||||
showModal(AlertModal, {
|
||||
title: 'Import Succeeded',
|
||||
message,
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -148,22 +148,24 @@ export async function importRaw(
|
||||
enableDiffDeep,
|
||||
bypassDiffProps,
|
||||
}: ImportRawConfig,
|
||||
): Promise<ImportResult> {
|
||||
) {
|
||||
let results: ConvertResult;
|
||||
|
||||
try {
|
||||
results = await convert(rawContent);
|
||||
} catch (err) {
|
||||
return {
|
||||
const importResult: ImportResult = {
|
||||
source: 'not found',
|
||||
error: err,
|
||||
summary: {},
|
||||
};
|
||||
return importResult;
|
||||
}
|
||||
|
||||
const { data, type: resultsType } = results;
|
||||
|
||||
// Generate all the ids we may need
|
||||
const generatedIds: { [string]: string | Function } = {};
|
||||
const generatedIds: Record<string, string | ((...args: Array<any>) => any)> = {};
|
||||
|
||||
for (const r of data.resources) {
|
||||
for (const key of r._id.match(REPLACE_ID_REGEX) || []) {
|
||||
generatedIds[key] = generateId(MODELS[r._type].prefix);
|
||||
@@ -173,15 +175,11 @@ export async function importRaw(
|
||||
// Contains the ID of the workspace to be used with the import
|
||||
generatedIds[WORKSPACE_ID_KEY] = async () => {
|
||||
const workspaceId = await getWorkspaceId();
|
||||
|
||||
// First try getting the workspace to overwrite
|
||||
const workspace = await models.workspace.getById(workspaceId || 'n/a');
|
||||
|
||||
// Update this fn so it doesn't run again
|
||||
const idToUse = workspace?._id || generateId(models.workspace.prefix);
|
||||
|
||||
generatedIds[WORKSPACE_ID_KEY] = idToUse;
|
||||
|
||||
return idToUse;
|
||||
};
|
||||
|
||||
@@ -189,17 +187,15 @@ export async function importRaw(
|
||||
generatedIds[BASE_ENVIRONMENT_ID_KEY] = async () => {
|
||||
const parentId = await fnOrString(generatedIds[WORKSPACE_ID_KEY]);
|
||||
const baseEnvironment = await models.environment.getOrCreateForWorkspaceId(parentId);
|
||||
|
||||
// Update this fn so it doesn't run again
|
||||
generatedIds[BASE_ENVIRONMENT_ID_KEY] = baseEnvironment._id;
|
||||
|
||||
return baseEnvironment._id;
|
||||
};
|
||||
|
||||
// Import everything backwards so they get inserted in the correct order
|
||||
data.resources.reverse();
|
||||
|
||||
const importedDocs = {};
|
||||
|
||||
for (const model of models.all()) {
|
||||
importedDocs[model.type] = [];
|
||||
}
|
||||
@@ -227,7 +223,8 @@ export async function importRaw(
|
||||
}
|
||||
}
|
||||
|
||||
const model: Object = MODELS[resource._type];
|
||||
const model = MODELS[resource._type];
|
||||
|
||||
if (!model) {
|
||||
console.warn('Unknown doc type for import', resource._type);
|
||||
continue;
|
||||
@@ -256,7 +253,10 @@ export async function importRaw(
|
||||
) {
|
||||
try {
|
||||
JSON.parse(resource.body.text);
|
||||
resource.headers.push({ name: 'Content-Type', value: 'application/json' });
|
||||
resource.headers.push({
|
||||
name: 'Content-Type',
|
||||
value: 'application/json',
|
||||
});
|
||||
} catch (err) {
|
||||
// Not JSON
|
||||
}
|
||||
@@ -264,6 +264,7 @@ export async function importRaw(
|
||||
|
||||
const existingDoc = await db.get(model.type, resource._id);
|
||||
let newDoc: BaseModel;
|
||||
|
||||
if (existingDoc) {
|
||||
let updateDoc = resource;
|
||||
|
||||
@@ -279,20 +280,23 @@ export async function importRaw(
|
||||
|
||||
// If workspace, don't overwrite the existing scope
|
||||
if (isWorkspace(model)) {
|
||||
(updateDoc: Workspace).scope = (existingDoc: Workspace).scope;
|
||||
(updateDoc as Workspace).scope = (existingDoc as Workspace).scope;
|
||||
}
|
||||
|
||||
newDoc = await db.docUpdate(existingDoc, updateDoc);
|
||||
} else {
|
||||
if (isWorkspace(model)) {
|
||||
await updateWorkspaceScope(resource, resultsType, getWorkspaceScope);
|
||||
await updateWorkspaceScope(resource as Workspace, resultsType, getWorkspaceScope);
|
||||
}
|
||||
|
||||
newDoc = await db.docCreate(model.type, resource);
|
||||
|
||||
// Mark as not seen if we created a new workspace from sync
|
||||
if (isWorkspace(newDoc)) {
|
||||
const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newDoc._id);
|
||||
await models.workspaceMeta.update(workspaceMeta, { hasSeen: false });
|
||||
await models.workspaceMeta.update(workspaceMeta, {
|
||||
hasSeen: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -306,13 +310,13 @@ export async function importRaw(
|
||||
contents: rawContent,
|
||||
contentType: 'yaml',
|
||||
});
|
||||
|
||||
importedDocs[spec.type].push(spec);
|
||||
}
|
||||
|
||||
// Set active environment when none is currently selected and one exists
|
||||
const meta = await models.workspaceMeta.getOrCreateByParentId(workspace._id);
|
||||
const envs = importedDocs[models.environment.type];
|
||||
|
||||
if (!meta.activeEnvironmentId && envs.length > 0) {
|
||||
meta.activeEnvironmentId = envs[0]._id;
|
||||
await models.workspaceMeta.update(meta);
|
||||
@@ -320,20 +324,19 @@ export async function importRaw(
|
||||
}
|
||||
|
||||
await db.flushChanges();
|
||||
|
||||
trackEvent('Data', 'Import', resultsType.id);
|
||||
|
||||
return {
|
||||
const importRequest: ImportResult = {
|
||||
source: resultsType && typeof resultsType.id === 'string' ? resultsType.id : 'unknown',
|
||||
summary: importedDocs,
|
||||
error: null,
|
||||
};
|
||||
return importRequest;
|
||||
}
|
||||
|
||||
async function updateWorkspaceScope(
|
||||
resource: Workspace,
|
||||
resultType: ConvertResultType,
|
||||
getWorkspaceScope?: string => Promise<WorkspaceScope>,
|
||||
getWorkspaceScope?: SetWorkspaceScopePrompt,
|
||||
) {
|
||||
// Set the workspace scope if creating a new workspace
|
||||
// IF is creating a new workspace
|
||||
@@ -342,6 +345,7 @@ async function updateWorkspaceScope(
|
||||
if ((!resource.hasOwnProperty('scope') || resource.scope === null) && getWorkspaceScope) {
|
||||
const workspaceName = resource.name;
|
||||
let specName;
|
||||
|
||||
// If is from insomnia v4 and the spec has contents, add to the name when prompting
|
||||
if (isInsomniaV4Import(resultType)) {
|
||||
const spec: ApiSpec | null = await models.apiSpec.getByParentId(resource._id);
|
||||
@@ -350,23 +354,24 @@ async function updateWorkspaceScope(
|
||||
specName = spec.fileName;
|
||||
}
|
||||
}
|
||||
|
||||
const nameToPrompt = specName ? `${specName} / ${workspaceName}` : workspaceName;
|
||||
(resource: Workspace).scope = await getWorkspaceScope(nameToPrompt);
|
||||
(resource as Workspace).scope = await getWorkspaceScope(nameToPrompt);
|
||||
}
|
||||
}
|
||||
|
||||
export function isApiSpecImport({ id }: ConvertResultType): boolean {
|
||||
export function isApiSpecImport({ id }: ConvertResultType) {
|
||||
return id === 'openapi3' || id === 'swagger2';
|
||||
}
|
||||
|
||||
export function isInsomniaV4Import({ id }: ConvertResultType): boolean {
|
||||
export function isInsomniaV4Import({ id }: ConvertResultType) {
|
||||
return id === 'insomnia-4';
|
||||
}
|
||||
|
||||
export async function exportWorkspacesHAR(
|
||||
parentDoc: BaseModel | null = null,
|
||||
includePrivateDocs: boolean = false,
|
||||
): Promise<string> {
|
||||
includePrivateDocs = false,
|
||||
) {
|
||||
const docs: Array<BaseModel> = await getDocWithDescendants(parentDoc, includePrivateDocs);
|
||||
const requests: Array<BaseModel> = docs.filter(isRequest);
|
||||
return exportRequestsHAR(requests, includePrivateDocs);
|
||||
@@ -374,11 +379,12 @@ export async function exportWorkspacesHAR(
|
||||
|
||||
export async function exportRequestsHAR(
|
||||
requests: Array<BaseModel>,
|
||||
includePrivateDocs: boolean = false,
|
||||
): Promise<string> {
|
||||
includePrivateDocs = false,
|
||||
) {
|
||||
const workspaces: Array<BaseModel> = [];
|
||||
const mapRequestIdToWorkspace: Object = {};
|
||||
const workspaceLookup: Object = {};
|
||||
const mapRequestIdToWorkspace: Record<string, any> = {};
|
||||
const workspaceLookup: Record<string, any> = {};
|
||||
|
||||
for (const request of requests) {
|
||||
const ancestors: Array<BaseModel> = await db.withAncestors(request, [
|
||||
models.workspace.type,
|
||||
@@ -386,32 +392,42 @@ export async function exportRequestsHAR(
|
||||
]);
|
||||
const workspace = ancestors.find(isWorkspace);
|
||||
mapRequestIdToWorkspace[request._id] = workspace;
|
||||
|
||||
if (workspace == null || workspaceLookup.hasOwnProperty(workspace._id)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
workspaceLookup[workspace._id] = true;
|
||||
workspaces.push(workspace);
|
||||
}
|
||||
|
||||
const mapWorkspaceIdToEnvironmentId: Object = {};
|
||||
const mapWorkspaceIdToEnvironmentId: Record<string, any> = {};
|
||||
|
||||
for (const workspace of workspaces) {
|
||||
const workspaceMeta = await models.workspaceMeta.getByParentId(workspace._id);
|
||||
let environmentId = workspaceMeta ? workspaceMeta.activeEnvironmentId : null;
|
||||
const environment = await models.environment.getById(environmentId || 'n/a');
|
||||
|
||||
if (!environment || (environment.isPrivate && !includePrivateDocs)) {
|
||||
environmentId = 'n/a';
|
||||
}
|
||||
|
||||
mapWorkspaceIdToEnvironmentId[workspace._id] = environmentId;
|
||||
}
|
||||
|
||||
requests = requests.sort((a: Object, b: Object) => (a.metaSortKey < b.metaSortKey ? -1 : 1));
|
||||
const harRequests: Array<Object> = [];
|
||||
requests = requests.sort((a: Record<string, any>, b: Record<string, any>) =>
|
||||
a.metaSortKey < b.metaSortKey ? -1 : 1,
|
||||
);
|
||||
const harRequests: Array<har.ExportRequest> = [];
|
||||
|
||||
for (const request of requests) {
|
||||
const workspace = mapRequestIdToWorkspace[request._id];
|
||||
|
||||
if (workspace == null) {
|
||||
// Workspace not found for request, so don't export it.
|
||||
continue;
|
||||
}
|
||||
|
||||
const environmentId = mapWorkspaceIdToEnvironmentId[workspace._id];
|
||||
harRequests.push({
|
||||
requestId: request._id,
|
||||
@@ -420,9 +436,7 @@ export async function exportRequestsHAR(
|
||||
}
|
||||
|
||||
const data = await har.exportHar(harRequests);
|
||||
|
||||
trackEvent('Data', 'Export', 'HAR');
|
||||
|
||||
return JSON.stringify(data, null, '\t');
|
||||
}
|
||||
|
||||
@@ -430,7 +444,7 @@ export async function exportWorkspacesData(
|
||||
parentDoc: BaseModel | null,
|
||||
includePrivateDocs: boolean,
|
||||
format: 'json' | 'yaml',
|
||||
): Promise<string> {
|
||||
) {
|
||||
const docs: Array<BaseModel> = await getDocWithDescendants(parentDoc, includePrivateDocs);
|
||||
const requests: Array<BaseModel> = docs.filter(doc => isRequest(doc) || isGrpcRequest(doc));
|
||||
return exportRequestsData(requests, includePrivateDocs, format);
|
||||
@@ -440,8 +454,9 @@ export async function exportRequestsData(
|
||||
requests: Array<BaseModel>,
|
||||
includePrivateDocs: boolean,
|
||||
format: 'json' | 'yaml',
|
||||
): Promise<string> {
|
||||
const data = {
|
||||
) {
|
||||
const data: Insomnia4Data = {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
_type: 'export',
|
||||
__export_format: EXPORT_FORMAT,
|
||||
__export_date: new Date(),
|
||||
@@ -450,17 +465,21 @@ export async function exportRequestsData(
|
||||
};
|
||||
const docs: Array<BaseModel> = [];
|
||||
const workspaces: Array<BaseModel> = [];
|
||||
const mapTypeAndIdToDoc: Object = {};
|
||||
const mapTypeAndIdToDoc: Record<string, any> = {};
|
||||
|
||||
for (const req of requests) {
|
||||
const ancestors: Array<BaseModel> = clone(await db.withAncestors(req));
|
||||
|
||||
for (const ancestor of ancestors) {
|
||||
const key = ancestor.type + '___' + ancestor._id;
|
||||
|
||||
if (mapTypeAndIdToDoc.hasOwnProperty(key)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
mapTypeAndIdToDoc[key] = ancestor;
|
||||
docs.push(ancestor);
|
||||
|
||||
if (isWorkspace(ancestor)) {
|
||||
workspaces.push(ancestor);
|
||||
}
|
||||
@@ -496,47 +515,64 @@ export async function exportRequestsData(
|
||||
isProtoFile(d) ||
|
||||
isProtoDirectory(d) ||
|
||||
isWorkspace(d) ||
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d.type === models.cookieJar.type ||
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d.type === models.environment.type ||
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d.type === models.apiSpec.type
|
||||
)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// BaseModel doesn't have isPrivate, so cast it first.
|
||||
return !(d: Object).isPrivate || includePrivateDocs;
|
||||
return !d.isPrivate || includePrivateDocs;
|
||||
})
|
||||
.map((d: Object) => {
|
||||
.map(d => {
|
||||
if (isWorkspace(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_WORKSPACE;
|
||||
} else if (d.type === models.cookieJar.type) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_COOKIE_JAR;
|
||||
} else if (d.type === models.environment.type) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_ENVIRONMENT;
|
||||
} else if (d.type === models.unitTestSuite.type) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_UNIT_TEST_SUITE;
|
||||
} else if (d.type === models.unitTest.type) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_UNIT_TEST;
|
||||
} else if (isRequestGroup(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_REQUEST_GROUP;
|
||||
} else if (isRequest(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_REQUEST;
|
||||
} else if (isGrpcRequest(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_GRPC_REQUEST;
|
||||
} else if (isProtoFile(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_PROTO_FILE;
|
||||
} else if (isProtoDirectory(d)) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_PROTO_DIRECTORY;
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
} else if (d.type === models.apiSpec.type) {
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
d._type = EXPORT_TYPE_API_SPEC;
|
||||
}
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION maybe this needs to be added to the upstream type?
|
||||
// Delete the things we don't want to export
|
||||
delete d.type;
|
||||
return d;
|
||||
});
|
||||
|
||||
trackEvent('Data', 'Export', `Insomnia ${format}`);
|
||||
|
||||
if (format.toLowerCase() === 'yaml') {
|
||||
return YAML.stringify(data);
|
||||
} else if (format.toLowerCase() === 'json') {
|
||||
@@ -548,12 +584,11 @@ export async function exportRequestsData(
|
||||
|
||||
async function getDocWithDescendants(
|
||||
parentDoc: BaseModel | null = null,
|
||||
includePrivateDocs: boolean = false,
|
||||
includePrivateDocs = false,
|
||||
): Promise<Array<BaseModel>> {
|
||||
const docs = await db.withDescendants(parentDoc);
|
||||
return docs.filter(
|
||||
d =>
|
||||
// Don't include if private, except if we want to
|
||||
!(d: any).isPrivate || includePrivateDocs,
|
||||
// Don't include if private, except if we want to
|
||||
doc => !doc?.isPrivate || includePrivateDocs,
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,4 @@
|
||||
// @flow
|
||||
|
||||
export const keyboardKeys: Object = {
|
||||
export const keyboardKeys: Record<string, any> = {
|
||||
backspace: {
|
||||
keyCode: 8,
|
||||
label: 'Backspace',
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import log from 'electron-log';
|
||||
import { isDevelopment } from './constants';
|
||||
import { dirname } from 'path';
|
||||
@@ -13,8 +12,8 @@ export const initializeLogging = () => {
|
||||
// Set the max log file size to 10mb
|
||||
// When the log file exceeds this limit, it will be rotated to {file name}.old.log file.
|
||||
fileTransport.maxSize = 1024 * 1024 * 10;
|
||||
|
||||
// Rotate the log file every time we start the app
|
||||
// @ts-expect-error -- TSCONVERSION seems like something is wrong here but I don't want to convert to string until I can take a closer look
|
||||
fileTransport.archiveLog(logFile);
|
||||
logFile.clear();
|
||||
}
|
||||
@@ -23,7 +22,7 @@ export const initializeLogging = () => {
|
||||
Object.assign(console, log.functions);
|
||||
};
|
||||
|
||||
export function getLogDirectory(): string {
|
||||
export function getLogDirectory() {
|
||||
const logPath = log.transports.file.getFile().path;
|
||||
return dirname(logPath);
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import marked from 'marked';
|
||||
marked.setOptions({
|
||||
renderer: new marked.Renderer(),
|
||||
gfm: true,
|
||||
// @ts-expect-error -- TSCONVERSION missing from marked types
|
||||
tables: true,
|
||||
breaks: false,
|
||||
pedantic: false,
|
||||
@@ -1,10 +1,9 @@
|
||||
// @flow
|
||||
import NeDB from 'nedb';
|
||||
import type { BaseModel } from '../models';
|
||||
import fsPath from 'path';
|
||||
import fs from 'fs';
|
||||
import * as models from '../models';
|
||||
import * as db from './database';
|
||||
import { database as db } from './database';
|
||||
import { getModelName } from '../models';
|
||||
import { difference } from 'lodash';
|
||||
import type { Workspace } from '../models/workspace';
|
||||
@@ -14,17 +13,19 @@ import * as electron from 'electron';
|
||||
import { trackEvent } from './analytics';
|
||||
import { WorkspaceScopeKeys } from '../models/workspace';
|
||||
|
||||
async function loadDesignerDb(types: Array<string>, designerDataDir: string): Promise<Object> {
|
||||
async function loadDesignerDb(
|
||||
types: Array<string>,
|
||||
designerDataDir: string,
|
||||
): Promise<Record<string, any>> {
|
||||
const designerDb = {};
|
||||
|
||||
types.forEach(type => {
|
||||
designerDb[type] = []; // initialize each type to empty array
|
||||
});
|
||||
|
||||
const promises = types.map(
|
||||
type =>
|
||||
new Promise((resolve, reject) => {
|
||||
new Promise<void>((resolve, reject) => {
|
||||
const filePath = fsPath.join(designerDataDir, `insomnia.${type}.db`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
console.log(`[db] db file for ${type} not found: ${filePath}`);
|
||||
resolve();
|
||||
@@ -36,76 +37,64 @@ async function loadDesignerDb(types: Array<string>, designerDataDir: string): Pr
|
||||
filename: filePath,
|
||||
corruptAlertThreshold: 0.9,
|
||||
});
|
||||
|
||||
// Find every entry and store in memory
|
||||
collection.find({}, (err, docs: Array<BaseModel>) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
(designerDb[type]: Array<Object>).push(...docs);
|
||||
(designerDb[type] as Array<Record<string, any>>).push(...docs);
|
||||
resolve();
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
await Promise.all(promises);
|
||||
|
||||
// Return entries, but no longer linked to the database files
|
||||
return designerDb;
|
||||
}
|
||||
|
||||
type DBType = { [string]: Array<BaseModel> };
|
||||
type DBType = Record<string, Array<BaseModel>>;
|
||||
|
||||
export type MigrationOptions = {
|
||||
useDesignerSettings: boolean,
|
||||
copyPlugins: boolean,
|
||||
copyWorkspaces: boolean,
|
||||
designerDataDir: string,
|
||||
coreDataDir: string,
|
||||
};
|
||||
export interface MigrationOptions {
|
||||
useDesignerSettings: boolean;
|
||||
copyPlugins: boolean;
|
||||
copyWorkspaces: boolean;
|
||||
designerDataDir: string;
|
||||
coreDataDir: string;
|
||||
}
|
||||
|
||||
export type MigrationResult = {
|
||||
error?: Error,
|
||||
};
|
||||
export interface MigrationResult {
|
||||
error?: Error;
|
||||
}
|
||||
|
||||
async function createCoreBackup(modelTypes: Array<string>, coreDataDir: string) {
|
||||
console.log(`[db-merge] creating backup`);
|
||||
|
||||
console.log('[db-merge] creating backup');
|
||||
const backupDir = fsPath.join(coreDataDir, 'core-backup');
|
||||
await fsx.remove(backupDir);
|
||||
await fsx.ensureDir(backupDir);
|
||||
|
||||
// Copy db files
|
||||
const filesToCopy = modelTypes.map(modelType => `insomnia.${modelType}.db`);
|
||||
|
||||
for (const entryName of filesToCopy) {
|
||||
const src = fsPath.join(coreDataDir, entryName);
|
||||
const dest = fsPath.join(backupDir, entryName);
|
||||
|
||||
await fsx.copy(src, dest);
|
||||
}
|
||||
|
||||
// Copy dirs
|
||||
const dirsToCopy = ['plugins', 'responses', 'version-control'];
|
||||
|
||||
await copyDirs(dirsToCopy, coreDataDir, backupDir);
|
||||
|
||||
console.log(`[db-merge] backup created at ${backupDir}`);
|
||||
|
||||
return backupDir;
|
||||
}
|
||||
|
||||
async function migratePlugins(designerDataDir: string, coreDataDir: string) {
|
||||
const designerPluginDir = fsPath.join(designerDataDir, 'plugins');
|
||||
const corePluginDir = fsPath.join(coreDataDir, 'plugins');
|
||||
|
||||
// get list of plugins in Designer
|
||||
const designerPlugins = await readDirs(designerPluginDir);
|
||||
|
||||
await removeDirs(designerPlugins, corePluginDir);
|
||||
await copyDirs(designerPlugins, designerPluginDir, corePluginDir);
|
||||
|
||||
// Remove plugin bundle from installed plugins because it's included with the app now
|
||||
const pluginsToDelete = [
|
||||
'insomnia-plugin-kong-bundle',
|
||||
@@ -116,7 +105,7 @@ async function migratePlugins(designerDataDir: string, coreDataDir: string) {
|
||||
await removeDirs(pluginsToDelete, corePluginDir);
|
||||
}
|
||||
|
||||
async function readDirs(srcDir: string): Array<string> {
|
||||
async function readDirs(srcDir: string) {
|
||||
if (existsAndIsDirectory(srcDir)) {
|
||||
return await fs.promises.readdir(srcDir);
|
||||
} else {
|
||||
@@ -140,47 +129,46 @@ async function copyDirs(dirs: Array<string>, srcDir: string, destDir: string) {
|
||||
async function removeDirs(dirs: Array<string>, srcDir: string) {
|
||||
for (const dir of dirs.filter(c => c)) {
|
||||
const dirToRemove = fsPath.join(srcDir, dir);
|
||||
|
||||
if (existsAndIsDirectory(dirToRemove)) {
|
||||
await fsx.remove(dirToRemove);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function existsAndIsDirectory(name: string): boolean {
|
||||
export function existsAndIsDirectory(name: string) {
|
||||
return fs.existsSync(name) && fs.statSync(name).isDirectory();
|
||||
}
|
||||
|
||||
export default async function migrateFromDesigner({
|
||||
useDesignerSettings,
|
||||
designerDataDir,
|
||||
coreDataDir,
|
||||
copyPlugins,
|
||||
copyWorkspaces,
|
||||
}: MigrationOptions): Promise<MigrationResult> {
|
||||
}: MigrationOptions) {
|
||||
console.log(
|
||||
`[db-merge] starting process for migrating from ${designerDataDir} to ${coreDataDir}`,
|
||||
);
|
||||
|
||||
const nonWorkspaceModels = [
|
||||
models.stats.type, // TODO: investigate further any implications that may invalidate collected stats
|
||||
models.settings.type,
|
||||
];
|
||||
|
||||
// Every model except those to ignore and settings is a "workspace" model
|
||||
const workspaceModels = difference(models.types(), nonWorkspaceModels);
|
||||
|
||||
const modelTypesToMerge = [];
|
||||
|
||||
if (useDesignerSettings) {
|
||||
trackEvent('Data', 'Migration', 'Settings');
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
modelTypesToMerge.push(models.settings.type);
|
||||
console.log(`[db-merge] keeping settings from Insomnia Designer`);
|
||||
console.log('[db-merge] keeping settings from Insomnia Designer');
|
||||
} else {
|
||||
console.log(`[db-merge] keeping settings from Insomnia Core`);
|
||||
console.log('[db-merge] keeping settings from Insomnia Core');
|
||||
}
|
||||
|
||||
if (copyWorkspaces) {
|
||||
trackEvent('Data', 'Migration', 'Workspaces');
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
modelTypesToMerge.push(...workspaceModels);
|
||||
}
|
||||
|
||||
@@ -189,7 +177,6 @@ export default async function migrateFromDesigner({
|
||||
try {
|
||||
// Create core backup
|
||||
backupDir = await createCoreBackup(modelTypesToMerge, coreDataDir);
|
||||
|
||||
// Load designer database
|
||||
const designerDb: DBType = await loadDesignerDb(modelTypesToMerge, designerDataDir);
|
||||
|
||||
@@ -207,7 +194,7 @@ export default async function migrateFromDesigner({
|
||||
];
|
||||
propertiesToPersist.forEach(s => {
|
||||
if (coreSettings.hasOwnProperty(s)) {
|
||||
(entries[0]: Settings)[s] = coreSettings[s];
|
||||
(entries[0] as Settings)[s] = coreSettings[s];
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -215,7 +202,7 @@ export default async function migrateFromDesigner({
|
||||
// For each workspace coming from Designer, mark workspace.scope as 'design'
|
||||
if (modelType === models.workspace.type) {
|
||||
for (const workspace of entries) {
|
||||
(workspace: Workspace).scope = WorkspaceScopeKeys.design;
|
||||
(workspace as Workspace).scope = WorkspaceScopeKeys.design;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -223,25 +210,26 @@ export default async function migrateFromDesigner({
|
||||
console.log(
|
||||
`[db-merge] merging ${entryCount} ${getModelName(modelType, entryCount)} from Designer`,
|
||||
);
|
||||
await db.batchModifyDocs({ upsert: entries, remove: [] });
|
||||
await db.batchModifyDocs({
|
||||
upsert: entries,
|
||||
remove: [],
|
||||
});
|
||||
}
|
||||
|
||||
if (copyWorkspaces) {
|
||||
console.log(`[db-merge] migrating version control data from designer to core`);
|
||||
console.log('[db-merge] migrating version control data from designer to core');
|
||||
await copyDirs(['version-control'], designerDataDir, coreDataDir);
|
||||
|
||||
console.log(`[db-merge] migrating response cache from designer to core`);
|
||||
console.log('[db-merge] migrating response cache from designer to core');
|
||||
await copyDirs(['responses'], designerDataDir, coreDataDir);
|
||||
}
|
||||
|
||||
if (copyPlugins) {
|
||||
console.log(`[db-merge] migrating plugins from designer to core`);
|
||||
console.log('[db-merge] migrating plugins from designer to core');
|
||||
trackEvent('Data', 'Migration', 'Plugins');
|
||||
await migratePlugins(designerDataDir, coreDataDir);
|
||||
}
|
||||
|
||||
console.log('[db-merge] done!');
|
||||
|
||||
trackEvent('Data', 'Migration', 'Success');
|
||||
return {};
|
||||
} catch (error) {
|
||||
@@ -249,13 +237,14 @@ export default async function migrateFromDesigner({
|
||||
console.error(error);
|
||||
trackEvent('Data', 'Migration', 'Failure');
|
||||
await restoreCoreBackup(backupDir, coreDataDir);
|
||||
return { error };
|
||||
return {
|
||||
error,
|
||||
} as MigrationResult;
|
||||
}
|
||||
}
|
||||
|
||||
export async function restoreCoreBackup(backupDir: string, coreDataDir: string) {
|
||||
if (!backupDir) {
|
||||
console.log(`[db-merge] nothing to restore; no backup was created`);
|
||||
console.log('[db-merge] nothing to restore; no backup was created');
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -264,12 +253,11 @@ export async function restoreCoreBackup(backupDir: string, coreDataDir: string)
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[db-merge] restoring from backup`);
|
||||
console.log('[db-merge] restoring from backup');
|
||||
await removeDirs(['plugins', 'responses', 'version-control'], coreDataDir);
|
||||
await fsx.copy(backupDir, coreDataDir);
|
||||
console.log(`[db-merge] restored from backup`);
|
||||
console.log('[db-merge] restored from backup');
|
||||
}
|
||||
|
||||
export function restartApp() {
|
||||
const { app } = electron.remote || electron;
|
||||
app.relaunch();
|
||||
@@ -1,5 +1,3 @@
|
||||
// @flow
|
||||
import * as electron from 'electron';
|
||||
import { Readable, Writable } from 'stream';
|
||||
import fuzzysort from 'fuzzysort';
|
||||
import * as uuid from 'uuid';
|
||||
@@ -9,17 +7,20 @@ import { METHOD_OPTIONS, METHOD_DELETE, DEBOUNCE_MILLIS } from './constants';
|
||||
|
||||
const ESCAPE_REGEX_MATCH = /[-[\]/{}()*+?.\\^$|]/g;
|
||||
|
||||
type Header = {
|
||||
name: string,
|
||||
value: string,
|
||||
};
|
||||
interface Header {
|
||||
name: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
type Parameter = {
|
||||
name: string,
|
||||
value: string,
|
||||
};
|
||||
interface Parameter {
|
||||
name: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export function filterParameters<T: Parameter>(parameters: Array<T>, name: string): Array<T> {
|
||||
export function filterParameters<T extends Parameter>(
|
||||
parameters: Array<T>,
|
||||
name: string,
|
||||
): Array<T> {
|
||||
if (!Array.isArray(parameters) || !name) {
|
||||
return [];
|
||||
}
|
||||
@@ -27,75 +28,75 @@ export function filterParameters<T: Parameter>(parameters: Array<T>, name: strin
|
||||
return parameters.filter(h => (!h || !h.name ? false : h.name === name));
|
||||
}
|
||||
|
||||
export function filterHeaders<T: Header>(headers: Array<T>, name: string): Array<T> {
|
||||
if (!Array.isArray(headers) || !name || !(typeof name === 'string')) {
|
||||
export function filterHeaders<T extends Header>(headers: Array<T>, name?: string): Array<T> {
|
||||
if (!Array.isArray(headers) || !name || typeof name !== 'string') {
|
||||
return [];
|
||||
}
|
||||
|
||||
return headers.filter(h => {
|
||||
return headers.filter(header => {
|
||||
// Never match against invalid headers
|
||||
if (!h || !h.name || typeof h.name !== 'string') {
|
||||
if (!header || !header.name || typeof header.name !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return h.name.toLowerCase() === name.toLowerCase();
|
||||
return header.name.toLowerCase() === name.toLowerCase();
|
||||
});
|
||||
}
|
||||
|
||||
export function hasContentTypeHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasContentTypeHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'content-type').length > 0;
|
||||
}
|
||||
|
||||
export function hasContentLengthHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasContentLengthHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'content-length').length > 0;
|
||||
}
|
||||
|
||||
export function hasAuthHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasAuthHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'authorization').length > 0;
|
||||
}
|
||||
|
||||
export function hasAcceptHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasAcceptHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'accept').length > 0;
|
||||
}
|
||||
|
||||
export function hasUserAgentHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasUserAgentHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'user-agent').length > 0;
|
||||
}
|
||||
|
||||
export function hasAcceptEncodingHeader<T: Header>(headers: Array<T>): boolean {
|
||||
export function hasAcceptEncodingHeader<T extends Header>(headers: Array<T>) {
|
||||
return filterHeaders(headers, 'accept-encoding').length > 0;
|
||||
}
|
||||
|
||||
export function getSetCookieHeaders<T: Header>(headers: Array<T>): Array<T> {
|
||||
export function getSetCookieHeaders<T extends Header>(headers: Array<T>): Array<T> {
|
||||
return filterHeaders(headers, 'set-cookie');
|
||||
}
|
||||
|
||||
export function getLocationHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getLocationHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'location');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
|
||||
export function getContentTypeHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getContentTypeHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'content-type');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
|
||||
export function getMethodOverrideHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getMethodOverrideHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'x-http-method-override');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
|
||||
export function getHostHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getHostHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'host');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
|
||||
export function getContentDispositionHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getContentDispositionHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'content-disposition');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
|
||||
export function getContentLengthHeader<T: Header>(headers: Array<T>): T | null {
|
||||
export function getContentLengthHeader<T extends Header>(headers: Array<T>): T | null {
|
||||
const matches = filterHeaders(headers, 'content-length');
|
||||
return matches.length ? matches[0] : null;
|
||||
}
|
||||
@@ -105,7 +106,7 @@ export function getContentLengthHeader<T: Header>(headers: Array<T>): T | null {
|
||||
* @param prefix
|
||||
* @returns {string}
|
||||
*/
|
||||
export function generateId(prefix: string): string {
|
||||
export function generateId(prefix?: string) {
|
||||
const id = uuid.v4().replace(/-/g, '');
|
||||
|
||||
if (prefix) {
|
||||
@@ -115,31 +116,32 @@ export function generateId(prefix: string): string {
|
||||
}
|
||||
}
|
||||
|
||||
export function delay(milliseconds: number = DEBOUNCE_MILLIS): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
||||
export function delay(milliseconds: number = DEBOUNCE_MILLIS) {
|
||||
return new Promise<void>(resolve => setTimeout(resolve, milliseconds));
|
||||
}
|
||||
|
||||
export function removeVowels(str: string): string {
|
||||
export function removeVowels(str: string) {
|
||||
return str.replace(/[aeiouyAEIOUY]/g, '');
|
||||
}
|
||||
|
||||
export function formatMethodName(method: string): string {
|
||||
export function formatMethodName(method: string) {
|
||||
let methodName = method || '';
|
||||
|
||||
if (method === METHOD_DELETE || method === METHOD_OPTIONS) {
|
||||
methodName = method.slice(0, 3);
|
||||
} else if (method.length > 4) {
|
||||
methodName = removeVowels(method).slice(0, 4);
|
||||
}
|
||||
|
||||
return methodName;
|
||||
}
|
||||
|
||||
export function keyedDebounce(callback: Function, millis: number = DEBOUNCE_MILLIS): Function {
|
||||
export function keyedDebounce<T extends Function>(callback: T, millis: number = DEBOUNCE_MILLIS): T {
|
||||
let timeout;
|
||||
let results = {};
|
||||
|
||||
return function(key, ...args) {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
const t: T = function(key, ...args) {
|
||||
results[key] = args;
|
||||
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(() => {
|
||||
if (!Object.keys(results).length) {
|
||||
@@ -150,23 +152,27 @@ export function keyedDebounce(callback: Function, millis: number = DEBOUNCE_MILL
|
||||
results = {};
|
||||
}, millis);
|
||||
};
|
||||
return t;
|
||||
}
|
||||
|
||||
export function debounce(callback: Function, millis: number = DEBOUNCE_MILLIS): Function {
|
||||
export function debounce<T extends Function>(
|
||||
callback: T,
|
||||
milliseconds: number = DEBOUNCE_MILLIS,
|
||||
): T {
|
||||
// For regular debounce, just use a keyed debounce with a fixed key
|
||||
return keyedDebounce(results => {
|
||||
// eslint-disable-next-line prefer-spread -- don't know if there was a "this binding" reason for this being this way so I'm leaving it alone
|
||||
callback.apply(null, results.__key__);
|
||||
}, millis).bind(null, '__key__');
|
||||
}, milliseconds).bind(null, '__key__');
|
||||
}
|
||||
|
||||
export function describeByteSize(bytes: number, long: boolean = false): string {
|
||||
export function describeByteSize(bytes: number, long = false) {
|
||||
bytes = Math.round(bytes * 10) / 10;
|
||||
let size;
|
||||
|
||||
// NOTE: We multiply these by 2 so we don't end up with
|
||||
// values like 0 GB
|
||||
|
||||
let unit;
|
||||
|
||||
if (bytes < 1024 * 2) {
|
||||
size = bytes;
|
||||
unit = long ? 'bytes' : 'B';
|
||||
@@ -185,19 +191,15 @@ export function describeByteSize(bytes: number, long: boolean = false): string {
|
||||
return `${rounded} ${unit}`;
|
||||
}
|
||||
|
||||
export function nullFn(): void {
|
||||
export function nullFn() {
|
||||
// Do nothing
|
||||
}
|
||||
|
||||
export function preventDefault(e: Event): void {
|
||||
export function preventDefault(e: Event) {
|
||||
e.preventDefault();
|
||||
}
|
||||
|
||||
export function clickLink(href: string): void {
|
||||
electron.shell.openExternal(href);
|
||||
}
|
||||
|
||||
export function fnOrString(v: string | Function, ...args: Array<any>) {
|
||||
export function fnOrString(v: string | ((...args: Array<any>) => any), ...args: Array<any>) {
|
||||
if (typeof v === 'string') {
|
||||
return v;
|
||||
} else {
|
||||
@@ -205,7 +207,7 @@ export function fnOrString(v: string | Function, ...args: Array<any>) {
|
||||
}
|
||||
}
|
||||
|
||||
export function compressObject(obj: any): string {
|
||||
export function compressObject(obj: any) {
|
||||
const compressed = zlib.gzipSync(JSON.stringify(obj));
|
||||
return compressed.toString('base64');
|
||||
}
|
||||
@@ -219,7 +221,7 @@ export function decompressObject(input: string | null): any {
|
||||
return JSON.parse(jsonBuffer.toString('utf8'));
|
||||
}
|
||||
|
||||
export function resolveHomePath(p: string): string {
|
||||
export function resolveHomePath(p: string) {
|
||||
if (p.indexOf('~/') === 0) {
|
||||
return pathJoin(process.env.HOME || '/', p.slice(1));
|
||||
} else {
|
||||
@@ -235,7 +237,7 @@ export function jsonParseOr(str: string, fallback: any): any {
|
||||
}
|
||||
}
|
||||
|
||||
export function escapeHTML(unsafeText: string): string {
|
||||
export function escapeHTML(unsafeText: string) {
|
||||
const div = document.createElement('div');
|
||||
div.innerText = unsafeText;
|
||||
return div.innerHTML;
|
||||
@@ -246,37 +248,48 @@ export function escapeHTML(unsafeText: string): string {
|
||||
* @param str - string to escape
|
||||
* @returns {string} escaped string
|
||||
*/
|
||||
export function escapeRegex(str: string): string {
|
||||
export function escapeRegex(str: string) {
|
||||
return str.replace(ESCAPE_REGEX_MATCH, '\\$&');
|
||||
}
|
||||
|
||||
export function fuzzyMatch(
|
||||
searchString: string,
|
||||
text: string,
|
||||
options: { splitSpace?: boolean, loose?: boolean } = {},
|
||||
): null | { score: number, indexes: Array<number> } {
|
||||
options: {
|
||||
splitSpace?: boolean;
|
||||
loose?: boolean;
|
||||
} = {},
|
||||
): null | {
|
||||
score: number;
|
||||
indexes: Array<number>;
|
||||
} {
|
||||
return fuzzyMatchAll(searchString, [text], options);
|
||||
}
|
||||
|
||||
export function fuzzyMatchAll(
|
||||
searchString: string,
|
||||
allText: Array<string>,
|
||||
options: { splitSpace?: boolean, loose?: boolean } = {},
|
||||
): null | { score: number, indexes: Array<number> } {
|
||||
options: {
|
||||
splitSpace?: boolean;
|
||||
loose?: boolean;
|
||||
} = {},
|
||||
) {
|
||||
if (!searchString || !searchString.trim()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const words = searchString.split(' ').filter(w => w.trim());
|
||||
const terms = options.splitSpace ? [...words, searchString] : [searchString];
|
||||
|
||||
let maxScore = null;
|
||||
let indexes = [];
|
||||
let maxScore: number | null = null;
|
||||
let indexes: Array<number> = [];
|
||||
let termsMatched = 0;
|
||||
|
||||
for (const term of terms) {
|
||||
let matchedTerm = false;
|
||||
|
||||
for (const text of allText.filter(t => !t || t.trim())) {
|
||||
const result = fuzzysort.single(term, text);
|
||||
|
||||
if (!result) {
|
||||
continue;
|
||||
}
|
||||
@@ -315,62 +328,30 @@ export function fuzzyMatchAll(
|
||||
};
|
||||
}
|
||||
|
||||
export function getViewportSize(): string | null {
|
||||
const { BrowserWindow } = electron.remote || electron;
|
||||
const w = BrowserWindow.getFocusedWindow() || BrowserWindow.getAllWindows()[0];
|
||||
|
||||
if (w) {
|
||||
const { width, height } = w.getContentBounds();
|
||||
return `${width}x${height}`;
|
||||
} else {
|
||||
// No windows open
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function getScreenResolution(): string {
|
||||
const { screen } = electron.remote || electron;
|
||||
const { width, height } = screen.getPrimaryDisplay().workAreaSize;
|
||||
return `${width}x${height}`;
|
||||
}
|
||||
|
||||
export function getUserLanguage(): string {
|
||||
const { app } = electron.remote || electron;
|
||||
return app.getLocale();
|
||||
}
|
||||
|
||||
export async function waitForStreamToFinish(s: Readable | Writable): Promise<void> {
|
||||
return new Promise(resolve => {
|
||||
if ((s: any)._readableState && (s: any)._readableState.finished) {
|
||||
export async function waitForStreamToFinish(stream: Readable | Writable) {
|
||||
return new Promise<void>(resolve => {
|
||||
// @ts-expect-error -- access of internal values that are intended to be private. We should _not_ do this.
|
||||
if (stream._readableState?.finished) {
|
||||
return resolve();
|
||||
}
|
||||
|
||||
if ((s: any)._writableState && (s: any)._writableState.finished) {
|
||||
// @ts-expect-error -- access of internal values that are intended to be private. We should _not_ do this.
|
||||
if (stream._writableState?.finished) {
|
||||
return resolve();
|
||||
}
|
||||
|
||||
s.on('close', () => {
|
||||
stream.on('close', () => {
|
||||
resolve();
|
||||
});
|
||||
|
||||
s.on('error', () => {
|
||||
stream.on('error', () => {
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function getDesignerDataDir(): string {
|
||||
const { app } = electron.remote || electron;
|
||||
return process.env.DESIGNER_DATA_PATH || pathJoin(app.getPath('appData'), 'Insomnia Designer');
|
||||
}
|
||||
export function chunkArray<T>(arr: Array<T>, chunkSize: number) {
|
||||
const chunks: Array<Array<T>> = [];
|
||||
|
||||
export function getDataDirectory(): string {
|
||||
const { app } = electron.remote || electron;
|
||||
return process.env.INSOMNIA_DATA_PATH || app.getPath('userData');
|
||||
}
|
||||
|
||||
export function chunkArray<T>(arr: Array<T>, chunkSize: number): Array<Array<T>> {
|
||||
const chunks = [];
|
||||
for (let i = 0, j = arr.length; i < j; i += chunkSize) {
|
||||
chunks.push(arr.slice(i, i + chunkSize));
|
||||
}
|
||||
@@ -378,7 +359,7 @@ export function chunkArray<T>(arr: Array<T>, chunkSize: number): Array<Array<T>>
|
||||
return chunks;
|
||||
}
|
||||
|
||||
export function pluralize(text: string): string {
|
||||
export function pluralize(text: string) {
|
||||
let trailer = 's';
|
||||
let chop = 0;
|
||||
|
||||
@@ -398,7 +379,7 @@ export function pluralize(text: string): string {
|
||||
return `${text.slice(0, text.length - chop)}${trailer}`;
|
||||
}
|
||||
|
||||
export function diffPatchObj(baseObj: {}, patchObj: {}, deep = false): ObjectComparison {
|
||||
export function diffPatchObj(baseObj: {}, patchObj: {}, deep = false) {
|
||||
const clonedBaseObj = JSON.parse(JSON.stringify(baseObj));
|
||||
|
||||
for (const prop in baseObj) {
|
||||
@@ -437,21 +418,20 @@ export function diffPatchObj(baseObj: {}, patchObj: {}, deep = false): ObjectCom
|
||||
return clonedBaseObj;
|
||||
}
|
||||
|
||||
export function isObject(obj: any) {
|
||||
export function isObject(obj: unknown) {
|
||||
return obj !== null && typeof obj === 'object';
|
||||
}
|
||||
|
||||
/**
|
||||
Finds epoch's digit count and converts it to make it exactly 13 digits.
|
||||
Which is the epoch millisecond represntation.
|
||||
*/
|
||||
export function convertEpochToMilliseconds(epoch: number) {
|
||||
/*
|
||||
Finds epoch's digit count and converts it to make it exactly 13 digits.
|
||||
Which is the epoch millisecond represntation.
|
||||
*/
|
||||
const expDigitCount = epoch.toString().length;
|
||||
const convertedEpoch = parseInt(epoch * 10 ** (13 - expDigitCount));
|
||||
return convertedEpoch;
|
||||
return parseInt(String(epoch * 10 ** (13 - expDigitCount)), 10);
|
||||
}
|
||||
|
||||
export function snapNumberToLimits(value: number, min?: number, max?: number): number {
|
||||
export function snapNumberToLimits(value: number, min?: number, max?: number) {
|
||||
const moreThanMax = max && !Number.isNaN(max) && value > max;
|
||||
const lessThanMin = min && !Number.isNaN(min) && value < min;
|
||||
|
||||
@@ -464,6 +444,6 @@ export function snapNumberToLimits(value: number, min?: number, max?: number): n
|
||||
return value;
|
||||
}
|
||||
|
||||
export function isNotNullOrUndefined(obj: any): boolean {
|
||||
export function isNotNullOrUndefined(obj: unknown) {
|
||||
return obj !== null && obj !== undefined;
|
||||
}
|
||||
@@ -1,12 +1,10 @@
|
||||
// @flow
|
||||
import type { Request } from '../models/request';
|
||||
import type { BaseModel } from '../models/index';
|
||||
|
||||
import { setDefaultProtocol } from 'insomnia-url';
|
||||
import clone from 'clone';
|
||||
import * as models from '../models';
|
||||
import { CONTENT_TYPE_GRAPHQL, JSON_ORDER_SEPARATOR } from './constants';
|
||||
import * as db from './database';
|
||||
import { database as db } from './database';
|
||||
import * as templating from '../templating';
|
||||
import type { CookieJar } from '../models/cookie-jar';
|
||||
import type { Environment } from '../models/environment';
|
||||
@@ -17,31 +15,49 @@ import { isRequestGroup } from '../models/helpers/is-model';
|
||||
|
||||
export const KEEP_ON_ERROR = 'keep';
|
||||
export const THROW_ON_ERROR = 'throw';
|
||||
|
||||
export type RenderPurpose = 'send' | 'general' | 'no-render';
|
||||
|
||||
export const RENDER_PURPOSE_SEND: RenderPurpose = 'send';
|
||||
export const RENDER_PURPOSE_GENERAL: RenderPurpose = 'general';
|
||||
export const RENDER_PURPOSE_NO_RENDER: RenderPurpose = 'no-render';
|
||||
|
||||
/** Key/value pairs to be provided to the render context */
|
||||
export type ExtraRenderInfo = Array<{ name: string, value: any }>;
|
||||
export type ExtraRenderInfo = Array<{
|
||||
name: string;
|
||||
value: any;
|
||||
}>;
|
||||
|
||||
export type RenderedRequest = Request & {
|
||||
cookies: Array<{ name: string, value: string, disabled?: boolean }>,
|
||||
cookieJar: CookieJar,
|
||||
cookies: Array<{
|
||||
name: string;
|
||||
value: string;
|
||||
disabled?: boolean;
|
||||
}>;
|
||||
cookieJar: CookieJar;
|
||||
};
|
||||
|
||||
export type RenderedGrpcRequest = GrpcRequest;
|
||||
|
||||
export type RenderedGrpcRequestBody = GrpcRequestBody;
|
||||
|
||||
export interface RenderContextAndKeys {
|
||||
context: Record<string, any>;
|
||||
keys: Array<{
|
||||
name: string;
|
||||
value: any;
|
||||
}>
|
||||
}
|
||||
|
||||
export type HandleGetRenderContext = () => Promise<RenderContextAndKeys>;
|
||||
|
||||
export type HandleRender = <T>(object: T, contextCacheKey?: string | null) => Promise<T>;
|
||||
|
||||
export async function buildRenderContext(
|
||||
ancestors: Array<BaseModel> | null,
|
||||
rootEnvironment: Environment | null,
|
||||
subEnvironment: Environment | null,
|
||||
baseContext: Object = {},
|
||||
): Object {
|
||||
const envObjects = [];
|
||||
baseContext: Record<string, any> = {},
|
||||
) {
|
||||
const envObjects: Array<Record<string, any>> = [];
|
||||
|
||||
// Get root environment keys in correct order
|
||||
// Then get sub environment keys in correct order
|
||||
@@ -52,7 +68,6 @@ export async function buildRenderContext(
|
||||
rootEnvironment.dataPropertyOrder,
|
||||
JSON_ORDER_SEPARATOR,
|
||||
);
|
||||
|
||||
envObjects.push(ordered);
|
||||
}
|
||||
|
||||
@@ -62,13 +77,13 @@ export async function buildRenderContext(
|
||||
subEnvironment.dataPropertyOrder,
|
||||
JSON_ORDER_SEPARATOR,
|
||||
);
|
||||
|
||||
envObjects.push(ordered);
|
||||
}
|
||||
|
||||
for (const doc of (ancestors || []).reverse()) {
|
||||
const ancestor: any = doc;
|
||||
const { environment, environmentPropertyOrder } = ancestor;
|
||||
|
||||
if (typeof environment === 'object' && environment !== null) {
|
||||
const ordered = orderedJSON.order(
|
||||
environment,
|
||||
@@ -87,8 +102,12 @@ export async function buildRenderContext(
|
||||
let renderContext = baseContext;
|
||||
|
||||
// Made the rendering into a recursive function to handle nested Objects
|
||||
async function renderSubContext(subObject: Object, subContext: Object): Promise<any> {
|
||||
async function renderSubContext(
|
||||
subObject: Record<string, any>,
|
||||
subContext: Record<string, any>,
|
||||
) {
|
||||
const keys = _getOrderedEnvironmentKeys(subObject);
|
||||
|
||||
for (const key of keys) {
|
||||
/*
|
||||
* If we're overwriting a string, try to render it first using the same key from the base
|
||||
@@ -126,10 +145,11 @@ export async function buildRenderContext(
|
||||
subContext[key] = subObject[key];
|
||||
}
|
||||
}
|
||||
|
||||
return subContext;
|
||||
}
|
||||
|
||||
for (const envObject: Object of envObjects) {
|
||||
for (const envObject of envObjects) {
|
||||
// For every environment render the Objects
|
||||
renderContext = await renderSubContext(envObject, renderContext);
|
||||
}
|
||||
@@ -141,6 +161,7 @@ export async function buildRenderContext(
|
||||
|
||||
// Render recursive references and tags.
|
||||
const skipNextTime = {};
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
for (const key of keys) {
|
||||
// Skip rendering keys that stayed the same multiple times. This is here because
|
||||
@@ -168,6 +189,7 @@ export async function buildRenderContext(
|
||||
finalRenderContext[key] = renderResult;
|
||||
}
|
||||
}
|
||||
|
||||
return finalRenderContext;
|
||||
}
|
||||
|
||||
@@ -182,15 +204,15 @@ export async function buildRenderContext(
|
||||
*/
|
||||
export async function render<T>(
|
||||
obj: T,
|
||||
context: Object = {},
|
||||
context: Record<string, any> = {},
|
||||
blacklistPathRegex: RegExp | null = null,
|
||||
errorMode: string = THROW_ON_ERROR,
|
||||
name: string = '',
|
||||
): Promise<T> {
|
||||
name = '',
|
||||
) {
|
||||
// Make a deep copy so no one gets mad :)
|
||||
const newObj = clone(obj);
|
||||
|
||||
async function next(x: any, path: string, first: boolean = false): Promise<any> {
|
||||
async function next<T>(x: T, path: string, first = false) {
|
||||
if (blacklistPathRegex && path.match(blacklistPathRegex)) {
|
||||
return x;
|
||||
}
|
||||
@@ -210,12 +232,15 @@ export async function render<T>(
|
||||
// Do nothing to these types
|
||||
} else if (typeof x === 'string') {
|
||||
try {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
x = await templating.render(x, { context, path });
|
||||
|
||||
// If the variable outputs a tag, render it again. This is a common use
|
||||
// case for environment variables:
|
||||
// {{ foo }} => {% uuid 'v4' %} => dd265685-16a3-4d76-a59c-e8264c16835a
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
if (x.includes('{%')) {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
x = await templating.render(x, { context, path });
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -230,11 +255,13 @@ export async function render<T>(
|
||||
} else if (typeof x === 'object' && x !== null) {
|
||||
// Don't even try rendering disabled objects
|
||||
// Note, this logic probably shouldn't be here, but w/e for now
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
if (x.disabled) {
|
||||
return x;
|
||||
}
|
||||
|
||||
const keys = Object.keys(x);
|
||||
|
||||
for (const key of keys) {
|
||||
if (first && key.indexOf('_') === 0) {
|
||||
x[key] = await next(x[key], path);
|
||||
@@ -248,21 +275,21 @@ export async function render<T>(
|
||||
return x;
|
||||
}
|
||||
|
||||
return next(newObj, name, true);
|
||||
return next<T>(newObj, name, true);
|
||||
}
|
||||
|
||||
export async function getRenderContext(
|
||||
request: Request | GrpcRequest,
|
||||
request: Request | GrpcRequest | null,
|
||||
environmentId: string | null,
|
||||
ancestors: Array<BaseModel> | null = null,
|
||||
purpose: RenderPurpose | null = null,
|
||||
extraInfo: ExtraRenderInfo | null = null,
|
||||
): Promise<Object> {
|
||||
): Promise<Record<string, any>> {
|
||||
if (!ancestors) {
|
||||
ancestors = await _getRequestAncestors(request);
|
||||
}
|
||||
|
||||
const workspace = ancestors.find(doc => doc.type === models.workspace.type);
|
||||
|
||||
if (!workspace) {
|
||||
throw new Error('Failed to render. Could not find workspace');
|
||||
}
|
||||
@@ -271,7 +298,6 @@ export async function getRenderContext(
|
||||
workspace ? workspace._id : 'n/a',
|
||||
);
|
||||
const subEnvironment = await models.environment.getById(environmentId || 'n/a');
|
||||
|
||||
const keySource = {};
|
||||
|
||||
// Function that gets Keys and stores their Source location
|
||||
@@ -283,6 +309,7 @@ export async function getRenderContext(
|
||||
|
||||
// Recurse down for Objects and Arrays
|
||||
const typeStr = Object.prototype.toString.call(subObject);
|
||||
|
||||
if (typeStr === '[object Object]') {
|
||||
for (const key of Object.keys(subObject)) {
|
||||
getKeySource(subObject[key], templatingUtils.forceBracketNotation(inKey, key), inSource);
|
||||
@@ -295,7 +322,6 @@ export async function getRenderContext(
|
||||
}
|
||||
|
||||
const inKey = templating.NUNJUCKS_TEMPLATE_GLOBAL_PROPERTY_NAME;
|
||||
|
||||
// Get Keys from root environment
|
||||
getKeySource((rootEnvironment || {}).data, inKey, 'root');
|
||||
|
||||
@@ -308,6 +334,7 @@ export async function getRenderContext(
|
||||
if (ancestors) {
|
||||
for (let idx = 0; idx < ancestors.length; idx++) {
|
||||
const ancestor: any = ancestors[idx] || {};
|
||||
|
||||
if (
|
||||
isRequestGroup(ancestor) &&
|
||||
ancestor.hasOwnProperty('environment') &&
|
||||
@@ -319,7 +346,8 @@ export async function getRenderContext(
|
||||
}
|
||||
|
||||
// Add meta data helper function
|
||||
const baseContext = {};
|
||||
const baseContext: Record<string, any> = {};
|
||||
|
||||
baseContext.getMeta = () => ({
|
||||
requestId: request ? request._id : null,
|
||||
workspaceId: workspace ? workspace._id : 'n/a',
|
||||
@@ -330,6 +358,7 @@ export async function getRenderContext(
|
||||
});
|
||||
|
||||
baseContext.getPurpose = () => purpose;
|
||||
|
||||
baseContext.getExtraInfo = (key: string) => {
|
||||
if (!Array.isArray(extraInfo)) {
|
||||
return null;
|
||||
@@ -344,14 +373,13 @@ export async function getRenderContext(
|
||||
// Generate the context we need to render
|
||||
return buildRenderContext(ancestors, rootEnvironment, subEnvironment, baseContext);
|
||||
}
|
||||
|
||||
export async function getRenderedGrpcRequest(
|
||||
request: GrpcRequest,
|
||||
environmentId: string | null,
|
||||
purpose?: RenderPurpose,
|
||||
extraInfo?: ExtraRenderInfo,
|
||||
skipBody?: boolean,
|
||||
): Promise<{ request: RenderedGrpcRequest, context: Object }> {
|
||||
) {
|
||||
const renderContext = await getRenderContext(
|
||||
request,
|
||||
environmentId,
|
||||
@@ -359,24 +387,18 @@ export async function getRenderedGrpcRequest(
|
||||
purpose,
|
||||
extraInfo || null,
|
||||
);
|
||||
|
||||
const description = request.description;
|
||||
|
||||
// Render description separately because it's lower priority
|
||||
request.description = '';
|
||||
|
||||
// Ignore body by default and only include if specified to
|
||||
const ignorePathRegex = skipBody ? /^body.*/ : null;
|
||||
|
||||
// Render all request properties
|
||||
const renderedRequest: RenderedGrpcRequest = await render(
|
||||
request,
|
||||
renderContext,
|
||||
ignorePathRegex,
|
||||
);
|
||||
|
||||
renderedRequest.description = await render(description, renderContext, null, KEEP_ON_ERROR);
|
||||
|
||||
return renderedRequest;
|
||||
}
|
||||
|
||||
@@ -385,7 +407,7 @@ export async function getRenderedGrpcRequestMessage(
|
||||
environmentId: string | null,
|
||||
purpose?: RenderPurpose,
|
||||
extraInfo?: ExtraRenderInfo,
|
||||
): Promise<RenderedGrpcRequestBody> {
|
||||
) {
|
||||
const renderContext = await getRenderContext(
|
||||
request,
|
||||
environmentId,
|
||||
@@ -393,24 +415,20 @@ export async function getRenderedGrpcRequestMessage(
|
||||
purpose,
|
||||
extraInfo || null,
|
||||
);
|
||||
|
||||
// Render request body
|
||||
const renderedBody: RenderedGrpcRequestBody = await render(request.body, renderContext);
|
||||
|
||||
return renderedBody;
|
||||
}
|
||||
|
||||
export async function getRenderedRequestAndContext(
|
||||
request: Request,
|
||||
environmentId: string | null,
|
||||
purpose?: RenderPurpose,
|
||||
extraInfo?: ExtraRenderInfo,
|
||||
): Promise<{ request: RenderedRequest, context: Object }> {
|
||||
) {
|
||||
const ancestors = await _getRequestAncestors(request);
|
||||
const workspace = ancestors.find(doc => doc.type === models.workspace.type);
|
||||
const parentId = workspace ? workspace._id : 'n/a';
|
||||
const cookieJar = await models.cookieJar.getOrCreateForParentId(parentId);
|
||||
|
||||
const renderContext = await getRenderContext(
|
||||
request,
|
||||
environmentId,
|
||||
@@ -427,26 +445,25 @@ export async function getRenderedRequestAndContext(
|
||||
o.query = o.query.replace(/#}/g, '# }');
|
||||
request.body.text = JSON.stringify(o);
|
||||
}
|
||||
} catch (err) {}
|
||||
} catch (err) { }
|
||||
|
||||
// Render description separately because it's lower priority
|
||||
const description = request.description;
|
||||
request.description = '';
|
||||
|
||||
// Render all request properties
|
||||
const renderResult = await render(
|
||||
{ _request: request, _cookieJar: cookieJar },
|
||||
{
|
||||
_request: request,
|
||||
_cookieJar: cookieJar,
|
||||
},
|
||||
renderContext,
|
||||
request.settingDisableRenderRequestBody ? /^body.*/ : null,
|
||||
);
|
||||
|
||||
const renderedRequest = renderResult._request;
|
||||
const renderedCookieJar = renderResult._cookieJar;
|
||||
renderedRequest.description = await render(description, renderContext, null, KEEP_ON_ERROR);
|
||||
|
||||
// Remove disabled params
|
||||
renderedRequest.parameters = renderedRequest.parameters.filter(p => !p.disabled);
|
||||
|
||||
// Remove disabled headers
|
||||
renderedRequest.headers = renderedRequest.headers.filter(p => !p.disabled);
|
||||
|
||||
@@ -462,7 +479,6 @@ export async function getRenderedRequestAndContext(
|
||||
|
||||
// Default the proto if it doesn't exist
|
||||
renderedRequest.url = setDefaultProtocol(renderedRequest.url);
|
||||
|
||||
return {
|
||||
context: renderContext,
|
||||
request: {
|
||||
@@ -470,7 +486,6 @@ export async function getRenderedRequestAndContext(
|
||||
cookieJar: renderedCookieJar,
|
||||
cookies: [],
|
||||
isPrivate: false,
|
||||
|
||||
// NOTE: Flow doesn't like Object.assign, so we have to do each property manually
|
||||
// for now to convert Request to RenderedRequest.
|
||||
_id: renderedRequest._id,
|
||||
@@ -509,15 +524,17 @@ function _nunjucksSortValue(v) {
|
||||
return v && v.match && v.match(/({{|{%)/) ? 2 : 1;
|
||||
}
|
||||
|
||||
function _getOrderedEnvironmentKeys(finalRenderContext: Object): Array<string> {
|
||||
function _getOrderedEnvironmentKeys(finalRenderContext: Record<string, any>): Array<string> {
|
||||
return Object.keys(finalRenderContext).sort((k1, k2) => {
|
||||
const k1Sort = _nunjucksSortValue(finalRenderContext[k1]);
|
||||
|
||||
const k2Sort = _nunjucksSortValue(finalRenderContext[k2]);
|
||||
|
||||
return k1Sort - k2Sort;
|
||||
});
|
||||
}
|
||||
|
||||
async function _getRequestAncestors(request: Request | GrpcRequest): Promise<Array<BaseModel>> {
|
||||
async function _getRequestAncestors(request: Request | GrpcRequest | null): Promise<Array<BaseModel>> {
|
||||
return await db.withAncestors(request, [
|
||||
models.request.type,
|
||||
models.grpcRequest.type,
|
||||
@@ -1,52 +1,66 @@
|
||||
// @flow
|
||||
|
||||
import { remote } from 'electron';
|
||||
|
||||
type Options = {
|
||||
itemTypes?: Array<'file' | 'directory'>,
|
||||
extensions?: Array<string>,
|
||||
};
|
||||
|
||||
type FileSelection = {
|
||||
filePath: string,
|
||||
canceled: boolean,
|
||||
};
|
||||
import { OpenDialogOptions, remote } from 'electron';
|
||||
interface Options {
|
||||
itemTypes?: Array<'file' | 'directory'>;
|
||||
extensions?: Array<string>;
|
||||
}
|
||||
interface FileSelection {
|
||||
filePath: string;
|
||||
canceled: boolean;
|
||||
}
|
||||
|
||||
const selectFileOrFolder = async ({ itemTypes, extensions }: Options): Promise<FileSelection> => {
|
||||
// If no types are selected then default to just files and not directories
|
||||
const types = itemTypes || ['file'];
|
||||
let title = 'Select ';
|
||||
|
||||
if (types.includes('file')) {
|
||||
title += ' File';
|
||||
|
||||
if (types.length > 2) {
|
||||
title += ' or';
|
||||
}
|
||||
}
|
||||
|
||||
if (types.includes('directory')) {
|
||||
title += ' Directory';
|
||||
}
|
||||
const options = {
|
||||
|
||||
const options: OpenDialogOptions = {
|
||||
title: title,
|
||||
buttonLabel: 'Select',
|
||||
// @ts-expect-error -- TSCONVERSION we should update this to accept other properties types as well, which flow all the way up to plugins
|
||||
properties: types.map(type => {
|
||||
if (type === 'file') {
|
||||
return 'openFile';
|
||||
}
|
||||
|
||||
if (type === 'directory') {
|
||||
return 'openDirectory';
|
||||
}
|
||||
}),
|
||||
filters: [{ name: 'All Files', extensions: ['*'] }],
|
||||
filters: [
|
||||
{
|
||||
name: 'All Files',
|
||||
extensions: ['*'],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// If extensions are provided then filter for just those extensions
|
||||
if (extensions?.length) {
|
||||
options.filters = [{ name: 'Files', extensions: extensions }];
|
||||
options.filters = [
|
||||
{
|
||||
name: 'Files',
|
||||
extensions: extensions,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
const { canceled, filePaths } = await remote.dialog.showOpenDialog(options);
|
||||
|
||||
return { filePath: filePaths[0], canceled };
|
||||
return {
|
||||
filePath: filePaths[0],
|
||||
canceled,
|
||||
};
|
||||
};
|
||||
|
||||
export default selectFileOrFolder;
|
||||
@@ -1,28 +1,36 @@
|
||||
import * as db from './database';
|
||||
import { types as modelTypes, stats } from '../models';
|
||||
import { database as db } from './database';
|
||||
import { types as modelTypes, stats, BaseModel } from '../models';
|
||||
import { send } from '../network/network';
|
||||
import { getBodyBuffer } from '../models/response';
|
||||
import * as plugins from '../plugins';
|
||||
|
||||
export async function getSendRequestCallbackMemDb(environmentId, memDB) {
|
||||
// Initialize the DB in-memory and fill it with data if we're given one
|
||||
await db.init(modelTypes(), { inMemoryOnly: true }, true, () => {});
|
||||
await db.init(
|
||||
modelTypes(),
|
||||
{
|
||||
inMemoryOnly: true,
|
||||
},
|
||||
true,
|
||||
() => {},
|
||||
);
|
||||
const docs: Array<BaseModel> = [];
|
||||
|
||||
const docs = [];
|
||||
for (const type of Object.keys(memDB)) {
|
||||
for (const doc of memDB[type]) {
|
||||
docs.push(doc);
|
||||
}
|
||||
}
|
||||
|
||||
await db.batchModifyDocs({ upsert: docs, remove: [] });
|
||||
|
||||
await db.batchModifyDocs({
|
||||
upsert: docs,
|
||||
remove: [],
|
||||
});
|
||||
// Return callback helper to send requests
|
||||
return async function sendRequest(requestId) {
|
||||
return sendAndTransform(requestId, environmentId);
|
||||
};
|
||||
}
|
||||
|
||||
export function getSendRequestCallback(environmentId) {
|
||||
return async function sendRequest(requestId) {
|
||||
stats.incrementExecutedRequests();
|
||||
@@ -35,13 +43,13 @@ async function sendAndTransform(requestId, environmentId) {
|
||||
plugins.ignorePlugin('insomnia-plugin-kong-bundle');
|
||||
const res = await send(requestId, environmentId);
|
||||
const headersObj = {};
|
||||
|
||||
for (const h of res.headers || []) {
|
||||
const name = h.name || '';
|
||||
headersObj[name.toLowerCase()] = h.value || '';
|
||||
}
|
||||
|
||||
const bodyBuffer = await getBodyBuffer(res);
|
||||
|
||||
const bodyBuffer = await getBodyBuffer(res) as Buffer;
|
||||
return {
|
||||
status: res.statusCode,
|
||||
statusMessage: res.statusMessage,
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import {
|
||||
HTTP_METHODS,
|
||||
SortOrder,
|
||||
@@ -57,13 +56,17 @@ const httpMethodSort: SortFunction = (a, b) => {
|
||||
// Sort Requests by HTTP method
|
||||
if (isRequest(a)) {
|
||||
const aIndex = HTTP_METHODS.indexOf(a.method);
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
const bIndex = HTTP_METHODS.indexOf(b.method);
|
||||
|
||||
if (aIndex !== bIndex) {
|
||||
return aIndex < bIndex ? -1 : 1;
|
||||
}
|
||||
|
||||
// Sort by ascending method name if comparing two custom methods
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
if (aIndex === -1 && a.method.localeCompare(b.method) !== 0) {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
return a.method.localeCompare(b.method);
|
||||
}
|
||||
}
|
||||
@@ -105,7 +108,8 @@ export const descendingNumberSort = (a: number, b: number): number => {
|
||||
return ascendingNumberSort(b, a);
|
||||
};
|
||||
|
||||
export const sortMethodMap: { [SortOrder]: SortFunction } = {
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
export const sortMethodMap: Record<SortOrder, SortFunction> = {
|
||||
[SORT_NAME_ASC]: ascendingNameSort,
|
||||
[SORT_NAME_DESC]: descendingNameSort,
|
||||
[SORT_CREATED_ASC]: createdFirstSort,
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import { pluralize } from './misc';
|
||||
|
||||
export const strings = {
|
||||
26
packages/insomnia-app/app/global.d.ts
vendored
Normal file
26
packages/insomnia-app/app/global.d.ts
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
declare module '*.svg' {
|
||||
const content: any;
|
||||
export default content;
|
||||
}
|
||||
|
||||
declare module '*.png' {
|
||||
const content: any;
|
||||
export default content;
|
||||
}
|
||||
|
||||
declare const __DEV__: boolean;
|
||||
|
||||
declare namespace NodeJS {
|
||||
interface Global {
|
||||
__DEV__: boolean;
|
||||
}
|
||||
}
|
||||
|
||||
interface Window {
|
||||
__REDUX_DEVTOOLS_EXTENSION_COMPOSE__: Function;
|
||||
}
|
||||
|
||||
// needed for @hot-loader/react-dom in order for TypeScript to build
|
||||
declare const __REACT_DEVTOOLS_GLOBAL_HOOK__: undefined | {
|
||||
checkDCE: Function;
|
||||
};
|
||||
@@ -1,6 +1,5 @@
|
||||
// @flow
|
||||
import { checkIfRestartNeeded } from './main/squirrel-startup';
|
||||
import { appConfig } from '../config';
|
||||
import appConfig from '../config/config.json';
|
||||
import path from 'path';
|
||||
import * as electron from 'electron';
|
||||
import * as errorHandling from './main/error-handling';
|
||||
@@ -8,7 +7,7 @@ import * as updates from './main/updates';
|
||||
import * as grpcIpcMain from './main/grpc-ipc-main';
|
||||
import * as windowUtils from './main/window-utils';
|
||||
import * as models from './models/index';
|
||||
import * as database from './common/database';
|
||||
import { database } from './common/database';
|
||||
import { changelogUrl, getAppVersion, isDevelopment, isMac } from './common/constants';
|
||||
import type { ToastNotification } from './ui/components/toast';
|
||||
import type { Stats } from './models/stats';
|
||||
@@ -21,10 +20,8 @@ if (checkIfRestartNeeded()) {
|
||||
}
|
||||
|
||||
initializeLogging();
|
||||
|
||||
const { app, ipcMain, session } = electron;
|
||||
const commandLineArgs = process.argv.slice(1);
|
||||
|
||||
log.info(`Running version ${getAppVersion()}`);
|
||||
|
||||
// Explicitly set userData folder from config because it's sketchy to
|
||||
@@ -32,32 +29,27 @@ log.info(`Running version ${getAppVersion()}`);
|
||||
// by accident.
|
||||
if (!isDevelopment()) {
|
||||
const defaultPath = app.getPath('userData');
|
||||
const newPath = path.join(defaultPath, '../', appConfig().userDataFolder);
|
||||
const newPath = path.join(defaultPath, '../', appConfig.userDataFolder);
|
||||
app.setPath('userData', newPath);
|
||||
}
|
||||
|
||||
// So if (window) checks don't throw
|
||||
global.window = global.window || undefined;
|
||||
|
||||
// When the app is first launched
|
||||
app.on('ready', async () => {
|
||||
// Init some important things first
|
||||
await database.init(models.types());
|
||||
await _createModelInstances();
|
||||
|
||||
await errorHandling.init();
|
||||
await windowUtils.init();
|
||||
|
||||
// Init the app
|
||||
const updatedStats = await _trackStats();
|
||||
await _updateFlags(updatedStats);
|
||||
await _launchApp();
|
||||
|
||||
// Init the rest
|
||||
await updates.init();
|
||||
grpcIpcMain.init();
|
||||
});
|
||||
|
||||
// Set as default protocol
|
||||
app.setAsDefaultProtocolClient(`insomnia${isDevelopment() ? 'dev' : ''}`);
|
||||
|
||||
@@ -67,19 +59,16 @@ function _addUrlToOpen(e, url) {
|
||||
}
|
||||
|
||||
app.on('open-url', _addUrlToOpen);
|
||||
|
||||
// Enable this for CSS grid layout :)
|
||||
app.commandLine.appendSwitch('enable-experimental-web-platform-features');
|
||||
|
||||
// Quit when all windows are closed (except on Mac).
|
||||
app.on('window-all-closed', () => {
|
||||
if (!isMac()) {
|
||||
app.quit();
|
||||
}
|
||||
});
|
||||
|
||||
// Mac-only, when the user clicks the doc icon
|
||||
app.on('activate', (e, hasVisibleWindows) => {
|
||||
app.on('activate', (_error, hasVisibleWindows) => {
|
||||
// Create a new window when clicking the doc icon if there isn't one open
|
||||
if (!hasVisibleWindows) {
|
||||
try {
|
||||
@@ -95,12 +84,11 @@ app.on('activate', (e, hasVisibleWindows) => {
|
||||
function _launchApp() {
|
||||
app.removeListener('open-url', _addUrlToOpen);
|
||||
const window = windowUtils.createWindow();
|
||||
|
||||
// Handle URLs sent via command line args
|
||||
ipcMain.once('window-ready', () => {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
commandLineArgs.length && window.send('run-command', commandLineArgs[0]);
|
||||
});
|
||||
|
||||
// Called when second instance launched with args (Windows)
|
||||
const gotTheLock = app.requestSingleInstanceLock();
|
||||
|
||||
@@ -109,16 +97,16 @@ function _launchApp() {
|
||||
return;
|
||||
}
|
||||
|
||||
app.on('second-instance', (event, commandLine, workingDirectory) => {
|
||||
app.on('second-instance', () => {
|
||||
// Someone tried to run a second instance, we should focus our window.
|
||||
if (window) {
|
||||
if (window.isMinimized()) window.restore();
|
||||
window.focus();
|
||||
}
|
||||
});
|
||||
|
||||
// Handle URLs when app already open
|
||||
app.addListener('open-url', (e, url) => {
|
||||
app.addListener('open-url', (_error, url) => {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
window.send('run-command', url);
|
||||
// Apparently a timeout is needed because Chrome steals back focus immediately
|
||||
// after opening the URL.
|
||||
@@ -126,7 +114,6 @@ function _launchApp() {
|
||||
window.focus();
|
||||
}, 100);
|
||||
});
|
||||
|
||||
// Don't send origin header from Insomnia app because we're not technically using CORS
|
||||
session.defaultSession.webRequest.onBeforeSendHeaders((details, fn) => {
|
||||
delete details.requestHeaders.Origin;
|
||||
@@ -149,6 +136,7 @@ async function _createModelInstances() {
|
||||
|
||||
async function _updateFlags({ launches }: Stats) {
|
||||
const firstLaunch = launches === 1;
|
||||
|
||||
if (firstLaunch) {
|
||||
await models.settings.patch({
|
||||
hasPromptedOnboarding: false,
|
||||
@@ -158,7 +146,7 @@ async function _updateFlags({ launches }: Stats) {
|
||||
}
|
||||
}
|
||||
|
||||
async function _trackStats(): Promise<Stats> {
|
||||
async function _trackStats() {
|
||||
// Handle the stats
|
||||
const oldStats = await models.stats.get();
|
||||
const stats: Stats = await models.stats.update({
|
||||
@@ -168,7 +156,6 @@ async function _trackStats(): Promise<Stats> {
|
||||
lastVersion: oldStats.currentVersion,
|
||||
launches: oldStats.launches + 1,
|
||||
});
|
||||
|
||||
// Update Stats Object
|
||||
const firstLaunch = stats.launches === 1;
|
||||
const justUpdated = !firstLaunch && stats.currentVersion !== stats.lastVersion;
|
||||
@@ -183,6 +170,7 @@ async function _trackStats(): Promise<Stats> {
|
||||
|
||||
ipcMain.once('window-ready', () => {
|
||||
const { currentVersion } = stats;
|
||||
|
||||
if (!justUpdated || !currentVersion) {
|
||||
return;
|
||||
}
|
||||
@@ -194,14 +182,13 @@ async function _trackStats(): Promise<Stats> {
|
||||
cta: "See What's New",
|
||||
message: `Updated to ${currentVersion}`,
|
||||
};
|
||||
|
||||
// Wait a bit before showing the user because the app just launched.
|
||||
setTimeout(() => {
|
||||
for (const window of BrowserWindow.getAllWindows()) {
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
window.send('show-notification', notification);
|
||||
}
|
||||
}, 5000);
|
||||
});
|
||||
|
||||
return stats;
|
||||
}
|
||||
@@ -7,9 +7,10 @@ import { ResponseCallbacks } from '../../network/grpc/response-callbacks';
|
||||
jest.mock('../../network/grpc');
|
||||
|
||||
describe('grpcIpcMain', () => {
|
||||
const event = { reply: jest.fn() };
|
||||
const event = {
|
||||
reply: jest.fn(),
|
||||
};
|
||||
const id = 'abc';
|
||||
|
||||
beforeEach(() => {
|
||||
grpcIpcMain.init(); // ipcMain is mocked
|
||||
});
|
||||
@@ -20,11 +21,8 @@ describe('grpcIpcMain', () => {
|
||||
|
||||
it('should add expected listener for start', () => {
|
||||
const [channel, listener] = ipcMain.on.mock.calls[0];
|
||||
|
||||
expect(channel).toBe(GrpcRequestEventEnum.start);
|
||||
|
||||
const param = {};
|
||||
|
||||
// Execute the callback, and make sure the correct grpc method is called
|
||||
listener(event, param);
|
||||
expect(grpc.start).toHaveBeenCalledWith(param, expect.any(ResponseCallbacks));
|
||||
@@ -32,12 +30,9 @@ describe('grpcIpcMain', () => {
|
||||
|
||||
it('should add expected listener for sendMessage', () => {
|
||||
const [channel, listener] = ipcMain.on.mock.calls[1];
|
||||
|
||||
// Expect the sendUnary channel
|
||||
expect(channel).toBe(GrpcRequestEventEnum.sendMessage);
|
||||
|
||||
const param = {};
|
||||
|
||||
// Execute the callback, and make sure the correct grpc method is called
|
||||
listener(event, param);
|
||||
expect(grpc.sendMessage).toHaveBeenCalledWith(param, expect.any(ResponseCallbacks));
|
||||
@@ -45,9 +40,7 @@ describe('grpcIpcMain', () => {
|
||||
|
||||
it('should add expected listener for commit', () => {
|
||||
const [channel, listener] = ipcMain.on.mock.calls[2];
|
||||
|
||||
expect(channel).toBe(GrpcRequestEventEnum.commit);
|
||||
|
||||
// Execute the callback, and make sure the correct grpc method is called
|
||||
listener(event, id);
|
||||
expect(grpc.commit).toHaveBeenCalledWith(id);
|
||||
@@ -55,9 +48,7 @@ describe('grpcIpcMain', () => {
|
||||
|
||||
it('should add expected listener for cancel', () => {
|
||||
const [channel, listener] = ipcMain.on.mock.calls[3];
|
||||
|
||||
expect(channel).toBe(GrpcRequestEventEnum.cancel);
|
||||
|
||||
// Execute the callback, and make sure the correct grpc method is called
|
||||
listener(event, id);
|
||||
expect(grpc.cancel).toHaveBeenCalledWith(id);
|
||||
@@ -65,9 +56,7 @@ describe('grpcIpcMain', () => {
|
||||
|
||||
it('should add expected listener for cancel multiple', () => {
|
||||
const [channel, listener] = ipcMain.on.mock.calls[4];
|
||||
|
||||
expect(channel).toBe(GrpcRequestEventEnum.cancelMultiple);
|
||||
|
||||
// Execute the callback, and make sure the correct grpc method is called
|
||||
listener(event, id);
|
||||
expect(grpc.cancelMultiple).toHaveBeenCalledWith(id);
|
||||
@@ -2,8 +2,7 @@ export function init() {
|
||||
process.on('uncaughtException', err => {
|
||||
console.error('[catcher] Uncaught exception:', err.stack);
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', err => {
|
||||
process.on('unhandledRejection', (err: Error) => {
|
||||
console.error('[catcher] Unhandled rejection:', err.stack);
|
||||
});
|
||||
}
|
||||
@@ -1,16 +1,15 @@
|
||||
// @flow
|
||||
|
||||
import * as grpc from '../network/grpc';
|
||||
import { ipcMain } from 'electron';
|
||||
import { GrpcRequestEventEnum } from '../common/grpc-events';
|
||||
import { ResponseCallbacks } from '../network/grpc/response-callbacks';
|
||||
import type { GrpcIpcRequestParams } from '../ui/context/grpc/prepare';
|
||||
import { GrpcIpcRequestParams } from '../network/grpc/prepare';
|
||||
|
||||
export function init() {
|
||||
ipcMain.on(GrpcRequestEventEnum.start, (e, params: GrpcIpcRequestParams) =>
|
||||
grpc.start(params, new ResponseCallbacks(e)),
|
||||
);
|
||||
ipcMain.on(GrpcRequestEventEnum.sendMessage, (e, params: GrpcIpcRequestParams) =>
|
||||
// @ts-expect-error -- TSCONVERSION
|
||||
grpc.sendMessage(params, new ResponseCallbacks(e)),
|
||||
);
|
||||
ipcMain.on(GrpcRequestEventEnum.commit, (_, requestId) => grpc.commit(requestId));
|
||||
@@ -3,13 +3,13 @@ import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
class LocalStorage {
|
||||
constructor(basePath) {
|
||||
_buffer: Record<string, string> = {};
|
||||
_timeouts: Record<string, NodeJS.Timeout> = {};
|
||||
_basePath: string | null = null;
|
||||
|
||||
constructor(basePath: string) {
|
||||
this._basePath = basePath;
|
||||
|
||||
// Debounce writes on a per key basis
|
||||
this._timeouts = {};
|
||||
this._buffer = {};
|
||||
|
||||
mkdirp.sync(basePath);
|
||||
console.log(`[localstorage] Initialized at ${basePath}`);
|
||||
}
|
||||
@@ -27,8 +27,9 @@ class LocalStorage {
|
||||
let contents = JSON.stringify(defaultObj);
|
||||
|
||||
const path = this._getKeyPath(key);
|
||||
|
||||
try {
|
||||
contents = fs.readFileSync(path);
|
||||
contents = String(fs.readFileSync(path));
|
||||
} catch (e) {
|
||||
if (e.code === 'ENOENT') {
|
||||
this.setItem(key, defaultObj);
|
||||
@@ -52,6 +53,7 @@ class LocalStorage {
|
||||
|
||||
for (const key of keys) {
|
||||
const contents = this._buffer[key];
|
||||
|
||||
const path = this._getKeyPath(key);
|
||||
|
||||
delete this._buffer[key];
|
||||
@@ -65,6 +67,7 @@ class LocalStorage {
|
||||
}
|
||||
|
||||
_getKeyPath(key) {
|
||||
// @ts-expect-error -- TSCONVERSION this appears to be a genuine error
|
||||
return path.join(this._basePath, key);
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,9 @@ import { app } from 'electron';
|
||||
|
||||
function run(args, done) {
|
||||
const updateExe = path.resolve(path.dirname(process.execPath), '..', 'Update.exe');
|
||||
spawn(updateExe, args, { detached: true }).on('close', done);
|
||||
spawn(updateExe, args, {
|
||||
detached: true,
|
||||
}).on('close', done);
|
||||
}
|
||||
|
||||
export function checkIfRestartNeeded() {
|
||||
@@ -20,9 +22,11 @@ export function checkIfRestartNeeded() {
|
||||
case '--squirrel-install':
|
||||
run(['--createShortcut=' + target + ''], app.quit);
|
||||
return true;
|
||||
|
||||
case '--squirrel-uninstall':
|
||||
run(['--removeShortcut=' + target + ''], app.quit);
|
||||
return true;
|
||||
|
||||
case '--squirrel-updated':
|
||||
case '--squirrel-obsolete':
|
||||
app.quit();
|
||||
@@ -1,4 +1,3 @@
|
||||
// @flow
|
||||
import electron from 'electron';
|
||||
import {
|
||||
CHECK_FOR_UPDATES_INTERVAL,
|
||||
@@ -12,13 +11,12 @@ import {
|
||||
import * as models from '../models/index';
|
||||
import { buildQueryStringFromParams, joinUrlAndQueryString } from 'insomnia-url';
|
||||
import { delay } from '../common/misc';
|
||||
|
||||
const { autoUpdater, BrowserWindow, ipcMain } = electron;
|
||||
|
||||
async function getUpdateUrl(force: boolean): Promise<string | null> {
|
||||
const platform = process.platform;
|
||||
const settings = await models.settings.getOrCreate();
|
||||
let updateUrl = null;
|
||||
let updateUrl: string | null = null;
|
||||
|
||||
if (!updatesSupported()) {
|
||||
return null;
|
||||
@@ -33,11 +31,19 @@ async function getUpdateUrl(force: boolean): Promise<string | null> {
|
||||
}
|
||||
|
||||
const params = [
|
||||
{ name: 'v', value: getAppVersion() },
|
||||
{ name: 'app', value: getAppId() },
|
||||
{ name: 'channel', value: settings.updateChannel },
|
||||
{
|
||||
name: 'v',
|
||||
value: getAppVersion(),
|
||||
},
|
||||
{
|
||||
name: 'app',
|
||||
value: getAppId(),
|
||||
},
|
||||
{
|
||||
name: 'channel',
|
||||
value: settings.updateChannel,
|
||||
},
|
||||
];
|
||||
|
||||
const qs = buildQueryStringFromParams(params);
|
||||
const fullUrl = joinUrlAndQueryString(updateUrl, qs);
|
||||
console.log(`[updater] Using url ${fullUrl}`);
|
||||
@@ -60,50 +66,51 @@ async function getUpdateUrl(force: boolean): Promise<string | null> {
|
||||
|
||||
function _sendUpdateStatus(status) {
|
||||
const windows = BrowserWindow.getAllWindows();
|
||||
for (const w of windows) {
|
||||
w.send('updater.check.status', status);
|
||||
|
||||
for (const window of windows) {
|
||||
// @ts-expect-error -- TSCONVERSION seems to be a genuine error
|
||||
window.send('updater.check.status', status);
|
||||
}
|
||||
}
|
||||
|
||||
function _sendUpdateComplete(success: boolean, msg: string) {
|
||||
const windows = BrowserWindow.getAllWindows();
|
||||
for (const w of windows) {
|
||||
w.send('updater.check.complete', success, msg);
|
||||
|
||||
for (const window of windows) {
|
||||
// @ts-expect-error -- TSCONVERSION seems to be a genuine error
|
||||
window.send('updater.check.complete', success, msg);
|
||||
}
|
||||
}
|
||||
|
||||
let hasPromptedForUpdates = false;
|
||||
|
||||
export async function init() {
|
||||
autoUpdater.on('error', e => {
|
||||
console.warn(`[updater] Error: ${e.message}`);
|
||||
});
|
||||
|
||||
autoUpdater.on('update-not-available', () => {
|
||||
console.log('[updater] Not Available');
|
||||
|
||||
_sendUpdateComplete(false, 'Up to Date');
|
||||
});
|
||||
|
||||
autoUpdater.on('update-available', () => {
|
||||
console.log('[updater] Update Available');
|
||||
|
||||
_sendUpdateStatus('Downloading...');
|
||||
});
|
||||
|
||||
autoUpdater.on('update-downloaded', (e, releaseNotes, releaseName, releaseDate, updateUrl) => {
|
||||
autoUpdater.on('update-downloaded', (_error, _releaseNotes, releaseName) => {
|
||||
console.log(`[updater] Downloaded ${releaseName}`);
|
||||
|
||||
_sendUpdateComplete(true, 'Updated (Restart Required)');
|
||||
|
||||
_showUpdateNotification();
|
||||
});
|
||||
|
||||
ipcMain.on('updater.check', async e => {
|
||||
ipcMain.on('updater.check', async () => {
|
||||
await _checkForUpdates(true);
|
||||
});
|
||||
|
||||
// Check for updates on an interval
|
||||
setInterval(async () => {
|
||||
await _checkForUpdates(false);
|
||||
}, CHECK_FOR_UPDATES_INTERVAL);
|
||||
|
||||
// Check for updates immediately
|
||||
await _checkForUpdates(false);
|
||||
}
|
||||
@@ -114,6 +121,7 @@ function _showUpdateNotification() {
|
||||
}
|
||||
|
||||
const windows = BrowserWindow.getAllWindows();
|
||||
|
||||
if (windows.length && windows[0].webContents) {
|
||||
windows[0].webContents.send('update-available');
|
||||
}
|
||||
@@ -123,6 +131,7 @@ function _showUpdateNotification() {
|
||||
|
||||
async function _checkForUpdates(force: boolean) {
|
||||
_sendUpdateStatus('Checking');
|
||||
|
||||
await delay(500);
|
||||
|
||||
if (force) {
|
||||
@@ -140,16 +149,20 @@ async function _checkForUpdates(force: boolean) {
|
||||
console.log(
|
||||
`[updater] Updater not running platform=${process.platform} dev=${isDevelopment()}`,
|
||||
);
|
||||
|
||||
_sendUpdateComplete(false, 'Updates Not Supported');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(`[updater] Checking for updates url=${updateUrl}`);
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
autoUpdater.setFeedURL(updateUrl);
|
||||
autoUpdater.checkForUpdates();
|
||||
} catch (err) {
|
||||
console.warn('[updater] Failed to check for updates:', err.message);
|
||||
|
||||
_sendUpdateComplete(false, 'Update Error');
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import electron from 'electron';
|
||||
import electron, { BrowserWindow } from 'electron';
|
||||
import path from 'path';
|
||||
import { Curl } from 'node-libcurl';
|
||||
import fs from 'fs';
|
||||
@@ -14,25 +14,34 @@ import {
|
||||
isMac,
|
||||
MNEMONIC_SYM,
|
||||
} from '../common/constants';
|
||||
import * as misc from '../common/misc';
|
||||
import { clickLink, getDataDirectory } from '../common/electron-helpers';
|
||||
import * as log from '../common/log';
|
||||
import * as os from 'os';
|
||||
import { docsBase } from '../common/documentation';
|
||||
|
||||
const { app, Menu, BrowserWindow, shell, dialog, clipboard } = electron;
|
||||
|
||||
const { app, Menu, shell, dialog, clipboard } = electron;
|
||||
// So we can use native modules in renderer
|
||||
// NOTE: This will be deprecated in Electron 10 and impossible in 11
|
||||
// https://github.com/electron/electron/issues/18397
|
||||
app.allowRendererProcessReuse = false;
|
||||
|
||||
// Note: this hack is required because MenuItemConstructorOptions is not exported from the electron types as of 9.3.5
|
||||
type MenuItemConstructorOptions = Parameters<typeof Menu.buildFromTemplate>[0][0];
|
||||
|
||||
const DEFAULT_WIDTH = 1280;
|
||||
const DEFAULT_HEIGHT = 700;
|
||||
const MINIMUM_WIDTH = 500;
|
||||
const MINIMUM_HEIGHT = 400;
|
||||
|
||||
let mainWindow = null;
|
||||
let localStorage = null;
|
||||
let mainWindow: BrowserWindow | null = null;
|
||||
let localStorage: LocalStorage | null = null;
|
||||
|
||||
interface Bounds {
|
||||
height?: number;
|
||||
width?: number;
|
||||
x?: number;
|
||||
y?: number;
|
||||
}
|
||||
|
||||
export function init() {
|
||||
initLocalStorage();
|
||||
@@ -43,14 +52,19 @@ export function createWindow() {
|
||||
const zoomFactor = getZoomFactor();
|
||||
const { bounds, fullscreen, maximize } = getBounds();
|
||||
const { x, y, width, height } = bounds;
|
||||
const appLogo = 'static/insomnia-core-logo_16x.png';
|
||||
|
||||
const appLogo = 'static/insomnia-core-logo_16x.png';
|
||||
let isVisibleOnAnyDisplay = true;
|
||||
|
||||
for (const d of electron.screen.getAllDisplays()) {
|
||||
const isVisibleOnDisplay =
|
||||
// @ts-expect-error -- TSCONVERSION genuine error
|
||||
x >= d.bounds.x &&
|
||||
// @ts-expect-error -- TSCONVERSION genuine error
|
||||
y >= d.bounds.y &&
|
||||
// @ts-expect-error -- TSCONVERSION genuine error
|
||||
x + width <= d.bounds.x + d.bounds.width &&
|
||||
// @ts-expect-error -- TSCONVERSION genuine error
|
||||
y + height <= d.bounds.y + d.bounds.height;
|
||||
|
||||
if (!isVisibleOnDisplay) {
|
||||
@@ -62,7 +76,6 @@ export function createWindow() {
|
||||
// Make sure we don't initialize the window outside the bounds
|
||||
x: isVisibleOnAnyDisplay ? x : undefined,
|
||||
y: isVisibleOnAnyDisplay ? y : undefined,
|
||||
|
||||
// Other options
|
||||
backgroundColor: '#2C2C2C',
|
||||
fullscreen: fullscreen,
|
||||
@@ -84,23 +97,18 @@ export function createWindow() {
|
||||
|
||||
// BrowserWindow doesn't have an option for this, so we have to do it manually :(
|
||||
if (maximize) {
|
||||
mainWindow.maximize();
|
||||
mainWindow?.maximize();
|
||||
}
|
||||
|
||||
mainWindow.on('resize', e => saveBounds());
|
||||
|
||||
mainWindow.on('maximize', e => saveBounds());
|
||||
|
||||
mainWindow.on('unmaximize', e => saveBounds());
|
||||
|
||||
mainWindow.on('move', e => saveBounds());
|
||||
|
||||
mainWindow.on('unresponsive', e => {
|
||||
mainWindow?.on('resize', () => saveBounds());
|
||||
mainWindow?.on('maximize', () => saveBounds());
|
||||
mainWindow?.on('unmaximize', () => saveBounds());
|
||||
mainWindow?.on('move', () => saveBounds());
|
||||
mainWindow?.on('unresponsive', () => {
|
||||
showUnresponsiveModal();
|
||||
});
|
||||
|
||||
// Open generic links (<a .../>) in default browser
|
||||
mainWindow.webContents.on('will-navigate', (e, url) => {
|
||||
mainWindow?.webContents.on('will-navigate', (e, url) => {
|
||||
if (url === appUrl) {
|
||||
return;
|
||||
}
|
||||
@@ -109,76 +117,121 @@ export function createWindow() {
|
||||
e.preventDefault();
|
||||
electron.shell.openExternal(url);
|
||||
});
|
||||
|
||||
// Load the html of the app.
|
||||
const url = process.env.APP_RENDER_URL;
|
||||
const appUrl = url || `file://${app.getAppPath()}/renderer.html`;
|
||||
console.log(`[main] Loading ${appUrl}`);
|
||||
mainWindow.loadURL(appUrl);
|
||||
|
||||
mainWindow?.loadURL(appUrl);
|
||||
// Emitted when the window is closed.
|
||||
mainWindow.on('closed', () => {
|
||||
mainWindow?.on('closed', () => {
|
||||
// Dereference the window object, usually you would store windows
|
||||
// in an array if your app supports multi windows, this is the time
|
||||
// when you should delete the corresponding element.
|
||||
mainWindow = null;
|
||||
});
|
||||
|
||||
const applicationMenu = {
|
||||
const applicationMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Application`,
|
||||
submenu: [
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Preferences`,
|
||||
click: function(menuItem, window, e) {
|
||||
click: function(_menuItem, window) {
|
||||
if (!window || !window.webContents) {
|
||||
return;
|
||||
}
|
||||
|
||||
window.webContents.send('toggle-preferences');
|
||||
},
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Changelog`,
|
||||
click: function(menuItem, window, e) {
|
||||
click: function(_menuItem, window) {
|
||||
if (!window || !window.webContents) {
|
||||
return;
|
||||
}
|
||||
misc.clickLink(changelogUrl());
|
||||
|
||||
clickLink(changelogUrl());
|
||||
},
|
||||
},
|
||||
{ type: 'separator' },
|
||||
{ role: 'hide' },
|
||||
{ role: 'hideothers' },
|
||||
{ type: 'separator' },
|
||||
{ label: `${MNEMONIC_SYM}Quit`, accelerator: 'CmdOrCtrl+Q', click: () => app.quit() },
|
||||
{
|
||||
type: 'separator',
|
||||
},
|
||||
{
|
||||
role: 'hide',
|
||||
},
|
||||
{
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
role: 'hideothers',
|
||||
},
|
||||
{
|
||||
type: 'separator',
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Quit`,
|
||||
accelerator: 'CmdOrCtrl+Q',
|
||||
click: () => app.quit(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const editMenu = {
|
||||
const editMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Edit`,
|
||||
submenu: [
|
||||
{ label: `${MNEMONIC_SYM}Undo`, accelerator: 'CmdOrCtrl+Z', selector: 'undo:' },
|
||||
{ label: `${MNEMONIC_SYM}Redo`, accelerator: 'Shift+CmdOrCtrl+Z', selector: 'redo:' },
|
||||
{ type: 'separator' },
|
||||
{ label: `Cu${MNEMONIC_SYM}t`, accelerator: 'CmdOrCtrl+X', selector: 'cut:' },
|
||||
{ label: `${MNEMONIC_SYM}Copy`, accelerator: 'CmdOrCtrl+C', selector: 'copy:' },
|
||||
{ label: `${MNEMONIC_SYM}Paste`, accelerator: 'CmdOrCtrl+V', selector: 'paste:' },
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Undo`,
|
||||
accelerator: 'CmdOrCtrl+Z',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'undo:',
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Redo`,
|
||||
accelerator: 'Shift+CmdOrCtrl+Z',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'redo:',
|
||||
},
|
||||
{
|
||||
type: 'separator',
|
||||
},
|
||||
{
|
||||
label: `Cu${MNEMONIC_SYM}t`,
|
||||
accelerator: 'CmdOrCtrl+X',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'cut:',
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Copy`,
|
||||
accelerator: 'CmdOrCtrl+C',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'copy:',
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Paste`,
|
||||
accelerator: 'CmdOrCtrl+V',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'paste:',
|
||||
},
|
||||
{
|
||||
label: `Select ${MNEMONIC_SYM}All`,
|
||||
accelerator: 'CmdOrCtrl+A',
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
selector: 'selectAll:',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const viewMenu = {
|
||||
const viewMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}View`,
|
||||
submenu: [
|
||||
{ label: `Toggle ${MNEMONIC_SYM}Full Screen`, role: 'togglefullscreen' },
|
||||
{
|
||||
label: `Toggle ${MNEMONIC_SYM}Full Screen`,
|
||||
role: 'togglefullscreen',
|
||||
},
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Actual Size`,
|
||||
accelerator: 'CmdOrCtrl+0',
|
||||
click: () => {
|
||||
const w = BrowserWindow.getFocusedWindow();
|
||||
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
@@ -193,13 +246,13 @@ export function createWindow() {
|
||||
accelerator: 'CmdOrCtrl+=',
|
||||
click: () => {
|
||||
const w = BrowserWindow.getFocusedWindow();
|
||||
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
|
||||
const zoomFactor = Math.min(1.8, getZoomFactor() + 0.05);
|
||||
w.webContents.setZoomFactor(zoomFactor);
|
||||
|
||||
saveZoomFactor(zoomFactor);
|
||||
},
|
||||
},
|
||||
@@ -208,6 +261,7 @@ export function createWindow() {
|
||||
accelerator: 'CmdOrCtrl+-',
|
||||
click: () => {
|
||||
const w = BrowserWindow.getFocusedWindow();
|
||||
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
@@ -221,6 +275,7 @@ export function createWindow() {
|
||||
label: 'Toggle Sidebar',
|
||||
click: () => {
|
||||
const w = BrowserWindow.getFocusedWindow();
|
||||
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
@@ -231,27 +286,39 @@ export function createWindow() {
|
||||
{
|
||||
label: `Toggle ${MNEMONIC_SYM}DevTools`,
|
||||
accelerator: 'Alt+CmdOrCtrl+I',
|
||||
click: () => mainWindow.toggleDevTools(),
|
||||
// @ts-expect-error -- TSCONVERSION needs global module augmentation
|
||||
click: () => mainWindow?.toggleDevTools(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const windowMenu = {
|
||||
const windowMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Window`,
|
||||
role: 'window',
|
||||
submenu: [
|
||||
{ label: `${MNEMONIC_SYM}Minimize`, role: 'minimize' },
|
||||
...(isMac() ? [{ label: `${MNEMONIC_SYM}Close`, role: 'close' }] : []),
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Minimize`,
|
||||
role: 'minimize',
|
||||
},
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
...(isMac() ? [
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Close`,
|
||||
role: 'close',
|
||||
},
|
||||
]
|
||||
: []),
|
||||
],
|
||||
};
|
||||
|
||||
const helpMenu = {
|
||||
const helpMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Help`,
|
||||
role: 'help',
|
||||
id: 'help',
|
||||
submenu: [
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Help and Support`,
|
||||
// @ts-expect-error -- TSCONVERSION TSCONVERSION `Accelerator` type from electron is needed here as a cast but is not exported as of the 9.3.5 types
|
||||
accelerator: !isMac() ? 'F1' : null,
|
||||
click: () => {
|
||||
shell.openExternal(docsBase);
|
||||
@@ -260,30 +327,31 @@ export function createWindow() {
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Keyboard Shortcuts`,
|
||||
accelerator: 'CmdOrCtrl+Shift+?',
|
||||
click: (menuItem, w, e) => {
|
||||
click: (_menuItem, w) => {
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
|
||||
w.webContents.send('toggle-preferences-shortcuts');
|
||||
},
|
||||
},
|
||||
{
|
||||
label: `Show App ${MNEMONIC_SYM}Data Folder`,
|
||||
click: (menuItem, w, e) => {
|
||||
const directory = misc.getDataDirectory();
|
||||
click: () => {
|
||||
const directory = getDataDirectory();
|
||||
shell.showItemInFolder(directory);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: `Show App ${MNEMONIC_SYM}Logs Folder`,
|
||||
click: (menuItem, w, e) => {
|
||||
click: () => {
|
||||
const directory = log.getLogDirectory();
|
||||
shell.showItemInFolder(directory);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Show Open Source Licenses',
|
||||
click: (menuItem, w, e) => {
|
||||
click: () => {
|
||||
const licensePath = path.resolve(app.getAppPath(), '../opensource-licenses.txt');
|
||||
shell.openPath(licensePath);
|
||||
},
|
||||
@@ -300,9 +368,7 @@ export function createWindow() {
|
||||
const aboutMenuClickHandler = async () => {
|
||||
const copy = 'Copy';
|
||||
const ok = 'OK';
|
||||
|
||||
const buttons = isLinux() ? [copy, ok] : [ok, copy];
|
||||
|
||||
const detail = [
|
||||
`Version: ${getAppLongName()} ${getAppVersion()}`,
|
||||
`Release date: ${getAppReleaseDate()}`,
|
||||
@@ -331,33 +397,38 @@ export function createWindow() {
|
||||
};
|
||||
|
||||
if (isMac()) {
|
||||
applicationMenu.submenu.unshift(
|
||||
// @ts-expect-error -- TSCONVERSION type splitting
|
||||
applicationMenu.submenu?.unshift(
|
||||
{
|
||||
label: `A${MNEMONIC_SYM}bout ${getAppName()}`,
|
||||
click: aboutMenuClickHandler,
|
||||
},
|
||||
{ type: 'separator' },
|
||||
{
|
||||
type: 'separator',
|
||||
},
|
||||
);
|
||||
} else {
|
||||
helpMenu.submenu.push({
|
||||
// @ts-expect-error -- TSCONVERSION type splitting
|
||||
helpMenu.submenu?.push({
|
||||
label: `${MNEMONIC_SYM}About`,
|
||||
click: aboutMenuClickHandler,
|
||||
});
|
||||
}
|
||||
|
||||
const developerMenu = {
|
||||
const developerMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Developer`,
|
||||
// @ts-expect-error -- TSCONVERSION missing in official electron types
|
||||
position: 'before=help',
|
||||
submenu: [
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Reload`,
|
||||
accelerator: 'Shift+F5',
|
||||
click: () => mainWindow.reload(),
|
||||
click: () => mainWindow?.reload(),
|
||||
},
|
||||
{
|
||||
label: `Resize to Defaul${MNEMONIC_SYM}t`,
|
||||
click: () =>
|
||||
mainWindow.setBounds({
|
||||
mainWindow?.setBounds({
|
||||
x: 100,
|
||||
y: 100,
|
||||
width: DEFAULT_WIDTH,
|
||||
@@ -367,7 +438,8 @@ export function createWindow() {
|
||||
{
|
||||
label: `Take ${MNEMONIC_SYM}Screenshot`,
|
||||
click: function() {
|
||||
mainWindow.capturePage(image => {
|
||||
// @ts-expect-error -- TSCONVERSION not accounted for in the electron types to provide a function
|
||||
mainWindow?.capturePage(image => {
|
||||
const buffer = image.toPNG();
|
||||
const dir = app.getPath('desktop');
|
||||
fs.writeFileSync(path.join(dir, `Screenshot-${new Date()}.png`), buffer);
|
||||
@@ -384,14 +456,14 @@ export function createWindow() {
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const toolsMenu = {
|
||||
const toolsMenu: MenuItemConstructorOptions = {
|
||||
label: `${MNEMONIC_SYM}Tools`,
|
||||
submenu: [
|
||||
{
|
||||
label: `${MNEMONIC_SYM}Reload Plugins`,
|
||||
click: () => {
|
||||
const w = BrowserWindow.getFocusedWindow();
|
||||
|
||||
if (!w || !w.webContents) {
|
||||
return;
|
||||
}
|
||||
@@ -401,9 +473,7 @@ export function createWindow() {
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const template = [];
|
||||
|
||||
const template: Array<MenuItemConstructorOptions> = [];
|
||||
template.push(applicationMenu);
|
||||
template.push(editMenu);
|
||||
template.push(viewMenu);
|
||||
@@ -416,7 +486,6 @@ export function createWindow() {
|
||||
}
|
||||
|
||||
Menu.setApplicationMenu(Menu.buildFromTemplate(template));
|
||||
|
||||
return mainWindow;
|
||||
}
|
||||
|
||||
@@ -430,8 +499,9 @@ async function showUnresponsiveModal() {
|
||||
message: 'Insomnia has become unresponsive. Do you want to reload?',
|
||||
});
|
||||
|
||||
// @ts-expect-error -- TSCONVERSION appears to be a genuine error
|
||||
if (id === 1) {
|
||||
mainWindow.destroy();
|
||||
mainWindow?.destroy();
|
||||
createWindow();
|
||||
}
|
||||
}
|
||||
@@ -441,42 +511,48 @@ function saveBounds() {
|
||||
return;
|
||||
}
|
||||
|
||||
const fullscreen = mainWindow.isFullScreen();
|
||||
const fullscreen = mainWindow?.isFullScreen();
|
||||
|
||||
// Only save the size if we're not in fullscreen
|
||||
if (!fullscreen) {
|
||||
localStorage.setItem('bounds', mainWindow.getBounds());
|
||||
localStorage.setItem('maximize', mainWindow.isMaximized());
|
||||
localStorage.setItem('fullscreen', false);
|
||||
localStorage?.setItem('bounds', mainWindow?.getBounds());
|
||||
localStorage?.setItem('maximize', mainWindow?.isMaximized());
|
||||
localStorage?.setItem('fullscreen', false);
|
||||
} else {
|
||||
localStorage.setItem('fullscreen', true);
|
||||
localStorage?.setItem('fullscreen', true);
|
||||
}
|
||||
}
|
||||
|
||||
function getBounds() {
|
||||
let bounds = {};
|
||||
let bounds: Bounds = {};
|
||||
let fullscreen = false;
|
||||
let maximize = false;
|
||||
|
||||
try {
|
||||
bounds = localStorage.getItem('bounds', {});
|
||||
fullscreen = localStorage.getItem('fullscreen', false);
|
||||
maximize = localStorage.getItem('maximize', false);
|
||||
bounds = localStorage?.getItem('bounds', {});
|
||||
fullscreen = localStorage?.getItem('fullscreen', false);
|
||||
maximize = localStorage?.getItem('maximize', false);
|
||||
} catch (e) {
|
||||
// This should never happen, but if it does...!
|
||||
console.error('Failed to parse window bounds', e);
|
||||
}
|
||||
|
||||
return { bounds, fullscreen, maximize };
|
||||
return {
|
||||
bounds,
|
||||
fullscreen,
|
||||
maximize,
|
||||
};
|
||||
}
|
||||
|
||||
function saveZoomFactor(zoomFactor) {
|
||||
localStorage.setItem('zoomFactor', zoomFactor);
|
||||
localStorage?.setItem('zoomFactor', zoomFactor);
|
||||
}
|
||||
|
||||
function getZoomFactor() {
|
||||
let zoomFactor = 1;
|
||||
|
||||
try {
|
||||
zoomFactor = localStorage.getItem('zoomFactor', 1);
|
||||
zoomFactor = localStorage?.getItem('zoomFactor', 1);
|
||||
} catch (e) {
|
||||
// This should never happen, but if it does...!
|
||||
console.error('Failed to parse zoomFactor', e);
|
||||
@@ -486,10 +562,11 @@ function getZoomFactor() {
|
||||
}
|
||||
|
||||
function initLocalStorage() {
|
||||
const localStoragePath = path.join(misc.getDataDirectory(), 'localStorage');
|
||||
const localStoragePath = path.join(getDataDirectory(), 'localStorage');
|
||||
localStorage = new LocalStorage(localStoragePath);
|
||||
}
|
||||
|
||||
function initContextMenus() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
require('electron-context-menu')({});
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
let v1Counter = 0;
|
||||
|
||||
let v4Counter = 0;
|
||||
|
||||
const v1UUIDs = [
|
||||
@@ -210,6 +211,7 @@ function v4() {
|
||||
return uuid;
|
||||
}
|
||||
|
||||
// WARNING: changing this to `export default` will break the mock and be incredibly hard to debug. Ask me how I know.
|
||||
module.exports = () => v4();
|
||||
module.exports.v4 = () => v4();
|
||||
module.exports.v1 = () => v1();
|
||||
@@ -3,6 +3,7 @@ import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
|
||||
describe('init()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('contains all required fields', async () => {
|
||||
expect(models.grpcRequestMeta.init()).toEqual({
|
||||
pinned: false,
|
||||
@@ -13,9 +14,9 @@ describe('init()', () => {
|
||||
|
||||
describe('create()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('creates a valid GrpcRequest', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
|
||||
const request = await models.grpcRequestMeta.create({
|
||||
pinned: true,
|
||||
parentId: 'greq_124',
|
||||
@@ -29,14 +30,12 @@ describe('create()', () => {
|
||||
type: 'GrpcRequestMeta',
|
||||
lastActive: 0,
|
||||
};
|
||||
|
||||
expect(request).toEqual(expected);
|
||||
expect(await models.grpcRequestMeta.getOrCreateByParentId(expected.parentId)).toEqual(expected);
|
||||
});
|
||||
|
||||
it('creates a valid GrpcRequestMeta if it does not exist', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
|
||||
const request = await models.grpcRequestMeta.getOrCreateByParentId('greq_124');
|
||||
const expected = {
|
||||
_id: 'greqm_dd2ccc1a2745477a881a9e8ef9d42403',
|
||||
@@ -47,19 +46,22 @@ describe('create()', () => {
|
||||
type: 'GrpcRequestMeta',
|
||||
lastActive: 0,
|
||||
};
|
||||
|
||||
expect(request).toEqual(expected);
|
||||
});
|
||||
|
||||
it('fails when missing parentId', async () => {
|
||||
expect(() => models.grpcRequestMeta.create({ pinned: true })).toThrow(
|
||||
'New GrpcRequestMeta missing `parentId`',
|
||||
);
|
||||
expect(() =>
|
||||
models.grpcRequestMeta.create({
|
||||
pinned: true,
|
||||
}),
|
||||
).toThrow('New GrpcRequestMeta missing `parentId`');
|
||||
});
|
||||
|
||||
it('fails when parentId prefix is not that of a GrpcRequest', async () => {
|
||||
expect(() => models.grpcRequestMeta.create({ parentId: 'req_123' })).toThrow(
|
||||
'Expected the parent of GrpcRequestMeta to be a GrpcRequest',
|
||||
);
|
||||
expect(() =>
|
||||
models.grpcRequestMeta.create({
|
||||
parentId: 'req_123',
|
||||
}),
|
||||
).toThrow('Expected the parent of GrpcRequestMeta to be a GrpcRequest');
|
||||
});
|
||||
});
|
||||
@@ -3,9 +3,9 @@ import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
|
||||
describe('init()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('contains all required fields', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
|
||||
expect(models.grpcRequest.init()).toEqual({
|
||||
url: '',
|
||||
name: 'New gRPC Request',
|
||||
@@ -16,16 +16,16 @@ describe('init()', () => {
|
||||
text: '{}',
|
||||
},
|
||||
metaSortKey: -1478795580200,
|
||||
idPrivate: false,
|
||||
isPrivate: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('create()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('creates a valid GrpcRequest', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
|
||||
const request = await models.grpcRequest.create({
|
||||
name: 'My request',
|
||||
parentId: 'fld_124',
|
||||
@@ -44,18 +44,19 @@ describe('create()', () => {
|
||||
text: '{}',
|
||||
},
|
||||
metaSortKey: -1478795580200,
|
||||
idPrivate: false,
|
||||
isPrivate: false,
|
||||
type: 'GrpcRequest',
|
||||
};
|
||||
|
||||
expect(request).toEqual(expected);
|
||||
expect(await models.grpcRequest.getById(expected._id)).toEqual(expected);
|
||||
});
|
||||
|
||||
it('fails when missing parentId', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
expect(() => models.grpcRequest.create({ name: 'no parentId' })).toThrow(
|
||||
'New GrpcRequest missing `parentId`',
|
||||
);
|
||||
expect(() =>
|
||||
models.grpcRequest.create({
|
||||
name: 'no parentId',
|
||||
}),
|
||||
).toThrow('New GrpcRequest missing `parentId`');
|
||||
});
|
||||
});
|
||||
@@ -1,12 +1,9 @@
|
||||
// @flow
|
||||
|
||||
import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
import { getModel, mustGetModel } from '../index';
|
||||
import * as models from '../index';
|
||||
|
||||
describe('index', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
describe('getModel()', () => {
|
||||
it('should get model if found', () => {
|
||||
expect(getModel(models.workspace.type)).not.toBeNull();
|
||||
@@ -24,6 +21,7 @@ describe('index', () => {
|
||||
|
||||
it('should return null if model not found', () => {
|
||||
const func = () => mustGetModel('UNKNOWN');
|
||||
|
||||
expect(func).toThrowError('The model type UNKNOWN must exist but could not be found.');
|
||||
});
|
||||
});
|
||||
@@ -3,6 +3,7 @@ import { globalBeforeEach } from '../../__jest__/before-each';
|
||||
|
||||
describe('init()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('contains all required fields', async () => {
|
||||
expect(models.protoFile.init()).toEqual({
|
||||
name: 'New Proto File',
|
||||
@@ -13,9 +14,9 @@ describe('init()', () => {
|
||||
|
||||
describe('create()', () => {
|
||||
beforeEach(globalBeforeEach);
|
||||
|
||||
it('creates a valid protofile', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
|
||||
const request = await models.protoFile.create({
|
||||
name: 'My File',
|
||||
parentId: 'fld_124',
|
||||
@@ -30,15 +31,16 @@ describe('create()', () => {
|
||||
name: 'My File',
|
||||
protoText: 'some proto text',
|
||||
};
|
||||
|
||||
expect(request).toEqual(expected);
|
||||
expect(await models.protoFile.getById(expected._id)).toEqual(expected);
|
||||
});
|
||||
|
||||
it('fails when missing parentId', async () => {
|
||||
Date.now = jest.fn().mockReturnValue(1478795580200);
|
||||
expect(() => models.protoFile.create({ name: 'no parentId' })).toThrow(
|
||||
'New ProtoFile missing `parentId`',
|
||||
);
|
||||
expect(() =>
|
||||
models.protoFile.create({
|
||||
name: 'no parentId',
|
||||
}),
|
||||
).toThrow('New ProtoFile missing `parentId`');
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user