Mirror of https://github.com/Kong/insomnia.git, synced 2025-12-23 22:28:58 -05:00

.github/ISSUE_TEMPLATE/bug_report.yml (164 changed lines)

@@ -2,85 +2,85 @@ name: Bug Report

description: Report an Insomnia bug
labels: [B-bug, S-unverified]
body:
  - type: textarea
    attributes:
      label: Expected Behavior
      description: A clear and concise description of what you expected to happen.
    validations:
      required: true
  - type: markdown
    attributes:
      value: '> **Tip**: You can attach images or log files to textareas by clicking to highlight and then dragging files in.'
  - type: textarea
    attributes:
      label: Actual Behavior
      description: A clear description of what actually happens.
    validations:
      required: true
  - type: textarea
    attributes:
      label: Reproduction Steps
      description: Provide steps to reproduce the behavior
      placeholder: |
        1. Go to '...'
        2. Click on '....'
        3. Scroll down to '....'
        4. See error
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      options:
        - label: I have searched the [issue tracker](https://www.github.com/Kong/insomnia/issues) for this problem.
          required: true
  - type: checkboxes
    attributes:
      label: Which sync method do you use?
      options:
        - label: Git sync.
        - label: Insomnia Cloud sync.
        - label: Local only
  - type: textarea
    attributes:
      label: Additional Information
      description: If your problem needs further explanation, please add more information here.
  - type: input
    attributes:
      label: Insomnia Version
      description: What version of Insomnia are you using?
      placeholder: '2021.5.0'
    validations:
      required: true
  - type: dropdown
    attributes:
      label: What operating system are you using?
      options:
        - Windows
        - macOS
        - Ubuntu
        - Other Linux
        - Other (specify below)
    validations:
      required: true
  - type: input
    attributes:
      label: Operating System Version
      description: |-
        What operating system version are you using?
        On Windows, click Start button > Settings > System > About.
        On macOS, click the Apple Menu > About This Mac.
        On Linux, use `lsb_release` or `uname -a`.
      placeholder: 'e.g. Windows 10 version 1909, macOS Catalina 10.15.7, or Ubuntu 20.04'
    validations:
      required: true
  - type: input
    attributes:
      label: Installation method
      description: How did you install Insomnia?
      placeholder: 'e.g. download from insomnia.rest, homebrew, apt, etc.'
    validations:
      required: true
  - type: input
    attributes:
      label: Last Known Working Insomnia version
      description: What is the last version of Insomnia this worked in, if applicable?
      placeholder: '2021.4.0'

.github/workflows/release-build.yml (8 changed lines)

@@ -74,7 +74,7 @@ jobs:

- name: Package app (Linux ARM64 only)
  if: runner.os == 'Linux' && runner.arch == 'ARM64'
  shell: bash
  run: npm run app-package
  env:
    NODE_OPTIONS: '--max_old_space_size=6144'
    BUILD_TARGETS: AppImage,tar.gz

@@ -112,7 +112,6 @@ jobs:

  shell: bash
  run: ./build-secure-wrapper.sh CI

- name: Move .dll and .exe files to /tosign (PowerShell)
  if: runner.os == 'Windows'
  shell: pwsh

@@ -158,7 +157,6 @@

    CREDENTIAL_ID: ${{secrets.ES_CREDENTIAL_ID}}
    TOTP_SECRET: ${{secrets.ES_TOTP_SECRET}}

- name: Package inso
  run: |
    echo "Replacing electron binary with node binary"

@@ -182,7 +180,7 @@

    VERSION: ${{ env.INSO_VERSION }}

- name: Notarize Inso CLI installer (macOS only)
  if: runner.os == 'macOS'
  uses: lando/notarize-action@b5c3ef16cf2fbcf2af26dc58c90255ec242abeed # v2
  with:
    product-path: ./packages/${{ env.INSO_PACKAGE_NAME }}/artifacts/inso-${{ matrix.os }}-${{ env.INSO_VERSION }}.pkg

@@ -273,7 +271,7 @@

  id: release_version
  shell: bash
  run: |
    echo "version=${BRANCH/release\//}" >> $GITHUB_OUTPUT
  env:
    BRANCH: ${{ github.ref_name }}
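
For context on the step with `id: release_version` above: `${BRANCH/release\//}` is plain Bash parameter expansion that strips the `release/` prefix from the branch name before the value is written to `$GITHUB_OUTPUT`. A minimal sketch of the same idea outside of GitHub Actions (the branch name below is illustrative, not taken from this diff):

```bash
#!/usr/bin/env bash
# Strip the "release/" prefix from a branch name, as the workflow's
# release_version step does before writing it to $GITHUB_OUTPUT.
BRANCH="release/10.0.0"        # illustrative value; in CI this comes from github.ref_name
version="${BRANCH/release\//}" # first occurrence of "release/" is removed
echo "version=${version}"      # prints: version=10.0.0
```
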
.github/workflows/release-publish.yml (18 changed lines)

@@ -43,7 +43,7 @@ jobs:

- name: Setup Node
  uses: actions/setup-node@v4
  with:
    node-version-file: '.nvmrc'
    cache: 'npm'
    cache-dependency-path: package-lock.json

@@ -69,7 +69,7 @@ jobs:

      type=raw,value=latest,enable=${{ env.IS_PRERELEASE == 'false' }}
      type=raw,value=alpha,enable=${{ env.IS_PRERELEASE == 'true' && contains(github.event.inputs.version, 'alpha') }}
      type=raw,value=beta,enable=${{ env.IS_PRERELEASE == 'true' && contains(github.event.inputs.version, 'beta') }}
    sep-tags: ','

# Setup regctl to parse platform specific image digest from image manifest
- name: Install regctl

@@ -100,11 +100,11 @@

- name: Verify Inso Container Image Provenance produced on insomnia-ee
  run: |
    slsa-verifier verify-image \
      kong/inso:${{env.RELEASE_VERSION}}@${{steps.image_manifest_metadata.outputs.inso_image_sha}} \
      --print-provenance \
      --provenance-repository ${{env.NOTARY_REPOSITORY}} \
      --source-uri 'github.com/Kong/insomnia-ee'

- name: Verify Inso Binary Provenance for artifacts produced on insomnia-ee
  run: |

@@ -127,7 +127,7 @@

  id: core_tag_and_release
  with:
    tag: ${{ env.RELEASE_CORE_TAG }}
    name: '${{ env.RELEASE_VERSION }} 📦'
    generateReleaseNotes: true
    commit: ${{ env.RELEASE_BRANCH }}
    prerelease: ${{ env.IS_PRERELEASE }}

@@ -142,7 +142,7 @@

  with:
    release_id: ${{ steps.core_tag_and_release.outputs.id }}
    tag_name: ${{ env.RELEASE_CORE_TAG }}
    file: './artifacts/*'
    prerelease: ${{ env.IS_PRERELEASE }}
    draft: false
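
A note on the `node-version-file: '.nvmrc'` input used here and in the other workflows below: `actions/setup-node@v4` reads the Node version from that file instead of a hard-coded `node-version`. An `.nvmrc` is just a single version string; a minimal, hypothetical sketch (the actual version pinned in the repo is not shown in this diff):

```bash
# .nvmrc holds one line with the Node version; the value here is illustrative only.
echo "20.11.0" > .nvmrc
nvm use   # locally, nvm picks the version up from .nvmrc the same way setup-node does in CI
```
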
.github/workflows/release-recurring.yml (2 changed lines)

@@ -41,7 +41,7 @@ jobs:

- name: Setup Node
  uses: actions/setup-node@v4
  with:
    node-version-file: '.nvmrc'
    cache: 'npm'
    cache-dependency-path: package-lock.json

.github/workflows/release-start.yml (19 changed lines)

@@ -9,9 +9,9 @@ on:

      type: choice
      description: Channel of the release (alpha/beta/stable)
      options:
        - alpha
        - beta
        - stable
    version:
      required: false
      description: force version of the release (e.g. 9.0.0) if previous release was successful, this should auto increment

@@ -31,7 +31,7 @@ jobs:

- name: Setup Node
  uses: actions/setup-node@v4
  with:
    node-version-file: '.nvmrc'
    cache: 'npm'
    cache-dependency-path: package-lock.json

@@ -57,8 +57,7 @@ jobs:

  if: github.event.inputs.channel != 'stable' && github.event.inputs.version
  run: npm --workspaces version "${{ github.event.inputs.version }}"

# ############################################################

- name: Get version
  shell: bash

@@ -85,8 +84,8 @@

  with:
    username: ${{ (github.event_name == 'workflow_dispatch' && github.actor) || 'insomnia-infra' }}

# ############################################################
# re-run the versioning steps to apply to the new branch
# ############################################################

- name: (Re-run) App version (stable, patch latest stable)
  if: github.event.inputs.channel == 'stable' && !github.event.inputs.version

@@ -104,7 +103,7 @@

  if: github.event.inputs.channel != 'stable' && github.event.inputs.version
  run: npm --workspaces version "${{ github.event.inputs.version }}"

# ############################################################

- name: Git Commit
  run: git commit -am "Bump app version to ${{ env.RELEASE_VERSION }}"

@@ -122,7 +121,7 @@

  github_token: ${{ secrets.RELEASE_GH_TOKEN }}
  source_branch: ${{ env.RELEASE_BRANCH}}
  target_branch: develop
  title: ':rocket: ${{ env.RELEASE_VERSION}}'
  body: |
    **Automated pull request**
    Artifacts build in progress...

.github/workflows/sast.yml (5 changed lines)

@@ -4,11 +4,10 @@ on:

  pull_request: {}
  push:
    branches:
      - develop
      - release/*
  workflow_dispatch: {}

jobs:
  semgrep:
    timeout-minutes: 5

.github/workflows/test-cli.yml (4 changed lines)

@@ -26,7 +26,7 @@ jobs:

- name: Setup Node
  uses: actions/setup-node@v4
  with:
    node-version-file: '.nvmrc'
    cache: 'npm'
    cache-dependency-path: package-lock.json

@@ -36,7 +36,7 @@ jobs:

- name: Install node version of node-libcurl for inso tests
  run: node_modules/.bin/node-pre-gyp install --update-binary --directory node_modules/@getinsomnia/node-libcurl

- name: Build Inso
  run: npm run build -w insomnia-inso

- name: Run Inso unit tests

.github/workflows/test.yml (3 changed lines)

@@ -36,9 +36,6 @@ jobs:

- name: Lint
  run: npm run lint

- name: Lint markdown
  run: npm run lint:markdown

- name: Type checks
  run: npm run type-check

.github/workflows/update-changelog.yml (3 changed lines)

@@ -1,11 +1,10 @@

# .github/workflows/update-changelog.yaml
name: 'Update Changelog'

on:
  release:
    types: [released]

jobs:
  update:
    runs-on: ubuntu-22.04

@@ -8,9 +8,7 @@

  {
    "files": "packages/insomnia/**/*",
    "options": {
      "plugins": ["prettier-plugin-tailwindcss"]
    }
  }
]

.vscode/launch.json (7 changed lines)

@@ -84,7 +84,7 @@

      },
      {
        "value": "help"
      }
    ]
  }
],

@@ -98,10 +98,7 @@

    },
    "stopAll": true,
    "preLaunchTask": "Insomnia: Compile (Watch)",
    "configurations": ["Electron: main", "Electron: renderer"]
  }
]
}

.vscode/settings.json (2 changed lines)

@@ -43,5 +43,5 @@

  ],
  "[cpp]": {
    "editor.defaultFormatter": "llvm-vs-code-extensions.vscode-clangd"
  }
}

.vscode/tasks.json (9 changed lines)

@@ -11,7 +11,7 @@

  "options": {
    "cwd": "${workspaceFolder}/packages/insomnia",
    "env": {
      "NODE_ENV": "development"
    }
  },
  "command": "${workspaceRoot}/node_modules/.bin/esr esbuild.main.ts"

@@ -24,7 +24,7 @@

  "options": {
    "cwd": "${workspaceFolder}/packages/insomnia",
    "env": {
      "NODE_ENV": "development"
    }
  },
  "isBackground": true,

@@ -57,10 +57,7 @@

{
  "label": "Insomnia: Compile (Watch)",
  "detail": "Compile Renderer (Watch) | Compile Main",
  "dependsOn": ["Insomnia: Compile Renderer (Watch)", "Insomnia: Compile Main"]
},
{
  "label": "Inso: Compile (Watch)",

CHANGELOG.md (328 changed lines)

@@ -8,166 +8,170 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [core@10.0.0] - 2024-09-10

## What's Changed

- bump: react-router by @CurryYangxx in https://github.com/Kong/insomnia/pull/7795
- fix(Sync Staging Modal): use action to update selected items by @gatzjames in https://github.com/Kong/insomnia/pull/7794
- Fixed copying credential In Auth: Basic were username and password input needed to copied by users by @pranavithape in https://github.com/Kong/insomnia/pull/7789
- fix(ux): duplicate improvement by @CurryYangxx in https://github.com/Kong/insomnia/pull/7803
- fixes lodash.set cve by @jackkav in https://github.com/Kong/insomnia/pull/7801
- chore: rm userId from sentry [INS-4260] by @filfreire in https://github.com/Kong/insomnia/pull/7804
- fix cves and add CI check by @jackkav in https://github.com/Kong/insomnia/pull/7806
- Fix: Keep equal sign for empty query parameters[INS-4228] by @cwangsmv in https://github.com/Kong/insomnia/pull/7802
- chore: hash userID on segment [INS-4260] by @filfreire in https://github.com/Kong/insomnia/pull/7805
- Fix backslash in environment key freeze app [INS-4157] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7763
- fix(Key-Value Editor): deleting the last item on the key-value pair not showing an empty pair by @gatzjames in https://github.com/Kong/insomnia/pull/7818
- Remove styled-components by @jackkav in https://github.com/Kong/insomnia/pull/7809
- feat(sync): support offline commit- [INS-4226] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7811
- :rocket: 9.3.4-beta.0 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7823
- Import postman env in Insomnia project level [INS-4253] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7821
- feat: display uncommit&unpush change - [INS-4138] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7816
- Preserve the original Authorization headers when importing [INS-4269] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7827
- feat: Context menu for Nunjucks tag[INS-4273] by @cwangsmv in https://github.com/Kong/insomnia/pull/7828
- feat(Keyboard Shorcuts): update delete request shortcut by @gatzjames in https://github.com/Kong/insomnia/pull/7824
- feat: show uncommit&unpush status for all projects-[INS-4138] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7830
- Fix: GraphQL request export curl body issue and GraphQL payload delete issue[INS-4281] by @cwangsmv in https://github.com/Kong/insomnia/pull/7831
- :rocket: 9.3.4-beta.1 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7836
- Try to fix smoke test flaky by @cwangsmv in https://github.com/Kong/insomnia/pull/7840
- fix: test-util snippet to proper status code check [no-ticket] by @filfreire in https://github.com/Kong/insomnia/pull/7844
- feat(Project View): UI improvements by @gatzjames in https://github.com/Kong/insomnia/pull/7850
- fix: disable failure on npm audit [no-ticket] by @filfreire in https://github.com/Kong/insomnia/pull/7862
- chore: bump electron to 30.4 [INS-4316] by @filfreire in https://github.com/Kong/insomnia/pull/7852
- import postman data dump [INS-3810] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7834
- support for removing default org project by @yaoweiprc in https://github.com/Kong/insomnia/pull/7854
- fix: persist cookies from response together with ones from after-response script by @ihexxa in https://github.com/Kong/insomnia/pull/7819
- chore: split packaging for windows builds [INS-3983] by @filfreire in https://github.com/Kong/insomnia/pull/7838
- fix(Git Sync): fix issue when switching to Insomnia Sync by @gatzjames in https://github.com/Kong/insomnia/pull/7860
- fix: handle login when opening org logged out [INS-4330] by @filfreire in https://github.com/Kong/insomnia/pull/7865
- Revert "support for removing default org project" by @yaoweiprc in https://github.com/Kong/insomnia/pull/7874
- inso cli scripting first pass by @jackkav in https://github.com/Kong/insomnia/pull/7790
- Allow deleting default project in org and fix sync issue [INS-4342] [INS-4311] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7881
- chore(runner): cleaning up runner-pr1 and resolve conflicts by @ihexxa in https://github.com/Kong/insomnia/pull/7878
- :rocket: 10.0.0-beta.0 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7882
- only minify inso cli in prod by @jackkav in https://github.com/Kong/insomnia/pull/7879
- feat: Add logic to redirect users based on their plan when creating a new organization by @pavkout in https://github.com/Kong/insomnia/pull/7856
- tabs should rerender when changing mock by @jackkav in https://github.com/Kong/insomnia/pull/7888
- fix: check for open curl by @jackkav in https://github.com/Kong/insomnia/pull/7889
- Bump/electron-31 by @jackkav in https://github.com/Kong/insomnia/pull/7884
- shell.nix -> flake.nix by @jackkav in https://github.com/Kong/insomnia/pull/7892
- add mock method header by @jackkav in https://github.com/Kong/insomnia/pull/7872
- use nixpkgs/unstable by @jackkav in https://github.com/Kong/insomnia/pull/7902
- fix: file not synced after switch sync method -[INS-4347] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7897
- fix: rename untracked projects [INS-4365] by @filfreire in https://github.com/Kong/insomnia/pull/7898
- fix: flaky git test by @ihexxa in https://github.com/Kong/insomnia/pull/7883
- Add ut and e2e test for data upload and pre-script in collection runner by @cwangsmv in https://github.com/Kong/insomnia/pull/7903
- Supporting moving files from one project to another [INS-3865] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7849
- Enhance Response function to aware environment change and update description[INS-4279] by @cwangsmv in https://github.com/Kong/insomnia/pull/7896
- chore: update AI service URL to use 'https://ai-helper.insomnia.rest' [INS-4367] by @pavkout in https://github.com/Kong/insomnia/pull/7910
- fix(runner): some minor fixes and improvements by @ihexxa in https://github.com/Kong/insomnia/pull/7900
- improve flakey test by @jackkav in https://github.com/Kong/insomnia/pull/7909
- feat: sign all files on windows [INS-4362] by @filfreire in https://github.com/Kong/insomnia/pull/7913
- Avoid encoding queryParams when request.settingEncodeUrl is set to false by @XSPGMike in https://github.com/Kong/insomnia/pull/7893
- feat: improve EDN response by @garug in https://github.com/Kong/insomnia/pull/7777
- fix: minor fixes in styles, linting and UT by @ihexxa in https://github.com/Kong/insomnia/pull/7916
- preserve relationships in nunjucks tags by @jackkav in https://github.com/Kong/insomnia/pull/7915
- :rocket: 10.0.0-beta.1 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7917
- chore: cleanup after v10 beta.1 [no-ticket] by @filfreire in https://github.com/Kong/insomnia/pull/7918
- feat(Onboarding): v10 by @gatzjames in https://github.com/Kong/insomnia/pull/7863
- chore: return friendly message when sendRequest sees an error by @ihexxa in https://github.com/Kong/insomnia/pull/7911
- chore: upgrade micromatch and add back npm audit by @ihexxa in https://github.com/Kong/insomnia/pull/7921
- fix(runner): some minor fixes by @ihexxa in https://github.com/Kong/insomnia/pull/7923
- add team check by @jackkav in https://github.com/Kong/insomnia/pull/7919
- :rocket: 10.0.0-beta.2 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7924
- Fix duplicate file cause application error in collection view UI by @cwangsmv in https://github.com/Kong/insomnia/pull/7928
- :rocket: 10.0.0-beta.3 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7930

## New Contributors

- @pranavithape made their first contribution in https://github.com/Kong/insomnia/pull/7789
- @XSPGMike made their first contribution in https://github.com/Kong/insomnia/pull/7893
- @garug made their first contribution in https://github.com/Kong/insomnia/pull/7777

**Full Changelog**: https://github.com/Kong/insomnia/compare/core@9.3.3...core@10.0.0

## [core@9.3.3] - 2024-07-31

## What's Changed

- perf: App start improvement [INS-3957] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7492
- :rocket: 9.3.3-beta.0 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7674
- fix: default user-agent for oauth2 [7672] by @filfreire in https://github.com/Kong/insomnia/pull/7675
- inso fifth pass by @jackkav in https://github.com/Kong/insomnia/pull/7601
- feat: inso parent folder auth by @jackkav in https://github.com/Kong/insomnia/pull/7676
- chore: duplicate / symbol for import in insomnia-scripting-environment/src/objects/(interfaces.ts, request.ts) by @Novsochetra in https://github.com/Kong/insomnia/pull/7686
- fix: changelog [no-ticket] by @filfreire in https://github.com/Kong/insomnia/pull/7677
- chore: enable sentry tracing by @CurryYangxx in https://github.com/Kong/insomnia/pull/7688
- force vite to always wipe cache by @jackkav in https://github.com/Kong/insomnia/pull/7690
- chore: make the overlay darker in displaying request timings by @ihexxa in https://github.com/Kong/insomnia/pull/7691
- split test job into app and cli by @jackkav in https://github.com/Kong/insomnia/pull/7685
- chore: add smoke test for git-sync [INS-4132] by @filfreire in https://github.com/Kong/insomnia/pull/7682
- feat(Markdown Preview): always enable preview by @gatzjames in https://github.com/Kong/insomnia/pull/7694
- fix(Delete Environment): Don't show empty view when deleting an environment by @gatzjames in https://github.com/Kong/insomnia/pull/7695
- fix(Export): Option to export all data from the settings on the login view by @gatzjames in https://github.com/Kong/insomnia/pull/7702
- fix: typo in style name by @ihexxa in https://github.com/Kong/insomnia/pull/7701
- fix: ui improvement when return deferred data in loader by @CurryYangxx in https://github.com/Kong/insomnia/pull/7681
- fix: refresh storage rule when org change by @CurryYangxx in https://github.com/Kong/insomnia/pull/7707
- hide self host url in create/edit mock by @jackkav in https://github.com/Kong/insomnia/pull/7704
- feat(Request pane): Add indicators for body and auth in the request pane tabs by @gatzjames in https://github.com/Kong/insomnia/pull/7697
- Trim Bearer Authentication Strings by @SimplexShotz in https://github.com/Kong/insomnia/pull/7279
- feat: show deprecation warnings on graphql arguments by @anujbiyani in https://github.com/Kong/insomnia/pull/7364
- Clean up outdate jest and tsconfigs by @jackkav in https://github.com/Kong/insomnia/pull/7712
- chore: mv prerelease tests into smoke [INS-4132] by @filfreire in https://github.com/Kong/insomnia/pull/7705
- feat: add test utils on scripting snippets [INS-4141] by @filfreire in https://github.com/Kong/insomnia/pull/7692
- fix: migrate loader redirect by @CurryYangxx in https://github.com/Kong/insomnia/pull/7426
- refactor: flatten and reduce tsconfigs by @jackkav in https://github.com/Kong/insomnia/pull/7716
- enable verbatimModuleSyntax by @jackkav in https://github.com/Kong/insomnia/pull/7718
- perf: return deferred data in permission loader by @CurryYangxx in https://github.com/Kong/insomnia/pull/7635
- perf: change org performance improvement [INS-3968] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7582
- feat: add async task indicator [INS-4106] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7522
- fix(sdk): sdk type cleanup by @ihexxa in https://github.com/Kong/insomnia/pull/7721
- chore: bump electron 30.0 to 30.2 by @filfreire in https://github.com/Kong/insomnia/pull/7714
- fix: Use SSE for storage control updates by @pavkout in https://github.com/Kong/insomnia/pull/7661
- :rocket: 9.3.3-beta.1 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7723
- Update CHANGELOG.md by @CurryYangxx in https://github.com/Kong/insomnia/pull/7725
- feat: inso collection runner by @jackkav in https://github.com/Kong/insomnia/pull/7700
- fix: cannot delete request by shortcut [INS-4156] by @yaoweiprc in https://github.com/Kong/insomnia/pull/7728
- fix(Key-Value Editor): Edit mode by @gatzjames in https://github.com/Kong/insomnia/pull/7739
- remove send-request by @jackkav in https://github.com/Kong/insomnia/pull/7731
- feat(Generate Collection from Spec): add description to requests if it's available from the oas3 schema by @gatzjames in https://github.com/Kong/insomnia/pull/7734
- fix: syncing status indicator ui by @CurryYangxx in https://github.com/Kong/insomnia/pull/7730
- feat(Sidebar): interactions improvements by @gatzjames in https://github.com/Kong/insomnia/pull/7722
- fix(Git Clone): redirect using incorrect organizationId by @gatzjames in https://github.com/Kong/insomnia/pull/7740
- :rocket: 9.3.3-beta.2 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7741
- handle null auth by @jackkav in https://github.com/Kong/insomnia/pull/7746
- fix(Collection): Clean up auto-scroll and add back selected item styling by @gatzjames in https://github.com/Kong/insomnia/pull/7747
- fixes incorrect scrollbar display issue by @Karthik7406 in https://github.com/Kong/insomnia/pull/7742
- fix: reduce uncessary navigate when switching requests and tests by @CurryYangxx in https://github.com/Kong/insomnia/pull/7748
- fix: lost <disabled> in header transforming and blank req body by @ihexxa in https://github.com/Kong/insomnia/pull/7738
- chore: git sync pull push test [INS-4132] by @filfreire in https://github.com/Kong/insomnia/pull/7720
- vitest by @jackkav in https://github.com/Kong/insomnia/pull/7754
- feat(History): Navigate to last opened workspace on app load by @gatzjames in https://github.com/Kong/insomnia/pull/7755
- feat(Sentry): clean up unnecessary sentry stack by @gatzjames in https://github.com/Kong/insomnia/pull/7758
- fix(Settings): update header styles for analytics by @gatzjames in https://github.com/Kong/insomnia/pull/7759
- fix: can't match project when last visit page is dashboard by @CurryYangxx in https://github.com/Kong/insomnia/pull/7762
- fix(History): navigate to the project if the last visited workspace has been deleted by @gatzjames in https://github.com/Kong/insomnia/pull/7764
- feat(Response Pane): improve tabs styles by @gatzjames in https://github.com/Kong/insomnia/pull/7765
- :rocket: 9.3.3-beta.3 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7766
- chore: add sentry metric [INS-4115] by @CurryYangxx in https://github.com/Kong/insomnia/pull/7727
- Vitest-2 app package by @jackkav in https://github.com/Kong/insomnia/pull/7757
- fix: project switch report by @CurryYangxx in https://github.com/Kong/insomnia/pull/7771
- :rocket: 9.3.3-beta.4 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7774
- feat(UI): UI improvements for the app by @gatzjames in https://github.com/Kong/insomnia/pull/7773
- chore: check analytics issue [INS-4212] by @filfreire in https://github.com/Kong/insomnia/pull/7775
- fix(GraphQL Editor): make inputValueDeprecation optional and change variable mode to json by @gatzjames in https://github.com/Kong/insomnia/pull/7779
- inso cli dx improvements by @jackkav in https://github.com/Kong/insomnia/pull/7776
- fix(Git Staging Modal): close the modal on ESC by @gatzjames in https://github.com/Kong/insomnia/pull/7781
- fix(KeyValue Editor): fix key value focus issue and handle updating params from url by @gatzjames in https://github.com/Kong/insomnia/pull/7780
- feat(Styles): Minor style improvements by @gatzjames in https://github.com/Kong/insomnia/pull/7782
- :rocket: 9.3.3-beta.5 by @insomnia-infra in https://github.com/Kong/insomnia/pull/7783
- Add type checking to sdk package by @jackkav in https://github.com/Kong/insomnia/pull/7719

## New Contributors

- @Novsochetra made their first contribution in https://github.com/Kong/insomnia/pull/7686
- @SimplexShotz made their first contribution in https://github.com/Kong/insomnia/pull/7279
- @anujbiyani made their first contribution in https://github.com/Kong/insomnia/pull/7364
- @yaoweiprc made their first contribution in https://github.com/Kong/insomnia/pull/7728
- @Karthik7406 made their first contribution in https://github.com/Kong/insomnia/pull/7742

**Full Changelog**: https://github.com/Kong/insomnia/compare/core@9.3.2...core@9.3.3

@@ -252,7 +256,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- fix(Response Tabs): Tabs with a menu inside are not accessible - Response Panes by @gatzjames in <https://github.com/Kong/insomnia/pull/7477>
- fix: script to parse binary digests by @saisatishkarra in <https://github.com/Kong/insomnia/pull/7493>
- :rocket: 9.3.0-alpha.5 by @insomnia-infra in <https://github.com/Kong/insomnia/pull/7494>
- feat: folder inheritance scripts by @jackkav in <https://github.com/Kong/insomnia/pull/7430>
- fix: use base64 output file for provenance for large assets by @saisatishkarra in <https://github.com/Kong/insomnia/pull/7496>
- Bump/electron-30 by @jackkav in <https://github.com/Kong/insomnia/pull/7354>
- chore: new analytics events [INS-3938] by @filfreire in <https://github.com/Kong/insomnia/pull/7495>

@@ -276,7 +280,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- fix: url preview should contain auth params by @ihexxa in <https://github.com/Kong/insomnia/pull/7509>
- include read only in headers count by @jackkav in <https://github.com/Kong/insomnia/pull/7521>
- remove oas 2 kong by @jackkav in <https://github.com/Kong/insomnia/pull/7503>
- fix: windows artifact and update code signer [INS-3993][INS-3982] by @filfreire in <https://github.com/Kong/insomnia/pull/7523>
- :rocket: 9.3.0-alpha.11 by @insomnia-infra in <https://github.com/Kong/insomnia/pull/7524>
- :rocket: 9.3.0-beta.3 by @insomnia-infra in <https://github.com/Kong/insomnia/pull/7526>
- chore(Minor UI improvements): Expand/Collapse all and file card titles by @gatzjames in <https://github.com/Kong/insomnia/pull/7528>

@@ -439,11 +443,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- feat(hidden-window): enable baseEnvironment in the pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7102>
- fix: release-start changelog step [no-ticket] by @filfreire in <https://github.com/Kong/insomnia/pull/7113>
- fix: move changelog step to release-publish by @filfreire in <https://github.com/Kong/insomnia/pull/7114>
- feat: enable globals, iterationData and variables in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7103>
- chore: bump GH actions versions [no-ticket] by @filfreire in <https://github.com/Kong/insomnia/pull/7117>
- feat(Insomnia Cloud Sync): Update filesystem driver for VCS sync by @gatzjames in <https://github.com/Kong/insomnia/pull/7111>
- feat: enable property in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7120>
- feat: enable headers in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7121>
- feat: enable collection-variable in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7122>
- feat: enable Url in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7123>
- feat: enable Request and Response in pre-request scripting [INS-3379] by @ihexxa in <https://github.com/Kong/insomnia/pull/7128>

@@ -571,19 +575,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
### :sparkles: New Features
|
||||
|
||||
- [`e1e3b13`](https://github.com/Kong/insomnia/commit/e1e3b139b3bb917ab9dfcb0ce12d16079dee9c04) - **unit-tests**: Unit test reordering *(PR [#7020](https://github.com/Kong/insomnia/pull/7020) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- [`2249bb7`](https://github.com/Kong/insomnia/commit/2249bb7b98c947ab1cb11955928fd80d4adec845) - **environment**: update environments icons *(PR [#7050](https://github.com/Kong/insomnia/pull/7050) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- [`a09c233`](https://github.com/Kong/insomnia/commit/a09c23305c9c493105808b8df23d1911f5b59ea2) - **pane-tabs**: Consistent styles for tabs *(PR [#7062](https://github.com/Kong/insomnia/pull/7062) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- [`d1c2928`](https://github.com/Kong/insomnia/commit/d1c292891cc9dd8a17d4637f643336cf1afcccfa) - **command-palette**: add button to open the command palette *(PR [#7064](https://github.com/Kong/insomnia/pull/7064) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- [`e1e3b13`](https://github.com/Kong/insomnia/commit/e1e3b139b3bb917ab9dfcb0ce12d16079dee9c04) - **unit-tests**: Unit test reordering _(PR [#7020](https://github.com/Kong/insomnia/pull/7020) by [@gatzjames](https://github.com/gatzjames))_
|
||||
- [`2249bb7`](https://github.com/Kong/insomnia/commit/2249bb7b98c947ab1cb11955928fd80d4adec845) - **environment**: update environments icons _(PR [#7050](https://github.com/Kong/insomnia/pull/7050) by [@gatzjames](https://github.com/gatzjames))_
|
||||
- [`a09c233`](https://github.com/Kong/insomnia/commit/a09c23305c9c493105808b8df23d1911f5b59ea2) - **pane-tabs**: Consistent styles for tabs _(PR [#7062](https://github.com/Kong/insomnia/pull/7062) by [@gatzjames](https://github.com/gatzjames))_
|
||||
- [`d1c2928`](https://github.com/Kong/insomnia/commit/d1c292891cc9dd8a17d4637f643336cf1afcccfa) - **command-palette**: add button to open the command palette _(PR [#7064](https://github.com/Kong/insomnia/pull/7064) by [@gatzjames](https://github.com/gatzjames))_
|
||||
|
||||
### :bug: Bug Fixes
|
||||
|
||||
- [`df0a791`](https://github.com/Kong/insomnia/commit/df0a79194143dc615310ecc0976381c538f695f2) - re-initialize the parameter editor state when switching between requests *(PR [#7005](https://github.com/Kong/insomnia/pull/7005) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- :arrow_lower_right: *fixes issue [#7000](undefined) opened by [@jwarner112](https://github.com/jwarner112)*
|
||||
- [`3fceccf`](https://github.com/Kong/insomnia/commit/3fceccfdf691a0f3d7592f31120030eeff92be61) - **workspace**: Add default name for when creating a workspace *(PR [#7046](https://github.com/Kong/insomnia/pull/7046) by [@gatzjames](https://github.com/gatzjames))*
|
||||
- [`df0a791`](https://github.com/Kong/insomnia/commit/df0a79194143dc615310ecc0976381c538f695f2) - re-initialize the parameter editor state when switching between requests _(PR [#7005](https://github.com/Kong/insomnia/pull/7005) by [@gatzjames](https://github.com/gatzjames))_
|
||||
- :arrow_lower_right: _fixes issue [#7000](undefined) opened by [@jwarner112](https://github.com/jwarner112)_
|
||||
- [`3fceccf`](https://github.com/Kong/insomnia/commit/3fceccfdf691a0f3d7592f31120030eeff92be61) - **workspace**: Add default name for when creating a workspace _(PR [#7046](https://github.com/Kong/insomnia/pull/7046) by [@gatzjames](https://github.com/gatzjames))_
|
||||
|
||||
### :wrench: Chores
|
||||
|
||||
- [`353780e`](https://github.com/Kong/insomnia/commit/353780e16ab30853ce206398850c0c0f1c9bd887) - edit changelog process [INS-3456] *(PR [#7001](https://github.com/Kong/insomnia/pull/7001) by [@filfreire](https://github.com/filfreire))*
|
||||
- [`353780e`](https://github.com/Kong/insomnia/commit/353780e16ab30853ce206398850c0c0f1c9bd887) - edit changelog process [INS-3456] _(PR [#7001](https://github.com/Kong/insomnia/pull/7001) by [@filfreire](https://github.com/filfreire))_
|
||||
|
||||
[core@8.6.1]: https://github.com/Kong/insomnia/compare/core@8.6.0...core@8.6.1
|
||||
|
||||
@@ -9,7 +9,7 @@ This code of conduct applies to all spaces managed by the Insomnia project, incl
|
||||
If you believe someone is violating the code of conduct, we ask that you report it by emailing [support@insomnia.rest](mailto:support@insomnia.rest). For more details, please see our Reporting Guidelines.
|
||||
|
||||
- **Be friendly and patient.**
|
||||
- **Be welcoming.** We strive to be a community that welcomes and supports people of all backgrounds and identities. This includes, but is not limited to members of any race, ethnicity, culture, national origin, colour, immigration status, social and economic class, educational level, sex, sexual orientation, gender identity and expression, age, size, family status, political belief, religion, and mental and physical ability.
|
||||
- **Be considerate.** Your work will be used by other people, and you in turn will depend on the work of others. Any decision you take will affect users and colleagues, and you should take those consequences into account when making decisions. Remember that we're a world-wide community, so you might not be communicating in someone else's primary language.
|
||||
- **Be respectful.** Not all of us will agree all the time, but disagreement is no excuse for poor behavior and poor manners. We might all experience some frustration now and then, but we cannot allow that frustration to turn into a personal attack. It's important to remember that a community where people feel uncomfortable or threatened is not a productive one. Members of the Insomnia community should be respectful when dealing with other members as well as with people outside the Insomnia community.
|
||||
- **Be careful in the words that you choose.** We are a community of professionals, and we conduct ourselves professionally. Be kind to others. Do not insult or put down other participants. Harassment and other exclusionary behavior isn't acceptable. This includes, but is not limited to:
|
||||
|
||||
@@ -25,7 +25,7 @@ Insomnia uses [`npm workspaces`](https://docs.npmjs.com/cli/v9/using-npm/workspa
|
||||
|
||||
Insomnia Inso CLI is built using a series of steps:
|
||||
|
||||
1. `insomnia-inso` uses monorepo references to import `insomnia` and `insomnia-testing`, exposing `getSendRequestCallbackMemDb` from the former and `generate`, `runTests`, and `runTestsCli` from the latter
|
||||
1. `packages/insomnia-inso/dist/index.js` is transpiled and bundled to CommonJS with esbuild (a minimal sketch follows this list)
|
||||
1. `packages/insomnia-inso/bin/inso` is a shell script that points at `packages/insomnia-inso/dist/index.js` and is used for local development
|
||||
1. `packages/insomnia-inso/binaries/inso` is an executable made with `pkg`
|
||||
|
||||
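A minimal sketch of the esbuild step (step 3), assuming a single `src/index.ts` entry point; the real options live in the package's own esbuild config, part of which appears in the `BuildOptions` diff further below:

```ts
// Sketch: bundle the inso CLI entry point to a CommonJS file with esbuild.
// Entry path and options are assumptions; see the package's esbuild config.
import { build, type BuildOptions } from 'esbuild';

const config: BuildOptions = {
  entryPoints: ['packages/insomnia-inso/src/index.ts'],
  outfile: 'packages/insomnia-inso/dist/index.js',
  bundle: true,
  platform: 'node',
  format: 'cjs',
  sourcemap: true,
};

build(config).catch(() => process.exit(1));
```

The `bin/inso` wrapper from step 4 then simply runs that `dist/index.js` with Node during local development, while `pkg` turns it into the standalone `binaries/inso` executable from step 5.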
@@ -1,5 +1,5 @@
|
||||
import eslint from '@eslint/js';
|
||||
import eslintConfigPrettier from "eslint-config-prettier/flat";
|
||||
import eslintConfigPrettier from 'eslint-config-prettier/flat';
|
||||
import reactPlugin from 'eslint-plugin-react';
|
||||
import reactHooksPlugin from 'eslint-plugin-react-hooks';
|
||||
import simpleImportSortPlugin from 'eslint-plugin-simple-import-sort';
|
||||
@@ -27,10 +27,10 @@ export default tseslint.config(
|
||||
'eqeqeq': ['error', 'smart'],
|
||||
'no-async-promise-executor': 'off',
|
||||
'no-else-return': 'error',
|
||||
'no-empty': ["error", { "allowEmptyCatch": true }],
|
||||
'no-empty': ['error', { allowEmptyCatch: true }],
|
||||
'no-var': 'error',
|
||||
'no-trailing-spaces': 'error',
|
||||
'no-multiple-empty-lines': ['error', { 'max': 1, 'maxEOF': 0 }],
|
||||
'no-multiple-empty-lines': ['error', { max: 1, maxEOF: 0 }],
|
||||
'no-inner-declarations': 'off',
|
||||
'no-useless-escape': 'off', // TODO: Enable this rule
|
||||
'object-curly-spacing': ['error', 'always'],
|
||||
@@ -43,26 +43,33 @@ export default tseslint.config(
|
||||
'react/jsx-uses-vars': 'error',
|
||||
'react/jsx-indent-props': ['error', 2],
|
||||
'react/prop-types': 'off',
|
||||
'react/function-component-definition': ['error', {
|
||||
'namedComponents': 'arrow-function',
|
||||
'unnamedComponents': 'arrow-function',
|
||||
}],
|
||||
'react/function-component-definition': [
|
||||
'error',
|
||||
{
|
||||
namedComponents: 'arrow-function',
|
||||
unnamedComponents: 'arrow-function',
|
||||
},
|
||||
],
|
||||
'react/jsx-closing-bracket-location': ['error', 'line-aligned'],
|
||||
'react/prefer-stateless-function': 'error',
|
||||
'react/jsx-key': ['error', { 'checkFragmentShorthand': true }],
|
||||
'react/jsx-key': ['error', { checkFragmentShorthand: true }],
|
||||
'react/no-array-index-key': 'error',
|
||||
'react/self-closing-comp': 'error',
|
||||
|
||||
'react-hooks/exhaustive-deps': ['error', {
|
||||
// From react-use https://github.com/streamich/react-use/issues/1703#issuecomment-770972824
|
||||
'additionalHooks': '^use(Async|AsyncFn|AsyncRetry|Debounce|UpdateEffect|IsomorphicLayoutEffect|DeepCompareEffect|ShallowCompareEffect)$',
|
||||
}],
|
||||
'react-hooks/exhaustive-deps': [
|
||||
'error',
|
||||
{
|
||||
// From react-use https://github.com/streamich/react-use/issues/1703#issuecomment-770972824
|
||||
additionalHooks:
|
||||
'^use(Async|AsyncFn|AsyncRetry|Debounce|UpdateEffect|IsomorphicLayoutEffect|DeepCompareEffect|ShallowCompareEffect)$',
|
||||
},
|
||||
],
|
||||
'react-hooks/rules-of-hooks': 'error',
|
||||
|
||||
'@typescript-eslint/array-type': ['error', { default: 'array', readonly: 'array' }],
|
||||
'@typescript-eslint/consistent-type-definitions': ['error', 'interface'],
|
||||
'@typescript-eslint/consistent-type-imports': 'error',
|
||||
'@typescript-eslint/no-empty-interface': ['error', { 'allowSingleExtends': true }],
|
||||
'@typescript-eslint/no-empty-interface': ['error', { allowSingleExtends: true }],
|
||||
'@typescript-eslint/no-empty-object-type': 'off', // TODO: Enable this rule
|
||||
'@typescript-eslint/no-empty-function': 'off',
|
||||
'@typescript-eslint/no-namespace': ['error', { allowDeclarations: true }],
|
||||
@@ -77,7 +84,7 @@ export default tseslint.config(
|
||||
'@typescript-eslint/no-dynamic-delete': 'off',
|
||||
'@typescript-eslint/no-non-null-assertion': 'off',
|
||||
'@typescript-eslint/no-invalid-void-type': 'off',
|
||||
}
|
||||
},
|
||||
},
|
||||
eslintConfigPrettier,
|
||||
{
|
||||
@@ -106,6 +113,6 @@ export default tseslint.config(
|
||||
'**/traces/*',
|
||||
'**/verify-pkg.js',
|
||||
'**/__mocks__/*',
|
||||
]
|
||||
}
|
||||
],
|
||||
},
|
||||
);
|
||||
|
||||
@@ -26,7 +26,6 @@
|
||||
"lint": "npm run lint --workspaces --if-present",
|
||||
"type-check": "npm run type-check --workspaces --if-present",
|
||||
"test": "npm run test --workspaces --if-present",
|
||||
"lint:markdown": "npx markdownlint-cli2 \"**/*.md\" \"#**/node_modules\" \"#**/CHANGELOG.md\"",
|
||||
"clean": "git clean -dfX",
|
||||
"install-libcurl-electron": "node_modules/.bin/node-pre-gyp install --directory node_modules/@getinsomnia/node-libcurl --update-binary --runtime=electron --target=$target",
|
||||
"inso-start": "npm start -w insomnia-inso",
|
||||
|
||||
@@ -33,8 +33,7 @@ $PWD/packages/insomnia-inso/bin/inso -w packages/insomnia-inso/src/db/fixtures/g
|
||||
|
||||
### node-libcurl
|
||||
|
||||
`Error: The module '.../insomnia/node_modules/@getinsomnia/node-libcurl/lib/binding/node_libcurl.node'
|
||||
was compiled against a different Node.js version using`
|
||||
`Error: The module '.../insomnia/node_modules/@getinsomnia/node-libcurl/lib/binding/node_libcurl.node' was compiled against a different Node.js version using`
|
||||
|
||||
node-libcurl builds for three operating systems and two Node.js runtimes: insomnia-inso uses the Node.js build and the Insomnia app uses the Electron build. You can switch between them using the following two commands (the Electron variant is exposed as the `install-libcurl-electron` npm script shown above).
|
||||
|
||||
|
||||
@@ -22,9 +22,7 @@ const config: BuildOptions = {
|
||||
paths: [args.resolveDir],
|
||||
});
|
||||
// The replace is called twice to handle Windows-style path separators
|
||||
const pathEsm = pathUmdMay
|
||||
.replace('/umd/', '/esm/')
|
||||
.replace('\\umd\\', '\\esm\\');
|
||||
const pathEsm = pathUmdMay.replace('/umd/', '/esm/').replace('\\umd\\', '\\esm\\');
|
||||
return { path: pathEsm };
|
||||
});
|
||||
},
|
||||
|
||||
@@ -82,7 +82,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
describe('response and timeline has scripting effects', () => {
|
||||
it('console log appears in timeline', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/minimal.yml wrk_5b5ab6 --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/minimal.yml wrk_5b5ab6 --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -93,7 +94,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('insomnia.request.addHeader works', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-add-header.yml wrk_5b5ab6 --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-add-header.yml wrk_5b5ab6 --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -104,7 +106,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('require("insomnia-collection") works', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-require.yml wrk_5b5ab6 --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-require.yml wrk_5b5ab6 --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -114,7 +117,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('insomnia.sendRequest works', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-send-request.yml wrk_cfacae --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/script-send-request.yml wrk_cfacae --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -123,7 +127,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('iterationData and iterationCount args work', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -d packages/insomnia-smoke-test/fixtures/files/runner-data.json -w packages/insomnia-inso/src/examples/three-requests.yml -n 2 -i req_3fd28aabbb18447abab1f45e6ee4bdc1 -e env_86e135 wrk_c992d40 --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -d packages/insomnia-smoke-test/fixtures/files/runner-data.json -w packages/insomnia-inso/src/examples/three-requests.yml -n 2 -i req_3fd28aabbb18447abab1f45e6ee4bdc1 -e env_86e135 wrk_c992d40 --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -133,7 +138,8 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('send request with client cert and key', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/db/fixtures/nedb --requestNamePattern "withCertAndCA" --verbose "Insomnia Designer" wrk_0b96eff';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/db/fixtures/nedb --requestNamePattern "withCertAndCA" --verbose "Insomnia Designer" wrk_0b96eff';
|
||||
const result = await runCliFromRoot(input);
|
||||
if (result.code !== 0) {
|
||||
console.log(result);
|
||||
@@ -143,13 +149,15 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
|
||||
it('send request with settings enabled (by testing followRedirects)', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/db/fixtures/nedb --requestNamePattern "withSettings" --verbose "Insomnia Designer" wrk_0b96eff';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/db/fixtures/nedb --requestNamePattern "withSettings" --verbose "Insomnia Designer" wrk_0b96eff';
|
||||
const result = await runCliFromRoot(input);
|
||||
expect(result.stdout).not.toContain("Issue another request to this URL: 'https://insomnia.rest/'");
|
||||
});
|
||||
|
||||
it('run collection: run requests in specified order', async () => {
|
||||
const input = '$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/three-requests.yml -i req_6063adcdab5b409e9b4f00f47322df4a -i req_3fd28aabbb18447abab1f45e6ee4bdc1 -e env_86e135 wrk_c992d40 --verbose';
|
||||
const input =
|
||||
'$PWD/packages/insomnia-inso/bin/inso run collection -w packages/insomnia-inso/src/examples/three-requests.yml -i req_6063adcdab5b409e9b4f00f47322df4a -i req_3fd28aabbb18447abab1f45e6ee4bdc1 -e env_86e135 wrk_c992d40 --verbose';
|
||||
const result = await runCliFromRoot(input);
|
||||
|
||||
expect(result.code).toBe(0);
|
||||
@@ -164,8 +172,12 @@ describe('inso dev bundle', () => {
|
||||
});
|
||||
});
|
||||
|
||||
const packagedSuccessCodes = shouldReturnSuccessCode.map(x => x.replace('$PWD/packages/insomnia-inso/bin/inso', '$PWD/packages/insomnia-inso/binaries/inso'));
|
||||
const packagedErrorCodes = shouldReturnErrorCode.map(x => x.replace('$PWD/packages/insomnia-inso/bin/inso', '$PWD/packages/insomnia-inso/binaries/inso'));
|
||||
const packagedSuccessCodes = shouldReturnSuccessCode.map(x =>
|
||||
x.replace('$PWD/packages/insomnia-inso/bin/inso', '$PWD/packages/insomnia-inso/binaries/inso'),
|
||||
);
|
||||
const packagedErrorCodes = shouldReturnErrorCode.map(x =>
|
||||
x.replace('$PWD/packages/insomnia-inso/bin/inso', '$PWD/packages/insomnia-inso/binaries/inso'),
|
||||
);
|
||||
|
||||
describe('inso packaged binary', () => {
|
||||
describe('exit codes are consistent', () => {
|
||||
@@ -206,7 +218,12 @@ describe('Snapshot for', () => {
|
||||
});
|
||||
|
||||
// Execute the command in the root directory of the project
|
||||
export const runCliFromRoot = (input: string): Promise<{ code: number; error: ExecException | null; stdout: string; stderr: string }> => {
|
||||
return new Promise(resolve => exec(input, { cwd: path.resolve(__dirname, '../../..') },
|
||||
(error, stdout, stderr) => resolve({ code: error?.code || 0, error, stdout, stderr })));
|
||||
export const runCliFromRoot = (
|
||||
input: string,
|
||||
): Promise<{ code: number; error: ExecException | null; stdout: string; stderr: string }> => {
|
||||
return new Promise(resolve =>
|
||||
exec(input, { cwd: path.resolve(__dirname, '../../..') }, (error, stdout, stderr) =>
|
||||
resolve({ code: error?.code || 0, error, stdout, stderr }),
|
||||
),
|
||||
);
|
||||
};
|
||||
|
||||
@@ -109,7 +109,7 @@ export class InsoError extends Error {
|
||||
* getAppDataDir returns the data directory for an Electron app,
|
||||
* it is equivalent to the app.getPath('userData') API in Electron.
|
||||
* https://www.electronjs.org/docs/api/app#appgetpathname
|
||||
*/
|
||||
*/
|
||||
export function getAppDataDir(app: string): string {
|
||||
switch (process.platform) {
|
||||
case 'darwin':
|
||||
@@ -146,7 +146,7 @@ export const logErrorAndExit = (err?: Error) => {
|
||||
};
|
||||
const noConsoleLog = async <T>(callback: () => Promise<T>): Promise<T> => {
|
||||
const oldConsoleLog = console.log;
|
||||
console.log = () => { };
|
||||
console.log = () => {};
|
||||
try {
|
||||
return await callback();
|
||||
} finally {
|
||||
@@ -213,10 +213,10 @@ const logTestResult = (reporter: TestReporter, testResults?: RequestTestResult[]
|
||||
const fallbackReporter = testResults.map(r => `${r.status === 'passed' ? '✅' : '❌'} ${r.testCase}`).join('\n');
|
||||
|
||||
const reporterMap = {
|
||||
dot: testResults.map(r => r.status === 'passed' ? '.' : 'F').join(''),
|
||||
dot: testResults.map(r => (r.status === 'passed' ? '.' : 'F')).join(''),
|
||||
list: fallbackReporter,
|
||||
min: ' ',
|
||||
progress: `[${testResults.map(r => r.status === 'passed' ? '-' : 'x').join('')}]`,
|
||||
progress: `[${testResults.map(r => (r.status === 'passed' ? '-' : 'x')).join('')}]`,
|
||||
spec: fallbackReporter,
|
||||
tap: convertToTAP(testResults),
|
||||
};
|
||||
@@ -226,7 +226,10 @@ Total tests: ${testResults.length}
|
||||
Passed: ${testResults.filter(r => r.status === 'passed').length}
|
||||
Failed: ${testResults.filter(r => r.status === 'failed').length}
|
||||
|
||||
${testResults.filter(r => r.status === 'failed').map(r => r.errorMessage).join('\n')}`;
|
||||
${testResults
|
||||
.filter(r => r.status === 'failed')
|
||||
.map(r => r.errorMessage)
|
||||
.join('\n')}`;
|
||||
return `${reporterMap[reporter] || fallbackReporter}${summary}`;
|
||||
};
|
||||
function convertToTAP(testCases: RequestTestResult[]): string {
|
||||
@@ -253,7 +256,9 @@ const readFileFromPathOrUrl = async (pathOrUrl: string) => {
|
||||
return readFile(pathOrUrl, 'utf8');
|
||||
};
|
||||
const pathToIterationData = async (pathOrUrl: string, env: string[]): Promise<UserUploadEnvironment[]> => {
|
||||
const envAsObject = env.map(envString => Object.fromEntries(new URLSearchParams(envString).entries())).reduce((acc, obj) => ({ ...acc, ...obj }), {});
|
||||
const envAsObject = env
|
||||
.map(envString => Object.fromEntries(new URLSearchParams(envString).entries()))
|
||||
.reduce((acc, obj) => ({ ...acc, ...obj }), {});
|
||||
const fileType = pathOrUrl.split('.').pop()?.toLowerCase();
|
||||
const content = await readFileFromPathOrUrl(pathOrUrl);
|
||||
if (!content) {
|
||||
@@ -267,7 +272,9 @@ const getListFromFileOrUrl = (content: string, fileType?: string): Record<string
|
||||
try {
|
||||
const jsonDataContent = JSON.parse(content);
|
||||
if (Array.isArray(jsonDataContent)) {
|
||||
return jsonDataContent.filter(data => data && typeof data === 'object' && !Array.isArray(data) && data !== null);
|
||||
return jsonDataContent.filter(
|
||||
data => data && typeof data === 'object' && !Array.isArray(data) && data !== null,
|
||||
);
|
||||
}
|
||||
throw new Error('Invalid JSON file uploaded, JSON file must be array of key-value pairs.');
|
||||
} catch (error) {
|
||||
@@ -275,15 +282,20 @@ const getListFromFileOrUrl = (content: string, fileType?: string): Record<string
|
||||
}
|
||||
} else if (fileType === 'csv') {
|
||||
// Replace CRLF (Windows line break) and CR (Mac line break) with \n, then split into csv arrays
|
||||
const csvRows = content.replace(/\r\n|\r/g, '\n').split('\n').map(row => row.split(','));
|
||||
const csvRows = content
|
||||
.replace(/\r\n|\r/g, '\n')
|
||||
.split('\n')
|
||||
.map(row => row.split(','));
|
||||
// at least 2 rows required for csv
|
||||
if (csvRows.length > 1) {
|
||||
const csvHeaders = csvRows[0];
|
||||
const csvContentRows = csvRows.slice(1, csvRows.length);
|
||||
return csvContentRows.map(contentRow => csvHeaders.reduce((acc: Record<string, any>, cur, idx) => {
|
||||
acc[cur] = contentRow[idx] ?? '';
|
||||
return acc;
|
||||
}, {}));
|
||||
return csvContentRows.map(contentRow =>
|
||||
csvHeaders.reduce((acc: Record<string, any>, cur, idx) => {
|
||||
acc[cur] = contentRow[idx] ?? '';
|
||||
return acc;
|
||||
}, {}),
|
||||
);
|
||||
}
|
||||
throw new Error('CSV file must contain at least two rows with first row as variable names');
|
||||
}
|
||||
@@ -306,7 +318,6 @@ const transformIterationDataToEnvironmentList = (list: Record<string, string>[])
|
||||
};
|
||||
|
||||
export const go = (args?: string[]) => {
|
||||
|
||||
const program = new commander.Command();
|
||||
const version = process.env.VERSION || packageJson.version;
|
||||
|
||||
@@ -341,7 +352,8 @@ export const go = (args?: string[]) => {
|
||||
|
||||
program
|
||||
.version(version, '-v, --version')
|
||||
.description(`A CLI for Insomnia!
|
||||
.description(
|
||||
`A CLI for Insomnia!
|
||||
With this tool you can test, lint, and export your Insomnia data.
|
||||
Inso will try to detect your locally installed Insomnia data.
|
||||
You can also point it at a git repository folder, or an Insomnia export file.
|
||||
@@ -355,18 +367,19 @@ export const go = (args?: string[]) => {
|
||||
|
||||
Inso also supports configuration files, by default it will look for .insorc in the current/provided working directory.
|
||||
$ inso export spec --config /some/path/.insorc
|
||||
`)
|
||||
`,
|
||||
)
|
||||
.option('-w, --workingDir <dir>', 'set working directory/file: .insomnia folder, *.db.json, export.yaml', '')
|
||||
.option('--verbose', 'show additional logs while running the command', false)
|
||||
.option('--ci', 'run in CI, disables all prompts, defaults to false', false)
|
||||
.option('--config <path>', 'path to configuration file containing above options (.insorc)', '')
|
||||
.option('--printOptions', 'print the loaded options', false);
|
||||
|
||||
const run = program.command('run')
|
||||
.description('Execution utilities');
|
||||
const run = program.command('run').description('Execution utilities');
|
||||
|
||||
const defaultReporter: TestReporter = 'spec';
|
||||
run.command('test [identifier]')
|
||||
run
|
||||
.command('test [identifier]')
|
||||
.description('Run Insomnia unit test suites, identifier can be a test suite id or an API Spec id')
|
||||
.option('-e, --env <identifier>', 'environment to use', '')
|
||||
.option('-t, --testNamePattern <regex>', 'run tests that match the regex', '')
|
||||
@@ -376,261 +389,76 @@ export const go = (args?: string[]) => {
|
||||
.option('-k, --disableCertValidation', 'disable certificate validation for requests with SSL', false)
|
||||
.option('--httpsProxy <proxy>', 'URL for the proxy server for https requests.', proxySettings.httpsProxy)
|
||||
.option('--httpProxy <proxy>', 'URL for the proxy server for http requests.', proxySettings.httpProxy)
|
||||
.option('--noProxy <comma_separated_list_of_hostnames>', 'Comma separated list of hostnames that do not require a proxy to get reached, even if one is specified.', proxySettings.noProxy)
|
||||
.action(async (identifier, cmd: { env: string; testNamePattern: string; reporter: TestReporter; bail: boolean; keepFile: boolean; disableCertValidation: boolean; ci: boolean; httpsProxy?: string; httpProxy?: string; noProxy?: string }) => {
|
||||
const globals: GlobalOptions = program.optsWithGlobals();
|
||||
const commandOptions = { ...globals, ...cmd };
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
.option(
|
||||
'--noProxy <comma_separated_list_of_hostnames>',
|
||||
'Comma separated list of hostnames that do not require a proxy to get reached, even if one is specified.',
|
||||
proxySettings.noProxy,
|
||||
)
|
||||
.action(
|
||||
async (
|
||||
identifier,
|
||||
cmd: {
|
||||
env: string;
|
||||
testNamePattern: string;
|
||||
reporter: TestReporter;
|
||||
bail: boolean;
|
||||
keepFile: boolean;
|
||||
disableCertValidation: boolean;
|
||||
ci: boolean;
|
||||
httpsProxy?: string;
|
||||
httpProxy?: string;
|
||||
noProxy?: string;
|
||||
},
|
||||
) => {
|
||||
const globals: GlobalOptions = program.optsWithGlobals();
|
||||
const commandOptions = { ...globals, ...cmd };
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
|
||||
const options = {
|
||||
...__configFile?.options || {},
|
||||
...commandOptions,
|
||||
};
|
||||
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
|
||||
options.ci && logger.setReporters([new BasicReporter()]);
|
||||
options.printOptions && logger.log('Loaded options', options, '\n');
|
||||
const useLocalAppData = !options.workingDir && !options.exportFile;
|
||||
let pathToSearch = '';
|
||||
if (useLocalAppData) {
|
||||
logger.warn('No working directory or export file provided, using local app data directory.');
|
||||
pathToSearch = localAppDir;
|
||||
} else {
|
||||
pathToSearch = path.resolve(options.workingDir || process.cwd(), options.exportFile || '');
|
||||
}
|
||||
if (options.reporter && !reporterTypes.find(r => r === options.reporter)) {
|
||||
logger.fatal(`Reporter "${options.reporter}" not recognized. Options are [${reporterTypes.join(', ')}].`);
|
||||
return process.exit(1);
|
||||
}
|
||||
const options = {
|
||||
...(__configFile?.options || {}),
|
||||
...commandOptions,
|
||||
};
|
||||
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
|
||||
options.ci && logger.setReporters([new BasicReporter()]);
|
||||
options.printOptions && logger.log('Loaded options', options, '\n');
|
||||
const useLocalAppData = !options.workingDir && !options.exportFile;
|
||||
let pathToSearch = '';
|
||||
if (useLocalAppData) {
|
||||
logger.warn('No working directory or export file provided, using local app data directory.');
|
||||
pathToSearch = localAppDir;
|
||||
} else {
|
||||
pathToSearch = path.resolve(options.workingDir || process.cwd(), options.exportFile || '');
|
||||
}
|
||||
if (options.reporter && !reporterTypes.find(r => r === options.reporter)) {
|
||||
logger.fatal(`Reporter "${options.reporter}" not recognized. Options are [${reporterTypes.join(', ')}].`);
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
const db = await loadDb({
|
||||
pathToSearch,
|
||||
filterTypes: [],
|
||||
});
|
||||
|
||||
// Find suites
|
||||
const suites = identifier ? loadTestSuites(db, identifier) : await promptTestSuites(db, !!options.ci);
|
||||
|
||||
if (!suites.length) {
|
||||
logger.fatal('No test suites found; cannot run tests.', identifier);
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
// Find environment
|
||||
const workspaceId = suites[0].parentId;
|
||||
|
||||
const environment = options.env ? loadEnvironment(db, workspaceId, options.env) : await promptEnvironment(db, !!options.ci, workspaceId);
|
||||
|
||||
if (!environment) {
|
||||
logger.fatal('No environment identified; cannot run tests without a valid environment.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
const transientVariables = {
|
||||
...init(),
|
||||
_id: uuidv4(),
|
||||
type: EnvironmentType,
|
||||
parentId: '',
|
||||
modified: 0,
|
||||
created: Date.now(),
|
||||
name: 'Transient Variables',
|
||||
data: {},
|
||||
};
|
||||
|
||||
const proxyOptions: {
|
||||
proxyEnabled: boolean;
|
||||
httpProxy?: string;
|
||||
httpsProxy?: string;
|
||||
noProxy?: string;
|
||||
} = {
|
||||
proxyEnabled: Boolean(options.httpProxy || options.httpsProxy),
|
||||
httpProxy: options.httpProxy,
|
||||
httpsProxy: options.httpsProxy,
|
||||
noProxy: options.noProxy,
|
||||
};
|
||||
|
||||
try {
|
||||
const sendRequest = await getSendRequestCallbackMemDb(environment._id, db, transientVariables, { validateSSL: !options.disableCertValidation, ...proxyOptions });
|
||||
// Generate test file
|
||||
const testFileContents = generate(suites.map(suite => ({
|
||||
name: suite.name,
|
||||
suites: [],
|
||||
tests: db.UnitTest.filter(test => test.parentId === suite._id)
|
||||
.sort((a, b) => a.metaSortKey - b.metaSortKey)
|
||||
.map(({ name, code, requestId }) => ({ name, code, defaultRequestId: requestId })),
|
||||
})));
|
||||
|
||||
const runTestPromise = runTestsCli(testFileContents, {
|
||||
reporter: options.reporter,
|
||||
bail: options.bail,
|
||||
keepFile: options.keepFile,
|
||||
sendRequest,
|
||||
testFilter: options.testNamePattern,
|
||||
const db = await loadDb({
|
||||
pathToSearch,
|
||||
filterTypes: [],
|
||||
});
|
||||
|
||||
// TODO: is this necessary?
|
||||
const success = options.verbose ? await runTestPromise : await noConsoleLog(() => runTestPromise);
|
||||
return process.exit(success ? 0 : 1);
|
||||
} catch (error) {
|
||||
logErrorAndExit(error);
|
||||
}
|
||||
return process.exit(1);
|
||||
});
|
||||
// Find suites
|
||||
const suites = identifier ? loadTestSuites(db, identifier) : await promptTestSuites(db, !!options.ci);
|
||||
|
||||
run.command('collection [identifier]')
|
||||
.description('Run Insomnia request collection, identifier can be a workspace id')
|
||||
.option('-t, --requestNamePattern <regex>', 'run requests that match the regex', '')
|
||||
.option('-i, --item <requestid>', 'request or folder id to run', collect, [])
|
||||
.option('-e, --env <identifier>', 'environment to use', '')
|
||||
.option('-g, --globals <identifier>', 'global environment to use (filepath or id)', '')
|
||||
.option('--delay-request <duration>', 'milliseconds to delay between requests', '0')
|
||||
.option('--env-var <key=value>', 'override environment variables', collect, [])
|
||||
.option('-n, --iteration-count <count>', 'number of times to repeat', '1')
|
||||
.option('-d, --iteration-data <path/url>', 'file path or url (JSON or CSV)', '')
|
||||
.option('-r, --reporter <reporter>', `reporter to use, options are [${reporterTypes.join(', ')}]`, defaultReporter)
|
||||
.option('-b, --bail', 'abort ("bail") after first non-200 response', false)
|
||||
.option('--disableCertValidation', 'disable certificate validation for requests with SSL', false)
|
||||
.option('--httpsProxy <proxy>', 'URL for the proxy server for https requests.', proxySettings.httpsProxy)
|
||||
.option('--httpProxy <proxy>', 'URL for the proxy server for http requests.', proxySettings.httpProxy)
|
||||
.option('--noProxy <comma_separated_list_of_hostnames>', 'Comma separated list of hostnames that do not require a proxy to get reached, even if one is specified.', proxySettings.noProxy)
|
||||
.action(async (identifier, cmd: { env: string; globals: string; disableCertValidation: boolean; requestNamePattern: string; bail: boolean; item: string[]; delayRequest: string; iterationCount: string; iterationData: string; envVar: string[]; httpsProxy?: string; httpProxy?: string; noProxy?: string }) => {
|
||||
const globals: { config: string; workingDir: string; exportFile: string; ci: boolean; printOptions: boolean; verbose: boolean } = program.optsWithGlobals();
|
||||
|
||||
const commandOptions = { ...globals, ...cmd };
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
|
||||
const options = {
|
||||
reporter: defaultReporter,
|
||||
...__configFile?.options || {},
|
||||
...commandOptions,
|
||||
};
|
||||
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
|
||||
options.ci && logger.setReporters([new BasicReporter()]);
|
||||
options.printOptions && logger.log('Loaded options', options, '\n');
|
||||
let pathToSearch = '';
|
||||
const useLocalAppData = !options.workingDir && !options.exportFile;
|
||||
if (useLocalAppData) {
|
||||
logger.warn('No working directory or export file provided, using local app data directory.');
|
||||
pathToSearch = localAppDir;
|
||||
} else {
|
||||
pathToSearch = path.resolve(options.workingDir || process.cwd(), options.exportFile || '');
|
||||
}
|
||||
|
||||
const db = await loadDb({
|
||||
pathToSearch,
|
||||
filterTypes: [],
|
||||
});
|
||||
|
||||
const workspace = await getWorkspaceOrFallback(db, identifier, options.ci);
|
||||
if (!workspace) {
|
||||
logger.fatal('No workspace found in the provided data store or fallbacks.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
// Find environment
|
||||
const workspaceId = workspace._id;
|
||||
// get global env by id from nedb or gitstore, or first element from file
|
||||
// smell: mutates db
|
||||
if (options.globals) {
|
||||
const isGlobalFile = await isFile(options.globals);
|
||||
if (!isGlobalFile) {
|
||||
const globalEnv = db.Environment.find(env => matchIdIsh(env, options.globals) || env.name === options.globals);
|
||||
if (!globalEnv) {
|
||||
logger.warn('No global environment found with id or name', options.globals);
|
||||
return process.exit(1);
|
||||
}
|
||||
if (globalEnv) {
|
||||
// attach this global env to the workspace
|
||||
db.WorkspaceMeta = [{ activeGlobalEnvironmentId: globalEnv._id, _id: `wrkm_${uuidv4().replace(/-/g, '')}`, type: 'WorkspaceMeta', parentId: workspaceId, name: '' }];
|
||||
}
|
||||
if (!suites.length) {
|
||||
logger.fatal('No test suites found; cannot run tests.', identifier);
|
||||
return process.exit(1);
|
||||
}
|
||||
if (isGlobalFile) {
|
||||
const globalEnvDb = await insomniaExportAdapter(options.globals, ['Environment']);
|
||||
logger.trace('--globals is a file path, loading from file, global env selection is not currently supported, taking first element');
|
||||
const firstGlobalEnv = globalEnvDb?.Environment?.[0];
|
||||
if (!firstGlobalEnv) {
|
||||
logger.warn('No environments found in the file', options.globals);
|
||||
return process.exit(1);
|
||||
}
|
||||
// mutate db to include the global envs
|
||||
db.Environment = [...db.Environment, ...globalEnvDb.Environment];
|
||||
// attach this global env to the workspace
|
||||
db.WorkspaceMeta = [{ activeGlobalEnvironmentId: firstGlobalEnv._id, _id: `wrkm_${uuidv4().replace(/-/g, '')}`, type: 'WorkspaceMeta', parentId: workspaceId, name: '' }];
|
||||
|
||||
// Find environment
|
||||
const workspaceId = suites[0].parentId;
|
||||
|
||||
const environment = options.env
|
||||
? loadEnvironment(db, workspaceId, options.env)
|
||||
: await promptEnvironment(db, !!options.ci, workspaceId);
|
||||
|
||||
if (!environment) {
|
||||
logger.fatal('No environment identified; cannot run tests without a valid environment.');
|
||||
return process.exit(1);
|
||||
}
|
||||
}
|
||||
const environment = options.env ? loadEnvironment(db, workspaceId, options.env) : await promptEnvironment(db, !!options.ci, workspaceId);
|
||||
if (!environment) {
|
||||
logger.fatal('No environment identified; cannot run requests without a valid environment.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
let requestsToRun = getRequestsToRunFromListOrWorkspace(db, workspaceId, options.item);
|
||||
if (options.requestNamePattern) {
|
||||
requestsToRun = requestsToRun.filter(req => req.name.match(new RegExp(options.requestNamePattern)));
|
||||
}
|
||||
if (!requestsToRun.length) {
|
||||
logger.fatal('No requests identified; nothing to run.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
// sort requests
|
||||
const isRunningFolder = options.item.length === 1 && options.item[0].startsWith('fld_');
|
||||
if (options.item.length && !isRunningFolder) {
|
||||
const requestOrder = new Map<string, number>();
|
||||
options.item.forEach((reqId: string, order: number) => requestOrder.set(reqId, order + 1));
|
||||
requestsToRun = requestsToRun.sort((a, b) => (requestOrder.get(a._id) || requestsToRun.length) - (requestOrder.get(b._id) || requestsToRun.length));
|
||||
} else {
|
||||
const getAllParentGroupSortKeys = (doc: BaseModel): number[] => {
|
||||
const parentFolder = db.RequestGroup.find(rg => rg._id === doc.parentId);
|
||||
if (parentFolder === undefined) {
|
||||
return [];
|
||||
}
|
||||
return [(parentFolder as RequestGroup).metaSortKey, ...getAllParentGroupSortKeys(parentFolder)];
|
||||
};
|
||||
|
||||
// sort by metaSortKey (manual sorting order)
|
||||
requestsToRun = requestsToRun.map(request => {
|
||||
const allParentGroupSortKeys = getAllParentGroupSortKeys(request as BaseModel);
|
||||
|
||||
return {
|
||||
ancestors: allParentGroupSortKeys.reverse(),
|
||||
request,
|
||||
};
|
||||
}).sort((a, b) => {
|
||||
let compareResult = 0;
|
||||
|
||||
let i = 0, j = 0;
|
||||
for (; i < a.ancestors.length && j < b.ancestors.length; i++, j++) {
|
||||
const aSortKey = a.ancestors[i];
|
||||
const bSortKey = b.ancestors[j];
|
||||
if (aSortKey < bSortKey) {
|
||||
compareResult = -1;
|
||||
break;
|
||||
} else if (aSortKey > bSortKey) {
|
||||
compareResult = 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (compareResult !== 0) {
|
||||
return compareResult;
|
||||
}
|
||||
|
||||
if (a.ancestors.length === b.ancestors.length) {
|
||||
return a.request.metaSortKey - b.request.metaSortKey;
|
||||
}
|
||||
|
||||
if (i < a.ancestors.length) {
|
||||
return a.ancestors[i] - b.request.metaSortKey;
|
||||
} else if (j < b.ancestors.length) {
|
||||
return a.request.metaSortKey - b.ancestors[j];
|
||||
}
|
||||
return 0;
|
||||
}).map(({ request }) => request);
|
||||
}
|
||||
|
||||
try {
|
||||
const iterationCount = parseInt(options.iterationCount, 10);
|
||||
|
||||
const iterationData = await pathToIterationData(options.iterationData, options.envVar);
|
||||
const transientVariables = {
|
||||
...init(),
|
||||
_id: uuidv4(),
|
||||
@@ -654,62 +482,346 @@ export const go = (args?: string[]) => {
|
||||
noProxy: options.noProxy,
|
||||
};
|
||||
|
||||
const sendRequest = await getSendRequestCallbackMemDb(environment._id, db, transientVariables, { validateSSL: !options.disableCertValidation, ...proxyOptions }, iterationData, iterationCount);
|
||||
let success = true;
|
||||
for (let i = 0; i < iterationCount; i++) {
|
||||
let reqIndex = 0;
|
||||
while (reqIndex < requestsToRun.length) {
|
||||
const req = requestsToRun[reqIndex];
|
||||
try {
|
||||
const sendRequest = await getSendRequestCallbackMemDb(environment._id, db, transientVariables, {
|
||||
validateSSL: !options.disableCertValidation,
|
||||
...proxyOptions,
|
||||
});
|
||||
// Generate test file
|
||||
const testFileContents = generate(
|
||||
suites.map(suite => ({
|
||||
name: suite.name,
|
||||
suites: [],
|
||||
tests: db.UnitTest.filter(test => test.parentId === suite._id)
|
||||
.sort((a, b) => a.metaSortKey - b.metaSortKey)
|
||||
.map(({ name, code, requestId }) => ({ name, code, defaultRequestId: requestId })),
|
||||
})),
|
||||
);
|
||||
|
||||
if (options.bail && !success) {
|
||||
return;
|
||||
const runTestPromise = runTestsCli(testFileContents, {
|
||||
reporter: options.reporter,
|
||||
bail: options.bail,
|
||||
keepFile: options.keepFile,
|
||||
sendRequest,
|
||||
testFilter: options.testNamePattern,
|
||||
});
|
||||
|
||||
// TODO: is this necessary?
|
||||
const success = options.verbose ? await runTestPromise : await noConsoleLog(() => runTestPromise);
|
||||
return process.exit(success ? 0 : 1);
|
||||
} catch (error) {
|
||||
logErrorAndExit(error);
|
||||
}
|
||||
return process.exit(1);
|
||||
},
|
||||
);
|
||||
|
||||
run
|
||||
.command('collection [identifier]')
|
||||
.description('Run Insomnia request collection, identifier can be a workspace id')
|
||||
.option('-t, --requestNamePattern <regex>', 'run requests that match the regex', '')
|
||||
.option('-i, --item <requestid>', 'request or folder id to run', collect, [])
|
||||
.option('-e, --env <identifier>', 'environment to use', '')
|
||||
.option('-g, --globals <identifier>', 'global environment to use (filepath or id)', '')
|
||||
.option('--delay-request <duration>', 'milliseconds to delay between requests', '0')
|
||||
.option('--env-var <key=value>', 'override environment variables', collect, [])
|
||||
.option('-n, --iteration-count <count>', 'number of times to repeat', '1')
|
||||
.option('-d, --iteration-data <path/url>', 'file path or url (JSON or CSV)', '')
|
||||
.option('-r, --reporter <reporter>', `reporter to use, options are [${reporterTypes.join(', ')}]`, defaultReporter)
|
||||
.option('-b, --bail', 'abort ("bail") after first non-200 response', false)
|
||||
.option('--disableCertValidation', 'disable certificate validation for requests with SSL', false)
|
||||
.option('--httpsProxy <proxy>', 'URL for the proxy server for https requests.', proxySettings.httpsProxy)
|
||||
.option('--httpProxy <proxy>', 'URL for the proxy server for http requests.', proxySettings.httpProxy)
|
||||
.option(
|
||||
'--noProxy <comma_separated_list_of_hostnames>',
|
||||
'Comma separated list of hostnames that do not require a proxy to get reached, even if one is specified.',
|
||||
proxySettings.noProxy,
|
||||
)
|
||||
.action(
|
||||
async (
|
||||
identifier,
|
||||
cmd: {
|
||||
env: string;
|
||||
globals: string;
|
||||
disableCertValidation: boolean;
|
||||
requestNamePattern: string;
|
||||
bail: boolean;
|
||||
item: string[];
|
||||
delayRequest: string;
|
||||
iterationCount: string;
|
||||
iterationData: string;
|
||||
envVar: string[];
|
||||
httpsProxy?: string;
|
||||
httpProxy?: string;
|
||||
noProxy?: string;
|
||||
},
|
||||
) => {
|
||||
const globals: {
|
||||
config: string;
|
||||
workingDir: string;
|
||||
exportFile: string;
|
||||
ci: boolean;
|
||||
printOptions: boolean;
|
||||
verbose: boolean;
|
||||
} = program.optsWithGlobals();
|
||||
|
||||
const commandOptions = { ...globals, ...cmd };
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
|
||||
const options = {
|
||||
reporter: defaultReporter,
|
||||
...(__configFile?.options || {}),
|
||||
...commandOptions,
|
||||
};
|
||||
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
|
||||
options.ci && logger.setReporters([new BasicReporter()]);
|
||||
options.printOptions && logger.log('Loaded options', options, '\n');
|
||||
let pathToSearch = '';
|
||||
const useLocalAppData = !options.workingDir && !options.exportFile;
|
||||
if (useLocalAppData) {
|
||||
logger.warn('No working directory or export file provided, using local app data directory.');
|
||||
pathToSearch = localAppDir;
|
||||
} else {
|
||||
pathToSearch = path.resolve(options.workingDir || process.cwd(), options.exportFile || '');
|
||||
}
|
||||
|
||||
const db = await loadDb({
|
||||
pathToSearch,
|
||||
filterTypes: [],
|
||||
});
|
||||
|
||||
const workspace = await getWorkspaceOrFallback(db, identifier, options.ci);
|
||||
if (!workspace) {
|
||||
logger.fatal('No workspace found in the provided data store or fallbacks.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
// Find environment
|
||||
const workspaceId = workspace._id;
|
||||
// get global env by id from nedb or gitstore, or first element from file
|
||||
// smell: mutates db
|
||||
if (options.globals) {
|
||||
const isGlobalFile = await isFile(options.globals);
|
||||
if (!isGlobalFile) {
|
||||
const globalEnv = db.Environment.find(
|
||||
env => matchIdIsh(env, options.globals) || env.name === options.globals,
|
||||
);
|
||||
if (!globalEnv) {
|
||||
logger.warn('No global environment found with id or name', options.globals);
|
||||
return process.exit(1);
|
||||
}
|
||||
logger.log(`Running request: ${req.name} ${req._id}`);
|
||||
const res = await sendRequest(req._id, i);
|
||||
if (!res) {
|
||||
logger.error('Timed out while running script');
|
||||
success = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
const timelineString = await readFile(res.timelinePath, 'utf8');
|
||||
const appendNewLineIfNeeded = (str: string) => str.endsWith('\n') ? str : str + '\n';
|
||||
const timeline = deserializeNDJSON(timelineString).map(e => appendNewLineIfNeeded(e.value)).join('');
|
||||
logger.trace(timeline);
|
||||
if (res.testResults?.length) {
|
||||
console.log(`
|
||||
Test results:`);
|
||||
console.log(logTestResult(options.reporter, res.testResults));
|
||||
const hasFailedTests = res.testResults.some(t => t.status === 'failed');
|
||||
if (hasFailedTests) {
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
await new Promise(r => setTimeout(r, parseInt(options.delayRequest, 10)));
|
||||
|
||||
if (res.nextRequestIdOrName) {
|
||||
const offset = getNextRequestOffset(requestsToRun.slice(reqIndex), res.nextRequestIdOrName);
|
||||
reqIndex += offset;
|
||||
if (reqIndex < requestsToRun.length) {
|
||||
console.log(`The next request has been pointed to "${requestsToRun[reqIndex].name}"`);
|
||||
} else {
|
||||
console.log(`No request has been found for "${res.nextRequestIdOrName}", ending the iteration`);
|
||||
}
|
||||
} else {
|
||||
reqIndex++;
|
||||
if (globalEnv) {
|
||||
// attach this global env to the workspace
|
||||
db.WorkspaceMeta = [
|
||||
{
|
||||
activeGlobalEnvironmentId: globalEnv._id,
|
||||
_id: `wrkm_${uuidv4().replace(/-/g, '')}`,
|
||||
type: 'WorkspaceMeta',
|
||||
parentId: workspaceId,
|
||||
name: '',
|
||||
},
|
||||
];
|
||||
}
|
||||
}
|
||||
if (isGlobalFile) {
|
||||
const globalEnvDb = await insomniaExportAdapter(options.globals, ['Environment']);
|
||||
logger.trace(
|
||||
'--globals is a file path, loading from file, global env selection is not currently supported, taking first element',
|
||||
);
|
||||
const firstGlobalEnv = globalEnvDb?.Environment?.[0];
|
||||
if (!firstGlobalEnv) {
|
||||
logger.warn('No environments found in the file', options.globals);
|
||||
return process.exit(1);
|
||||
}
|
||||
// mutate db to include the global envs
|
||||
db.Environment = [...db.Environment, ...globalEnvDb.Environment];
|
||||
// attach this global env to the workspace
|
||||
db.WorkspaceMeta = [
|
||||
{
|
||||
activeGlobalEnvironmentId: firstGlobalEnv._id,
|
||||
_id: `wrkm_${uuidv4().replace(/-/g, '')}`,
|
||||
type: 'WorkspaceMeta',
|
||||
parentId: workspaceId,
|
||||
name: '',
|
||||
},
|
||||
];
|
||||
}
|
||||
}
|
||||
const environment = options.env
|
||||
? loadEnvironment(db, workspaceId, options.env)
|
||||
: await promptEnvironment(db, !!options.ci, workspaceId);
|
||||
if (!environment) {
|
||||
logger.fatal('No environment identified; cannot run requests without a valid environment.');
|
||||
return process.exit(1);
|
||||
}
|
||||
return process.exit(success ? 0 : 1);
|
||||
} catch (error) {
|
||||
logErrorAndExit(error);
|
||||
}
|
||||
return process.exit(1);
|
||||
});
|
||||
|
||||
program.command('lint')
|
||||
.description('Lint a yaml file in the workingDir or the provided file path (with .spectral.yml) or a spec in an Insomnia database directory')
|
||||
let requestsToRun = getRequestsToRunFromListOrWorkspace(db, workspaceId, options.item);
|
||||
if (options.requestNamePattern) {
|
||||
requestsToRun = requestsToRun.filter(req => req.name.match(new RegExp(options.requestNamePattern)));
|
||||
}
|
||||
if (!requestsToRun.length) {
|
||||
logger.fatal('No requests identified; nothing to run.');
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
// sort requests
|
||||
const isRunningFolder = options.item.length === 1 && options.item[0].startsWith('fld_');
|
||||
if (options.item.length && !isRunningFolder) {
|
||||
const requestOrder = new Map<string, number>();
|
||||
options.item.forEach((reqId: string, order: number) => requestOrder.set(reqId, order + 1));
|
||||
requestsToRun = requestsToRun.sort(
|
||||
(a, b) =>
|
||||
(requestOrder.get(a._id) || requestsToRun.length) - (requestOrder.get(b._id) || requestsToRun.length),
|
||||
);
|
||||
} else {
|
||||
const getAllParentGroupSortKeys = (doc: BaseModel): number[] => {
|
||||
const parentFolder = db.RequestGroup.find(rg => rg._id === doc.parentId);
|
||||
if (parentFolder === undefined) {
|
||||
return [];
|
||||
}
|
||||
return [(parentFolder as RequestGroup).metaSortKey, ...getAllParentGroupSortKeys(parentFolder)];
|
||||
};
|
||||
|
||||
// sort by metaSortKey (manual sorting order)
|
||||
requestsToRun = requestsToRun
|
||||
.map(request => {
|
||||
const allParentGroupSortKeys = getAllParentGroupSortKeys(request as BaseModel);
|
||||
|
||||
return {
|
||||
ancestors: allParentGroupSortKeys.reverse(),
|
||||
request,
|
||||
};
|
||||
})
|
||||
.sort((a, b) => {
|
||||
let compareResult = 0;
|
||||
|
||||
let i = 0,
|
||||
j = 0;
|
||||
for (; i < a.ancestors.length && j < b.ancestors.length; i++, j++) {
|
||||
const aSortKey = a.ancestors[i];
|
||||
const bSortKey = b.ancestors[j];
|
||||
if (aSortKey < bSortKey) {
|
||||
compareResult = -1;
|
||||
break;
|
||||
} else if (aSortKey > bSortKey) {
|
||||
compareResult = 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (compareResult !== 0) {
|
||||
return compareResult;
|
||||
}
|
||||
|
||||
if (a.ancestors.length === b.ancestors.length) {
|
||||
return a.request.metaSortKey - b.request.metaSortKey;
|
||||
}
|
||||
|
||||
if (i < a.ancestors.length) {
|
||||
return a.ancestors[i] - b.request.metaSortKey;
|
||||
} else if (j < b.ancestors.length) {
|
||||
return a.request.metaSortKey - b.ancestors[j];
|
||||
}
|
||||
return 0;
|
||||
})
|
||||
.map(({ request }) => request);
|
||||
}
|
||||
|
||||
try {
|
||||
const iterationCount = parseInt(options.iterationCount, 10);
|
||||
|
||||
const iterationData = await pathToIterationData(options.iterationData, options.envVar);
|
||||
const transientVariables = {
|
||||
...init(),
|
||||
_id: uuidv4(),
|
||||
type: EnvironmentType,
|
||||
parentId: '',
|
||||
modified: 0,
|
||||
created: Date.now(),
|
||||
name: 'Transient Variables',
|
||||
data: {},
|
||||
};
|
||||
|
||||
const proxyOptions: {
|
||||
proxyEnabled: boolean;
|
||||
httpProxy?: string;
|
||||
httpsProxy?: string;
|
||||
noProxy?: string;
|
||||
} = {
|
||||
proxyEnabled: Boolean(options.httpProxy || options.httpsProxy),
|
||||
httpProxy: options.httpProxy,
|
||||
httpsProxy: options.httpsProxy,
|
||||
noProxy: options.noProxy,
|
||||
};
|
||||
|
||||
const sendRequest = await getSendRequestCallbackMemDb(
|
||||
environment._id,
|
||||
db,
|
||||
transientVariables,
|
||||
{ validateSSL: !options.disableCertValidation, ...proxyOptions },
|
||||
iterationData,
|
||||
iterationCount,
|
||||
);
|
||||
let success = true;
|
||||
for (let i = 0; i < iterationCount; i++) {
|
||||
let reqIndex = 0;
|
||||
while (reqIndex < requestsToRun.length) {
|
||||
const req = requestsToRun[reqIndex];
|
||||
|
||||
if (options.bail && !success) {
|
||||
return;
|
||||
}
|
||||
logger.log(`Running request: ${req.name} ${req._id}`);
|
||||
const res = await sendRequest(req._id, i);
|
||||
if (!res) {
|
||||
logger.error('Timed out while running script');
|
||||
success = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
const timelineString = await readFile(res.timelinePath, 'utf8');
|
||||
const appendNewLineIfNeeded = (str: string) => (str.endsWith('\n') ? str : str + '\n');
|
||||
const timeline = deserializeNDJSON(timelineString)
|
||||
.map(e => appendNewLineIfNeeded(e.value))
|
||||
.join('');
|
||||
logger.trace(timeline);
|
||||
if (res.testResults?.length) {
|
||||
console.log(`
|
||||
Test results:`);
|
||||
console.log(logTestResult(options.reporter, res.testResults));
|
||||
const hasFailedTests = res.testResults.some(t => t.status === 'failed');
|
||||
if (hasFailedTests) {
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
await new Promise(r => setTimeout(r, parseInt(options.delayRequest, 10)));
|
||||
|
||||
if (res.nextRequestIdOrName) {
|
||||
const offset = getNextRequestOffset(requestsToRun.slice(reqIndex), res.nextRequestIdOrName);
|
||||
reqIndex += offset;
|
||||
if (reqIndex < requestsToRun.length) {
|
||||
console.log(`The next request has been pointed to "${requestsToRun[reqIndex].name}"`);
|
||||
} else {
|
||||
console.log(`No request has been found for "${res.nextRequestIdOrName}", ending the iteration`);
|
||||
}
|
||||
} else {
|
||||
reqIndex++;
|
||||
}
|
||||
}
|
||||
}
|
||||
return process.exit(success ? 0 : 1);
|
||||
} catch (error) {
|
||||
logErrorAndExit(error);
|
||||
}
|
||||
return process.exit(1);
|
||||
},
|
||||
);
|
||||
|
||||
program
|
||||
.command('lint')
|
||||
.description(
|
||||
'Lint a yaml file in the workingDir or the provided file path (with .spectral.yml) or a spec in an Insomnia database directory',
|
||||
)
|
||||
.command('spec [identifier]')
|
||||
.description('Lint an API Specification, identifier can be an API Spec id or a file path')
|
||||
.action(async identifier => {
|
||||
@@ -717,17 +829,18 @@ Test results:`);
|
||||
const commandOptions = globals;
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
const options = {
|
||||
...__configFile?.options || {},
|
||||
...(__configFile?.options || {}),
|
||||
...commandOptions,
|
||||
};
|
||||
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
|
||||
options.ci && logger.setReporters([new BasicReporter()]);
|
||||
// Assert identifier is a file
|
||||
const identifierAsAbsPath = identifier && getAbsoluteFilePath({ workingDir: options.workingDir, file: identifier });
|
||||
const identifierAsAbsPath =
|
||||
identifier && getAbsoluteFilePath({ workingDir: options.workingDir, file: identifier });
|
||||
let isIdentiferAFile = false;
|
||||
try {
|
||||
isIdentiferAFile = identifier && (await fs.promises.stat(identifierAsAbsPath)).isFile();
|
||||
} catch (err) { }
|
||||
} catch (err) {}
|
||||
const pathToSearch = '';
|
||||
let specContent;
|
||||
let rulesetFileName;
|
||||
@@ -763,7 +876,9 @@ Test results:`);
|
||||
return process.exit(1);
|
||||
});
|
||||
|
||||
program.command('export').description('Export data from insomnia models')
|
||||
program
|
||||
.command('export')
|
||||
.description('Export data from insomnia models')
|
||||
.command('spec [identifier]')
|
||||
.description('Export an API Specification to a file, identifier can be an API Spec id')
|
||||
.option('-o, --output <path>', 'save the generated config to a file', '')
|
||||
@@ -773,7 +888,7 @@ Test results:`);
|
||||
const commandOptions = { ...globals, ...cmd };
|
||||
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);
|
||||
const options = {
|
||||
...__configFile?.options || {},
|
||||
...(__configFile?.options || {}),
|
||||
...commandOptions,
|
||||
};
|
||||
options.printOptions && logger.log('Loaded options', options, '\n');
|
||||
@@ -788,7 +903,8 @@ Test results:`);
|
||||
specContent,
|
||||
skipAnnotations: options.skipAnnotations,
|
||||
});
|
||||
const outputPath = options.output && getAbsoluteFilePath({ workingDir: options.workingDir, file: options.output });
|
||||
const outputPath =
|
||||
options.output && getAbsoluteFilePath({ workingDir: options.workingDir, file: options.output });
|
||||
if (!outputPath) {
|
||||
logger.log(toExport);
|
||||
return process.exit(0);
|
||||
@@ -803,7 +919,8 @@ Test results:`);
});

// Add script base command
program.command('script <script-name>')
program
.command('script <script-name>')
.description('Run scripts defined in .insorc')
.allowUnknownOption()
.action(async (scriptName: string, cmd) => {
@@ -812,7 +929,7 @@ Test results:`);
const __configFile = await tryToReadInsoConfigFile(commandOptions.config, commandOptions.workingDir);

const options = {
...__configFile?.options || {},
...(__configFile?.options || {}),
...commandOptions,
};
logger.level = options.verbose ? LogLevel.Verbose : LogLevel.Info;
@@ -822,7 +939,10 @@ Test results:`);
const scriptTask = __configFile?.scripts?.[scriptName];

if (!scriptTask) {
logger.fatal(`Could not find inso script "${scriptName}" in the config file.`, Object.keys(__configFile?.scripts || {}));
logger.fatal(
`Could not find inso script "${scriptName}" in the config file.`,
Object.keys(__configFile?.scripts || {}),
);
return process.exit(1);
}

@@ -833,9 +953,7 @@ Test results:`);

// Get args after script name
const passThroughArgs = program.args.slice(program.args.indexOf(scriptName) + 1);
const scriptArgs: string[] = parseArgsStringToArgv(
`self ${scriptTask} ${passThroughArgs.join(' ')}`,
);
const scriptArgs: string[] = parseArgsStringToArgv(`self ${scriptTask} ${passThroughArgs.join(' ')}`);

logger.debug(`>> ${scriptArgs.slice(1).join(' ')}`);

@@ -845,10 +963,7 @@ Test results:`);
program.parseAsync(args || process.argv).catch(logErrorAndExit);
};

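
The script command resolves `scriptName` against the `scripts` map in the config file, splices the user's extra arguments onto the stored command line, and feeds the result back into `program.parseAsync`. A small sketch of the argv expansion, assuming `parseArgsStringToArgv` comes from the `string-argv` package:

    import { parseArgsStringToArgv } from 'string-argv';

    const scriptTask = 'lint spec';                 // hypothetical entry from .insorc scripts
    const passThroughArgs = ['--ci'];               // args the user typed after the script name
    const scriptArgs = parseArgsStringToArgv(`self ${scriptTask} ${passThroughArgs.join(' ')}`);
    // scriptArgs === ['self', 'lint', 'spec', '--ci'] – 'self' stands in for argv[0]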
const getNextRequestOffset = (
leftRequestsToRun: Request[],
nextRequestIdOrName: string
) => {
const getNextRequestOffset = (leftRequestsToRun: Request[], nextRequestIdOrName: string) => {
const idMatchOffset = leftRequestsToRun.findIndex(req => req._id.trim() === nextRequestIdOrName.trim());
if (idMatchOffset >= 0) {
return idMatchOffset;

@@ -47,5 +47,4 @@ paths:
description: OK
`);
});

});

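
Only the id-matching branch of `getNextRequestOffset` is visible in this hunk; presumably the function falls back to matching by request name so that `insomnia.setNextRequest()` can take either form. A minimal sketch under that assumption:

    interface Request { _id: string; name: string }

    // Sketch only – the real implementation may differ after the id check.
    const nextRequestOffset = (leftRequestsToRun: Request[], nextRequestIdOrName: string) => {
      const target = nextRequestIdOrName.trim();
      const idMatchOffset = leftRequestsToRun.findIndex(req => req._id.trim() === target);
      if (idMatchOffset >= 0) {
        return idMatchOffset;
      }
      return leftRequestsToRun.findIndex(req => req.name.trim() === target);
    };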
@@ -5,10 +5,7 @@ import YAML from 'yaml';

import { InsoError } from '../cli';

export async function writeFileWithCliOptions(
outputPath: string,
contents: string,
): Promise<string> {
export async function writeFileWithCliOptions(outputPath: string, contents: string): Promise<string> {
try {
await mkdir(path.dirname(outputPath), { recursive: true });
await writeFile(outputPath, contents);
@@ -19,7 +16,13 @@ export async function writeFileWithCliOptions(
}
}

export async function exportSpecification({ specContent, skipAnnotations }: { specContent: string; skipAnnotations: boolean }) {
export async function exportSpecification({
specContent,
skipAnnotations,
}: {
specContent: string;
skipAnnotations: boolean;
}) {
if (!skipAnnotations) {
return specContent;
}

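
Taken together, the two helpers above give the export action its shape: `exportSpecification` returns the (optionally annotation-stripped) spec text, and `writeFileWithCliOptions` creates the target directory and writes the file, resolving to the path it wrote. A hedged usage sketch with placeholder values:

    const specContent = 'openapi: 3.0.2';          // placeholder spec text
    const outputPath = '/tmp/spec.yaml';           // hypothetical output path

    const toExport = await exportSpecification({ specContent, skipAnnotations: true });
    if (outputPath) {
      const savedPath = await writeFileWithCliOptions(outputPath, toExport);
      console.log(`Specification exported to "${savedPath}".`);
    } else {
      console.log(toExport);
    }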
@@ -4,7 +4,6 @@ import { describe, expect, it } from 'vitest';
import { lintSpecification } from './lint-specification';

describe('lint specification', () => {

const specContent = `openapi: '3.0.2'
info:
title: Sample Spec
@@ -66,7 +65,8 @@ paths:
responses:
'200':
description: OK
`, rulesetFileName,
`,
rulesetFileName,
});
expect(result.isValid).toBe(true);
});

@@ -1,6 +1,6 @@
import type { RulesetDefinition} from '@stoplight/spectral-core';
import type { RulesetDefinition } from '@stoplight/spectral-core';
import { Spectral } from '@stoplight/spectral-core';


const { bundleAndLoadRuleset } = require('@stoplight/spectral-ruleset-bundler/with-loader');
import { oas } from '@stoplight/spectral-rulesets';
import { DiagnosticSeverity } from '@stoplight/types';
@@ -22,7 +22,13 @@ export const getRuleSetFileFromFolderByFilename = async (filePath: string) => {
throw new InsoError(`Failed to read "${filePath}"`, error);
}
};
export async function lintSpecification({ specContent, rulesetFileName }: { specContent: string; rulesetFileName?: string },) {
export async function lintSpecification({
specContent,
rulesetFileName,
}: {
specContent: string;
rulesetFileName?: string;
}) {
const spectral = new Spectral();
// Use custom ruleset if present
let ruleset = oas;
@@ -49,7 +55,11 @@ export async function lintSpecification({ specContent, rulesetFileName }: { spec
logger.warn(`${results.filter(r => r.severity === DiagnosticSeverity.Warning).length} lint warnings found. \n`);
}
results.forEach(r =>
logger.log(`${r.range.start.line + 1}:${r.range.start.character + 1} - ${DiagnosticSeverity[r.severity]} - ${r.code} - ${r.message} - ${r.path.join('.')}`),
logger.log(
`${r.range.start.line + 1}:${r.range.start.character + 1} - ${DiagnosticSeverity[r.severity]} - ${r.code} - ${
r.message
} - ${r.path.join('.')}`,
),
);

// Fail if errors present

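
Based on the new signature and the test assertions above, `lintSpecification` accepts the raw spec text plus an optional ruleset file path and resolves to a result whose `isValid` flag reports whether Spectral found errors. A hedged usage sketch (the ruleset path is hypothetical):

    const { isValid } = await lintSpecification({
      specContent: "openapi: '3.0.2'\ninfo:\n  title: Sample Spec\n  version: '1.0.0'\npaths: {}",
      rulesetFileName: '/path/to/.spectral.yaml',
    });
    process.exit(isValid ? 0 : 1);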
@@ -24,10 +24,7 @@ const gitAdapter: DbAdapter = async (dir, filterTypes) => {
|
||||
|
||||
const db = emptyDb();
|
||||
|
||||
const readAndInsertDoc = async (
|
||||
type: keyof Database,
|
||||
fileName: string,
|
||||
): Promise<void> => {
|
||||
const readAndInsertDoc = async (type: keyof Database, fileName: string): Promise<void> => {
|
||||
// Get contents of each file in type dir and insert into data
|
||||
let contents = '';
|
||||
try {
|
||||
@@ -40,7 +37,7 @@ const gitAdapter: DbAdapter = async (dir, filterTypes) => {
|
||||
(db[type] as {}[]).push(obj);
|
||||
};
|
||||
|
||||
const types = filterTypes?.length ? filterTypes : Object.keys(db) as (keyof Database)[];
|
||||
const types = filterTypes?.length ? filterTypes : (Object.keys(db) as (keyof Database)[]);
|
||||
await Promise.all(
|
||||
types.map(async t => {
|
||||
// Get all files in type dir
|
||||
@@ -54,9 +51,7 @@ const gitAdapter: DbAdapter = async (dir, filterTypes) => {
|
||||
}
|
||||
return Promise.all(
|
||||
// Insert each file from each type
|
||||
files.map(file =>
|
||||
readAndInsertDoc(t, path.join(dir, '.insomnia', t, file)),
|
||||
),
|
||||
files.map(file => readAndInsertDoc(t, path.join(dir, '.insomnia', t, file))),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
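
For context, the git adapter walks the layout that Insomnia's git sync writes into a repository: one directory per model type under `.insomnia/`, one document per record, each read, parsed, and pushed onto the corresponding array of the empty database from `emptyDb()`. The tree it expects looks roughly like this (ids and file extensions are illustrative):

    <repo>/.insomnia/ApiSpec/spc_0123456789.yml
    <repo>/.insomnia/Workspace/wrk_0123456789.yml
    <repo>/.insomnia/Request/req_0123456789.yml
    <repo>/.insomnia/Environment/env_0123456789.yml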
@@ -58,15 +58,13 @@ describe('insomniaAdapter()', () => {
|
||||
expect(db?.UnitTest.length).toBe(0);
|
||||
});
|
||||
|
||||
it.each([
|
||||
'malformed.yaml',
|
||||
'no-export-format.yaml',
|
||||
'v3-export-format.yaml',
|
||||
'empty.yaml',
|
||||
])('should throw InsoError if malformed yaml content: %s', async (fileName: string) => {
|
||||
const pathname = path.join(fixturesPath, 'insomnia-v4', fileName);
|
||||
await expect(insomniaAdapter(pathname)).rejects.toThrowErrorMatchingSnapshot();
|
||||
});
|
||||
it.each(['malformed.yaml', 'no-export-format.yaml', 'v3-export-format.yaml', 'empty.yaml'])(
|
||||
'should throw InsoError if malformed yaml content: %s',
|
||||
async (fileName: string) => {
|
||||
const pathname = path.join(fixturesPath, 'insomnia-v4', fileName);
|
||||
await expect(insomniaAdapter(pathname)).rejects.toThrowErrorMatchingSnapshot();
|
||||
},
|
||||
);
|
||||
|
||||
it('should return null if pathname is invalid', async () => {
|
||||
const pathname = path.join(fixturesPath, 'insomnia-v4', 'insomnia');
|
||||
|
||||
@@ -29,7 +29,8 @@ import type { BaseModel } from '../models/types';
|
||||
* @see packages/insomnia/src/common/import.js
|
||||
*/
|
||||
|
||||
type RawTypeKey = 'api_spec'
|
||||
type RawTypeKey =
|
||||
| 'api_spec'
|
||||
| 'environment'
|
||||
| 'request'
|
||||
| 'request_group'
|
||||
@@ -37,7 +38,6 @@ type RawTypeKey = 'api_spec'
|
||||
| 'unit_test_suite'
|
||||
| 'unit_test';
|
||||
|
||||
|
||||
const rawTypeToParsedTypeMap: Record<RawTypeKey, BaseModel['type']> = {
|
||||
api_spec: 'ApiSpec',
|
||||
environment: 'Environment',
|
||||
@@ -47,7 +47,6 @@ const rawTypeToParsedTypeMap: Record<RawTypeKey, BaseModel['type']> = {
|
||||
unit_test_suite: 'UnitTestSuite',
|
||||
unit_test: 'UnitTest',
|
||||
};
|
||||
|
||||
|
||||
type ExtraProperties = Record<string, unknown>;
|
||||
|
||||
@@ -79,10 +78,12 @@ const insomniaAdapter: DbAdapter = async (filePath, filterTypes) => {
|
||||
|
||||
// Now, reading and parsing
|
||||
const content = await fs.promises.readFile(filePath, { encoding: 'utf-8' });
|
||||
let parsed: {
|
||||
__export_format: number;
|
||||
resources: RawTypeModel[];
|
||||
} | undefined;
|
||||
let parsed:
|
||||
| {
|
||||
__export_format: number;
|
||||
resources: RawTypeModel[];
|
||||
}
|
||||
| undefined;
|
||||
|
||||
try {
|
||||
parsed = YAML.parse(content);
|
||||
@@ -108,7 +109,9 @@ const insomniaAdapter: DbAdapter = async (filePath, filterTypes) => {
|
||||
} else if (!parsed.__export_format) {
|
||||
throw new InsoError(`Expected an Insomnia v4 export file; unexpected data found in ${fileName}.`);
|
||||
} else if (parsed.__export_format !== 4 && parsed.__export_format !== 5) {
|
||||
throw new InsoError(`Expected an Insomnia v4 export file; found an Insomnia v${parsed.__export_format} export file in ${fileName}.`);
|
||||
throw new InsoError(
|
||||
`Expected an Insomnia v4 export file; found an Insomnia v${parsed.__export_format} export file in ${fileName}.`,
|
||||
);
|
||||
}
|
||||
|
||||
// Transform filter to a set for faster search
|
||||
|
||||
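
The format check above boils down to: parse the file, require an object with an `__export_format` of 4 or 5, and raise `InsoError` otherwise. A self-contained sketch of that validation using plain `Error` and `YAML.parse` from the `yaml` package (the adapter imports the same module earlier in this diff):

    import YAML from 'yaml';

    const content = '_type: export\n__export_format: 4\nresources: []';
    const parsed = YAML.parse(content);
    if (!parsed || typeof parsed !== 'object') {
      throw new Error('Expected an Insomnia v4 export file; failed to parse.');
    } else if (!parsed.__export_format) {
      throw new Error('Expected an Insomnia v4 export file; unexpected data found.');
    } else if (parsed.__export_format !== 4 && parsed.__export_format !== 5) {
      throw new Error(`Found an unsupported Insomnia v${parsed.__export_format} export file.`);
    }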
@@ -3,7 +3,7 @@ import { stat } from 'node:fs/promises';
|
||||
import NeDB from '@seald-io/nedb';
|
||||
import path from 'path';
|
||||
|
||||
import type { Database, DbAdapter} from '../index';
|
||||
import type { Database, DbAdapter } from '../index';
|
||||
import { emptyDb } from '../index';
|
||||
import type { BaseModel } from '../models/types';
|
||||
|
||||
@@ -16,24 +16,25 @@ const neDbAdapter: DbAdapter = async (dir, filterTypes) => {
|
||||
}
|
||||
|
||||
const db = emptyDb();
|
||||
const types = filterTypes?.length ? filterTypes : Object.keys(db) as (keyof Database)[];
|
||||
const promises = types.map(t =>
|
||||
new Promise((resolve, reject) => {
|
||||
const filePath = path.join(dir, `insomnia.${t}.db`);
|
||||
const collection = new NeDB({
|
||||
autoload: true,
|
||||
filename: filePath,
|
||||
corruptAlertThreshold: 0.9,
|
||||
});
|
||||
collection.find({}, (err: Error, docs: BaseModel[]) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
const types = filterTypes?.length ? filterTypes : (Object.keys(db) as (keyof Database)[]);
|
||||
const promises = types.map(
|
||||
t =>
|
||||
new Promise((resolve, reject) => {
|
||||
const filePath = path.join(dir, `insomnia.${t}.db`);
|
||||
const collection = new NeDB({
|
||||
autoload: true,
|
||||
filename: filePath,
|
||||
corruptAlertThreshold: 0.9,
|
||||
});
|
||||
collection.find({}, (err: Error, docs: BaseModel[]) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
(db[t] as {}[]).push(...docs);
|
||||
resolve(null);
|
||||
});
|
||||
}),
|
||||
(db[t] as {}[]).push(...docs);
|
||||
resolve(null);
|
||||
});
|
||||
}),
|
||||
);
|
||||
await Promise.all(promises);
|
||||
return db;
|
||||
|
||||
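
The NeDB adapter above wraps the callback-style `find` in a Promise, one per `insomnia.<type>.db` file. A reduced sketch of that pattern with `@seald-io/nedb`:

    import NeDB from '@seald-io/nedb';

    const readCollection = <T>(filename: string): Promise<T[]> =>
      new Promise((resolve, reject) => {
        const collection = new NeDB({ filename, autoload: true, corruptAlertThreshold: 0.9 });
        collection.find({}, (err: Error, docs: T[]) => (err ? reject(err) : resolve(docs)));
      });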
@@ -48,10 +48,7 @@ export const emptyDb = (): Database => ({
|
||||
Settings: [],
|
||||
});
|
||||
|
||||
export type DbAdapter = (
|
||||
dir: string,
|
||||
filterTypes?: (keyof Database)[],
|
||||
) => Promise<Database | null>;
|
||||
export type DbAdapter = (dir: string, filterTypes?: (keyof Database)[]) => Promise<Database | null>;
|
||||
|
||||
interface Options {
|
||||
pathToSearch: string;
|
||||
@@ -65,10 +62,7 @@ export const isFile = async (path: string) => {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
export const loadDb = async ({
|
||||
pathToSearch,
|
||||
filterTypes,
|
||||
}: Options) => {
|
||||
export const loadDb = async ({ pathToSearch, filterTypes }: Options) => {
|
||||
// if a path to a file is provided, check whether it is an insomnia export file
|
||||
const isFilePath = await isFile(pathToSearch);
|
||||
if (isFilePath) {
|
||||
|
||||
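
`DbAdapter` (collapsed onto one line above) is simply an async function from a directory plus an optional type filter to a `Database` or `null`, and `loadDb` first checks `isFile(pathToSearch)` to decide whether the identifier points at an export file rather than a data directory. A trivial adapter that satisfies the contract, for illustration only:

    // Not a real adapter – it just demonstrates the shape of the type.
    const nullAdapter: DbAdapter = async (_dir, _filterTypes) => emptyDb();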
@@ -4,34 +4,17 @@ import { AutoComplete } from 'enquirer';
|
||||
import { logger } from '../../cli';
|
||||
import type { Database } from '../index';
|
||||
import type { ApiSpec } from './types';
|
||||
import {
|
||||
ensureSingleOrNone,
|
||||
generateIdIsh,
|
||||
getDbChoice,
|
||||
matchIdIsh,
|
||||
} from './util';
|
||||
import { ensureSingleOrNone, generateIdIsh, getDbChoice, matchIdIsh } from './util';
|
||||
const entity = 'api specification';
|
||||
|
||||
export const loadApiSpec = (
|
||||
db: Database,
|
||||
identifier: string,
|
||||
): ApiSpec | null | undefined => {
|
||||
logger.trace(
|
||||
'Load %s with identifier `%s` from data store',
|
||||
entity,
|
||||
identifier,
|
||||
);
|
||||
const items = db.ApiSpec.filter(
|
||||
spec => matchIdIsh(spec, identifier) || spec.fileName === identifier,
|
||||
);
|
||||
export const loadApiSpec = (db: Database, identifier: string): ApiSpec | null | undefined => {
|
||||
logger.trace('Load %s with identifier `%s` from data store', entity, identifier);
|
||||
const items = db.ApiSpec.filter(spec => matchIdIsh(spec, identifier) || spec.fileName === identifier);
|
||||
logger.trace('Found %d.', items.length);
|
||||
return ensureSingleOrNone(items, entity);
|
||||
};
|
||||
|
||||
export const promptApiSpec = async (
|
||||
db: Database,
|
||||
ci: boolean,
|
||||
): Promise<ApiSpec | null | undefined> => {
|
||||
export const promptApiSpec = async (db: Database, ci: boolean): Promise<ApiSpec | null | undefined> => {
|
||||
if (ci || !db.ApiSpec.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -7,10 +7,7 @@ import type { Environment } from './types';
|
||||
import { ensureSingle, generateIdIsh, getDbChoice, matchIdIsh } from './util';
|
||||
|
||||
const loadBaseEnvironmentForWorkspace = (db: Database, workspaceId: string): Environment => {
|
||||
logger.trace(
|
||||
'Load base environment for the workspace `%s` from data store',
|
||||
workspaceId,
|
||||
);
|
||||
logger.trace('Load base environment for the workspace `%s` from data store', workspaceId);
|
||||
const items = db.Environment.filter(environment => environment.parentId === workspaceId);
|
||||
logger.trace('Found %d.', items.length);
|
||||
return ensureSingle(items, 'base environment');
|
||||
@@ -31,10 +28,7 @@ export const loadEnvironment = (
|
||||
return loadBaseEnvironmentForWorkspace(db, workspaceId);
|
||||
}
|
||||
|
||||
logger.trace(
|
||||
'Load sub environment with identifier `%s` from data store',
|
||||
identifier,
|
||||
);
|
||||
logger.trace('Load sub environment with identifier `%s` from data store', identifier);
|
||||
return db.Environment.find(env => matchIdIsh(env, identifier) || env.name === identifier);
|
||||
};
|
||||
|
||||
@@ -49,9 +43,7 @@ export const promptEnvironment = async (
|
||||
|
||||
// Get the sub environments
|
||||
const baseWorkspaceEnv = loadBaseEnvironmentForWorkspace(db, workspaceId);
|
||||
const subEnvironments = db.Environment.filter(
|
||||
subEnv => subEnv.parentId === baseWorkspaceEnv._id,
|
||||
);
|
||||
const subEnvironments = db.Environment.filter(subEnv => subEnv.parentId === baseWorkspaceEnv._id);
|
||||
|
||||
if (!subEnvironments.length) {
|
||||
logger.trace('No sub environments found, using base environment');
|
||||
|
||||
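
The two loaders above rely on the parent/child layout of environments: the base environment's `parentId` is the workspace id, and each sub environment's `parentId` is the base environment's `_id`. A simplified sketch of that resolution (`db` and `workspaceId` stand in for the loaded database and the target workspace; the real code goes through `ensureSingle`):

    const baseEnv = db.Environment.find(env => env.parentId === workspaceId);
    const subEnvironments = baseEnv
      ? db.Environment.filter(env => env.parentId === baseEnv._id)
      : [];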
@@ -2,46 +2,46 @@
|
||||
import type { Database } from '../index';
|
||||
|
||||
export interface BaseModel {
|
||||
_id: string;
|
||||
name: string;
|
||||
type: keyof Database;
|
||||
parentId: string;
|
||||
_id: string;
|
||||
name: string;
|
||||
type: keyof Database;
|
||||
parentId: string;
|
||||
}
|
||||
|
||||
export interface BaseApiSpec {
|
||||
fileName: string;
|
||||
contentType: 'json' | 'yaml';
|
||||
contents: string;
|
||||
fileName: string;
|
||||
contentType: 'json' | 'yaml';
|
||||
contents: string;
|
||||
}
|
||||
|
||||
export type ApiSpec = BaseModel & BaseApiSpec;
|
||||
|
||||
interface BaseUnitTestSuite {
|
||||
name: string;
|
||||
metaSortKey: number;
|
||||
name: string;
|
||||
metaSortKey: number;
|
||||
}
|
||||
|
||||
export type UnitTestSuite = BaseModel & BaseUnitTestSuite;
|
||||
|
||||
interface BaseUnitTest {
|
||||
name: string;
|
||||
code: string;
|
||||
requestId: string | null;
|
||||
metaSortKey: number;
|
||||
name: string;
|
||||
code: string;
|
||||
requestId: string | null;
|
||||
metaSortKey: number;
|
||||
}
|
||||
|
||||
export type UnitTest = BaseModel & BaseUnitTest;
|
||||
|
||||
interface BaseEnvironment {
|
||||
name: string;
|
||||
metaSortKey: number;
|
||||
name: string;
|
||||
metaSortKey: number;
|
||||
}
|
||||
|
||||
export type Environment = BaseModel & BaseEnvironment;
|
||||
|
||||
interface BaseWorkspace {
|
||||
name: string;
|
||||
description: string;
|
||||
name: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
export type WorkspaceMeta = BaseModel & { activeGlobalEnvironmentId: string };
|
||||
@@ -49,11 +49,11 @@ export type WorkspaceMeta = BaseModel & { activeGlobalEnvironmentId: string };
|
||||
export type Workspace = BaseModel & BaseWorkspace;
|
||||
|
||||
export type InsomniaRequest = BaseModel & {
|
||||
name: string;
|
||||
description: string;
|
||||
method: string;
|
||||
url: string;
|
||||
headers: Record<string, string>;
|
||||
body: string;
|
||||
metaSortKey: number;
|
||||
name: string;
|
||||
description: string;
|
||||
method: string;
|
||||
url: string;
|
||||
headers: Record<string, string>;
|
||||
body: string;
|
||||
metaSortKey: number;
|
||||
};
|
||||
|
||||
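
The interfaces above compose by intersection, so a record needs both the `BaseModel` bookkeeping fields and the model-specific ones. A literal that satisfies `ApiSpec` (= `BaseModel & BaseApiSpec`), with made-up ids:

    const spec: ApiSpec = {
      _id: 'spc_0123456789abcdef',
      name: 'Sample Spec',
      type: 'ApiSpec',
      parentId: 'wrk_0123456789abcdef',
      fileName: 'Sample Spec',
      contentType: 'yaml',
      contents: "openapi: '3.0.2'",
    };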
@@ -5,33 +5,17 @@ import { logger } from '../../cli';
|
||||
import type { Database } from '../index';
|
||||
import { loadApiSpec } from './api-spec';
|
||||
import type { UnitTestSuite } from './types';
|
||||
import {
|
||||
ensureSingleOrNone,
|
||||
generateIdIsh,
|
||||
getDbChoice,
|
||||
matchIdIsh,
|
||||
} from './util';
|
||||
import { ensureSingleOrNone, generateIdIsh, getDbChoice, matchIdIsh } from './util';
|
||||
import { loadWorkspace } from './workspace';
|
||||
|
||||
export const loadUnitTestSuite = (
|
||||
db: Database,
|
||||
identifier: string,
|
||||
): UnitTestSuite | null | undefined => {
|
||||
export const loadUnitTestSuite = (db: Database, identifier: string): UnitTestSuite | null | undefined => {
|
||||
// Identifier is for one specific suite; find it
|
||||
logger.trace(
|
||||
'Load unit test suite with identifier `%s` from data store',
|
||||
identifier,
|
||||
);
|
||||
const items = db.UnitTestSuite.filter(
|
||||
suite => matchIdIsh(suite, identifier) || suite.name === identifier,
|
||||
);
|
||||
logger.trace('Load unit test suite with identifier `%s` from data store', identifier);
|
||||
const items = db.UnitTestSuite.filter(suite => matchIdIsh(suite, identifier) || suite.name === identifier);
|
||||
logger.trace('Found %d.', items.length);
|
||||
return ensureSingleOrNone(items, 'unit test suite');
|
||||
};
|
||||
export const loadTestSuites = (
|
||||
db: Database,
|
||||
identifier: string,
|
||||
): UnitTestSuite[] => {
|
||||
export const loadTestSuites = (db: Database, identifier: string): UnitTestSuite[] => {
|
||||
const apiSpec = loadApiSpec(db, identifier);
|
||||
const workspace = loadWorkspace(db, apiSpec?.parentId || identifier); // if identifier is for an apiSpec or a workspace, return all suites for that workspace
|
||||
|
||||
@@ -42,19 +26,14 @@ export const loadTestSuites = (
|
||||
const result = loadUnitTestSuite(db, identifier);
|
||||
return result ? [result] : [];
|
||||
};
|
||||
export const promptTestSuites = async (
|
||||
db: Database,
|
||||
ci: boolean,
|
||||
): Promise<UnitTestSuite[]> => {
|
||||
export const promptTestSuites = async (db: Database, ci: boolean): Promise<UnitTestSuite[]> => {
|
||||
if (ci) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const choices = db.ApiSpec.map(spec => [
|
||||
getDbChoice(generateIdIsh(spec), spec.fileName),
|
||||
...db.UnitTestSuite.filter(
|
||||
suite => suite.parentId === spec.parentId,
|
||||
).map(suite =>
|
||||
...db.UnitTestSuite.filter(suite => suite.parentId === spec.parentId).map(suite =>
|
||||
getDbChoice(generateIdIsh(suite), suite.name, {
|
||||
indent: 1,
|
||||
}),
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { InsoError } from '../../cli';
|
||||
import type { BaseModel } from './types';
|
||||
|
||||
export const matchIdIsh = ({ _id }: BaseModel, identifier: string) =>
|
||||
_id.startsWith(identifier);
|
||||
export const matchIdIsh = ({ _id }: BaseModel, identifier: string) => _id.startsWith(identifier);
|
||||
|
||||
export const generateIdIsh = ({ _id }: BaseModel, length = 10) =>
|
||||
_id.slice(0, length);
|
||||
export const generateIdIsh = ({ _id }: BaseModel, length = 10) => _id.slice(0, length);
|
||||
|
||||
function indent(level: number, code: string, tab = ' |'): string {
|
||||
if (!level || level < 0) {
|
||||
@@ -20,9 +18,9 @@ export const getDbChoice = (
|
||||
idIsh: string,
|
||||
message: string,
|
||||
config: {
|
||||
indent?: number;
|
||||
hint?: string;
|
||||
} = {},
|
||||
indent?: number;
|
||||
hint?: string;
|
||||
} = {},
|
||||
) => ({
|
||||
name: idIsh,
|
||||
message: indent(config.indent || 0, message),
|
||||
@@ -30,10 +28,7 @@ export const getDbChoice = (
|
||||
hint: config.hint || `${idIsh}`,
|
||||
});
|
||||
|
||||
export const ensureSingleOrNone = <T>(
|
||||
items: T[],
|
||||
entity: string,
|
||||
): T | null | undefined => {
|
||||
export const ensureSingleOrNone = <T>(items: T[], entity: string): T | null | undefined => {
|
||||
if (items.length === 1) {
|
||||
return items[0];
|
||||
}
|
||||
@@ -42,9 +37,7 @@ export const ensureSingleOrNone = <T>(
|
||||
return null;
|
||||
}
|
||||
|
||||
throw new InsoError(
|
||||
`Expected single or no ${entity} in the data store, but found multiple (${items.length}).`,
|
||||
);
|
||||
throw new InsoError(`Expected single or no ${entity} in the data store, but found multiple (${items.length}).`);
|
||||
};
|
||||
export const ensureSingle = <T>(items: T[], entity: string): T => {
|
||||
if (items.length === 1) {
|
||||
@@ -52,12 +45,8 @@ export const ensureSingle = <T>(items: T[], entity: string): T => {
|
||||
}
|
||||
|
||||
if (items.length === 0) {
|
||||
throw new InsoError(
|
||||
`Expected single ${entity} in the data store, but found none.`,
|
||||
);
|
||||
throw new InsoError(`Expected single ${entity} in the data store, but found none.`);
|
||||
}
|
||||
|
||||
throw new InsoError(
|
||||
`Expected single ${entity} in the data store, but found multiple (${items.length}).`,
|
||||
);
|
||||
throw new InsoError(`Expected single ${entity} in the data store, but found multiple (${items.length}).`);
|
||||
};
|
||||
|
||||
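
`generateIdIsh` and `matchIdIsh` are designed as a pair: the prompt shows a truncated id (the first ten characters by default), and any prefix of the full `_id` is accepted back as an identifier. A small illustration with a made-up id:

    const model: BaseModel = { _id: 'spc_0123456789abcdef', name: 'Sample Spec', type: 'ApiSpec', parentId: 'wrk_1' };

    generateIdIsh(model);            // 'spc_012345' – first 10 characters
    matchIdIsh(model, 'spc_012345'); // true – any prefix of the full _id matches
    matchIdIsh(model, 'spc_999');    // false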
@@ -7,20 +7,12 @@ import type { Workspace } from './types';
|
||||
import { ensureSingleOrNone, generateIdIsh, getDbChoice, matchIdIsh } from './util';
|
||||
const entity = 'workspace';
|
||||
export const loadWorkspace = (db: Database, identifier: string) => {
|
||||
logger.trace(
|
||||
'Load workspace with identifier `%s` from data store',
|
||||
identifier,
|
||||
);
|
||||
const items = db.Workspace.filter(workspace => (
|
||||
matchIdIsh(workspace, identifier) || workspace.name === identifier
|
||||
));
|
||||
logger.trace('Load workspace with identifier `%s` from data store', identifier);
|
||||
const items = db.Workspace.filter(workspace => matchIdIsh(workspace, identifier) || workspace.name === identifier);
|
||||
logger.trace('Found %d.', items.length);
|
||||
return ensureSingleOrNone(items, 'workspace');
|
||||
};
|
||||
export const promptWorkspace = async (
|
||||
db: Database,
|
||||
ci: boolean,
|
||||
): Promise<Workspace | null | undefined> => {
|
||||
export const promptWorkspace = async (db: Database, ci: boolean): Promise<Workspace | null | undefined> => {
|
||||
if (ci || !db.Workspace.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1725888487017
|
||||
url: localhost:4010/echo
|
||||
name: New Request
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -45,7 +45,7 @@ resources:
|
||||
modified: 1725888480476
|
||||
created: 1725888480476
|
||||
name: after response test
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_af8d61e3630269b8a124b18968608f85797d57fc
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1725888487017
|
||||
url: localhost:4010/echo
|
||||
name: New Request
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -39,7 +39,7 @@ resources:
|
||||
modified: 1725888480476
|
||||
created: 1725888480476
|
||||
name: after response test
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_af8d61e3630269b8a124b18968608f85797d57fc
|
||||
|
||||
@@ -9,11 +9,11 @@ resources:
|
||||
created: 1720531712225
|
||||
url: https://localhost:4011/echo
|
||||
name: New Request
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body:
|
||||
mimeType: text/plain
|
||||
text: "{{foo}}"
|
||||
text: '{{foo}}'
|
||||
parameters: []
|
||||
headers:
|
||||
- name: Content-Type
|
||||
@@ -21,7 +21,7 @@ resources:
|
||||
- name: User-Agent
|
||||
value: insomnia/9.3.3-beta.0
|
||||
authentication: {}
|
||||
preRequestScript: ""
|
||||
preRequestScript: ''
|
||||
metaSortKey: -1720531712226
|
||||
isPrivate: false
|
||||
pathParameters: []
|
||||
@@ -37,11 +37,11 @@ resources:
|
||||
modified: 1720533498913
|
||||
created: 1720531708748
|
||||
name: My Folder
|
||||
description: ""
|
||||
description: ''
|
||||
environment:
|
||||
foo: bar
|
||||
environmentPropertyOrder:
|
||||
"&":
|
||||
'&':
|
||||
- foo
|
||||
metaSortKey: -1720531708748
|
||||
authentication:
|
||||
@@ -54,7 +54,7 @@ resources:
|
||||
- id: pair_74f3158279464c9c9827dd2be66762ca
|
||||
name: customheader
|
||||
value: jack
|
||||
description: ""
|
||||
description: ''
|
||||
disabled: false
|
||||
_type: request_group
|
||||
- _id: wrk_b216792ae69e468aa46ddbf7783c7c76
|
||||
@@ -62,7 +62,7 @@ resources:
|
||||
modified: 1720533288412
|
||||
created: 1720533288412
|
||||
name: folder-inheritance-doc.yaml
|
||||
description: ""
|
||||
description: ''
|
||||
scope: design
|
||||
_type: workspace
|
||||
- _id: env_9e44ac71d3a51fc6a8951b8968e45c44c5236355
|
||||
@@ -88,7 +88,7 @@ resources:
|
||||
modified: 1720533288413
|
||||
created: 1720533288413
|
||||
fileName: New Document
|
||||
contents: ""
|
||||
contents: ''
|
||||
contentType: yaml
|
||||
_type: api_spec
|
||||
- _id: uts_412db4d4943a4baeb1319b3bd37fa4bc
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
modified: 1728298565335
|
||||
created: 1728298565335
|
||||
name: GlobalEnv
|
||||
description: ""
|
||||
description: ''
|
||||
scope: environment
|
||||
_type: workspace
|
||||
- _id: env_dcf0363e684fae59a4ac57c09c208674caa8a64a
|
||||
@@ -25,7 +25,7 @@ resources:
|
||||
firstkey: first
|
||||
secondkey: second
|
||||
dataPropertyOrder:
|
||||
"&":
|
||||
'&':
|
||||
- apiKey
|
||||
- newkey
|
||||
- base_path
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1636141100570
|
||||
url: http://127.0.0.1:4010/
|
||||
name: with pre request script
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -28,7 +28,7 @@ resources:
|
||||
modified: 1636140994423
|
||||
created: 1636140994423
|
||||
name: Minimal Collection
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_9e44ac71d3a51fc6a8951b8968e45c44c5236355
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1636141100570
|
||||
url: http://127.0.0.1:4010
|
||||
name: custom header by pre request script
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -28,7 +28,7 @@ resources:
|
||||
modified: 1636140994423
|
||||
created: 1636140994423
|
||||
name: Minimal Collection 2
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_9e44ac71d3a51fc6a8951b8968e45c44c5236355
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1636141014552
|
||||
url: http://127.0.0.1:4010/echo
|
||||
name: insomnia.request manipulation
|
||||
description: ""
|
||||
description: ''
|
||||
method: POST
|
||||
parameters: []
|
||||
headers:
|
||||
@@ -41,7 +41,7 @@ resources:
|
||||
raw: 'rawContent',
|
||||
});
|
||||
body:
|
||||
mimeType: "application/json"
|
||||
mimeType: 'application/json'
|
||||
text: |-
|
||||
{}
|
||||
_type: request
|
||||
@@ -50,7 +50,7 @@ resources:
|
||||
modified: 1636140994423
|
||||
created: 1636140994423
|
||||
name: Minimal Collection 3
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_9e44ac71d3a51fc6a8951b8968e45c44c5236355
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1722852503814
|
||||
url: localhost:4010/echo
|
||||
name: sendRequest
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -18,7 +18,7 @@ resources:
|
||||
value: insomnia/9.3.3
|
||||
authentication: {}
|
||||
preRequestScript: |-
|
||||
|
||||
|
||||
const resp = await new Promise((resolve, reject) => {
|
||||
insomnia.sendRequest(
|
||||
'http://localhost:4010/echo',
|
||||
@@ -44,7 +44,7 @@ resources:
|
||||
modified: 1722852498102
|
||||
created: 1722852498102
|
||||
name: send request script
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_fa53fdd70393dd67a77da3c45c26f2b75ecbc1f2
|
||||
|
||||
@@ -9,7 +9,7 @@ resources:
|
||||
created: 1729981763331
|
||||
url: localhost:4010/echo
|
||||
name: setNextRequest
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -34,7 +34,7 @@ resources:
|
||||
modified: 1729981758905
|
||||
created: 1729981758905
|
||||
name: test next request
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: req_6a0343d51ca74de7a2c73e34211354ab
|
||||
@@ -43,7 +43,7 @@ resources:
|
||||
created: 1729981846012
|
||||
url: localhost:4010/echo
|
||||
name: Failing Request
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -71,7 +71,7 @@ resources:
|
||||
created: 1729981822432
|
||||
url: localhost:4010/echo
|
||||
name: Passing Request
|
||||
description: ""
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
|
||||
@@ -8,8 +8,8 @@ resources:
|
||||
modified: 1726666263873
|
||||
created: 1726658232232
|
||||
url: localhost:4010/echo
|
||||
name: "1"
|
||||
description: ""
|
||||
name: '1'
|
||||
description: ''
|
||||
method: GET
|
||||
body:
|
||||
mimeType: text/plain
|
||||
@@ -45,7 +45,7 @@ resources:
|
||||
modified: 1726658198059
|
||||
created: 1726658198059
|
||||
name: Collection with 3 requests
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: req_6063adcdab5b409e9b4f00f47322df4a
|
||||
@@ -53,8 +53,8 @@ resources:
|
||||
modified: 1726658253319
|
||||
created: 1726658253319
|
||||
url: localhost:4010/echo
|
||||
name: "2"
|
||||
description: ""
|
||||
name: '2'
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -77,8 +77,8 @@ resources:
|
||||
modified: 1726658259060
|
||||
created: 1726658259060
|
||||
url: localhost:4010/echo
|
||||
name: "3"
|
||||
description: ""
|
||||
name: '3'
|
||||
description: ''
|
||||
method: GET
|
||||
body: {}
|
||||
parameters: []
|
||||
@@ -104,7 +104,7 @@ resources:
|
||||
data:
|
||||
value: 123
|
||||
dataPropertyOrder:
|
||||
"&":
|
||||
'&':
|
||||
- value
|
||||
color: null
|
||||
isPrivate: false
|
||||
|
||||
@@ -8,8 +8,8 @@ resources:
|
||||
modified: 1726666263873
|
||||
created: 1726658232232
|
||||
url: localhost:4010/echo
|
||||
name: "1"
|
||||
description: ""
|
||||
name: '1'
|
||||
description: ''
|
||||
method: GET
|
||||
body:
|
||||
mimeType: text/plain
|
||||
@@ -45,7 +45,7 @@ resources:
|
||||
modified: 1726658198059
|
||||
created: 1726658198059
|
||||
name: Collection with 3 requests
|
||||
description: ""
|
||||
description: ''
|
||||
scope: collection
|
||||
_type: workspace
|
||||
- _id: env_86e1354fb9909cdb109ccadf83c3353f3bb9bd09
|
||||
@@ -55,7 +55,7 @@ resources:
|
||||
name: Base Environment
|
||||
data: {}
|
||||
dataPropertyOrder:
|
||||
"&":
|
||||
'&':
|
||||
- value
|
||||
color: null
|
||||
isPrivate: false
|
||||
|
||||
@@ -11,8 +11,7 @@ describe('tryToReadInsoConfigFile()', () => {
|
||||
it('should load .insorc-test.yaml config file in fixtures dir', async () => {
|
||||
const result = await tryToReadInsoConfigFile(path.join(fixturesDir, '.insorc-test.yaml'));
|
||||
expect(result).toEqual({
|
||||
options: {
|
||||
},
|
||||
options: {},
|
||||
scripts: {
|
||||
exportSpec: 'inso export spec',
|
||||
lintSpec: 'inso lint spec',
|
||||
@@ -23,8 +22,8 @@ describe('tryToReadInsoConfigFile()', () => {
|
||||
});
|
||||
|
||||
it('should return empty object and report error if specified config file not found', async () => {
|
||||
const consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => { });
|
||||
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => { });
|
||||
const consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
|
||||
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
const result = await tryToReadInsoConfigFile('not-found.yaml');
|
||||
expect(result).toEqual({});
|
||||
expect(consoleLogSpy).toHaveBeenCalledWith('Could not find config file at not-found.yaml.');
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import fs from 'node:fs/promises';
|
||||
import process from 'node:process';
|
||||
|
||||
import type { ProcessEnvOptions} from 'child_process';
|
||||
import type { ProcessEnvOptions } from 'child_process';
|
||||
import { spawn } from 'child_process';
|
||||
import path from 'path';
|
||||
|
||||
@@ -14,24 +14,21 @@ const spawnCompressProcess = (cwd: ProcessEnvOptions['cwd']) => {
|
||||
const version = process.env.VERSION || packageJson.version;
|
||||
|
||||
if (platform === 'darwin') {
|
||||
return spawn('ditto', [
|
||||
'-c',
|
||||
'-k',
|
||||
'../binaries/inso',
|
||||
`inso-macos-${version}.zip`,
|
||||
], { cwd });
|
||||
return spawn('ditto', ['-c', '-k', '../binaries/inso', `inso-macos-${version}.zip`], { cwd });
|
||||
}
|
||||
|
||||
if (platform === 'win32' || platform === 'linux') {
|
||||
return spawn('tar', [
|
||||
'-C',
|
||||
'../binaries',
|
||||
platform === 'win32' ? '-a -cf' : '-cJf',
|
||||
platform === 'win32'
|
||||
? `inso-windows-${version}.zip`
|
||||
: `inso-linux-${process.arch}-${version}.tar.xz`,
|
||||
platform === 'win32' ? 'inso.exe' : 'inso',
|
||||
], { cwd, shell: platform === 'win32' });
|
||||
return spawn(
|
||||
'tar',
|
||||
[
|
||||
'-C',
|
||||
'../binaries',
|
||||
platform === 'win32' ? '-a -cf' : '-cJf',
|
||||
platform === 'win32' ? `inso-windows-${version}.zip` : `inso-linux-${process.arch}-${version}.tar.xz`,
|
||||
platform === 'win32' ? 'inso.exe' : 'inso',
|
||||
],
|
||||
{ cwd, shell: platform === 'win32' },
|
||||
);
|
||||
}
|
||||
|
||||
throw new Error(`[pkg-inso-artifacts] Unsupported OS: ${platform}`);
|
||||
|
||||
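
For reference, the spawn() calls above correspond roughly to running the following from the packaging directory (version and architecture filled in from the environment); this is a reading of the arguments, not an exact transcript:

    macOS:   ditto -c -k ../binaries/inso inso-macos-<version>.zip
    Linux:   tar -C ../binaries -cJf inso-linux-<arch>-<version>.tar.xz inso
    Windows: tar -C ../binaries -a -cf inso-windows-<version>.zip inso.exe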
@@ -10,9 +10,7 @@
|
||||
"moduleResolution": "node",
|
||||
"isolatedModules": true,
|
||||
"paths": {
|
||||
"electron": [
|
||||
"../insomnia/send-request/electron",
|
||||
],
|
||||
"electron": ["../insomnia/send-request/electron"]
|
||||
},
|
||||
/* remove this once react AlertModal is out of the plugins code path */
|
||||
"jsx": "react",
|
||||
@@ -20,27 +18,16 @@
|
||||
"module": "CommonJS",
|
||||
"sourceMap": true,
|
||||
/* Runs in the DOM NOTE: this is inconsistent with reality */
|
||||
"lib": [
|
||||
"ES2023",
|
||||
"DOM",
|
||||
"DOM.Iterable"
|
||||
],
|
||||
"lib": ["ES2023", "DOM", "DOM.Iterable"],
|
||||
/* Strictness */
|
||||
"strict": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"useUnknownInCatchVariables": false,
|
||||
"useUnknownInCatchVariables": false
|
||||
},
|
||||
"include": [
|
||||
".eslintrc.js",
|
||||
"esbuild.ts",
|
||||
"jest.config.js",
|
||||
"package.json",
|
||||
"src",
|
||||
"../insomnia/types",
|
||||
],
|
||||
"include": [".eslintrc.js", "esbuild.ts", "jest.config.js", "package.json", "src", "../insomnia/types"],
|
||||
"exclude": [
|
||||
"**/*.test.ts",
|
||||
"**/__mocks__/*",
|
||||
@@ -51,6 +38,6 @@
|
||||
"jest.config.js",
|
||||
"node_modules",
|
||||
"scripts",
|
||||
"src/vitest",
|
||||
],
|
||||
"src/vitest"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -8,9 +8,7 @@ export default defineConfig({
|
||||
},
|
||||
server: {
|
||||
deps: {
|
||||
inline: [
|
||||
'tinykeys',
|
||||
],
|
||||
inline: ['tinykeys'],
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -4,191 +4,185 @@ import { type AuthOptions, fromPreRequestAuth, RequestAuth, toPreRequestAuth } f
|
||||
import type { Variable, VariableList } from '../variables';
|
||||
|
||||
const varListToObject = (obj: VariableList<Variable> | undefined) => {
|
||||
if (!obj) {
|
||||
return undefined;
|
||||
}
|
||||
if (!obj) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return obj.map(
|
||||
(optVar: Variable) => ({
|
||||
// type: 'any', // TODO: fix type
|
||||
key: optVar.key,
|
||||
value: optVar.value,
|
||||
}),
|
||||
{}
|
||||
);
|
||||
return obj.map(
|
||||
(optVar: Variable) => ({
|
||||
// type: 'any', // TODO: fix type
|
||||
key: optVar.key,
|
||||
value: optVar.value,
|
||||
}),
|
||||
{},
|
||||
);
|
||||
};
|
||||
|
||||
describe('test sdk objects', () => {
|
||||
it('test RequestAuth methods', () => {
|
||||
expect(RequestAuth.isValidType('noauth')).toBeTruthy();
|
||||
it('test RequestAuth methods', () => {
|
||||
expect(RequestAuth.isValidType('noauth')).toBeTruthy();
|
||||
|
||||
const basicAuthOptions = {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'user1' },
|
||||
{ key: 'password', value: 'pwd1' },
|
||||
],
|
||||
} as AuthOptions;
|
||||
const basicAuthOptions = {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'user1' },
|
||||
{ key: 'password', value: 'pwd1' },
|
||||
],
|
||||
} as AuthOptions;
|
||||
|
||||
const authObj = new RequestAuth(basicAuthOptions);
|
||||
const authObj = new RequestAuth(basicAuthOptions);
|
||||
|
||||
const basicAuthOptsFromAuth = varListToObject(authObj.parameters());
|
||||
expect(basicAuthOptsFromAuth).toEqual(basicAuthOptions.basic);
|
||||
const basicAuthOptsFromAuth = varListToObject(authObj.parameters());
|
||||
expect(basicAuthOptsFromAuth).toEqual(basicAuthOptions.basic);
|
||||
|
||||
const basicAuthOptions2 = {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'user2' },
|
||||
{ key: 'password', value: 'pwd2' },
|
||||
],
|
||||
} as AuthOptions;
|
||||
const bearerAuthOptions = {
|
||||
type: 'bearer',
|
||||
bearer: [
|
||||
{ key: 'token', value: 'mytoken' },
|
||||
],
|
||||
} as AuthOptions;
|
||||
const basicAuthOptions2 = {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'user2' },
|
||||
{ key: 'password', value: 'pwd2' },
|
||||
],
|
||||
} as AuthOptions;
|
||||
const bearerAuthOptions = {
|
||||
type: 'bearer',
|
||||
bearer: [{ key: 'token', value: 'mytoken' }],
|
||||
} as AuthOptions;
|
||||
|
||||
authObj.update(basicAuthOptions2);
|
||||
const basicAuthOpt2FromAuth = varListToObject(authObj.parameters());
|
||||
expect(basicAuthOpt2FromAuth).toEqual(basicAuthOptions2.basic);
|
||||
authObj.update(basicAuthOptions2);
|
||||
const basicAuthOpt2FromAuth = varListToObject(authObj.parameters());
|
||||
expect(basicAuthOpt2FromAuth).toEqual(basicAuthOptions2.basic);
|
||||
|
||||
authObj.use('bearer', bearerAuthOptions);
|
||||
const beareerAuthOptFromAuth = varListToObject(authObj.parameters());
|
||||
expect(beareerAuthOptFromAuth).toEqual(bearerAuthOptions.bearer);
|
||||
authObj.use('bearer', bearerAuthOptions);
|
||||
const beareerAuthOptFromAuth = varListToObject(authObj.parameters());
|
||||
expect(beareerAuthOptFromAuth).toEqual(bearerAuthOptions.bearer);
|
||||
|
||||
authObj.clear('bearer');
|
||||
expect(authObj.parameters()).toBeUndefined();
|
||||
});
|
||||
authObj.clear('bearer');
|
||||
expect(authObj.parameters()).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('test auth transforming', () => {
|
||||
it('transforming from script side to Insomnia and the reverse direction', () => {
|
||||
const basicAuth = {
|
||||
type: 'basic',
|
||||
useISO88591: true,
|
||||
disabled: false,
|
||||
username: 'uname',
|
||||
password: 'pwd',
|
||||
};
|
||||
const apikeyAuth = {
|
||||
type: 'apikey',
|
||||
disabled: false,
|
||||
key: 'key',
|
||||
value: 'value',
|
||||
addTo: 'addto',
|
||||
};
|
||||
const hawkAuth = {
|
||||
type: 'hawk',
|
||||
disabled: true,
|
||||
algorithm: 'sha256',
|
||||
id: 'id',
|
||||
key: 'key',
|
||||
ext: 'ext',
|
||||
validatePayload: true,
|
||||
};
|
||||
const oauth1Auth = {
|
||||
type: 'oauth1',
|
||||
disabled: true,
|
||||
signatureMethod: 'HMAC-SHA1',
|
||||
consumerKey: 'consumerKey',
|
||||
consumerSecret: 'consumerSecret',
|
||||
tokenKey: 'tokenKey',
|
||||
tokenSecret: 'tokenSecret',
|
||||
privateKey: 'privateKey',
|
||||
version: 'version',
|
||||
nonce: 'nonce',
|
||||
timestamp: 'timestamp',
|
||||
callback: 'callback',
|
||||
realm: 'realm',
|
||||
verifier: 'verifier',
|
||||
includeBodyHash: true,
|
||||
};
|
||||
const digestAuth = {
|
||||
type: 'digest',
|
||||
disabled: true,
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
};
|
||||
const digestNtlm = {
|
||||
type: 'ntlm',
|
||||
disabled: true,
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
};
|
||||
const bearerAuth = {
|
||||
type: 'bearer',
|
||||
disabled: true,
|
||||
token: 'token',
|
||||
prefix: 'prefix',
|
||||
};
|
||||
const awsv4Auth = {
|
||||
type: 'iam',
|
||||
disabled: true,
|
||||
accessKeyId: 'accessKeyId',
|
||||
secretAccessKey: 'secretAccessKey',
|
||||
sessionToken: 'sessionToken',
|
||||
region: 'region',
|
||||
service: 'service',
|
||||
};
|
||||
const asapAuth = {
|
||||
type: 'asap',
|
||||
disabled: true,
|
||||
issuer: 'issuer',
|
||||
subject: 'subject',
|
||||
audience: 'audience',
|
||||
additionalClaims: 'additionalClaims',
|
||||
keyId: 'keyId',
|
||||
privateKey: 'privateKey',
|
||||
};
|
||||
const noneAuth = {
|
||||
type: 'none',
|
||||
disabled: true,
|
||||
};
|
||||
const oauth2Auth = {
|
||||
type: 'oauth2',
|
||||
disabled: true,
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl: 'accessTokenUrl',
|
||||
authorizationUrl: 'authorizationUrl',
|
||||
clientId: 'clientId',
|
||||
clientSecret: 'clientSecret',
|
||||
audience: 'audience',
|
||||
scope: 'scope',
|
||||
resource: 'resource',
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
redirectUrl: 'redirectUrl',
|
||||
credentialsInBody: true,
|
||||
state: 'state',
|
||||
code: 'code',
|
||||
accessToken: 'accessToken',
|
||||
refreshToken: 'refreshToken',
|
||||
tokenPrefix: 'tokenPrefix',
|
||||
usePkce: true,
|
||||
pkceMethod: 'pkceMethod',
|
||||
responseType: 'id_token',
|
||||
origin: 'origin',
|
||||
};
|
||||
it('transforming from script side to Insomnia and the reverse direction', () => {
|
||||
const basicAuth = {
|
||||
type: 'basic',
|
||||
useISO88591: true,
|
||||
disabled: false,
|
||||
username: 'uname',
|
||||
password: 'pwd',
|
||||
};
|
||||
const apikeyAuth = {
|
||||
type: 'apikey',
|
||||
disabled: false,
|
||||
key: 'key',
|
||||
value: 'value',
|
||||
addTo: 'addto',
|
||||
};
|
||||
const hawkAuth = {
|
||||
type: 'hawk',
|
||||
disabled: true,
|
||||
algorithm: 'sha256',
|
||||
id: 'id',
|
||||
key: 'key',
|
||||
ext: 'ext',
|
||||
validatePayload: true,
|
||||
};
|
||||
const oauth1Auth = {
|
||||
type: 'oauth1',
|
||||
disabled: true,
|
||||
signatureMethod: 'HMAC-SHA1',
|
||||
consumerKey: 'consumerKey',
|
||||
consumerSecret: 'consumerSecret',
|
||||
tokenKey: 'tokenKey',
|
||||
tokenSecret: 'tokenSecret',
|
||||
privateKey: 'privateKey',
|
||||
version: 'version',
|
||||
nonce: 'nonce',
|
||||
timestamp: 'timestamp',
|
||||
callback: 'callback',
|
||||
realm: 'realm',
|
||||
verifier: 'verifier',
|
||||
includeBodyHash: true,
|
||||
};
|
||||
const digestAuth = {
|
||||
type: 'digest',
|
||||
disabled: true,
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
};
|
||||
const digestNtlm = {
|
||||
type: 'ntlm',
|
||||
disabled: true,
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
};
|
||||
const bearerAuth = {
|
||||
type: 'bearer',
|
||||
disabled: true,
|
||||
token: 'token',
|
||||
prefix: 'prefix',
|
||||
};
|
||||
const awsv4Auth = {
|
||||
type: 'iam',
|
||||
disabled: true,
|
||||
accessKeyId: 'accessKeyId',
|
||||
secretAccessKey: 'secretAccessKey',
|
||||
sessionToken: 'sessionToken',
|
||||
region: 'region',
|
||||
service: 'service',
|
||||
};
|
||||
const asapAuth = {
|
||||
type: 'asap',
|
||||
disabled: true,
|
||||
issuer: 'issuer',
|
||||
subject: 'subject',
|
||||
audience: 'audience',
|
||||
additionalClaims: 'additionalClaims',
|
||||
keyId: 'keyId',
|
||||
privateKey: 'privateKey',
|
||||
};
|
||||
const noneAuth = {
|
||||
type: 'none',
|
||||
disabled: true,
|
||||
};
|
||||
const oauth2Auth = {
|
||||
type: 'oauth2',
|
||||
disabled: true,
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl: 'accessTokenUrl',
|
||||
authorizationUrl: 'authorizationUrl',
|
||||
clientId: 'clientId',
|
||||
clientSecret: 'clientSecret',
|
||||
audience: 'audience',
|
||||
scope: 'scope',
|
||||
resource: 'resource',
|
||||
username: 'username',
|
||||
password: 'password',
|
||||
redirectUrl: 'redirectUrl',
|
||||
credentialsInBody: true,
|
||||
state: 'state',
|
||||
code: 'code',
|
||||
accessToken: 'accessToken',
|
||||
refreshToken: 'refreshToken',
|
||||
tokenPrefix: 'tokenPrefix',
|
||||
usePkce: true,
|
||||
pkceMethod: 'pkceMethod',
|
||||
responseType: 'id_token',
|
||||
origin: 'origin',
|
||||
};
|
||||
|
||||
[
|
||||
basicAuth,
|
||||
apikeyAuth,
|
||||
hawkAuth,
|
||||
oauth1Auth,
|
||||
digestAuth,
|
||||
digestNtlm,
|
||||
bearerAuth,
|
||||
awsv4Auth,
|
||||
asapAuth,
|
||||
noneAuth,
|
||||
oauth2Auth,
|
||||
].forEach(authMethod => {
|
||||
expect(fromPreRequestAuth(
|
||||
new RequestAuth(
|
||||
toPreRequestAuth(authMethod)),
|
||||
)
|
||||
).toEqual(authMethod);
|
||||
});
|
||||
[
|
||||
basicAuth,
|
||||
apikeyAuth,
|
||||
hawkAuth,
|
||||
oauth1Auth,
|
||||
digestAuth,
|
||||
digestNtlm,
|
||||
bearerAuth,
|
||||
awsv4Auth,
|
||||
asapAuth,
|
||||
noneAuth,
|
||||
oauth2Auth,
|
||||
].forEach(authMethod => {
|
||||
expect(fromPreRequestAuth(new RequestAuth(toPreRequestAuth(authMethod)))).toEqual(authMethod);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,33 +3,30 @@ import { describe, expect, it } from 'vitest';
|
||||
import { Certificate } from '../certificates';
|
||||
|
||||
describe('test Certificate object', () => {
|
||||
it('test methods', () => {
|
||||
const cert = new Certificate({
|
||||
name: 'Certificate for example.com',
|
||||
matches: ['https://example.com'],
|
||||
key: { src: '/User/path/to/certificate/key' },
|
||||
cert: { src: '/User/path/to/certificate' },
|
||||
passphrase: 'iampassphrase',
|
||||
});
|
||||
|
||||
[
|
||||
'https://example.com',
|
||||
'https://example.com/subdomain',
|
||||
].forEach(testCase => {
|
||||
expect(cert.canApplyTo(testCase)).toBeTruthy();
|
||||
});
|
||||
|
||||
cert.update({
|
||||
name: 'Certificate for api.com',
|
||||
matches: ['https://api.com'],
|
||||
key: { src: '/User/path/to/certificate/key' },
|
||||
cert: { src: '/User/path/to/certificate' },
|
||||
passphrase: 'iampassphrase',
|
||||
});
|
||||
|
||||
expect(cert.name).toEqual('Certificate for api.com');
|
||||
expect(cert.key).toEqual({ src: '/User/path/to/certificate/key' });
|
||||
expect(cert.cert).toEqual({ src: '/User/path/to/certificate' });
|
||||
expect(cert.passphrase).toEqual('iampassphrase');
|
||||
it('test methods', () => {
|
||||
const cert = new Certificate({
|
||||
name: 'Certificate for example.com',
|
||||
matches: ['https://example.com'],
|
||||
key: { src: '/User/path/to/certificate/key' },
|
||||
cert: { src: '/User/path/to/certificate' },
|
||||
passphrase: 'iampassphrase',
|
||||
});
|
||||
|
||||
['https://example.com', 'https://example.com/subdomain'].forEach(testCase => {
|
||||
expect(cert.canApplyTo(testCase)).toBeTruthy();
|
||||
});
|
||||
|
||||
cert.update({
|
||||
name: 'Certificate for api.com',
|
||||
matches: ['https://api.com'],
|
||||
key: { src: '/User/path/to/certificate/key' },
|
||||
cert: { src: '/User/path/to/certificate' },
|
||||
passphrase: 'iampassphrase',
|
||||
});
|
||||
|
||||
expect(cert.name).toEqual('Certificate for api.com');
|
||||
expect(cert.key).toEqual({ src: '/User/path/to/certificate/key' });
|
||||
expect(cert.cert).toEqual({ src: '/User/path/to/certificate' });
|
||||
expect(cert.passphrase).toEqual('iampassphrase');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,227 +3,201 @@ import { describe, expect, it } from 'vitest';
|
||||
import { Cookie, CookieJar, CookieList, CookieObject, mergeCookieJar } from '../cookies';
|
||||
|
||||
describe('test Cookie object', () => {
|
||||
it('test basic operations', () => {
|
||||
const cookieStr1 = 'key=value; Domain=inso.com; Path=/; Expires=Wed, 21 Oct 2015 07:28:00 GMT; Max-Age=0;Secure;HttpOnly;HostOnly;Session';
|
||||
it('test basic operations', () => {
|
||||
const cookieStr1 =
|
||||
'key=value; Domain=inso.com; Path=/; Expires=Wed, 21 Oct 2015 07:28:00 GMT; Max-Age=0;Secure;HttpOnly;HostOnly;Session';
|
||||
|
||||
expect(
|
||||
Cookie.parse(cookieStr1)
|
||||
).toEqual({
|
||||
key: 'key',
|
||||
value: 'value',
|
||||
domain: 'inso.com',
|
||||
expires: new Date('2015-10-21T07:28:00.000Z'),
|
||||
maxAge: 0,
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [],
|
||||
});
|
||||
expect(Cookie.parse(cookieStr1)).toEqual({
|
||||
key: 'key',
|
||||
value: 'value',
|
||||
domain: 'inso.com',
|
||||
expires: new Date('2015-10-21T07:28:00.000Z'),
|
||||
maxAge: 0,
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [],
|
||||
});
|
||||
|
||||
const cookie1Opt = {
|
||||
key: 'myCookie',
|
||||
value: 'myCookie',
|
||||
expires: '01 Jan 1970 00:00:01 GMT',
|
||||
maxAge: 7,
|
||||
domain: 'domain.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [{ key: 'Ext', value: 'ExtValue' }],
|
||||
};
|
||||
const cookie1 = new Cookie(cookie1Opt);
|
||||
const cookie1Opt = {
|
||||
key: 'myCookie',
|
||||
value: 'myCookie',
|
||||
expires: '01 Jan 1970 00:00:01 GMT',
|
||||
maxAge: 7,
|
||||
domain: 'domain.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [{ key: 'Ext', value: 'ExtValue' }],
|
||||
};
|
||||
const cookie1 = new Cookie(cookie1Opt);
|
||||
|
||||
const expectedCookieString = 'myCookie=myCookie; Expires=Thu, 01 Jan 1970 00:00:01 GMT; Max-Age=7; Path=/; Secure; HttpOnly; HostOnly; Ext=ExtValue';
|
||||
const expectedCookieString =
|
||||
'myCookie=myCookie; Expires=Thu, 01 Jan 1970 00:00:01 GMT; Max-Age=7; Path=/; Secure; HttpOnly; HostOnly; Ext=ExtValue';
|
||||
|
||||
expect(cookie1.toString()).toEqual(expectedCookieString);
|
||||
expect(Cookie.stringify(cookie1)).toEqual(expectedCookieString);
|
||||
expect(cookie1.toString()).toEqual(expectedCookieString);
|
||||
expect(Cookie.stringify(cookie1)).toEqual(expectedCookieString);
|
||||
|
||||
const cookie2 = new Cookie(expectedCookieString);
|
||||
expect(cookie2.toString()).toEqual(expectedCookieString);
|
||||
expect(Cookie.stringify(cookie2)).toEqual(expectedCookieString);
|
||||
const cookie2 = new Cookie(expectedCookieString);
|
||||
expect(cookie2.toString()).toEqual(expectedCookieString);
|
||||
expect(Cookie.stringify(cookie2)).toEqual(expectedCookieString);
|
||||
|
||||
const c1 = new Cookie({
|
||||
const c1 = new Cookie({
|
||||
key: 'c1',
|
||||
value: 'c1',
|
||||
maxAge: 1,
|
||||
});
|
||||
const c2 = new Cookie({
|
||||
key: 'c2',
|
||||
value: 'c2',
|
||||
maxAge: 2,
|
||||
});
|
||||
const CookieListStr = Cookie.unparse([c1, c2]);
|
||||
expect(CookieListStr).toEqual('c1=c1; Max-Age=1; c2=c2; Max-Age=2');
|
||||
|
||||
expect(Cookie.unparseSingle(cookie1Opt)).toEqual(expectedCookieString);
|
||||
|
||||
const malformedCookieString = '=gingerale';
|
||||
expect(Cookie.parse(malformedCookieString)).toEqual({
|
||||
key: '',
|
||||
value: 'gingerale',
|
||||
expires: 'Infinity',
|
||||
hostOnly: false,
|
||||
httpOnly: false,
|
||||
maxAge: null,
|
||||
secure: false,
|
||||
session: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('test cookie transforming', () => {
|
||||
const fakeBaseModel = {
|
||||
_id: '',
|
||||
type: '',
|
||||
parentId: '',
|
||||
modified: 0,
|
||||
created: 0,
|
||||
isPrivate: false,
|
||||
name: '',
|
||||
};
|
||||
const cookieJars = [
|
||||
{
|
||||
cookies: [
|
||||
{
|
||||
id: '1',
|
||||
key: 'c1',
|
||||
value: 'c1',
|
||||
maxAge: 1,
|
||||
});
|
||||
const c2 = new Cookie({
|
||||
key: 'c2',
|
||||
value: 'c2',
|
||||
maxAge: 2,
|
||||
});
|
||||
const CookieListStr = Cookie.unparse([c1, c2]);
|
||||
expect(CookieListStr).toEqual(
|
||||
'c1=c1; Max-Age=1; c2=c2; Max-Age=2'
|
||||
);
|
||||
|
||||
expect(
|
||||
Cookie.unparseSingle(cookie1Opt)
|
||||
).toEqual(expectedCookieString);
|
||||
|
||||
const malformedCookieString = '=gingerale';
|
||||
expect(
|
||||
Cookie.parse(malformedCookieString)
|
||||
).toEqual({
|
||||
key: '',
|
||||
value: 'gingerale',
|
||||
value: 'v1',
|
||||
expires: 'Infinity',
|
||||
hostOnly: false,
|
||||
httpOnly: false,
|
||||
maxAge: null,
|
||||
secure: false,
|
||||
session: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('test cookie transforming', () => {
|
||||
const fakeBaseModel = {
|
||||
_id: '',
|
||||
type: '',
|
||||
parentId: '',
|
||||
modified: 0,
|
||||
created: 0,
|
||||
isPrivate: false,
|
||||
name: '',
|
||||
};
|
||||
const cookieJars = [
|
||||
{
|
||||
cookies: [
|
||||
{
|
||||
id: '1',
|
||||
key: 'c1',
|
||||
value: 'v1',
|
||||
expires: 'Infinity',
|
||||
domain: 'inso.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
extensions: [],
|
||||
creation: new Date(),
|
||||
creationIndex: 0,
|
||||
hostOnly: true,
|
||||
pathIsDefault: true,
|
||||
lastAccessed: new Date(),
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: 'c2',
|
||||
value: 'v2',
|
||||
expires: new Date('08 Aug 1988 08:08:08 GMT'),
|
||||
domain: 'inso.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
extensions: [],
|
||||
creation: new Date(),
|
||||
creationIndex: 0,
|
||||
hostOnly: true,
|
||||
pathIsDefault: true,
|
||||
lastAccessed: new Date(),
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
cookieJars.forEach(jar => {
|
||||
const originalJar = { ...fakeBaseModel, ...jar };
|
||||
const sdkJar = new CookieObject(originalJar);
|
||||
const convertedJar = mergeCookieJar(originalJar, sdkJar.jar().toInsomniaCookieJar());
|
||||
|
||||
expect(convertedJar).toEqual(originalJar);
|
||||
});
|
||||
domain: 'inso.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
extensions: [],
|
||||
creation: new Date(),
|
||||
creationIndex: 0,
|
||||
hostOnly: true,
|
||||
pathIsDefault: true,
|
||||
lastAccessed: new Date(),
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: 'c2',
|
||||
value: 'v2',
|
||||
expires: new Date('08 Aug 1988 08:08:08 GMT'),
|
||||
domain: 'inso.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
extensions: [],
|
||||
creation: new Date(),
|
||||
creationIndex: 0,
|
||||
hostOnly: true,
|
||||
pathIsDefault: true,
|
||||
lastAccessed: new Date(),
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
cookieJars.forEach(jar => {
|
||||
const originalJar = { ...fakeBaseModel, ...jar };
|
||||
const sdkJar = new CookieObject(originalJar);
|
||||
const convertedJar = mergeCookieJar(originalJar, sdkJar.jar().toInsomniaCookieJar());
|
||||
|
||||
expect(convertedJar).toEqual(originalJar);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('test CookieJar', () => {
|
||||
it('basic operations', () => {
|
||||
const cookieOptBase = {
|
||||
key: 'myCookie',
|
||||
value: 'myCookie',
|
||||
expires: '01 Jan 1970 00:00:01 GMT',
|
||||
maxAge: 7,
|
||||
domain: 'domain.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [{ key: 'Ext', value: 'ExtValue' }],
|
||||
};
|
||||
it('basic operations', () => {
|
||||
const cookieOptBase = {
|
||||
key: 'myCookie',
|
||||
value: 'myCookie',
|
||||
expires: '01 Jan 1970 00:00:01 GMT',
|
||||
maxAge: 7,
|
||||
domain: 'domain.com',
|
||||
path: '/',
|
||||
secure: true,
|
||||
httpOnly: true,
|
||||
hostOnly: true,
|
||||
session: true,
|
||||
extensions: [{ key: 'Ext', value: 'ExtValue' }],
|
||||
};
|
||||
|
||||
const jar = new CookieJar(
|
||||
'my jar',
|
||||
[
|
||||
new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1' }),
|
||||
new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }),
|
||||
],
|
||||
);
|
||||
const jar = new CookieJar('my jar', [
|
||||
new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1' }),
|
||||
new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }),
|
||||
]);
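// The jar operations below use error-first callbacks: set() and get() pass
// (error, cookie) to the callback, while unset() and clear() only report an error.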
|
||||
|
||||
jar.set('domain.com', 'c1', { ...cookieOptBase, key: 'c1', value: 'c1Updated' }, (error, cookie) => {
|
||||
expect(error).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(
|
||||
new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1Updated' }).toJSON()
|
||||
);
|
||||
});
|
||||
|
||||
jar.set('domain2.com', 'c2', { ...cookieOptBase, key: 'c2', value: 'c2' }, (error, cookie) => {
|
||||
expect(error).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(
|
||||
new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }).toJSON()
|
||||
);
|
||||
});
|
||||
|
||||
jar.get('domain.com', 'c1', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(
|
||||
cookie?.toJSON(),
|
||||
).toEqual(
|
||||
new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1Updated' }).toJSON(),
|
||||
);
|
||||
});
|
||||
|
||||
jar.get('domain2.com', 'c2', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(
|
||||
cookie?.toJSON(),
|
||||
).toEqual(
|
||||
new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }).toJSON(),
|
||||
);
|
||||
});
|
||||
|
||||
jar.unset('domain.com', 'c1', err => {
|
||||
expect(err).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.get('domain.com', 'c1', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.clear('domain2.com', err => {
|
||||
expect(err).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.get('domain2.com', 'c2', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie).toBeUndefined();
|
||||
});
|
||||
jar.set('domain.com', 'c1', { ...cookieOptBase, key: 'c1', value: 'c1Updated' }, (error, cookie) => {
|
||||
expect(error).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1Updated' }).toJSON());
|
||||
});
|
||||
|
||||
it('CookieList operations', () => {
|
||||
const cookieList = new CookieList(
|
||||
[
|
||||
new Cookie({ key: 'c1', value: 'v1' }),
|
||||
new Cookie({ key: 'c2', value: 'v2' }),
|
||||
]
|
||||
);
|
||||
|
||||
const upsertedC1 = new Cookie({ key: 'c1', value: 'v1upserted' });
|
||||
cookieList.upsert(upsertedC1);
|
||||
expect(cookieList.one('c1')).toEqual(upsertedC1);
|
||||
jar.set('domain2.com', 'c2', { ...cookieOptBase, key: 'c2', value: 'c2' }, (error, cookie) => {
|
||||
expect(error).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }).toJSON());
|
||||
});
|
||||
|
||||
jar.get('domain.com', 'c1', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(new Cookie({ ...cookieOptBase, key: 'c1', value: 'c1Updated' }).toJSON());
|
||||
});
|
||||
|
||||
jar.get('domain2.com', 'c2', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie?.toJSON()).toEqual(new Cookie({ ...cookieOptBase, key: 'c2', value: 'c2' }).toJSON());
|
||||
});
|
||||
|
||||
jar.unset('domain.com', 'c1', err => {
|
||||
expect(err).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.get('domain.com', 'c1', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.clear('domain2.com', err => {
|
||||
expect(err).toBeUndefined();
|
||||
});
|
||||
|
||||
jar.get('domain2.com', 'c2', (err, cookie) => {
|
||||
expect(err).toBeUndefined();
|
||||
expect(cookie).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
it('CookieList operations', () => {
|
||||
const cookieList = new CookieList([new Cookie({ key: 'c1', value: 'v1' }), new Cookie({ key: 'c2', value: 'v2' })]);
|
||||
|
||||
const upsertedC1 = new Cookie({ key: 'c1', value: 'v1upserted' });
|
||||
cookieList.upsert(upsertedC1);
|
||||
expect(cookieList.one('c1')).toEqual(upsertedC1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -5,104 +5,96 @@ import { Environment, Variables } from '../environments';
|
||||
import { Folder, ParentFolders } from '../folders';
|
||||
|
||||
describe('test Variables object', () => {
|
||||
it('test basic operations', () => {
|
||||
const variables = new Variables({
|
||||
globalVars: new Environment('globals', { value: 'xyz' }),
|
||||
environmentVars: new Environment('environments', {}),
|
||||
collectionVars: new Environment('baseEnvironment', {}),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
|
||||
const uuidAndXyz = variables.replaceIn('{{ $randomUUID }}{{value }}');
|
||||
expect(validate(uuidAndXyz.replace('xyz', ''))).toBeTruthy();
|
||||
|
||||
const uuidAndBrackets1 = variables.replaceIn('{{ $randomUUID }}}}');
|
||||
expect(validate(uuidAndBrackets1.replace('}}', ''))).toBeTruthy();
|
||||
|
||||
const uuidAndBrackets2 = variables.replaceIn('}}{{ $randomUUID }}');
|
||||
expect(validate(uuidAndBrackets2.replace('}}', ''))).toBeTruthy();
|
||||
it('test basic operations', () => {
|
||||
const variables = new Variables({
|
||||
globalVars: new Environment('globals', { value: 'xyz' }),
|
||||
environmentVars: new Environment('environments', {}),
|
||||
collectionVars: new Environment('baseEnvironment', {}),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
|
||||
it('test environment overriding', () => {
|
||||
const globalOnlyVariables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', {}),
|
||||
collectionVars: new Environment('baseEnvironment', {}),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const normalVariables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const variablesWithIterationData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const variablesWithFolderLevelData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [
|
||||
new Environment('folderLevel1', { scope: 'folderLevel1', value: 'folderLevel1-value' }),
|
||||
new Environment('folderLevel2', { scope: 'folderLevel2', value: 'folderLevel2-value' }),
|
||||
],
|
||||
localVars: new Environment('local', { scope: 'local' }),
|
||||
});
|
||||
const variablesWithLocalData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', { scope: 'local', value: 'local-value' }),
|
||||
});
|
||||
const uuidAndXyz = variables.replaceIn('{{ $randomUUID }}{{value }}');
|
||||
expect(validate(uuidAndXyz.replace('xyz', ''))).toBeTruthy();
|
||||
|
||||
expect(globalOnlyVariables.get('value')).toEqual('global-value');
|
||||
expect(normalVariables.get('value')).toEqual('subEnv-value');
|
||||
expect(variablesWithIterationData.get('value')).toEqual('iterationData-value');
|
||||
expect(variablesWithFolderLevelData.get('value')).toEqual('folderLevel2-value');
|
||||
expect(variablesWithLocalData.get('value')).toEqual('local-value');
|
||||
const uuidAndBrackets1 = variables.replaceIn('{{ $randomUUID }}}}');
|
||||
expect(validate(uuidAndBrackets1.replace('}}', ''))).toBeTruthy();
|
||||
|
||||
expect(variablesWithFolderLevelData.replaceIn('{{ value}}')).toEqual('folderLevel2-value');
|
||||
const uuidAndBrackets2 = variables.replaceIn('}}{{ $randomUUID }}');
|
||||
expect(validate(uuidAndBrackets2.replace('}}', ''))).toBeTruthy();
|
||||
});
|
||||
|
||||
it('test environment overriding', () => {
|
||||
const globalOnlyVariables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', {}),
|
||||
collectionVars: new Environment('baseEnvironment', {}),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const normalVariables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', {}),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const variablesWithIterationData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', {}),
|
||||
});
|
||||
const variablesWithFolderLevelData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [
|
||||
new Environment('folderLevel1', { scope: 'folderLevel1', value: 'folderLevel1-value' }),
|
||||
new Environment('folderLevel2', { scope: 'folderLevel2', value: 'folderLevel2-value' }),
|
||||
],
|
||||
localVars: new Environment('local', { scope: 'local' }),
|
||||
});
|
||||
const variablesWithLocalData = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: [],
|
||||
localVars: new Environment('local', { scope: 'local', value: 'local-value' }),
|
||||
});
|
||||
|
||||
it('variables operations', () => {
|
||||
const folders = new ParentFolders([
|
||||
new Folder(
|
||||
'1',
|
||||
'folder1',
|
||||
{ value: 'folder1Value' },
|
||||
),
|
||||
new Folder(
|
||||
'2',
|
||||
'folder2',
|
||||
{ value: 'folder2Value' },
|
||||
),
|
||||
]);
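// The expectations below check scope precedence as configured in the fixtures:
// a value from a more specific scope wins (local over iteration data, folder-level
// over iteration data, iteration data over the sub-environment, and the
// sub-environment over the base environment and globals), and the last
// folder-level environment takes precedence over earlier ones.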
|
||||
expect(globalOnlyVariables.get('value')).toEqual('global-value');
|
||||
expect(normalVariables.get('value')).toEqual('subEnv-value');
|
||||
expect(variablesWithIterationData.get('value')).toEqual('iterationData-value');
|
||||
expect(variablesWithFolderLevelData.get('value')).toEqual('folderLevel2-value');
|
||||
expect(variablesWithLocalData.get('value')).toEqual('local-value');
|
||||
|
||||
const variables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: folders.getEnvironments(),
|
||||
localVars: new Environment('local', { scope: 'local' }),
|
||||
});
|
||||
expect(variablesWithFolderLevelData.replaceIn('{{ value}}')).toEqual('folderLevel2-value');
|
||||
});
|
||||
|
||||
folders.get('folder2').environment.set('value', 'folder1ValueOverride');
|
||||
expect(variables.get('value')).toEqual('folder1ValueOverride');
|
||||
it('variables operations', () => {
|
||||
const folders = new ParentFolders([
|
||||
new Folder('1', 'folder1', { value: 'folder1Value' }),
|
||||
new Folder('2', 'folder2', { value: 'folder2Value' }),
|
||||
]);
|
||||
|
||||
const variables = new Variables({
|
||||
globalVars: new Environment('globals', { scope: 'global', value: 'global-value' }),
|
||||
environmentVars: new Environment('environments', { scope: 'subEnv', value: 'subEnv-value' }),
|
||||
collectionVars: new Environment('baseEnvironment', { scope: 'baseEnv', value: 'baseEnv-value' }),
|
||||
iterationDataVars: new Environment('iterationData', { scope: 'iterationData', value: 'iterationData-value' }),
|
||||
folderLevelVars: folders.getEnvironments(),
|
||||
localVars: new Environment('local', { scope: 'local' }),
|
||||
});
|
||||
|
||||
folders.get('folder2').environment.set('value', 'folder1ValueOverride');
|
||||
expect(variables.get('value')).toEqual('folder1ValueOverride');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,31 +4,26 @@ import { Header, HeaderList } from '../headers';
// import { QueryParam, setUrlParser, Url, UrlMatchPattern } from '../urls';

describe('test Header object', () => {
it('test basic operations', () => {
// const header = new Header('Content-Type: application/json\nUser-Agent: MyClientLibrary/2.0\n');
const headerStr = 'Content-Type: application/json\nUser-Agent: MyClientLibrary/2.0\n';
const headerObjs = [
{ key: 'Content-Type', value: 'application/json' },
{ key: 'User-Agent', value: 'MyClientLibrary/2.0' },
];
it('test basic operations', () => {
|
||||
// const header = new Header('Content-Type: application/json\nUser-Agent: MyClientLibrary/2.0\n');
|
||||
const headerStr = 'Content-Type: application/json\nUser-Agent: MyClientLibrary/2.0\n';
|
||||
const headerObjs = [
|
||||
{ key: 'Content-Type', value: 'application/json' },
|
||||
{ key: 'User-Agent', value: 'MyClientLibrary/2.0' },
|
||||
];
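// Header.parse() turns the raw "Key: value" lines into key/value objects and
// Header.unparse() is its inverse, so the round-trip below reproduces headerObjs.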
|
||||
|
||||
expect(Header.parse(headerStr)).toEqual(headerObjs);
|
||||
expect(
|
||||
Header.parse(Header.unparse(headerObjs))
|
||||
).toEqual(headerObjs);
|
||||
});
|
||||
expect(Header.parse(headerStr)).toEqual(headerObjs);
|
||||
expect(Header.parse(Header.unparse(headerObjs))).toEqual(headerObjs);
|
||||
});
|
||||
|
||||
it('HeaderList operations', () => {
|
||||
const headerList = new HeaderList(
|
||||
undefined,
|
||||
[
|
||||
new Header({ key: 'h1', value: 'v1' }),
|
||||
new Header({ key: 'h2', value: 'v2' }),
|
||||
]
|
||||
);
|
||||
it('HeaderList operations', () => {
|
||||
const headerList = new HeaderList(undefined, [
|
||||
new Header({ key: 'h1', value: 'v1' }),
|
||||
new Header({ key: 'h2', value: 'v2' }),
|
||||
]);
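// upsert() replaces the existing 'h1' header in place, so one('h1') returns the
// updated header afterwards.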
|
||||
|
||||
const upserted = new Header({ key: 'h1', value: 'v1upserted' });
|
||||
headerList.upsert(upserted);
|
||||
expect(headerList.one('h1')).toEqual(upserted);
|
||||
});
|
||||
const upserted = new Header({ key: 'h1', value: 'v1upserted' });
|
||||
headerList.upsert(upserted);
|
||||
expect(headerList.one('h1')).toEqual(upserted);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,205 +3,134 @@ import { describe, expect, it } from 'vitest';
|
||||
import { Property, PropertyBase, PropertyList } from '../properties';
|
||||
|
||||
describe('test Property objects', () => {
|
||||
it('PropertyBase: basic operations', () => {
|
||||
const pbase = new PropertyBase('my property');
|
||||
|
||||
it('PropertyBase: basic operations', () => {
|
||||
const pbase = new PropertyBase('my property');
|
||||
expect(pbase.toJSON()).toEqual({
|
||||
description: 'my property',
|
||||
});
|
||||
expect(pbase.toObject()).toEqual({
|
||||
description: 'my property',
|
||||
});
|
||||
});
|
||||
|
||||
expect(pbase.toJSON()).toEqual({
|
||||
description: 'my property',
|
||||
});
|
||||
expect(pbase.toObject()).toEqual({
|
||||
description: 'my property',
|
||||
});
|
||||
it('Property: basic operations', () => {
|
||||
const prop = new Property('id', 'name', false, { id: 'real_id', name: 'real_name' });
|
||||
|
||||
expect(prop.toJSON()).toEqual({
|
||||
disabled: false,
|
||||
id: 'real_id',
|
||||
name: 'real_name',
|
||||
});
|
||||
|
||||
it('Property: basic operations', () => {
|
||||
const prop = new Property(
|
||||
'id',
|
||||
'name',
|
||||
false,
|
||||
{ id: 'real_id', name: 'real_name' },
|
||||
);
|
||||
expect(Property.replaceSubstitutions('{{ hehe }}', { hehe: 777 })).toEqual('777');
|
||||
expect(
|
||||
Property.replaceSubstitutionsIn(
|
||||
{
|
||||
value: '{{ hehe }}',
|
||||
},
|
||||
{ hehe: 777 },
|
||||
),
|
||||
).toEqual({ value: '777' });
|
||||
});
|
||||
|
||||
expect(prop.toJSON()).toEqual({
|
||||
disabled: false,
|
||||
id: 'real_id',
|
||||
name: 'real_name',
|
||||
});
|
||||
it('PropertyList: basic operations: add, append, count, all, clear', () => {
|
||||
const propList = new PropertyList({}, undefined, [new Property('id1', 'p1')]);
|
||||
|
||||
expect(Property.replaceSubstitutions('{{ hehe }}', { hehe: 777 })).toEqual('777');
|
||||
expect(Property.replaceSubstitutionsIn(
|
||||
{
|
||||
value: '{{ hehe }}',
|
||||
},
|
||||
{ hehe: 777 },
|
||||
))
|
||||
.toEqual({ value: '777' });
|
||||
});
|
||||
propList.add(new Property('id2', 'p2'));
|
||||
propList.append(new Property('id3', 'p3'));
|
||||
expect(propList.count()).toBe(3);
|
||||
expect(propList.all()).toEqual([
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id1',
|
||||
name: 'p1',
|
||||
},
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id2',
|
||||
name: 'p2',
|
||||
},
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id3',
|
||||
name: 'p3',
|
||||
},
|
||||
]);
|
||||
|
||||
it('PropertyList: basic operations: add, append, count, all, clear', () => {
|
||||
const propList = new PropertyList(
|
||||
{},
|
||||
undefined,
|
||||
[
|
||||
new Property('id1', 'p1'),
|
||||
],
|
||||
);
|
||||
propList.clear();
|
||||
});
|
||||
|
||||
propList.add(new Property('id2', 'p2'));
|
||||
propList.append(new Property('id3', 'p3'));
|
||||
expect(propList.count()).toBe(3);
|
||||
expect(propList.all()).toEqual([
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id1',
|
||||
name: 'p1',
|
||||
},
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id2',
|
||||
name: 'p2',
|
||||
},
|
||||
{
|
||||
disabled: false,
|
||||
id: 'id3',
|
||||
name: 'p3',
|
||||
},
|
||||
]);
|
||||
it('PropertyList: basic operations: assimilate, each, filter, find', () => {
|
||||
const propList = new PropertyList<Property>(Property, undefined, []);
|
||||
|
||||
propList.clear();
|
||||
});
|
||||
propList.assimilate([new Property('id1', 'p1'), new Property('id2', 'p2')], false);
|
||||
expect(propList.count()).toBe(2);
|
||||
|
||||
it('PropertyList: basic operations: assimilate, each, filter, find', () => {
|
||||
const propList = new PropertyList<Property>(
|
||||
Property,
|
||||
undefined,
|
||||
[],
|
||||
);
|
||||
propList.each(prop => {
|
||||
expect(prop.name?.startsWith('p')).toBeTruthy();
|
||||
}, {});
|
||||
|
||||
propList.assimilate(
|
||||
[
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
],
|
||||
false,
|
||||
);
|
||||
expect(propList.count()).toBe(2);
|
||||
expect(propList.filter(prop => prop.name === 'p1', {}).length).toBe(1);
|
||||
|
||||
propList.each(
|
||||
prop => {
|
||||
expect(prop.name?.startsWith('p')).toBeTruthy();
|
||||
},
|
||||
{},
|
||||
);
|
||||
expect(propList.find(prop => prop?.name === 'p2', {}) != null).toBeTruthy();
|
||||
});
|
||||
|
||||
expect(
|
||||
propList.filter(
|
||||
prop => prop.name === 'p1',
|
||||
{},
|
||||
).length
|
||||
).toBe(1);
|
||||
it('PropertyList: basic operations: one, has, indexOf, insert, insertAfter, prepend, populate, map, reduce', () => {
|
||||
const propList = new PropertyList<Property>(Property, undefined, [
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
]);
|
||||
|
||||
expect(
|
||||
propList.find(
|
||||
prop => prop?.name === 'p2',
|
||||
{},
|
||||
) != null
|
||||
).toBeTruthy();
|
||||
});
|
||||
expect(propList.one('id1')).toEqual(new Property('id1', 'p1'));
|
||||
expect(propList.has(new Property('id1', 'p1'))).toBeTruthy();
|
||||
expect(propList.indexOf(new Property('id1', 'p1')) === 0).toBeTruthy();
|
||||
propList.clear();
|
||||
|
||||
it('PropertyList: basic operations: one, has, indexOf, insert, insertAfter, prepend, populate, map, reduce', () => {
|
||||
const propList = new PropertyList<Property>(
|
||||
Property,
|
||||
undefined,
|
||||
[
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
],
|
||||
);
|
||||
propList.insert(new Property('id0', 'p0'), 0);
|
||||
propList.insertAfter(new Property('id1', 'p1'), 1);
|
||||
propList.prepend(new Property('id-1', 'p-1'));
|
||||
propList.populate([new Property('id2', 'p2')]);
|
||||
});
|
||||
|
||||
expect(propList.one('id1'))
|
||||
.toEqual(new Property('id1', 'p1'));
|
||||
expect(propList.has(new Property('id1', 'p1')))
|
||||
.toBeTruthy();
|
||||
expect(propList.indexOf(new Property('id1', 'p1')) === 0).toBeTruthy();
|
||||
propList.clear();
|
||||
it('PropertyList: basic operations: one, has, indexOf, insert, insertAfter, prepend, populate, map, reduce', () => {
|
||||
const propList = new PropertyList<Property>(Property, undefined, [
|
||||
new Property('id0', 'p0'),
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
]);
|
||||
|
||||
propList.insert(new Property('id0', 'p0'), 0);
|
||||
propList.insertAfter(new Property('id1', 'p1'), 1);
|
||||
propList.prepend(new Property('id-1', 'p-1'));
|
||||
propList.populate([new Property('id2', 'p2')]);
|
||||
});
|
||||
expect(propList.map(prop => prop.id, {})).toEqual(['id0', 'id1', 'id2']);
|
||||
expect(propList.reduce((acc, prop) => (acc += prop.id), '', {})).toEqual('id0id1id2');
|
||||
});
|
||||
|
||||
it('PropertyList: basic operations: one, has, indexOf, insert, insertAfter, prepend, populate, map, reduce', () => {
|
||||
const propList = new PropertyList<Property>(
|
||||
Property,
|
||||
undefined,
|
||||
[
|
||||
new Property('id0', 'p0'),
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
],
|
||||
);
|
||||
it('PropertyList: basic operations: remove, count, repopulate, toString, get, one, idx, upsert', () => {
|
||||
const propList = new PropertyList<Property>(Property, undefined, [
|
||||
new Property('id0', 'p0'),
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
]);
|
||||
|
||||
expect(
|
||||
propList.map(
|
||||
prop => prop.id,
|
||||
{},
|
||||
)
|
||||
).toEqual([
|
||||
'id0',
|
||||
'id1',
|
||||
'id2',
|
||||
]);
|
||||
expect(
|
||||
propList.reduce(
|
||||
(acc, prop) => acc += prop.id,
|
||||
'',
|
||||
{},
|
||||
),
|
||||
).toEqual('id0id1id2');
|
||||
});
|
||||
propList.remove(prop => prop.id === 'id0', {});
|
||||
expect(propList.count()).toEqual(2);
|
||||
|
||||
it('PropertyList: basic operations: remove, count, repopulate, toString, get, one, idx, upsert', () => {
|
||||
const propList = new PropertyList<Property>(
|
||||
Property,
|
||||
undefined,
|
||||
[
|
||||
new Property('id0', 'p0'),
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
],
|
||||
);
|
||||
propList.repopulate([new Property('id1', 'p1'), new Property('id2', 'p2')]);
|
||||
|
||||
propList.remove(
|
||||
prop => prop.id === 'id0',
|
||||
{},
|
||||
);
|
||||
expect(
|
||||
propList.count(),
|
||||
).toEqual(2);
|
||||
expect(propList.toString()).toEqual(
|
||||
'[{"id":"id1","name":"p1","disabled":false}; {"id":"id2","name":"p2","disabled":false}]',
|
||||
);
|
||||
|
||||
propList.repopulate([
|
||||
new Property('id1', 'p1'),
|
||||
new Property('id2', 'p2'),
|
||||
]);
|
||||
const expectedP1 = new Property('id1', 'p1');
|
||||
const getP1 = propList.get('id1');
|
||||
const oneP1 = propList.one('id1');
|
||||
expect(getP1).toEqual(expectedP1);
|
||||
expect(oneP1).toEqual(expectedP1);
|
||||
|
||||
expect(propList.toString()).toEqual(
|
||||
'[{"id":"id1","name":"p1","disabled":false}; {"id":"id2","name":"p2","disabled":false}]',
|
||||
);
|
||||
const idxP1 = propList.idx(0);
|
||||
expect(idxP1).toEqual(expectedP1);
|
||||
|
||||
const expectedP1 = new Property('id1', 'p1');
|
||||
const getP1 = propList.get('id1');
|
||||
const oneP1 = propList.one('id1');
|
||||
expect(getP1).toEqual(expectedP1);
|
||||
expect(oneP1).toEqual(expectedP1);
|
||||
|
||||
const idxP1 = propList.idx(0);
|
||||
expect(idxP1).toEqual(expectedP1);
|
||||
|
||||
const upsertedP2 = new Property('id2', 'upsertedP2');
|
||||
propList.upsert(upsertedP2);
|
||||
expect(propList.one('id2')).toEqual(upsertedP2);
|
||||
});
|
||||
const upsertedP2 = new Property('id2', 'upsertedP2');
|
||||
propList.upsert(upsertedP2);
|
||||
expect(propList.one('id2')).toEqual(upsertedP2);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,65 +4,58 @@ import { ProxyConfig, ProxyConfigList, transformToSdkProxyOptions } from '../pro
|
||||
import { Url } from '../urls';
|
||||
|
||||
describe('test ProxyConfig object', () => {
|
||||
it('test basic operations', () => {
|
||||
|
||||
const proxyConfig = new ProxyConfig({
|
||||
match: 'http+https://*.example.com:80/*',
|
||||
host: 'proxy.com',
|
||||
port: 8080,
|
||||
tunnel: true,
|
||||
disabled: false,
|
||||
authenticate: true,
|
||||
username: 'proxy_username',
|
||||
password: 'proxy_password',
|
||||
protocol: 'https:',
|
||||
});
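// The match pattern 'http+https://*.example.com:80/*' covers both schemes, which
// is what the getProtocols() and test() expectations below verify.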
|
||||
|
||||
expect(
|
||||
proxyConfig.getProtocols()
|
||||
).toEqual(
|
||||
['http', 'https']
|
||||
);
|
||||
|
||||
expect(proxyConfig.getProxyUrl()).toEqual(
|
||||
'https://proxy_username:proxy_password@proxy.com:8080'
|
||||
);
|
||||
|
||||
expect(
|
||||
proxyConfig.test('http://a.example.com:80/a')
|
||||
).toBeTruthy();
|
||||
|
||||
const configList = new ProxyConfigList<ProxyConfig>(undefined, []);
|
||||
configList.add(proxyConfig);
|
||||
configList.add(new ProxyConfig({
|
||||
match: 'https://*.example.com:80/*',
|
||||
host: 'proxy.com',
|
||||
port: 8080,
|
||||
tunnel: true,
|
||||
disabled: false,
|
||||
authenticate: true,
|
||||
username: 'proxy_username',
|
||||
password: 'proxy_password',
|
||||
protocol: 'https:',
|
||||
}));
|
||||
|
||||
const matchedProxyConfigDef = configList.resolve(new Url('http://sub.example.com:80/path'));
|
||||
expect(matchedProxyConfigDef?.host).toEqual('proxy.com');
|
||||
it('test basic operations', () => {
|
||||
const proxyConfig = new ProxyConfig({
|
||||
match: 'http+https://*.example.com:80/*',
|
||||
host: 'proxy.com',
|
||||
port: 8080,
|
||||
tunnel: true,
|
||||
disabled: false,
|
||||
authenticate: true,
|
||||
username: 'proxy_username',
|
||||
password: 'proxy_password',
|
||||
protocol: 'https:',
|
||||
});
|
||||
|
||||
const proxyUrls = [
|
||||
'http://wormhole',
|
||||
'http://wormhole:0',
|
||||
'https://localhost',
|
||||
'http://user:pass@localhost:666',
|
||||
'http://user:pass@localhost:0',
|
||||
'http://user:pass@localhost',
|
||||
];
|
||||
expect(proxyConfig.getProtocols()).toEqual(['http', 'https']);
|
||||
|
||||
proxyUrls.forEach(url => {
|
||||
it(`test proxy transforming: ${url}`, () => {
|
||||
const proxy = new ProxyConfig(transformToSdkProxyOptions(url, '', true, ''));
|
||||
expect(proxy.getProxyUrl()).toEqual(url);
|
||||
});
|
||||
expect(proxyConfig.getProxyUrl()).toEqual('https://proxy_username:proxy_password@proxy.com:8080');
|
||||
|
||||
expect(proxyConfig.test('http://a.example.com:80/a')).toBeTruthy();
|
||||
|
||||
const configList = new ProxyConfigList<ProxyConfig>(undefined, []);
|
||||
configList.add(proxyConfig);
|
||||
configList.add(
|
||||
new ProxyConfig({
|
||||
match: 'https://*.example.com:80/*',
|
||||
host: 'proxy.com',
|
||||
port: 8080,
|
||||
tunnel: true,
|
||||
disabled: false,
|
||||
authenticate: true,
|
||||
username: 'proxy_username',
|
||||
password: 'proxy_password',
|
||||
protocol: 'https:',
|
||||
}),
|
||||
);
|
||||
|
||||
const matchedProxyConfigDef = configList.resolve(new Url('http://sub.example.com:80/path'));
|
||||
expect(matchedProxyConfigDef?.host).toEqual('proxy.com');
|
||||
});
|
||||
|
||||
const proxyUrls = [
|
||||
'http://wormhole',
|
||||
'http://wormhole:0',
|
||||
'https://localhost',
|
||||
'http://user:pass@localhost:666',
|
||||
'http://user:pass@localhost:0',
|
||||
'http://user:pass@localhost',
|
||||
];
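// Round-trip check: each proxy URL is converted to SDK proxy options with
// transformToSdkProxyOptions(), and getProxyUrl() is expected to reproduce it.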
|
||||
|
||||
proxyUrls.forEach(url => {
|
||||
it(`test proxy transforming: ${url}`, () => {
|
||||
const proxy = new ProxyConfig(transformToSdkProxyOptions(url, '', true, ''));
|
||||
expect(proxy.getProxyUrl()).toEqual(url);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,156 +1,154 @@
import { describe, expect, it } from 'vitest';

import type { Header} from '../headers';
import type { Header } from '../headers';
import { HeaderList } from '../headers';
import type { RequestBodyOptions} from '../request';
import type { RequestBodyOptions } from '../request';
import { calculatePayloadSize, mergeRequestBody, Request, RequestBody, toScriptRequestBody } from '../request';

describe('test request and response objects', () => {
it('test RequestBody methods', () => {
|
||||
const reqBody = new RequestBody({
|
||||
mode: 'urlencoded',
|
||||
formdata: [
|
||||
{ key: 'formDataKey', value: 'formDataValue' },
|
||||
],
|
||||
urlencoded: [
|
||||
{ key: 'urlencodedKey', value: 'urlencodedValue' },
|
||||
],
|
||||
options: {},
|
||||
});
|
||||
|
||||
expect(reqBody.toString()).toEqual('urlencodedKey=urlencodedValue');
|
||||
|
||||
reqBody.update({ mode: 'file', file: 'file content here' });
|
||||
expect(reqBody.toString()).toEqual('file content here');
|
||||
it('test RequestBody methods', () => {
|
||||
const reqBody = new RequestBody({
|
||||
mode: 'urlencoded',
|
||||
formdata: [{ key: 'formDataKey', value: 'formDataValue' }],
|
||||
urlencoded: [{ key: 'urlencodedKey', value: 'urlencodedValue' }],
|
||||
options: {},
|
||||
});
|
||||
|
||||
it('test Request methods', () => {
|
||||
const req = new Request({
|
||||
name: 'myReq',
|
||||
url: 'https://hostname.com/path',
|
||||
method: 'GET',
|
||||
header: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
],
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: 'body content',
|
||||
},
|
||||
auth: {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'myname' },
|
||||
{ key: 'password', value: 'mypwd' },
|
||||
],
|
||||
},
|
||||
proxy: undefined,
|
||||
certificate: undefined,
|
||||
});
|
||||
expect(reqBody.toString()).toEqual('urlencodedKey=urlencodedValue');
|
||||
|
||||
expect(req.name).toEqual('myReq');
|
||||
reqBody.update({ mode: 'file', file: 'file content here' });
|
||||
expect(reqBody.toString()).toEqual('file content here');
|
||||
});
|
||||
|
||||
req.addHeader({ key: 'newHeader', value: 'newValue' });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('notExist', { ignoreCase: false });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('NEWHEADER', { ignoreCase: false });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('NEWHEADER', { ignoreCase: true });
|
||||
expect(req.headers.count()).toEqual(2);
|
||||
|
||||
req.upsertHeader({ key: 'header1', value: 'new_val1' });
|
||||
expect(req.getHeaders({
|
||||
ignoreCase: true,
|
||||
enabled: true,
|
||||
multiValue: true,
|
||||
sanitizeKeys: true,
|
||||
})).toEqual({
|
||||
header1: ['new_val1'],
|
||||
header2: ['val2'],
|
||||
});
|
||||
|
||||
const req2 = req.clone();
|
||||
expect(req2.toJSON()).toEqual(req.toJSON());
|
||||
it('test Request methods', () => {
|
||||
const req = new Request({
|
||||
name: 'myReq',
|
||||
url: 'https://hostname.com/path',
|
||||
method: 'GET',
|
||||
header: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
],
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: 'body content',
|
||||
},
|
||||
auth: {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'myname' },
|
||||
{ key: 'password', value: 'mypwd' },
|
||||
],
|
||||
},
|
||||
proxy: undefined,
|
||||
certificate: undefined,
|
||||
});
|
||||
|
||||
it('test Request body transforming', () => {
|
||||
const bodies = [
|
||||
{
|
||||
mimeType: 'text/plain',
|
||||
text: 'rawContent',
|
||||
},
|
||||
{
|
||||
mimeType: 'application/octet-stream',
|
||||
fileName: 'path/to/file',
|
||||
},
|
||||
{
|
||||
mimeType: 'application/x-www-form-urlencoded',
|
||||
params: [
|
||||
{ name: 'k1', value: 'v1' },
|
||||
{ name: 'k2', value: 'v2' },
|
||||
],
|
||||
},
|
||||
{
|
||||
mimeType: 'application/json',
|
||||
text: `{
|
||||
expect(req.name).toEqual('myReq');
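// The expectations below show that removeHeader() only matches case-insensitively
// when ignoreCase is true, and that getHeaders() with multiValue: true returns
// each header's values as an array.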
|
||||
|
||||
req.addHeader({ key: 'newHeader', value: 'newValue' });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('notExist', { ignoreCase: false });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('NEWHEADER', { ignoreCase: false });
|
||||
expect(req.headers.count()).toEqual(3);
|
||||
req.removeHeader('NEWHEADER', { ignoreCase: true });
|
||||
expect(req.headers.count()).toEqual(2);
|
||||
|
||||
req.upsertHeader({ key: 'header1', value: 'new_val1' });
|
||||
expect(
|
||||
req.getHeaders({
|
||||
ignoreCase: true,
|
||||
enabled: true,
|
||||
multiValue: true,
|
||||
sanitizeKeys: true,
|
||||
}),
|
||||
).toEqual({
|
||||
header1: ['new_val1'],
|
||||
header2: ['val2'],
|
||||
});
|
||||
|
||||
const req2 = req.clone();
|
||||
expect(req2.toJSON()).toEqual(req.toJSON());
|
||||
});
|
||||
|
||||
it('test Request body transforming', () => {
|
||||
const bodies = [
|
||||
{
|
||||
mimeType: 'text/plain',
|
||||
text: 'rawContent',
|
||||
},
|
||||
{
|
||||
mimeType: 'application/octet-stream',
|
||||
fileName: 'path/to/file',
|
||||
},
|
||||
{
|
||||
mimeType: 'application/x-www-form-urlencoded',
|
||||
params: [
|
||||
{ name: 'k1', value: 'v1' },
|
||||
{ name: 'k2', value: 'v2' },
|
||||
],
|
||||
},
|
||||
{
|
||||
mimeType: 'application/json',
|
||||
text: `{
|
||||
query: 'query',
|
||||
operationName: 'operation',
|
||||
variables: 'var',
|
||||
}`,
|
||||
},
|
||||
{
|
||||
mimeType: 'image/gif',
|
||||
fileName: '/path/to/image',
|
||||
},
|
||||
{
|
||||
mimeType: 'multipart/form-data',
|
||||
params: [
|
||||
{ name: 'k1', type: 'text', value: 'v1' },
|
||||
{ name: 'k2', type: 'file', value: '/path/to/image' },
|
||||
],
|
||||
},
|
||||
];
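// Round-trip check: each Insomnia body above is converted into a script
// RequestBody via toScriptRequestBody() and merged back with mergeRequestBody(),
// which should reproduce the original body unchanged.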
|
||||
|
||||
bodies.forEach(body => {
|
||||
const originalReqBody = body;
|
||||
const scriptReqBody = new RequestBody(toScriptRequestBody(body));
|
||||
expect(mergeRequestBody(scriptReqBody, originalReqBody)).toEqual(originalReqBody);
|
||||
});
|
||||
});
|
||||
|
||||
const reqBodyTestCases: { body: RequestBodyOptions; headers: HeaderList<Header>; expectedTotal: number }[] = [
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '1',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 1,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '😎',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 4,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '睡',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 3,
|
||||
},
|
||||
},
|
||||
{
|
||||
mimeType: 'image/gif',
|
||||
fileName: '/path/to/image',
|
||||
},
|
||||
{
|
||||
mimeType: 'multipart/form-data',
|
||||
params: [
|
||||
{ name: 'k1', type: 'text', value: 'v1' },
|
||||
{ name: 'k2', type: 'file', value: '/path/to/image' },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
reqBodyTestCases.forEach(({ body, headers, expectedTotal }) => {
|
||||
it(`test calculatePayloadSize: ${body.raw}`, () => {
|
||||
const reqSize = calculatePayloadSize(new RequestBody(body).toString(), headers);
|
||||
|
||||
expect(reqSize.total).toEqual(expectedTotal);
|
||||
});
|
||||
bodies.forEach(body => {
|
||||
const originalReqBody = body;
|
||||
const scriptReqBody = new RequestBody(toScriptRequestBody(body));
|
||||
expect(mergeRequestBody(scriptReqBody, originalReqBody)).toEqual(originalReqBody);
|
||||
});
|
||||
});
|
||||
|
||||
const reqBodyTestCases: { body: RequestBodyOptions; headers: HeaderList<Header>; expectedTotal: number }[] = [
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '1',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 1,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '😎',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 4,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '睡',
|
||||
},
|
||||
headers: new HeaderList<Header>(undefined, []),
|
||||
expectedTotal: 3,
|
||||
},
|
||||
];
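// expectedTotal matches the UTF-8 byte length of each raw body:
// '1' encodes to 1 byte, '😎' to 4 bytes and '睡' to 3 bytes.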
|
||||
|
||||
reqBodyTestCases.forEach(({ body, headers, expectedTotal }) => {
|
||||
it(`test calculatePayloadSize: ${body.raw}`, () => {
|
||||
const reqSize = calculatePayloadSize(new RequestBody(body).toString(), headers);
|
||||
|
||||
expect(reqSize.total).toEqual(expectedTotal);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,91 +4,91 @@ import { Request } from '../request';
import { Response } from '../response';

describe('test request and response objects', () => {
it('test Response methods', () => {
const req = new Request({
url: 'https://hostname.com/path',
method: 'GET',
header: [
{ key: 'header1', value: 'val1' },
{ key: 'header2', value: 'val2' },
],
body: {
mode: 'raw',
raw: '{"key": 888}',
},
auth: {
type: 'basic',
basic: [
{ key: 'username', value: 'myname' },
{ key: 'password', value: 'mypwd' },
],
},
proxy: undefined,
certificate: undefined,
});

const resp = new Response({
|
||||
code: 200,
|
||||
reason: 'OK',
|
||||
header: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
{ key: 'Content-Length', value: '100' },
|
||||
{ key: 'Content-Disposition', value: 'attachment; filename="filename.txt"' },
|
||||
{ key: 'Content-Type', value: 'text/plain; charset=utf-8' },
|
||||
],
|
||||
cookie: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
],
|
||||
body: '{"key": 888}',
|
||||
stream: undefined,
|
||||
responseTime: 100,
|
||||
originalRequest: req,
|
||||
});
|
||||
|
||||
// TODO: this will work after PropertyList.one is improved
|
||||
// expect(resp.size()).toBe(100);
|
||||
|
||||
expect(resp.json()).toEqual({
|
||||
key: 888,
|
||||
});
|
||||
expect(resp.contentInfo()).toEqual({
|
||||
charset: 'utf-8',
|
||||
contentType: 'text/plain; charset=utf-8',
|
||||
fileExtension: 'txt',
|
||||
fileName: 'filename',
|
||||
mimeFormat: '',
|
||||
mimeType: 'text/plain',
|
||||
});
|
||||
|
||||
// extended assertion chains
|
||||
resp.to.have.status(200);
|
||||
resp.to.have.status('OK');
|
||||
resp.to.have.header('header1');
|
||||
resp.to.have.jsonBody({ 'key': 888 });
|
||||
resp.to.have.body('{"key": 888}');
|
||||
resp.to.have.jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
key: { type: 'integer' },
|
||||
},
|
||||
required: ['key'],
|
||||
additionalProperties: false,
|
||||
});
|
||||
|
||||
resp.to.not.have.status(201);
|
||||
resp.to.not.have.status('NOT FOUND');
|
||||
resp.to.not.have.header('header_nonexist');
|
||||
resp.to.not.have.jsonBody({ 'key': 777 });
|
||||
resp.to.not.have.body('{"key": 777}');
|
||||
resp.to.not.have.jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
keyNoExist: { type: 'integer' },
|
||||
},
|
||||
required: ['keyNoExist'],
|
||||
additionalProperties: false,
|
||||
});
|
||||
it('test Response methods', () => {
|
||||
const req = new Request({
|
||||
url: 'https://hostname.com/path',
|
||||
method: 'GET',
|
||||
header: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
],
|
||||
body: {
|
||||
mode: 'raw',
|
||||
raw: '{"key": 888}',
|
||||
},
|
||||
auth: {
|
||||
type: 'basic',
|
||||
basic: [
|
||||
{ key: 'username', value: 'myname' },
|
||||
{ key: 'password', value: 'mypwd' },
|
||||
],
|
||||
},
|
||||
proxy: undefined,
|
||||
certificate: undefined,
|
||||
});
|
||||
|
||||
const resp = new Response({
|
||||
code: 200,
|
||||
reason: 'OK',
|
||||
header: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
{ key: 'Content-Length', value: '100' },
|
||||
{ key: 'Content-Disposition', value: 'attachment; filename="filename.txt"' },
|
||||
{ key: 'Content-Type', value: 'text/plain; charset=utf-8' },
|
||||
],
|
||||
cookie: [
|
||||
{ key: 'header1', value: 'val1' },
|
||||
{ key: 'header2', value: 'val2' },
|
||||
],
|
||||
body: '{"key": 888}',
|
||||
stream: undefined,
|
||||
responseTime: 100,
|
||||
originalRequest: req,
|
||||
});
|
||||
|
||||
// TODO: this will work after PropertyList.one is improved
|
||||
// expect(resp.size()).toBe(100);
|
||||
|
||||
expect(resp.json()).toEqual({
|
||||
key: 888,
|
||||
});
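// contentInfo() below is expected to derive fileName/fileExtension from the
// Content-Disposition header ('attachment; filename="filename.txt"') and the
// charset/mimeType from the Content-Type header.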
|
||||
expect(resp.contentInfo()).toEqual({
|
||||
charset: 'utf-8',
|
||||
contentType: 'text/plain; charset=utf-8',
|
||||
fileExtension: 'txt',
|
||||
fileName: 'filename',
|
||||
mimeFormat: '',
|
||||
mimeType: 'text/plain',
|
||||
});
|
||||
|
||||
// extended assertion chains
|
||||
resp.to.have.status(200);
|
||||
resp.to.have.status('OK');
|
||||
resp.to.have.header('header1');
|
||||
resp.to.have.jsonBody({ key: 888 });
|
||||
resp.to.have.body('{"key": 888}');
|
||||
resp.to.have.jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
key: { type: 'integer' },
|
||||
},
|
||||
required: ['key'],
|
||||
additionalProperties: false,
|
||||
});
|
||||
|
||||
resp.to.not.have.status(201);
|
||||
resp.to.not.have.status('NOT FOUND');
|
||||
resp.to.not.have.header('header_nonexist');
|
||||
resp.to.not.have.jsonBody({ key: 777 });
|
||||
resp.to.not.have.body('{"key": 777}');
|
||||
resp.to.not.have.jsonSchema({
|
||||
type: 'object',
|
||||
properties: {
|
||||
keyNoExist: { type: 'integer' },
|
||||
},
|
||||
required: ['keyNoExist'],
|
||||
additionalProperties: false,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,345 +4,342 @@ import { QueryParam, Url, UrlMatchPattern } from '../urls';
import { Variable } from '../variables';

describe('test Url object', () => {
it('test QueryParam', () => {
const queryParam = new QueryParam({
key: 'uname',
value: 'patrick star',
});

expect(queryParam.toString()).toEqual('uname=patrick+star');
|
||||
|
||||
queryParam.update('uname=peter+parker');
|
||||
expect(queryParam.toString()).toEqual('uname=peter+parker');
|
||||
|
||||
expect(
|
||||
QueryParam.unparseSingle({ key: 'uname', value: 'patrick star' })
|
||||
).toEqual('uname=patrick+star');
|
||||
|
||||
expect(
|
||||
QueryParam.unparse({ uname: 'patrick star', password: '123' })
|
||||
).toEqual('uname=patrick+star&password=123');
|
||||
|
||||
expect(
|
||||
QueryParam.parseSingle('uname=patrick+star')
|
||||
).toEqual({ key: 'uname', value: 'patrick star' });
|
||||
|
||||
expect(
|
||||
QueryParam.parse('uname=patrick+star&password=123')
|
||||
).toEqual([{ 'key': 'uname', 'value': 'patrick star' }, { 'key': 'password', 'value': '123' }]);
|
||||
it('test QueryParam', () => {
|
||||
const queryParam = new QueryParam({
|
||||
key: 'uname',
|
||||
value: 'patrick star',
|
||||
});
|
||||
|
||||
it('test Url methods', () => {
|
||||
const url = new Url({
|
||||
auth: {
|
||||
username: 'usernameValue',
|
||||
password: 'passwordValue',
|
||||
},
|
||||
hash: 'hashValue',
|
||||
host: ['hostValue', 'com'],
|
||||
path: ['pathLevel1', 'pathLevel2'],
|
||||
port: '777',
|
||||
protocol: 'https:',
|
||||
query: [
|
||||
new QueryParam({ key: 'key1', value: 'value1' }),
|
||||
new QueryParam({ key: 'key2', value: 'value2' }),
|
||||
new QueryParam({ key: 'key3', value: 'value3' }),
|
||||
],
|
||||
variables: [
|
||||
new Variable({ key: 'varKey', value: 'varValue' }),
|
||||
],
|
||||
});
|
||||
expect(queryParam.toString()).toEqual('uname=patrick+star');
|
||||
|
||||
// expect(url.getHost()).toEqual('hostValue.com');
|
||||
expect(url.getPath()).toEqual('/pathLevel1/pathLevel2');
|
||||
queryParam.update('uname=peter+parker');
|
||||
expect(queryParam.toString()).toEqual('uname=peter+parker');
|
||||
|
||||
expect(url.getQueryString()).toEqual('key1=value1&key2=value2&key3=value3');
|
||||
expect(url.getPathWithQuery()).toEqual('/pathLevel1/pathLevel2?key1=value1&key2=value2&key3=value3');
|
||||
expect(url.getRemote(true)).toEqual('hostvalue.com:777');
|
||||
expect(url.getRemote(false)).toEqual('hostvalue.com:777'); // TODO: add more cases
|
||||
expect(QueryParam.unparseSingle({ key: 'uname', value: 'patrick star' })).toEqual('uname=patrick+star');
|
||||
|
||||
url.removeQueryParams([
|
||||
new QueryParam({ key: 'key1', value: 'value1' }),
|
||||
]);
|
||||
url.removeQueryParams('key3');
|
||||
expect(url.getQueryString()).toEqual('key2=value2');
|
||||
expect(url.toString()).toEqual('https://usernameValue:passwordValue@hostvalue.com:777/pathLevel1/pathLevel2?key2=value2#hashValue');
|
||||
expect(QueryParam.unparse({ uname: 'patrick star', password: '123' })).toEqual('uname=patrick+star&password=123');
|
||||
|
||||
const url2 = new Url('https://usernameValue:passwordValue@hostValue.com:777/pathLevel1/pathLevel2?key1=value1&key2=value2#hashValue');
|
||||
expect(url2.getHost()).toEqual('hostvalue.com');
|
||||
expect(url2.getPath()).toEqual('/pathLevel1/pathLevel2');
|
||||
expect(url2.getQueryString()).toEqual('key1=value1&key2=value2');
|
||||
expect(url2.getPathWithQuery()).toEqual('/pathLevel1/pathLevel2?key1=value1&key2=value2');
|
||||
expect(url2.getRemote(true)).toEqual('hostvalue.com:777');
|
||||
expect(url2.getRemote(false)).toEqual('hostvalue.com:777'); // TODO: add more cases
|
||||
expect(QueryParam.parseSingle('uname=patrick+star')).toEqual({ key: 'uname', value: 'patrick star' });
|
||||
|
||||
url2.removeQueryParams([
|
||||
new QueryParam({ key: 'key1', value: 'value1' }),
|
||||
]);
|
||||
expect(url2.getQueryString()).toEqual('key2=value2');
|
||||
expect(url2.toString()).toEqual('https://usernameValue:passwordValue@hostvalue.com:777/pathLevel1/pathLevel2?key2=value2#hashValue');
|
||||
expect(QueryParam.parse('uname=patrick+star&password=123')).toEqual([
|
||||
{ key: 'uname', value: 'patrick star' },
|
||||
{ key: 'password', value: '123' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('test Url methods', () => {
|
||||
const url = new Url({
|
||||
auth: {
|
||||
username: 'usernameValue',
|
||||
password: 'passwordValue',
|
||||
},
|
||||
hash: 'hashValue',
|
||||
host: ['hostValue', 'com'],
|
||||
path: ['pathLevel1', 'pathLevel2'],
|
||||
port: '777',
|
||||
protocol: 'https:',
|
||||
query: [
|
||||
new QueryParam({ key: 'key1', value: 'value1' }),
|
||||
new QueryParam({ key: 'key2', value: 'value2' }),
|
||||
new QueryParam({ key: 'key3', value: 'value3' }),
|
||||
],
|
||||
variables: [new Variable({ key: 'varKey', value: 'varValue' })],
|
||||
});
|
||||
|
||||
it('test Url static methods', () => {
|
||||
// static methods
|
||||
const urlStr = 'https://myhost.com/path1/path2';
|
||||
const urlOptions = Url.parse(urlStr);
|
||||
const urlObj = new Url(urlOptions || '');
|
||||
// expect(url.getHost()).toEqual('hostValue.com');
|
||||
expect(url.getPath()).toEqual('/pathLevel1/pathLevel2');
|
||||
|
||||
expect(urlObj.toString()).toEqual(urlStr);
|
||||
expect(url.getQueryString()).toEqual('key1=value1&key2=value2&key3=value3');
|
||||
expect(url.getPathWithQuery()).toEqual('/pathLevel1/pathLevel2?key1=value1&key2=value2&key3=value3');
|
||||
expect(url.getRemote(true)).toEqual('hostvalue.com:777');
|
||||
expect(url.getRemote(false)).toEqual('hostvalue.com:777'); // TODO: add more cases
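// removeQueryParams() is exercised below with both an array of QueryParam
// objects and a plain key string.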
|
||||
|
||||
url.removeQueryParams([new QueryParam({ key: 'key1', value: 'value1' })]);
|
||||
url.removeQueryParams('key3');
|
||||
expect(url.getQueryString()).toEqual('key2=value2');
|
||||
expect(url.toString()).toEqual(
|
||||
'https://usernameValue:passwordValue@hostvalue.com:777/pathLevel1/pathLevel2?key2=value2#hashValue',
|
||||
);
|
||||
|
||||
const url2 = new Url(
|
||||
'https://usernameValue:passwordValue@hostValue.com:777/pathLevel1/pathLevel2?key1=value1&key2=value2#hashValue',
|
||||
);
|
||||
expect(url2.getHost()).toEqual('hostvalue.com');
|
||||
expect(url2.getPath()).toEqual('/pathLevel1/pathLevel2');
|
||||
expect(url2.getQueryString()).toEqual('key1=value1&key2=value2');
|
||||
expect(url2.getPathWithQuery()).toEqual('/pathLevel1/pathLevel2?key1=value1&key2=value2');
|
||||
expect(url2.getRemote(true)).toEqual('hostvalue.com:777');
|
||||
expect(url2.getRemote(false)).toEqual('hostvalue.com:777'); // TODO: add more cases
|
||||
|
||||
url2.removeQueryParams([new QueryParam({ key: 'key1', value: 'value1' })]);
|
||||
expect(url2.getQueryString()).toEqual('key2=value2');
|
||||
expect(url2.toString()).toEqual(
|
||||
'https://usernameValue:passwordValue@hostvalue.com:777/pathLevel1/pathLevel2?key2=value2#hashValue',
|
||||
);
|
||||
});
|
||||
|
||||
it('test Url static methods', () => {
|
||||
// static methods
|
||||
const urlStr = 'https://myhost.com/path1/path2';
|
||||
const urlOptions = Url.parse(urlStr);
|
||||
const urlObj = new Url(urlOptions || '');
|
||||
|
||||
expect(urlObj.toString()).toEqual(urlStr);
|
||||
});
|
||||
|
||||
it('test Url property accessing', () => {
|
||||
const urlStr = 'https://user:pwd@hehe.com:6666/path1/path2?q1=@&q2=:#myHash';
|
||||
const urlObj = new Url(urlStr);
|
||||
|
||||
expect(urlObj.auth).toEqual({ username: 'user', password: 'pwd' });
|
||||
expect(urlObj.hash).toEqual('myHash');
|
||||
expect(urlObj.host).toEqual(['hehe', 'com']);
|
||||
expect(urlObj.path).toEqual(['path1', 'path2']);
|
||||
expect(urlObj.port).toEqual('6666');
|
||||
expect(urlObj.protocol).toEqual('https:');
|
||||
|
||||
const queryParams = urlObj.query.toObject();
|
||||
expect(queryParams[0].key).toEqual('q1');
|
||||
expect(queryParams[0].value).toEqual('@');
|
||||
expect(queryParams[1].key).toEqual('q2');
|
||||
expect(queryParams[1].value).toEqual(':');
|
||||
});
|
||||
|
||||
const urlParsingTests = [
|
||||
{
|
||||
testName: 'internal url',
|
||||
url: 'inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with protocol',
|
||||
url: 'http://inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with auth',
|
||||
url: 'http://name:pwd@inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with auth without protocol',
|
||||
url: 'name:pwd@inso/',
|
||||
},
|
||||
{
|
||||
testName: 'ip address',
|
||||
url: 'http://127.0.0.1/',
|
||||
},
|
||||
{
|
||||
testName: 'localhost',
|
||||
url: 'https://localhost/',
|
||||
},
|
||||
{
|
||||
testName: 'url with query params',
|
||||
url: 'localhost/?k=v',
|
||||
},
|
||||
{
|
||||
testName: 'url with hash',
|
||||
url: 'localhost/#myHash',
|
||||
},
|
||||
{
|
||||
testName: 'url with query params and hash',
|
||||
url: 'localhost/?k=v#myHash',
|
||||
},
|
||||
{
|
||||
testName: 'url with a tag in query params',
|
||||
url: 'localhost/?k={{ myValue }}',
|
||||
},
|
||||
{
|
||||
testName: 'url with a tag in hash',
|
||||
url: 'localhost/#My{{ hashValue }}',
|
||||
},
|
||||
{
|
||||
testName: 'url with path params',
|
||||
url: 'inso.com/:path1/:path',
|
||||
},
|
||||
{
|
||||
testName: 'url with tags and path params',
|
||||
url: '{{ _.baseUrl }}/:path1/:path',
|
||||
},
|
||||
{
|
||||
testName: 'hybrid of path params and tags',
|
||||
url: '{{ baseUrl }}/:path_{{ _.pathSuffix }}',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in path',
|
||||
url: '{{ baseUrl }}/tom@any.com',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in auth and path',
|
||||
url: 'user:pass@a.com/tom@any.com',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in auth',
|
||||
url: 'user:pass@a.com/',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in path with path params, tags and hash',
|
||||
url: '{{ baseUrl }}/:path__{{ _.pathSuffix }}/tom@any.com#hash',
|
||||
},
|
||||
];
|
||||
|
||||
urlParsingTests.forEach(testCase => {
|
||||
it(`parsing url: ${testCase.testName}`, () => {
|
||||
const urlObj = new Url(testCase.url);
|
||||
expect(urlObj.toString()).toEqual(testCase.url);
|
||||
});
|
||||
});
|
||||
|
||||
it('test Url property accessing', () => {
|
||||
const urlStr = 'https://user:pwd@hehe.com:6666/path1/path2?q1=@&q2=:#myHash';
|
||||
const urlObj = new Url(urlStr);
|
||||
const additionalCases = [
|
||||
{
|
||||
origin: 'http://{{ urlWithTagOnly}}',
|
||||
expected: 'http://{{ urlWithTagOnly}}',
|
||||
},
|
||||
{
|
||||
origin: "http://httpbin.org/{{ method}}/{% uuid 'v4' %}",
|
||||
expected: "http://httpbin.org/{{ method}}/{% uuid 'v4' %}",
|
||||
},
|
||||
{
|
||||
origin: 'my-domain',
|
||||
expected: 'my-domain',
|
||||
},
|
||||
{
|
||||
origin: 'http://my-domain',
|
||||
expected: 'http://my-domain',
|
||||
},
|
||||
{
|
||||
origin: 'https://youdomain/api/validateuser/abc@.contos.com',
|
||||
expected: 'https://youdomain/api/validateuser/abc@.contos.com',
|
||||
},
|
||||
{
|
||||
origin:
|
||||
'https://s3.amazonaws.com/finance-department-bucket/2022/tax-certificate.pdf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA3SGQVQG7FGA6KKA6/20221104/us-east-1/s3/aws4_request&X-Amz-Date=20221104T140227Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=b228dbec8c1008c80c162e1210e4503dceead1e4d4751b4d9787314fd6da4d55',
|
||||
expected:
|
||||
'https://s3.amazonaws.com/finance-department-bucket/2022/tax-certificate.pdf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA3SGQVQG7FGA6KKA6/20221104/us-east-1/s3/aws4_request&X-Amz-Date=20221104T140227Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=b228dbec8c1008c80c162e1210e4503dceead1e4d4751b4d9787314fd6da4d55',
|
||||
},
|
||||
{
|
||||
origin: 'https://hehe.com',
|
||||
expected: 'https://hehe.com',
|
||||
},
|
||||
{
|
||||
origin: 'https://2001:db8:3333:4444:5555:6666:7777:8888',
|
||||
expected: 'https://2001:db8:3333:4444:5555:6666:7777:8888',
|
||||
},
|
||||
{
|
||||
origin: 'http://127.0.0.1:6666',
|
||||
expected: 'http://127.0.0.1:6666',
|
||||
},
|
||||
{
|
||||
origin: 'http://ihave@:inhostname.com',
|
||||
expected: 'http://ihave@:inhostname.com',
|
||||
},
|
||||
{
|
||||
origin: "https://{{ _['examplehost']}}",
|
||||
expected: "https://{{ _['examplehost']}}",
|
||||
},
|
||||
{
|
||||
origin: "http://{{ _['a']['b']['c']['url'] }}",
|
||||
expected: "http://{{ _['a']['b']['c']['url'] }}",
|
||||
},
|
||||
{
|
||||
origin: 'invalid?id=@:/&name=张三',
|
||||
expected: 'invalid?id=@:/&name=张三',
|
||||
},
|
||||
];
|
||||
|
||||
expect(urlObj.auth).toEqual({ username: 'user', password: 'pwd' });
|
||||
expect(urlObj.hash).toEqual('myHash');
|
||||
expect(urlObj.host).toEqual(['hehe', 'com']);
|
||||
expect(urlObj.path).toEqual(['path1', 'path2']);
|
||||
expect(urlObj.port).toEqual('6666');
|
||||
expect(urlObj.protocol).toEqual('https:');
|
||||
|
||||
const queryParams = urlObj.query.toObject();
|
||||
expect(queryParams[0].key).toEqual('q1');
|
||||
expect(queryParams[0].value).toEqual('@');
|
||||
expect(queryParams[1].key).toEqual('q2');
|
||||
expect(queryParams[1].value).toEqual(':');
|
||||
});
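// Side note (a sketch, assuming the accessors behave exactly as asserted above): the parsed pieces
// make it easy to describe a request target without leaking credentials or the fragment.
const describeTarget = (u: Url): string =>
  `${u.protocol}//${u.host.join('.')}:${u.port}/${u.path.join('/')}`;
// For the urlStr above this yields 'https://hehe.com:6666/path1/path2' (auth and hash omitted).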
|
||||
|
||||
const urlParsingTests = [
|
||||
{
|
||||
testName: 'internal url',
|
||||
url: 'inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with protocol',
|
||||
url: 'http://inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with auth',
|
||||
url: 'http://name:pwd@inso/',
|
||||
},
|
||||
{
|
||||
testName: 'internal url with auth without protocol',
|
||||
url: 'name:pwd@inso/',
|
||||
},
|
||||
{
|
||||
testName: 'ip address',
|
||||
url: 'http://127.0.0.1/',
|
||||
},
|
||||
{
|
||||
testName: 'localhost',
|
||||
url: 'https://localhost/',
|
||||
},
|
||||
{
|
||||
testName: 'url with query params',
|
||||
url: 'localhost/?k=v',
|
||||
},
|
||||
{
|
||||
testName: 'url with hash',
|
||||
url: 'localhost/#myHash',
|
||||
},
|
||||
{
|
||||
testName: 'url with query params and hash',
|
||||
url: 'localhost/?k=v#myHash',
|
||||
},
|
||||
{
|
||||
testName: 'url with query params containing a tag',
|
||||
url: 'localhost/?k={{ myValue }}',
|
||||
},
|
||||
{
|
||||
testName: 'url with hash containing a tag',
|
||||
url: 'localhost/#My{{ hashValue }}',
|
||||
},
|
||||
{
|
||||
testName: 'url with path params',
|
||||
url: 'inso.com/:path1/:path',
|
||||
},
|
||||
{
|
||||
testName: 'url with tags and path params',
|
||||
url: '{{ _.baseUrl }}/:path1/:path',
|
||||
},
|
||||
{
|
||||
testName: 'hybrid of path params and tags',
|
||||
url: '{{ baseUrl }}/:path_{{ _.pathSuffix }}',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in path',
|
||||
url: '{{ baseUrl }}/tom@any.com',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in auth and path',
|
||||
url: 'user:pass@a.com/tom@any.com',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in auth',
|
||||
url: 'user:pass@a.com/',
|
||||
},
|
||||
{
|
||||
testName: '@ is used in path with path params, tags and hash',
|
||||
url: '{{ baseUrl }}/:path__{{ _.pathSuffix }}/tom@any.com#hash',
|
||||
},
|
||||
];
|
||||
|
||||
urlParsingTests.forEach(testCase => {
|
||||
it(`parsing url: ${testCase.testName}`, () => {
|
||||
const urlObj = new Url(testCase.url);
|
||||
expect(urlObj.toString()).toEqual(testCase.url);
|
||||
});
|
||||
});
|
||||
|
||||
const additionalCases = [
|
||||
{
|
||||
origin: 'http://{{ urlWithTagOnly}}',
|
||||
expected: 'http://{{ urlWithTagOnly}}',
|
||||
},
|
||||
{
|
||||
origin: "http://httpbin.org/{{ method}}/{% uuid 'v4' %}",
|
||||
expected: "http://httpbin.org/{{ method}}/{% uuid 'v4' %}",
|
||||
},
|
||||
{
|
||||
origin: 'my-domain',
|
||||
expected: 'my-domain',
|
||||
},
|
||||
{
|
||||
origin: 'http://my-domain',
|
||||
expected: 'http://my-domain',
|
||||
},
|
||||
{
|
||||
origin: 'https://youdomain/api/validateuser/abc@.contos.com',
|
||||
expected: 'https://youdomain/api/validateuser/abc@.contos.com',
|
||||
},
|
||||
{
|
||||
origin: 'https://s3.amazonaws.com/finance-department-bucket/2022/tax-certificate.pdf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA3SGQVQG7FGA6KKA6/20221104/us-east-1/s3/aws4_request&X-Amz-Date=20221104T140227Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=b228dbec8c1008c80c162e1210e4503dceead1e4d4751b4d9787314fd6da4d55',
|
||||
expected: 'https://s3.amazonaws.com/finance-department-bucket/2022/tax-certificate.pdf?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIA3SGQVQG7FGA6KKA6/20221104/us-east-1/s3/aws4_request&X-Amz-Date=20221104T140227Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=b228dbec8c1008c80c162e1210e4503dceead1e4d4751b4d9787314fd6da4d55',
|
||||
},
|
||||
{
|
||||
origin: 'https://hehe.com',
|
||||
expected: 'https://hehe.com',
|
||||
},
|
||||
{
|
||||
origin: 'https://2001:db8:3333:4444:5555:6666:7777:8888',
|
||||
expected: 'https://2001:db8:3333:4444:5555:6666:7777:8888',
|
||||
},
|
||||
{
|
||||
origin: 'http://127.0.0.1:6666',
|
||||
expected: 'http://127.0.0.1:6666',
|
||||
},
|
||||
{
|
||||
origin: 'http://ihave@:inhostname.com',
|
||||
expected: 'http://ihave@:inhostname.com',
|
||||
},
|
||||
{
|
||||
origin: "https://{{ _['examplehost']}}",
|
||||
expected: "https://{{ _['examplehost']}}",
|
||||
},
|
||||
{
|
||||
origin: "http://{{ _['a']['b']['c']['url'] }}",
|
||||
expected: "http://{{ _['a']['b']['c']['url'] }}",
|
||||
},
|
||||
{
|
||||
origin: 'invalid?id=@:/&name=张三',
|
||||
expected: 'invalid?id=@:/&name=张三',
|
||||
},
|
||||
];
|
||||
|
||||
additionalCases.forEach(testCase => {
|
||||
it(`parsing url: ${testCase.origin}`, () => {
|
||||
const urlObj = new Url(testCase.origin);
|
||||
urlObj.addQueryParams([{ key: 'key', value: 'value' }]);
|
||||
urlObj.removeQueryParams('key');
|
||||
expect(urlObj.toString()).toEqual(testCase.expected);
|
||||
});
|
||||
additionalCases.forEach(testCase => {
|
||||
it(`parsing url: ${testCase.origin}`, () => {
|
||||
const urlObj = new Url(testCase.origin);
|
||||
urlObj.addQueryParams([{ key: 'key', value: 'value' }]);
|
||||
urlObj.removeQueryParams('key');
|
||||
expect(urlObj.toString()).toEqual(testCase.expected);
|
||||
});
|
||||
});
|
||||
});
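// Usage sketch for the query-param helpers exercised by the additional cases: assuming
// addQueryParams accepts { key, value } pairs and removeQueryParams removes by key, appending and
// then removing a parameter leaves the serialized URL unchanged, which is what each case asserts.
const withExtraParam = (u: Url, key: string, value: string): string => {
  u.addQueryParams([{ key, value }]);
  return u.toString(); // call u.removeQueryParams(key) afterwards to restore the original string
};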
|
||||
|
||||
describe('test Url Match Pattern', () => {
|
||||
it('test UrlMatchPattern', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com:80/p1/*';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
it('test UrlMatchPattern', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com:80/p1/*';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testPath('/p1/abc')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1/')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy();
|
||||
expect(matchPattern.testPath('')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/p1/abc')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1/')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy();
|
||||
expect(matchPattern.testPath('')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testPort('80', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'https')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'unmatched')).toBeFalsy();
|
||||
});
|
||||
expect(matchPattern.testPort('80', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'https')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'unmatched')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('test UrlMatchPattern with no protocol', () => {
|
||||
const pattern = '*.insomnia.com/p1/*';
|
||||
try {
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
matchPattern.testProtocol('http');
|
||||
} catch (e: any) {
|
||||
expect(e.message).toContain('UrlMatchPattern: protocol is not specified');
|
||||
}
|
||||
});
|
||||
it('test UrlMatchPattern with no protocol', () => {
|
||||
const pattern = '*.insomnia.com/p1/*';
|
||||
try {
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
matchPattern.testProtocol('http');
|
||||
} catch (e: any) {
|
||||
expect(e.message).toContain('UrlMatchPattern: protocol is not specified');
|
||||
}
|
||||
});
|
||||
|
||||
it('test UrlMatchPattern with no port', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com/p1/*';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
it('test UrlMatchPattern with no port', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com/p1/*';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testPath('/p1/abc')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1/')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy();
|
||||
expect(matchPattern.testPath('')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/p1/abc')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1/')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/p1')).toBeFalsy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy();
|
||||
expect(matchPattern.testPath('')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testPort('443', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'http')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'https')).toBeFalsy();
|
||||
});
|
||||
expect(matchPattern.testPort('443', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'http')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'https')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('test UrlMatchPattern with no path', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
it('test UrlMatchPattern with no path', () => {
|
||||
const pattern = 'http+https+custom://*.insomnia.com';
|
||||
const matchPattern = new UrlMatchPattern(pattern);
|
||||
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
expect(matchPattern.getProtocols()).toEqual(['http', 'https', 'custom']);
|
||||
expect(matchPattern.testProtocol('http')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('https')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('custom')).toBeTruthy();
|
||||
expect(matchPattern.testProtocol('unmatched')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('download.insomnia.com')).toBeTruthy();
|
||||
expect(matchPattern.testHost('bin.download.insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('insomnia.com')).toBeFalsy();
|
||||
expect(matchPattern.testHost('com')).toBeFalsy();
|
||||
|
||||
expect(matchPattern.testPath('')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy(); // '/' is not handled for now
|
||||
expect(matchPattern.testPath('')).toBeTruthy();
|
||||
expect(matchPattern.testPath('/')).toBeFalsy(); // '/' is not handled for now
|
||||
|
||||
expect(matchPattern.testPort('443', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'http')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'https')).toBeFalsy();
|
||||
});
|
||||
expect(matchPattern.testPort('443', 'https')).toBeTruthy();
|
||||
expect(matchPattern.testPort('80', 'http')).toBeTruthy();
|
||||
expect(matchPattern.testPort('443', 'http')).toBeFalsy();
|
||||
expect(matchPattern.testPort('80', 'https')).toBeFalsy();
|
||||
});
|
||||
});
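// A small gatekeeping sketch built on the helpers asserted above ('+'-separated protocol list,
// '*.' host wildcard, '/*' path wildcard, default ports when the pattern omits one); the pattern
// string is illustrative only.
const allowList = new UrlMatchPattern('https://*.insomnia.com/p1/*');
const isAllowed = (protocol: string, host: string, path: string, port: string): boolean =>
  allowList.testProtocol(protocol) &&
  allowList.testHost(host) &&
  allowList.testPath(path) &&
  allowList.testPort(port, protocol);
// isAllowed('https', 'download.insomnia.com', '/p1/abc', '443') would be truthy per the cases above.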
|
||||
|
||||
@@ -3,34 +3,29 @@ import { describe, expect, it } from 'vitest';
|
||||
import { Variable, VariableList } from '../variables';
|
||||
|
||||
describe('test Variables object', () => {
|
||||
it('test basic operations', () => {
|
||||
|
||||
const variable = new Variable({
|
||||
id: 'id',
|
||||
key: 'key',
|
||||
name: 'name',
|
||||
value: 'value',
|
||||
type: 'type',
|
||||
disabled: false,
|
||||
});
|
||||
|
||||
expect(variable.get()).toBe('value');
|
||||
variable.set('value2');
|
||||
expect(variable.get()).toBe('value2');
|
||||
|
||||
it('test basic operations', () => {
|
||||
const variable = new Variable({
|
||||
id: 'id',
|
||||
key: 'key',
|
||||
name: 'name',
|
||||
value: 'value',
|
||||
type: 'type',
|
||||
disabled: false,
|
||||
});
|
||||
|
||||
it('VariableList operations', () => {
|
||||
const varList = new VariableList(
|
||||
undefined,
|
||||
[
|
||||
new Variable({ key: 'h1', value: 'v1' }),
|
||||
new Variable({ key: 'h2', value: 'v2' }),
|
||||
]
|
||||
);
|
||||
expect(variable.get()).toBe('value');
|
||||
variable.set('value2');
|
||||
expect(variable.get()).toBe('value2');
|
||||
});
|
||||
|
||||
const upserted = new Variable({ key: 'h1', value: 'v1upserted' });
|
||||
varList.upsert(upserted);
|
||||
expect(varList.one('h1')).toEqual(upserted);
|
||||
});
|
||||
it('VariableList operations', () => {
|
||||
const varList = new VariableList(undefined, [
|
||||
new Variable({ key: 'h1', value: 'v1' }),
|
||||
new Variable({ key: 'h2', value: 'v2' }),
|
||||
]);
|
||||
|
||||
const upserted = new Variable({ key: 'h1', value: 'v1upserted' });
|
||||
varList.upsert(upserted);
|
||||
expect(varList.one('h1')).toEqual(upserted);
|
||||
});
|
||||
});
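// Reference sketch for the Variable / VariableList API covered above, assuming the constructor
// options and the upsert/one behaviour shown in the assertions; the keys and values are illustrative.
const headers = new VariableList(undefined, [
  new Variable({ key: 'Accept', value: 'application/json' }),
]);
headers.upsert(new Variable({ key: 'Accept', value: 'text/plain' })); // replaces the entry with the same key
const accept = headers.one('Accept'); // the upserted Variable, value 'text/plain'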
|
||||
|
||||
@@ -4,78 +4,73 @@ let scriptPromises = new Array<Promise<any>>();
|
||||
export const OriginalPromise = Promise;
|
||||
|
||||
export class ProxiedPromise<T> extends Promise<T> {
|
||||
constructor(
|
||||
executor: (
|
||||
resolve: (value: T | PromiseLike<T>) => void,
|
||||
reject: (reason?: any) => void,
|
||||
) => void,
|
||||
) {
|
||||
super(executor);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(this);
|
||||
}
|
||||
constructor(executor: (resolve: (value: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void) {
|
||||
super(executor);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(this);
|
||||
}
|
||||
}
|
||||
|
||||
static override all(promises: Promise<any>[]) {
|
||||
const promise = super.all(promises);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
static override all(promises: Promise<any>[]) {
|
||||
const promise = super.all(promises);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
|
||||
static override allSettled(promises: Promise<any>[]) {
|
||||
// promise will be counted in Promise.resolve
|
||||
return super.allSettled(promises);
|
||||
}
|
||||
static override allSettled(promises: Promise<any>[]) {
|
||||
// promise will be counted in Promise.resolve
|
||||
return super.allSettled(promises);
|
||||
}
|
||||
|
||||
// TODO: super.any does not seem to be supported by the compile target (es2021)
|
||||
|
||||
static any(_: Promise<any>[]) {
|
||||
return super.reject("'super.any' not supported");
|
||||
}
|
||||
// TODO: super.any does not seem to be supported by the compile target (es2021)
|
||||
|
||||
static override race(promises: Promise<any>[]) {
|
||||
const promise = super.race(promises);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
static any(_: Promise<any>[]) {
|
||||
return super.reject("'super.any' not supported");
|
||||
}
|
||||
|
||||
static override reject(value: any) {
|
||||
const promise = super.reject(value);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
static override race(promises: Promise<any>[]) {
|
||||
const promise = super.race(promises);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
|
||||
static override resolve<T>(value?: T | PromiseLike<T>) {
|
||||
const promise = super.resolve(value);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
static override reject(value: any) {
|
||||
const promise = super.reject(value);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
|
||||
// TODO: Promise.withResolvers is not supported by the compile target (es2021)
|
||||
|
||||
static withResolvers() {
|
||||
return super.reject("'Promise.withResolvers' not supported");
|
||||
static override resolve<T>(value?: T | PromiseLike<T>) {
|
||||
const promise = super.resolve(value);
|
||||
if (monitoring) {
|
||||
scriptPromises.push(promise);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
|
||||
// TODO: Promise.withResolvers is not supported by the compile target (es2021)
|
||||
|
||||
static withResolvers() {
|
||||
return super.reject("'Promise.withResolvers' not supported");
|
||||
}
|
||||
}
|
||||
|
||||
export const asyncTasksAllSettled = async () => {
|
||||
await Promise.allSettled(scriptPromises);
|
||||
scriptPromises = [];
|
||||
await Promise.allSettled(scriptPromises);
|
||||
scriptPromises = [];
|
||||
};
|
||||
|
||||
export const stopMonitorAsyncTasks = () => {
|
||||
monitoring = false;
|
||||
monitoring = false;
|
||||
};
|
||||
|
||||
export const resetAsyncTasks = async () => {
|
||||
scriptPromises = [];
|
||||
monitoring = true;
|
||||
scriptPromises = [];
|
||||
monitoring = true;
|
||||
};
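// How these helpers are meant to fit together (a sketch, not the actual runner): `monitoring` is the
// module-level flag toggled by stopMonitorAsyncTasks/resetAsyncTasks, and the sandbox is assumed to
// swap the global Promise for ProxiedPromise while the user script runs so stray promises get tracked.
const runScript = async (script: () => Promise<void>) => {
  await resetAsyncTasks();                       // clear tracked promises, re-enable monitoring
  (globalThis as any).Promise = ProxiedPromise;  // assumed swap performed by the sandbox
  try {
    await script();
    await asyncTasksAllSettled();                // wait for promises the script left dangling
  } finally {
    stopMonitorAsyncTasks();
    (globalThis as any).Promise = OriginalPromise;
  }
};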
|
||||
|
||||
File diff suppressed because it is too large
@@ -2,65 +2,61 @@ import { Property } from './properties';
|
||||
import { UrlMatchPattern, UrlMatchPatternList } from './urls';
|
||||
|
||||
export interface SrcRef {
|
||||
src: string; // src is the path of the file
|
||||
src: string; // src is the path of the file
|
||||
}
|
||||
|
||||
export interface CertificateOptions {
|
||||
name?: string;
|
||||
matches?: string[];
|
||||
key?: SrcRef;
|
||||
cert?: SrcRef;
|
||||
passphrase?: string;
|
||||
pfx?: SrcRef; // PFX or PKCS12 Certificate
|
||||
disabled?: boolean;
|
||||
name?: string;
|
||||
matches?: string[];
|
||||
key?: SrcRef;
|
||||
cert?: SrcRef;
|
||||
passphrase?: string;
|
||||
pfx?: SrcRef; // PFX or PKCS12 Certificate
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export class Certificate extends Property {
|
||||
override _kind = 'Certificate';
|
||||
override _kind = 'Certificate';
|
||||
|
||||
override name?: string;
|
||||
matches?: UrlMatchPatternList<UrlMatchPattern>;
|
||||
key?: SrcRef;
|
||||
cert?: SrcRef;
|
||||
passphrase?: string;
|
||||
pfx?: SrcRef; // PFX or PKCS12 Certificate
|
||||
override name?: string;
|
||||
matches?: UrlMatchPatternList<UrlMatchPattern>;
|
||||
key?: SrcRef;
|
||||
cert?: SrcRef;
|
||||
passphrase?: string;
|
||||
pfx?: SrcRef; // PFX or PKCS12 Certificate
|
||||
|
||||
constructor(options: CertificateOptions) {
|
||||
super();
|
||||
constructor(options: CertificateOptions) {
|
||||
super();
|
||||
|
||||
this.name = options.name;
|
||||
this.matches = new UrlMatchPatternList(
|
||||
undefined,
|
||||
options.matches ?
|
||||
options.matches.map(matchStr => new UrlMatchPattern(matchStr)) :
|
||||
[],
|
||||
);
|
||||
this.key = options.key;
|
||||
this.cert = options.cert;
|
||||
this.passphrase = options.passphrase;
|
||||
this.pfx = options.pfx;
|
||||
this.disabled = options.disabled;
|
||||
}
|
||||
this.name = options.name;
|
||||
this.matches = new UrlMatchPatternList(
|
||||
undefined,
|
||||
options.matches ? options.matches.map(matchStr => new UrlMatchPattern(matchStr)) : [],
|
||||
);
|
||||
this.key = options.key;
|
||||
this.cert = options.cert;
|
||||
this.passphrase = options.passphrase;
|
||||
this.pfx = options.pfx;
|
||||
this.disabled = options.disabled;
|
||||
}
|
||||
|
||||
static isCertificate(obj: object) {
|
||||
return '_kind' in obj && obj._kind === 'Certificate';
|
||||
}
|
||||
static isCertificate(obj: object) {
|
||||
return '_kind' in obj && obj._kind === 'Certificate';
|
||||
}
|
||||
|
||||
canApplyTo(url: string) {
|
||||
return this.matches ? this.matches.test(url) : false;
|
||||
}
|
||||
canApplyTo(url: string) {
|
||||
return this.matches ? this.matches.test(url) : false;
|
||||
}
|
||||
|
||||
update(options: CertificateOptions) {
|
||||
this.name = options.name;
|
||||
this.matches = new UrlMatchPatternList(
|
||||
undefined,
|
||||
options.matches ?
|
||||
options.matches.map(matchStr => new UrlMatchPattern(matchStr)) :
|
||||
[],
|
||||
);
|
||||
this.key = options.key;
|
||||
this.cert = options.cert;
|
||||
this.passphrase = options.passphrase;
|
||||
this.pfx = options.pfx;
|
||||
}
|
||||
update(options: CertificateOptions) {
|
||||
this.name = options.name;
|
||||
this.matches = new UrlMatchPatternList(
|
||||
undefined,
|
||||
options.matches ? options.matches.map(matchStr => new UrlMatchPattern(matchStr)) : [],
|
||||
);
|
||||
this.key = options.key;
|
||||
this.cert = options.cert;
|
||||
this.passphrase = options.passphrase;
|
||||
this.pfx = options.pfx;
|
||||
}
|
||||
}
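// Usage sketch: build a client-certificate entry and ask whether it applies to a request URL; the
// paths and pattern are placeholders, and whether canApplyTo returns true depends entirely on
// UrlMatchPatternList.test, which is defined elsewhere.
const clientCert = new Certificate({
  name: 'staging client cert',
  matches: ['https://*.example.com/*'],
  key: { src: '/tmp/client.key' },
  cert: { src: '/tmp/client.crt' },
  passphrase: 'secret',
});
const applies = clientCert.canApplyTo('https://api.example.com/v1/users');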
|
||||
|
||||
@@ -1,81 +1,77 @@
|
||||
type LogLevel = 'debug' | 'info' | 'log' | 'warn' | 'error';
|
||||
|
||||
export interface Row {
|
||||
value: string;
|
||||
name: string;
|
||||
timestamp: number;
|
||||
value: string;
|
||||
name: string;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
class Console {
|
||||
rows: Row[] = [];
|
||||
rows: Row[] = [];
|
||||
|
||||
// TODO: support substitution placeholders (e.g. '%s') as console.log does
|
||||
printLog = (rows: Row[], level: LogLevel, ...values: any) => {
|
||||
try {
|
||||
const content = values.map(
|
||||
(value: any) => {
|
||||
return typeof value === 'string' ? value : JSON.stringify(value, null, 2);
|
||||
}
|
||||
).join(' ');
|
||||
// TODO: support substitution placeholders (e.g. '%s') as console.log does
|
||||
printLog = (rows: Row[], level: LogLevel, ...values: any) => {
|
||||
try {
|
||||
const content = values
|
||||
.map((value: any) => {
|
||||
return typeof value === 'string' ? value : JSON.stringify(value, null, 2);
|
||||
})
|
||||
.join(' ');
|
||||
|
||||
const row = {
|
||||
value: `${level}: ${content}`,
|
||||
name: 'Text',
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
const row = {
|
||||
value: `${level}: ${content}`,
|
||||
name: 'Text',
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
|
||||
rows.push(row);
|
||||
} catch (e) {
|
||||
rows.push({
|
||||
value: 'error: ' + JSON.stringify(e, null, 2),
|
||||
name: 'Text',
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
}
|
||||
};
|
||||
rows.push(row);
|
||||
} catch (e) {
|
||||
rows.push({
|
||||
value: 'error: ' + JSON.stringify(e, null, 2),
|
||||
name: 'Text',
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
log = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'log', ...values);
|
||||
};
|
||||
log = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'log', ...values);
|
||||
};
|
||||
|
||||
warn = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'warn', ...values);
|
||||
};
|
||||
warn = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'warn', ...values);
|
||||
};
|
||||
|
||||
debug = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'debug', ...values);
|
||||
};
|
||||
debug = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'debug', ...values);
|
||||
};
|
||||
|
||||
info = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'info', ...values);
|
||||
};
|
||||
info = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'info', ...values);
|
||||
};
|
||||
|
||||
error = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'error', ...values);
|
||||
};
|
||||
error = (...values: any[]) => {
|
||||
this.printLog(this.rows, 'error', ...values);
|
||||
};
|
||||
|
||||
clear = (_level: LogLevel, _message?: any, ..._optionalParams: any[]) => {
|
||||
throw Error('currently "clear" is not supported for the timeline');
|
||||
};
|
||||
|
||||
clear = (_level: LogLevel, _message?: any, ..._optionalParams: any[]) => {
|
||||
throw Error('currently "clear" is not supported for the timeline');
|
||||
};
|
||||
dumpLogs = () => {
|
||||
return this.rows.map(row => JSON.stringify(row) + '\n').join('\n');
|
||||
};
|
||||
|
||||
dumpLogs = () => {
|
||||
return this.rows
|
||||
.map(row => JSON.stringify(row) + '\n')
|
||||
.join('\n');
|
||||
};
|
||||
|
||||
dumpLogsAsArray = () => {
|
||||
return this.rows
|
||||
.map(row => JSON.stringify(row) + '\n');
|
||||
};
|
||||
dumpLogsAsArray = () => {
|
||||
return this.rows.map(row => JSON.stringify(row) + '\n');
|
||||
};
|
||||
}
|
||||
|
||||
let builtInConsole = new Console();
|
||||
export function getExistingConsole() {
|
||||
return builtInConsole;
|
||||
return builtInConsole;
|
||||
}
|
||||
export function getNewConsole() {
|
||||
builtInConsole = new Console();
|
||||
return builtInConsole;
|
||||
builtInConsole = new Console();
|
||||
return builtInConsole;
|
||||
}
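// Consumption sketch: a fresh Console is created per script run via getNewConsole(), user code logs
// through the level helpers, and the accumulated rows are flushed to the response timeline with
// dumpLogs()/dumpLogsAsArray().
const scriptConsole = getNewConsole();
scriptConsole.info('sending request', { retries: 2 }); // non-strings are JSON-stringified into the row
const timelineText = scriptConsole.dumpLogs();          // one JSON-encoded row per line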
|
||||
|
||||
@@ -6,372 +6,363 @@ import { getExistingConsole } from './console';
|
||||
import { Property, PropertyList } from './properties';
|
||||
|
||||
export interface InsomniaCookieExtensions {
|
||||
creation?: Date;
|
||||
creationIndex?: number;
|
||||
lastAccessed?: Date;
|
||||
pathIsDefault?: boolean;
|
||||
};
|
||||
creation?: Date;
|
||||
creationIndex?: number;
|
||||
lastAccessed?: Date;
|
||||
pathIsDefault?: boolean;
|
||||
}
|
||||
|
||||
export interface CookieOptions extends InsomniaCookieExtensions {
|
||||
id?: string;
|
||||
key: string;
|
||||
value: string;
|
||||
expires?: Date | string | null;
|
||||
maxAge?: number | 'Infinity' | '-Infinity';
|
||||
domain?: string;
|
||||
path?: string;
|
||||
secure?: boolean;
|
||||
httpOnly?: boolean;
|
||||
hostOnly?: boolean;
|
||||
session?: boolean;
|
||||
extensions?: { key: string; value: string }[];
|
||||
id?: string;
|
||||
key: string;
|
||||
value: string;
|
||||
expires?: Date | string | null;
|
||||
maxAge?: number | 'Infinity' | '-Infinity';
|
||||
domain?: string;
|
||||
path?: string;
|
||||
secure?: boolean;
|
||||
httpOnly?: boolean;
|
||||
hostOnly?: boolean;
|
||||
session?: boolean;
|
||||
extensions?: { key: string; value: string }[];
|
||||
}
|
||||
|
||||
export class Cookie extends Property {
|
||||
override readonly _kind: string = 'Cookie';
|
||||
protected cookie: ToughCookie;
|
||||
private extensions?: { key: string; value: string }[];
|
||||
private insoExtensions: InsomniaCookieExtensions = {};
|
||||
override readonly _kind: string = 'Cookie';
|
||||
protected cookie: ToughCookie;
|
||||
private extensions?: { key: string; value: string }[];
|
||||
private insoExtensions: InsomniaCookieExtensions = {};
|
||||
|
||||
constructor(cookieDef: CookieOptions | string) {
|
||||
super();
|
||||
constructor(cookieDef: CookieOptions | string) {
|
||||
super();
|
||||
|
||||
if (typeof cookieDef === 'string') {
|
||||
const cookieDefParsed = Cookie.parse(cookieDef);
|
||||
if (!cookieDefParsed) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
cookieDef = cookieDefParsed;
|
||||
}
|
||||
|
||||
const def = { ...cookieDef };
|
||||
this.extensions = def.extensions ? [...def.extensions] : [];
|
||||
def.extensions = [];
|
||||
|
||||
const cookie = ToughCookie.fromJSON(def);
|
||||
if (!cookie) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
|
||||
this.id = cookieDef.id || '';
|
||||
this.cookie = cookie;
|
||||
this.insoExtensions = {
|
||||
creation: cookieDef.creation,
|
||||
creationIndex: cookieDef.creationIndex,
|
||||
lastAccessed: cookieDef.lastAccessed,
|
||||
pathIsDefault: cookieDef.pathIsDefault,
|
||||
};
|
||||
if (typeof cookieDef === 'string') {
|
||||
const cookieDefParsed = Cookie.parse(cookieDef);
|
||||
if (!cookieDefParsed) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
cookieDef = cookieDefParsed;
|
||||
}
|
||||
|
||||
static override _index = 'key';
|
||||
const def = { ...cookieDef };
|
||||
this.extensions = def.extensions ? [...def.extensions] : [];
|
||||
def.extensions = [];
|
||||
|
||||
static isCookie(obj: Property) {
|
||||
return '_kind' in obj && obj._kind === 'Cookie';
|
||||
const cookie = ToughCookie.fromJSON(def);
|
||||
if (!cookie) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
|
||||
static parse(cookieStr: string) {
|
||||
const cookieObj = ToughCookie.parse(cookieStr, { loose: true });
|
||||
if (!cookieObj) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
|
||||
const hostOnly = cookieObj.extensions?.includes('HostOnly') || false;
|
||||
const session = cookieObj.extensions?.includes('Session') || false;
|
||||
if (hostOnly) {
|
||||
cookieObj.extensions = cookieObj.extensions?.filter(item => item !== 'HostOnly') || [];
|
||||
}
|
||||
if (session) {
|
||||
cookieObj.extensions = cookieObj.extensions?.filter(item => item !== 'Session') || [];
|
||||
}
|
||||
|
||||
// Tough Cookie's extensions work well with string[], but not { key: string; value: string }[]
|
||||
const extensions = cookieObj.extensions?.map((entry: string | { key: string; value: string }) => {
|
||||
if (typeof entry === 'string') {
|
||||
const equalPos = entry.indexOf('=');
|
||||
if (equalPos > 0) {
|
||||
return { key: entry.slice(0, equalPos), value: entry.slice(equalPos + 1) };
|
||||
}
|
||||
return { key: entry, value: 'true' };
|
||||
} else if (
|
||||
'key' in entry &&
|
||||
'value' in entry &&
|
||||
typeof entry.key === 'string' &&
|
||||
typeof entry.value === 'string'
|
||||
) {
|
||||
return { key: entry.key, value: entry.value };
|
||||
}
|
||||
throw Error('failed to create cookie, extension must be: { key: string; value: string }[]');
|
||||
|
||||
|
||||
});
|
||||
|
||||
return {
|
||||
key: cookieObj.key,
|
||||
value: cookieObj.value,
|
||||
expires: cookieObj.expires || undefined,
|
||||
maxAge: cookieObj.maxAge,
|
||||
domain: cookieObj.domain || undefined,
|
||||
path: cookieObj.path || undefined,
|
||||
secure: cookieObj.secure || false,
|
||||
httpOnly: cookieObj.httpOnly || false,
|
||||
hostOnly,
|
||||
session,
|
||||
extensions: extensions,
|
||||
};
|
||||
}
|
||||
|
||||
static stringify(cookie: Cookie) {
|
||||
return cookie.toString();
|
||||
}
|
||||
|
||||
static unparseSingle(cookieOpt: CookieOptions) {
|
||||
const cookie = new Cookie(cookieOpt);
|
||||
if (!cookie) {
|
||||
throw Error('failed to unparse cookie, the cookie options seem invalid');
|
||||
}
|
||||
return cookie.toString();
|
||||
}
|
||||
|
||||
static unparse(cookies: Cookie[]) {
|
||||
const cookieStrs = cookies.map(cookie => cookie.toString());
|
||||
return cookieStrs.join('; ');
|
||||
}
|
||||
|
||||
override toString = () => {
|
||||
const hostOnlyPart = this.cookie.hostOnly ? '; HostOnly' : '';
|
||||
const sessionPart = this.cookie.extensions?.includes('session') ? '; Session' : '';
|
||||
const extensionPart = this.extensions && this.extensions.length > 0 ?
|
||||
'; ' + this.extensions.map(ext => `${ext.key}=${ext.value}`).join(';') :
|
||||
'';
|
||||
|
||||
return this.cookie.toString() + hostOnlyPart + sessionPart + extensionPart;
|
||||
this.id = cookieDef.id || '';
|
||||
this.cookie = cookie;
|
||||
this.insoExtensions = {
|
||||
creation: cookieDef.creation,
|
||||
creationIndex: cookieDef.creationIndex,
|
||||
lastAccessed: cookieDef.lastAccessed,
|
||||
pathIsDefault: cookieDef.pathIsDefault,
|
||||
};
|
||||
}
|
||||
|
||||
override valueOf = () => {
|
||||
return this.cookie.toJSON().value;
|
||||
};
|
||||
static override _index = 'key';
|
||||
|
||||
get key() {
|
||||
return this.cookie.toJSON().key;
|
||||
};
|
||||
static isCookie(obj: Property) {
|
||||
return '_kind' in obj && obj._kind === 'Cookie';
|
||||
}
|
||||
|
||||
override toJSON = () => {
|
||||
return {
|
||||
id: this.id,
|
||||
key: this.cookie.key,
|
||||
value: this.cookie.value,
|
||||
expires: this.cookie.expires === 'Infinity' ? undefined : this.cookie.expires,
|
||||
maxAge: this.cookie.maxAge,
|
||||
domain: this.cookie.domain,
|
||||
path: this.cookie.path,
|
||||
secure: this.cookie.secure,
|
||||
httpOnly: this.cookie.httpOnly,
|
||||
hostOnly: this.cookie.hostOnly,
|
||||
session: this.cookie.extensions?.includes('session'),
|
||||
extensions: this.extensions,
|
||||
// extra fields from Insomnia
|
||||
creation: this.insoExtensions.creation,
|
||||
creationIndex: this.insoExtensions.creationIndex,
|
||||
lastAccessed: this.insoExtensions.lastAccessed,
|
||||
pathIsDefault: this.insoExtensions.pathIsDefault,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export class CookieList extends PropertyList<Cookie> {
|
||||
override _kind = 'CookieList';
|
||||
|
||||
constructor(cookies: Cookie[]) {
|
||||
super(
|
||||
Cookie,
|
||||
undefined,
|
||||
cookies,
|
||||
);
|
||||
static parse(cookieStr: string) {
|
||||
const cookieObj = ToughCookie.parse(cookieStr, { loose: true });
|
||||
if (!cookieObj) {
|
||||
throw Error('failed to parse cookie, the cookie string seems invalid');
|
||||
}
|
||||
|
||||
static isCookieList(obj: object) {
|
||||
return '_kind' in obj && obj._kind === 'CookieList';
|
||||
const hostOnly = cookieObj.extensions?.includes('HostOnly') || false;
|
||||
const session = cookieObj.extensions?.includes('Session') || false;
|
||||
if (hostOnly) {
|
||||
cookieObj.extensions = cookieObj.extensions?.filter(item => item !== 'HostOnly') || [];
|
||||
}
|
||||
}
|
||||
|
||||
export class CookieObject extends CookieList {
|
||||
private cookieJar: CookieJar;
|
||||
|
||||
constructor(cookieJar: InsomniaCookieJar | null) {
|
||||
const cookies = cookieJar
|
||||
? cookieJar.cookies.map((cookie: InsomniaCookie): Cookie => {
|
||||
let expires: string | Date | null = null;
|
||||
if (cookie.expires || cookie.expires === 0) {
|
||||
if (typeof cookie.expires === 'number') {
|
||||
expires = new Date(cookie.expires);
|
||||
} else {
|
||||
expires = cookie.expires;
|
||||
}
|
||||
}
|
||||
|
||||
return new Cookie({
|
||||
id: cookie.id,
|
||||
key: cookie.key,
|
||||
value: cookie.value,
|
||||
expires: expires,
|
||||
maxAge: undefined, // not supported in Insomnia
|
||||
domain: cookie.domain,
|
||||
path: cookie.path,
|
||||
secure: cookie.secure,
|
||||
httpOnly: cookie.httpOnly,
|
||||
hostOnly: cookie.hostOnly,
|
||||
session: undefined, // not supported in Insomnia
|
||||
extensions: undefined, // TODO: its format from Insomnia is unknown
|
||||
// the following properties come from Insomnia
|
||||
creation: cookie.creation,
|
||||
creationIndex: cookie.creationIndex,
|
||||
lastAccessed: cookie.lastAccessed,
|
||||
pathIsDefault: cookie.pathIsDefault,
|
||||
});
|
||||
})
|
||||
: [];
|
||||
|
||||
super(cookies);
|
||||
const scriptCookieJar = cookieJar ? new CookieJar(cookieJar.name, cookies) : new CookieJar('', []);
|
||||
this.cookieJar = scriptCookieJar;
|
||||
this.typeClass = Cookie;
|
||||
if (session) {
|
||||
cookieObj.extensions = cookieObj.extensions?.filter(item => item !== 'Session') || [];
|
||||
}
|
||||
|
||||
jar() {
|
||||
return this.cookieJar;
|
||||
}
|
||||
}
|
||||
|
||||
export class CookieJar {
|
||||
// The CookieJar from tough-cookie cannot be used, as it fails when comparing the context location against cookie domains:
|
||||
// as it reads location from the browser window, it is "localhost"
|
||||
private jar: Map<string, Map<string, Cookie>>; // Map<domain, Map<cookieKey, cookieObject>>
|
||||
private jarName: string;
|
||||
|
||||
constructor(jarName: string, cookies?: Cookie[]) {
|
||||
this.jarName = jarName;
|
||||
this.jar = new Map();
|
||||
|
||||
if (cookies) {
|
||||
cookies.forEach(cookie => {
|
||||
const properties = cookie.toJSON();
|
||||
if (!properties.domain) {
|
||||
getExistingConsole().warn(`domain is not specified for the cookie "${cookie.key}" so it is omitted`);
|
||||
return;
|
||||
}
|
||||
|
||||
const domainCookies = this.jar.get(properties.domain) || new Map();
|
||||
this.jar.set(properties.domain, domainCookies.set(properties.key, cookie));
|
||||
});
|
||||
// Tough Cookie's extensions work well with string[], but not { key: string; value: string }[]
|
||||
const extensions = cookieObj.extensions?.map((entry: string | { key: string; value: string }) => {
|
||||
if (typeof entry === 'string') {
|
||||
const equalPos = entry.indexOf('=');
|
||||
if (equalPos > 0) {
|
||||
return { key: entry.slice(0, equalPos), value: entry.slice(equalPos + 1) };
|
||||
}
|
||||
}
|
||||
|
||||
set(url: string, key: string, value: string | CookieOptions, cb: (error?: Error, cookie?: Cookie) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
if (typeof value === 'string') {
|
||||
const domainCookie = new Cookie({
|
||||
key: key,
|
||||
value: value,
|
||||
domain: url,
|
||||
});
|
||||
this.jar.set(url, domainCookies.set(key, domainCookie));
|
||||
cb(undefined, domainCookie);
|
||||
} else {
|
||||
const domainCookie = new Cookie(value);
|
||||
this.jar.set(url, domainCookies.set(key, domainCookie));
|
||||
cb(undefined, domainCookie);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: create a better method for setting cookie, or overload the above method
|
||||
// set(
|
||||
// url: string,
|
||||
// info: { name: string; value: string; httpOnly: boolean },
|
||||
// cb: (error?: Error, cookie?: Cookie) => void,
|
||||
// ) {
|
||||
// try {
|
||||
// const cookie = new ToughCookie({ key: info.name, value: info.value, httpOnly: info.httpOnly });
|
||||
// this.jar.setCookieSync(cookie, url, { http: info.httpOnly });
|
||||
// cb(undefined, new Cookie({ key: info.name, value: info.value, httpOnly: info.httpOnly }));
|
||||
// } catch (e) {
|
||||
// cb(e, undefined);
|
||||
// }
|
||||
// }
|
||||
|
||||
get(url: string, name: string, cb: (error?: Error, cookie?: Cookie) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
cb(undefined, domainCookies.get(name));
|
||||
}
|
||||
|
||||
getAll(url: string, cb: (error?: Error, cookies?: Cookie[]) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
cb(
|
||||
undefined,
|
||||
Array.from(domainCookies.values()),
|
||||
);
|
||||
}
|
||||
|
||||
unset(url: string, name: string, cb: (error?: Error | null) => void) {
|
||||
const domainCookies = this.jar.get(url);
|
||||
if (!domainCookies) {
|
||||
cb(undefined);
|
||||
} else {
|
||||
domainCookies.delete(name);
|
||||
cb(undefined);
|
||||
}
|
||||
}
|
||||
|
||||
clear(url: string, cb: (error?: Error | null) => void) {
|
||||
this.jar.delete(url);
|
||||
cb(undefined);
|
||||
}
|
||||
|
||||
toInsomniaCookieJar() {
|
||||
const cookies = new Array<Partial<InsomniaCookie>>();
|
||||
Array.from(this.jar.values())
|
||||
.forEach((domainCookies: Map<string, Cookie>) => {
|
||||
Array.from(domainCookies.values()).forEach(cookie => {
|
||||
const cookieObj = cookie.toJSON();
|
||||
cookies.push({
|
||||
id: cookieObj.id,
|
||||
key: cookieObj.key,
|
||||
value: cookieObj.value,
|
||||
expires: cookieObj.expires || 'Infinity', // transform it back to 'Infinity', avoid edge cases
|
||||
domain: cookieObj.domain || undefined,
|
||||
path: cookieObj.path || undefined,
|
||||
secure: cookieObj.secure,
|
||||
httpOnly: cookieObj.httpOnly,
|
||||
extensions: cookieObj.extensions || undefined,
|
||||
creation: cookieObj.creation,
|
||||
creationIndex: cookieObj.creationIndex,
|
||||
hostOnly: cookieObj.hostOnly || undefined,
|
||||
pathIsDefault: cookieObj.pathIsDefault,
|
||||
lastAccessed: cookieObj.lastAccessed,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
name: this.jarName,
|
||||
cookies,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function mergeCookieJar(
|
||||
originalCookieJar: InsomniaCookieJar,
|
||||
updatedCookieJar: { name: string; cookies: Partial<InsomniaCookie>[] },
|
||||
): InsomniaCookieJar {
|
||||
const cookiesWithId = updatedCookieJar.cookies.map((cookie): InsomniaCookie => {
|
||||
if (!cookie.id) {
|
||||
// this follows the ID generation approach in `cookie-list.tsx`
|
||||
cookie.id = uuidv4();
|
||||
}
|
||||
return cookie as InsomniaCookie;
|
||||
return { key: entry, value: 'true' };
|
||||
} else if (
|
||||
'key' in entry &&
|
||||
'value' in entry &&
|
||||
typeof entry.key === 'string' &&
|
||||
typeof entry.value === 'string'
|
||||
) {
|
||||
return { key: entry.key, value: entry.value };
|
||||
}
|
||||
throw Error('failed to create cookie, extension must be: { key: string; value: string }[]');
|
||||
});
|
||||
|
||||
return {
|
||||
...originalCookieJar,
|
||||
cookies: cookiesWithId,
|
||||
key: cookieObj.key,
|
||||
value: cookieObj.value,
|
||||
expires: cookieObj.expires || undefined,
|
||||
maxAge: cookieObj.maxAge,
|
||||
domain: cookieObj.domain || undefined,
|
||||
path: cookieObj.path || undefined,
|
||||
secure: cookieObj.secure || false,
|
||||
httpOnly: cookieObj.httpOnly || false,
|
||||
hostOnly,
|
||||
session,
|
||||
extensions: extensions,
|
||||
};
|
||||
}
|
||||
|
||||
static stringify(cookie: Cookie) {
|
||||
return cookie.toString();
|
||||
}
|
||||
|
||||
static unparseSingle(cookieOpt: CookieOptions) {
|
||||
const cookie = new Cookie(cookieOpt);
|
||||
if (!cookie) {
|
||||
throw Error('failed to unparse cookie, the cookie options seem invalid');
|
||||
}
|
||||
return cookie.toString();
|
||||
}
|
||||
|
||||
static unparse(cookies: Cookie[]) {
|
||||
const cookieStrs = cookies.map(cookie => cookie.toString());
|
||||
return cookieStrs.join('; ');
|
||||
}
|
||||
|
||||
override toString = () => {
|
||||
const hostOnlyPart = this.cookie.hostOnly ? '; HostOnly' : '';
|
||||
const sessionPart = this.cookie.extensions?.includes('session') ? '; Session' : '';
|
||||
const extensionPart =
|
||||
this.extensions && this.extensions.length > 0
|
||||
? '; ' + this.extensions.map(ext => `${ext.key}=${ext.value}`).join(';')
|
||||
: '';
|
||||
|
||||
return this.cookie.toString() + hostOnlyPart + sessionPart + extensionPart;
|
||||
};
|
||||
|
||||
override valueOf = () => {
|
||||
return this.cookie.toJSON().value;
|
||||
};
|
||||
|
||||
get key() {
|
||||
return this.cookie.toJSON().key;
|
||||
}
|
||||
|
||||
override toJSON = () => {
|
||||
return {
|
||||
id: this.id,
|
||||
key: this.cookie.key,
|
||||
value: this.cookie.value,
|
||||
expires: this.cookie.expires === 'Infinity' ? undefined : this.cookie.expires,
|
||||
maxAge: this.cookie.maxAge,
|
||||
domain: this.cookie.domain,
|
||||
path: this.cookie.path,
|
||||
secure: this.cookie.secure,
|
||||
httpOnly: this.cookie.httpOnly,
|
||||
hostOnly: this.cookie.hostOnly,
|
||||
session: this.cookie.extensions?.includes('session'),
|
||||
extensions: this.extensions,
|
||||
// extra fields from Insomnia
|
||||
creation: this.insoExtensions.creation,
|
||||
creationIndex: this.insoExtensions.creationIndex,
|
||||
lastAccessed: this.insoExtensions.lastAccessed,
|
||||
pathIsDefault: this.insoExtensions.pathIsDefault,
|
||||
};
|
||||
};
|
||||
}
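// Round-trip sketch for the Cookie helpers above: parse a Set-Cookie style string into options,
// build a Cookie from them, and serialize back; the exact output depends on tough-cookie's
// toString(), with HostOnly/Session and custom extensions re-appended by the override above.
const parsedOptions = Cookie.parse('token=abc123; Domain=example.com; Path=/; Secure; HttpOnly');
const tokenCookie = new Cookie(parsedOptions);
const headerValue = Cookie.unparse([tokenCookie]); // '; '-joined serialization of each cookie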
|
||||
|
||||
export class CookieList extends PropertyList<Cookie> {
|
||||
override _kind = 'CookieList';
|
||||
|
||||
constructor(cookies: Cookie[]) {
|
||||
super(Cookie, undefined, cookies);
|
||||
}
|
||||
|
||||
static isCookieList(obj: object) {
|
||||
return '_kind' in obj && obj._kind === 'CookieList';
|
||||
}
|
||||
}
|
||||
|
||||
export class CookieObject extends CookieList {
|
||||
private cookieJar: CookieJar;
|
||||
|
||||
constructor(cookieJar: InsomniaCookieJar | null) {
|
||||
const cookies = cookieJar
|
||||
? cookieJar.cookies.map((cookie: InsomniaCookie): Cookie => {
|
||||
let expires: string | Date | null = null;
|
||||
if (cookie.expires || cookie.expires === 0) {
|
||||
if (typeof cookie.expires === 'number') {
|
||||
expires = new Date(cookie.expires);
|
||||
} else {
|
||||
expires = cookie.expires;
|
||||
}
|
||||
}
|
||||
|
||||
return new Cookie({
|
||||
id: cookie.id,
|
||||
key: cookie.key,
|
||||
value: cookie.value,
|
||||
expires: expires,
|
||||
maxAge: undefined, // not supported in Insomnia
|
||||
domain: cookie.domain,
|
||||
path: cookie.path,
|
||||
secure: cookie.secure,
|
||||
httpOnly: cookie.httpOnly,
|
||||
hostOnly: cookie.hostOnly,
|
||||
session: undefined, // not supported in Insomnia
|
||||
extensions: undefined, // TODO: its format from Insomnia is unknown
|
||||
// the following properties come from Insomnia
|
||||
creation: cookie.creation,
|
||||
creationIndex: cookie.creationIndex,
|
||||
lastAccessed: cookie.lastAccessed,
|
||||
pathIsDefault: cookie.pathIsDefault,
|
||||
});
|
||||
})
|
||||
: [];
|
||||
|
||||
super(cookies);
|
||||
const scriptCookieJar = cookieJar ? new CookieJar(cookieJar.name, cookies) : new CookieJar('', []);
|
||||
this.cookieJar = scriptCookieJar;
|
||||
this.typeClass = Cookie;
|
||||
}
|
||||
|
||||
jar() {
|
||||
return this.cookieJar;
|
||||
}
|
||||
}
|
||||
|
||||
export class CookieJar {
|
||||
// The CookieJar from tough-cookie cannot be used, as it fails when comparing the context location against cookie domains:
|
||||
// as it reads location from the browser window, it is "localhost"
|
||||
private jar: Map<string, Map<string, Cookie>>; // Map<domain, Map<cookieKey, cookieObject>>
|
||||
private jarName: string;
|
||||
|
||||
constructor(jarName: string, cookies?: Cookie[]) {
|
||||
this.jarName = jarName;
|
||||
this.jar = new Map();
|
||||
|
||||
if (cookies) {
|
||||
cookies.forEach(cookie => {
|
||||
const properties = cookie.toJSON();
|
||||
if (!properties.domain) {
|
||||
getExistingConsole().warn(`domain is not specified for the cookie "${cookie.key}" so it is omitted`);
|
||||
return;
|
||||
}
|
||||
|
||||
const domainCookies = this.jar.get(properties.domain) || new Map();
|
||||
this.jar.set(properties.domain, domainCookies.set(properties.key, cookie));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
set(url: string, key: string, value: string | CookieOptions, cb: (error?: Error, cookie?: Cookie) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
if (typeof value === 'string') {
|
||||
const domainCookie = new Cookie({
|
||||
key: key,
|
||||
value: value,
|
||||
domain: url,
|
||||
});
|
||||
this.jar.set(url, domainCookies.set(key, domainCookie));
|
||||
cb(undefined, domainCookie);
|
||||
} else {
|
||||
const domainCookie = new Cookie(value);
|
||||
this.jar.set(url, domainCookies.set(key, domainCookie));
|
||||
cb(undefined, domainCookie);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: create a better method for setting cookie, or overload the above method
|
||||
// set(
|
||||
// url: string,
|
||||
// info: { name: string; value: string; httpOnly: boolean },
|
||||
// cb: (error?: Error, cookie?: Cookie) => void,
|
||||
// ) {
|
||||
// try {
|
||||
// const cookie = new ToughCookie({ key: info.name, value: info.value, httpOnly: info.httpOnly });
|
||||
// this.jar.setCookieSync(cookie, url, { http: info.httpOnly });
|
||||
// cb(undefined, new Cookie({ key: info.name, value: info.value, httpOnly: info.httpOnly }));
|
||||
// } catch (e) {
|
||||
// cb(e, undefined);
|
||||
// }
|
||||
// }
|
||||
|
||||
get(url: string, name: string, cb: (error?: Error, cookie?: Cookie) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
cb(undefined, domainCookies.get(name));
|
||||
}
|
||||
|
||||
getAll(url: string, cb: (error?: Error, cookies?: Cookie[]) => void) {
|
||||
const domainCookies = this.jar.get(url) || new Map();
|
||||
cb(undefined, Array.from(domainCookies.values()));
|
||||
}
|
||||
|
||||
unset(url: string, name: string, cb: (error?: Error | null) => void) {
|
||||
const domainCookies = this.jar.get(url);
|
||||
if (!domainCookies) {
|
||||
cb(undefined);
|
||||
} else {
|
||||
domainCookies.delete(name);
|
||||
cb(undefined);
|
||||
}
|
||||
}
|
||||
|
||||
clear(url: string, cb: (error?: Error | null) => void) {
|
||||
this.jar.delete(url);
|
||||
cb(undefined);
|
||||
}
|
||||
|
||||
toInsomniaCookieJar() {
|
||||
const cookies = new Array<Partial<InsomniaCookie>>();
|
||||
Array.from(this.jar.values()).forEach((domainCookies: Map<string, Cookie>) => {
|
||||
Array.from(domainCookies.values()).forEach(cookie => {
|
||||
const cookieObj = cookie.toJSON();
|
||||
cookies.push({
|
||||
id: cookieObj.id,
|
||||
key: cookieObj.key,
|
||||
value: cookieObj.value,
|
||||
expires: cookieObj.expires || 'Infinity', // transform it back to 'Infinity', avoid edge cases
|
||||
domain: cookieObj.domain || undefined,
|
||||
path: cookieObj.path || undefined,
|
||||
secure: cookieObj.secure,
|
||||
httpOnly: cookieObj.httpOnly,
|
||||
extensions: cookieObj.extensions || undefined,
|
||||
creation: cookieObj.creation,
|
||||
creationIndex: cookieObj.creationIndex,
|
||||
hostOnly: cookieObj.hostOnly || undefined,
|
||||
pathIsDefault: cookieObj.pathIsDefault,
|
||||
lastAccessed: cookieObj.lastAccessed,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
name: this.jarName,
|
||||
cookies,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function mergeCookieJar(
|
||||
originalCookieJar: InsomniaCookieJar,
|
||||
updatedCookieJar: { name: string; cookies: Partial<InsomniaCookie>[] },
|
||||
): InsomniaCookieJar {
|
||||
const cookiesWithId = updatedCookieJar.cookies.map((cookie): InsomniaCookie => {
|
||||
if (!cookie.id) {
|
||||
// this follows the ID generation approach in `cookie-list.tsx`
|
||||
cookie.id = uuidv4();
|
||||
}
|
||||
return cookie as InsomniaCookie;
|
||||
});
|
||||
|
||||
return {
|
||||
...originalCookieJar,
|
||||
cookies: cookiesWithId,
|
||||
};
|
||||
}
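// End-to-end sketch of the jar flow: wrap Insomnia's cookie jar for scripts, let the script mutate
// it through the callback-style API (keyed by domain), then convert back and merge so the original
// jar's fields and cookie ids are preserved. `insomniaJar` stands in for the jar provided by the runtime.
declare const insomniaJar: InsomniaCookieJar;
const cookieObject = new CookieObject(insomniaJar);
const scriptJar = cookieObject.jar();
scriptJar.set('example.com', 'token', 'abc123', (_err, cookie) => {
  // cookie is the newly stored Cookie; the error argument is always undefined in this implementation
});
const mergedJar = mergeCookieJar(insomniaJar, scriptJar.toInsomniaCookieJar());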
|
||||
|
||||
@@ -2,181 +2,181 @@ import { getExistingConsole } from './console';
|
||||
import { getInterpolator } from './interpolator';
|
||||
|
||||
export class Environment {
|
||||
private _name: string;
|
||||
private kvs = new Map<string, boolean | number | string | undefined>();
|
||||
private _name: string;
|
||||
private kvs = new Map<string, boolean | number | string | undefined>();
|
||||
|
||||
constructor(name: string, jsonObject: object | undefined) {
|
||||
this._name = name;
|
||||
this.kvs = new Map(Object.entries(jsonObject || {}));
|
||||
constructor(name: string, jsonObject: object | undefined) {
|
||||
this._name = name;
|
||||
this.kvs = new Map(Object.entries(jsonObject || {}));
|
||||
}
|
||||
|
||||
get name() {
|
||||
return this._name;
|
||||
}
|
||||
|
||||
has = (variableName: string) => {
|
||||
return this.kvs.has(variableName);
|
||||
};
|
||||
|
||||
get = (variableName: string) => {
|
||||
return this.kvs.get(variableName);
|
||||
};
|
||||
|
||||
set = (variableName: string, variableValue: boolean | number | string | undefined | null) => {
|
||||
if (variableValue === null) {
|
||||
getExistingConsole().warn(`Variable "${variableName}" has a null value`);
|
||||
return;
|
||||
}
|
||||
this.kvs.set(variableName, variableValue);
|
||||
};
|
||||
|
||||
get name() {
|
||||
return this._name;
|
||||
}
|
||||
unset = (variableName: string) => {
|
||||
this.kvs.delete(variableName);
|
||||
};
|
||||
|
||||
has = (variableName: string) => {
|
||||
return this.kvs.has(variableName);
|
||||
};
|
||||
clear = () => {
|
||||
this.kvs.clear();
|
||||
};
|
||||
|
||||
get = (variableName: string) => {
|
||||
return this.kvs.get(variableName);
|
||||
};
|
||||
replaceIn = (template: string) => {
|
||||
return getInterpolator().render(template, this.toObject());
|
||||
};
|
||||
|
||||
set = (variableName: string, variableValue: boolean | number | string | undefined | null) => {
|
||||
if (variableValue === null) {
|
||||
getExistingConsole().warn(`Variable "${variableName}" has a null value`);
|
||||
return;
|
||||
}
|
||||
this.kvs.set(variableName, variableValue);
|
||||
};
|
||||
|
||||
unset = (variableName: string) => {
|
||||
this.kvs.delete(variableName);
|
||||
};
|
||||
|
||||
clear = () => {
|
||||
this.kvs.clear();
|
||||
};
|
||||
|
||||
replaceIn = (template: string) => {
|
||||
return getInterpolator().render(template, this.toObject());
|
||||
};
|
||||
|
||||
toObject = () => {
|
||||
return Object.fromEntries(this.kvs.entries());
|
||||
};
|
||||
toObject = () => {
|
||||
return Object.fromEntries(this.kvs.entries());
|
||||
};
|
||||
}
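// Usage sketch for Environment, assuming getInterpolator() renders {{ }} templates against the
// plain object produced by toObject(); the host and token values are illustrative.
const env = new Environment('base', { host: 'api.example.com' });
env.set('token', 'abc123');
const rendered = env.replaceIn('https://{{ host }}/v1?token={{ token }}');
// 'https://api.example.com/v1?token=abc123', given the interpolation assumption above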
|
||||
|
||||
function mergeFolderLevelVars(folderLevelVars: Environment[]) {
|
||||
const mergedFolderLevelObject = folderLevelVars.reduce((merged: object, folderLevelEnv: Environment) => {
|
||||
return { ...merged, ...folderLevelEnv.toObject() };
|
||||
}, {});
|
||||
return new Environment('mergedFolderLevelVars', mergedFolderLevelObject);
|
||||
const mergedFolderLevelObject = folderLevelVars.reduce((merged: object, folderLevelEnv: Environment) => {
|
||||
return { ...merged, ...folderLevelEnv.toObject() };
|
||||
}, {});
|
||||
return new Environment('mergedFolderLevelVars', mergedFolderLevelObject);
|
||||
}
|
||||
|
||||
export class Variables {
  // TODO: support vars for all levels
  private globalVars: Environment;
  private collectionVars: Environment;
  private environmentVars: Environment;
  private iterationDataVars: Environment;
  private folderLevelVars: Environment[];
  private localVars: Environment;

  constructor(args: {
    globalVars: Environment;
    collectionVars: Environment;
    environmentVars: Environment;
    iterationDataVars: Environment;
    folderLevelVars: Environment[];
    localVars: Environment;
  }) {
    this.globalVars = args.globalVars;
    this.collectionVars = args.collectionVars;
    this.environmentVars = args.environmentVars;
    this.iterationDataVars = args.iterationDataVars;
    this.folderLevelVars = args.folderLevelVars;
    this.localVars = args.localVars;
  }

  has = (variableName: string) => {
    const globalVarsHas = this.globalVars.has(variableName);
    const collectionVarsHas = this.collectionVars.has(variableName);
    const environmentVarsHas = this.environmentVars.has(variableName);
    const iterationDataVarsHas = this.iterationDataVars.has(variableName);
    const folderLevelVarsHas = this.folderLevelVars.some(vars => vars.has(variableName));
    const localVarsHas = this.localVars.has(variableName);

    return (
      globalVarsHas ||
      collectionVarsHas ||
      environmentVarsHas ||
      iterationDataVarsHas ||
      folderLevelVarsHas ||
      localVarsHas
    );
  };

  get = (variableName: string) => {
    let finalVal: boolean | number | string | object | undefined = undefined;
    [
      this.localVars,
      mergeFolderLevelVars(this.folderLevelVars),
      this.iterationDataVars,
      this.environmentVars,
      this.collectionVars,
      this.globalVars,
    ].forEach(vars => {
      const value = vars.get(variableName);
      if (!finalVal && value) {
        finalVal = value;
      }
    });

    return finalVal;
  };

  set = (variableName: string, variableValue: boolean | number | string | undefined | null) => {
    if (variableValue === null) {
      getExistingConsole().warn(`Variable "${variableName}" has a null value`);
      return;
    }

    this.localVars.set(variableName, variableValue);
  };

  replaceIn = (template: string) => {
    const context = this.toObject();
    return getInterpolator().render(template, context);
  };

  toObject = () => {
    return [
      this.globalVars,
      this.collectionVars,
      this.environmentVars,
      this.iterationDataVars,
      mergeFolderLevelVars(this.folderLevelVars),
      this.localVars,
    ]
      .map(vars => vars.toObject())
      .reduce((ctx, obj) => ({ ...ctx, ...obj }), {});
  };

  localVarsToObject = () => {
    return this.localVars.toObject();
  };
}
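Usage sketch (not part of the source above): the lookup order in `Variables.get` means the most specific scope wins, while writes always land in the transient local scope.

const variables = new Variables({
  globalVars: new Environment('globals', { region: 'global', timeout: 30 }),
  collectionVars: new Environment('collection', { region: 'collection' }),
  environmentVars: new Environment('env', { region: 'env' }),
  iterationDataVars: new Environment('iterationData', {}),
  folderLevelVars: [new Environment('folder', { region: 'folder' })],
  localVars: new Environment('local', {}),
});

console.log(variables.get('region'));  // 'folder' — folder-level vars shadow env, collection and globals
console.log(variables.get('timeout')); // 30 — falls back to the global scope
variables.set('region', 'local');      // writes only touch localVars
console.log(variables.get('region'));  // 'local'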
export class Vault extends Environment {
  constructor(name: string, jsonObject: object | undefined, enableVaultInScripts: boolean) {
    super(name, jsonObject);
    return new Proxy(this, {
      // throw error on get or set method call if enableVaultInScripts is false
      get: (target, prop, receiver) => {
        if (!enableVaultInScripts) {
          throw new Error('Vault is disabled in script');
        }
        return Reflect.get(target, prop, receiver);
      },
      set: (target, prop, value, receiver) => {
        if (!enableVaultInScripts) {
          throw new Error('Vault is disabled in script');
        }
        return Reflect.set(target, prop, value, receiver);
      },
    });
  }

  unset = () => {
    throw new Error('Vault can not be unset in script');
  };

  clear = () => {
    throw new Error('Vault can not be cleared in script');
  };

  set = () => {
    throw new Error('Vault can not be set in script');
  };
}
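Usage sketch (not part of the source above): the Proxy makes every property access fail while vault access is disabled in settings, and the overridden mutators fail even when it is enabled; it assumes the Environment constructor seeds its store from the JSON object.

const lockedVault = new Vault('vault', { apiKey: 's3cr3t' }, false);
try {
  lockedVault.get('apiKey');           // any access throws while the setting is off
} catch (e) {
  console.log((e as Error).message);   // 'Vault is disabled in script'
}

const openVault = new Vault('vault', { apiKey: 's3cr3t' }, true);
console.log(openVault.get('apiKey'));  // 's3cr3t' — reads are allowed
// openVault.set(), unset() and clear() still throw: the vault is read-only in scripts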
@@ -17,7 +17,7 @@ export class Execution {
      get: (target, prop, receiver) => {
        if (prop === 'current') {
          return target.length > 0 ? target[target.length - 1] : '';
        }
        return Reflect.get(target, prop, receiver);
      },
    });
@@ -26,7 +26,7 @@ export class Execution {
      } else {
        throw new Error('Location input must be array of string');
      }
    }

  skipRequest = () => {
    this._skipRequest = true;
@@ -43,4 +43,4 @@ export class Execution {
      nextRequestIdOrName: this._nextRequestIdOrName,
    };
  };
}
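Usage sketch (not part of the source above): the Execution constructor options mirror the `new Execution({ ... })` call inside initInsomniaObject further below; the class body itself is only partially visible in these hunks, so this is illustrative only.

const execution = new Execution({
  location: ['fld_parent', 'My Request'], // folder path of the running request
  skipRequest: false,
  nextRequestIdOrName: '',
});
execution.skipRequest(); // flags the request so the runner skips sending it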
@@ -2,63 +2,63 @@ import { Environment } from './environments';

// Folder represents a request folder in Insomnia.
export class Folder {
  id: string;
  name: string;
  environment: Environment;

  constructor(id: string, name: string, environmentObject: object | undefined) {
    this.id = id;
    this.name = name;
    this.environment = new Environment(`${id}.environment`, environmentObject);
  }

  toObject = () => {
    return {
      id: this.id,
      name: this.name,
      environment: this.environment.toObject(),
    };
  };
}

// ParentFolders represents the ancestor folders of the active request
export class ParentFolders {
  constructor(private folders: Folder[]) {}

  get = (idOrName: string) => {
    const folder = this.folders.find(folder => folder.name === idOrName || folder.id === idOrName);
    if (!folder) {
      throw Error(`Folder "${idOrName}" not found`);
    }
    return folder;
  };

  getById = (id: string) => {
    const folder = this.folders.find(folder => folder.id === id);
    if (!folder) {
      throw Error(`Folder "${id}" not found`);
    }
    return folder;
  };

  getByName = (folderName: string) => {
    const folder = this.folders.find(folder => folder.name === folderName);
    if (!folder) {
      throw Error(`Folder "${folderName}" not found`);
    }
    return folder;
  };

  findValue = (valueKey: string) => {
    const targetEnv = [...this.folders].reverse().find(folder => folder.environment.has(valueKey));
    return targetEnv !== undefined ? targetEnv.environment.get(valueKey) : undefined;
  };

  toObject = () => {
    return this.folders.map(folder => folder.toObject());
  };

  getEnvironments = () => {
    return this.folders.map(folder => folder.environment);
  };
}
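Usage sketch (not part of the source above): `findValue` walks the folder chain from the innermost folder outwards, which is why the deepest folder wins.

const parents = new ParentFolders([
  new Folder('fld_root', 'Root', { baseUrl: 'https://root.example', token: 'root-token' }),
  new Folder('fld_child', 'Child', { baseUrl: 'https://child.example' }),
]);

console.log(parents.getByName('Child').id); // 'fld_child'
console.log(parents.findValue('baseUrl'));  // 'https://child.example' — the deepest folder wins
console.log(parents.findValue('token'));    // 'root-token' — falls back to outer folders
console.log(parents.findValue('missing'));  // undefined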
@@ -1,119 +1,108 @@
import { Property, PropertyList } from './properties';

export interface HeaderDefinition {
  key: string;
  value: string;
  id?: string;
  name?: string;
  type?: string;
  disabled?: boolean;
}

export class Header extends Property {
  override _kind = 'Header';
  type = '';
  key: string;
  value: string;

  constructor(
    opts: HeaderDefinition | string,
    name?: string, // if it is defined, it overrides 'key' (not 'name')
  ) {
    super();

    if (typeof opts === 'string') {
      const obj = Header.parseSingle(opts);
      this.key = obj.key;
      this.value = obj.value;
    } else {
      this.id = opts.id ? opts.id : '';
      this.key = opts.key ? opts.key : '';
      this.name = name ? name : opts.name ? opts.name : '';
      this.value = opts.value ? opts.value : '';
      this.type = opts.type ? opts.type : '';
      this.disabled = opts ? opts.disabled : false;
    }
  }

  static override _index = 'key';

  static create(input?: { key: string; value: string } | string, name?: string): Header {
    return new Header(input || { key: '', value: '' }, name);
  }

  static isHeader(obj: object) {
    return '_kind' in obj && obj._kind === 'Header';
  }

  // example: 'Content-Type: application/json\nUser-Agent: MyClientLibrary/2.0\n'
  static parse(headerString: string): { key: string; value: string }[] {
    return headerString
      .split('\n')
      .filter(kvPart => kvPart.trim() !== '')
      .map(kvPart => Header.parseSingle(kvPart));
  }

  static parseSingle(headerStr: string): { key: string; value: string } {
    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers
    // the first colon is the separator
    const separatorPos = headerStr.indexOf(':');

    if (separatorPos <= 0) {
      throw Error('Header.parseSingle: the header string seems invalid');
    }

    const key = headerStr.slice(0, separatorPos);
    const value = headerStr.slice(separatorPos + 1);

    return { key: key.trim(), value: value.trim() };
  }

  static unparse(headers: { key: string; value: string }[] | PropertyList<Header>, separator?: string): string {
    const headerArray = [...headers.map(header => this.unparseSingle(header), {})];

    return headerArray.join(separator || '\n');
  }

  static unparseSingle(header: { key: string; value: string } | Header): string {
    // both PropertyList and plain objects contain 'key' and 'value'
    return `${header.key}: ${header.value}`;
  }

  update(newHeader: { key: string; value: string }) {
    this.key = newHeader.key;
    this.value = newHeader.value;
  }

  override valueOf() {
    return this.value;
  }
}

export class HeaderList<T extends Header> extends PropertyList<T> {
  constructor(parent: PropertyList<T> | undefined, populate: T[]) {
    super(Header, undefined, populate);
    this.parent = parent;
  }

  static isHeaderList(obj: any) {
    return '_kind' in obj && obj._kind === 'HeaderList';
  }

  contentSize(): number {
    return this.list
      .map(header => header.toString())
      .map(headerStr => headerStr.length) // TODO: handle special characters
      .reduce((totalSize, headerSize) => totalSize + headerSize, 0);
  }
}
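Usage sketch (not part of the source above): round-tripping headers through the parse/unparse helpers.

const parsed = Header.parse('Content-Type: application/json\nX-Trace-Id: 42\n');
// [{ key: 'Content-Type', value: 'application/json' }, { key: 'X-Trace-Id', value: '42' }]

const auth = new Header('Authorization: Bearer abc'); // string input goes through parseSingle
console.log(auth.key, auth.value);                    // 'Authorization' 'Bearer abc'

console.log(Header.unparseSingle({ key: 'Accept', value: 'text/plain' })); // 'Accept: text/plain'
console.log(Header.unparse(parsed, '; ')); // 'Content-Type: application/json; X-Trace-Id: 42'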
@@ -22,260 +22,258 @@ import { toUrlObject } from './urls';
import { checkIfUrlIncludesTag } from './utils';

export class InsomniaObject {
  public environment: Environment;
  public collectionVariables: Environment;
  public baseEnvironment: Environment;
  public variables: Variables;
  public request: ScriptRequest;
  public cookies: CookieObject;
  public info: RequestInfo;
  public response?: ScriptResponse;
  public execution: Execution;
  public vault?: Vault;

  public clientCertificates: ClientCertificate[];
  private _expect = expect;
  private _test = test;
  private _skip = skip;

  private iterationData: Environment;
  // TODO: follows will be enabled after Insomnia supports them
  private globals: Environment;
  private _settings: Settings;

  private requestTestResults: RequestTestResult[];

  private parentFolders: ParentFolders;

  constructor(rawObj: {
    globals: Environment;
    iterationData: Environment;
    environment: Environment;
    baseEnvironment: Environment;
    variables: Variables;
    request: ScriptRequest;
    settings: Settings;
    clientCertificates: ClientCertificate[];
    cookies: CookieObject;
    requestInfo: RequestInfo;
    execution: Execution;
    response?: ScriptResponse;
    parentFolders: ParentFolders;
    vault?: Vault;
  }) {
    this.globals = rawObj.globals;
    this.environment = rawObj.environment;
    this.baseEnvironment = rawObj.baseEnvironment;
    this.collectionVariables = this.baseEnvironment; // collectionVariables is mapped to baseEnvironment
    this.iterationData = rawObj.iterationData;
    this.variables = rawObj.variables;
    this.cookies = rawObj.cookies;
    this.response = rawObj.response;
    this.execution = rawObj.execution;
    this.vault = rawObj.vault;

    this.info = rawObj.requestInfo;
    this.request = rawObj.request;
    this._settings = rawObj.settings;
    this.clientCertificates = rawObj.clientCertificates;

    this.requestTestResults = new Array<RequestTestResult>();
    this.parentFolders = rawObj.parentFolders;

    return new Proxy(this, {
      get: (target, prop, receiver) => {
        if (prop === 'test') {
          const testHandler: TestHandler = async (msg: string, fn: () => Promise<void>) => {
            await this._test(msg, fn, this.pushRequestTestResult);
          };
          testHandler.skip = async (msg: string, fn: () => Promise<void>) => {
            await this._skip(msg, fn, this.pushRequestTestResult);
          };

          return testHandler;
        }
        return Reflect.get(target, prop, receiver);
      },
    });
  }

  sendRequest(request: string | ScriptRequest, cb: (error?: string, response?: ScriptResponse) => void) {
    return sendRequest(request, cb, this._settings);
  }

  test = () => {
    // this method is intercepted by the proxy above
  };

  private pushRequestTestResult = (testResult: RequestTestResult) => {
    this.requestTestResults = [...this.requestTestResults, testResult];
  };

  expect = (exp: boolean | number | string | object) => {
    return this._expect(exp);
  };

  get settings() {
    return undefined;
  }

  toObject = () => {
    return {
      globals: this.globals.toObject(),
      environment: this.environment.toObject(),
      baseEnvironment: this.baseEnvironment.toObject(),
      iterationData: this.iterationData.toObject(),
      variables: this.variables.localVarsToObject(),
      request: this.request,
      settings: this.settings,
      clientCertificates: this.clientCertificates,
      cookieJar: this.cookies.jar().toInsomniaCookieJar(),
      info: this.info.toObject(),
      response: this.response ? this.response.toObject() : undefined,
      requestTestResults: this.requestTestResults,
      execution: this.execution.toObject(),
      parentFolders: this.parentFolders.toObject(),
    };
  };
}
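Usage sketch (not part of the source above): what a pre-request or after-response script sees through this facade, which Insomnia exposes to scripts as the `insomnia` global; the Chai-style `.to.eql` chain is assumed from the bundled `expect`.

insomnia.environment.set('timestamp', Date.now());
insomnia.collectionVariables.set('runId', 'run-001'); // alias of the base environment

await insomnia.test('variables resolve across scopes', async () => {
  insomnia.expect(insomnia.variables.get('runId')).to.eql('run-001');
});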
export async function initInsomniaObject(rawObj: RequestContext, log: (...args: any[]) => void) {
  // Mapping rule for the global environment:
  // - when one global environment is selected, `globals` points to the selected one
  // Potential mapping rule for the future:
  // - The base global environment could also be introduced
  const globals = new Environment('globals', rawObj.globals || {}); // could be undefined
  // Mapping rule for the environment and base environment:
  // - If the base environment is selected, both `baseEnvironment` and `environment` point to the selected one.
  // - If one sub environment is selected, `baseEnvironment` points to the base env and `environment` points to the selected one.
  const baseEnvironment = new Environment(rawObj.baseEnvironment.name || '', rawObj.baseEnvironment.data);
  // reuse baseEnvironment when the "selected environment" points to the base environment
  const environment =
    rawObj.baseEnvironment.id === rawObj.environment.id
      ? baseEnvironment
      : new Environment(rawObj.environment.name || '', rawObj.environment.data);
  if (rawObj.baseEnvironment.id === rawObj.environment.id) {
    log(
      'warning: No environment is selected, modification of insomnia.environment will be applied to the base environment.',
    );
  }
  // Mapping rule for the environment the user uploaded in the collection runner
  const iterationData = rawObj.iterationData
    ? new Environment(rawObj.iterationData.name, rawObj.iterationData.data)
    : new Environment('iterationData', {});
  const localVariables = rawObj.transientVariables
    ? new Environment(rawObj.transientVariables.name, rawObj.transientVariables.data)
    : new Environment('transientVariables', {});
  const enableVaultInScripts = rawObj.settings?.enableVaultInScripts || false;
  const vault = rawObj.vault
    ? new Vault('vault', rawObj.vault, enableVaultInScripts)
    : new Vault('vault', {}, enableVaultInScripts);
  const cookies = new CookieObject(rawObj.cookieJar);
  // TODO: update follows when post-request script and iterationData are introduced
  const requestInfo = new RequestInfo({
    eventName: rawObj.requestInfo.eventName || 'prerequest',
    iteration: rawObj.requestInfo.iteration || 1,
    iterationCount: rawObj.requestInfo.iterationCount || 0,
    requestName: rawObj.request.name,
    requestId: rawObj.request._id,
  });

  const parentFolders = new ParentFolders(
    rawObj.parentFolders.map(folderObj => new Folder(folderObj.id, folderObj.name, folderObj.environment)),
  );

  const variables = new Variables({
    globalVars: globals,
    environmentVars: environment,
    collectionVars: baseEnvironment,
    iterationDataVars: iterationData,
    folderLevelVars: parentFolders.getEnvironments(),
    localVars: localVariables,
  });

  // todo: find if there's a better way to get the best cert
  // (╯°□°)╯︵ ┻━┻
  const ifUrlIncludesTag = checkIfUrlIncludesTag(rawObj.request.url);
  const matchedCertificates = filterClientCertificates(rawObj.clientCertificates || [], rawObj.request.url);
  const initEmptyCert = ifUrlIncludesTag || matchedCertificates?.length === 0;
  if (initEmptyCert) {
    getExistingConsole().warn(
      'The URL contains tags or no matched certificate found, insomnia.request.certificate is initialized as an empty certificate.',
    );
  }
  const defaultCertificate = initEmptyCert
    ? {
      disabled: false,
      name: 'Default Certificate',
      matches: [],
      key: undefined,
      cert: undefined,
      passphrase: undefined,
      pfx: undefined,
    }
    : {
      disabled: matchedCertificates[0].disabled,
      name: 'The first matched certificate from Settings',
      matches: [matchedCertificates[0].host],
      key: { src: matchedCertificates[0].key || '' },
      cert: { src: matchedCertificates[0].cert || '' },
      passphrase: matchedCertificates[0].passphrase || undefined,
      pfx: { src: matchedCertificates[0].pfx || '' }, // PFX or PKCS12 Certificate
    };

  const proxy = transformToSdkProxyOptions(
    rawObj.settings.httpProxy,
    rawObj.settings.httpsProxy,
    rawObj.settings.proxyEnabled,
    rawObj.settings.noProxy,
  );

  const reqUrl = toUrlObject(rawObj.request.url);
  reqUrl.addQueryParams(
    rawObj.request.parameters.map(param => ({ key: param.name, value: param.value, disabled: param.disabled })),
  );

  const reqOpt: RequestOptions = {
    name: rawObj.request.name,
    url: reqUrl,
    method: rawObj.request.method,
    header: rawObj.request.headers.map((header: RequestHeader) => ({
      key: header.name,
      value: header.value,
      disabled: header.disabled,
    })),
    body: toScriptRequestBody(rawObj.request.body),
    auth: toPreRequestAuth(rawObj.request.authentication),
    proxy,
    certificate: defaultCertificate,
    pathParameters: rawObj.request.pathParameters,
  };
  const request = new ScriptRequest(reqOpt);
  const execution = new Execution({
    location: rawObj.execution.location,
    skipRequest: rawObj.execution.skipRequest,
    nextRequestIdOrName: rawObj.execution.nextRequestIdOrName,
  });

  const responseBody = await readBodyFromPath(rawObj.response);
  const response = rawObj.response ? toScriptResponse(request, rawObj.response, responseBody) : undefined;

  return new InsomniaObject({
    globals,
    environment,
    baseEnvironment,
    iterationData,
    vault,
    variables,
    request,
    settings: rawObj.settings,
    clientCertificates: rawObj.clientCertificates,
    cookies,
    requestInfo,
    response,
    execution,
    parentFolders,
  });
}
@@ -9,29 +9,29 @@ import type { RequestInfoOption } from './request-info';
import type { RequestTestResult } from './test';

export interface IEnvironment {
  id: string;
  name: string;
  data: object;
}
export interface RequestContext {
  request: Request;
  timelinePath: string;
  environment: IEnvironment;
  baseEnvironment: IEnvironment;
  vault?: IEnvironment;
  collectionVariables?: object;
  globals?: object;
  iterationData?: Omit<IEnvironment, 'id'>;
  timeout: number;
  settings: Settings;
  clientCertificates: ClientCertificate[];
  cookieJar: InsomniaCookieJar;
  // only for the after-response script
  response?: sendCurlAndWriteTimelineResponse | sendCurlAndWriteTimelineError;
  requestTestResults?: RequestTestResult[];
  requestInfo: RequestInfoOption;
  execution: ExecutionOption;
  logs: string[];
  transientVariables?: Omit<IEnvironment, 'id'>;
  parentFolders: { id: string; name: string; environment: Record<string, any> }[];
}
@@ -2,69 +2,64 @@ import { fakerFunctions } from 'insomnia/src/ui/components/templating/faker-func
import { configure, type ConfigureOptions, type Environment as NunjuncksEnv } from 'nunjucks';

class Interpolator {
  private engine: NunjuncksEnv;

  constructor(config: ConfigureOptions) {
    this.engine = configure(config);
  }

  render = (template: string, context: object) => {
    // TODO: handle timeout
    // TODO: support plugin?
    return this.engine.renderString(this.renderWithFaker(template), context);
  };

  renderWithFaker = (template: string) => {
    const segments = template.split('}}');
    if (segments.length === 1) {
      return template;
    }

    const translatedSegments = segments.map(segment => {
      const tagStart = segment.lastIndexOf('{{');
      if (tagStart < 0) {
        return segment;
      }

      const tagName = segment.slice(tagStart + 2).trim();
      if (!tagName.startsWith('$')) {
        // it is a tag probably for interpolating, at least not for generating
        return segment + '}}';
      }
      const funcName = tagName.slice(1) as keyof typeof fakerFunctions; // remove prefix '$'

      if (!fakerFunctions[funcName]) {
        throw Error(`replaceIn: no faker function is found: ${funcName}`);
      }

      const generated = fakerFunctions[funcName]();
      return segment.slice(0, tagStart) + generated;
    });

    return translatedSegments.join('');
  };
}
const interpolator = new Interpolator({
  autoescape: false, // Don't escape HTML
  throwOnUndefined: true, // Strict mode
  tags: {
    blockStart: '{%',
    blockEnd: '%}',
    variableStart: '{{',
    variableEnd: '}}',
    commentStart: '{#',
    commentEnd: '#}',
  },
});

export function getInterpolator() {
  return interpolator;
}
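Usage sketch (not part of the source above): `renderWithFaker` expands `$`-prefixed tags via fakerFunctions before nunjucks interpolates the rest; `randomUUID` is an assumed key in that map, substitute any function it actually exports.

const rendered = getInterpolator().render(
  'user {{ name }} gets id {{ $randomUUID }}',
  { name: 'ada' },
);
// '{{ name }}' is resolved by nunjucks from the context object;
// '{{ $randomUUID }}' is replaced by the matching faker function before nunjucks runs.
console.log(rendered);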
@@ -5,429 +5,411 @@ import _ from 'lodash';
|
||||
import { getInterpolator } from './interpolator';
|
||||
|
||||
export const unsupportedError = (featureName: string, alternative?: string) => {
|
||||
const message = `${featureName} is not supported yet` +
|
||||
(alternative ? `, please use ${alternative} instead temporarily.` : '');
|
||||
return Error(message);
|
||||
const message =
|
||||
`${featureName} is not supported yet` + (alternative ? `, please use ${alternative} instead temporarily.` : '');
|
||||
return Error(message);
|
||||
};
|
||||
|
||||
export class PropertyBase {
|
||||
public _kind = 'PropertyBase';
|
||||
protected _parent: PropertyBase | undefined = undefined;
|
||||
protected description?: string;
|
||||
public _kind = 'PropertyBase';
|
||||
protected _parent: PropertyBase | undefined = undefined;
|
||||
protected description?: string;
|
||||
|
||||
constructor(description?: string) {
|
||||
this.description = description;
|
||||
constructor(description?: string) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
static propertyIsMeta(_value: any, key: string) {
|
||||
// no meta is defined in Insomnia and it basically find properties start with '_'
|
||||
// '_' is also rejected here
|
||||
return key && key.startsWith('_');
|
||||
}
|
||||
|
||||
static propertyUnprefixMeta(_value: any, key: string) {
|
||||
return _.trimStart(key, '_');
|
||||
}
|
||||
|
||||
// TODO: temporarily disable this
|
||||
// static toJSON(obj: { toJSON: () => string }) {
|
||||
// return obj.toJSON();
|
||||
// }
|
||||
|
||||
meta() {
|
||||
return {};
|
||||
}
|
||||
|
||||
parent() {
|
||||
return this._parent;
|
||||
}
|
||||
|
||||
forEachParent(_options: { withRoot?: boolean }, iterator: (obj: PropertyBase) => boolean) {
|
||||
const currentParent = this.parent();
|
||||
if (!currentParent) {
|
||||
return;
|
||||
}
|
||||
|
||||
static propertyIsMeta(_value: any, key: string) {
|
||||
// no meta is defined in Insomnia and it basically find properties start with '_'
|
||||
// '_' is also rejected here
|
||||
return key && key.startsWith('_');
|
||||
const queue: PropertyBase[] = [currentParent];
|
||||
const parents: PropertyBase[] = [];
|
||||
|
||||
while (queue.length > 0) {
|
||||
const ancester = queue.shift();
|
||||
if (!ancester) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO: check options
|
||||
const cloned = clone(ancester);
|
||||
const keepIterating = iterator(cloned);
|
||||
parents.push(cloned);
|
||||
if (!keepIterating) {
|
||||
break;
|
||||
}
|
||||
|
||||
const olderAncester = ancester.parent();
|
||||
if (olderAncester) {
|
||||
queue.push(olderAncester);
|
||||
}
|
||||
}
|
||||
|
||||
static propertyUnprefixMeta(_value: any, key: string) {
|
||||
return _.trimStart(key, '_');
|
||||
return parents;
|
||||
}
|
||||
|
||||
findInParents(property: string, customizer?: (ancester: PropertyBase) => boolean): PropertyBase | undefined {
|
||||
const currentParent = this.parent();
|
||||
if (!currentParent) {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: temporarily disable this
|
||||
// static toJSON(obj: { toJSON: () => string }) {
|
||||
// return obj.toJSON();
|
||||
// }
|
||||
const queue: PropertyBase[] = [currentParent];
|
||||
|
||||
meta() {
|
||||
return {};
|
||||
};
|
||||
while (queue.length > 0) {
|
||||
const ancester = queue.shift();
|
||||
if (!ancester) {
|
||||
continue;
|
||||
}
|
||||
|
||||
parent() {
|
||||
return this._parent;
|
||||
}
|
||||
|
||||
forEachParent(
|
||||
_options: { withRoot?: boolean },
|
||||
iterator: (obj: PropertyBase) => boolean,
|
||||
) {
|
||||
const currentParent = this.parent();
|
||||
if (!currentParent) {
|
||||
return;
|
||||
const cloned = clone(ancester);
|
||||
const hasProperty = Object.keys(cloned.meta()).includes(property);
|
||||
if (!hasProperty) {
|
||||
// keep traversing until parent has the property
|
||||
// no op
|
||||
} else {
|
||||
if (customizer) {
|
||||
if (customizer(cloned)) {
|
||||
// continue until customizer returns a truthy value
|
||||
return cloned;
|
||||
}
|
||||
} else {
|
||||
// customizer is not specified
|
||||
// stop at the first parent that contains the property
|
||||
return cloned;
|
||||
}
|
||||
}
|
||||
|
||||
const queue: PropertyBase[] = [currentParent];
|
||||
const parents: PropertyBase[] = [];
|
||||
|
||||
while (queue.length > 0) {
|
||||
const ancester = queue.shift();
|
||||
if (!ancester) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO: check options
|
||||
const cloned = clone(ancester);
|
||||
const keepIterating = iterator(cloned);
|
||||
parents.push(cloned);
|
||||
if (!keepIterating) {
|
||||
break;
|
||||
}
|
||||
|
||||
const olderAncester = ancester.parent();
|
||||
if (olderAncester) {
|
||||
queue.push(olderAncester);
|
||||
}
|
||||
}
|
||||
|
||||
return parents;
|
||||
const olderAncester = ancester.parent();
|
||||
if (olderAncester) {
|
||||
queue.push(olderAncester);
|
||||
}
|
||||
}
|
||||
|
||||
findInParents(
|
||||
property: string,
|
||||
customizer?: (ancester: PropertyBase) => boolean,
|
||||
): PropertyBase | undefined {
|
||||
const currentParent = this.parent();
|
||||
if (!currentParent) {
|
||||
return;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const queue: PropertyBase[] = [currentParent];
|
||||
toJSON() {
|
||||
const entriesToExport = Object.entries(this).filter(
|
||||
(kv: [string, any]) => typeof kv[1] !== 'function' && typeof kv[1] !== 'undefined' && kv[0] !== '_kind',
|
||||
);
|
||||
|
||||
while (queue.length > 0) {
|
||||
const ancester = queue.shift();
|
||||
if (!ancester) {
|
||||
continue;
|
||||
}
|
||||
return Object.fromEntries(entriesToExport);
|
||||
}
|
||||
|
||||
const cloned = clone(ancester);
|
||||
const hasProperty = Object.keys(cloned.meta()).includes(property);
|
||||
if (!hasProperty) {
|
||||
// keep traversing until parent has the property
|
||||
// no op
|
||||
} else {
|
||||
if (customizer) {
|
||||
if (customizer(cloned)) {
|
||||
// continue until customizer returns a truthy value
|
||||
return cloned;
|
||||
}
|
||||
} else {
|
||||
// customizer is not specified
|
||||
// stop at the first parent that contains the property
|
||||
return cloned;
|
||||
}
|
||||
}
|
||||
toObject() {
|
||||
return this.toJSON();
|
||||
}
|
||||
|
||||
const olderAncester = ancester.parent();
|
||||
if (olderAncester) {
|
||||
queue.push(olderAncester);
|
||||
}
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
const entriesToExport = Object
|
||||
.entries(this)
|
||||
.filter((kv: [string, any]) =>
|
||||
typeof kv[1] !== 'function'
|
||||
&& typeof kv[1] !== 'undefined'
|
||||
&& kv[0] !== '_kind'
|
||||
);
|
||||
|
||||
return Object.fromEntries(entriesToExport);
|
||||
}
|
||||
|
||||
toObject() {
|
||||
return this.toJSON();
|
||||
}
|
||||
|
||||
toString() {
|
||||
return JSON.stringify(this.toJSON());
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this.toJSON());
|
||||
}
|
||||
}
|
||||
|
||||
export class Property extends PropertyBase {
|
||||
id: string;
|
||||
name?: string;
|
||||
disabled?: boolean;
|
||||
// TODO: parent property will be introduced when collection manipulation is supported
|
||||
id: string;
|
||||
name?: string;
|
||||
disabled?: boolean;
|
||||
// TODO: parent property will be introduced when collection manipulation is supported
|
||||
|
||||
constructor(
|
||||
id?: string,
|
||||
name?: string,
|
||||
disabled?: boolean,
|
||||
info?: { id?: string; name?: string },
|
||||
) {
|
||||
super();
|
||||
this._kind = 'Property';
|
||||
this.id = info?.id || id || '';
|
||||
this.name = info?.name || name || '';
|
||||
this.disabled = disabled || false;
|
||||
constructor(id?: string, name?: string, disabled?: boolean, info?: { id?: string; name?: string }) {
|
||||
super();
|
||||
this._kind = 'Property';
|
||||
this.id = info?.id || id || '';
|
||||
this.name = info?.name || name || '';
|
||||
this.disabled = disabled || false;
|
||||
}
|
||||
|
||||
static _index = 'id';
|
||||
|
||||
static replaceSubstitutions(content: string, ...variables: object[]): string {
|
||||
if (!Array.isArray(variables) || typeof content !== 'string') {
|
||||
throw Error("replaceSubstitutions: the first param's type is not string or other parameters are not an array");
|
||||
}
|
||||
|
||||
static _index = 'id';
|
||||
let context: object = {};
|
||||
// the searching priority of rendering is from left to right
|
||||
variables.reverse().forEach(variable => (context = { ...context, ...variable }));
|
||||
|
||||
static replaceSubstitutions(content: string, ...variables: object[]): string {
|
||||
if (!Array.isArray(variables) || typeof content !== 'string') {
|
||||
throw Error("replaceSubstitutions: the first param's type is not string or other parameters are not an array");
|
||||
}
|
||||
return getInterpolator().render(content, context);
|
||||
}
|
||||
|
||||
let context: object = {};
|
||||
// the searching priority of rendering is from left to right
|
||||
variables.reverse().forEach(variable => context = { ...context, ...variable });
|
||||
|
||||
return getInterpolator().render(content, context);
|
||||
static replaceSubstitutionsIn(obj: object, ...variables: object[]): object {
|
||||
if (!Array.isArray(variables) || typeof obj !== 'object') {
|
||||
throw Error("replaceSubstitutions: the first param's type is not object or other parameters are not an array");
|
||||
}
|
||||
|
||||
static replaceSubstitutionsIn(obj: object, ...variables: object[]): object {
|
||||
if (!Array.isArray(variables) || typeof obj !== 'object') {
|
||||
throw Error("replaceSubstitutions: the first param's type is not object or other parameters are not an array");
|
||||
}
|
||||
try {
|
||||
const content = JSON.stringify(obj);
|
||||
|
||||
try {
|
||||
const content = JSON.stringify(obj);
|
||||
let context: object = {};
|
||||
// the searching priority of rendering is from left to right
|
||||
variables.reverse().forEach(variable => {
|
||||
context = { ...context, ...variable };
|
||||
});
|
||||
|
||||
let context: object = {};
|
||||
// the searching priority of rendering is from left to right
|
||||
variables.reverse().forEach(variable => {
|
||||
context = { ...context, ...variable };
|
||||
});
|
||||
|
||||
const rendered = getInterpolator().render(content, context);
|
||||
return JSON.parse(rendered);
|
||||
|
||||
} catch (e: any) {
|
||||
throw Error(`replaceSubstitutionsIn: ${e.toString()}`);
|
||||
}
|
||||
const rendered = getInterpolator().render(content, context);
|
||||
return JSON.parse(rendered);
|
||||
} catch (e: any) {
|
||||
throw Error(`replaceSubstitutionsIn: ${e.toString()}`);
|
||||
}
|
||||
}
|
||||
|
||||
describe(content: string, typeName: string) {
|
||||
this._kind = typeName;
|
||||
this.description = content;
|
||||
}
|
||||
describe(content: string, typeName: string) {
|
||||
this._kind = typeName;
|
||||
this.description = content;
|
||||
}
|
||||
}

export class PropertyList<T extends Property> {
  protected _kind = 'PropertyList';
  protected list: T[] = [];

  constructor(
    protected typeClass: { _index?: string },
    protected parent: Property | PropertyList<any> | undefined,
    populate: T[],
  ) {
    this.parent = parent;
    this.list = populate;
  }

  static isPropertyList(obj: object) {
    return '_kind' in obj && obj._kind === 'PropertyList';
  }

  add(item: T) {
    this.list.push(item);
  }

  all() {
    return this.list.map(pp => pp.toJSON());
  }

  append(item: T) {
    this.add(item);
  }

  assimilate(source: T[] | PropertyList<T>, prune?: boolean) {
    // it doesn't update values from a source list
    if (prune) {
      this.clear();
    }
    if ('list' in source) { // it is PropertyList<T>
      this.list.push(...source.list);
    } else {
      this.list.push(...source);
    }
  }

  clear() {
    this.list = [];
  }

  count() {
    return this.list.length;
  }

  each(iterator: (item: T) => void, context: object) {
    interface Iterator {
      context?: object;
      (item: T): void;
    }
    const it: Iterator = iterator;
    it.context = context;

    this.list.forEach(it);
  }

  // TODO: unsupported yet as properties are not organized as hierarchy
  eachParent(_iterator: (parent: Property, prev: Property) => void, _context?: object) {
    throw unsupportedError('eachParent');
  }

  filter(rule: (item: T) => boolean, context: object) {
    interface Iterator {
      context?: object;
      (item: T): boolean;
    }
    const it: Iterator = rule;
    it.context = context;

    return this.list.filter(it);
  }

  // TODO: support returning {Item|ItemGroup}
  find(rule: (item: T) => boolean, context?: object) {
    interface Finder {
      context?: object;
      (item: T): boolean;
    }
    const finder: Finder = rule;
    finder.context = context;

    return this.list.find(finder);
  }

  // it does not return the underlying type of the item because they are not supported
  get(key: string) {
    return this.one(key);
  }

  // TODO: value is not used as its usage is unknown
  has(item: T, _value?: any) {
    return this.indexOf(item) >= 0;
  }

  idx(index: number) {
    if (index <= this.list.length - 1) {
      return this.list[index];
    }
    return undefined;
  }

  indexOf(item: string | T) {
    const indexFieldName = this.typeClass._index || 'id';

    for (let i = 0; i < this.list.length; i++) {
      const record = this.list[i] as Record<string, any>;

      if (typeof item === 'string' && record[indexFieldName] === item) {
        return i;
      }
      const itemRecord = item as Record<string, any>;
      if (record[indexFieldName] === itemRecord[indexFieldName]) {
        return i;
      }
    }
    return -1;
  }

  insert(item: T, before?: number) {
    if (before != null && before >= 0 && before <= this.list.length - 1) {
      this.list = [...this.list.slice(0, before), item, ...this.list.slice(before)];
    } else {
      this.append(item);
    }
  }

  insertAfter(item: T, after?: number) {
    if (after != null && after >= 0 && after <= this.list.length - 1) {
      this.list = [...this.list.slice(0, after + 1), item, ...this.list.slice(after + 1)];
    } else {
      this.append(item);
    }
  }

  map(iterator: (item: T) => any, context: object) {
    interface Iterator {
      context?: object;
      (item: T): any;
    }
    const it: Iterator = iterator;
    it.context = context;

    return this.list.map(it);
  }

  one(id: string) {
    const indexFieldName = this.typeClass._index || 'id';

    for (let i = this.list.length - 1; i >= 0; i--) {
      const record = this.list[i] as Record<string, any>;
      if (record[indexFieldName] === id) {
        return this.list[i];
      }
    }

    return undefined;
  }

  populate(items: T[]) {
    this.list = [...this.list, ...items];
  }

  prepend(item: T) {
    this.list = [item, ...this.list];
  }

  reduce(iterator: (acc: any, item: T) => any, accumulator: any, context: object) {
    interface Iterator {
      context?: object;
      (acc: any, item: T): any;
    }
    const it: Iterator = iterator;
    it.context = context;

    return this.list.reduce(it, accumulator);
  }

  remove(predicate: T | ((item: T) => boolean), context: object) {
    if (typeof predicate === 'function') {
      const reversePredicate = (item: T) => !predicate(item);
      this.list = this.filter(reversePredicate, context);
    } else {
      this.list = this.filter(item => !equal(predicate, item), context);
    }
  }

  repopulate(items: T[]) {
    this.clear();
    this.populate(items);
  }

  // TODO: unsupported yet
  toObject(_excludeDisabled?: boolean, _caseSensitive?: boolean, _multiValue?: boolean, _sanitizeKeys?: boolean) {
    // it just dumps all properties of each element without arguments,
    // so the user is able to handle them by themselves
    return this.list.map(elem => elem.toJSON());
  }

  toString() {
    const itemStrs = this.list.map(item => item.toString());
    return `[${itemStrs.join('; ')}]`;
  }

  upsert(item: T): boolean {
    if (item == null) {
      return false;
    }

    const itemIdx = this.indexOf(item);
    if (itemIdx >= 0) {
      this.list = [...this.list.splice(0, itemIdx), item, ...this.list.splice(itemIdx + 1)];
      return false;
    }

    this.add(item);
    return true;
  }
}
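
// Rough usage sketch (names are illustrative): `Variable` stands for any concrete Property
// subclass keyed by `id`, since no `_index` is declared on the type class passed in.
const variables = new PropertyList<Variable>(Variable, undefined, []);
variables.add(new Variable({ id: 'a', value: 1 }));
variables.upsert(new Variable({ id: 'a', value: 2 })); // same id: replaced in place, returns false
variables.upsert(new Variable({ id: 'b', value: 3 })); // new id: appended, returns true
variables.one('a');  // the latest item whose id is 'a'
variables.count();   // 2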
@@ -1,9 +1,51 @@
import { getExistingConsole } from './console';
import { Property, PropertyList } from './properties';
import type { Url } from './urls';
import { UrlMatchPattern, UrlMatchPatternList } from './urls';

export interface ProxyConfigOptions {
  match: string;
  host: string;
  port?: number;
  tunnel: boolean;
  disabled?: boolean;
  authenticate: boolean;
  username: string;
  password: string;
  // follows are for compatibility with Insomnia
  bypass?: string[];
  protocol: string;
}

export class ProxyConfig extends Property {
  override _kind = 'ProxyConfig';
  type: string;

  host: string;
  match: string;
  port?: number;
  tunnel: boolean;
  authenticate: boolean;
  username: string;
  password: string;
  bypass: string[]; // it is for compatibility with Insomnia's bypass list
  protocol: string;

  static authenticate = false;
  static bypass: UrlMatchPatternList<UrlMatchPattern> = new UrlMatchPatternList<UrlMatchPattern>(undefined, []);
  static host = '';
  static match = '';
  static password = '';
  static port?: number = undefined;
  static tunnel = false; // unsupported
  static username = '';
  static protocol = 'https:';

  constructor(def: {
    id?: string;
    name?: string;
    type?: string;

    match: string;
    host: string;
    port?: number;
    tunnel: boolean;
    disabled?: boolean;
    authenticate: boolean;
    username: string;
    password: string;
    bypass?: string[];
    protocol: string;
  }) {
    super();

    this.id = def.id ? def.id : '';
    this.name = def.name ? def.name : '';
    this.type = def.type ? def.type : '';
    this.disabled = def.disabled ? def.disabled : false;

    this.host = def.host;
    this.match = def.match;
    this.port = def.port;
    this.tunnel = def.tunnel;
    this.authenticate = def.authenticate;
    this.username = def.username;
    this.password = def.password;
    this.bypass = def.bypass || [];
    this.protocol = def.protocol;
  }

  static override _index = 'key';

  static isProxyConfig(obj: object) {
    return '_kind' in obj && obj._kind === 'ProxyConfig';
  }

  getProtocols(): string[] {
    // match field example: 'http+https://example.com/*'
    const urlMatch = new UrlMatchPattern(this.match);
    return urlMatch.getProtocols();
  }

  getProxyUrl(): string {
    // http://proxy_username:proxy_password@proxy.com:8080
    const portSegment = this.port === undefined ? '' : `:${this.port}`;

    if (this.authenticate) {
      return `${this.protocol}//${this.username}:${this.password}@${this.host}${portSegment}`;
    }
    return `${this.protocol}//${this.host}${portSegment}`;
  }

  test(url?: string) {
    if (!url) {
      // TODO: it is confusing in which case url arg is optional
      return false;
    }
    if (this.bypass.includes(url)) {
      return false;
    }

    const urlMatch = new UrlMatchPattern(this.match);
    return urlMatch.test(url);
  }

  update(options: {
    host: string;
    match: string;
    port?: number;
    tunnel: boolean;
    authenticate: boolean;
    username: string;
    password: string;
  }) {
    this.host = options.host;
    this.match = options.match;
    this.port = options.port;
    this.tunnel = options.tunnel;
    this.authenticate = options.authenticate;
    this.username = options.username;
    this.password = options.password;
  }

  updateProtocols(_protocols: string[]) {
    // In Insomnia there is no whitelist while there is a blacklist
    throw Error('updateProtocols is not supported in Insomnia');
  }
}
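
// Illustrative sketch (values invented): how a ProxyConfig is expected to behave.
const proxyConfig = new ProxyConfig({
  match: 'http+https://*.example.com/*',
  host: 'proxy.com',
  port: 8080,
  tunnel: true,
  authenticate: true,
  username: 'proxy_username',
  password: 'proxy_password',
  bypass: ['https://bypassed.example.com/'],
  protocol: 'https:',
});
proxyConfig.getProxyUrl();                         // 'https://proxy_username:proxy_password@proxy.com:8080'
proxyConfig.test('https://bypassed.example.com/'); // false: listed in bypass
proxyConfig.test('https://api.example.com/v1');    // true when the match pattern accepts the url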

// example:
// ]);

export class ProxyConfigList<T extends ProxyConfig> extends PropertyList<T> {
  constructor(parent: PropertyList<T> | undefined, populate: T[]) {
    super(ProxyConfig, undefined, populate);
    this.parent = parent;
  }

  static isProxyConfigList(obj: any) {
    return '_kind' in obj && obj._kind === 'ProxyConfigList';
  }

  resolve(url?: Url) {
    if (!url) {
      return null;
    }

    const urlStr = url.toString();
    const matches = this.list
      .filter((proxyConfig: ProxyConfig) => {
        return proxyConfig.test(urlStr);
      })
      .map(proxyConfig => proxyConfig.toJSON());

    if (matches.length > 0) {
      return matches[0];
    }
    return null;
  }
}
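
// Sketch (reusing the proxyConfig sketched above): resolve() returns the JSON form of the
// first config whose match pattern accepts the url, or null when nothing matches. The Url
// class is assumed to come from './urls' as imported at the top of this file.
const proxyConfigList = new ProxyConfigList(undefined, [proxyConfig]);
const resolved = proxyConfigList.resolve(new Url('https://api.example.com/v1'));
// resolved?.host === 'proxy.com' when the config matches; otherwise resolved === null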

export function transformToSdkProxyOptions(
  httpProxy: string,
  httpsProxy: string,
  proxyEnabled: boolean,
  noProxy: string,
) {
  const bestProxy = httpsProxy || httpProxy || '';
  const enabledProxy = proxyEnabled && bestProxy.trim() !== '';
  const bypassProxyList = noProxy ? noProxy.split(',').map(urlStr => urlStr.trim()) : [];
  const proxy: ProxyConfigOptions = {
    disabled: !enabledProxy,
    match: '<all_urls>',
    bypass: bypassProxyList,
    host: '',
    port: undefined,
    tunnel: false,
    authenticate: false,
    username: '',
    password: '',
    protocol: 'http',
  };

  if (bestProxy !== '') {
    let sanitizedProxy = bestProxy;
    if (bestProxy.indexOf('://') === -1) {
      getExistingConsole().warn(`The protocol is missing for proxy, 'https:' is enabled for: ${bestProxy}`);
      sanitizedProxy = 'https://' + bestProxy;
    }

    try {
      const sanitizedProxyUrlOptions = new URL(sanitizedProxy); // it should just work in node and browser

      if (sanitizedProxyUrlOptions.port !== '') {
        proxy.port = parseInt(sanitizedProxyUrlOptions.port, 10);
      }

      proxy.protocol = sanitizedProxyUrlOptions.protocol;
      proxy.host = sanitizedProxyUrlOptions.hostname;
      proxy.username = sanitizedProxyUrlOptions.username;
      proxy.password = sanitizedProxyUrlOptions.password;
      if (proxy.username || proxy.password) {
        proxy.authenticate = true;
      }
    } catch (e) {
      throw `Failed to parse proxy (${sanitizedProxy}): ${e.message}`;
    }
  }

  return proxy;
}
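
// Illustrative transformation (values invented, and it assumes the sdk console has been
// initialized so the warning can be emitted): the https proxy wins over the http one, a
// missing scheme defaults to 'https:', and embedded credentials switch authenticate on.
const sdkProxy = transformToSdkProxyOptions(
  'http://fallback-proxy.local:3128', // httpProxy
  'user:pass@corp-proxy.local:8080',  // httpsProxy, deliberately without a scheme
  true,                               // proxyEnabled
  'localhost, 127.0.0.1',             // noProxy
);
// sdkProxy.protocol === 'https:', sdkProxy.host === 'corp-proxy.local', sdkProxy.port === 8080,
// sdkProxy.authenticate === true, sdkProxy.bypass deep-equals ['localhost', '127.0.0.1']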

@@ -1,35 +1,35 @@
export type EventName = 'prerequest' | 'test';

export interface RequestInfoOption {
  eventName?: EventName;
  iteration?: number;
  iterationCount?: number;
  requestName?: string;
  requestId?: string;
}

export class RequestInfo {
  public eventName: EventName;
  public iteration: number;
  public iterationCount: number;
  public requestName: string;
  public requestId: string;

  constructor(options: RequestInfoOption) {
    this.eventName = options.eventName || 'prerequest';
    this.iteration = options.iteration || 1;
    this.iterationCount = options.iterationCount || 1;
    this.requestName = options.requestName || '';
    this.requestId = options.requestId || '';
  }

  toObject = () => {
    return {
      eventName: this.eventName,
      iteration: this.iteration,
      iterationCount: this.iterationCount,
      requestName: this.requestName,
      requestId: this.requestId,
    };
  };
}
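
// Sketch: this is the shape behind the info object handed to scripts; unset options fall
// back to the 'prerequest' event, iteration 1 of 1, and empty request name/id.
const info = new RequestInfo({ eventName: 'test', requestName: 'Get user' });
info.toObject();
// { eventName: 'test', iteration: 1, iterationCount: 1, requestName: 'Get user', requestId: '' }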

File diff suppressed because it is too large

@@ -12,326 +12,317 @@ import type { Request } from './request';
import { calculateHeadersSize } from './request';

export interface ResponseOptions {
  code: number;
  reason?: string;
  header?: HeaderDefinition[];
  cookie?: CookieOptions[];
  body?: string;
  // ideally it should work in both browser and node
  stream?: Buffer | ArrayBuffer;
  responseTime: number;
  originalRequest: Request;
  bytesRead?: number; // this is from Insomnia for returning response size() directly
}

export interface ResponseContentInfo {
  mimeType: string;
  mimeFormat: string;
  charset: string;
  fileExtension: string;
  fileName: string;
  contentType: string;
}

// TODO: unknown usage
// export interface Timings

export class Response extends Property {
  body: string;
  code: number;
  cookies: CookieList;
  headers: HeaderList<Header>;
  originalRequest: Request;
  responseTime: number;
  status: string;
  stream?: Buffer | ArrayBuffer;

  private bytesRead: number;

  constructor(options: ResponseOptions) {
    super();

    this._kind = 'Response';

    this.body = options.body || '';
    this.code = options.code;
    this.cookies = new CookieList(options.cookie?.map(cookie => new Cookie(cookie)) || []);
    this.headers = new HeaderList(undefined, options.header?.map(headerOpt => new Header(headerOpt)) || []);
    this.originalRequest = options.originalRequest;
    this.responseTime = options.responseTime;
    this.stream = options.stream;
    const detectedStatus = options.reason || RESPONSE_CODE_REASONS[options.code];
    if (!detectedStatus) {
      throw Error(`Response constructor: reason or code field must be set in the options(reason: ${options.reason}, code:${options.code})`);
    } else {
      this.status = detectedStatus;
    }

    this.bytesRead = options.bytesRead || 0;
  }

  // TODO: the accurate type of the response should be given
  static createFromNode(
    response: {
      body: string;
      headers: HeaderDefinition[];
      statusCode: number;
      statusMessage: string;
      elapsedTime: number;
      originalRequest: Request;
      stream: Buffer | ArrayBuffer;
    },
    cookies: CookieOptions[],
  ) {
    return new Response({
      cookie: cookies,
      body: response.body.toString(),
      stream: response.stream,
      header: response.headers,
      code: response.statusCode,
      reason: response.statusMessage,
      responseTime: response.elapsedTime,
      originalRequest: response.originalRequest,
    });
  }

  static isResponse(obj: object) {
    return '_kind' in obj && obj._kind === 'Response';
  }

  contentInfo(): ResponseContentInfo {
    const mimeInfo = {
      mimeType: 'application/octet-stream',
      mimeFormat: '', // TODO: its definition is unknown
      charset: 'utf-8',
    };

    const contentType = this.headers.find(header => header.key === 'Content-Type');
    if (contentType) {
      const directives = contentType.valueOf().split('; ');
      if (directives.length === 0) {
        throw Error('contentInfo: header Content-Type value is blank');
      } else {
        const mimeType = directives[0];
        if (!mimeType) {
          throw Error('contentInfo: mime type in header Content-Type is invalid');
        }
        mimeInfo.mimeType = mimeType;
        directives.forEach(dir => {
          if (dir.startsWith('charset')) {
            mimeInfo.charset = dir.slice(dir.indexOf('=') + 1);
          }
        });
      }
    }

    const fileInfo = {
      extension: '',
      name: 'unknown',
    };

    const contentDisposition = this.headers.find(header => header.key === 'Content-Disposition');
    if (contentDisposition) {
      const directives = contentDisposition.valueOf().split('; ');
      directives.forEach(dir => {
        if (dir.startsWith('filename')) {
          const fileName = fileInfo.extension = dir.slice(dir.indexOf('=') + 1);
          fileInfo.name = fileName.slice(1, fileName.lastIndexOf('.')); // ignore '"' around the file name
          fileInfo.extension = fileName.slice(fileName.lastIndexOf('.') + 1, fileName.length - 1);
        }
      });
    }

    return {
      mimeType: mimeInfo.mimeType,
      mimeFormat: mimeInfo.mimeFormat,
      charset: mimeInfo.charset,
      fileExtension: fileInfo.extension,
      fileName: fileInfo.name,
      contentType: contentType?.valueOf() || 'application/octet-stream',
    };
  }

  dataURI() {
    const contentInfo = this.contentInfo();
    const bodyInBase64 = this.stream || this.body;
    if (!bodyInBase64) {
      throw Error('dataURI(): response body is not defined');
    }

    return `data:${contentInfo.contentType};base64,${bodyInBase64}`;
  }

  json(reviver?: (key: string, value: any) => any, _strict?: boolean) {
    // TODO: enable strict after common module is introduced
    try {
      return JSON.parse(this.body.toString(), reviver);
    } catch (e) {
      throw Error(`json: failed to parse: ${e}`);
    }
  }

  jsonp(_reviver?: (key: string, value: any) => any, _strict?: boolean) {
    throw unsupportedError('jsonp()');
  }

  reason() {
    return this.status;
  }

  size() {
    const headerSize = calculateHeadersSize(this.headers);
    return {
      body: this.bytesRead,
      header: headerSize,
      total: this.bytesRead + headerSize,
      source: 'COMPUTED',
    };
  }

  text() {
    return this.body.toString();
  }

  // Besides chai.expect, "to" is extended to support cases like:
  // insomnia.response.to.have.status(200);
  // insomnia.response.to.not.have.status(200);
  get to() {
    type valueType = boolean | number | string | object | undefined;

    const verify = (got: valueType, expected: valueType, checkEquality = true) => {
      if (['boolean', 'number', 'string', 'undefined'].includes(typeof got)) {
        if ((checkEquality && expected === got) || (!checkEquality && expected !== got)) {
          return;
        }
      } else if (
        (checkEquality && deepEqual(got, expected, { strict: true })) ||
        (!checkEquality && !deepEqual(got, expected, { strict: true }))
      ) {
        return;
      }
      throw Error(`"${got}" is not equal to the expected value: "${expected}"`);
    };
    const haveStatus = (expected: number | string, checkEquality: boolean) => {
      if (typeof expected === 'string') {
        verify(this.status, expected, checkEquality);
      } else {
        verify(this.code, expected, checkEquality);
      }
    };
    const haveHeader = (expected: string, checkEquality: boolean) =>
      verify(this.headers.toObject().find(header => header.key === expected) !== undefined, checkEquality);
    const haveBody = (expected: string, checkEquality: boolean) => verify(this.text(), expected, checkEquality);
    const haveJsonBody = (expected: object, checkEquality: boolean) => verify(this.json(), expected, checkEquality);
    const haveJsonSchema = (expected: object, checkEquality: boolean) => {
      const ajv = new Ajv();

      try {
        const jsonBody = JSON.parse(this.body);
        const schemaMatched = ajv.validate(expected, jsonBody);
        if ((schemaMatched && checkEquality) || (!schemaMatched && !checkEquality)) {
          return;
        }
      } catch (e) {
        throw Error(`Failed to verify response body schema, response could not be a valid json: "${e}"`);
      }
      throw Error("Response's schema is not equal to the expected value");
    };

    return {
      // follows extend chai's chains for compatibility
      have: {
        status: (expected: number | string) => haveStatus(expected, true),
        header: (expected: string) => haveHeader(expected, true),
        body: (expected: string) => haveBody(expected, true),
        jsonBody: (expected: object) => haveJsonBody(expected, true),
        jsonSchema: (expected: object) => haveJsonSchema(expected, true),
      },
      not: {
        have: {
          status: (expected: number | string) => haveStatus(expected, false),
          header: (expected: string) => haveHeader(expected, false),
          body: (expected: string) => haveBody(expected, false),
          jsonBody: (expected: object) => haveJsonBody(expected, false),
          jsonSchema: (expected: object) => haveJsonSchema(expected, false),
        },
      },
    };
  }
}
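
// A hedged sketch of how the `to` chain reads inside an after-response script, where
// `insomnia.response` is assumed to be an instance of this Response class and
// `insomnia.test` registers a named assertion block:
insomnia.test('returns the created user', () => {
  insomnia.response.to.have.status(200);
  insomnia.response.to.have.header('Content-Type');
  insomnia.response.to.not.have.status(500);
  insomnia.response.to.have.jsonSchema({
    type: 'object',
    required: ['id'],
    properties: { id: { type: 'string' } },
  });
  const body = insomnia.response.json(); // throws if the body is not valid JSON
  console.log('created id:', body.id);
});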

export function toScriptResponse(
  originalRequest: Request,
  partialInsoResponse: sendCurlAndWriteTimelineResponse | sendCurlAndWriteTimelineError,
  responseBody: string,
): Response | undefined {
  if ('error' in partialInsoResponse) {
    // it is sendCurlAndWriteTimelineError and basically doesn't contain anything useful
    return undefined;
  }
  const partialResponse = partialInsoResponse as sendCurlAndWriteTimelineResponse;

  const headers = partialResponse.headers
    ? partialResponse.headers.map(
      insoHeader => ({
        key: insoHeader.name,
        value: insoHeader.value,
      }),
      {},
    )
    : [];

  const insoCookieOptions = partialResponse.headers
    ? partialResponse.headers
      .filter(header => {
        return header.name.toLowerCase() === 'set-cookie';
      }, {})
      .map(setCookieHeader => Cookie.parse(setCookieHeader.value))
    : [];

  const responseOption = {
    code: partialResponse.statusCode || 0,
    reason: partialResponse.statusMessage,
    header: headers,
    cookie: insoCookieOptions,
    body: responseBody,
    // stream is duplicated with body
    responseTime: partialResponse.elapsedTime,
    originalRequest,
    bytesRead: partialResponse.bytesRead,
  };

  return new Response(responseOption);
}

export async function readBodyFromPath(
  response: sendCurlAndWriteTimelineResponse | sendCurlAndWriteTimelineError | undefined,
) {
  // it allows scripts to execute (e.g., for testing) while the body contains nothing
  if (!response || 'error' in response) {
    return '';
  } else if (!response.bodyPath) {
    return '';
  }
  const nodejsReadCurlResponse = process.type === 'renderer' ? window.bridge.readCurlResponse : readCurlResponse;
  const readResponseResult = await nodejsReadCurlResponse({
    bodyPath: response.bodyPath,
    bodyCompression: response.bodyCompression,
  });

  if (readResponseResult.error) {
    throw Error(`Failed to read body: ${readResponseResult.error}`);
  }
  return readResponseResult.body;
}
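
// Rough sketch (names come from the functions above; the partial response shape is produced
// by Insomnia's network layer, so this is only illustrative): read the body from disk first,
// then build the script-facing Response from it.
async function buildScriptResponse(
  originalRequest: Request,
  partial: sendCurlAndWriteTimelineResponse | sendCurlAndWriteTimelineError,
): Promise<Response | undefined> {
  const body = await readBodyFromPath(partial);            // '' when the send failed or had no body
  return toScriptResponse(originalRequest, partial, body); // undefined for error results
}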

@@ -11,272 +11,276 @@ import { Request, type RequestOptions } from './request';
import { Response } from './response';

export async function sendRequest(
  request: string | Request | RequestOptions,
  cb: (error?: string, response?: Response) => void,
  settings: Settings,
): Promise<Response | undefined> {
  return new Promise<Response | undefined>(async resolve => {
    // TODO(george): enable cascading cancellation later as current solution just adds complexity
    const requestOptions = requestToCurlOptions(request, settings);

    try {
      const nodejsCurlRequest = process.type === 'renderer'
        ? window.bridge.curlRequest
        : (await import('insomnia/src/main/network/libcurl-promise')).curlRequest;
      nodejsCurlRequest(requestOptions)
        .then((result: any) => {
          const output = result as CurlRequestOutput;
          return curlOutputToResponse(output, request);
        })
        .then((transformedOutput: Response) => {
          cb(undefined, transformedOutput);
          resolve(transformedOutput);
        })
        .catch(e => {
          cb(e, undefined);
          resolve(undefined);
        });
    } catch (err: any) {
      if (err.name === 'AbortError') {
        cb(`Request was cancelled: ${err.message}`, undefined);
      } else {
        cb(`Something went wrong: ${err.message}`, undefined);
      }
      resolve(undefined);
    }
  });
}
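
// Sketch of the script-facing call (assuming this function is exposed to scripts as
// insomnia.sendRequest with the settings argument bound internally): the callback and the
// returned promise both resolve with the same Response, and errors arrive as the first
// callback argument.
insomnia.sendRequest('https://httpbin.org/get', (err, resp) => {
  if (err) {
    console.log('request failed:', err);
    return;
  }
  console.log('status:', resp?.code);
});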

function requestToCurlOptions(req: string | Request | RequestOptions, settings: Settings) {
  const id = uuidv4();
  const settingFollowRedirects: 'global' | 'on' | 'off' = settings.followRedirects ? 'on' : 'off';

  if (typeof req === 'string') {
    return {
      requestId: `pre-request-script-adhoc-req-simple:${id}`,
      req: {
        headers: [],
        method: 'GET',
        body: { mimeType: undefined }, // no body is set so its type is undefined
        authentication: fromPreRequestAuth(new RequestAuth({ type: 'noauth' })),
        settingFollowRedirects: settingFollowRedirects,
        settingRebuildPath: true,
        settingSendCookies: true,
        url: req,
        // currently cookies should be handled by user in headers
        cookieJar: {
          cookies: [],
        },
        cookies: [],
        suppressUserAgent: false,
      },
      finalUrl: req,
      settings,
      certificates: [],
      caCertficatePath: null,
      socketPath: undefined,
      authHeader: undefined, // TODO: add this for bearer and other auth methods
    };
  } else if (req instanceof Request || typeof req === 'object') {
    const finalReq = req instanceof Request ? req : new Request(req);

    let mimeType = 'application/octet-stream';
    if (finalReq.body) {
      switch (finalReq.body.mode) {
        case 'raw':
          mimeType = 'text/plain';
          break;
        case 'file':
          // TODO: improve this by sniffing
          mimeType = 'application/octet-stream';
          break;
        case 'formdata':
          // boundary should already be part of Content-Type header
          mimeType = 'multipart/form-data';
          break;
        case 'urlencoded':
          mimeType = 'application/x-www-form-urlencoded';
          break;
        case 'graphql':
          mimeType = 'application/json';
          break;
        default:
          throw Error(`unknown body mode: ${finalReq.body.mode}`);
      }
    }

    // const authHeaders = [];
    // const authObj = fromPreRequestAuth(finalReq.auth);
    // switch (authObj.type) {
    //   case 'apikey':
    //     if (authObj.in === 'header') {
    //       authHeaders.push({
    //         name: authObj.key,
    //         value: authObj.key,
    //       });
    //     }
    //   case 'bearer':
    //     authHeaders.push({
    //       name: 'Authorization',
    //       value: `Bearer ${authObj.token}`,
    //     });
    //   default:
    //     // TODO: support other methods
    // }

    const urlencodedParams = finalReq.body?.urlencoded?.all().map(param => ({ name: param.key, value: param.value }));
    const formdataParams = finalReq.body?.formdata?.all().map(param => ({
      type: param.type,
      name: param.key,
      value: param.type === 'file' ? '' : param.value,
      fileName: param.type === 'file' ? param.value : '',
    }));

    const params = finalReq.body?.mode === 'formdata' || finalReq.body?.mode === 'urlencoded'
      ? finalReq.body?.mode === 'formdata' ? formdataParams : urlencodedParams
      : [];

    return {
      requestId: finalReq.id || `pre-request-script-adhoc-req-custom:${id}`,
      req: {
        headers: finalReq.headers.map(header => ({ name: header.key, value: header.value }), {}),
        method: finalReq.method,
        body: {
          mimeType,
          method: finalReq.method,
          text: finalReq.body?.toString(),
          params,
          fileName: finalReq.body?.mode === 'file' ? finalReq.body?.toString() : undefined,
        },
        authentication: fromPreRequestAuth(finalReq.auth),
        settingFollowRedirects: settingFollowRedirects,
        settingRebuildPath: true,
        settingSendCookies: true,
        url: finalReq.url.toString(),
        // currently cookies should be handled by user in headers
        cookieJar: {
          cookies: [],
        },
        cookies: [],
        suppressUserAgent:
          finalReq.headers.map(h => h.key.toLowerCase() === 'user-agent' && h.disabled === true, {}).length > 0,
      },
      finalUrl: finalReq.url.toString(),
      settings,
      certificates: finalReq.certificate
        ? [{
          host: finalReq.certificate?.name || '',
          passphrase: finalReq.certificate?.passphrase || '',
          cert: finalReq.certificate?.cert?.src || '',
          key: finalReq.certificate?.key?.src || '',
          pfx: finalReq.certificate?.pfx?.src || '',
          // unused fields because they are not persisted
          disabled: false,
          isPrivate: false,
          _id: '',
          type: '',
          parentId: '',
          modified: 0,
          created: 0,
          name: '',
        }]
        : [],
      caCertficatePath: null, // the request in pre-request script doesn't support customizing ca yet
      socketPath: undefined,
      authHeader: undefined, // TODO: add this for bearer and other auth methods
    };
  }

  throw Error('the request type must be: string | Request | RequestOptions.');
}
|
||||
|
||||
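// For reference, the body-mode mapping above can be exercised on its own. This is a minimal,
// hypothetical sketch: `ScriptBody` below is a simplified stand-in, not the real script-side
// Request body type, and only illustrates how a body mode selects a mime type and how key/value
// pairs become `params`.
type ScriptBody =
  | { mode: 'raw'; raw: string }
  | { mode: 'urlencoded'; urlencoded: { key: string; value: string }[] }
  | { mode: 'formdata'; formdata: { key: string; value: string; type: 'text' | 'file' }[] };

function toMimeType(body: ScriptBody): string {
  switch (body.mode) {
    case 'raw':
      return 'text/plain';
    case 'urlencoded':
      return 'application/x-www-form-urlencoded';
    case 'formdata':
      // the multipart boundary travels in the Content-Type header itself
      return 'multipart/form-data';
  }
}

const exampleBody: ScriptBody = { mode: 'urlencoded', urlencoded: [{ key: 'a', value: '1' }] };
if (exampleBody.mode === 'urlencoded') {
  const exampleParams = exampleBody.urlencoded.map(p => ({ name: p.key, value: p.value }));
  console.log(toMimeType(exampleBody), exampleParams);
  // -> application/x-www-form-urlencoded [ { name: 'a', value: '1' } ]
}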
async function curlOutputToResponse(
  result: CurlRequestOutput,
  request: string | Request | RequestOptions,
): Promise<Response> {
  if (result.headerResults.length === 0) {
    throw Error('curlOutputToResponse: no header result is found');
  }
  if (result.patch.error) {
    throw result.patch.error;
  }

  const lastRedirect = result.headerResults[result.headerResults.length - 1];
  if (!lastRedirect) {
    throw Error('curlOutputToResponse: the lastRedirect is not defined');
  }

  const originalRequest =
    typeof request === 'string'
      ? new Request({ url: request, method: 'GET' })
      : request instanceof Request
        ? request
        : new Request(request);

  const headers = lastRedirect.headers.map((header: { name: string; value: string }) => ({
    key: header.name,
    value: header.value,
  }));

  const cookieHeaders = lastRedirect.headers.filter(header => {
    return header.name.toLowerCase() === 'set-cookie';
  });
  // TODO: tackle stream field but currently it is just a duplication of body
  const cookies = cookieHeaders
    .map(cookieHeader => {
      const cookieObj = Cookie.parse(cookieHeader.value || '', { loose: true });
      if (cookieObj) {
        return {
          key: cookieObj.key,
          value: cookieObj.value,
          expires: cookieObj.expires,
          maxAge: cookieObj.maxAge,
          domain: cookieObj.domain,
          path: cookieObj.path,
          secure: cookieObj.secure,
          httpOnly: cookieObj.httpOnly,
          hostOnly: cookieObj.hostOnly,
          // session: cookieObj.session, // not supported
          // extensions: cookieObj.extensions,
        };
      }

      return cookieObj;
    })
    .filter(cookieOpt => cookieOpt !== undefined);

  if (!result.responseBodyPath) {
    return new Response({
      code: lastRedirect.code,
      reason: lastRedirect.reason,
      header: headers,
      cookie: cookies as CookieOptions[],
      body: '',
      stream: undefined,
      responseTime: result.patch.elapsedTime,
      originalRequest,
    });
  }
  const nodejsReadCurlResponse = process.type === 'renderer' ? window.bridge.readCurlResponse : readCurlResponse;
  const bodyResult = await nodejsReadCurlResponse({
    bodyPath: result.responseBodyPath,
    bodyCompression: result.patch.bodyCompression,
  });
  if (bodyResult.error) {
    throw Error(bodyResult.error);
  }
  return new Response({
    code: lastRedirect.code,
    reason: lastRedirect.reason,
    header: headers,
    cookie: cookies as CookieOptions[],
    body: bodyResult.body,
    // stream is always undefined
    // because it is inaccurate to differentiate if body is binary
    stream: undefined,
    responseTime: result.patch.elapsedTime,
    originalRequest,
  });
}
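// Side note on the Set-Cookie handling above: assuming `Cookie` is tough-cookie's Cookie class
// (the `loose` parse option suggests it is), a single header can be parsed the same way in
// isolation. A failed parse yields undefined, which is why the map/filter pair above drops
// unparseable headers.
import { Cookie } from 'tough-cookie';

const parsed = Cookie.parse('sid=abc123; Path=/; HttpOnly; Max-Age=3600', { loose: true });
if (parsed) {
  console.log(parsed.key, parsed.value, parsed.path, parsed.httpOnly, parsed.maxAge);
  // -> sid abc123 / true 3600
}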
@@ -1,71 +1,63 @@
export async function test(msg: string, fn: () => Promise<void>, log: (testResult: RequestTestResult) => void) {
  const wrapFn = async () => {
    const started = performance.now();

    try {
      await fn();

      const executionTime = performance.now() - started;
      log({
        testCase: msg,
        status: 'passed',
        executionTime,
        category: 'unknown',
      });
    } catch (e) {
      const executionTime = performance.now() - started;
      log({
        testCase: msg,
        status: 'failed',
        executionTime,
        errorMessage: `error: ${e} | ACTUAL: ${e.actual} | EXPECTED: ${e.expected}`,
        category: 'unknown',
      });
    }
  };

  const testPromise = wrapFn();
  startTestObserver(testPromise);
  return testPromise;
}

let testPromises = new Array<Promise<void>>();
export async function waitForAllTestsDone() {
  await Promise.allSettled(testPromises);
  testPromises = [];
}
function startTestObserver(promise: Promise<void>) {
  testPromises.push(promise);
}

export async function skip(msg: string, _: () => Promise<void>, log: (testResult: RequestTestResult) => void) {
  log({
    testCase: msg,
    status: 'skipped',
    executionTime: 0,
    category: 'unknown',
  });
}

export type TestStatus = 'passed' | 'failed' | 'skipped';
export type TestCategory = 'unknown' | 'pre-request' | 'after-response';
export interface RequestTestResult {
  testCase: string;
  status: TestStatus;
  executionTime: number; // milliseconds
  errorMessage?: string;
  category: TestCategory;
}

export interface TestHandler {
  (msg: string, fn: () => Promise<void>): Promise<void>;
  skip?: (msg: string, fn: () => Promise<void>) => void;
}
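// Usage sketch for the helpers above, assuming they are in scope (for example inside the
// script runtime that wires insomnia.test); the `collect` logger is hypothetical and simply
// stands in for however results are aggregated. Intended for an async context where `await`
// is available.
const results: RequestTestResult[] = [];
const collect = (result: RequestTestResult) => results.push(result);

test('basic arithmetic still works', async () => {
  // any throwing assertion ends up in the catch branch and is logged as 'failed'
  if (1 + 1 !== 2) {
    throw new Error('math is broken');
  }
}, collect);

skip('not implemented yet', async () => {}, collect);

await waitForAllTestsDone();
console.log(results.map(r => `${r.status}: ${r.testCase}`));
// -> [ 'passed: basic arithmetic still works', 'skipped: not implemented yet' ]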
File diff suppressed because it is too large
@@ -1,6 +1,3 @@
export function checkIfUrlIncludesTag(url: string): boolean {
  return /{%/.test(`${url}`) || /%}/.test(`${url}`) || /{{/.test(`${url}`) || /}}/.test(`${url}`);
}
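// The check above only looks for template markers ({%, %}, {{, }}) anywhere in the string,
// so any Nunjucks-style tag or variable reference in the URL makes it return true:
checkIfUrlIncludesTag('https://{{ _.host }}/api');            // true
checkIfUrlIncludesTag("https://example.com/{% uuid 'v4' %}"); // true
checkIfUrlIncludesTag('https://example.com/plain/path');      // false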
@@ -2,68 +2,64 @@ import { unsupportedError } from './properties';
import { Property, PropertyList } from './properties';

export interface VariableDefinition {
  id?: string;
  key: string;
  name?: string;
  value: string;
  type?: string;
  disabled?: boolean;
}

export class Variable extends Property {
  key: string;
  value: any;
  type: string;
  override _kind = 'Variable';

  constructor(def?: VariableDefinition) {
    super();

    this.id = def ? def.id || '' : '';
    this.key = def ? def.key : '';
    this.name = def ? def.name : '';
    this.value = def ? def.value : '';
    this.type = def && def.type ? def.type : 'Variable';
    this.disabled = def ? def.disabled : false;
  }

  static override _index = 'key';

  get() {
    return this.value;
  }

  // unknown usage and unsupported
  static types() {
    throw unsupportedError('types');
  }

  // cast typecasts a value to the Variable.types of this Variable.
  cast(value: any) {
    if ('_kind' in value && value._kind === 'Variable') {
      return value.value;
    }
    return undefined;
  }

  set(value: any) {
    this.value = value;
  }
}

export class VariableList<T extends Variable> extends PropertyList<T> {
  override _kind = 'VariableList';

  constructor(parent: PropertyList<T> | undefined, populate: T[]) {
    super(Variable, undefined, populate);
    this.parent = parent;
  }

  static isVariableList(obj: any) {
    return '_kind' in obj && obj._kind === 'VariableList';
  }
}
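// A small usage sketch, valid only where the Variable and VariableList classes above are in scope:
const tokenVar = new Variable({ key: 'token', value: 'abc123' });
console.log(tokenVar.get());                                  // 'abc123'
tokenVar.set('xyz789');
console.log(tokenVar.get());                                  // 'xyz789'
console.log(tokenVar.cast({ _kind: 'Variable', value: 42 })); // 42

const variables = new VariableList(undefined, [tokenVar]);
console.log(VariableList.isVariableList(variables));          // true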
@@ -19,17 +19,8 @@
    "verbatimModuleSyntax": true,
    "jsx": "react",
    /* If your code runs in the DOM: */
    "lib": ["es2023", "dom", "dom.iterable"]
  },
  "include": ["../insomnia/types"],
  "exclude": ["**/__tests__", "node_modules"]
}
@@ -38,19 +38,24 @@ const pathLookup: Record<string, string | Record<string, string>> = {
};

let binaryPath: string;
const platformPath = pathLookup[process.platform];
if (typeof platformPath === 'string') {
  binaryPath = platformPath;
} else if (process.arch in platformPath) {
  binaryPath = platformPath[process.arch];
} else {
  throw new Error(`Cannot find binary path for ${process.platform} ${process.arch}`);
}

export const cwd = path.resolve(__dirname, '..', '..', 'insomnia');
const repoRoot = path.resolve(__dirname, '..', '..', '..');
const insomniaBinary = path.join(cwd, 'dist', binaryPath);
const electronBinary = path.join(
  repoRoot,
  'node_modules',
  '.bin',
  process.platform === 'win32' ? 'electron.cmd' : 'electron',
);

export const executablePath = bundleType() === 'package' ? insomniaBinary : electronBinary;
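// For orientation only: the entries of pathLookup sit outside this hunk, so the values below
// are hypothetical and just illustrate the declared shape -- a platform maps either to a single
// relative binary path or to a per-architecture map.
const pathLookupExample: Record<string, string | Record<string, string>> = {
  win32: 'win-unpacked/Insomnia.exe',                        // assumed path, for illustration
  darwin: {
    x64: 'mac/Insomnia.app/Contents/MacOS/Insomnia',         // assumed path
    arm64: 'mac-arm64/Insomnia.app/Contents/MacOS/Insomnia', // assumed path
  },
};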
@@ -2,13 +2,7 @@
import { ElectronApplication, test as baseTest, TraceMode } from '@playwright/test';
import path from 'path';

import { bundleType, cwd, executablePath, mainPath, randomDataPath } from './paths';

// Throw an error if the condition fails
// > Not providing an inline default argument for message as the result is smaller
@@ -97,7 +91,7 @@ export const test = baseTest.extend<{
      INSOMNIA_VAULT_KEY: userConfig.vaultKey || '',
      INSOMNIA_VAULT_SALT: userConfig.vaultSalt || '',
      INSOMNIA_VAULT_SRP_SECRET: userConfig.vaultSrpSecret || '',
      ...(userConfig.session ? { INSOMNIA_SESSION: JSON.stringify(userConfig.session) } : {}),
    };

    const electronApp = await playwright._electron.launch({
@@ -113,11 +107,15 @@ export const test = baseTest.extend<{

    const appContext = electronApp.context();

    const traceMode: TraceMode = typeof trace === 'string' ? (trace as TraceMode) : trace.mode;

    const defaultTraceOptions = { screenshots: true, snapshots: true, sources: true };
    const traceOptions =
      typeof trace === 'string' ? defaultTraceOptions : { ...defaultTraceOptions, ...trace, mode: undefined };
    const captureTrace =
      traceMode === 'on' ||
      traceMode === 'retain-on-failure' ||
      (traceMode === 'on-first-retry' && testInfo.retry === 1);

    if (captureTrace) {
      await appContext.tracing.start(traceOptions);
@@ -132,7 +130,11 @@ export const test = baseTest.extend<{
    } finally {
      // set testFailed to true if the test timed out or failed
      testFailed = testFailed || testInfo.status === 'timedOut' || testInfo.status === 'failed';
      if (
        traceMode === 'on' ||
        (traceMode === 'retain-on-failure' && testFailed) ||
        (traceMode === 'on-first-retry' && testInfo.retry === 1)
      ) {
        // Use a different name rather than the default trace.zip to avoid overwriting the trace.
        // Refer: https://github.com/microsoft/playwright/issues/35005
        await appContext.tracing.stop({
@@ -153,46 +155,46 @@ export const test = baseTest.extend<{

    await use(page);
  },
  dataPath: async ({}, use) => {
    const insomniaDataPath = randomDataPath();

    await use(insomniaDataPath);
  },
  userConfig: async ({}, use) => {
    await use({
      skipOnboarding: true,
      publicKey: 'txb/w8DASTpPQqeHE/hpI3ABKzit+pv5n2We5dbtYRo=',
      secretKey: 'Tb1QKsI3wVZxhS8TuQESHB2x7f68PzeTzTMmLpnnFVU=',
code: 'BTxpIfgXY1VgUpoPpqA25RkCPGQ2MAkZsaY6IZ0bamd0WsYQlJM6iy8PV9hEHS1Gk96SBC6%2BM%2FGhv8IaVl1N6V5wdghHwU2sGKGkW%2Fevx1HiqAUsAqIry8aWRqAkc0n3KmW%2B%2F8lyeHCpy5jhsXqMMqXMbZh8dN1q%2ByRe2C6MJS1A706KbPUhI7PRi%2FsmK0TcNT7lgBKKHRVzPTvjpLcjgzSJFL4K%2BEzgY9Ue4gh0gPw89sM9dV%2F2sAlpw0LA7rF06NyoPhA%3D',
      session: {
        id: 'sess_64a477e6b59d43a5a607f84b4f73e3ce',
        // Expire in 2077
        sessionExpiry: new Date(2147483647000),
        publicKey: {
          alg: 'RSA-OAEP-256',
          e: 'AQAB',
          ext: true,
          key_ops: ['encrypt'],
          kty: 'RSA',
n: 'pTQVaUaiqggIldSKm6ib6eFRLLoGj9W-2O4gTbiorR-2b8-ZmKUwQ0F-jgYX71AjYaFn5VjOHOHSP6byNAjN7WzJ6A_Z3tytNraLoZfwK8KdfflOCZiZzQeD3nO8BNgh_zEgCHStU61b6N6bSpCKjbyPkmZcOkJfsz0LJMAxrXvFB-I42WYA2vJKReTJKXeYx4d6L_XGNIoYtmGZit8FldT4AucfQUXgdlKvr4_OZmt6hgjwt_Pjcu-_jO7m589mMWMebfUhjte3Lp1jps0MqTOvgRb0FQf5eoBHnL01OZjvFPDKeqlvoz7II9wFNHIKzSvgAKnyemh6DiyPuIukyQ',
        },
        encPrivateKey: {
          iv: '3a1f2bdb8acbf15f469d57a2',
          t: '904d6b1bc0ece8e5df6fefb9efefda7c',
d: '2a7b0c4beb773fa3e3c2158f0bfa654a88c4041184c3b1e01b4ddd2da2c647244a0d66d258b6abb6a9385251bf5d79e6b03ef35bdfafcb400547f8f88adb8bceb7020f2d873d5a74fb5fc561e7bd67cea0a37c49107bf5c96631374dc44ddb1e4a8b5688dc6560fc6143294ed92c3ad8e1696395dfdf15975aa67b9212366dbfcb31191e4f4fe3559c89a92fb1f0f1cc6cbf90d8a062307fce6e7701f6f5169d9247c56dae79b55fba1e10fde562b971ca708c9a4d87e6e9d9e890b88fa0480360420e610c4e41459570e52ae72f349eadf84fc0a68153722de3280becf8a1762e7faebe964f0ad706991c521feda3440d3e1b22f2c221a80490359879bd47c0d059ace81213c74a1e192dbebd8a80cf58c9eb1fe461a971b88d3899baf4c4ef7141623c93fb4a54758f5e1cf9ee35cd00777fa89b24e4ded57219e770de2670619c6e971935c61ae72e3276cf8db49dfa0e91c68222f02d7e0c69b399af505de7e5a90852d83e0a30934b0362db986f3aaefaaf1a96fef3e8165287a3a7f0ee1e072d9dee3aefb86194e1d877d6b34529d45a70ec4573c35a7fe27833c77c3154b0ad02187e4fcecd408bcf4b29a85a5dc358cb479140f4983fcd936141f581764669651530af97d2b7d9416aea7de67e787f3e29ae3eba6672bcd934dc1e308783aa63a4ab46d48d213cf53ad6bd8828011f5bfa3aa5ee24551c694e829b54c93b1dda6c3ddda04756d68a28bec8d044c8af4147680dc5b972d0ca74299b0ab6306b9e7b99bf0557558df120455a272145b7aa792654730f3d670b76d72408f5ce1cf5fbd453d2903fa72cf26397437854ba8abbb731a8107f6a86a01fa98edc81bb42a4c1330f779e7a0fbd1820eaed78e03e40a996e03884b707556be06fd14ee8f4035469210d1d2bb8f58285fc2ab6de3d3cc0e4e1f40c6d9d24b50dc8e2e2374a0aff52031b3736c2982133bb19dd551ce1f953f4ba02b0cf53382c15752e202c138cb42b2322df103ff17fd886dfd5f992b711673cdf16048c4bff19038138b161c2e1783b85fc7b965a91ac4795fcbfebf827940cacdeae57946863aee027df43b36612f3cb8f34dc44396e87c564bf10f5b1a9dfbd6da3d7f4f65024b0b4f8ce51d01c230840941fc4523b17eb1c2522032f410e8328239a11a15ab755c32945ce52966d5bfb4666909ed2ca04d536e4bf92091563dd44d46cbb35e53c2481400058ab3b52a0280d262551073f61db125ee280e2cc1ec0bdf9c4817824261465011e34c2296411384f7f5e16742157c5520f137631edf498aa39c7c32b107e3634cbeb70feea19a233c8bd939d665135c9f7c1bb33cb47edc58bdbbcde9b0b9eb73a46642e4639289a62638fb7813e1eeaadd105c803de8357236f33c4bcf31a876b5867591af8f165eba0b35cf0b0886af17dab35a6a39f8f576387d6ffb9e677ee46fc0f11ff069a2a068fce441ff8f4125095fad228c2bf45c788d641941ed13c0a16fffcafd7c7eff11bb7550c0b7d54eebdbd2066e3bbdb47aaee2b5f1e499726324a40015458c7de1db0abe872594d8e6802deff7ea9518bdb3a3e46f07139267fd67dc570ba8ab04c2b37ce6a34ec73b802c7052a2eef0cae1b0979322ef86395535db80cf2a9a88aa7c2e5cc28a93612a8dafe1982f741d7cec28a866f6c09dba5b99ead24c3df0ca03c6c5afae41f3d39608a8f49b0d6a0b541a159409791c25ede103eb4f79cfbd0cc9c9aa6b591755c1e9fd07b5b9e38ed85b5939e65d127256f6a4c078f8c9d655c4f072f9cbcfb2e1e17eaa83dc62aaab2a6dc3735ee76ce7a215740f795f1fbe7136c7734ae3714438015e8fc383d63775a8abddb23cbc5f906c046bb0b5b31d492a7c151b40ea82c7c966e25820641c55b343b89d6378f90de5983fa76547e9d6c634effdf019a0fd9b6d3e488a5aa94f0710d517ba4f7c1ed82f9f3072612e953e036c0ec7f3c618368362f6da6f3af76056a66aef914805cc8b628f1c11695f760b535ded9ff66727273ae7e12d67a01243d75f22fec8ed1b043122a211c923aa92ecbbe01dd0d7195c3c0e09a2a6ab3eca354963122d5a0ec16e2b2b81b0ddce6ec0a312c492a96a4fd392f1deb6a1f3318541a3f87e5c9e73ee7edd3b855910f412789e25038108e1eaae04dcfb02b4d958c00c630dc8caa87a40798ce7156d2ade882e68832d39fe8f9bce6a995249a7383013a5093c4af55c3b7232de0f2593d82c30b8dabd0784455037f25f6bb66a6d0d8f72bc7be0dee2d0a8af44bb4e143257d873268d331722c3253ea5c004e72daf04c875e2054f2b4b2bca2979fd046a1e835600045edf2f159d851a540a91a1ab8fbcb64594d21942bbaa2160535d32496ba7ce4a76c6bdeb9bb4c5cab7bed1ae26564058d0be125803d7019b83b3953c4b0cc1f8299c4edcf6a5faa4765092412d368b277689900e71fb5d47581057adaa2dd494e0f66dc1aa16f3741973b0d9ffa1728aeafab84b
777394a7afae0f8eabaa6b740f1c60ca26469f0c9356ec880ad6f4dc01b99bd14d7a4bb8afc97662a9e68b0155e4cdf3caa3402819ac6ce562c8fe06edb50a31cfd7a',
          ad: '',
        },
        symmetricKey: {
          alg: 'A256GCM',
          ext: true,
          k: 'w62OJNWF4G8iWA8ZrTpModiY8dICyHI7ko1vMLb877g=',
          key_ops: ['encrypt', 'decrypt'],
          kty: 'oct',
        },
        email: 'insomnia-user@konghq.com',
        accountId: 'acct_64a477e6b59d43a5a607f84b4f73e3ce',
        firstName: 'Rick',
        lastName: 'Morty',
      },
    });
  },
@@ -15,8 +15,5 @@ export const basicAuthRouter = express.Router();
basicAuthRouter.use(basicAuth({ users }));

basicAuthRouter.get('/', (_, res) => {
  res.status(200).header('content-type', 'text/plain').send('basic auth received');
});
@@ -36,13 +36,13 @@ export default (app: Application) => {

  app.post('/v1/oauth/github', (_req, res) => {
    res.status(200).send({
      access_token: '123456789',
    });
  });

  app.post('/v1/oauth/github-app', (_req, res) => {
    res.status(200).send({
      access_token: '123456789',
    });
  });
};
@@ -3,11 +3,11 @@ import type { Application } from 'express';
export default (app: Application) => {
  app.post('/gitlab-api/api/graphql', (_req, res) => {
    res.status(200).send({
      data: {
        currentUser: {
          publicEmail: null,
          name: 'Mark Kim',
          avatarUrl: null,
        },
      },
    });
@@ -22,18 +22,18 @@ export default (app: Application) => {

  app.post('/gitlab-api/oauth/token', (_req, res) => {
    res.status(200).send({
      access_token: '123456789',
      created_at: 1652246628,
      expires_in: 6955,
      refresh_token: '1234567891',
      scope: 'api read_user write_repository read_repository email',
      token_type: 'Bearer',
    });
  });

  app.post('/gitlab-api/oauth/authorize', (_req, res) => {
    res.status(200).send({
      access_token: '123456789',
    });
  });
};
@@ -1,4 +1,11 @@
import {
  GraphQLEnumType,
  GraphQLInputObjectType,
  GraphQLInt,
  GraphQLObjectType,
  GraphQLSchema,
  GraphQLString,
} from 'graphql';

const TypeVars = new GraphQLObjectType({
  name: 'Vars',
@@ -47,14 +54,14 @@ export const schema = new GraphQLSchema({
    echoNum: {
      type: GraphQLInt,
      args: {
        intVar: { type: GraphQLInt },
      },
      resolve: () => 777,
    },
    echoVars: {
      type: TypeVars,
      args: {
        vars: { type: InputVars },
      },
      resolve: vars => vars,
    },
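// A quick way to exercise the schema above. The resolver for echoNum ignores its argument and
// always returns 777; the endpoint URL below is an assumption, since the Express wiring for
// this schema sits outside this hunk.
async function queryEchoNum() {
  const response = await fetch('http://localhost:4010/graphql', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ query: '{ echoNum(intVar: 3) }' }),
  });
  console.log(await response.json()); // expected shape: { data: { echoNum: 777 } }
}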
@@ -9,15 +9,13 @@ import fs from 'fs';
import path from 'path';

const PROTO_PATH = path.resolve('../../packages/insomnia/src/network/grpc/__fixtures__/library/route_guide.proto');
const packageDefinition = protoLoader.loadSync(PROTO_PATH, {
  keepCase: true,
  longs: String,
  enums: String,
  defaults: true,
  oneofs: true,
});

const routeguide = grpc.loadPackageDefinition(packageDefinition).routeguide;
@@ -38,8 +36,7 @@ function checkFeature(point: { latitude: any; longitude: any }) {
  // Check if there is already a feature object for the given point
  for (let i = 0; i < featureList.length; i++) {
    feature = featureList[i];
    if (feature.location.latitude === point.latitude && feature.location.longitude === point.longitude) {
      return feature;
    }
  }
@@ -71,14 +68,16 @@ const listFeatures: HandleCall<any, any> = (call: any) => {
  const top = Math.max(lo.latitude, hi.latitude);
  const bottom = Math.min(lo.latitude, hi.latitude);
  // For each feature, check if it is in the given bounding box
  featureList.forEach(function (feature) {
    if (feature.name === '') {
      return;
    }
    if (
      feature.location.longitude >= left &&
      feature.location.longitude <= right &&
      feature.location.latitude >= bottom &&
      feature.location.latitude <= top
    ) {
      call.write(feature);
    }
  });
@@ -94,9 +93,9 @@ const listFeatures: HandleCall<any, any> = (call: any) => {
 */
function getDistance(start: { latitude: number; longitude: number }, end: { latitude: number; longitude: number }) {
  function toRadians(num: number) {
    return (num * Math.PI) / 180;
  }
  const R = 6371000; // earth radius in metres
  const lat1 = toRadians(start.latitude / COORD_FACTOR);
  const lat2 = toRadians(end.latitude / COORD_FACTOR);
  const lon1 = toRadians(start.longitude / COORD_FACTOR);
@@ -104,9 +103,9 @@ function getDistance(start: { latitude: number; longitude: number }, end: { lati

  const deltalat = lat2 - lat1;
  const deltalon = lon2 - lon1;
  const a =
    Math.sin(deltalat / 2) * Math.sin(deltalat / 2) +
    Math.cos(lat1) * Math.cos(lat2) * Math.sin(deltalon / 2) * Math.sin(deltalon / 2);
  const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
  return R * c;
}
@@ -121,7 +120,7 @@ const recordRoute: HandleCall<any, any> = (call: any, callback: any) => {
  let previous: { latitude: number; longitude: number } | null = null;
  // Start a timer
  const startTime = process.hrtime();
  call.on('data', function (point: any) {
    pointCount += 1;
    if (checkFeature(point).name !== '') {
      featureCount += 1;
@@ -133,7 +132,7 @@ const recordRoute: HandleCall<any, any> = (call: any, callback: any) => {
    }
    previous = point;
  });
  call.on('end', function () {
    callback(null, {
      point_count: pointCount,
      feature_count: featureCount,
@@ -162,12 +161,12 @@ function pointKey(point: { latitude: string; longitude: string }) {
 * @param {Duplex} call The stream for incoming and outgoing messages
 */
const routeChat: HandleCall<any, any> = (call: any) => {
  call.on('data', function (note: any) {
    const key = pointKey(note.location);
    /* For each note sent, respond with all previous notes that correspond to
     * the same point */
    if (routeNotes.hasOwnProperty(key)) {
      routeNotes[key].forEach(function (note: any) {
        call.write(note);
      });
    } else {
@@ -176,7 +175,7 @@ const routeChat: HandleCall<any, any> = (call: any) => {
    // Then add the new note to the list
    routeNotes[key].push(JSON.parse(JSON.stringify(note)));
  });
  call.on('end', function () {
    call.end();
  });
};
@@ -204,7 +203,7 @@ export const startGRPCServer = (port: number) => {
  }

  const dbPath = '../../packages/insomnia/src/network/grpc/__fixtures__/library/route_guide_db.json';
  fs.readFile(path.resolve(dbPath), function (err, data) {
    if (err) {
      throw err;
    }
@@ -238,7 +237,7 @@ export const startGRPCServer = (port: number) => {
        private_key: serverKey,
      },
    ],
    true, // mTLS enabled, temporarily change to false for local testing if needed
  );
  serverWithTLS.bindAsync('localhost:50052', serverCredentials, error => {
    if (error) {
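// Rough usage sketch for getDistance above: the route_guide fixtures store coordinates as
// integers scaled by COORD_FACTOR (1e7 in the upstream gRPC route_guide sample, assumed here),
// so 40.7128 degrees north becomes 407128000.
const newYork = { latitude: 407128000, longitude: -740060000 };
const boston = { latitude: 423601000, longitude: -710589000 };
console.log(Math.round(getDistance(newYork, boston))); // roughly 306000 metres (~306 km)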
@@ -46,7 +46,7 @@ async function echoHandler(req: any, res: any) {
    data: req.body.toString(),
    cookies: req.cookies,
  });
}

app.get('/echo', rawParser, echoHandler);
app.post('/echo', rawParser, echoHandler);
@@ -123,20 +123,27 @@ app.post('/send-event', (request, response) => {
  response.json({ success: true });
});

startWebSocketServer(
  app.listen(port, () => {
    console.log(`Listening at http://localhost:${port}`);
    console.log(`Listening at ws://localhost:${port}`);
  }),
);

startWebSocketServer(
  createServer(
    {
      cert: readFileSync(join(__dirname, '../fixtures/certificates/localhost.pem')),
      key: readFileSync(join(__dirname, '../fixtures/certificates/localhost-key.pem')),
      ca: readFileSync(join(__dirname, '../fixtures/certificates/rootCA.pem')),
      requestCert: true,
      rejectUnauthorized: false,
    },
    app,
  ).listen(httpsPort, () => {
    console.log(`Listening at https://localhost:${httpsPort}`);
    console.log(`Listening at wss://localhost:${httpsPort}`);
  }),
);

startGRPCServer(grpcPort);
@@ -14,47 +14,49 @@ const currentPlan = {
  type: 'team',
};

const projectsByOrgId = new Map(
  Object.entries({
    'org_7ef19d06-5a24-47ca-bc81-3dea011edec2': [
      {
        id: 'proj_org_7ef19d06-5a24-47ca-bc81-3dea011edec2',
        name: 'Personal Workspace',
      },
    ],
    'team_195a6ce0edb1427eb2e8ba7b986072e4': [
      {
        id: 'proj_team_195a6ce0edb1427eb2e8ba7b986072e4',
        name: 'Personal Workspace',
      },
    ],
  }),
);

const organizations = [
  // Personal organization
  {
    id: 'org_7ef19d06-5a24-47ca-bc81-3dea011edec2',
    name: 'feb56ab4b19347c4b648c99bfa7db363',
    display_name: 'Personal workspace',
    branding: {
      logo_url: '',
    },
    metadata: {
      organizationType: 'personal',
      ownerAccountId: 'acct_64a477e6b59d43a5a607f84b4f73e3ce',
    },
  },
  // Team Organization: Was a team before the migration now looks like this:
  // Teams migrated to Organizations have an id that starts with team_ and the team id is the same as the organization id
  {
    id: 'team_195a6ce0edb1427eb2e8ba7b986072e4',
    name: '07df6d95b60e4593af0424c74d96637a-team',
    display_name: '🦄 Magic',
    branding: {
      logo_url: '',
    },
    metadata: {
      organizationType: 'team',
      ownerAccountId: 'acct_64a477e6b59d43a5a607f84b4f73e3ce',
    },
  },
];

@@ -68,72 +70,72 @@ const organizationFeatures = {
};
const user = {
  id: 'email|64f0dd619ab0786da330d83a',
  email: 'insomnia-user@konghq.com',
  name: 'Rick Morty',
  picture: '',
  bio: 'My BIO',
  github: '',
  linkedin: '',
  twitter: '',
  identities: null,
  given_name: '',
  family_name: '',
};

const whoami = {
  sessionExpiry: 4838400,
  publicKey: {
    alg: 'RSA-OAEP-256',
    e: 'AQAB',
    ext: true,
    key_ops: ['encrypt'],
    kty: 'RSA',
n: 'pTQVaUaiqggIldSKm6ib6eFRLLoGj9W-2O4gTbiorR-2b8-ZmKUwQ0F-jgYX71AjYaFn5VjOHOHSP6byNAjN7WzJ6A_Z3tytNraLoZfwK8KdfflOCZiZzQeD3nO8BNgh_zEgCHStU61b6N6bSpCKjbyPkmZcOkJfsz0LJMAxrXvFB-I42WYA2vJKReTJKXeYx4d6L_XGNIoYtmGZit8FldT4AucfQUXgdlKvr4_OZmt6hgjwt_Pjcu-_jO7m589mMWMebfUhjte3Lp1jps0MqTOvgRb0FQf5eoBHnL01OZjvFPDKeqlvoz7II9wFNHIKzSvgAKnyemh6DiyPuIukyQ',
  },
  encPrivateKey: {
    iv: '3a1f2bdb8acbf15f469d57a2',
    t: '904d6b1bc0ece8e5df6fefb9efefda7c',
d: '2a7b0c4beb773fa3e3c2158f0bfa654a88c4041184c3b1e01b4ddd2da2c647244a0d66d258b6abb6a9385251bf5d79e6b03ef35bdfafcb400547f8f88adb8bceb7020f2d873d5a74fb5fc561e7bd67cea0a37c49107bf5c96631374dc44ddb1e4a8b5688dc6560fc6143294ed92c3ad8e1696395dfdf15975aa67b9212366dbfcb31191e4f4fe3559c89a92fb1f0f1cc6cbf90d8a062307fce6e7701f6f5169d9247c56dae79b55fba1e10fde562b971ca708c9a4d87e6e9d9e890b88fa0480360420e610c4e41459570e52ae72f349eadf84fc0a68153722de3280becf8a1762e7faebe964f0ad706991c521feda3440d3e1b22f2c221a80490359879bd47c0d059ace81213c74a1e192dbebd8a80cf58c9eb1fe461a971b88d3899baf4c4ef7141623c93fb4a54758f5e1cf9ee35cd00777fa89b24e4ded57219e770de2670619c6e971935c61ae72e3276cf8db49dfa0e91c68222f02d7e0c69b399af505de7e5a90852d83e0a30934b0362db986f3aaefaaf1a96fef3e8165287a3a7f0ee1e072d9dee3aefb86194e1d877d6b34529d45a70ec4573c35a7fe27833c77c3154b0ad02187e4fcecd408bcf4b29a85a5dc358cb479140f4983fcd936141f581764669651530af97d2b7d9416aea7de67e787f3e29ae3eba6672bcd934dc1e308783aa63a4ab46d48d213cf53ad6bd8828011f5bfa3aa5ee24551c694e829b54c93b1dda6c3ddda04756d68a28bec8d044c8af4147680dc5b972d0ca74299b0ab6306b9e7b99bf0557558df120455a272145b7aa792654730f3d670b76d72408f5ce1cf5fbd453d2903fa72cf26397437854ba8abbb731a8107f6a86a01fa98edc81bb42a4c1330f779e7a0fbd1820eaed78e03e40a996e03884b707556be06fd14ee8f4035469210d1d2bb8f58285fc2ab6de3d3cc0e4e1f40c6d9d24b50dc8e2e2374a0aff52031b3736c2982133bb19dd551ce1f953f4ba02b0cf53382c15752e202c138cb42b2322df103ff17fd886dfd5f992b711673cdf16048c4bff19038138b161c2e1783b85fc7b965a91ac4795fcbfebf827940cacdeae57946863aee027df43b36612f3cb8f34dc44396e87c564bf10f5b1a9dfbd6da3d7f4f65024b0b4f8ce51d01c230840941fc4523b17eb1c2522032f410e8328239a11a15ab755c32945ce52966d5bfb4666909ed2ca04d536e4bf92091563dd44d46cbb35e53c2481400058ab3b52a0280d262551073f61db125ee280e2cc1ec0bdf9c4817824261465011e34c2296411384f7f5e16742157c5520f137631edf498aa39c7c32b107e3634cbeb70feea19a233c8bd939d665135c9f7c1bb33cb47edc58bdbbcde9b0b9eb73a46642e4639289a62638fb7813e1eeaadd105c803de8357236f33c4bcf31a876b5867591af8f165eba0b35cf0b0886af17dab35a6a39f8f576387d6ffb9e677ee46fc0f11ff069a2a068fce441ff8f4125095fad228c2bf45c788d641941ed13c0a16fffcafd7c7eff11bb7550c0b7d54eebdbd2066e3bbdb47aaee2b5f1e499726324a40015458c7de1db0abe872594d8e6802deff7ea9518bdb3a3e46f07139267fd67dc570ba8ab04c2b37ce6a34ec73b802c7052a2eef0cae1b0979322ef86395535db80cf2a9a88aa7c2e5cc28a93612a8dafe1982f741d7cec28a866f6c09dba5b99ead24c3df0ca03c6c5afae41f3d39608a8f49b0d6a0b541a159409791c25ede103eb4f79cfbd0cc9c9aa6b591755c1e9fd07b5b9e38ed85b5939e65d127256f6a4c078f8c9d655c4f072f9cbcfb2e1e17eaa83dc62aaab2a6dc3735ee76ce7a215740f795f1fbe7136c7734ae3714438015e8fc383d63775a8abddb23cbc5f906c046bb0b5b31d492a7c151b40ea82c7c966e25820641c55b343b89d6378f90de5983fa76547e9d6c634effdf019a0fd9b6d3e488a5aa94f0710d517ba4f7c1ed82f9f3072612e953e036c0ec7f3c618368362f6da6f3af76056a66aef914805cc8b628f1c11695f760b535ded9ff66727273ae7e12d67a01243d75f22fec8ed1b043122a211c923aa92ecbbe01dd0d7195c3c0e09a2a6ab3eca354963122d5a0ec16e2b2b81b0ddce6ec0a312c492a96a4fd392f1deb6a1f3318541a3f87e5c9e73ee7edd3b855910f412789e25038108e1eaae04dcfb02b4d958c00c630dc8caa87a40798ce7156d2ade882e68832d39fe8f9bce6a995249a7383013a5093c4af55c3b7232de0f2593d82c30b8dabd0784455037f25f6bb66a6d0d8f72bc7be0dee2d0a8af44bb4e143257d873268d331722c3253ea5c004e72daf04c875e2054f2b4b2bca2979fd046a1e835600045edf2f159d851a540a91a1ab8fbcb64594d21942bbaa2160535d32496ba7ce4a76c6bdeb9bb4c5cab7bed1ae26564058d0be125803d7019b83b3953c4b0cc1f8299c4edcf6a5faa4765092412d368b277689900e71fb5d47581057adaa2dd494e0f66dc1aa16f3741973b0d9ffa1728aeafab84b
777394a7afae0f8eabaa6b740f1c60ca26469f0c9356ec880ad6f4dc01b99bd14d7a4bb8afc97662a9e68b0155e4cdf3caa3402819ac6ce562c8fe06edb50a31cfd7a',
    ad: '',
  },
  symmetricKey: {
    alg: 'A256GCM',
    ext: true,
    k: 'w62OJNWF4G8iWA8ZrTpModiY8dICyHI7ko1vMLb877g=',
    key_ops: ['encrypt', 'decrypt'],
    kty: 'oct',
  },
  email: 'insomnia-user@konghq.com',
  accountId: 'acct_64a477e6b59d43a5a607f84b4f73e3ce',
  firstName: 'Rick',
  lastName: 'Morty',
};

const userVerifyA = {
  sessionStarterId: 'strt_157355638c2c41d19f2ebc366b2a18e6',
srpB: '4e6480c057c206e68ddf3d2e6dba06f1a27d528af2b15c01058da1d37f0d4bfe70cfb0147d8c2092fb9d276f46a114ea3ad9ef2564c2c4ca39e2b82387b0c9ca236717a719dd793feda4392bf7c928ff0728f8a8abe89d6be29d8dc7ab285e12ab4e04e7cb309ddf585c97eb15e7181a96f4ecb73bd0cf3c476a2f9869e3f907107a6419bbc76990a761f1e7b073dffe9c295e06324b41740bde2627221f135c3ef3cb49851d30370c5d9d96d47decc849740a9bc4e1c2c1169dcbd2bd1213a5f87310332c343c1340644d172e01f2e21c71184d69c48faf6eb7f4cac5f56a68747b202314f7f05dc24c728f2bef0d845d3b7bc1b381d5871c8fda153334e353',
};

const userVerifyM1 = {
  srpM2: 'f0f748c3bdc4dc3f1404b74cebd10d0c7ce20c6107a0ce7f5888c04219f1a594',
};

const allRoles = [
  {
    id: 'role_d32b9d6c-1fb2-4ac1-b780-b4c15074d6cb',
    name: 'owner',
    description: 'Owner can manage the organization and also delete it.',
  },
  {
    id: 'role_013aeb67-15c9-42c5-bcd0-5c70a33f8719',
    name: 'admin',
    description: 'Admin can only manage the organization.',
  },
  {
    id: 'role_3fbb17e4-249c-47d5-a5ee-b7f1f43a9c63',
    name: 'member',
    description: 'Member cannot manage the organization.',
  },
];

@@ -163,105 +165,101 @@ const userPermissions = {
};

const orgInfo = {
  id: 'org_3d314c35-b9ca-4aec-b57d-04cea38da05c',
  name: 'Sync',
  display_name: 'Sync',
  branding: {
    logo_url: 'https://d2evto68nv31gd.cloudfront.net/org_98e187f8-a753-4abf-b0b2-58cdb852eba6',
  },
  metadata: {
    organizationType: 'team',
    ownerAccountId: 'acct_e9cf786dc67b4dbc8c002359b3cc3d70',
  },
};

const currentRole = {
  roleId: 'role_d32b9d6c-1fb2-4ac1-b780-b4c15074d6cb',
  name: 'owner',
  description: 'Owner can manage the organization and also delete it.',
};

const storageRule = {
  enableCloudSync: true,
  enableGitSync: true,
  enableLocalVault: true,
  isOverridden: false,
};

const members = {
  start: 0,
  limit: 0,
  length: 0,
  total: 2,
  next: '',
  members: [
    {
      user_id: 'acct_e9cf786dc67b4dbc8c002359b3cc3d70',
      picture:
        'https://s.gravatar.com/avatar/5301bf735ebace330bb801abb593dc78?s=480\u0026r=pg\u0026d=https%3A%2F%2Fcdn.auth0.com%2Favatars%2Fwe.png',
      name: 'wei.yao+2@konghq.com ',
      email: 'wei.yao+2@konghq.com',
      role_name: 'owner',
      created: '2024-08-28T07:02:04.341983Z',
    },
    {
      user_id: 'acct_f883f98dbb9945fba7bb23925361e02a',
      picture:
        'https://s.gravatar.com/avatar/fe822a9c78b8154da82635055895e6e6?s=480\u0026r=pg\u0026d=https%3A%2F%2Fcdn.auth0.com%2Favatars%2Fwe.png',
      name: 'wei.yao+3@konghq.com ',
      email: 'wei.yao+3@konghq.com',
      role_name: 'member',
      created: '2024-09-12T11:40:43.168144Z',
    },
  ],
};

const invites = {
  start: 0,
  limit: 3,
|
||||
'length': 0,
|
||||
'total': 3,
|
||||
'next': '',
|
||||
'invitations': [
|
||||
start: 0,
|
||||
limit: 3,
|
||||
length: 0,
|
||||
total: 3,
|
||||
next: '',
|
||||
invitations: [
|
||||
{
|
||||
'id': 'uinv_1dmvK1rTehbiBV85',
|
||||
'inviter': {
|
||||
'name': 'wei.yao+2@konghq.com ',
|
||||
id: 'uinv_1dmvK1rTehbiBV85',
|
||||
inviter: {
|
||||
name: 'wei.yao+2@konghq.com ',
|
||||
},
|
||||
'invitee': {
|
||||
'email': 'wei.yao@konghq.com',
|
||||
invitee: {
|
||||
email: 'wei.yao@konghq.com',
|
||||
},
|
||||
'created_at': '2024-09-14T10:16:10.513Z',
|
||||
'expires_at': '2024-09-21T10:16:10.513Z',
|
||||
'roles': [
|
||||
'member',
|
||||
],
|
||||
created_at: '2024-09-14T10:16:10.513Z',
|
||||
expires_at: '2024-09-21T10:16:10.513Z',
|
||||
roles: ['member'],
|
||||
},
|
||||
{
|
||||
'id': 'uinv_T9uaMMeoRQQVKF2E',
|
||||
'inviter': {
|
||||
'name': 'wei.yao+2@konghq.com ',
|
||||
id: 'uinv_T9uaMMeoRQQVKF2E',
|
||||
inviter: {
|
||||
name: 'wei.yao+2@konghq.com ',
|
||||
},
|
||||
'invitee': {
|
||||
'email': 'wei.yao+6@konghq.com',
|
||||
invitee: {
|
||||
email: 'wei.yao+6@konghq.com',
|
||||
},
|
||||
'created_at': '2024-09-12T10:33:45.320Z',
|
||||
'expires_at': '2024-09-19T10:33:45.320Z',
|
||||
'roles': [
|
||||
'member',
|
||||
],
|
||||
created_at: '2024-09-12T10:33:45.320Z',
|
||||
expires_at: '2024-09-19T10:33:45.320Z',
|
||||
roles: ['member'],
|
||||
},
|
||||
{
|
||||
'id': 'uinv_TIYVQQC2aH7Ev5hW',
|
||||
'inviter': {
|
||||
'name': 'wei.yao+2@konghq.com ',
|
||||
id: 'uinv_TIYVQQC2aH7Ev5hW',
|
||||
inviter: {
|
||||
name: 'wei.yao+2@konghq.com ',
|
||||
},
|
||||
'invitee': {
|
||||
'email': 'wei.yao+4@konghq.com',
|
||||
invitee: {
|
||||
email: 'wei.yao+4@konghq.com',
|
||||
},
|
||||
'created_at': '2024-09-12T10:03:51.638Z',
|
||||
'expires_at': '2024-09-19T10:03:51.638Z',
|
||||
'roles': [
|
||||
'member',
|
||||
],
|
||||
created_at: '2024-09-12T10:03:51.638Z',
|
||||
expires_at: '2024-09-19T10:03:51.638Z',
|
||||
roles: ['member'],
|
||||
},
|
||||
],
|
||||
};
|
||||
@@ -271,19 +269,15 @@ interface CollaboratorSearchResultItem {
|
||||
picture: string;
|
||||
type: CollaboratorType;
|
||||
name: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface EmailsList {
|
||||
invitesCount: number;
|
||||
membersCount: number;
|
||||
groupsCount: number;
|
||||
};
|
||||
}
|
||||
|
||||
const getEmailsForInviteSearch = ({
|
||||
invitesCount = 0,
|
||||
membersCount = 0,
|
||||
groupsCount = 0,
|
||||
}: EmailsList) => {
|
||||
const getEmailsForInviteSearch = ({ invitesCount = 0, membersCount = 0, groupsCount = 0 }: EmailsList) => {
|
||||
const emails: CollaboratorSearchResultItem[] = [];
|
||||
|
||||
for (let i = 0; i < groupsCount; i++) {
|
||||
@@ -332,11 +326,7 @@ const OWNER_ROLE_ID = 'role_b3cf4fed-9208-497a-93c6-ae1a82b7b889';
|
||||
const ADMIN_ROLE_ID = 'role_1c7938bc-c53b-49a1-819e-72f0c3a5baa6';
|
||||
const MEMBER_ROLE_ID = 'role_4c924f55-7706-4de8-94ab-0a2085890641';
|
||||
|
||||
const getCollaborators = ({
|
||||
invitesCount = 0,
|
||||
membersCount = 0,
|
||||
groupsCount = 0,
|
||||
}: EmailsList) => {
|
||||
const getCollaborators = ({ invitesCount = 0, membersCount = 0, groupsCount = 0 }: EmailsList) => {
|
||||
const collaborators: Collaborator[] = [];
|
||||
|
||||
for (let i = 0; i < groupsCount; i++) {
|
||||
@@ -507,10 +497,7 @@ export default (app: Application) => {
|
||||
name: _req.body.name,
|
||||
};
|
||||
|
||||
const projects = [
|
||||
...(projectsByOrgId.get(personalOrg.id) || []),
|
||||
newProject,
|
||||
];
|
||||
const projects = [...(projectsByOrgId.get(personalOrg.id) || []), newProject];
|
||||
|
||||
projectsByOrgId.set(personalOrg.id, projects);
|
||||
res.status(200).send({ ...newProject, organizationId: personalOrg.id });
|
||||
@@ -529,17 +516,14 @@ export default (app: Application) => {
|
||||
name: _req.body.name,
|
||||
};
|
||||
|
||||
const projects = [
|
||||
...(projectsByOrgId.get(organization.id) || []),
|
||||
newProject,
|
||||
];
|
||||
const projects = [...(projectsByOrgId.get(organization.id) || []), newProject];
|
||||
|
||||
projectsByOrgId.set(organization.id, projects);
|
||||
res.status(200).send({ ...newProject, organizationId: organization.id });
|
||||
});
|
||||
|
||||
app.post('/v1/organizations/:organizationId/collaborators', (_req, res) => {
|
||||
res.json({ 'data': [] });
|
||||
res.json({ data: [] });
|
||||
});
|
||||
|
||||
app.get('/v1/organizations/roles', (_req, res) => {
|
||||
@@ -576,36 +560,41 @@ export default (app: Application) => {
|
||||
|
||||
app.post('/v1/desktop/organizations/:organizationId/collaborators/start-adding', (_req, res) => {
|
||||
res.json({
|
||||
'acct_2346c8e88dae47e2a1a5cae04dc68ea3': {
|
||||
'accountId': 'acct_2346c8e88dae47e2a1a5cae04dc68ea3',
|
||||
'publicKey': '{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"o7QI0X9cue5ErinBTTz24YuTXGCbQQfhuqXKEq8xpBinqL8lW0CgTe3HqDDyGN6Ip3kE2wCCBLNTTheSS3FB0172VhsqE2mnlBsopfGWbNmFT-cT517464u9yrsFK2ywVDURDDjdh2BSl1T-3axy1P74BjvcOz7nzlAMNfT8Wp41Dwzb5o9-HPU_1nJQYzOb1zJlV1pwKzeufq81tNecT7td1QB3mnXhJAFFbRINiGu-uIaP7gl-J4ICOTh0Tjzzn7fKC-3EUbfLRvFUZBtRcZncWa5OjuGB5DhgHj8mcWvGyP_3gKzvOB2b4piE6N3NnbwO9-skIw5MdY-kQMvJLQ=="}',
|
||||
'autoLinked': false,
|
||||
acct_2346c8e88dae47e2a1a5cae04dc68ea3: {
|
||||
accountId: 'acct_2346c8e88dae47e2a1a5cae04dc68ea3',
|
||||
publicKey:
|
||||
'{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"o7QI0X9cue5ErinBTTz24YuTXGCbQQfhuqXKEq8xpBinqL8lW0CgTe3HqDDyGN6Ip3kE2wCCBLNTTheSS3FB0172VhsqE2mnlBsopfGWbNmFT-cT517464u9yrsFK2ywVDURDDjdh2BSl1T-3axy1P74BjvcOz7nzlAMNfT8Wp41Dwzb5o9-HPU_1nJQYzOb1zJlV1pwKzeufq81tNecT7td1QB3mnXhJAFFbRINiGu-uIaP7gl-J4ICOTh0Tjzzn7fKC-3EUbfLRvFUZBtRcZncWa5OjuGB5DhgHj8mcWvGyP_3gKzvOB2b4piE6N3NnbwO9-skIw5MdY-kQMvJLQ=="}',
|
||||
autoLinked: false,
|
||||
},
|
||||
'acct_2a1f5086018442b98fbb15120b75a27e': {
|
||||
'accountId': 'acct_2a1f5086018442b98fbb15120b75a27e',
|
||||
'publicKey': '{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"nvmA4jWOAUiopX7Ct9Z5mH6mmTB7I4SlSgDNCMtVxHKjEEegXuxTqkScklHnrZCT7ohmWY-6ouJW4ocjln3Falu8lxxB0V7YqBrxgf81lKlDIGr5f0VYp-R9JSBtR6btVj3xV-3I3APGH5lRBW0VGTdgrBaRAl7o9_4hy7xLSy_hqgqdH2-CS2gEZfRjN-1kjSI4nvqD1BSMfyWhu-pbhP6WdhmOa3JkWLPRtxQInv14Kp1-gWjsAfXYOEvldTH4DvCGYkvEBYvSr9FQ6NQKJFOHho4NAyXJhagvuqwc134XuwiFDgCmK0bh1jXR2fy-OR255S0NseArZPkY3l2Tjw=="}',
|
||||
'autoLinked': false,
|
||||
acct_2a1f5086018442b98fbb15120b75a27e: {
|
||||
accountId: 'acct_2a1f5086018442b98fbb15120b75a27e',
|
||||
publicKey:
|
||||
'{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"nvmA4jWOAUiopX7Ct9Z5mH6mmTB7I4SlSgDNCMtVxHKjEEegXuxTqkScklHnrZCT7ohmWY-6ouJW4ocjln3Falu8lxxB0V7YqBrxgf81lKlDIGr5f0VYp-R9JSBtR6btVj3xV-3I3APGH5lRBW0VGTdgrBaRAl7o9_4hy7xLSy_hqgqdH2-CS2gEZfRjN-1kjSI4nvqD1BSMfyWhu-pbhP6WdhmOa3JkWLPRtxQInv14Kp1-gWjsAfXYOEvldTH4DvCGYkvEBYvSr9FQ6NQKJFOHho4NAyXJhagvuqwc134XuwiFDgCmK0bh1jXR2fy-OR255S0NseArZPkY3l2Tjw=="}',
|
||||
autoLinked: false,
|
||||
},
|
||||
'acct_6694e55cce2c4dacb69c86844ba92d91': {
|
||||
'accountId': 'acct_6694e55cce2c4dacb69c86844ba92d91',
|
||||
'publicKey': '{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"wCd42bJqAZz5lRMk8MdMoF35ga9yhIjirMUhUXXKvA29LUYGsT6J_LxF6pXWV7CSZdxZPrf8Ur8L2AC7gz0ESHfV-uAVPBFnPrGBTiiHTBCDAtkt8tW3hqullJxfLS8PsGL6IYGYloq9gbKXiz-u37ba282vYQbbzkWO_382QJKS6eYAlE5JOpxmtNl7r5a3Okxz8JekBN5WhZrxEQzOv7ov7zmmRZPBgCm3Xo7RzAuUpBam1EkO5UvGL3DEjnc_Kx7R9jVbmLgDcryJDooKiCVLWv-tyg9H5QYMVd76uxAcQE9fJNoxSX-UU-Tu78-6CHk68IyTa2Rf4BwvSZJw-Q=="}',
|
||||
'autoLinked': false,
|
||||
acct_6694e55cce2c4dacb69c86844ba92d91: {
|
||||
accountId: 'acct_6694e55cce2c4dacb69c86844ba92d91',
|
||||
publicKey:
|
||||
'{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"wCd42bJqAZz5lRMk8MdMoF35ga9yhIjirMUhUXXKvA29LUYGsT6J_LxF6pXWV7CSZdxZPrf8Ur8L2AC7gz0ESHfV-uAVPBFnPrGBTiiHTBCDAtkt8tW3hqullJxfLS8PsGL6IYGYloq9gbKXiz-u37ba282vYQbbzkWO_382QJKS6eYAlE5JOpxmtNl7r5a3Okxz8JekBN5WhZrxEQzOv7ov7zmmRZPBgCm3Xo7RzAuUpBam1EkO5UvGL3DEjnc_Kx7R9jVbmLgDcryJDooKiCVLWv-tyg9H5QYMVd76uxAcQE9fJNoxSX-UU-Tu78-6CHk68IyTa2Rf4BwvSZJw-Q=="}',
|
||||
autoLinked: false,
|
||||
},
|
||||
'acct_72196d3295b243b48ea4de15391873b7': {
|
||||
'accountId': 'acct_72196d3295b243b48ea4de15391873b7',
|
||||
'publicKey': '{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"94S0IWkw5RgnhJy1Dspynt1gsRnOrG_A5UqI2sbp8fNCdlU9Z0M-r9O-ern0Wgupxxqt8s3xpQzaRYSPcCOK4z9F-w2MT6wIKn7EKKWCpXa94pra4J5abVukwtbPILIi9-uKu8RisnaeYT82OfZKAaQi-J24yzRI7qYLyS0GCrSxWgr1-wVzeRrE8gnwQU677TVAyGDTioz3EQ2-pB4fTkXdrBlVZ8qQkruwcTJ--rr550MD1cRK95J0jT1qGn8e0bTMW5lHP3dZH7vveFj1RP3cD7jnO6b3pD7jhDaMLJqXw0Nvxru__lToP-_r054Ea8ffEWVjygtqvplxq4R3Cw=="}',
|
||||
'autoLinked': false,
|
||||
acct_72196d3295b243b48ea4de15391873b7: {
|
||||
accountId: 'acct_72196d3295b243b48ea4de15391873b7',
|
||||
publicKey:
|
||||
'{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"94S0IWkw5RgnhJy1Dspynt1gsRnOrG_A5UqI2sbp8fNCdlU9Z0M-r9O-ern0Wgupxxqt8s3xpQzaRYSPcCOK4z9F-w2MT6wIKn7EKKWCpXa94pra4J5abVukwtbPILIi9-uKu8RisnaeYT82OfZKAaQi-J24yzRI7qYLyS0GCrSxWgr1-wVzeRrE8gnwQU677TVAyGDTioz3EQ2-pB4fTkXdrBlVZ8qQkruwcTJ--rr550MD1cRK95J0jT1qGn8e0bTMW5lHP3dZH7vveFj1RP3cD7jnO6b3pD7jhDaMLJqXw0Nvxru__lToP-_r054Ea8ffEWVjygtqvplxq4R3Cw=="}',
|
||||
autoLinked: false,
|
||||
},
|
||||
'acct_fe023b1398ab48fd8f9d3dfb622f5bf6': {
|
||||
'accountId': 'acct_fe023b1398ab48fd8f9d3dfb622f5bf6',
|
||||
'publicKey': '{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"s0W6IbaPmPaMgzf2-rGOffm4tNg8_ZykiX2C6ZgFdC-GsMGiF08pSjD7UfGTPSTIWFv4Ncz6D0J8wbFBa87IYTuIZhewbNAqRcX1eu_g0-4dNIw9KqhvIoy_O-r-MT1T11TuU5gWWyHw8mY2Aax9Z_JDdDMQc-dP_FqxGCTIHfe52xQNaCL3AgMp0nU5sDUp_vo3YXSWk0yuERqQ9TMcB9l27hQhbHZHDfsdHTodXutbBG5MwpcDBppriBVlMVjY8M7QHt61C7KF5mhgniEd2msF0bAZZaVz1ibZ9QNdFHHPrdfLLQvPyZFD4m8a7Wt0Qcq9FfrFubWv1208Ocet3Q=="}',
|
||||
'autoLinked': false,
|
||||
acct_fe023b1398ab48fd8f9d3dfb622f5bf6: {
|
||||
accountId: 'acct_fe023b1398ab48fd8f9d3dfb622f5bf6',
|
||||
publicKey:
|
||||
'{"alg":"RSA-OAEP-256","e":"AQAB","ext":true,"key_ops":["encrypt"],"kty":"RSA","n":"s0W6IbaPmPaMgzf2-rGOffm4tNg8_ZykiX2C6ZgFdC-GsMGiF08pSjD7UfGTPSTIWFv4Ncz6D0J8wbFBa87IYTuIZhewbNAqRcX1eu_g0-4dNIw9KqhvIoy_O-r-MT1T11TuU5gWWyHw8mY2Aax9Z_JDdDMQc-dP_FqxGCTIHfe52xQNaCL3AgMp0nU5sDUp_vo3YXSWk0yuERqQ9TMcB9l27hQhbHZHDfsdHTodXutbBG5MwpcDBppriBVlMVjY8M7QHt61C7KF5mhgniEd2msF0bAZZaVz1ibZ9QNdFHHPrdfLLQvPyZFD4m8a7Wt0Qcq9FfrFubWv1208Ocet3Q=="}',
|
||||
autoLinked: false,
|
||||
},
|
||||
});
|
||||
});
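
Each entry in the response above carries an RSA-OAEP-256 public key as a JWK string. A minimal sketch of how a caller could wrap a symmetric project key for one of these collaborators — illustrative only, not Insomnia's actual key-exchange code:

import { webcrypto } from 'node:crypto';

// Wrap a raw AES key for a collaborator using their RSA-OAEP-256 public JWK.
async function wrapForCollaborator(publicKeyJwk: JsonWebKey, rawAesKey: Uint8Array): Promise<ArrayBuffer> {
  const publicKey = await webcrypto.subtle.importKey(
    'jwk',
    publicKeyJwk,
    { name: 'RSA-OAEP', hash: 'SHA-256' },
    false,
    ['encrypt'],
  );
  // RSA-OAEP with a 2048-bit key can wrap a 32-byte AES key directly.
  return webcrypto.subtle.encrypt({ name: 'RSA-OAEP' }, publicKey, rawAesKey);
}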
|
||||
|
||||
app.get('/v1/organizations/:organizationId/my-project-keys', (_req, res) => {
|
||||
res.json({ 'projectKeys': [], 'members': [] });
|
||||
res.json({ projectKeys: [], members: [] });
|
||||
});
|
||||
|
||||
app.post('/v1/organizations/:organizationId/reconcile-keys', (_req, res) => {
|
||||
|
||||
@@ -80,26 +80,19 @@ export const oauthRoutes = async (port: number) => {
|
||||
},
|
||||
],
|
||||
pkce: {
|
||||
methods: [
|
||||
'S256',
|
||||
'plain',
|
||||
],
|
||||
methods: ['S256', 'plain'],
|
||||
required: (_, client) => {
|
||||
// Require PKCE for the PKCE client id
|
||||
return client.clientId === clientIDAuthorizationCodePKCE;
|
||||
},
|
||||
},
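
The mock provider accepts both S256 and plain PKCE methods but only requires PKCE for the dedicated PKCE client id. For reference, a minimal sketch of how a client derives an S256 challenge from a verifier (standard PKCE, not specific to this mock):

import { createHash, randomBytes } from 'node:crypto';

// code_verifier: 43-128 characters of high-entropy base64url.
const codeVerifier = randomBytes(32).toString('base64url');

// code_challenge for S256 is BASE64URL(SHA-256(code_verifier)).
const codeChallenge = createHash('sha256').update(codeVerifier).digest('base64url');

// The authorization request sends code_challenge + code_challenge_method=S256;
// the token request later proves possession by sending the original code_verifier.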
|
||||
responseTypes: [
|
||||
'code',
|
||||
'id_token', 'id_token token',
|
||||
'none',
|
||||
],
|
||||
responseTypes: ['code', 'id_token', 'id_token token', 'none'],
|
||||
issueRefreshToken: () => {
|
||||
return false;
|
||||
},
|
||||
// https://github.com/panva/node-oidc-provider/blob/main/recipes/skip_consent.md
|
||||
loadExistingGrant: async ctx => {
|
||||
const grantId = (ctx.oidc.result?.consent?.grantId) || (ctx.oidc.session!.grantIdFor(ctx.oidc.client!.clientId));
|
||||
const grantId = ctx.oidc.result?.consent?.grantId || ctx.oidc.session!.grantIdFor(ctx.oidc.client!.clientId);
|
||||
|
||||
if (grantId) {
|
||||
const grant = await ctx.oidc.provider.Grant.find(grantId);
|
||||
@@ -138,32 +131,21 @@ export const oauthRoutes = async (port: number) => {
|
||||
oauthRouter.get('/id-token', async (req, res) => {
|
||||
const client = await oidc.Client.find(clientIDImplicit);
|
||||
if (!client) {
|
||||
res
|
||||
.status(500)
|
||||
.header('Content-Type', 'text/plain')
|
||||
.send('Client not found');
|
||||
res.status(500).header('Content-Type', 'text/plain').send('Client not found');
|
||||
return;
|
||||
}
|
||||
|
||||
const authorizationHeader = req.header('Authorization');
|
||||
if (!authorizationHeader) {
|
||||
res
|
||||
.status(400)
|
||||
.header('Content-Type', 'text/plain')
|
||||
.send('Missing Authorization header');
|
||||
res.status(400).header('Content-Type', 'text/plain').send('Missing Authorization header');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const validated = await oidc.IdToken.validate(extractToken(authorizationHeader), client);
|
||||
res
|
||||
.status(200)
|
||||
.json(validated);
|
||||
res.status(200).json(validated);
|
||||
} catch (err) {
|
||||
res
|
||||
.status(500)
|
||||
.header('Content-Type', 'text/plain')
|
||||
.send('Invalid authorization header');
|
||||
res.status(500).header('Content-Type', 'text/plain').send('Invalid authorization header');
|
||||
}
|
||||
});
|
||||
|
||||
@@ -171,32 +153,22 @@ export const oauthRoutes = async (port: number) => {
|
||||
oauthRouter.get('/client-credential', async (req, res) => {
|
||||
const authorizationHeader = req.header('Authorization');
|
||||
if (!authorizationHeader) {
|
||||
res
|
||||
.status(400)
|
||||
.header('Content-Type', 'text/plain')
|
||||
.send('Missing Authorization header');
|
||||
res.status(400).header('Content-Type', 'text/plain').send('Missing Authorization header');
|
||||
return;
|
||||
}
|
||||
|
||||
const clientCredentials = await oidc.ClientCredentials.find(extractToken(authorizationHeader));
|
||||
if (!clientCredentials) {
|
||||
res
|
||||
.status(400)
|
||||
.header('Content-Type', 'text/plain')
|
||||
.send('Invalid client credentials');
|
||||
res.status(400).header('Content-Type', 'text/plain').send('Invalid client credentials');
|
||||
return;
|
||||
}
|
||||
|
||||
res
|
||||
.status(200)
|
||||
.json(clientCredentials);
|
||||
res.status(200).json(clientCredentials);
|
||||
});
|
||||
|
||||
oauthRouter.get('/interaction/:uid', async (req, res, next) => {
|
||||
try {
|
||||
const {
|
||||
uid, prompt,
|
||||
} = await oidc.interactionDetails(req, res);
|
||||
const { uid, prompt } = await oidc.interactionDetails(req, res);
|
||||
|
||||
switch (prompt.name) {
|
||||
case 'login': {
|
||||
@@ -227,10 +199,7 @@ export const oauthRoutes = async (port: number) => {
|
||||
try {
|
||||
await oidc.interactionDetails(req, res);
|
||||
|
||||
const account = await (oidc.Account as any).findAccount(
|
||||
null,
|
||||
req.body.login,
|
||||
);
|
||||
const account = await (oidc.Account as any).findAccount(null, req.body.login);
|
||||
|
||||
const result = {
|
||||
login: {
|
||||
@@ -267,64 +236,65 @@ function allowLocalhostImplicit(oidc: Provider) {
|
||||
// https://github.com/panva/node-oidc-provider/tree/main/docs#password-grant-type-ropc
|
||||
const parameters = ['username', 'password', 'scope'];
|
||||
function registerROPC(oidc: Provider) {
|
||||
oidc.registerGrantType('password', async (ctx, next) => {
|
||||
const params = ctx.oidc.params;
|
||||
oidc.registerGrantType(
|
||||
'password',
|
||||
async (ctx, next) => {
|
||||
const params = ctx.oidc.params;
|
||||
|
||||
if (!params) {
|
||||
throw new Error('invalid params provided');
|
||||
}
|
||||
if (!params) {
|
||||
throw new Error('invalid params provided');
|
||||
}
|
||||
|
||||
if (!ctx.oidc.client) {
|
||||
throw new Error('invalid client provided');
|
||||
}
|
||||
if (!ctx.oidc.client) {
|
||||
throw new Error('invalid client provided');
|
||||
}
|
||||
|
||||
if (typeof params.username !== 'string' || typeof params.password !== 'string') {
|
||||
throw new Error('invalid credentials provided');
|
||||
}
|
||||
if (typeof params.username !== 'string' || typeof params.password !== 'string') {
|
||||
throw new Error('invalid credentials provided');
|
||||
}
|
||||
|
||||
const account = await ctx.oidc.provider.Account.findAccount(
|
||||
ctx,
|
||||
params.username
|
||||
);
|
||||
if (!account) {
|
||||
throw new Error('invalid account');
|
||||
}
|
||||
const account = await ctx.oidc.provider.Account.findAccount(ctx, params.username);
|
||||
if (!account) {
|
||||
throw new Error('invalid account');
|
||||
}
|
||||
|
||||
const grant = new ctx.oidc.provider.Grant({
|
||||
clientId: ctx.oidc.client.clientId,
|
||||
accountId: account.accountId,
|
||||
});
|
||||
await grant.save();
|
||||
const grant = new ctx.oidc.provider.Grant({
|
||||
clientId: ctx.oidc.client.clientId,
|
||||
accountId: account.accountId,
|
||||
});
|
||||
await grant.save();
|
||||
|
||||
const { AccessToken } = ctx.oidc.provider;
|
||||
const at = new AccessToken({
|
||||
accountId: account.accountId,
|
||||
client: ctx.oidc.client,
|
||||
grantId: grant.jti,
|
||||
gty: 'password',
|
||||
scope: typeof params.scope === 'string' ? params.scope : '',
|
||||
claims: {
|
||||
userinfo: {
|
||||
sub: {
|
||||
value: account.accountId,
|
||||
const { AccessToken } = ctx.oidc.provider;
|
||||
const at = new AccessToken({
|
||||
accountId: account.accountId,
|
||||
client: ctx.oidc.client,
|
||||
grantId: grant.jti,
|
||||
gty: 'password',
|
||||
scope: typeof params.scope === 'string' ? params.scope : '',
|
||||
claims: {
|
||||
userinfo: {
|
||||
sub: {
|
||||
value: account.accountId,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
const accessToken = await at.save();
|
||||
const accessToken = await at.save();
|
||||
|
||||
/* eslint-disable camelcase */
|
||||
ctx.body = {
|
||||
access_token: accessToken,
|
||||
expires_in: at.expiration,
|
||||
scope: at.scope,
|
||||
token_type: at.tokenType,
|
||||
};
|
||||
/* eslint-enable camelcase */
|
||||
/* eslint-disable camelcase */
|
||||
ctx.body = {
|
||||
access_token: accessToken,
|
||||
expires_in: at.expiration,
|
||||
scope: at.scope,
|
||||
token_type: at.tokenType,
|
||||
};
|
||||
/* eslint-enable camelcase */
|
||||
|
||||
await next();
|
||||
}, parameters);
|
||||
await next();
|
||||
},
|
||||
parameters,
|
||||
);
|
||||
}
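
As a usage note, the password (ROPC) grant registered above is exercised by POSTing form-encoded credentials to the provider's token endpoint. A minimal sketch — the endpoint URL and client_id are illustrative assumptions, not values from this mock:

// Illustrative call against the mock provider's token endpoint.
const response = await fetch('http://localhost:4010/oidc/token', {
  method: 'POST',
  headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
  body: new URLSearchParams({
    grant_type: 'password',
    client_id: 'insomnia-client', // illustrative client id
    username: 'user@example.com',
    password: 'secret',
    scope: 'openid',
  }),
});
const { access_token, expires_in, token_type } = await response.json();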
|
||||
|
||||
function extractToken(authorizationHeader: string) {
|
||||
|
||||
@@ -79,11 +79,11 @@ const upgrade = (wss: WebSocketServer, request: IncomingMessage, socket: Socket,
|
||||
return redirectOnSuccess(socket);
|
||||
}
|
||||
if (request.url === '/delay') {
|
||||
const delaySec = Number.parseInt(request.headers.duration as string || '5');
|
||||
setTimeout(function() {
|
||||
const delaySec = Number.parseInt((request.headers.duration as string) || '5');
|
||||
setTimeout(function () {
|
||||
redirectOnSuccess(socket);
|
||||
}, delaySec * 1000);
|
||||
return ;
|
||||
return;
|
||||
}
|
||||
if (request.url === '/basic-auth') {
|
||||
// login with user:password
|
||||
|
||||
@@ -5,25 +5,24 @@ import path from 'path';
import { test } from '../../playwright/test';

test('can backup data on new version available', async ({ app, page }) => {
  const dataPath = await app.evaluate(async ({ app }) => app.getPath('userData'));
  let foundBackups = false;
  // retry 5 times
  for (let i = 0; i < 5; i++) {
    console.log('Retry', i);
    if (fs.existsSync(path.join(dataPath, 'backups'))) {
      console.log('Backups exists!');
      const rootBackupsFolder = fs.readdirSync(path.join(dataPath, 'backups'));
      const backupDir = fs.readdirSync(path.join(dataPath, 'backups', rootBackupsFolder[0]));
      const hasFilesInsideBackup = backupDir.length > 0;
      const hasProjectDbFile = backupDir.includes('insomnia.Project.db');
      foundBackups = hasFilesInsideBackup && hasProjectDbFile;
      break;
    } else {
      console.log('backups not found. Waiting 5 seconds...');
      await page.waitForTimeout(5000);
    }
  }

  await expect(foundBackups).toBe(true);
});

@@ -1,8 +1,8 @@
import { test } from '../../playwright/test';

test('can open scratchpad', async ({ page }) => {
  await page.locator('[data-testid="user-dropdown"]').click();
  await page.getByText('Log Out').click();
  await page.getByLabel('Use the Scratch Pad').click();
  await page.getByText('Welcome to the Scratch Pad').click();
});

@@ -20,9 +20,7 @@ testWithLegacyDatabase('Run data migration to version 8', async ({ page, userCon
|
||||
// Migration takes a while, adding this to avoid test timeout before it ends
|
||||
test.slow();
|
||||
|
||||
await page.getByLabel('Continue with Google').click(),
|
||||
|
||||
await page.locator('input[name="code"]').click();
|
||||
await page.getByLabel('Continue with Google').click(), await page.locator('input[name="code"]').click();
|
||||
await page.locator('input[name="code"]').fill(userConfig.code);
|
||||
|
||||
await page.getByRole('button', { name: 'Log in' }).click();
|
||||
|
||||