Compare commits


38 Commits

Author SHA1 Message Date
Alex Coté
7742e7a54c Allow dots in environment variable names (#323) 2025-12-28 13:53:43 -08:00
Gregory Schier
b516ca877b Fix variable matching in twig grammar to ignore ${var} format (#330)
Co-authored-by: Claude <noreply@anthropic.com>
2025-12-28 13:25:47 -08:00
Gregory Schier
f3dc71a85c Fix multipart form data parsing from cURL --data-raw (#331)
Co-authored-by: Claude <noreply@anthropic.com>
2025-12-28 13:25:35 -08:00
Gregory Schier
394fbbd55d Refactor content viewer components and use for multipart and request body (#333) 2025-12-28 13:25:24 -08:00
Gregory Schier
6869aa49ec Increase max size of multi-part viewer 2025-12-28 08:43:13 -08:00
Gregory Schier
ba00274045 Switch back to unbounded channel 2025-12-28 08:41:56 -08:00
Gregory Schier
e32930034d Merge branch 'multipart-viewer' 2025-12-28 08:09:34 -08:00
Gregory Schier
26a3e88715 Store and show request body in UI (#327) 2025-12-28 08:07:42 -08:00
Gregory Schier
6a0d5d2337 Add Claude Code GitHub Workflow (#332) 2025-12-28 07:07:20 -08:00
gschier
271d8f29ca Deploying to main from @ mountain-loop/yaak@9c5479b206 🚀 2025-12-26 15:37:29 +00:00
Gregory Schier
9c5479b206 Tweak font sizes 2025-12-22 14:40:18 -08:00
Gregory Schier
5f8902e57b Fix cookies not being persisted after HTTP requests (#328) 2025-12-22 10:58:03 -08:00
Gregory Schier
089c7e8dce Http response events (#326) 2025-12-21 14:34:37 -08:00
Gregory Schier
7e0aa919fb Immediate cancellation 2025-12-21 06:28:36 -08:00
Gregory Schier
5776bab288 Tweak response pane and refactor timings 2025-12-21 06:24:01 -08:00
Gregory Schier
6b52a0cbed Try fix tests on Windows 2025-12-20 14:48:23 -08:00
Gregory Schier
46933059f6 Split up HTTP sending logic (#320) 2025-12-20 14:10:55 -08:00
Gregory Schier
cfbfd66eef Reformat project 2025-12-13 08:10:12 -08:00
Gregory Schier
c20c0eff32 Update entitlements.plist for 1Password shared lib 2025-12-11 09:22:27 -08:00
Gregory Schier
9d40949043 Fix warning: unused variable: window on non-mac OSs 2025-12-11 07:18:31 -08:00
Gregory Schier
d435337f2a Don't strip symbols hotfix 2025-12-11 06:49:06 -08:00
Gregory Schier
a32145c054 Merge branch 'hotfix/2025.9.3' 2025-12-11 06:32:35 -08:00
Gregory Schier
e0f547b93f Update tauri 2025-12-11 06:32:14 -08:00
Gregory Schier
5d4268d6a1 Merge branch 'hotfix/2025.9.3' 2025-12-11 06:00:47 -08:00
Gregory Schier
0a3506f81e Also move defaultValue out 2025-12-11 05:59:40 -08:00
Gregory Schier
375b2287b7 Merge branch 'hotfix/2025.9.3' 2025-12-11 05:54:23 -08:00
Gregory Schier
e72c1e68e5 Unify 1Password field back to static name 2025-12-11 05:48:19 -08:00
Gregory Schier
3484db3371 Default cert to open when just added 2025-12-10 15:08:59 -08:00
Gregory Schier
c4b559f34b Support client certificates (#319) 2025-12-10 13:54:22 -08:00
Mikhail Mamontov
ef1ba9b834 fix(gRPC): Cache descriptor pools to avoid re-reflection; add manual “Refresh Schema” to force re-fetch (#317) 2025-12-09 15:35:35 -08:00
Jake Oliver
846f4d9551 Update 1Password template to support the new Desktop authentication method (#316) 2025-12-09 14:50:08 -08:00
Gregory Schier
4780bfe41f Fix curl import: decode Unicode escape sequences in $'...' strings (#318)
Co-authored-by: Claude <noreply@anthropic.com>
2025-12-09 14:15:39 -08:00
Gregory Schier
d0d01b3897 Update license check to use status instead of type 2025-12-09 14:12:13 -08:00
Gregory Schier
fc1e8baa23 Catch any 4XX error on refresh token failure
https://feedback.yaak.app/p/folders-oauth2-refresh-token-issue
2025-12-09 14:08:31 -08:00
Gregory Schier
d35116c494 Add license handling for expired licenses 2025-12-09 13:51:02 -08:00
gschier
1d257b365b Deploying to main from @ mountain-loop/yaak@1076d57e8a 🚀 2025-12-09 18:15:05 +00:00
Gregory Schier
1076d57e8a Remove unused funding model entries from FUNDING.yml 2025-12-09 10:14:19 -08:00
Gregory Schier
1c93d5775f Shorter titles when using native titlebar 2025-12-06 06:47:34 -08:00
186 changed files with 7222 additions and 2250 deletions

.gitattributes (vendored) · 5 changes

@@ -1,2 +1,7 @@
src-tauri/vendored/**/* linguist-generated=true
src-tauri/gen/schemas/**/* linguist-generated=true
**/bindings/* linguist-generated=true
src-tauri/yaak-templates/pkg/* linguist-generated=true
# Ensure consistent line endings for test files that check exact content
src-tauri/yaak-http/tests/test.txt text eol=lf
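
A note on why this rule exists: with core.autocrlf enabled (common on Windows), git rewrites text files to CRLF on checkout, which silently breaks any test that asserts a fixture's exact bytes (see the "Try fix tests on Windows" commit above). A hypothetical TypeScript sketch of the failure mode; the real tests live in the yaak-http Rust crate:

import { readFileSync } from 'node:fs';
import assert from 'node:assert';

// If git had rewritten the fixture with CRLF endings, any byte-exact
// comparison against it would fail even though the content "looks" the same.
const fixture = readFileSync('src-tauri/yaak-http/tests/test.txt');
assert(!fixture.includes(0x0d), 'fixture must keep LF endings; a 0x0d byte means CRLF crept in');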

.github/FUNDING.yml (vendored) · 12 changes

@@ -1,15 +1,3 @@
# These are supported funding model platforms
github: gschier
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: https://yaak.app/pricing

.github/workflows/claude.yml (vendored, new file) · 50 changes

@@ -0,0 +1,50 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: read
issues: read
id-token: write
actions: read # Required for Claude to read CI results on PRs
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
# This is an optional setting that allows Claude to read CI results on PRs
additional_permissions: |
actions: read
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
# prompt: 'Update the pull request description to include a summary of changes.'
# Optional: Add claude_args to customize behavior and configuration
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options
# claude_args: '--allowed-tools Bash(gh pr:*)'

.gitignore (vendored) · 3 changes

@@ -25,6 +25,7 @@ dist-ssr
*.sln
*.sw?
.eslintcache
out
*.sqlite
*.sqlite-*
@@ -33,3 +34,5 @@ dist-ssr
.tmp
tmp
.zed
codebook.toml

View File

@@ -19,10 +19,10 @@
<p align="center">
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https:&#x2F;&#x2F;github.com&#x2F;MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a>&nbsp;&nbsp;<a href="https://github.com/dharsanb"><img src="https:&#x2F;&#x2F;github.com&#x2F;dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a>&nbsp;&nbsp;<a href="https://github.com/railwayapp"><img src="https:&#x2F;&#x2F;github.com&#x2F;railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a>&nbsp;&nbsp;<a href="https://github.com/caseyamcl"><img src="https:&#x2F;&#x2F;github.com&#x2F;caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a>&nbsp;&nbsp;<a href="https://github.com/"><img src="https:&#x2F;&#x2F;raw.githubusercontent.com&#x2F;JamesIves&#x2F;github-sponsors-readme-action&#x2F;dev&#x2F;.github&#x2F;assets&#x2F;placeholder.png" width="80px" alt="User avatar: " /></a>&nbsp;&nbsp;<!-- sponsors-premium -->
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https:&#x2F;&#x2F;github.com&#x2F;MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a>&nbsp;&nbsp;<a href="https://github.com/dharsanb"><img src="https:&#x2F;&#x2F;github.com&#x2F;dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a>&nbsp;&nbsp;<a href="https://github.com/railwayapp"><img src="https:&#x2F;&#x2F;github.com&#x2F;railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a>&nbsp;&nbsp;<a href="https://github.com/caseyamcl"><img src="https:&#x2F;&#x2F;github.com&#x2F;caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a>&nbsp;&nbsp;<a href="https://github.com/bytebase"><img src="https:&#x2F;&#x2F;github.com&#x2F;bytebase.png" width="80px" alt="User avatar: bytebase" /></a>&nbsp;&nbsp;<a href="https://github.com/"><img src="https:&#x2F;&#x2F;raw.githubusercontent.com&#x2F;JamesIves&#x2F;github-sponsors-readme-action&#x2F;dev&#x2F;.github&#x2F;assets&#x2F;placeholder.png" width="80px" alt="User avatar: " /></a>&nbsp;&nbsp;<!-- sponsors-premium -->
</p>
<p align="center">
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<!-- sponsors-base -->
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<a href="https://github.com/GRAYAH"><img src="https:&#x2F;&#x2F;github.com&#x2F;GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a>&nbsp;&nbsp;<!-- sponsors-base -->
</p>
![Yaak API Client](https://yaak.app/static/screenshot.png)

package-lock.json (generated) · 162 changes

@@ -61,7 +61,7 @@
],
"devDependencies": {
"@biomejs/biome": "^2.3.7",
"@tauri-apps/cli": "^2.9.1",
"@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.3.4",
"nodejs-file-downloader": "^4.13.0",
"npm-run-all": "^4.1.5",
@@ -3231,9 +3231,9 @@
}
},
"node_modules/@tauri-apps/api": {
"version": "2.9.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-2.9.0.tgz",
"integrity": "sha512-qD5tMjh7utwBk9/5PrTA/aGr3i5QaJ/Mlt7p8NilQ45WgbifUNPyKWsA63iQ8YfQq6R8ajMapU+/Q8nMcPRLNw==",
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/api/-/api-2.9.1.tgz",
"integrity": "sha512-IGlhP6EivjXHepbBic618GOmiWe4URJiIeZFlB7x3czM0yDHHYviH1Xvoiv4FefdkQtn6v7TuwWCRfOGdnVUGw==",
"license": "Apache-2.0 OR MIT",
"funding": {
"type": "opencollective",
@@ -3241,9 +3241,9 @@
}
},
"node_modules/@tauri-apps/cli": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.9.1.tgz",
"integrity": "sha512-kKi2/WWsNXKoMdatBl4xrT7e1Ce27JvsetBVfWuIb6D3ep/Y0WO5SIr70yarXOSWam8NyDur4ipzjZkg6m7VDg==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli/-/cli-2.9.6.tgz",
"integrity": "sha512-3xDdXL5omQ3sPfBfdC8fCtDKcnyV7OqyzQgfyT5P3+zY6lcPqIYKQBvUasNvppi21RSdfhy44ttvJmftb0PCDw==",
"dev": true,
"license": "Apache-2.0 OR MIT",
"bin": {
@@ -3257,23 +3257,23 @@
"url": "https://opencollective.com/tauri"
},
"optionalDependencies": {
"@tauri-apps/cli-darwin-arm64": "2.9.1",
"@tauri-apps/cli-darwin-x64": "2.9.1",
"@tauri-apps/cli-linux-arm-gnueabihf": "2.9.1",
"@tauri-apps/cli-linux-arm64-gnu": "2.9.1",
"@tauri-apps/cli-linux-arm64-musl": "2.9.1",
"@tauri-apps/cli-linux-riscv64-gnu": "2.9.1",
"@tauri-apps/cli-linux-x64-gnu": "2.9.1",
"@tauri-apps/cli-linux-x64-musl": "2.9.1",
"@tauri-apps/cli-win32-arm64-msvc": "2.9.1",
"@tauri-apps/cli-win32-ia32-msvc": "2.9.1",
"@tauri-apps/cli-win32-x64-msvc": "2.9.1"
"@tauri-apps/cli-darwin-arm64": "2.9.6",
"@tauri-apps/cli-darwin-x64": "2.9.6",
"@tauri-apps/cli-linux-arm-gnueabihf": "2.9.6",
"@tauri-apps/cli-linux-arm64-gnu": "2.9.6",
"@tauri-apps/cli-linux-arm64-musl": "2.9.6",
"@tauri-apps/cli-linux-riscv64-gnu": "2.9.6",
"@tauri-apps/cli-linux-x64-gnu": "2.9.6",
"@tauri-apps/cli-linux-x64-musl": "2.9.6",
"@tauri-apps/cli-win32-arm64-msvc": "2.9.6",
"@tauri-apps/cli-win32-ia32-msvc": "2.9.6",
"@tauri-apps/cli-win32-x64-msvc": "2.9.6"
}
},
"node_modules/@tauri-apps/cli-darwin-arm64": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.9.1.tgz",
"integrity": "sha512-sdwhtsE/6njD0AjgfYEj1JyxZH4SBmCJSXpRm6Ph5fQeuZD6MyjzjdVOrrtFguyREVQ7xn0Ujkwvbo01ULthNg==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.9.6.tgz",
"integrity": "sha512-gf5no6N9FCk1qMrti4lfwP77JHP5haASZgVbBgpZG7BUepB3fhiLCXGUK8LvuOjP36HivXewjg72LTnPDScnQQ==",
"cpu": [
"arm64"
],
@@ -3288,9 +3288,9 @@
}
},
"node_modules/@tauri-apps/cli-darwin-x64": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.9.1.tgz",
"integrity": "sha512-c86g+67wTdI4TUCD7CaSd/13+oYuLQxVST4ZNJ5C+6i1kdnU3Us1L68N9MvbDLDQGJc9eo0pvuK6sCWkee+BzA==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.9.6.tgz",
"integrity": "sha512-oWh74WmqbERwwrwcueJyY6HYhgCksUc6NT7WKeXyrlY/FPmNgdyQAgcLuTSkhRFuQ6zh4Np1HZpOqCTpeZBDcw==",
"cpu": [
"x64"
],
@@ -3305,9 +3305,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-arm-gnueabihf": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.9.1.tgz",
"integrity": "sha512-IrB3gFQmueQKJjjisOcMktW/Gh6gxgqYO419doA3YZ7yIV5rbE8ZW52Q3I4AO+SlFEyVYer5kpi066p0JBlLGw==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.9.6.tgz",
"integrity": "sha512-/zde3bFroFsNXOHN204DC2qUxAcAanUjVXXSdEGmhwMUZeAQalNj5cz2Qli2elsRjKN/hVbZOJj0gQ5zaYUjSg==",
"cpu": [
"arm"
],
@@ -3322,9 +3322,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-arm64-gnu": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.9.1.tgz",
"integrity": "sha512-Ke7TyXvu6HbWSkmVkFbbH19D3cLsd117YtXP/u9NIvSpYwKeFtnbpirrIUfPm44Q+PZFZ2Hvg8X9qoUiAK0zKw==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.9.6.tgz",
"integrity": "sha512-pvbljdhp9VOo4RnID5ywSxgBs7qiylTPlK56cTk7InR3kYSTJKYMqv/4Q/4rGo/mG8cVppesKIeBMH42fw6wjg==",
"cpu": [
"arm64"
],
@@ -3339,9 +3339,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-arm64-musl": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.9.1.tgz",
"integrity": "sha512-sGvy75sv55oeMulR5ArwPD28DsDQxqTzLhXCrpU9/nbFg/JImmI7k994YE9fr3V0qE3Cjk5gjLldRNv7I9sjwQ==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.9.6.tgz",
"integrity": "sha512-02TKUndpodXBCR0oP//6dZWGYcc22Upf2eP27NvC6z0DIqvkBBFziQUcvi2n6SrwTRL0yGgQjkm9K5NIn8s6jw==",
"cpu": [
"arm64"
],
@@ -3356,9 +3356,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-riscv64-gnu": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.9.1.tgz",
"integrity": "sha512-tEKbJydV3BdIxpAx8aGHW6VDg1xW4LlQuRD/QeFZdZNTreHJpMbJEcdvAcI+Hg6vgQpVpaoEldR9W4F6dYSLqQ==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.9.6.tgz",
"integrity": "sha512-fmp1hnulbqzl1GkXl4aTX9fV+ubHw2LqlLH1PE3BxZ11EQk+l/TmiEongjnxF0ie4kV8DQfDNJ1KGiIdWe1GvQ==",
"cpu": [
"riscv64"
],
@@ -3373,9 +3373,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-x64-gnu": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.9.1.tgz",
"integrity": "sha512-mg5msXHagtHpyCVWgI01M26JeSrgE/otWyGdYcuTwyRYZYEJRTbcNt7hscOkdNlPBe7isScW7PVKbxmAjJJl4g==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.9.6.tgz",
"integrity": "sha512-vY0le8ad2KaV1PJr+jCd8fUF9VOjwwQP/uBuTJvhvKTloEwxYA/kAjKK9OpIslGA9m/zcnSo74czI6bBrm2sYA==",
"cpu": [
"x64"
],
@@ -3390,9 +3390,9 @@
}
},
"node_modules/@tauri-apps/cli-linux-x64-musl": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.9.1.tgz",
"integrity": "sha512-lFZEXkpDreUe3zKilvnMsrnKP9gwQudaEjDnOz/GMzbzNceIuPfFZz0cR/ky1Aoq4eSvZonPKHhROq4owz4fzg==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.9.6.tgz",
"integrity": "sha512-TOEuB8YCFZTWVDzsO2yW0+zGcoMiPPwcUgdnW1ODnmgfwccpnihDRoks+ABT1e3fHb1ol8QQWsHSCovb3o2ENQ==",
"cpu": [
"x64"
],
@@ -3407,9 +3407,9 @@
}
},
"node_modules/@tauri-apps/cli-win32-arm64-msvc": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.9.1.tgz",
"integrity": "sha512-ejc5RAp/Lm1Aj0EQHaT+Wdt5PHfdgQV5hIDV00MV6HNbIb5W4ZUFxMDaRkAg65gl9MvY2fH396riePW3RoKXDw==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.9.6.tgz",
"integrity": "sha512-ujmDGMRc4qRLAnj8nNG26Rlz9klJ0I0jmZs2BPpmNNf0gM/rcVHhqbEkAaHPTBVIrtUdf7bGvQAD2pyIiUrBHQ==",
"cpu": [
"arm64"
],
@@ -3424,9 +3424,9 @@
}
},
"node_modules/@tauri-apps/cli-win32-ia32-msvc": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.9.1.tgz",
"integrity": "sha512-fSATtJDc0fNjVB6ystyi8NbwhNFk8i8E05h6KrsC8Fio5eaJIJvPCbC9pdrPl6kkxN1X7fj25ErBbgfqgcK8Fg==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.9.6.tgz",
"integrity": "sha512-S4pT0yAJgFX8QRCyKA1iKjZ9Q/oPjCZf66A/VlG5Yw54Nnr88J1uBpmenINbXxzyhduWrIXBaUbEY1K80ZbpMg==",
"cpu": [
"ia32"
],
@@ -3441,9 +3441,9 @@
}
},
"node_modules/@tauri-apps/cli-win32-x64-msvc": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.9.1.tgz",
"integrity": "sha512-/JHlOzpUDhjBOO9w167bcYxfJbcMQv7ykS/Y07xjtcga8np0rzUzVGWYmLMH7orKcDMC7wjhheEW1x8cbGma/Q==",
"version": "2.9.6",
"resolved": "https://registry.npmjs.org/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.9.6.tgz",
"integrity": "sha512-ldWuWSSkWbKOPjQMJoYVj9wLHcOniv7diyI5UAJ4XsBdtaFB0pKHQsqw/ItUma0VXGC7vB4E9fZjivmxur60aw==",
"cpu": [
"x64"
],
@@ -3458,63 +3458,63 @@
}
},
"node_modules/@tauri-apps/plugin-clipboard-manager": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-clipboard-manager/-/plugin-clipboard-manager-2.3.0.tgz",
"integrity": "sha512-81NOBA2P+OTY8RLkBwyl9ZR/0CeggLub4F6zxcxUIfFOAqtky7J61+K/MkH2SC1FMxNBxrX0swDuKvkjkHadlA==",
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-clipboard-manager/-/plugin-clipboard-manager-2.3.2.tgz",
"integrity": "sha512-CUlb5Hqi2oZbcZf4VUyUH53XWPPdtpw43EUpCza5HWZJwxEoDowFzNUDt1tRUXA8Uq+XPn17Ysfptip33sG4eQ==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.6.0"
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-dialog": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-dialog/-/plugin-dialog-2.4.0.tgz",
"integrity": "sha512-OvXkrEBfWwtd8tzVCEXIvRfNEX87qs2jv6SqmVPiHcJjBhSF/GUvjqUNIDmKByb5N8nvDqVUM7+g1sXwdC/S9w==",
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-dialog/-/plugin-dialog-2.4.2.tgz",
"integrity": "sha512-lNIn5CZuw8WZOn8zHzmFmDSzg5zfohWoa3mdULP0YFh/VogVdMVWZPcWSHlydsiJhRQYaTNSYKN7RmZKE2lCYQ==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-fs": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-fs/-/plugin-fs-2.4.2.tgz",
"integrity": "sha512-YGhmYuTgXGsi6AjoV+5mh2NvicgWBfVJHHheuck6oHD+HC9bVWPaHvCP0/Aw4pHDejwrvT8hE3+zZAaWf+hrig==",
"version": "2.4.4",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-fs/-/plugin-fs-2.4.4.tgz",
"integrity": "sha512-MTorXxIRmOnOPT1jZ3w96vjSuScER38ryXY88vl5F0uiKdnvTKKTtaEjTEo8uPbl4e3gnUtfsDVwC7h77GQLvQ==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-log": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-log/-/plugin-log-2.7.0.tgz",
"integrity": "sha512-81XQ2f93x4vmIB5OY0XlYAxy60cHdYLs0Ki8Qp38tNATRiuBit+Orh3frpY3qfYQnqEvYVyRub7YRJWlmW2RRA==",
"version": "2.7.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-log/-/plugin-log-2.7.1.tgz",
"integrity": "sha512-jdb+o0wxQc8PjnLktgGpOs9Dh1YupaOGDXzO+Y8peA1UZ1ep3eXv4E1oiJ7nIQVN0XUFDDhnn3aBszl8ijhR+A==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-opener": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-opener/-/plugin-opener-2.5.0.tgz",
"integrity": "sha512-B0LShOYae4CZjN8leiNDbnfjSrTwoZakqKaWpfoH6nXiJwt6Rgj6RnVIffG3DoJiKsffRhMkjmBV9VeilSb4TA==",
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-opener/-/plugin-opener-2.5.2.tgz",
"integrity": "sha512-ei/yRRoCklWHImwpCcDK3VhNXx+QXM9793aQ64YxpqVF0BDuuIlXhZgiAkc15wnPVav+IbkYhmDJIv5R326Mew==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-os": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-os/-/plugin-os-2.3.1.tgz",
"integrity": "sha512-ty5V8XDUIFbSnrk3zsFoP3kzN+vAufYzalJSlmrVhQTImIZa1aL1a03bOaP2vuBvfR+WDRC6NgV2xBl8G07d+w==",
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-os/-/plugin-os-2.3.2.tgz",
"integrity": "sha512-n+nXWeuSeF9wcEsSPmRnBEGrRgOy6jjkSU+UVCOV8YUGKb2erhDOxis7IqRXiRVHhY8XMKks00BJ0OAdkpf6+A==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
}
},
"node_modules/@tauri-apps/plugin-shell": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-shell/-/plugin-shell-2.3.1.tgz",
"integrity": "sha512-jjs2WGDO/9z2pjNlydY/F5yYhNsscv99K5lCmU5uKjsVvQ3dRlDhhtVYoa4OLDmktLtQvgvbQjCFibMl6tgGfw==",
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/@tauri-apps/plugin-shell/-/plugin-shell-2.3.3.tgz",
"integrity": "sha512-Xod+pRcFxmOWFWEnqH5yZcA7qwAMuaaDkMR1Sply+F8VfBj++CGnj2xf5UoialmjZ2Cvd8qrvSCbU+7GgNVsKQ==",
"license": "MIT OR Apache-2.0",
"dependencies": {
"@tauri-apps/api": "^2.8.0"
@@ -18703,14 +18703,14 @@
"@tanstack/react-query": "^5.90.5",
"@tanstack/react-router": "^1.133.13",
"@tanstack/react-virtual": "^3.13.12",
"@tauri-apps/api": "^2.9.0",
"@tauri-apps/plugin-clipboard-manager": "^2.3.0",
"@tauri-apps/plugin-dialog": "^2.4.0",
"@tauri-apps/plugin-fs": "^2.4.2",
"@tauri-apps/plugin-log": "^2.7.0",
"@tauri-apps/plugin-opener": "^2.5.0",
"@tauri-apps/plugin-os": "^2.3.1",
"@tauri-apps/plugin-shell": "^2.3.1",
"@tauri-apps/api": "^2.9.1",
"@tauri-apps/plugin-clipboard-manager": "^2.3.2",
"@tauri-apps/plugin-dialog": "^2.4.2",
"@tauri-apps/plugin-fs": "^2.4.4",
"@tauri-apps/plugin-log": "^2.7.1",
"@tauri-apps/plugin-opener": "^2.5.2",
"@tauri-apps/plugin-os": "^2.3.2",
"@tauri-apps/plugin-shell": "^2.3.3",
"buffer": "^6.0.3",
"classnames": "^2.5.1",
"cm6-graphql": "^0.2.1",

View File

@@ -88,7 +88,7 @@
},
"devDependencies": {
"@biomejs/biome": "^2.3.7",
"@tauri-apps/cli": "^2.9.1",
"@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.3.4",
"nodejs-file-downloader": "^4.13.0",
"npm-run-all": "^4.1.5",

View File

@@ -12,7 +12,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseHeader = { name: string, value: string, };
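
The regenerated HttpResponse binding adds contentLengthCompressed, requestContentLength, and requestHeaders, in line with "Store and show request body in UI (#327)" above. A hypothetical consumer of the new size fields (not part of this diff):

// Rough compression savings for display, e.g. "64%" next to the body size.
function compressionSavings(resp: HttpResponse): string | null {
  if (resp.contentLength == null || resp.contentLengthCompressed == null) return null;
  if (resp.contentLength === 0) return '0%';
  return `${Math.round((1 - resp.contentLengthCompressed / resp.contentLength) * 100)}%`;
}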

View File

@@ -71,10 +71,10 @@ export async function getOrRefreshAccessToken(
httpRequest.authenticationType = 'none'; // Don't inherit workspace auth
const resp = await ctx.httpRequest.send({ httpRequest });
if (resp.status === 401) {
// Bad refresh token, so we'll force it to fetch a fresh access token by deleting
// and returning null;
console.log('[oauth2] Unauthorized refresh_token request');
if (resp.status >= 400 && resp.status < 500) {
// Client errors (4xx) indicate the refresh token is invalid, expired, or revoked
// Delete the token and return null to trigger a fresh authorization flow
console.log('[oauth2] Refresh token request failed with client error, deleting token');
await deleteToken(ctx, tokenArgs);
return null;
}
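
One nuance the new comment leaves implicit: per RFC 6749 §5.2, a token endpoint reports an expired or revoked refresh token as HTTP 400 with error "invalid_grant", not 401, so the old equality check missed the most common failure. A standalone sketch of the widened guard:

// Any 4xx from the token endpoint means the refresh token is unusable and
// should be deleted; 5xx and network errors are transient and should not be.
function refreshTokenIsDead(status: number): boolean {
  return status >= 400 && status < 500;
}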

View File

@@ -55,6 +55,34 @@ export const plugin: PluginDefinition = {
},
};
/**
* Decodes escape sequences in shell $'...' strings
* Handles Unicode escape sequences (\uXXXX) and common escape codes
*/
function decodeShellString(str: string): string {
return str
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\x([0-9a-fA-F]{2})/g, (_, hex) => String.fromCharCode(parseInt(hex, 16)))
.replace(/\\n/g, '\n')
.replace(/\\r/g, '\r')
.replace(/\\t/g, '\t')
.replace(/\\'/g, "'")
.replace(/\\"/g, '"')
.replace(/\\\\/g, '\\');
}
/**
* Checks if a string might contain escape sequences that need decoding
* If so, decodes them; otherwise returns the string as-is
*/
function maybeDecodeEscapeSequences(str: string): string {
// Check if the string contains escape sequences that shell-quote might not handle
if (str.includes('\\u') || str.includes('\\x')) {
return decodeShellString(str);
}
return str;
}
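
For reference, how the two helpers behave on the kinds of strings shell-quote hands back (the expectations mirror the tests later in this diff):

decodeShellString('{"query":"SearchQueryInput\\u0021"}'); // '{"query":"SearchQueryInput!"}'
decodeShellString('Line1\\nLine2\\tTab');                 // 'Line1\nLine2\tTab' with a real newline and tab
maybeDecodeEscapeSequences('no escapes here');            // returned as-is, since no \u or \x is present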
export function convertCurl(rawData: string) {
if (!rawData.match(/^\s*curl /)) {
return null;
@@ -86,9 +114,11 @@ export function convertCurl(rawData: string) {
for (const parseEntry of normalizedParseEntries) {
if (typeof parseEntry === 'string') {
if (parseEntry.startsWith('$')) {
currentCommand.push(parseEntry.slice(1));
// Handle $'...' strings from shell-quote - decode escape sequences
currentCommand.push(decodeShellString(parseEntry.slice(1)));
} else {
currentCommand.push(parseEntry);
// Decode escape sequences that shell-quote might not handle
currentCommand.push(maybeDecodeEscapeSequences(parseEntry));
}
continue;
}
@@ -108,7 +138,7 @@ export function convertCurl(rawData: string) {
if (op?.startsWith('$')) {
// Handle the case where literal like -H $'Header: \'Some Quoted Thing\''
const str = op.slice(2, op.length - 1).replace(/\\'/g, "'");
const str = decodeShellString(op.slice(2, op.length - 1));
currentCommand.push(str);
continue;
@@ -164,11 +194,17 @@ function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
let value: string | boolean;
const nextEntry = parseEntries[i + 1];
const hasValue = !BOOLEAN_FLAGS.includes(name);
// Check if nextEntry looks like a flag:
// - Single dash followed by a letter: -X, -H, -d
// - Double dash followed by a letter: --data-raw, --header
// This prevents mistaking data that starts with dashes (like multipart boundaries ------) as flags
const nextEntryIsFlag = typeof nextEntry === 'string' &&
(nextEntry.match(/^-[a-zA-Z]/) || nextEntry.match(/^--[a-zA-Z]/));
if (isSingleDash && name.length > 1) {
// Handle squished arguments like -XPOST
value = name.slice(1);
name = name.slice(0, 1);
} else if (typeof nextEntry === 'string' && hasValue && !nextEntry.startsWith('-')) {
} else if (typeof nextEntry === 'string' && hasValue && !nextEntryIsFlag) {
// Next arg is not a flag, so assign it as the value
value = nextEntry;
i++; // Skip next one
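
A quick check of the heuristic above, using the same regexes as the diff:

const looksLikeFlag = (s: string) => /^-[a-zA-Z]/.test(s) || /^--[a-zA-Z]/.test(s);
looksLikeFlag('-H');                         // true
looksLikeFlag('--data-raw');                 // true
looksLikeFlag('------WebKitFormBoundary');   // false: dashes followed by more dashes
                                             // stay attached to --data-raw as its value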
@@ -275,11 +311,32 @@ function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
}
// Body (Text or Blob)
const dataParameters = pairsToDataParameters(flagsByName);
const contentTypeHeader = headers.find((header) => header.name.toLowerCase() === 'content-type');
const mimeType = contentTypeHeader ? contentTypeHeader.value.split(';')[0] : null;
const mimeType = contentTypeHeader ? contentTypeHeader.value.split(';')[0]?.trim() : null;
// Body (Multipart Form Data)
// Extract boundary from Content-Type header for multipart parsing
const boundaryMatch = contentTypeHeader?.value.match(/boundary=([^\s;]+)/i);
const boundary = boundaryMatch?.[1];
// Get raw data from --data-raw flags (before splitting by &)
const rawDataValues = [
...((flagsByName['data-raw'] as string[] | undefined) || []),
...((flagsByName.d as string[] | undefined) || []),
...((flagsByName.data as string[] | undefined) || []),
...((flagsByName['data-binary'] as string[] | undefined) || []),
...((flagsByName['data-ascii'] as string[] | undefined) || []),
];
// Check if this is multipart form data in --data-raw (Chrome DevTools format)
let multipartFormDataFromRaw: { name: string; value?: string; file?: string; enabled: boolean }[] | null = null;
if (mimeType === 'multipart/form-data' && boundary && rawDataValues.length > 0) {
const rawBody = rawDataValues.join('');
multipartFormDataFromRaw = parseMultipartFormData(rawBody, boundary);
}
const dataParameters = pairsToDataParameters(flagsByName);
// Body (Multipart Form Data from -F flags)
const formDataParams = [
...((flagsByName.form as string[] | undefined) || []),
...((flagsByName.F as string[] | undefined) || []),
@@ -306,7 +363,13 @@ function importCommand(parseEntries: ParseEntry[], workspaceId: string) {
let bodyType: string | null = null;
const bodyAsGET = getPairValue(flagsByName, false, ['G', 'get']);
if (dataParameters.length > 0 && bodyAsGET) {
if (multipartFormDataFromRaw) {
// Handle multipart form data parsed from --data-raw (Chrome DevTools format)
bodyType = 'multipart/form-data';
body = {
form: multipartFormDataFromRaw,
};
} else if (dataParameters.length > 0 && bodyAsGET) {
urlParameters.push(...dataParameters);
} else if (
dataParameters.length > 0 &&
@@ -443,6 +506,71 @@ function splitOnce(str: string, sep: string): string[] {
return [str];
}
/**
* Parses multipart form data from a raw body string
* Used when Chrome DevTools exports a cURL with --data-raw containing multipart data
*/
function parseMultipartFormData(
rawBody: string,
boundary: string,
): { name: string; value?: string; file?: string; enabled: boolean }[] | null {
const results: { name: string; value?: string; file?: string; enabled: boolean }[] = [];
// The boundary in the body typically has -- prefix
const boundaryMarker = `--${boundary}`;
const parts = rawBody.split(boundaryMarker);
for (const part of parts) {
// Skip empty parts and the closing boundary marker
if (!part || part.trim() === '--' || part.trim() === '--\r\n') {
continue;
}
// Each part has headers and content separated by \r\n\r\n
const headerContentSplit = part.indexOf('\r\n\r\n');
if (headerContentSplit === -1) {
continue;
}
const headerSection = part.slice(0, headerContentSplit);
let content = part.slice(headerContentSplit + 4); // Skip \r\n\r\n
// Remove trailing \r\n from content
if (content.endsWith('\r\n')) {
content = content.slice(0, -2);
}
// Parse Content-Disposition header to get name and filename
const contentDispositionMatch = headerSection.match(
/Content-Disposition:\s*form-data;\s*name="([^"]+)"(?:;\s*filename="([^"]+)")?/i,
);
if (!contentDispositionMatch) {
continue;
}
const name = contentDispositionMatch[1] ?? '';
const filename = contentDispositionMatch[2];
const item: { name: string; value?: string; file?: string; enabled: boolean } = {
name,
enabled: true,
};
if (filename) {
// This is a file upload field
item.file = filename;
} else {
// This is a regular text field
item.value = content;
}
results.push(item);
}
return results.length > 0 ? results : null;
}
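
A usage sketch for the parser above, using the same two-field body as the test added later in this diff:

const boundary = '----FormBoundary123';
const raw =
  '------FormBoundary123\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalue1\r\n' +
  '------FormBoundary123\r\nContent-Disposition: form-data; name="field2"\r\n\r\nvalue2\r\n' +
  '------FormBoundary123--\r\n';
parseMultipartFormData(raw, boundary);
// => [ { name: 'field1', value: 'value1', enabled: true },
//      { name: 'field2', value: 'value2', enabled: true } ]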
const idCount: Partial<Record<string, number>> = {};
function generateId(model: string): string {

View File

@@ -391,6 +391,122 @@ describe('importer-curl', () => {
},
});
});
test('Imports data with Unicode escape sequences', () => {
expect(
convertCurl(
`curl 'https://yaak.app' -H 'Content-Type: application/json' --data-raw $'{"query":"SearchQueryInput\\u0021"}' -X POST`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
headers: [{ name: 'Content-Type', value: 'application/json', enabled: true }],
bodyType: 'application/json',
body: { text: '{"query":"SearchQueryInput!"}' },
}),
],
},
});
});
test('Imports data with multiple escape sequences', () => {
expect(
convertCurl(
`curl 'https://yaak.app' --data-raw $'Line1\\nLine2\\tTab\\u0021Exclamation' -X POST`,
),
).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'https://yaak.app',
method: 'POST',
bodyType: 'application/x-www-form-urlencoded',
body: {
form: [{ name: 'Line1\nLine2\tTab!Exclamation', value: '', enabled: true }],
},
headers: [
{
enabled: true,
name: 'Content-Type',
value: 'application/x-www-form-urlencoded',
},
],
}),
],
},
});
});
test('Imports multipart form data from --data-raw (Chrome DevTools format)', () => {
// This is the format Chrome DevTools uses when copying a multipart form submission as cURL
const curlCommand = `curl 'http://localhost:8080/system' \
-H 'Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryHwsXKi4rKA6P5VBd' \
--data-raw $'------WebKitFormBoundaryHwsXKi4rKA6P5VBd\r\nContent-Disposition: form-data; name="username"\r\n\r\njsgj\r\n------WebKitFormBoundaryHwsXKi4rKA6P5VBd\r\nContent-Disposition: form-data; name="password"\r\n\r\n654321\r\n------WebKitFormBoundaryHwsXKi4rKA6P5VBd\r\nContent-Disposition: form-data; name="captcha"; filename="test.xlsx"\r\nContent-Type: application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\r\n\r\n\r\n------WebKitFormBoundaryHwsXKi4rKA6P5VBd--\r\n'`;
expect(convertCurl(curlCommand)).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'http://localhost:8080/system',
method: 'POST',
headers: [
{
name: 'Content-Type',
value: 'multipart/form-data; boundary=----WebKitFormBoundaryHwsXKi4rKA6P5VBd',
enabled: true,
},
],
bodyType: 'multipart/form-data',
body: {
form: [
{ name: 'username', value: 'jsgj', enabled: true },
{ name: 'password', value: '654321', enabled: true },
{ name: 'captcha', file: 'test.xlsx', enabled: true },
],
},
}),
],
},
});
});
test('Imports multipart form data with text-only fields from --data-raw', () => {
const curlCommand = `curl 'http://example.com/api' \
-H 'Content-Type: multipart/form-data; boundary=----FormBoundary123' \
--data-raw $'------FormBoundary123\r\nContent-Disposition: form-data; name="field1"\r\n\r\nvalue1\r\n------FormBoundary123\r\nContent-Disposition: form-data; name="field2"\r\n\r\nvalue2\r\n------FormBoundary123--\r\n'`;
expect(convertCurl(curlCommand)).toEqual({
resources: {
workspaces: [baseWorkspace()],
httpRequests: [
baseRequest({
url: 'http://example.com/api',
method: 'POST',
headers: [
{
name: 'Content-Type',
value: 'multipart/form-data; boundary=----FormBoundary123',
enabled: true,
},
],
bodyType: 'multipart/form-data',
body: {
form: [
{ name: 'field1', value: 'value1', enabled: true },
{ name: 'field2', value: 'value2', enabled: true },
],
},
}),
],
},
});
});
});
const idCount: Partial<Record<string, number>> = {};

View File

@@ -1,26 +1,86 @@
import crypto from 'node:crypto';
import type { Client } from '@1password/sdk';
import { createClient } from '@1password/sdk';
import type { PluginDefinition } from '@yaakapp/api';
import { createClient, DesktopAuth } from '@1password/sdk';
import type { JsonPrimitive, PluginDefinition } from '@yaakapp/api';
import type { CallTemplateFunctionArgs } from '@yaakapp-internal/plugins';
const _clients: Record<string, Client> = {};
async function op(args: CallTemplateFunctionArgs): Promise<Client | null> {
const token = args.values.token;
if (typeof token !== 'string') return null;
async function op(args: CallTemplateFunctionArgs): Promise<{ client?: Client; error?: unknown }> {
let authMethod: string | DesktopAuth | null = null;
let hash: string | null = null;
switch (args.values.authMethod) {
case 'desktop': {
const account = args.values.token;
if (typeof account !== 'string' || !account) return { error: 'Missing account name' };
hash = crypto.createHash('sha256').update(`desktop:${account}`).digest('hex');
authMethod = new DesktopAuth(account);
break;
}
case 'token': {
const token = args.values.token;
if (typeof token !== 'string' || !token) return { error: 'Missing service token' };
hash = crypto.createHash('sha256').update(`token:${token}`).digest('hex');
authMethod = token;
break;
}
}
if (hash == null || authMethod == null) return { error: 'Invalid authentication method' };
const tokenHash = crypto.createHash('sha256').update(token).digest('hex');
try {
_clients[tokenHash] ??= await createClient({
auth: token,
_clients[hash] ??= await createClient({
auth: authMethod,
integrationName: 'Yaak 1Password Plugin',
integrationVersion: 'v1.0.0',
});
} catch {
return null;
} catch (e) {
return { error: e };
}
return _clients[tokenHash];
return { client: _clients[hash] };
}
async function getValue(
args: CallTemplateFunctionArgs,
vaultId?: JsonPrimitive,
itemId?: JsonPrimitive,
fieldId?: JsonPrimitive,
): Promise<{ value?: string; error?: unknown }> {
const { client, error } = await op(args);
if (!client) return { error };
if (vaultId && typeof vaultId === 'string') {
try {
await client.vaults.getOverview(vaultId);
} catch {
return { error: `Vault ${vaultId} not found` };
}
} else {
return { error: 'No vault specified' };
}
if (itemId && typeof itemId === 'string') {
try {
const item = await client.items.get(vaultId, itemId);
if (fieldId && typeof fieldId === 'string') {
const field = item.fields.find((f) => f.id === fieldId);
if (field) {
return { value: field.value };
} else {
return { error: `Field ${fieldId} not found in item ${itemId} in vault ${vaultId}` };
}
}
} catch {
return { error: `Item ${itemId} not found in vault ${vaultId}` };
}
} else {
return { error: 'No item specified' };
}
return {};
}
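
The refactor threads a { value?, error? } result object through op and getValue instead of returning null, so callers can tell "no client" apart from a concrete failure and surface it. A condensed sketch of the calling convention (it mirrors the onRender hook further down):

async function renderField(args: CallTemplateFunctionArgs): Promise<string> {
  const { value, error } = await getValue(args, args.values.vault, args.values.item, args.values.field);
  if (error) throw error instanceof Error ? error : new Error(String(error));
  return value ?? '';
}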
export const plugin: PluginDefinition = {
@@ -31,14 +91,50 @@ export const plugin: PluginDefinition = {
previewArgs: ['field'],
args: [
{
name: 'token',
type: 'text',
label: '1Password Service Account Token',
description:
'Token can be generated from the 1Password website by visiting Developer > Service Accounts',
// biome-ignore lint/suspicious/noTemplateCurlyInString: Yaak template syntax
defaultValue: '${[1PASSWORD_TOKEN]}',
password: true,
type: 'h_stack',
inputs: [
{
name: 'authMethod',
type: 'select',
label: 'Authentication Method',
defaultValue: 'token',
options: [
{
label: 'Service Account',
value: 'token',
},
{
label: 'Desktop App',
value: 'desktop',
},
],
},
{
name: 'token',
type: 'text',
// biome-ignore lint/suspicious/noTemplateCurlyInString: Yaak template syntax
defaultValue: '${[1PASSWORD_TOKEN]}',
dynamic(_ctx, args) {
switch (args.values.authMethod) {
case 'desktop':
return {
label: 'Account Name',
description:
'Account name can be taken from the sidebar of the 1Password App. Make sure you\'re on the BETA version of the 1Password app and have "Integrate with other apps" enabled in Settings > Developer.',
};
case 'token':
return {
label: 'Token',
description:
'Token can be generated from the 1Password website by visiting Developer > Service Accounts',
password: true,
};
}
return { hidden: true };
},
},
],
},
{
name: 'vault',
@@ -46,7 +142,7 @@ export const plugin: PluginDefinition = {
type: 'select',
options: [],
async dynamic(_ctx, args) {
const client = await op(args);
const { client } = await op(args);
if (client == null) return { hidden: true };
// Fetches a secret.
const vaults = await client.vaults.list({ decryptDetails: true });
@@ -64,18 +160,23 @@ export const plugin: PluginDefinition = {
type: 'select',
options: [],
async dynamic(_ctx, args) {
const client = await op(args);
const { client } = await op(args);
if (client == null) return { hidden: true };
const vaultId = args.values.vault;
if (typeof vaultId !== 'string') return { hidden: true };
const items = await client.items.list(vaultId);
return {
options: items.map((item) => ({
label: `${item.title} ${item.category}`,
value: item.id,
})),
};
try {
const items = await client.items.list(vaultId);
return {
options: items.map((item) => ({
label: `${item.title} ${item.category}`,
value: item.id,
})),
};
} catch {
// Hide as we can't list the items for this vault
return { hidden: true };
}
},
},
{
@@ -84,7 +185,7 @@ export const plugin: PluginDefinition = {
type: 'select',
options: [],
async dynamic(_ctx, args) {
const client = await op(args);
const { client } = await op(args);
if (client == null) return { hidden: true };
const vaultId = args.values.vault;
const itemId = args.values.item;
@@ -92,34 +193,28 @@ export const plugin: PluginDefinition = {
return { hidden: true };
}
const item = await client.items.get(vaultId, itemId);
return {
options: item.fields.map((field) => ({ label: field.title, value: field.id })),
};
try {
const item = await client.items.get(vaultId, itemId);
return {
options: item.fields.map((field) => ({ label: field.title, value: field.id })),
};
} catch {
// Hide as we can't find the item within this vault
return { hidden: true };
}
},
},
],
async onRender(_ctx, args) {
const client = await op(args);
if (client == null) throw new Error('Invalid token');
const vaultId = args.values.vault;
const itemId = args.values.item;
const fieldId = args.values.field;
if (
typeof vaultId !== 'string' ||
typeof itemId !== 'string' ||
typeof fieldId !== 'string'
) {
return null;
const { value, error } = await getValue(args, vaultId, itemId, fieldId);
if (error) {
throw error;
}
const item = await client.items.get(vaultId, itemId);
const field = item.fields.find((f) => f.id === fieldId);
if (field == null) {
throw new Error(`Field not found: ${fieldId}`);
}
return field.value ?? '';
return value ?? '';
},
},
],

View File

@@ -5,3 +5,4 @@ chain_width = 100
max_width = 100
single_line_if_else_max_width = 100
fn_call_width = 100
struct_lit_width = 100

src-tauri/Cargo.lock (generated) · 210 changes

@@ -192,12 +192,14 @@ version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b37fc50485c4f3f736a4fb14199f6d5f5ba008d7f28fe710306c92780f004c07"
dependencies = [
"brotli",
"brotli 8.0.1",
"flate2",
"futures-core",
"memchr",
"pin-project-lite",
"tokio",
"zstd",
"zstd-safe",
]
[[package]]
@@ -473,6 +475,15 @@ dependencies = [
"generic-array",
]
[[package]]
name = "block-padding"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
dependencies = [
"generic-array",
]
[[package]]
name = "block2"
version = "0.5.1"
@@ -527,6 +538,17 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "brotli"
version = "7.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor 4.0.3",
]
[[package]]
name = "brotli"
version = "8.0.1"
@@ -535,7 +557,17 @@ checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
"brotli-decompressor",
"brotli-decompressor 5.0.0",
]
[[package]]
name = "brotli-decompressor"
version = "4.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a334ef7c9e23abf0ce748e8cd309037da93e606ad52eb372e4ce327a0dcfbdfd"
dependencies = [
"alloc-no-stdlib",
"alloc-stdlib",
]
[[package]]
@@ -700,6 +732,15 @@ dependencies = [
"toml 0.8.23",
]
[[package]]
name = "cbc"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
dependencies = [
"cipher",
]
[[package]]
name = "cc"
version = "1.2.26"
@@ -1230,6 +1271,15 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "des"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffdd80ce8ce993de27e9f063a444a4d53ce8e8db4c1f00cc03af5ad5a9867a1e"
dependencies = [
"cipher",
]
[[package]]
name = "digest"
version = "0.10.7"
@@ -2623,6 +2673,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
dependencies = [
"block-padding",
"generic-array",
]
@@ -3009,9 +3060,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.28"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
dependencies = [
"value-bag",
]
@@ -3739,6 +3790,23 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "p12"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4873306de53fe82e7e484df31e1e947d61514b6ea2ed6cd7b45d63006fd9224"
dependencies = [
"cbc",
"cipher",
"des",
"getrandom 0.2.16",
"hmac",
"lazy_static",
"rc2",
"sha1",
"yasna",
]
[[package]]
name = "pango"
version = "0.18.3"
@@ -4489,6 +4557,15 @@ version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
[[package]]
name = "rc2"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62c64daa8e9438b84aaae55010a93f396f8e60e3911590fcba770d04643fc1dd"
dependencies = [
"cipher",
]
[[package]]
name = "redox_syscall"
version = "0.5.12"
@@ -4787,6 +4864,15 @@ dependencies = [
"security-framework 3.5.1",
]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.12.0"
@@ -5620,9 +5706,9 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
[[package]]
name = "tauri"
version = "2.9.2"
version = "2.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bceb52453e507c505b330afe3398510e87f428ea42b6e76ecb6bd63b15965b5"
checksum = "8a3868da5508446a7cd08956d523ac3edf0a8bc20bf7e4038f9a95c2800d2033"
dependencies = [
"anyhow",
"bytes",
@@ -5672,9 +5758,9 @@ dependencies = [
[[package]]
name = "tauri-build"
version = "2.5.1"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a924b6c50fe83193f0f8b14072afa7c25b7a72752a2a73d9549b463f5fe91a38"
checksum = "17fcb8819fd16463512a12f531d44826ce566f486d7ccd211c9c8cebdaec4e08"
dependencies = [
"anyhow",
"cargo_toml",
@@ -5694,12 +5780,12 @@ dependencies = [
[[package]]
name = "tauri-codegen"
version = "2.5.0"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c1fe64c74cc40f90848281a90058a6db931eb400b60205840e09801ee30f190"
checksum = "9fa9844cefcf99554a16e0a278156ae73b0d8680bbc0e2ad1e4287aadd8489cf"
dependencies = [
"base64 0.22.1",
"brotli",
"brotli 8.0.1",
"ico",
"json-patch",
"plist",
@@ -5721,9 +5807,9 @@ dependencies = [
[[package]]
name = "tauri-macros"
version = "2.5.0"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "260c5d2eb036b76206b9fca20b7be3614cfd21046c5396f7959e0e64a4b07f2f"
checksum = "3764a12f886d8245e66b7ee9b43ccc47883399be2019a61d80cf0f4117446fde"
dependencies = [
"heck 0.5.0",
"proc-macro2",
@@ -5735,9 +5821,9 @@ dependencies = [
[[package]]
name = "tauri-plugin"
version = "2.5.1"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "076c78a474a7247c90cad0b6e87e593c4c620ed4efdb79cbe0214f0021f6c39d"
checksum = "0e1d0a4860b7ff570c891e1d2a586bf1ede205ff858fbc305e0b5ae5d14c1377"
dependencies = [
"anyhow",
"glob",
@@ -5752,9 +5838,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-clipboard-manager"
version = "2.3.0"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adddd9e9275b20e77af3061d100a25a884cced3c4c9ef680bd94dd0f7e26c1ca"
checksum = "206dc20af4ed210748ba945c2774e60fd0acd52b9a73a028402caf809e9b6ecf"
dependencies = [
"arboard",
"log",
@@ -5767,9 +5853,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-deep-link"
version = "2.4.3"
version = "2.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd67112fb1131834c2a7398ffcba520dbbf62c17de3b10329acd1a3554b1a9bb"
checksum = "6e82759f7c7d51de3cbde51c04b3f2332de52436ed84541182cd8944b04e9e73"
dependencies = [
"dunce",
"plist",
@@ -5828,9 +5914,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-log"
version = "2.7.0"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61c1438bc7662acd16d508c919b3c087efd63669a4c75625dff829b1c75975ec"
checksum = "d5709c792b8630290b5d9811a1f8fe983dd925fc87c7fc7f4923616458cd00b6"
dependencies = [
"android_logger",
"byte-unit",
@@ -5850,9 +5936,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-opener"
version = "2.5.0"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "786156aa8e89e03d271fbd3fe642207da8e65f3c961baa9e2930f332bf80a1f5"
checksum = "c26b72571d25dee25667940027114e60f569fc3974f8cefbe50c2cbc5fd65e3b"
dependencies = [
"dunce",
"glob",
@@ -5872,9 +5958,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-os"
version = "2.3.1"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a1c77ebf6f20417ab2a74e8c310820ba52151406d0c80fbcea7df232e3f6ba"
checksum = "d8f08346c8deb39e96f86973da0e2d76cbb933d7ac9b750f6dc4daf955a6f997"
dependencies = [
"gethostname 1.0.2",
"log",
@@ -5911,9 +5997,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-single-instance"
version = "2.3.4"
version = "2.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb9cac815bf11c4a80fb498666bcdad66d65b89e3ae24669e47806febb76389c"
checksum = "dd707f8c86b4e3004e2c141fa24351f1909ba40ce1b8437e30d5ed5277dd3710"
dependencies = [
"serde",
"serde_json",
@@ -5959,9 +6045,9 @@ dependencies = [
[[package]]
name = "tauri-plugin-window-state"
version = "2.4.0"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d5f6fe3291bfa609c7e0b0ee3bedac294d94c7018934086ce782c1d0f2a468e"
checksum = "73736611e14142408d15353e21e3cca2f12a3cfb523ad0ce85999b6d2ef1a704"
dependencies = [
"bitflags 2.9.1",
"log",
@@ -5974,9 +6060,9 @@ dependencies = [
[[package]]
name = "tauri-runtime"
version = "2.9.1"
version = "2.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9368f09358496f2229313fccb37682ad116b7f46fa76981efe116994a0628926"
checksum = "87f766fe9f3d1efc4b59b17e7a891ad5ed195fa8d23582abb02e6c9a01137892"
dependencies = [
"cookie",
"dpi",
@@ -5999,9 +6085,9 @@ dependencies = [
[[package]]
name = "tauri-runtime-wry"
version = "2.9.1"
version = "2.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "929f5df216f5c02a9e894554401bcdab6eec3e39ec6a4a7731c7067fc8688a93"
checksum = "187a3f26f681bdf028f796ccf57cf478c1ee422c50128e5a0a6ebeb3f5910065"
dependencies = [
"gtk",
"http",
@@ -6026,12 +6112,12 @@ dependencies = [
[[package]]
name = "tauri-utils"
version = "2.8.0"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6b8bbe426abdbf52d050e52ed693130dbd68375b9ad82a3fb17efb4c8d85673"
checksum = "76a423c51176eb3616ee9b516a9fa67fed5f0e78baaba680e44eb5dd2cc37490"
dependencies = [
"anyhow",
"brotli",
"brotli 8.0.1",
"cargo_metadata",
"ctor",
"dunce",
@@ -6810,9 +6896,9 @@ dependencies = [
[[package]]
name = "value-bag"
version = "1.11.1"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5"
checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0"
[[package]]
name = "vcpkg"
@@ -7114,9 +7200,9 @@ dependencies = [
[[package]]
name = "webpki-root-certs"
version = "1.0.0"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01a83f7e1a9f8712695c03eabe9ed3fbca0feff0152f33f12593e5a6303cb1a4"
checksum = "ee3e3b5f5e80bc89f30ce8d0343bf4e5f12341c51f3e26cbeecbc7c85443e85b"
dependencies = [
"rustls-pki-types",
]
@@ -7840,6 +7926,7 @@ dependencies = [
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"ts-rs",
"uuid",
"yaak-common",
@@ -7855,6 +7942,7 @@ dependencies = [
"yaak-sse",
"yaak-sync",
"yaak-templates",
"yaak-tls",
"yaak-ws",
]
@@ -7865,6 +7953,7 @@ dependencies = [
"regex",
"reqwest",
"serde",
"serde_json",
"tauri",
"thiserror 2.0.17",
]
@@ -7933,32 +8022,43 @@ dependencies = [
"serde_json",
"tauri",
"tauri-plugin-shell",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tonic",
"tonic-reflection",
"uuid",
"yaak-http",
"yaak-tls",
]
[[package]]
name = "yaak-http"
version = "0.1.0"
dependencies = [
"async-compression",
"async-trait",
"brotli 7.0.0",
"bytes",
"flate2",
"futures-util",
"hyper-util",
"log",
"mime_guess",
"regex",
"reqwest",
"reqwest_cookie_store",
"rustls",
"rustls-platform-verifier",
"serde",
"serde_json",
"tauri",
"thiserror 2.0.17",
"tokio",
"tokio-util",
"tower-service",
"urlencoding",
"yaak-common",
"yaak-models",
"yaak-tls",
"zstd",
]
[[package]]
@@ -8093,13 +8193,28 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "yaak-tls"
version = "0.1.0"
dependencies = [
"log",
"p12",
"rustls",
"rustls-pemfile",
"rustls-platform-verifier",
"serde",
"thiserror 2.0.17",
"url",
"yaak-models",
]
[[package]]
name = "yaak-ws"
version = "0.1.0"
dependencies = [
"futures-util",
"log",
"md5 0.7.0",
"md5 0.8.0",
"reqwest_cookie_store",
"serde",
"serde_json",
@@ -8112,8 +8227,15 @@ dependencies = [
"yaak-models",
"yaak-plugins",
"yaak-templates",
"yaak-tls",
]
[[package]]
name = "yasna"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd"
[[package]]
name = "yoke"
version = "0.8.0"

View File

@@ -12,6 +12,7 @@ members = [
"yaak-sse",
"yaak-sync",
"yaak-templates",
"yaak-tls",
"yaak-ws",
]
@@ -28,7 +29,9 @@ name = "tauri_app_lib"
crate-type = ["staticlib", "cdylib", "lib"]
[profile.release]
strip = true # Automatically strip symbols from the binary.
# Currently disabled due to:
# Warn Failed to add bundler type to the binary: __TAURI_BUNDLE_TYPE variable not found in binary. Make sure tauri crate and tauri-cli are up to date and that symbol stripping is disabled (https://doc.rust-lang.org/cargo/reference/profiles.html#strip). Updater plugin may not be able to update this package. This shouldn't normally happen, please report it to https://github.com/tauri-apps/tauri/issues
strip = false
[features]
cargo-clippy = []
@@ -37,7 +40,7 @@ updater = []
license = ["yaak-license"]
[build-dependencies]
tauri-build = { version = "2.5.0", features = [] }
tauri-build = { version = "2.5.3", features = [] }
[target.'cfg(target_os = "linux")'.dependencies]
openssl-sys = { version = "0.9.105", features = ["vendored"] } # For Ubuntu installation to work
@@ -57,20 +60,21 @@ reqwest_cookie_store = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["raw_value"] }
tauri = { workspace = true, features = ["devtools", "protocol-asset"] }
tauri-plugin-clipboard-manager = "2.3.0"
tauri-plugin-deep-link = "2.4.3"
tauri-plugin-clipboard-manager = "2.3.2"
tauri-plugin-deep-link = "2.4.5"
tauri-plugin-dialog = { workspace = true }
tauri-plugin-fs = "2.4.2"
tauri-plugin-log = { version = "2.7.0", features = ["colored"] }
tauri-plugin-opener = "2.5.0"
tauri-plugin-os = "2.3.1"
tauri-plugin-fs = "2.4.4"
tauri-plugin-log = { version = "2.7.1", features = ["colored"] }
tauri-plugin-opener = "2.5.2"
tauri-plugin-os = "2.3.2"
tauri-plugin-shell = { workspace = true }
tauri-plugin-single-instance = { version = "2.3.4", features = ["deep-link"] }
tauri-plugin-single-instance = { version = "2.3.6", features = ["deep-link"] }
tauri-plugin-updater = "2.9.0"
tauri-plugin-window-state = "2.4.0"
tauri-plugin-window-state = "2.4.1"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
tokio-stream = "0.1.17"
tokio-util = { version = "0.7", features = ["codec"] }
ts-rs = { workspace = true }
uuid = "1.12.1"
yaak-common = { workspace = true }
@@ -86,6 +90,7 @@ yaak-plugins = { workspace = true }
yaak-sse = { workspace = true }
yaak-sync = { workspace = true }
yaak-templates = { workspace = true }
yaak-tls = { workspace = true }
yaak-ws = { path = "yaak-ws" }
[workspace.dependencies]
@@ -99,9 +104,9 @@ rustls-platform-verifier = "0.6.2"
serde = "1.0.228"
serde_json = "1.0.145"
sha2 = "0.10.9"
log = "0.4.28"
tauri = "2.9.2"
tauri-plugin = "2.5.1"
log = "0.4.29"
tauri = "2.9.5"
tauri-plugin = "2.5.2"
tauri-plugin-dialog = "2.4.2"
tauri-plugin-shell = "2.3.3"
thiserror = "2.0.17"
@@ -116,3 +121,4 @@ yaak-plugins = { path = "yaak-plugins" }
yaak-sse = { path = "yaak-sse" }
yaak-sync = { path = "yaak-sync" }
yaak-templates = { path = "yaak-templates" }
yaak-tls = { path = "yaak-tls" }

View File

@@ -6,6 +6,10 @@
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->

View File

@@ -1,5 +1,5 @@
use crate::error::Result;
use tauri::{command, AppHandle, Manager, Runtime, State, WebviewWindow};
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManagerExt;
use yaak_plugins::events::{GetThemesResponse, PluginContext};

View File

@@ -1,4 +1,4 @@
use mime_guess::{mime, Mime};
use mime_guess::{Mime, mime};
use std::path::Path;
use std::str::FromStr;
use tokio::fs;

View File

@@ -1,5 +1,5 @@
use std::io;
use serde::{Serialize, Serializer};
use std::io;
use thiserror::Error;
#[derive(Error, Debug)]
@@ -59,7 +59,7 @@ pub enum Error {
#[error("Request error: {0}")]
RequestError(#[from] reqwest::Error),
#[error("Generic error: {0}")]
#[error("{0}")]
GenericError(String),
}
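The effect of dropping the "Generic error:" prefix above is that the wrapped message becomes the entire Display output. A minimal standalone sketch of that behavior, assuming only the thiserror dependency:

use thiserror::Error;

#[derive(Error, Debug)]
enum Error {
    // With the bare "{0}" format, Display shows the inner message verbatim,
    // with no "Generic error: " prefix.
    #[error("{0}")]
    GenericError(String),
}

fn main() {
    let e = Error::GenericError("Response body path not set".into());
    assert_eq!(e.to_string(), "Response body path not set");
}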

View File

File diff suppressed because it is too large

View File

@@ -32,10 +32,11 @@ use yaak_common::window::WorkspaceWindowTrait;
use yaak_grpc::manager::GrpcHandle;
use yaak_grpc::{Code, ServiceDefinition, serialize_message};
use yaak_mac_window::AppHandleMacWindowExt;
use yaak_models::blob_manager::BlobManagerExt;
use yaak_models::models::{
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseState, Plugin, Workspace,
WorkspaceMeta,
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState,
Plugin, Workspace, WorkspaceMeta,
};
use yaak_models::query_manager::QueryManagerExt;
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
@@ -53,6 +54,7 @@ use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_sse::sse::ServerSentEvent;
use yaak_templates::format_json::format_json;
use yaak_templates::{RenderErrorBehavior, RenderOptions, Tokens, transform_args};
use yaak_tls::find_client_certificate;
mod commands;
mod encoding;
@@ -156,6 +158,7 @@ async fn cmd_grpc_reflect<R: Runtime>(
request_id: &str,
environment_id: Option<&str>,
proto_files: Vec<String>,
skip_cache: Option<bool>,
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
grpc_handle: State<'_, Mutex<GrpcHandle>>,
@@ -178,14 +181,15 @@ async fn cmd_grpc_reflect<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
let uri = safe_uri(&req.url);
let metadata = build_metadata(&window, &req, &auth_context_id).await?;
let settings = window.db().get_settings();
let client_certificate =
find_client_certificate(req.url.as_str(), &settings.client_certificates);
Ok(grpc_handle
.lock()
@@ -196,6 +200,8 @@ async fn cmd_grpc_reflect<R: Runtime>(
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
client_certificate,
skip_cache.unwrap_or(false),
)
.await
.map_err(|e| GenericError(e.to_string()))?)
@@ -227,14 +233,16 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
let metadata = build_metadata(&window, &request, &auth_context_id).await?;
// Find matching client certificate for this URL
let settings = app_handle.db().get_settings();
let client_cert = find_client_certificate(&request.url, &settings.client_certificates);
let conn = app_handle.db().upsert_grpc_connection(
&GrpcConnection {
workspace_id: request.workspace_id.clone(),
@@ -283,6 +291,7 @@ async fn cmd_grpc_go<R: Runtime>(
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
client_cert.clone(),
)
.await;
@@ -292,7 +301,7 @@ async fn cmd_grpc_go<R: Runtime>(
app_handle.db().upsert_grpc_connection(
&GrpcConnection {
elapsed: start.elapsed().as_millis() as i32,
error: Some(err.clone()),
error: Some(err.to_string()),
state: GrpcConnectionState::Closed,
..conn.clone()
},
@@ -350,9 +359,7 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await
.expect("Failed to render template")
@@ -402,9 +409,7 @@ async fn cmd_grpc_go<R: Runtime>(
&PluginContext::new(&window),
RenderPurpose::Send,
),
&RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
},
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await?;
@@ -423,7 +428,9 @@ async fn cmd_grpc_go<R: Runtime>(
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
(true, true) => (
Some(
connection.streaming(&service, &method, in_msg_stream, &metadata).await,
connection
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
.await,
),
None,
),
@@ -431,7 +438,13 @@ async fn cmd_grpc_go<R: Runtime>(
None,
Some(
connection
.client_streaming(&service, &method, in_msg_stream, &metadata)
.client_streaming(
&service,
&method,
in_msg_stream,
&metadata,
client_cert,
)
.await,
),
),
@@ -439,9 +452,12 @@ async fn cmd_grpc_go<R: Runtime>(
Some(connection.server_streaming(&service, &method, &msg, &metadata).await),
None,
),
(false, false) => {
(None, Some(connection.unary(&service, &method, &msg, &metadata).await))
}
(false, false) => (
None,
Some(
connection.unary(&service, &method, &msg, &metadata, client_cert).await,
),
),
};
if !method_desc.is_client_streaming() {
@@ -501,7 +517,7 @@ async fn cmd_grpc_go<R: Runtime>(
)
.unwrap();
}
Some(Err(e)) => {
Some(Err(yaak_grpc::error::Error::GrpcStreamError(e))) => {
app_handle
.db()
.upsert_grpc_event(
@@ -526,6 +542,21 @@ async fn cmd_grpc_go<R: Runtime>(
)
.unwrap();
}
Some(Err(e)) => {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
error: Some(e.to_string()),
status: Some(Code::Unknown as i32),
content: "Failed to connect".to_string(),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
&UpdateSource::from_window(&window),
)
.unwrap();
}
None => {
// Server streaming doesn't return the initial message
}
@@ -552,7 +583,7 @@ async fn cmd_grpc_go<R: Runtime>(
.unwrap();
stream.into_inner()
}
Some(Err(e)) => {
Some(Err(yaak_grpc::error::Error::GrpcStreamError(e))) => {
warn!("GRPC stream error {e:?}");
app_handle
.db()
@@ -579,6 +610,22 @@ async fn cmd_grpc_go<R: Runtime>(
.unwrap();
return;
}
Some(Err(e)) => {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
error: Some(e.to_string()),
status: Some(Code::Unknown as i32),
content: "Failed to connect".to_string(),
event_type: GrpcEventType::ConnectionEnd,
..base_event.clone()
},
&UpdateSource::from_window(&window),
)
.unwrap();
return;
}
None => return,
};
@@ -738,7 +785,7 @@ async fn cmd_http_response_body<R: Runtime>(
) -> YaakResult<FilterResponse> {
let body_path = match response.body_path {
None => {
return Err(GenericError("Response body path not set".to_string()));
return Ok(FilterResponse { content: String::new(), error: None });
}
Some(p) => p,
};
@@ -759,13 +806,27 @@ async fn cmd_http_response_body<R: Runtime>(
Some(filter) if !filter.is_empty() => {
Ok(plugin_manager.filter_data(&window, filter, &body, content_type).await?)
}
_ => Ok(FilterResponse {
content: body,
error: None,
}),
_ => Ok(FilterResponse { content: body, error: None }),
}
}
#[tauri::command]
async fn cmd_http_request_body<R: Runtime>(
app_handle: AppHandle<R>,
response_id: &str,
) -> YaakResult<Option<Vec<u8>>> {
let body_id = format!("{}.request", response_id);
let chunks = app_handle.blobs().get_chunks(&body_id)?;
if chunks.is_empty() {
return Ok(None);
}
// Concatenate all chunks
let body: Vec<u8> = chunks.into_iter().flat_map(|c| c.data).collect();
Ok(Some(body))
}
#[tauri::command]
async fn cmd_get_sse_events(file_path: &str) -> YaakResult<Vec<ServerSentEvent>> {
let body = fs::read(file_path)?;
@@ -787,6 +848,15 @@ async fn cmd_get_sse_events(file_path: &str) -> YaakResult<Vec<ServerSentEvent>>
Ok(events)
}
#[tauri::command]
async fn cmd_get_http_response_events<R: Runtime>(
app_handle: AppHandle<R>,
response_id: &str,
) -> YaakResult<Vec<HttpResponseEvent>> {
let events: Vec<HttpResponseEvent> = app_handle.db().list_http_response_events(response_id)?;
Ok(events)
}
#[tauri::command]
async fn cmd_import_data<R: Runtime>(
window: WebviewWindow<R>,
@@ -1061,6 +1131,7 @@ async fn cmd_send_http_request<R: Runtime>(
// that has not yet been saved in the DB.
request: HttpRequest,
) -> YaakResult<HttpResponse> {
let blobs = app_handle.blob_manager();
let response = app_handle.db().upsert_http_response(
&HttpResponse {
request_id: request.id.clone(),
@@ -1068,6 +1139,7 @@ async fn cmd_send_http_request<R: Runtime>(
..Default::default()
},
&UpdateSource::from_window(&window),
&blobs,
)?;
let (cancel_tx, mut cancel_rx) = tokio::sync::watch::channel(false);
@@ -1113,6 +1185,7 @@ async fn cmd_send_http_request<R: Runtime>(
..resp
},
&UpdateSource::from_window(&window),
&blobs,
)?
}
};
@@ -1120,23 +1193,6 @@ async fn cmd_send_http_request<R: Runtime>(
Ok(r)
}
fn response_err<R: Runtime>(
app_handle: &AppHandle<R>,
response: &HttpResponse,
error: String,
update_source: &UpdateSource,
) -> HttpResponse {
warn!("Failed to send request: {error:?}");
let mut response = response.clone();
response.state = HttpResponseState::Closed;
response.error = Some(error.clone());
response = app_handle
.db()
.update_http_response_if_id(&response, update_source)
.expect("Failed to update response");
response
}
#[tauri::command]
async fn cmd_install_plugin<R: Runtime>(
directory: &str,
@@ -1148,11 +1204,7 @@ async fn cmd_install_plugin<R: Runtime>(
plugin_manager.add_plugin_by_dir(&PluginContext::new(&window), &directory).await?;
Ok(app_handle.db().upsert_plugin(
&Plugin {
directory: directory.into(),
url,
..Default::default()
},
&Plugin { directory: directory.into(), url, ..Default::default() },
&UpdateSource::from_window(&window),
)?)
}
@@ -1418,11 +1470,13 @@ pub fn run() {
cmd_delete_send_history,
cmd_dismiss_notification,
cmd_export_data,
cmd_http_request_body,
cmd_http_response_body,
cmd_format_json,
cmd_get_http_authentication_summaries,
cmd_get_http_authentication_config,
cmd_get_sse_events,
cmd_get_http_response_events,
cmd_get_workspace_meta,
cmd_grpc_go,
cmd_grpc_reflect,
@@ -1473,11 +1527,7 @@ pub fn run() {
let _ = db.cancel_pending_websocket_connections();
});
}
RunEvent::WindowEvent {
event: WindowEvent::Focused(true),
label,
..
} => {
RunEvent::WindowEvent { event: WindowEvent::Focused(true), label, .. } => {
if cfg!(feature = "updater") {
// Run update check whenever the window is focused
let w = app_handle.get_webview_window(&label).unwrap();
@@ -1512,10 +1562,7 @@ pub fn run() {
}
});
}
RunEvent::WindowEvent {
event: WindowEvent::CloseRequested { .. },
..
} => {
RunEvent::WindowEvent { event: WindowEvent::CloseRequested { .. }, .. } => {
if let Err(e) = app_handle.save_window_state(StateFlags::all()) {
warn!("Failed to save window state {e:?}");
} else {
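One piece of this file worth calling out: the new cmd_http_request_body command reassembles a request body that was stored as blob chunks under the key "{response_id}.request". A minimal sketch of that concatenation, with Chunk as a stand-in for the blob manager's real chunk type:

// Chunk is a stand-in; the real blob manager chunk struct may differ.
struct Chunk {
    data: Vec<u8>,
}

// Mirrors cmd_http_request_body: no chunks means no stored body,
// otherwise flatten all chunks into one contiguous buffer.
fn reassemble(chunks: Vec<Chunk>) -> Option<Vec<u8>> {
    if chunks.is_empty() {
        return None;
    }
    Some(chunks.into_iter().flat_map(|c| c.data).collect())
}

fn main() {
    let chunks = vec![
        Chunk { data: b"{\"user\":".to_vec() },
        Chunk { data: b"\"demo\"}".to_vec() },
    ];
    assert_eq!(reassemble(chunks), Some(b"{\"user\":\"demo\"}".to_vec()));
}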

View File

@@ -85,13 +85,18 @@ impl YaakNotifier {
let license_check = {
use yaak_license::{LicenseCheckStatus, check_license};
match check_license(window).await {
Ok(LicenseCheckStatus::PersonalUse { .. }) => "personal".to_string(),
Ok(LicenseCheckStatus::CommercialUse) => "commercial".to_string(),
Ok(LicenseCheckStatus::InvalidLicense) => "invalid_license".to_string(),
Ok(LicenseCheckStatus::Trialing { .. }) => "trialing".to_string(),
Err(_) => "unknown".to_string(),
Ok(LicenseCheckStatus::PersonalUse { .. }) => "personal",
Ok(LicenseCheckStatus::Active { .. }) => "commercial",
Ok(LicenseCheckStatus::PastDue { .. }) => "past_due",
Ok(LicenseCheckStatus::Inactive { .. }) => "invalid_license",
Ok(LicenseCheckStatus::Trialing { .. }) => "trialing",
Ok(LicenseCheckStatus::Expired { .. }) => "expired",
Ok(LicenseCheckStatus::Error { .. }) => "error",
Err(_) => "unknown",
}
.to_string()
};
#[cfg(not(feature = "license"))]
let license_check = "disabled".to_string();

View File

@@ -12,6 +12,7 @@ use log::error;
use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use yaak_common::window::WorkspaceWindowTrait;
use yaak_models::blob_manager::BlobManagerExt;
use yaak_models::models::{HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use yaak_models::query_manager::QueryManagerExt;
@@ -78,9 +79,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::RenderGrpcRequestResponse(RenderGrpcRequestResponse {
@@ -99,9 +98,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = &RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
@@ -130,9 +127,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
environment_id.as_deref(),
)?;
let cb = PluginTemplateCallback::new(app_handle, &plugin_context, req.purpose);
let opt = RenderOptions {
error_behavior: RenderErrorBehavior::Throw,
};
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
}
@@ -200,6 +195,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let http_response = if http_request.id.is_empty() {
HttpResponse::default()
} else {
let blobs = window.blob_manager();
window.db().upsert_http_response(
&HttpResponse {
request_id: http_request.id.clone(),
@@ -207,6 +203,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
..Default::default()
},
&UpdateSource::Plugin,
&blobs,
)?
};

View File

@@ -80,12 +80,7 @@ pub async fn render_grpc_request<T: TemplateCallback>(
let url = parse_and_render(r.url.as_str(), vars, cb, &opt).await?;
Ok(GrpcRequest {
url,
metadata,
authentication,
..r.to_owned()
})
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
}
pub async fn render_http_request<T: TemplateCallback>(
@@ -162,14 +157,7 @@ pub async fn render_http_request<T: TemplateCallback>(
let url = parse_and_render(r.url.clone().as_str(), vars, cb, &opt).await?;
// This doesn't fit perfectly with the concept of "rendering" but it kind of does
let (url, url_parameters) = apply_path_placeholders(&url, url_parameters);
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}

View File

@@ -259,17 +259,11 @@ async fn start_integrated_update<R: Runtime>(
self.win.unlisten(self.id);
}
}
let _guard = Unlisten {
win: window,
id: event_id,
};
let _guard = Unlisten { win: window, id: event_id };
// 2) Emit the event now that listener is in place
let info = UpdateInfo {
version: update.version.to_string(),
downloaded,
reply_event_id: reply_id,
};
let info =
UpdateInfo { version: update.version.to_string(), downloaded, reply_event_id: reply_id };
window
.emit_to(window.label(), "update_available", &info)
.map_err(|e| GenericError(format!("Failed to emit update_available: {e}")))?;

View File

@@ -3,7 +3,8 @@ use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
use tauri::{
AppHandle, Emitter, LogicalSize, Manager, PhysicalSize, Runtime, WebviewUrl, WebviewWindow, WindowEvent
AppHandle, Emitter, LogicalSize, Manager, PhysicalSize, Runtime, WebviewUrl, WebviewWindow,
WindowEvent,
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;

View File

@@ -30,7 +30,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
],
)?;
#[cfg(target_os = "macos")] {
#[cfg(target_os = "macos")]
{
window_menu.set_as_windows_menu_for_nsapp()?;
}
@@ -48,7 +49,8 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
],
)?;
#[cfg(target_os = "macos")] {
#[cfg(target_os = "macos")]
{
help_menu.set_as_windows_menu_for_nsapp()?;
}
@@ -151,8 +153,11 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
.build(app_handle)?,
&MenuItemBuilder::with_id("dev.reset_size_record".to_string(), "Reset Size 16x9")
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.reset_size_record".to_string(),
"Reset Size 16x9",
)
.build(app_handle)?,
&MenuItemBuilder::with_id(
"dev.generate_theme_css".to_string(),
"Generate Theme CSS",

View File

@@ -10,3 +10,4 @@ reqwest = { workspace = true, features = ["system-proxy", "gzip"] }
thiserror = { workspace = true }
regex = "1.11.0"
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }

View File

@@ -1,4 +1,5 @@
pub mod window;
pub mod platform;
pub mod api_client;
pub mod error;
pub mod platform;
pub mod serde;
pub mod window;

View File

@@ -0,0 +1,23 @@
use serde_json::Value;
use std::collections::BTreeMap;
pub fn get_bool(v: &Value, key: &str, fallback: bool) -> bool {
match v.get(key) {
None => fallback,
Some(v) => v.as_bool().unwrap_or(fallback),
}
}
pub fn get_str<'a>(v: &'a Value, key: &str) -> &'a str {
match v.get(key) {
None => "",
Some(v) => v.as_str().unwrap_or_default(),
}
}
pub fn get_str_map<'a>(v: &'a BTreeMap<String, Value>, key: &str) -> &'a str {
match v.get(key) {
None => "",
Some(v) => v.as_str().unwrap_or_default(),
}
}
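A quick sketch of how these fallback helpers behave; the yaak_common::serde import path is an assumption based on the module list above:

use serde_json::json;
use yaak_common::serde::{get_bool, get_str}; // assumed export path

fn main() {
    let v = json!({ "verbose": true, "name": "staging" });
    assert!(get_bool(&v, "verbose", false));
    assert!(get_bool(&v, "missing", true)); // missing key falls back
    assert_eq!(get_str(&v, "name"), "staging");
    assert_eq!(get_str(&v, "missing"), ""); // missing key yields ""
}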

View File

@@ -96,18 +96,12 @@ impl EncryptionManager {
let workspace = tx.get_workspace(workspace_id)?;
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
tx.upsert_workspace(
&Workspace {
encryption_key_challenge,
..workspace
},
&Workspace { encryption_key_challenge, ..workspace },
&UpdateSource::Background,
)?;
Ok(tx.upsert_workspace_meta(
&WorkspaceMeta {
encryption_key: Some(encrypted_key.clone()),
..workspace_meta
},
&WorkspaceMeta { encryption_key: Some(encrypted_key.clone()), ..workspace_meta },
&UpdateSource::Background,
)?)
})?;

View File

@@ -39,9 +39,7 @@ impl WorkspaceKey {
}
pub(crate) fn from_raw_key(key: &[u8]) -> Self {
Self {
key: Key::<XChaCha20Poly1305>::clone_from_slice(key),
}
Self { key: Key::<XChaCha20Poly1305>::clone_from_slice(key) }
}
pub(crate) fn raw_key(&self) -> &[u8] {

View File

@@ -34,8 +34,5 @@ pub(crate) async fn list() -> Result<Fonts> {
ui_fonts.sort();
editor_fonts.sort();
Ok(Fonts {
ui_fonts,
editor_fonts,
})
Ok(Fonts { ui_fonts, editor_fonts })
}

View File

@@ -1,10 +1,10 @@
use crate::error::Result;
use std::path::Path;
use std::process::{Command, Stdio};
use crate::error::Result;
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;

View File

@@ -1,4 +1,7 @@
use crate::commands::{add, add_credential, add_remote, branch, checkout, commit, delete_branch, fetch_all, initialize, log, merge_branch, pull, push, remotes, rm_remote, status, unstage};
use crate::commands::{
add, add_credential, add_remote, branch, checkout, commit, delete_branch, fetch_all,
initialize, log, merge_branch, pull, push, remotes, rm_remote, status, unstage,
};
use tauri::{
Runtime, generate_handler,
plugin::{Builder, TauriPlugin},
@@ -10,6 +13,7 @@ mod branch;
mod commands;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
@@ -21,7 +25,6 @@ mod repository;
mod status;
mod unstage;
mod util;
pub mod error;
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-git")

View File

@@ -37,10 +37,7 @@ pub(crate) fn git_pull(dir: &Path) -> Result<PullResult> {
info!("Pulled status={} {combined}", out.status);
if combined.to_lowercase().contains("could not read") {
return Ok(PullResult::NeedsCredentials {
url: remote_url.to_string(),
error: None,
});
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if combined.to_lowercase().contains("unable to access") {
@@ -58,9 +55,7 @@ pub(crate) fn git_pull(dir: &Path) -> Result<PullResult> {
return Ok(PullResult::UpToDate);
}
Ok(PullResult::Success {
message: format!("Pulled from {}/{}", remote_name, branch_name),
})
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
}
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {

View File

@@ -37,10 +37,7 @@ pub(crate) fn git_push(dir: &Path) -> Result<PushResult> {
info!("Pushed to repo status={} {combined}", out.status);
if combined.to_lowercase().contains("could not read") {
return Ok(PushResult::NeedsCredentials {
url: remote_url.to_string(),
error: None,
});
return Ok(PushResult::NeedsCredentials { url: remote_url.to_string(), error: None });
}
if combined.to_lowercase().contains("unable to access") {
@@ -58,7 +55,5 @@ pub(crate) fn git_push(dir: &Path) -> Result<PushResult> {
return Err(GenericError(format!("Failed to push {combined}")));
}
Ok(PushResult::Success {
message: format!("Pushed to {}/{}", remote_name, branch_name),
})
Ok(PushResult::Success { message: format!("Pushed to {}/{}", remote_name, branch_name) })
}

View File

@@ -28,10 +28,7 @@ pub(crate) fn git_remotes(dir: &Path) -> Result<Vec<GitRemote>> {
continue;
}
};
remotes.push(GitRemote {
name: name.to_string(),
url: r.url().map(|u| u.to_string()),
});
remotes.push(GitRemote { name: name.to_string(), url: r.url().map(|u| u.to_string()) });
}
Ok(remotes)
@@ -40,10 +37,7 @@ pub(crate) fn git_remotes(dir: &Path) -> Result<Vec<GitRemote>> {
pub(crate) fn git_add_remote(dir: &Path, name: &str, url: &str) -> Result<GitRemote> {
let repo = open_repo(dir)?;
repo.remote(name, url)?;
Ok(GitRemote {
name: name.to_string(),
url: Some(url.to_string()),
})
Ok(GitRemote { name: name.to_string(), url: Some(url.to_string()) })
}
pub(crate) fn git_rm_remote(dir: &Path, name: &str) -> Result<()> {

View File

@@ -1,5 +1,5 @@
use std::path::Path;
use crate::error::Error::{GitRepoNotFound, GitUnknown};
use std::path::Path;
pub(crate) fn open_repo(dir: &Path) -> crate::error::Result<git2::Repository> {
match git2::Repository::discover(dir) {
@@ -8,4 +8,3 @@ pub(crate) fn open_repo(dir: &Path) -> crate::error::Result<git2::Repository> {
Err(e) => Err(GitUnknown(e)),
}
}

View File

@@ -1,6 +1,6 @@
use std::path::Path;
use log::info;
use crate::repository::open_repo;
use log::info;
use std::path::Path;
pub(crate) fn git_unstage(dir: &Path, rela_path: &Path) -> crate::error::Result<()> {
let repo = open_repo(dir)?;
@@ -25,4 +25,3 @@ pub(crate) fn git_unstage(dir: &Path, rela_path: &Path) -> crate::error::Result<
Ok(())
}

View File

@@ -24,4 +24,5 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-http = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"

View File

@@ -1,3 +1,5 @@
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::manager::decorate_req;
use crate::transport::get_transport;
use async_recursion::async_recursion;
@@ -18,6 +20,7 @@ use tonic_reflection::pb::v1::{
};
use tonic_reflection::pb::v1::{ExtensionRequest, FileDescriptorResponse};
use tonic_reflection::pb::{v1, v1alpha};
use yaak_tls::ClientCertificateConfig;
pub struct AutoReflectionClient<T = Client<HttpsConnector<HttpConnector>, BoxBody>> {
use_v1alpha: bool,
@@ -26,20 +29,20 @@ pub struct AutoReflectionClient<T = Client<HttpsConnector<HttpConnector>, BoxBod
}
impl AutoReflectionClient {
pub fn new(uri: &Uri, validate_certificates: bool) -> Self {
pub fn new(
uri: &Uri,
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
) -> Result<Self> {
let client_v1 = v1::server_reflection_client::ServerReflectionClient::with_origin(
get_transport(validate_certificates),
get_transport(validate_certificates, client_cert.clone())?,
uri.clone(),
);
let client_v1alpha = v1alpha::server_reflection_client::ServerReflectionClient::with_origin(
get_transport(validate_certificates),
get_transport(validate_certificates, client_cert.clone())?,
uri.clone(),
);
AutoReflectionClient {
use_v1alpha: false,
client_v1,
client_v1alpha,
}
Ok(AutoReflectionClient { use_v1alpha: false, client_v1, client_v1alpha })
}
#[async_recursion]
@@ -47,36 +50,40 @@ impl AutoReflectionClient {
&mut self,
message: MessageRequest,
metadata: &BTreeMap<String, String>,
) -> Result<MessageResponse, String> {
) -> Result<MessageResponse> {
let reflection_request = ServerReflectionRequest {
host: "".into(), // Doesn't matter
message_request: Some(message.clone()),
};
if self.use_v1alpha {
let mut request = Request::new(tokio_stream::once(to_v1alpha_request(reflection_request)));
decorate_req(metadata, &mut request).map_err(|e| e.to_string())?;
let mut request =
Request::new(tokio_stream::once(to_v1alpha_request(reflection_request)));
decorate_req(metadata, &mut request)?;
self.client_v1alpha
.server_reflection_info(request)
.await
.map_err(|e| match e.code() {
tonic::Code::Unavailable => "Failed to connect to endpoint".to_string(),
tonic::Code::Unauthenticated => "Authentication failed".to_string(),
tonic::Code::DeadlineExceeded => "Deadline exceeded".to_string(),
_ => e.to_string(),
tonic::Code::Unavailable => {
GenericError("Failed to connect to endpoint".to_string())
}
tonic::Code::Unauthenticated => {
GenericError("Authentication failed".to_string())
}
tonic::Code::DeadlineExceeded => GenericError("Deadline exceeded".to_string()),
_ => GenericError(e.to_string()),
})?
.into_inner()
.next()
.await
.expect("streamed response")
.map_err(|e| e.to_string())?
.ok_or(GenericError("Missing reflection message".to_string()))??
.message_response
.ok_or("No reflection response".to_string())
.ok_or(GenericError("No reflection response".to_string()))
.map(|resp| to_v1_msg_response(resp))
} else {
let mut request = Request::new(tokio_stream::once(reflection_request));
decorate_req(metadata, &mut request).map_err(|e| e.to_string())?;
decorate_req(metadata, &mut request)?;
let resp = self.client_v1.server_reflection_info(request).await;
match resp {
@@ -92,18 +99,19 @@ impl AutoReflectionClient {
},
}
.map_err(|e| match e.code() {
tonic::Code::Unavailable => "Failed to connect to endpoint".to_string(),
tonic::Code::Unauthenticated => "Authentication failed".to_string(),
tonic::Code::DeadlineExceeded => "Deadline exceeded".to_string(),
_ => e.to_string(),
tonic::Code::Unavailable => {
GenericError("Failed to connect to endpoint".to_string())
}
tonic::Code::Unauthenticated => GenericError("Authentication failed".to_string()),
tonic::Code::DeadlineExceeded => GenericError("Deadline exceeded".to_string()),
_ => GenericError(e.to_string()),
})?
.into_inner()
.next()
.await
.expect("streamed response")
.map_err(|e| e.to_string())?
.ok_or(GenericError("Missing reflection message".to_string()))??
.message_response
.ok_or("No reflection response".to_string())
.ok_or(GenericError("No reflection response".to_string()))
}
}
}
@@ -128,9 +136,7 @@ fn to_v1_msg_response(
service: v
.service
.iter()
.map(|s| ServiceResponse {
name: s.name.clone(),
})
.map(|s| ServiceResponse { name: s.name.clone() })
.collect(),
})
}
@@ -164,10 +170,7 @@ fn to_v1alpha_msg_request(
extension_number,
containing_type,
}) => v1alpha::server_reflection_request::MessageRequest::FileContainingExtension(
v1alpha::ExtensionRequest {
extension_number,
containing_type,
},
v1alpha::ExtensionRequest { extension_number, containing_type },
),
MessageRequest::AllExtensionNumbersOfType(v) => {
v1alpha::server_reflection_request::MessageRequest::AllExtensionNumbersOfType(v)
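The hunks above mostly show error mapping, so the client's protocol strategy is easy to miss: it tries the v1 reflection API first and, if that fails, flips use_v1alpha and retries on the older API. A stand-in sketch of that shape (call_v1 and call_v1alpha are hypothetical placeholders; the real method is async and recursive):

struct ReflectionClient {
    use_v1alpha: bool,
}

impl ReflectionClient {
    // Hypothetical synchronous sketch of the v1 -> v1alpha fallback.
    fn send(&mut self, msg: &str) -> Result<String, String> {
        if self.use_v1alpha {
            return call_v1alpha(msg);
        }
        match call_v1(msg) {
            Ok(resp) => Ok(resp),
            Err(_) => {
                // Assume the server only speaks the older API; remember that
                // so later calls skip v1 entirely, then retry.
                self.use_v1alpha = true;
                call_v1alpha(msg)
            }
        }
    }
}

// Placeholders standing in for the two reflection RPCs.
fn call_v1(_msg: &str) -> Result<String, String> {
    Err("v1 reflection not implemented on server".to_string())
}

fn call_v1alpha(msg: &str) -> Result<String, String> {
    Ok(format!("v1alpha handled: {msg}"))
}

fn main() {
    let mut client = ReflectionClient { use_v1alpha: false };
    assert!(client.send("ListServices").is_ok());
    assert!(client.use_v1alpha); // fallback sticks for subsequent calls
}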

View File

@@ -1,7 +1,7 @@
use prost_reflect::prost::Message;
use prost_reflect::{DynamicMessage, MethodDescriptor};
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
use tonic::Status;
use tonic::codec::{Codec, DecodeBuf, Decoder, EncodeBuf, Encoder};
#[derive(Clone)]
pub struct DynamicCodec(MethodDescriptor);

View File

@@ -0,0 +1,51 @@
use crate::manager::GrpcStreamError;
use prost::DecodeError;
use serde::{Serialize, Serializer};
use serde_json::Error as SerdeJsonError;
use std::io;
use thiserror::Error;
use tonic::Status;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
TlsError(#[from] yaak_tls::error::Error),
#[error(transparent)]
TonicError(#[from] Status),
#[error("Prost reflect error: {0:?}")]
ProstReflectError(#[from] prost_reflect::DescriptorError),
#[error(transparent)]
DeserializerError(#[from] SerdeJsonError),
#[error(transparent)]
GrpcStreamError(#[from] GrpcStreamError),
#[error(transparent)]
GrpcDecodeError(#[from] DecodeError),
#[error(transparent)]
GrpcInvalidMetadataKeyError(#[from] tonic::metadata::errors::InvalidMetadataKey),
#[error(transparent)]
GrpcInvalidMetadataValueError(#[from] tonic::metadata::errors::InvalidMetadataValue),
#[error(transparent)]
IOError(#[from] io::Error),
#[error("GRPC error: {0}")]
GenericError(String),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}
pub type Result<T> = std::result::Result<T, Error>;
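A practical consequence of the #[from] conversions above: call sites in this crate can replace map_err(|e| e.to_string()) with a bare ?, as the manager.rs hunks below do. A small in-crate illustration; parse_message is hypothetical:

use crate::error::Result;

// `?` converts serde_json::Error into Error::DeserializerError via the
// #[from] impl generated by thiserror above.
fn parse_message(json: &str) -> Result<serde_json::Value> {
    let value: serde_json::Value = serde_json::from_str(json)?;
    Ok(value)
}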

View File

@@ -11,9 +11,7 @@ struct JsonSchemaGenerator {
impl JsonSchemaGenerator {
pub fn new() -> Self {
JsonSchemaGenerator {
msg_mapping: HashMap::new(),
}
JsonSchemaGenerator { msg_mapping: HashMap::new() }
}
pub fn generate_json_schema(msg: MessageDescriptor) -> JsonSchemaEntry {
@@ -297,16 +295,10 @@ impl JsonSchemaEntry {
impl JsonSchemaEntry {
pub fn object() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Object),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Object), ..Default::default() }
}
pub fn boolean() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Boolean),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Boolean), ..Default::default() }
}
pub fn number<S: Into<String>>(format: S) -> Self {
JsonSchemaEntry {
@@ -316,10 +308,7 @@ impl JsonSchemaEntry {
}
}
pub fn string() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::String),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::String), ..Default::default() }
}
pub fn string_with_format<S: Into<String>>(format: S) -> Self {
@@ -330,16 +319,10 @@ impl JsonSchemaEntry {
}
}
pub fn reference<S: AsRef<str>>(ref_: S) -> Self {
JsonSchemaEntry {
ref_: Some(format!("#/$defs/{}", ref_.as_ref())),
..Default::default()
}
JsonSchemaEntry { ref_: Some(format!("#/$defs/{}", ref_.as_ref())), ..Default::default() }
}
pub fn root_reference() -> Self{
JsonSchemaEntry {
ref_: Some("#".to_string()),
..Default::default()
}
pub fn root_reference() -> Self {
JsonSchemaEntry { ref_: Some("#".to_string()), ..Default::default() }
}
pub fn array(item: JsonSchemaEntry) -> Self {
JsonSchemaEntry {
@@ -349,11 +332,7 @@ impl JsonSchemaEntry {
}
}
pub fn enums(enums: Vec<String>) -> Self {
JsonSchemaEntry {
type_: Some(JsonType::String),
enum_: Some(enums),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::String), enum_: Some(enums), ..Default::default() }
}
pub fn map(value_type: JsonSchemaEntry) -> Self {
@@ -365,10 +344,7 @@ impl JsonSchemaEntry {
}
pub fn null() -> Self {
JsonSchemaEntry {
type_: Some(JsonType::Null),
..Default::default()
}
JsonSchemaEntry { type_: Some(JsonType::Null), ..Default::default() }
}
}

View File

@@ -2,16 +2,17 @@ use prost_reflect::{DynamicMessage, MethodDescriptor, SerializeOptions};
use serde::{Deserialize, Serialize};
use serde_json::Deserializer;
mod any;
mod client;
mod codec;
pub mod error;
mod json_schema;
pub mod manager;
mod reflection;
mod transport;
mod any;
pub use tonic::metadata::*;
pub use tonic::Code;
pub use tonic::metadata::*;
pub fn serialize_options() -> SerializeOptions {
SerializeOptions::new().skip_default_fields(false)

View File

@@ -1,4 +1,6 @@
use crate::codec::DynamicCodec;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::reflection::{
fill_pool_from_files, fill_pool_from_reflection, method_desc_to_path, reflect_types_for_message,
};
@@ -7,11 +9,14 @@ use crate::{MethodDefinition, ServiceDefinition, json_schema};
use hyper_rustls::HttpsConnector;
use hyper_util::client::legacy::Client;
use hyper_util::client::legacy::connect::HttpConnector;
use log::warn;
use log::{info, warn};
pub use prost_reflect::DynamicMessage;
use prost_reflect::{DescriptorPool, MethodDescriptor, ServiceDescriptor};
use serde_json::Deserializer;
use std::collections::BTreeMap;
use std::error::Error;
use std::fmt;
use std::fmt::Display;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
@@ -23,6 +28,7 @@ use tonic::body::BoxBody;
use tonic::metadata::{MetadataKey, MetadataValue};
use tonic::transport::Uri;
use tonic::{IntoRequest, IntoStreamingRequest, Request, Response, Status, Streaming};
use yaak_tls::ClientCertificateConfig;
#[derive(Clone)]
pub struct GrpcConnection {
@@ -33,40 +39,49 @@ pub struct GrpcConnection {
}
#[derive(Default, Debug)]
pub struct StreamError {
pub struct GrpcStreamError {
pub message: String,
pub status: Option<Status>,
}
impl From<String> for StreamError {
fn from(value: String) -> Self {
StreamError {
message: value.to_string(),
status: None,
impl Error for GrpcStreamError {}
impl Display for GrpcStreamError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.status {
Some(status) => write!(f, "[{}] {}", status, self.message),
None => write!(f, "{}", self.message),
}
}
}
impl From<Status> for StreamError {
impl From<String> for GrpcStreamError {
fn from(value: String) -> Self {
GrpcStreamError { message: value.to_string(), status: None }
}
}
impl From<Status> for GrpcStreamError {
fn from(s: Status) -> Self {
StreamError {
message: s.message().to_string(),
status: Some(s),
}
GrpcStreamError { message: s.message().to_string(), status: Some(s) }
}
}
impl GrpcConnection {
pub async fn method(&self, service: &str, method: &str) -> Result<MethodDescriptor, String> {
pub async fn method(&self, service: &str, method: &str) -> Result<MethodDescriptor> {
let service = self.service(service).await?;
let method =
service.methods().find(|m| m.name() == method).ok_or("Failed to find method")?;
let method = service
.methods()
.find(|m| m.name() == method)
.ok_or(GenericError("Failed to find method".to_string()))?;
Ok(method)
}
async fn service(&self, service: &str) -> Result<ServiceDescriptor, String> {
async fn service(&self, service: &str) -> Result<ServiceDescriptor> {
let pool = self.pool.read().await;
let service = pool.get_service_by_name(service).ok_or("Failed to find service")?;
let service = pool
.get_service_by_name(service)
.ok_or(GenericError("Failed to find service".to_string()))?;
Ok(service)
}
@@ -76,26 +91,27 @@ impl GrpcConnection {
method: &str,
message: &str,
metadata: &BTreeMap<String, String>,
) -> Result<Response<DynamicMessage>, StreamError> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<Response<DynamicMessage>> {
if self.use_reflection {
reflect_types_for_message(self.pool.clone(), &self.uri, message, metadata).await?;
reflect_types_for_message(self.pool.clone(), &self.uri, message, metadata, client_cert)
.await?;
}
let method = &self.method(&service, &method).await?;
let input_message = method.input();
let mut deserializer = Deserializer::from_str(message);
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
.map_err(|e| e.to_string())?;
deserializer.end().unwrap();
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)?;
deserializer.end()?;
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = req_message.into_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
decorate_req(metadata, &mut req)?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.unwrap();
client.ready().await.map_err(|e| GenericError(format!("Failed to connect: {}", e)))?;
Ok(client.unary(req, path, codec).await?)
}
@@ -106,7 +122,8 @@ impl GrpcConnection {
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<Response<Streaming<DynamicMessage>>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -114,15 +131,19 @@ impl GrpcConnection {
let uri = self.uri.clone();
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tauri::async_runtime::block_on(async move {
if use_reflection {
if let Err(e) = reflect_types_for_message(pool, &uri, &json, &md).await {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
}
@@ -143,9 +164,9 @@ impl GrpcConnection {
let codec = DynamicCodec::new(method.clone());
let mut req = mapped_stream.into_streaming_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
decorate_req(metadata, &mut req)?;
client.ready().await.map_err(|e| e.to_string())?;
client.ready().await.map_err(|e| GenericError(format!("Failed to connect: {}", e)))?;
Ok(client.streaming(req, path, codec).await?)
}
@@ -155,7 +176,8 @@ impl GrpcConnection {
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
) -> Result<Response<DynamicMessage>, StreamError> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<Response<DynamicMessage>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -163,15 +185,19 @@ impl GrpcConnection {
let uri = self.uri.clone();
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tauri::async_runtime::block_on(async move {
if use_reflection {
if let Err(e) = reflect_types_for_message(pool, &uri, &json, &md).await {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
}
@@ -192,13 +218,13 @@ impl GrpcConnection {
let codec = DynamicCodec::new(method.clone());
let mut req = mapped_stream.into_streaming_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
decorate_req(metadata, &mut req)?;
client.ready().await.unwrap();
client.client_streaming(req, path, codec).await.map_err(|e| StreamError {
message: e.message().to_string(),
status: Some(e),
})
client.ready().await.map_err(|e| GenericError(format!("Failed to connect: {}", e)))?;
Ok(client
.client_streaming(req, path, codec)
.await
.map_err(|e| GrpcStreamError { message: e.message().to_string(), status: Some(e) })?)
}
pub async fn server_streaming(
@@ -207,23 +233,22 @@ impl GrpcConnection {
method: &str,
message: &str,
metadata: &BTreeMap<String, String>,
) -> Result<Response<Streaming<DynamicMessage>>, StreamError> {
) -> Result<Response<Streaming<DynamicMessage>>> {
let method = &self.method(&service, &method).await?;
let input_message = method.input();
let mut deserializer = Deserializer::from_str(message);
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)
.map_err(|e| e.to_string())?;
deserializer.end().unwrap();
let req_message = DynamicMessage::deserialize(input_message, &mut deserializer)?;
deserializer.end()?;
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
let mut req = req_message.into_request();
decorate_req(metadata, &mut req).map_err(|e| e.to_string())?;
decorate_req(metadata, &mut req)?;
let path = method_desc_to_path(method);
let codec = DynamicCodec::new(method.clone());
client.ready().await.map_err(|e| e.to_string())?;
client.ready().await.map_err(|e| GenericError(format!("Failed to connect: {}", e)))?;
Ok(client.server_streaming(req, path, codec).await?)
}
}
@@ -236,14 +261,17 @@ pub struct GrpcHandle {
impl GrpcHandle {
pub fn new(app_handle: &AppHandle) -> Self {
let pools = BTreeMap::new();
Self {
pools,
app_handle: app_handle.clone(),
}
Self { pools, app_handle: app_handle.clone() }
}
}
impl GrpcHandle {
/// Remove cached descriptor pool for the given key, if present.
pub fn invalidate_pool(&mut self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) {
let key = make_pool_key(id, uri, proto_files);
self.pools.remove(&key);
}
pub async fn reflect(
&mut self,
id: &str,
@@ -251,16 +279,24 @@ impl GrpcHandle {
proto_files: &Vec<PathBuf>,
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
) -> Result<bool, String> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<bool> {
let server_reflection = proto_files.is_empty();
let key = make_pool_key(id, uri, proto_files);
// If we already have a pool for this key, reuse it and avoid re-reflection
if self.pools.contains_key(&key) {
return Ok(server_reflection);
}
let pool = if server_reflection {
let full_uri = uri_from_str(uri)?;
fill_pool_from_reflection(&full_uri, metadata, validate_certificates).await
fill_pool_from_reflection(&full_uri, metadata, validate_certificates, client_cert).await
} else {
fill_pool_from_files(&self.app_handle, proto_files).await
}?;
self.pools.insert(make_pool_key(id, uri, proto_files), pool.clone());
self.pools.insert(key, pool.clone());
Ok(server_reflection)
}
@@ -271,21 +307,27 @@ impl GrpcHandle {
proto_files: &Vec<PathBuf>,
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
) -> Result<Vec<ServiceDefinition>, String> {
// Ensure reflection is up-to-date
self.reflect(id, uri, proto_files, metadata, validate_certificates).await?;
client_cert: Option<ClientCertificateConfig>,
skip_cache: bool,
) -> Result<Vec<ServiceDefinition>> {
// Ensure we have a pool; reflect only if missing
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
info!("Reflecting gRPC services for {} at {}", id, uri);
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
.await?;
}
let pool = self.get_pool(id, uri, proto_files).ok_or("Failed to get pool".to_string())?;
let pool = self
.get_pool(id, uri, proto_files)
.ok_or(GenericError("Failed to get pool".to_string()))?;
Ok(self.services_from_pool(&pool))
}
fn services_from_pool(&self, pool: &DescriptorPool) -> Vec<ServiceDefinition> {
pool.services()
.map(|s| {
let mut def = ServiceDefinition {
name: s.full_name().to_string(),
methods: vec![],
};
let mut def =
ServiceDefinition { name: s.full_name().to_string(), methods: vec![] };
for method in s.methods() {
let input_message = method.input();
def.methods.push(MethodDefinition {
@@ -296,7 +338,7 @@ impl GrpcHandle {
&pool,
input_message,
))
.unwrap(),
.expect("Failed to serialize JSON schema"),
})
}
def
@@ -311,18 +353,27 @@ impl GrpcHandle {
proto_files: &Vec<PathBuf>,
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
) -> Result<GrpcConnection, String> {
let use_reflection =
self.reflect(id, uri, proto_files, metadata, validate_certificates).await?;
let pool = self.get_pool(id, uri, proto_files).ok_or("Failed to get pool")?.clone();
client_cert: Option<ClientCertificateConfig>,
) -> Result<GrpcConnection> {
let use_reflection = proto_files.is_empty();
if self.get_pool(id, uri, proto_files).is_none() {
self.reflect(
id,
uri,
proto_files,
metadata,
validate_certificates,
client_cert.clone(),
)
.await?;
}
let pool = self
.get_pool(id, uri, proto_files)
.ok_or(GenericError("Failed to get pool".to_string()))?
.clone();
let uri = uri_from_str(uri)?;
let conn = get_transport(validate_certificates);
Ok(GrpcConnection {
pool: Arc::new(RwLock::new(pool)),
use_reflection,
conn,
uri,
})
let conn = get_transport(validate_certificates, client_cert.clone())?;
Ok(GrpcConnection { pool: Arc::new(RwLock::new(pool)), use_reflection, conn, uri })
}
fn get_pool(&self, id: &str, uri: &str, proto_files: &Vec<PathBuf>) -> Option<&DescriptorPool> {
@@ -333,22 +384,20 @@ impl GrpcHandle {
pub(crate) fn decorate_req<T>(
metadata: &BTreeMap<String, String>,
req: &mut Request<T>,
) -> Result<(), String> {
) -> Result<()> {
for (k, v) in metadata {
req.metadata_mut().insert(
MetadataKey::from_str(k.as_str()).map_err(|e| e.to_string())?,
MetadataValue::from_str(v.as_str()).map_err(|e| e.to_string())?,
);
req.metadata_mut()
.insert(MetadataKey::from_str(k.as_str())?, MetadataValue::from_str(v.as_str())?);
}
Ok(())
}
fn uri_from_str(uri_str: &str) -> Result<Uri, String> {
fn uri_from_str(uri_str: &str) -> Result<Uri> {
match Uri::from_str(uri_str) {
Ok(uri) => Ok(uri),
Err(err) => {
// Uri::from_str basically only returns "invalid format" so we add more context here
Err(format!("Failed to parse URL, {}", err.to_string()))
Err(GenericError(format!("Failed to parse URL, {}", err.to_string())))
}
}
}
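Taken together, the caching hunks in this file implement a simple policy: descriptor pools are keyed by (id, uri, proto files), reused whenever present, and rebuilt only when missing or when the caller passes skip_cache (the manual "Refresh Schema" path). A compact stand-in sketch; Pool replaces prost_reflect::DescriptorPool, and the real reflection call is async:

use std::collections::BTreeMap;

#[derive(Clone)]
struct Pool; // stand-in for prost_reflect::DescriptorPool

struct Handle {
    pools: BTreeMap<String, Pool>,
}

fn make_pool_key(id: &str, uri: &str) -> String {
    // The real key also folds in the proto file paths.
    format!("{id}::{uri}")
}

impl Handle {
    fn pool_for(&mut self, id: &str, uri: &str, skip_cache: bool) -> Pool {
        let key = make_pool_key(id, uri);
        if skip_cache {
            // "Refresh Schema": drop the cached pool to force re-reflection.
            self.pools.remove(&key);
        }
        self.pools
            .entry(key)
            .or_insert_with(|| Pool /* re-run reflection here */)
            .clone()
    }
}

fn main() {
    let mut handle = Handle { pools: BTreeMap::new() };
    let _cached = handle.pool_for("req_1", "https://localhost:50051", false);
    let _fresh = handle.pool_for("req_1", "https://localhost:50051", true); // rebuilds
}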

View File

@@ -1,5 +1,7 @@
use crate::any::collect_any_types;
use crate::client::AutoReflectionClient;
use crate::error::Error::GenericError;
use crate::error::Result;
use anyhow::anyhow;
use async_recursion::async_recursion;
use log::{debug, info, warn};
@@ -21,11 +23,12 @@ use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
use yaak_tls::ClientCertificateConfig;
pub async fn fill_pool_from_files(
app_handle: &AppHandle,
paths: &Vec<PathBuf>,
) -> Result<DescriptorPool, String> {
) -> Result<DescriptorPool> {
let mut pool = DescriptorPool::new();
let random_file_name = format!("{}.desc", uuid::Uuid::new_v4());
let desc_path = temp_dir().join(random_file_name);
@@ -103,18 +106,18 @@ pub async fn fill_pool_from_files(
.expect("yaakprotoc failed to run");
if !out.status.success() {
return Err(format!(
return Err(GenericError(format!(
"protoc failed with status {}: {}",
out.status.code().unwrap(),
String::from_utf8_lossy(out.stderr.as_slice())
));
)));
}
let bytes = fs::read(desc_path).await.map_err(|e| e.to_string())?;
let fdp = FileDescriptorSet::decode(bytes.deref()).map_err(|e| e.to_string())?;
pool.add_file_descriptor_set(fdp).map_err(|e| e.to_string())?;
let bytes = fs::read(desc_path).await?;
let fdp = FileDescriptorSet::decode(bytes.deref())?;
pool.add_file_descriptor_set(fdp)?;
fs::remove_file(desc_path).await.map_err(|e| e.to_string())?;
fs::remove_file(desc_path).await?;
Ok(pool)
}
@@ -123,9 +126,10 @@ pub async fn fill_pool_from_reflection(
uri: &Uri,
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
) -> Result<DescriptorPool, String> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<DescriptorPool> {
let mut pool = DescriptorPool::new();
let mut client = AutoReflectionClient::new(uri, validate_certificates);
let mut client = AutoReflectionClient::new(uri, validate_certificates, client_cert)?;
for service in list_services(&mut client, metadata).await? {
if service == "grpc.reflection.v1alpha.ServerReflection" {
@@ -144,7 +148,7 @@ pub async fn fill_pool_from_reflection(
async fn list_services(
client: &mut AutoReflectionClient,
metadata: &BTreeMap<String, String>,
) -> Result<Vec<String>, String> {
) -> Result<Vec<String>> {
let response =
client.send_reflection_request(MessageRequest::ListServices("".into()), metadata).await?;
@@ -171,7 +175,7 @@ async fn file_descriptor_set_from_service_name(
{
Ok(resp) => resp,
Err(e) => {
warn!("Error fetching file descriptor for service {}: {}", service_name, e);
warn!("Error fetching file descriptor for service {}: {:?}", service_name, e);
return;
}
};
@@ -195,7 +199,8 @@ pub(crate) async fn reflect_types_for_message(
uri: &Uri,
json: &str,
metadata: &BTreeMap<String, String>,
) -> Result<(), String> {
client_cert: Option<ClientCertificateConfig>,
) -> Result<()> {
// 1. Collect all Any types in the JSON
let mut extra_types = Vec::new();
collect_any_types(json, &mut extra_types);
@@ -204,7 +209,7 @@ pub(crate) async fn reflect_types_for_message(
return Ok(()); // nothing to do
}
let mut client = AutoReflectionClient::new(uri, false);
let mut client = AutoReflectionClient::new(uri, false, client_cert)?;
for extra_type in extra_types {
{
let guard = pool.read().await;
@@ -217,9 +222,9 @@ pub(crate) async fn reflect_types_for_message(
let resp = match client.send_reflection_request(req, metadata).await {
Ok(r) => r,
Err(e) => {
return Err(format!(
"Error sending reflection request for @type \"{extra_type}\": {e}",
));
return Err(GenericError(format!(
"Error sending reflection request for @type \"{extra_type}\": {e:?}",
)));
}
};
let files = match resp {
@@ -286,7 +291,7 @@ async fn file_descriptor_set_by_filename(
panic!("Expected a FileDescriptorResponse variant")
}
Err(e) => {
warn!("Error fetching file descriptor for {}: {}", filename, e);
warn!("Error fetching file descriptor for {}: {:?}", filename, e);
return;
}
};
@@ -322,10 +327,7 @@ mod topology {
T: Eq + std::hash::Hash + Clone,
{
pub fn new() -> Self {
SimpleTopoSort {
out_graph: HashMap::new(),
in_graph: HashMap::new(),
}
SimpleTopoSort { out_graph: HashMap::new(), in_graph: HashMap::new() }
}
pub fn insert<I: IntoIterator<Item = T>>(&mut self, node: T, deps: I) {
@@ -371,10 +373,7 @@ mod topology {
}
}
SimpleTopoSortIter {
data,
zero_indegree,
}
SimpleTopoSortIter { data, zero_indegree }
}
}

View File

@@ -1,25 +1,40 @@
use crate::error::Result;
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::client::legacy::Client;
use hyper_util::client::legacy::connect::HttpConnector;
use hyper_util::rt::TokioExecutor;
use log::info;
use tonic::body::BoxBody;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
// I think ALPN breaks this because we're specifying http2_only
const WITH_ALPN: bool = false;
pub(crate) fn get_transport(validate_certificates: bool) -> Client<HttpsConnector<HttpConnector>, BoxBody> {
let tls_config = yaak_http::tls::get_config(validate_certificates, WITH_ALPN);
pub(crate) fn get_transport(
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
) -> Result<Client<HttpsConnector<HttpConnector>, BoxBody>> {
let tls_config = get_tls_config(validate_certificates, WITH_ALPN, client_cert.clone())?;
let mut http = HttpConnector::new();
http.enforce_http(false);
let connector =
HttpsConnectorBuilder::new().with_tls_config(tls_config).https_or_http().enable_http2().build();
let connector = HttpsConnectorBuilder::new()
.with_tls_config(tls_config)
.https_or_http()
.enable_http2()
.build();
let client = Client::builder(TokioExecutor::new())
.pool_max_idle_per_host(0)
.http2_only(true)
.build(connector);
client
info!(
"Created gRPC client validate_certs={} client_cert={}",
validate_certificates,
client_cert.is_some()
);
Ok(client)
}
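// Hedged usage sketch (not part of the diff): obtaining the HTTP/2-only
// transport with certificate validation enabled and no client certificate,
// assuming `get_transport` and `Result` as defined above.
fn example_transport() -> Result<()> {
    let _client = get_transport(true, None)?;
    Ok(())
}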

View File

@@ -5,17 +5,27 @@ edition = "2024"
publish = false
[dependencies]
yaak-models = { workspace = true }
regex = "1.11.1"
rustls = { workspace = true, default-features = false, features = ["ring"] }
rustls-platform-verifier = { workspace = true }
urlencoding = "2.1.3"
tauri = { workspace = true }
tokio = { workspace = true }
reqwest = { workspace = true, features = ["multipart", "cookies", "gzip", "brotli", "deflate", "json", "rustls-tls-manual-roots-no-provider", "socks", "http2"] }
reqwest_cookie_store = { workspace = true }
thiserror = { workspace = true }
serde = { workspace = true, features = ["derive"] }
async-compression = { version = "0.4", features = ["tokio", "gzip", "deflate", "brotli", "zstd"] }
async-trait = "0.1"
brotli = "7"
bytes = "1.5.0"
flate2 = "1"
futures-util = "0.3"
zstd = "0.13"
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }
tower-service = "0.3.3"
log = { workspace = true }
mime_guess = "2.0.5"
regex = "1.11.1"
reqwest = { workspace = true, features = ["cookies", "rustls-tls-manual-roots-no-provider", "socks", "http2", "stream"] }
reqwest_cookie_store = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
tauri = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt", "fs", "io-util"] }
tokio-util = { version = "0.7", features = ["codec", "io", "io-util"] }
tower-service = "0.3.3"
urlencoding = "2.1.3"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-tls = { workspace = true }

View File

@@ -0,0 +1,78 @@
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};
/// A stream that chains multiple AsyncRead sources together
pub(crate) struct ChainedReader {
readers: Vec<ReaderType>,
current_index: usize,
current_reader: Option<Box<dyn AsyncRead + Send + Unpin + 'static>>,
}
#[derive(Clone)]
pub(crate) enum ReaderType {
Bytes(Vec<u8>),
FilePath(String),
}
impl ChainedReader {
pub(crate) fn new(readers: Vec<ReaderType>) -> Self {
Self { readers, current_index: 0, current_reader: None }
}
}
impl AsyncRead for ChainedReader {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
loop {
// Try to read from current reader if we have one
if let Some(ref mut reader) = self.current_reader {
let before_len = buf.filled().len();
return match Pin::new(reader).poll_read(cx, buf) {
Poll::Ready(Ok(())) => {
if buf.filled().len() == before_len && buf.remaining() > 0 {
// Current reader is exhausted, move to next
self.current_reader = None;
continue;
}
Poll::Ready(Ok(()))
}
Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
Poll::Pending => Poll::Pending,
};
}
// We need to get the next reader
if self.current_index >= self.readers.len() {
// No more readers
return Poll::Ready(Ok(()));
}
// Get the next reader
let reader_type = self.readers[self.current_index].clone();
self.current_index += 1;
match reader_type {
ReaderType::Bytes(bytes) => {
self.current_reader = Some(Box::new(io::Cursor::new(bytes)));
}
ReaderType::FilePath(path) => {
// File opening must happen synchronously inside poll_read, so as a known
// limitation we open with blocking std::fs and convert to a tokio File
match std::fs::File::open(&path) {
Ok(file) => {
// Convert std File to tokio File
let tokio_file = tokio::fs::File::from_std(file);
self.current_reader = Some(Box::new(tokio_file));
}
Err(e) => return Poll::Ready(Err(e)),
}
}
}
}
}
}
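// Hedged usage sketch (not part of the diff): chaining two in-memory parts,
// assuming `ChainedReader` and `ReaderType` as defined above.
#[cfg(test)]
mod chained_reader_example {
    use super::*;
    use tokio::io::AsyncReadExt;
    #[tokio::test]
    async fn reads_parts_in_order() {
        let readers = vec![
            ReaderType::Bytes(b"hello ".to_vec()),
            ReaderType::Bytes(b"world".to_vec()),
        ];
        let mut chained = ChainedReader::new(readers);
        let mut out = Vec::new();
        chained.read_to_end(&mut out).await.unwrap();
        assert_eq!(out, b"hello world");
    }
}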

View File

@@ -1,12 +1,10 @@
use crate::dns::LocalhostResolver;
use crate::error::Result;
use crate::tls;
use log::{debug, warn};
use reqwest::redirect::Policy;
use reqwest::{Client, Proxy};
use log::{debug, info, warn};
use reqwest::{Client, Proxy, redirect};
use reqwest_cookie_store::CookieStoreMutex;
use std::sync::Arc;
use std::time::Duration;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
#[derive(Clone)]
pub struct HttpConnectionProxySettingAuth {
@@ -28,35 +26,33 @@ pub enum HttpConnectionProxySetting {
#[derive(Clone)]
pub struct HttpConnectionOptions {
pub follow_redirects: bool,
pub id: String,
pub validate_certificates: bool,
pub proxy: HttpConnectionProxySetting,
pub cookie_provider: Option<Arc<CookieStoreMutex>>,
pub timeout: Option<Duration>,
pub client_certificate: Option<ClientCertificateConfig>,
}
impl HttpConnectionOptions {
pub(crate) fn build_client(&self) -> Result<Client> {
let mut client = Client::builder()
.connection_verbose(true)
.gzip(true)
.brotli(true)
.deflate(true)
.redirect(redirect::Policy::none())
// Decompression is handled by HttpTransaction, not reqwest
.no_gzip()
.no_brotli()
.no_deflate()
.referer(false)
.tls_info(true);
// Configure TLS
client = client.use_preconfigured_tls(tls::get_config(self.validate_certificates, true));
// Configure TLS with optional client certificate
let config =
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
// Configure DNS resolver
client = client.dns_resolver(LocalhostResolver::new());
// Configure redirects
client = client.redirect(match self.follow_redirects {
true => Policy::limited(10), // TODO: Handle redirects natively
false => Policy::none(),
});
// Configure cookie provider
if let Some(p) = &self.cookie_provider {
client = client.cookie_provider(Arc::clone(&p));
@@ -68,22 +64,18 @@ impl HttpConnectionOptions {
HttpConnectionProxySetting::Disabled => {
client = client.no_proxy();
}
HttpConnectionProxySetting::Enabled {
http,
https,
auth,
bypass,
} => {
HttpConnectionProxySetting::Enabled { http, https, auth, bypass } => {
for p in build_enabled_proxy(http, https, auth, bypass) {
client = client.proxy(p)
}
}
}
// Configure timeout
if let Some(d) = self.timeout {
client = client.timeout(d);
}
info!(
"Building new HTTP client validate_certificates={} client_cert={}",
self.validate_certificates,
self.client_certificate.is_some()
);
Ok(client.build()?)
}
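// Hedged usage sketch (not part of the diff): assembling options for a client
// whose redirects and decompression are handled by HttpTransaction. The `id`
// and timeout values here are illustrative assumptions.
fn example_build() -> Result<Client> {
    let opts = HttpConnectionOptions {
        follow_redirects: true,
        id: "example-connection".to_string(),
        validate_certificates: true,
        proxy: HttpConnectionProxySetting::Disabled,
        cookie_provider: None,
        timeout: Some(Duration::from_secs(30)),
        client_certificate: None,
    };
    opts.build_client()
}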

View File

@@ -0,0 +1,188 @@
use crate::error::{Error, Result};
use async_compression::tokio::bufread::{
BrotliDecoder, DeflateDecoder as AsyncDeflateDecoder, GzipDecoder,
ZstdDecoder as AsyncZstdDecoder,
};
use flate2::read::{DeflateDecoder, GzDecoder};
use std::io::Read;
use tokio::io::{AsyncBufRead, AsyncRead};
/// Supported compression encodings
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ContentEncoding {
Gzip,
Deflate,
Brotli,
Zstd,
Identity,
}
impl ContentEncoding {
/// Parse a Content-Encoding header value into an encoding type.
/// Returns Identity for unknown or missing encodings.
pub fn from_header(value: Option<&str>) -> Self {
match value.map(|s| s.trim().to_lowercase()).as_deref() {
Some("gzip") | Some("x-gzip") => ContentEncoding::Gzip,
Some("deflate") => ContentEncoding::Deflate,
Some("br") => ContentEncoding::Brotli,
Some("zstd") => ContentEncoding::Zstd,
_ => ContentEncoding::Identity,
}
}
}
/// Result of decompression, containing both the decompressed data and size info
#[derive(Debug)]
pub struct DecompressResult {
pub data: Vec<u8>,
pub compressed_size: u64,
pub decompressed_size: u64,
}
/// Decompress data based on the Content-Encoding.
/// Returns the original data unchanged if encoding is Identity or unknown.
pub fn decompress(data: Vec<u8>, encoding: ContentEncoding) -> Result<DecompressResult> {
let compressed_size = data.len() as u64;
let decompressed = match encoding {
ContentEncoding::Identity => data,
ContentEncoding::Gzip => decompress_gzip(&data)?,
ContentEncoding::Deflate => decompress_deflate(&data)?,
ContentEncoding::Brotli => decompress_brotli(&data)?,
ContentEncoding::Zstd => decompress_zstd(&data)?,
};
let decompressed_size = decompressed.len() as u64;
Ok(DecompressResult { data: decompressed, compressed_size, decompressed_size })
}
fn decompress_gzip(data: &[u8]) -> Result<Vec<u8>> {
let mut decoder = GzDecoder::new(data);
let mut decompressed = Vec::new();
decoder
.read_to_end(&mut decompressed)
.map_err(|e| Error::DecompressionError(format!("gzip decompression failed: {}", e)))?;
Ok(decompressed)
}
fn decompress_deflate(data: &[u8]) -> Result<Vec<u8>> {
let mut decoder = DeflateDecoder::new(data);
let mut decompressed = Vec::new();
decoder
.read_to_end(&mut decompressed)
.map_err(|e| Error::DecompressionError(format!("deflate decompression failed: {}", e)))?;
Ok(decompressed)
}
fn decompress_brotli(data: &[u8]) -> Result<Vec<u8>> {
let mut decompressed = Vec::new();
brotli::BrotliDecompress(&mut std::io::Cursor::new(data), &mut decompressed)
.map_err(|e| Error::DecompressionError(format!("brotli decompression failed: {}", e)))?;
Ok(decompressed)
}
fn decompress_zstd(data: &[u8]) -> Result<Vec<u8>> {
zstd::stream::decode_all(std::io::Cursor::new(data))
.map_err(|e| Error::DecompressionError(format!("zstd decompression failed: {}", e)))
}
/// Create a streaming decompressor that wraps an async reader.
/// Returns an AsyncRead that decompresses data on-the-fly.
pub fn streaming_decoder<R: AsyncBufRead + Unpin + Send + 'static>(
reader: R,
encoding: ContentEncoding,
) -> Box<dyn AsyncRead + Unpin + Send> {
match encoding {
ContentEncoding::Identity => Box::new(reader),
ContentEncoding::Gzip => Box::new(GzipDecoder::new(reader)),
ContentEncoding::Deflate => Box::new(AsyncDeflateDecoder::new(reader)),
ContentEncoding::Brotli => Box::new(BrotliDecoder::new(reader)),
ContentEncoding::Zstd => Box::new(AsyncZstdDecoder::new(reader)),
}
}
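// Hedged usage sketch (not part of the diff): decompressing a gzip stream
// on-the-fly, assuming `streaming_decoder` and `ContentEncoding` above.
async fn example_streaming<R>(reader: R) -> Result<Vec<u8>>
where
    R: AsyncBufRead + Unpin + Send + 'static,
{
    use tokio::io::AsyncReadExt;
    let mut decoder = streaming_decoder(reader, ContentEncoding::Gzip);
    let mut out = Vec::new();
    decoder
        .read_to_end(&mut out)
        .await
        .map_err(|e| Error::DecompressionError(e.to_string()))?;
    Ok(out)
}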
#[cfg(test)]
mod tests {
use super::*;
use flate2::Compression;
use flate2::write::GzEncoder;
use std::io::Write;
#[test]
fn test_content_encoding_from_header() {
assert_eq!(ContentEncoding::from_header(Some("gzip")), ContentEncoding::Gzip);
assert_eq!(ContentEncoding::from_header(Some("x-gzip")), ContentEncoding::Gzip);
assert_eq!(ContentEncoding::from_header(Some("GZIP")), ContentEncoding::Gzip);
assert_eq!(ContentEncoding::from_header(Some("deflate")), ContentEncoding::Deflate);
assert_eq!(ContentEncoding::from_header(Some("br")), ContentEncoding::Brotli);
assert_eq!(ContentEncoding::from_header(Some("zstd")), ContentEncoding::Zstd);
assert_eq!(ContentEncoding::from_header(Some("identity")), ContentEncoding::Identity);
assert_eq!(ContentEncoding::from_header(Some("unknown")), ContentEncoding::Identity);
assert_eq!(ContentEncoding::from_header(None), ContentEncoding::Identity);
}
#[test]
fn test_decompress_identity() {
let data = b"hello world".to_vec();
let result = decompress(data.clone(), ContentEncoding::Identity).unwrap();
assert_eq!(result.data, data);
assert_eq!(result.compressed_size, 11);
assert_eq!(result.decompressed_size, 11);
}
#[test]
fn test_decompress_gzip() {
// Compress some data with gzip
let original = b"hello world, this is a test of gzip compression";
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder.write_all(original).unwrap();
let compressed = encoder.finish().unwrap();
let result = decompress(compressed.clone(), ContentEncoding::Gzip).unwrap();
assert_eq!(result.data, original);
assert_eq!(result.compressed_size, compressed.len() as u64);
assert_eq!(result.decompressed_size, original.len() as u64);
}
#[test]
fn test_decompress_deflate() {
// Compress some data with deflate
let original = b"hello world, this is a test of deflate compression";
let mut encoder = flate2::write::DeflateEncoder::new(Vec::new(), Compression::default());
encoder.write_all(original).unwrap();
let compressed = encoder.finish().unwrap();
let result = decompress(compressed.clone(), ContentEncoding::Deflate).unwrap();
assert_eq!(result.data, original);
assert_eq!(result.compressed_size, compressed.len() as u64);
assert_eq!(result.decompressed_size, original.len() as u64);
}
#[test]
fn test_decompress_brotli() {
// Compress some data with brotli
let original = b"hello world, this is a test of brotli compression";
let mut compressed = Vec::new();
let mut writer = brotli::CompressorWriter::new(&mut compressed, 4096, 4, 22);
writer.write_all(original).unwrap();
drop(writer);
let result = decompress(compressed.clone(), ContentEncoding::Brotli).unwrap();
assert_eq!(result.data, original);
assert_eq!(result.compressed_size, compressed.len() as u64);
assert_eq!(result.decompressed_size, original.len() as u64);
}
#[test]
fn test_decompress_zstd() {
// Compress some data with zstd
let original = b"hello world, this is a test of zstd compression";
let compressed = zstd::stream::encode_all(std::io::Cursor::new(original), 3).unwrap();
let result = decompress(compressed.clone(), ContentEncoding::Zstd).unwrap();
assert_eq!(result.data, original);
assert_eq!(result.compressed_size, compressed.len() as u64);
assert_eq!(result.decompressed_size, original.len() as u64);
}
}

View File

@@ -3,8 +3,26 @@ use thiserror::Error;
#[derive(Error, Debug)]
pub enum Error {
#[error(transparent)]
#[error("Client error: {0:?}")]
Client(#[from] reqwest::Error),
#[error(transparent)]
TlsError(#[from] yaak_tls::error::Error),
#[error("Request failed with {0:?}")]
RequestError(String),
#[error("Request canceled")]
RequestCanceledError,
#[error("Timeout of {0:?} reached")]
RequestTimeout(std::time::Duration),
#[error("Decompression error: {0}")]
DecompressionError(String),
#[error("Failed to read response body: {0}")]
BodyReadError(String),
}
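// Hedged sketch (not part of the diff): how thiserror renders these variants.
#[cfg(test)]
mod error_display_example {
    use super::*;
    #[test]
    fn renders_timeout_message() {
        let e = Error::RequestTimeout(std::time::Duration::from_secs(30));
        assert_eq!(e.to_string(), "Timeout of 30s reached");
    }
}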
impl Serialize for Error {

View File

@@ -2,12 +2,18 @@ use crate::manager::HttpConnectionManager;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{Manager, Runtime};
mod chained_reader;
pub mod client;
pub mod decompress;
pub mod dns;
pub mod error;
pub mod manager;
pub mod path_placeholders;
pub mod tls;
mod proto;
pub mod sender;
pub mod tee_reader;
pub mod transaction;
pub mod types;
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-http")

View File

@@ -20,19 +20,19 @@ impl HttpConnectionManager {
}
}
pub async fn get_client(&self, id: &str, opt: &HttpConnectionOptions) -> Result<Client> {
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
let mut connections = self.connections.write().await;
let id = opt.id.clone();
// Clean old connections
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
if let Some((c, last_used)) = connections.get_mut(id) {
if let Some((c, last_used)) = connections.get_mut(&id) {
info!("Re-using HTTP client {id}");
*last_used = Instant::now();
return Ok(c.clone());
}
info!("Building new HTTP client {id}");
let c = opt.build_client()?;
connections.insert(id.into(), (c.clone(), Instant::now()));
Ok(c)
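// Hedged usage sketch (not part of the diff): a second call within the TTL
// window returns the cached client for the same `HttpConnectionOptions::id`.
async fn example_reuse(
    manager: &HttpConnectionManager,
    opt: &HttpConnectionOptions,
) -> Result<()> {
    let _first = manager.get_client(opt).await?; // builds and caches
    let _second = manager.get_client(opt).await?; // re-used, TTL refreshed
    Ok(())
}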

View File

@@ -2,7 +2,7 @@ use yaak_models::models::HttpUrlParameter;
pub fn apply_path_placeholders(
url: &str,
parameters: Vec<HttpUrlParameter>,
parameters: &Vec<HttpUrlParameter>,
) -> (String, Vec<HttpUrlParameter>) {
let mut new_parameters = Vec::new();
@@ -18,7 +18,7 @@ pub fn apply_path_placeholders(
// Keep it as a query param only if the placeholder didn't modify the URL
if old_url_string == *url {
new_parameters.push(p);
new_parameters.push(p.to_owned());
}
}
@@ -55,12 +55,8 @@ mod placeholder_tests {
#[test]
fn placeholder_middle() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo/bar"),
"https://example.com/xxx/bar",
@@ -69,12 +65,8 @@ mod placeholder_tests {
#[test]
fn placeholder_end() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo"),
"https://example.com/xxx",
@@ -83,12 +75,8 @@ mod placeholder_tests {
#[test]
fn placeholder_query() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foo?:foo"),
"https://example.com/xxx?:foo",
@@ -125,12 +113,8 @@ mod placeholder_tests {
#[test]
fn placeholder_prefix() {
let p = HttpUrlParameter {
name: ":foo".into(),
value: "xxx".into(),
enabled: true,
id: None,
};
let p =
HttpUrlParameter { name: ":foo".into(), value: "xxx".into(), enabled: true, id: None };
assert_eq!(
replace_path_placeholder(&p, "https://example.com/:foooo"),
"https://example.com/:foooo",
@@ -172,7 +156,7 @@ mod placeholder_tests {
..Default::default()
};
let (url, url_parameters) = apply_path_placeholders(&req.url, req.url_parameters);
let (url, url_parameters) = apply_path_placeholders(&req.url, &req.url_parameters);
// Verify the placeholder was applied to the URL
assert_eq!(url, "example.com/aaa/bar");

View File

@@ -0,0 +1,29 @@
use reqwest::Url;
use std::str::FromStr;
pub(crate) fn ensure_proto(url_str: &str) -> String {
if url_str.is_empty() {
return "".to_string();
}
if url_str.starts_with("http://") || url_str.starts_with("https://") {
return url_str.to_string();
}
// Url::from_str will fail without a proto, so add one
let parseable_url = format!("http://{}", url_str);
if let Ok(u) = Url::from_str(parseable_url.as_str()) {
match u.host() {
Some(host) => {
let h = host.to_string();
// These TLDs force HTTPS
if h.ends_with(".app") || h.ends_with(".dev") || h.ends_with(".page") {
return format!("https://{url_str}");
}
}
None => {}
}
}
format!("http://{url_str}")
}
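// Hedged sketch (not part of the diff): expected `ensure_proto` behavior.
#[cfg(test)]
mod ensure_proto_example {
    use super::*;
    #[test]
    fn adds_a_scheme_when_missing() {
        assert_eq!(ensure_proto("example.com"), "http://example.com");
        assert_eq!(ensure_proto("example.dev"), "https://example.dev"); // .dev forces HTTPS
        assert_eq!(ensure_proto("https://example.com"), "https://example.com");
    }
}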

View File

@@ -0,0 +1,483 @@
use crate::decompress::{ContentEncoding, streaming_decoder};
use crate::error::{Error, Result};
use crate::types::{SendableBody, SendableHttpRequest};
use async_trait::async_trait;
use futures_util::StreamExt;
use reqwest::{Client, Method, Version};
use std::collections::HashMap;
use std::fmt::Display;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::Duration;
use tokio::io::{AsyncRead, AsyncReadExt, BufReader, ReadBuf};
use tokio::sync::mpsc;
use tokio_util::io::StreamReader;
#[derive(Debug, Clone)]
pub enum RedirectBehavior {
/// 307/308: Method and body are preserved
Preserve,
/// 303 or 301/302 with POST: Method changed to GET, body dropped
DropBody,
}
#[derive(Debug, Clone)]
pub enum HttpResponseEvent {
Setting(String, String),
Info(String),
Redirect {
url: String,
status: u16,
behavior: RedirectBehavior,
},
SendUrl {
method: String,
path: String,
},
ReceiveUrl {
version: Version,
status: String,
},
HeaderUp(String, String),
HeaderDown(String, String),
ChunkSent {
bytes: usize,
},
ChunkReceived {
bytes: usize,
},
}
impl Display for HttpResponseEvent {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
HttpResponseEvent::Setting(name, value) => write!(f, "* Setting {}={}", name, value),
HttpResponseEvent::Info(s) => write!(f, "* {}", s),
HttpResponseEvent::Redirect { url, status, behavior } => {
let behavior_str = match behavior {
RedirectBehavior::Preserve => "preserve",
RedirectBehavior::DropBody => "drop body",
};
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
}
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
HttpResponseEvent::ReceiveUrl { version, status } => {
write!(f, "< {} {}", version_to_str(version), status)
}
HttpResponseEvent::HeaderUp(name, value) => write!(f, "> {}: {}", name, value),
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
}
}
}
impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
fn from(event: HttpResponseEvent) -> Self {
use yaak_models::models::HttpResponseEventData as D;
match event {
HttpResponseEvent::Setting(name, value) => D::Setting { name, value },
HttpResponseEvent::Info(message) => D::Info { message },
HttpResponseEvent::Redirect { url, status, behavior } => D::Redirect {
url,
status,
behavior: match behavior {
RedirectBehavior::Preserve => "preserve".to_string(),
RedirectBehavior::DropBody => "drop_body".to_string(),
},
},
HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
HttpResponseEvent::ReceiveUrl { version, status } => {
D::ReceiveUrl { version: format!("{:?}", version), status }
}
HttpResponseEvent::HeaderUp(name, value) => D::HeaderUp { name, value },
HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
}
}
}
/// Statistics about the body after consumption
#[derive(Debug, Default, Clone)]
pub struct BodyStats {
/// Size of the body as received over the wire (before decompression)
pub size_compressed: u64,
/// Size of the body after decompression
pub size_decompressed: u64,
}
/// An AsyncRead wrapper that sends chunk events as data is read
pub struct TrackingRead<R> {
inner: R,
event_tx: mpsc::Sender<HttpResponseEvent>,
ended: bool,
}
impl<R> TrackingRead<R> {
pub fn new(inner: R, event_tx: mpsc::Sender<HttpResponseEvent>) -> Self {
Self { inner, event_tx, ended: false }
}
}
impl<R: AsyncRead + Unpin> AsyncRead for TrackingRead<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let before = buf.filled().len();
let result = Pin::new(&mut self.inner).poll_read(cx, buf);
if let Poll::Ready(Ok(())) = &result {
let bytes_read = buf.filled().len() - before;
if bytes_read > 0 {
// Ignore send errors - receiver may have been dropped or channel is full
let _ =
self.event_tx.try_send(HttpResponseEvent::ChunkReceived { bytes: bytes_read });
} else if !self.ended {
self.ended = true;
}
}
result
}
}
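// Hedged usage sketch (not part of the diff): each successful read through
// `TrackingRead` emits a ChunkReceived event on the channel.
async fn example_tracking() -> std::io::Result<()> {
    use tokio::io::AsyncReadExt;
    let (tx, mut rx) = mpsc::channel(8);
    let mut reader = TrackingRead::new(std::io::Cursor::new(b"data".to_vec()), tx);
    let mut out = Vec::new();
    reader.read_to_end(&mut out).await?;
    // The channel should now hold a ChunkReceived { bytes: 4 } event
    let _ = rx.try_recv();
    Ok(())
}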
/// Type alias for the body stream
type BodyStream = Pin<Box<dyn AsyncRead + Send>>;
/// HTTP response with deferred body consumption.
/// Headers are available immediately after send(), body can be consumed in different ways.
/// Note: Debug is manually implemented since BodyStream doesn't implement Debug.
pub struct HttpResponse {
/// HTTP status code
pub status: u16,
/// HTTP status reason phrase (e.g., "OK", "Not Found")
pub status_reason: Option<String>,
/// Response headers
pub headers: HashMap<String, String>,
/// Request headers
pub request_headers: HashMap<String, String>,
/// Content-Length from headers (may differ from actual body size)
pub content_length: Option<u64>,
/// Final URL (after redirects)
pub url: String,
/// Remote address of the server
pub remote_addr: Option<String>,
/// HTTP version (e.g., "HTTP/1.1", "HTTP/2")
pub version: Option<String>,
/// The body stream (consumed when calling bytes(), text(), write_to_file(), or drain())
body_stream: Option<BodyStream>,
/// Content-Encoding for decompression
encoding: ContentEncoding,
}
impl std::fmt::Debug for HttpResponse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("HttpResponse")
.field("status", &self.status)
.field("status_reason", &self.status_reason)
.field("headers", &self.headers)
.field("content_length", &self.content_length)
.field("url", &self.url)
.field("remote_addr", &self.remote_addr)
.field("version", &self.version)
.field("body_stream", &"<stream>")
.field("encoding", &self.encoding)
.finish()
}
}
impl HttpResponse {
/// Create a new HttpResponse with an unconsumed body stream
#[allow(clippy::too_many_arguments)]
pub fn new(
status: u16,
status_reason: Option<String>,
headers: HashMap<String, String>,
request_headers: HashMap<String, String>,
content_length: Option<u64>,
url: String,
remote_addr: Option<String>,
version: Option<String>,
body_stream: BodyStream,
encoding: ContentEncoding,
) -> Self {
Self {
status,
status_reason,
headers,
request_headers,
content_length,
url,
remote_addr,
version,
body_stream: Some(body_stream),
encoding,
}
}
/// Consume the body and return it as bytes (loads entire body into memory).
/// Also decompresses the body if Content-Encoding is set.
pub async fn bytes(mut self) -> Result<(Vec<u8>, BodyStats)> {
let stream = self.body_stream.take().ok_or_else(|| {
Error::RequestError("Response body has already been consumed".to_string())
})?;
let buf_reader = BufReader::new(stream);
let mut decoder = streaming_decoder(buf_reader, self.encoding);
let mut decompressed = Vec::new();
let mut bytes_read = 0u64;
// Read through the decoder in chunks to track compressed size
let mut buf = [0u8; 8192];
loop {
match decoder.read(&mut buf).await {
Ok(0) => break,
Ok(n) => {
decompressed.extend_from_slice(&buf[..n]);
bytes_read += n as u64;
}
Err(e) => {
return Err(Error::BodyReadError(e.to_string()));
}
}
}
let stats = BodyStats {
// We can't easily track the compressed size when streaming through the
// decoder, so use Content-Length when present and fall back to the
// decompressed byte count otherwise
size_compressed: self.content_length.unwrap_or(bytes_read),
size_decompressed: decompressed.len() as u64,
};
Ok((decompressed, stats))
}
/// Consume the body and return it as a UTF-8 string.
pub async fn text(self) -> Result<(String, BodyStats)> {
let (bytes, stats) = self.bytes().await?;
let text = String::from_utf8(bytes)
.map_err(|e| Error::RequestError(format!("Response is not valid UTF-8: {}", e)))?;
Ok((text, stats))
}
/// Take the body stream for manual consumption.
/// Returns an AsyncRead that decompresses on-the-fly if Content-Encoding is set.
/// The caller is responsible for reading and processing the stream.
pub fn into_body_stream(&mut self) -> Result<Box<dyn AsyncRead + Unpin + Send>> {
let stream = self.body_stream.take().ok_or_else(|| {
Error::RequestError("Response body has already been consumed".to_string())
})?;
let buf_reader = BufReader::new(stream);
let decoder = streaming_decoder(buf_reader, self.encoding);
Ok(decoder)
}
/// Read and discard the body without buffering it (useful for redirects).
pub async fn drain(mut self) -> Result<()> {
let stream = self.body_stream.take().ok_or_else(|| {
Error::RequestError("Response body has already been consumed".to_string())
})?;
// Just read and discard all bytes
let mut reader = stream;
let mut buf = [0u8; 8192];
loop {
match reader.read(&mut buf).await {
Ok(0) => break,
Ok(_) => continue,
Err(e) => {
return Err(Error::RequestError(format!(
"Failed to drain response body: {}",
e
)));
}
}
}
Ok(())
}
}
/// Trait for sending HTTP requests
#[async_trait]
pub trait HttpSender: Send + Sync {
/// Send an HTTP request and return the response with headers.
/// The body is not consumed until you call bytes(), text(), write_to_file(), or drain().
/// Events are sent through the provided channel.
async fn send(
&self,
request: SendableHttpRequest,
event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse>;
}
/// Reqwest-based implementation of HttpSender
pub struct ReqwestSender {
client: Client,
}
impl ReqwestSender {
/// Create a new ReqwestSender with a default client
pub fn new() -> Result<Self> {
let client = Client::builder().build().map_err(Error::Client)?;
Ok(Self { client })
}
/// Create a new ReqwestSender with a custom client
pub fn with_client(client: Client) -> Self {
Self { client }
}
}
#[async_trait]
impl HttpSender for ReqwestSender {
async fn send(
&self,
request: SendableHttpRequest,
event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
// Helper to send events (ignores errors if receiver is dropped or channel is full)
let send_event = |event: HttpResponseEvent| {
let _ = event_tx.try_send(event);
};
// Parse the HTTP method
let method = Method::from_bytes(request.method.as_bytes())
.map_err(|e| Error::RequestError(format!("Invalid HTTP method: {}", e)))?;
// Build the request
let mut req_builder = self.client.request(method, &request.url);
// Add headers
for header in request.headers {
req_builder = req_builder.header(&header.0, &header.1);
}
// Configure timeout
if let Some(d) = request.options.timeout
&& !d.is_zero()
{
req_builder = req_builder.timeout(d);
}
// Add body
match request.body {
None => {}
Some(SendableBody::Bytes(bytes)) => {
req_builder = req_builder.body(bytes);
}
Some(SendableBody::Stream(stream)) => {
// Convert AsyncRead stream to reqwest Body
let stream = tokio_util::io::ReaderStream::new(stream);
let body = reqwest::Body::wrap_stream(stream);
req_builder = req_builder.body(body);
}
}
// Send the request
let sendable_req = req_builder.build()?;
send_event(HttpResponseEvent::Setting(
"timeout".to_string(),
if request.options.timeout.unwrap_or_default().is_zero() {
"Infinity".to_string()
} else {
format!("{:?}", request.options.timeout)
},
));
send_event(HttpResponseEvent::SendUrl {
path: sendable_req.url().path().to_string(),
method: sendable_req.method().to_string(),
});
let mut request_headers = HashMap::new();
for (name, value) in sendable_req.headers() {
let v = value.to_str().unwrap_or_default().to_string();
request_headers.insert(name.to_string(), v.clone());
send_event(HttpResponseEvent::HeaderUp(name.to_string(), v));
}
send_event(HttpResponseEvent::Info("Sending request to server".to_string()));
// Map some errors to our own, so they look nicer
let response = self.client.execute(sendable_req).await.map_err(|e| {
if reqwest::Error::is_timeout(&e) {
Error::RequestTimeout(
request.options.timeout.unwrap_or(Duration::from_secs(0)).clone(),
)
} else {
Error::Client(e)
}
})?;
let status = response.status().as_u16();
let status_reason = response.status().canonical_reason().map(|s| s.to_string());
let url = response.url().to_string();
let remote_addr = response.remote_addr().map(|a| a.to_string());
let version = Some(version_to_str(&response.version()));
let content_length = response.content_length();
send_event(HttpResponseEvent::ReceiveUrl {
version: response.version(),
status: response.status().to_string(),
});
// Extract headers
let mut headers = HashMap::new();
for (key, value) in response.headers() {
if let Ok(v) = value.to_str() {
send_event(HttpResponseEvent::HeaderDown(key.to_string(), v.to_string()));
headers.insert(key.to_string(), v.to_string());
}
}
// Determine content encoding for decompression
// HTTP headers are case-insensitive, so we need to search for any casing
let encoding = ContentEncoding::from_header(
headers
.iter()
.find(|(k, _)| k.eq_ignore_ascii_case("content-encoding"))
.map(|(_, v)| v.as_str()),
);
// Get the byte stream instead of loading into memory
let byte_stream = response.bytes_stream();
// Convert the stream to an AsyncRead
let stream_reader = StreamReader::new(
byte_stream.map(|result| result.map_err(|e| std::io::Error::other(e))),
);
// Wrap the stream with tracking to emit chunk received events via the same channel
let tracking_reader = TrackingRead::new(stream_reader, event_tx);
let body_stream: BodyStream = Box::pin(tracking_reader);
Ok(HttpResponse::new(
status,
status_reason,
headers,
request_headers,
content_length,
url,
remote_addr,
version,
body_stream,
encoding,
))
}
}
fn version_to_str(version: &Version) -> String {
match *version {
Version::HTTP_09 => "HTTP/0.9".to_string(),
Version::HTTP_10 => "HTTP/1.0".to_string(),
Version::HTTP_11 => "HTTP/1.1".to_string(),
Version::HTTP_2 => "HTTP/2".to_string(),
Version::HTTP_3 => "HTTP/3".to_string(),
_ => "unknown".to_string(),
}
}
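// Hedged usage sketch (not part of the diff): consuming a response from any
// `HttpSender` implementation, assuming the types defined above.
async fn example_consume(sender: &dyn HttpSender, req: SendableHttpRequest) -> Result<()> {
    let (event_tx, _event_rx) = mpsc::channel(100);
    let response = sender.send(req, event_tx).await?;
    println!("status: {}", response.status);
    let (_body, stats) = response.bytes().await?; // decompresses per Content-Encoding
    println!("{} bytes ({} on the wire)", stats.size_decompressed, stats.size_compressed);
    Ok(())
}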

View File

@@ -0,0 +1,159 @@
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};
use tokio::sync::mpsc;
/// A reader that forwards all read data to a channel while also returning it to the caller.
/// This allows capturing request body data as it's being sent.
/// Uses an unbounded channel to ensure all data is captured without blocking the request.
pub struct TeeReader<R> {
inner: R,
tx: mpsc::UnboundedSender<Vec<u8>>,
}
impl<R> TeeReader<R> {
pub fn new(inner: R, tx: mpsc::UnboundedSender<Vec<u8>>) -> Self {
Self { inner, tx }
}
}
impl<R: AsyncRead + Unpin> AsyncRead for TeeReader<R> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
let before_len = buf.filled().len();
match Pin::new(&mut self.inner).poll_read(cx, buf) {
Poll::Ready(Ok(())) => {
let after_len = buf.filled().len();
if after_len > before_len {
// Data was read, send a copy to the channel
let data = buf.filled()[before_len..after_len].to_vec();
// Send to unbounded channel - this never blocks
// Ignore error if receiver is closed
let _ = self.tx.send(data);
}
Poll::Ready(Ok(()))
}
Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
Poll::Pending => Poll::Pending,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::io::Cursor;
use tokio::io::AsyncReadExt;
#[tokio::test]
async fn test_tee_reader_captures_all_data() {
let data = b"Hello, World!";
let cursor = Cursor::new(data.to_vec());
let (tx, mut rx) = mpsc::unbounded_channel();
let mut tee = TeeReader::new(cursor, tx);
let mut output = Vec::new();
tee.read_to_end(&mut output).await.unwrap();
// Verify the reader returns the correct data
assert_eq!(output, data);
// Verify the channel received the data
let mut captured = Vec::new();
while let Ok(chunk) = rx.try_recv() {
captured.extend(chunk);
}
assert_eq!(captured, data);
}
#[tokio::test]
async fn test_tee_reader_with_chunked_reads() {
let data = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ";
let cursor = Cursor::new(data.to_vec());
let (tx, mut rx) = mpsc::unbounded_channel();
let mut tee = TeeReader::new(cursor, tx);
// Read in small chunks
let mut buf = [0u8; 5];
let mut output = Vec::new();
loop {
let n = tee.read(&mut buf).await.unwrap();
if n == 0 {
break;
}
output.extend_from_slice(&buf[..n]);
}
// Verify the reader returns the correct data
assert_eq!(output, data);
// Verify the channel received all chunks
let mut captured = Vec::new();
while let Ok(chunk) = rx.try_recv() {
captured.extend(chunk);
}
assert_eq!(captured, data);
}
#[tokio::test]
async fn test_tee_reader_empty_data() {
let data: Vec<u8> = vec![];
let cursor = Cursor::new(data.clone());
let (tx, mut rx) = mpsc::unbounded_channel();
let mut tee = TeeReader::new(cursor, tx);
let mut output = Vec::new();
tee.read_to_end(&mut output).await.unwrap();
// Verify empty output
assert!(output.is_empty());
// Verify no data was sent to channel
assert!(rx.try_recv().is_err());
}
#[tokio::test]
async fn test_tee_reader_works_when_receiver_dropped() {
let data = b"Hello, World!";
let cursor = Cursor::new(data.to_vec());
let (tx, rx) = mpsc::unbounded_channel();
// Drop the receiver before reading
drop(rx);
let mut tee = TeeReader::new(cursor, tx);
let mut output = Vec::new();
// Should still work even though receiver is dropped
tee.read_to_end(&mut output).await.unwrap();
assert_eq!(output, data);
}
#[tokio::test]
async fn test_tee_reader_large_data() {
// Test with 1MB of data
let data: Vec<u8> = (0..1024 * 1024).map(|i| (i % 256) as u8).collect();
let cursor = Cursor::new(data.clone());
let (tx, mut rx) = mpsc::unbounded_channel();
let mut tee = TeeReader::new(cursor, tx);
let mut output = Vec::new();
tee.read_to_end(&mut output).await.unwrap();
// Verify the reader returns the correct data
assert_eq!(output, data);
// Verify the channel received all data
let mut captured = Vec::new();
while let Ok(chunk) = rx.try_recv() {
captured.extend(chunk);
}
assert_eq!(captured, data);
}
}

View File

@@ -1,81 +0,0 @@
use rustls::client::danger::{HandshakeSignatureValid, ServerCertVerified, ServerCertVerifier};
use rustls::crypto::ring;
use rustls::pki_types::{CertificateDer, ServerName, UnixTime};
use rustls::{ClientConfig, DigitallySignedStruct, SignatureScheme};
use rustls_platform_verifier::BuilderVerifierExt;
use std::sync::Arc;
pub fn get_config(validate_certificates: bool, with_alpn: bool) -> ClientConfig {
let arc_crypto_provider = Arc::new(ring::default_provider());
let config_builder = ClientConfig::builder_with_provider(arc_crypto_provider)
.with_safe_default_protocol_versions()
.unwrap();
let mut client = if validate_certificates {
// Use platform-native verifier to validate certificates
config_builder.with_platform_verifier().unwrap().with_no_client_auth()
} else {
config_builder
.dangerous()
.with_custom_certificate_verifier(Arc::new(NoVerifier))
.with_no_client_auth()
};
if with_alpn {
client.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()];
}
client
}
// Copied from reqwest: https://github.com/seanmonstar/reqwest/blob/595c80b1fbcdab73ac2ae93e4edc3406f453df25/src/tls.rs#L608
#[derive(Debug)]
struct NoVerifier;
impl ServerCertVerifier for NoVerifier {
fn verify_server_cert(
&self,
_end_entity: &CertificateDer,
_intermediates: &[CertificateDer],
_server_name: &ServerName,
_ocsp_response: &[u8],
_now: UnixTime,
) -> Result<ServerCertVerified, rustls::Error> {
Ok(ServerCertVerified::assertion())
}
fn verify_tls12_signature(
&self,
_message: &[u8],
_cert: &CertificateDer,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn verify_tls13_signature(
&self,
_message: &[u8],
_cert: &CertificateDer,
_dss: &DigitallySignedStruct,
) -> Result<HandshakeSignatureValid, rustls::Error> {
Ok(HandshakeSignatureValid::assertion())
}
fn supported_verify_schemes(&self) -> Vec<SignatureScheme> {
vec![
SignatureScheme::RSA_PKCS1_SHA1,
SignatureScheme::ECDSA_SHA1_Legacy,
SignatureScheme::RSA_PKCS1_SHA256,
SignatureScheme::ECDSA_NISTP256_SHA256,
SignatureScheme::RSA_PKCS1_SHA384,
SignatureScheme::ECDSA_NISTP384_SHA384,
SignatureScheme::RSA_PKCS1_SHA512,
SignatureScheme::ECDSA_NISTP521_SHA512,
SignatureScheme::RSA_PSS_SHA256,
SignatureScheme::RSA_PSS_SHA384,
SignatureScheme::RSA_PSS_SHA512,
SignatureScheme::ED25519,
SignatureScheme::ED448,
]
}
}

View File

@@ -0,0 +1,391 @@
use crate::error::Result;
use crate::sender::{HttpResponse, HttpResponseEvent, HttpSender, RedirectBehavior};
use crate::types::SendableHttpRequest;
use tokio::sync::mpsc;
use tokio::sync::watch::Receiver;
/// HTTP Transaction that manages the lifecycle of a request, including redirect handling
pub struct HttpTransaction<S: HttpSender> {
sender: S,
max_redirects: usize,
}
impl<S: HttpSender> HttpTransaction<S> {
/// Create a new transaction with default settings
pub fn new(sender: S) -> Self {
Self { sender, max_redirects: 10 }
}
/// Create a new transaction with custom max redirects
pub fn with_max_redirects(sender: S, max_redirects: usize) -> Self {
Self { sender, max_redirects }
}
/// Execute the request with cancellation support.
/// Returns an HttpResponse with unconsumed body - caller decides how to consume it.
/// Events are sent through the provided channel.
pub async fn execute_with_cancellation(
&self,
request: SendableHttpRequest,
mut cancelled_rx: Receiver<bool>,
event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
let mut redirect_count = 0;
let mut current_url = request.url;
let mut current_method = request.method;
let mut current_headers = request.headers;
let mut current_body = request.body;
// Helper to send events (ignores errors if receiver is dropped or channel is full)
let send_event = |event: HttpResponseEvent| {
let _ = event_tx.try_send(event);
};
loop {
// Check for cancellation before each request
if *cancelled_rx.borrow() {
return Err(crate::error::Error::RequestCanceledError);
}
// Build request for this iteration
let req = SendableHttpRequest {
url: current_url.clone(),
method: current_method.clone(),
headers: current_headers.clone(),
body: current_body,
options: request.options.clone(),
};
// Send the request
send_event(HttpResponseEvent::Setting(
"redirects".to_string(),
request.options.follow_redirects.to_string(),
));
// Execute with cancellation support
let response = tokio::select! {
result = self.sender.send(req, event_tx.clone()) => result?,
_ = cancelled_rx.changed() => {
return Err(crate::error::Error::RequestCanceledError);
}
};
if !Self::is_redirect(response.status) {
// Not a redirect - return the response for caller to consume body
return Ok(response);
}
if !request.options.follow_redirects {
// Redirects disabled - return the redirect response as-is
return Ok(response);
}
// Check if we've exceeded max redirects
if redirect_count >= self.max_redirects {
// Drain the response before returning error
let _ = response.drain().await;
return Err(crate::error::Error::RequestError(format!(
"Maximum redirect limit ({}) exceeded",
self.max_redirects
)));
}
// Extract Location header before draining (headers are available immediately)
// HTTP headers are case-insensitive, so we need to search for any casing
let location = response
.headers
.iter()
.find(|(k, _)| k.eq_ignore_ascii_case("location"))
.map(|(_, v)| v.clone())
.ok_or_else(|| {
crate::error::Error::RequestError(
"Redirect response missing Location header".to_string(),
)
})?;
// Also get status before draining
let status = response.status;
send_event(HttpResponseEvent::Info("Ignoring the response body".to_string()));
// Drain the redirect response body before following
response.drain().await?;
// Update the request URL
current_url = if location.starts_with("http://") || location.starts_with("https://") {
// Absolute URL
location
} else if location.starts_with('/') {
// Absolute path - need to extract base URL from current request
let base_url = Self::extract_base_url(&current_url)?;
format!("{}{}", base_url, location)
} else {
// Relative path - need to resolve relative to current path
let base_path = Self::extract_base_path(&current_url)?;
format!("{}/{}", base_path, location)
};
// Determine redirect behavior based on status code and method
let behavior = if status == 303 {
// 303 See Other always changes to GET
RedirectBehavior::DropBody
} else if (status == 301 || status == 302) && current_method == "POST" {
// For 301/302, change POST to GET (common browser behavior)
RedirectBehavior::DropBody
} else {
// For 307 and 308, the method and body are preserved
// Also for 301/302 with non-POST methods
RedirectBehavior::Preserve
};
send_event(HttpResponseEvent::Redirect {
url: current_url.clone(),
status,
behavior: behavior.clone(),
});
// Handle method changes for certain redirect codes
if matches!(behavior, RedirectBehavior::DropBody) {
if current_method != "GET" {
current_method = "GET".to_string();
}
// Remove content-related headers
current_headers.retain(|h| {
let name_lower = h.0.to_lowercase();
!name_lower.starts_with("content-") && name_lower != "transfer-encoding"
});
}
// Reset the body for the next iteration: it was moved into the send call,
// so it can't be replayed regardless of the redirect behavior
current_body = None;
redirect_count += 1;
}
}
/// Check if a status code indicates a redirect
fn is_redirect(status: u16) -> bool {
matches!(status, 301 | 302 | 303 | 307 | 308)
}
/// Extract the base URL (scheme + host) from a full URL
fn extract_base_url(url: &str) -> Result<String> {
// Find the position after "://"
let scheme_end = url.find("://").ok_or_else(|| {
crate::error::Error::RequestError(format!("Invalid URL format: {}", url))
})?;
// Find the first '/' after the scheme
let path_start = url[scheme_end + 3..].find('/');
if let Some(idx) = path_start {
Ok(url[..scheme_end + 3 + idx].to_string())
} else {
// No path, return entire URL
Ok(url.to_string())
}
}
/// Extract the base path (everything except the last segment) from a URL
fn extract_base_path(url: &str) -> Result<String> {
if let Some(last_slash) = url.rfind('/') {
// Don't include the trailing slash if it's part of the host
if url[..last_slash].ends_with("://") || url[..last_slash].ends_with(':') {
Ok(url.to_string())
} else {
Ok(url[..last_slash].to_string())
}
} else {
Ok(url.to_string())
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::decompress::ContentEncoding;
use crate::sender::{HttpResponseEvent, HttpSender};
use async_trait::async_trait;
use std::collections::HashMap;
use std::pin::Pin;
use std::sync::Arc;
use tokio::io::AsyncRead;
use tokio::sync::Mutex;
/// Mock sender for testing
struct MockSender {
responses: Arc<Mutex<Vec<MockResponse>>>,
}
struct MockResponse {
status: u16,
headers: HashMap<String, String>,
body: Vec<u8>,
}
impl MockSender {
fn new(responses: Vec<MockResponse>) -> Self {
Self { responses: Arc::new(Mutex::new(responses)) }
}
}
#[async_trait]
impl HttpSender for MockSender {
async fn send(
&self,
_request: SendableHttpRequest,
_event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
let mut responses = self.responses.lock().await;
if responses.is_empty() {
Err(crate::error::Error::RequestError("No more mock responses".to_string()))
} else {
let mock = responses.remove(0);
// Create a simple in-memory stream from the body
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
Box::pin(std::io::Cursor::new(mock.body));
Ok(HttpResponse::new(
mock.status,
None, // status_reason
mock.headers,
HashMap::new(),
None, // content_length
"https://example.com".to_string(), // url
None, // remote_addr
Some("HTTP/1.1".to_string()), // version
body_stream,
ContentEncoding::Identity,
))
}
}
}
#[tokio::test]
async fn test_transaction_no_redirect() {
let response = MockResponse { status: 200, headers: HashMap::new(), body: b"OK".to_vec() };
let sender = MockSender::new(vec![response]);
let transaction = HttpTransaction::new(sender);
let request = SendableHttpRequest {
url: "https://example.com".to_string(),
method: "GET".to_string(),
headers: vec![],
..Default::default()
};
let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);
// Consume the body to verify it
let (body, _) = result.bytes().await.unwrap();
assert_eq!(body, b"OK");
}
#[tokio::test]
async fn test_transaction_single_redirect() {
let mut redirect_headers = HashMap::new();
redirect_headers.insert("Location".to_string(), "https://example.com/new".to_string());
let responses = vec![
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
MockResponse { status: 200, headers: HashMap::new(), body: b"Final".to_vec() },
];
let sender = MockSender::new(responses);
let transaction = HttpTransaction::new(sender);
let request = SendableHttpRequest {
url: "https://example.com/old".to_string(),
method: "GET".to_string(),
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};
let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);
let (body, _) = result.bytes().await.unwrap();
assert_eq!(body, b"Final");
}
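    // Hedged sketch (not part of the diff): per the redirect rules above, a 303
    // converts the follow-up request to GET and drops the body. MockSender
    // ignores the request details, so this only asserts the final response.
    #[tokio::test]
    async fn test_transaction_303_redirect() {
        let mut redirect_headers = HashMap::new();
        redirect_headers.insert("Location".to_string(), "https://example.com/new".to_string());
        let responses = vec![
            MockResponse { status: 303, headers: redirect_headers, body: vec![] },
            MockResponse { status: 200, headers: HashMap::new(), body: b"Done".to_vec() },
        ];
        let sender = MockSender::new(responses);
        let transaction = HttpTransaction::new(sender);
        let request = SendableHttpRequest {
            url: "https://example.com/form".to_string(),
            method: "POST".to_string(),
            options: crate::types::SendableHttpRequestOptions {
                follow_redirects: true,
                ..Default::default()
            },
            ..Default::default()
        };
        let (_tx, rx) = tokio::sync::watch::channel(false);
        let (event_tx, _event_rx) = mpsc::channel(100);
        let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
        assert_eq!(result.status, 200);
    }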
#[tokio::test]
async fn test_transaction_max_redirects_exceeded() {
let mut redirect_headers = HashMap::new();
redirect_headers.insert("Location".to_string(), "https://example.com/loop".to_string());
// Create more redirects than allowed
let responses: Vec<MockResponse> = (0..12)
.map(|_| MockResponse { status: 302, headers: redirect_headers.clone(), body: vec![] })
.collect();
let sender = MockSender::new(responses);
let transaction = HttpTransaction::with_max_redirects(sender, 10);
let request = SendableHttpRequest {
url: "https://example.com/start".to_string(),
method: "GET".to_string(),
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};
let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await;
if let Err(crate::error::Error::RequestError(msg)) = result {
assert!(msg.contains("Maximum redirect limit"));
} else {
panic!("Expected RequestError with max redirect message. Got {result:?}");
}
}
#[test]
fn test_is_redirect() {
assert!(HttpTransaction::<MockSender>::is_redirect(301));
assert!(HttpTransaction::<MockSender>::is_redirect(302));
assert!(HttpTransaction::<MockSender>::is_redirect(303));
assert!(HttpTransaction::<MockSender>::is_redirect(307));
assert!(HttpTransaction::<MockSender>::is_redirect(308));
assert!(!HttpTransaction::<MockSender>::is_redirect(200));
assert!(!HttpTransaction::<MockSender>::is_redirect(404));
assert!(!HttpTransaction::<MockSender>::is_redirect(500));
}
#[test]
fn test_extract_base_url() {
let result =
HttpTransaction::<MockSender>::extract_base_url("https://example.com/path/to/resource");
assert_eq!(result.unwrap(), "https://example.com");
let result = HttpTransaction::<MockSender>::extract_base_url("http://localhost:8080/api");
assert_eq!(result.unwrap(), "http://localhost:8080");
let result = HttpTransaction::<MockSender>::extract_base_url("invalid-url");
assert!(result.is_err());
}
#[test]
fn test_extract_base_path() {
let result = HttpTransaction::<MockSender>::extract_base_path(
"https://example.com/path/to/resource",
);
assert_eq!(result.unwrap(), "https://example.com/path/to");
let result = HttpTransaction::<MockSender>::extract_base_path("https://example.com/single");
assert_eq!(result.unwrap(), "https://example.com");
let result = HttpTransaction::<MockSender>::extract_base_path("https://example.com/");
assert_eq!(result.unwrap(), "https://example.com");
}
}

View File

@@ -0,0 +1,981 @@
use crate::chained_reader::{ChainedReader, ReaderType};
use crate::error::Error::RequestError;
use crate::error::Result;
use crate::path_placeholders::apply_path_placeholders;
use crate::proto::ensure_proto;
use bytes::Bytes;
use log::warn;
use std::collections::BTreeMap;
use std::pin::Pin;
use std::time::Duration;
use tokio::io::AsyncRead;
use yaak_common::serde::{get_bool, get_str, get_str_map};
use yaak_models::models::HttpRequest;
pub(crate) const MULTIPART_BOUNDARY: &str = "------YaakFormBoundary";
pub enum SendableBody {
Bytes(Bytes),
Stream(Pin<Box<dyn AsyncRead + Send + 'static>>),
}
enum SendableBodyWithMeta {
Bytes(Bytes),
Stream {
data: Pin<Box<dyn AsyncRead + Send + 'static>>,
content_length: Option<usize>,
},
}
impl From<SendableBodyWithMeta> for SendableBody {
fn from(value: SendableBodyWithMeta) -> Self {
match value {
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
SendableBodyWithMeta::Stream { data, .. } => SendableBody::Stream(data),
}
}
}
#[derive(Default)]
pub struct SendableHttpRequest {
pub url: String,
pub method: String,
pub headers: Vec<(String, String)>,
pub body: Option<SendableBody>,
pub options: SendableHttpRequestOptions,
}
#[derive(Default, Clone)]
pub struct SendableHttpRequestOptions {
pub timeout: Option<Duration>,
pub follow_redirects: bool,
}
impl SendableHttpRequest {
pub async fn from_http_request(
r: &HttpRequest,
options: SendableHttpRequestOptions,
) -> Result<Self> {
let initial_headers = build_headers(r);
let (body, headers) = build_body(&r.method, &r.body_type, &r.body, initial_headers).await?;
Ok(Self {
url: build_url(r),
method: r.method.to_uppercase(),
headers,
body: body.into(),
options,
})
}
pub fn insert_header(&mut self, header: (String, String)) {
if let Some(existing) =
self.headers.iter_mut().find(|h| h.0.to_lowercase() == header.0.to_lowercase())
{
existing.1 = header.1;
} else {
self.headers.push(header);
}
}
}
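// Hedged sketch (not part of the diff): `insert_header` matches names
// case-insensitively, so this overwrites instead of duplicating.
#[cfg(test)]
mod insert_header_example {
    use super::*;
    #[test]
    fn replaces_existing_header() {
        let mut req = SendableHttpRequest {
            headers: vec![("Content-Type".to_string(), "text/plain".to_string())],
            ..Default::default()
        };
        req.insert_header(("content-type".to_string(), "application/json".to_string()));
        assert_eq!(req.headers.len(), 1);
        assert_eq!(req.headers[0].1, "application/json");
    }
}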
pub fn append_query_params(url: &str, params: Vec<(String, String)>) -> String {
let url_string = url.to_string();
if params.is_empty() {
return url.to_string();
}
// Build query string
let query_string = params
.iter()
.map(|(name, value)| {
format!("{}={}", urlencoding::encode(name), urlencoding::encode(value))
})
.collect::<Vec<_>>()
.join("&");
// Split URL into parts: base URL, query, and fragment
let (base_and_query, fragment) = if let Some(hash_pos) = url_string.find('#') {
let (before_hash, after_hash) = url_string.split_at(hash_pos);
(before_hash.to_string(), Some(after_hash.to_string()))
} else {
(url_string, None)
};
// Now handle query parameters on the base URL (without fragment)
let mut result = if base_and_query.contains('?') {
// Check if there's already a query string after the '?'
let parts: Vec<&str> = base_and_query.splitn(2, '?').collect();
if parts.len() == 2 && !parts[1].trim().is_empty() {
// Append with & if there are existing parameters
format!("{}&{}", base_and_query, query_string)
} else {
// Just append the new parameters directly (URL ends with '?')
format!("{}{}", base_and_query, query_string)
}
} else {
// No existing query parameters, add with '?'
format!("{}?{}", base_and_query, query_string)
};
// Re-append the fragment if it exists
if let Some(fragment) = fragment {
result.push_str(&fragment);
}
result
}
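// Hedged sketch (not part of the diff): parameters are appended before any
// fragment, and joined with '&' when a query string already exists.
#[cfg(test)]
mod append_query_params_example {
    use super::*;
    #[test]
    fn appends_before_the_fragment() {
        let url = append_query_params(
            "https://example.com/p?a=1#frag",
            vec![("b".to_string(), "2".to_string())],
        );
        assert_eq!(url, "https://example.com/p?a=1&b=2#frag");
    }
}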
fn build_url(r: &HttpRequest) -> String {
let (url_string, params) = apply_path_placeholders(&ensure_proto(&r.url), &r.url_parameters);
append_query_params(
&url_string,
params
.iter()
.filter(|p| p.enabled && !p.name.is_empty())
.map(|p| (p.name.clone(), p.value.clone()))
.collect(),
)
}
fn build_headers(r: &HttpRequest) -> Vec<(String, String)> {
r.headers
.iter()
.filter_map(|h| {
if h.enabled && !h.name.is_empty() {
Some((h.name.clone(), h.value.clone()))
} else {
None
}
})
.collect()
}
async fn build_body(
method: &str,
body_type: &Option<String>,
body: &BTreeMap<String, serde_json::Value>,
headers: Vec<(String, String)>,
) -> Result<(Option<SendableBody>, Vec<(String, String)>)> {
let body_type = match &body_type {
None => return Ok((None, headers)),
Some(t) => t,
};
let (body, content_type) = match body_type.as_str() {
"binary" => (build_binary_body(&body).await?, None),
"graphql" => (build_graphql_body(&method, &body), Some("application/json".to_string())),
"application/x-www-form-urlencoded" => {
(build_form_body(&body), Some("application/x-www-form-urlencoded".to_string()))
}
"multipart/form-data" => build_multipart_body(&body, &headers).await?,
_ if body.contains_key("text") => (build_text_body(&body), None),
t => {
warn!("Unsupported body type: {}", t);
(None, None)
}
};
// Add or update the Content-Type header
let mut headers = headers;
if let Some(ct) = content_type {
if let Some(existing) = headers.iter_mut().find(|h| h.0.to_lowercase() == "content-type") {
existing.1 = ct;
} else {
headers.push(("Content-Type".to_string(), ct));
}
}
// Check if Transfer-Encoding: chunked is already set
let has_chunked_encoding = headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
});
// Add a Content-Length header only if chunked encoding is not being used
if !has_chunked_encoding {
let content_length = match body {
Some(SendableBodyWithMeta::Bytes(ref bytes)) => Some(bytes.len()),
Some(SendableBodyWithMeta::Stream { content_length, .. }) => content_length,
None => None,
};
if let Some(cl) = content_length {
headers.push(("Content-Length".to_string(), cl.to_string()));
}
}
Ok((body.map(|b| b.into()), headers))
}
fn build_form_body(body: &BTreeMap<String, serde_json::Value>) -> Option<SendableBodyWithMeta> {
let form_params = match body.get("form").map(|f| f.as_array()) {
Some(Some(f)) => f,
_ => return None,
};
let mut body = String::new();
for p in form_params {
let enabled = get_bool(p, "enabled", true);
let name = get_str(p, "name");
if !enabled || name.is_empty() {
continue;
}
let value = get_str(p, "value");
if !body.is_empty() {
body.push('&');
}
body.push_str(&urlencoding::encode(&name));
body.push('=');
body.push_str(&urlencoding::encode(&value));
}
if body.is_empty() { None } else { Some(SendableBodyWithMeta::Bytes(Bytes::from(body))) }
}
async fn build_binary_body(
body: &BTreeMap<String, serde_json::Value>,
) -> Result<Option<SendableBodyWithMeta>> {
let file_path = match body.get("filePath").map(|f| f.as_str()) {
Some(Some(f)) => f,
_ => return Ok(None),
};
// Stat the file for its size, then open it for streaming
let content_length = tokio::fs::metadata(file_path)
.await
.map_err(|e| RequestError(format!("Failed to get file metadata: {}", e)))?
.len();
let file = tokio::fs::File::open(file_path)
.await
.map_err(|e| RequestError(format!("Failed to open file: {}", e)))?;
Ok(Some(SendableBodyWithMeta::Stream {
data: Box::pin(file),
content_length: Some(content_length as usize),
}))
}
fn build_text_body(body: &BTreeMap<String, serde_json::Value>) -> Option<SendableBodyWithMeta> {
let text = get_str_map(body, "text");
if text.is_empty() {
None
} else {
Some(SendableBodyWithMeta::Bytes(Bytes::from(text.to_string())))
}
}
fn build_graphql_body(
method: &str,
body: &BTreeMap<String, serde_json::Value>,
) -> Option<SendableBodyWithMeta> {
let query = get_str_map(body, "query");
let variables = get_str_map(body, "variables");
if method.to_lowercase() == "get" {
// GraphQL GET requests use query parameters, not a body
return None;
}
let body = if variables.trim().is_empty() {
format!(r#"{{"query":{}}}"#, serde_json::to_string(&query).unwrap_or_default())
} else {
format!(
r#"{{"query":{},"variables":{}}}"#,
serde_json::to_string(&query).unwrap_or_default(),
variables
)
};
Some(SendableBodyWithMeta::Bytes(Bytes::from(body)))
}
async fn build_multipart_body(
body: &BTreeMap<String, serde_json::Value>,
headers: &Vec<(String, String)>,
) -> Result<(Option<SendableBodyWithMeta>, Option<String>)> {
let boundary = extract_boundary_from_headers(headers);
let form_params = match body.get("form").map(|f| f.as_array()) {
Some(Some(f)) => f,
_ => return Ok((None, None)),
};
// Build a list of readers for streaming and calculate total content length
let mut readers: Vec<ReaderType> = Vec::new();
let mut has_content = false;
let mut total_size: usize = 0;
for p in form_params {
let enabled = get_bool(p, "enabled", true);
let name = get_str(p, "name");
if !enabled || name.is_empty() {
continue;
}
has_content = true;
// Add boundary delimiter
let boundary_bytes = format!("--{}\r\n", boundary).into_bytes();
total_size += boundary_bytes.len();
readers.push(ReaderType::Bytes(boundary_bytes));
let file_path = get_str(p, "file");
let value = get_str(p, "value");
let content_type = get_str(p, "contentType");
if file_path.is_empty() {
// Text field
let header = if !content_type.is_empty() {
format!(
"Content-Disposition: form-data; name=\"{}\"\r\nContent-Type: {}\r\n\r\n{}",
name, content_type, value
)
} else {
format!("Content-Disposition: form-data; name=\"{}\"\r\n\r\n{}", name, value)
};
let header_bytes = header.into_bytes();
total_size += header_bytes.len();
readers.push(ReaderType::Bytes(header_bytes));
} else {
// File field - validate that file exists first
if !tokio::fs::try_exists(file_path).await.unwrap_or(false) {
return Err(RequestError(format!("File not found: {}", file_path)));
}
// Get file size for content length calculation
let file_metadata = tokio::fs::metadata(file_path)
.await
.map_err(|e| RequestError(format!("Failed to get file metadata: {}", e)))?;
let file_size = file_metadata.len() as usize;
let filename = get_str(p, "filename");
let filename = if filename.is_empty() {
std::path::Path::new(file_path)
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("file")
} else {
filename
};
// Add content type
let mime_type = if !content_type.is_empty() {
content_type.to_string()
} else {
// Guess mime type from file extension
mime_guess::from_path(file_path).first_or_octet_stream().to_string()
};
let header = format!(
"Content-Disposition: form-data; name=\"{}\"; filename=\"{}\"\r\nContent-Type: {}\r\n\r\n",
name, filename, mime_type
);
let header_bytes = header.into_bytes();
total_size += header_bytes.len();
total_size += file_size;
readers.push(ReaderType::Bytes(header_bytes));
// Queue the file path; its contents are streamed from disk at send time
readers.push(ReaderType::FilePath(file_path.to_string()));
}
let line_ending = b"\r\n".to_vec();
total_size += line_ending.len();
readers.push(ReaderType::Bytes(line_ending));
}
if has_content {
// Add the final boundary
let final_boundary = format!("--{}--\r\n", boundary).into_bytes();
total_size += final_boundary.len();
readers.push(ReaderType::Bytes(final_boundary));
let content_type = format!("multipart/form-data; boundary={}", boundary);
let stream = ChainedReader::new(readers);
Ok((
Some(SendableBodyWithMeta::Stream {
data: Box::pin(stream),
content_length: Some(total_size),
}),
Some(content_type),
))
} else {
Ok((None, None))
}
}
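// Illustrative example (not part of the diff): a single enabled text field with
// name "a" and value "1", using the default boundary, produces byte for byte:
//
//   --------YaakFormBoundary\r\n
//   Content-Disposition: form-data; name="a"\r\n
//   \r\n
//   1\r\n
//   --------YaakFormBoundary--\r\n
//
// total_size is accumulated alongside each queued reader, which is how the stream
// can advertise an exact Content-Length without buffering file parts in memory.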
fn extract_boundary_from_headers(headers: &Vec<(String, String)>) -> String {
headers
.iter()
.find(|h| h.0.to_lowercase() == "content-type")
.and_then(|h| {
// Extract boundary from the Content-Type header (e.g., "multipart/form-data; boundary=xyz")
h.1.split(';')
.find(|part| part.trim().starts_with("boundary="))
.and_then(|boundary_part| boundary_part.split('=').nth(1))
.map(|b| b.trim().to_string())
})
.unwrap_or_else(|| MULTIPART_BOUNDARY.to_string())
}
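// Editorial sketch (not part of the diff): ChainedReader and ReaderType are defined
// elsewhere in this crate and not shown here. Below is an assumption-based
// reconstruction of the core idea, an AsyncRead that drains a queue of sources in
// order, which is what lets build_multipart_body stream file parts without buffering
// them. All names (ChainedReaderSketch, Source) are hypothetical.
use std::collections::VecDeque;
use std::io::Cursor;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};

struct ChainedReaderSketch {
    sources: VecDeque<Source>,
}

enum Source {
    Bytes(Cursor<Vec<u8>>),
    Reader(Pin<Box<dyn AsyncRead + Send>>),
}

impl AsyncRead for ChainedReaderSketch {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let this = self.get_mut();
        loop {
            // Queue empty: report EOF by returning Ok(()) with nothing filled.
            let Some(src) = this.sources.front_mut() else {
                return Poll::Ready(Ok(()));
            };
            let before = buf.filled().len();
            let poll = match src {
                Source::Bytes(c) => Pin::new(c).poll_read(cx, buf),
                Source::Reader(r) => r.as_mut().poll_read(cx, buf),
            };
            match poll {
                // Ok with nothing filled (and room left) means the current source
                // hit EOF; drop it and try the next one.
                Poll::Ready(Ok(())) if buf.filled().len() == before && buf.remaining() > 0 => {
                    this.sources.pop_front();
                }
                other => return other,
            }
        }
    }
}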
#[cfg(test)]
mod tests {
use super::*;
use bytes::Bytes;
use serde_json::json;
use std::collections::BTreeMap;
use yaak_models::models::{HttpRequest, HttpUrlParameter};
#[test]
fn test_build_url_no_params() {
let r = HttpRequest {
url: "https://example.com/api".to_string(),
url_parameters: vec![],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api");
}
#[test]
fn test_build_url_with_params() {
let r = HttpRequest {
url: "https://example.com/api".to_string(),
url_parameters: vec![
HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
},
HttpUrlParameter {
enabled: true,
name: "baz".to_string(),
value: "qux".to_string(),
id: None,
},
],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?foo=bar&baz=qux");
}
#[test]
fn test_build_url_with_disabled_params() {
let r = HttpRequest {
url: "https://example.com/api".to_string(),
url_parameters: vec![
HttpUrlParameter {
enabled: false,
name: "disabled".to_string(),
value: "value".to_string(),
id: None,
},
HttpUrlParameter {
enabled: true,
name: "enabled".to_string(),
value: "value".to_string(),
id: None,
},
],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?enabled=value");
}
#[test]
fn test_build_url_with_existing_query() {
let r = HttpRequest {
url: "https://example.com/api?existing=param".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "new".to_string(),
value: "value".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?existing=param&new=value");
}
#[test]
fn test_build_url_with_empty_existing_query() {
let r = HttpRequest {
url: "https://example.com/api?".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "new".to_string(),
value: "value".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?new=value");
}
#[test]
fn test_build_url_with_special_chars() {
let r = HttpRequest {
url: "https://example.com/api".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "special chars!@#".to_string(),
value: "value with spaces & symbols".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(
result,
"https://example.com/api?special%20chars%21%40%23=value%20with%20spaces%20%26%20symbols"
);
}
#[test]
fn test_build_url_adds_protocol() {
let r = HttpRequest {
url: "example.com/api".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
// ensure_proto defaults to http:// for regular domains
assert_eq!(result, "http://example.com/api?foo=bar");
}
#[test]
fn test_build_url_adds_https_for_dev_domain() {
let r = HttpRequest {
url: "example.dev/api".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
// .dev domains force https
assert_eq!(result, "https://example.dev/api?foo=bar");
}
#[test]
fn test_build_url_with_fragment() {
let r = HttpRequest {
url: "https://example.com/api#section".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?foo=bar#section");
}
#[test]
fn test_build_url_with_existing_query_and_fragment() {
let r = HttpRequest {
url: "https://yaak.app?foo=bar#some-hash".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "baz".to_string(),
value: "qux".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://yaak.app?foo=bar&baz=qux#some-hash");
}
#[test]
fn test_build_url_with_empty_query_and_fragment() {
let r = HttpRequest {
url: "https://example.com/api?#section".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?foo=bar#section");
}
#[test]
fn test_build_url_with_fragment_containing_special_chars() {
let r = HttpRequest {
url: "https://example.com#section/with/slashes?and=fake&query".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "real".to_string(),
value: "param".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com?real=param#section/with/slashes?and=fake&query");
}
#[test]
fn test_build_url_preserves_empty_fragment() {
let r = HttpRequest {
url: "https://example.com/api#".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
assert_eq!(result, "https://example.com/api?foo=bar#");
}
#[test]
fn test_build_url_with_multiple_fragments() {
// Testing edge case where the URL has multiple # characters (though technically invalid)
let r = HttpRequest {
url: "https://example.com#section#subsection".to_string(),
url_parameters: vec![HttpUrlParameter {
enabled: true,
name: "foo".to_string(),
value: "bar".to_string(),
id: None,
}],
..Default::default()
};
let result = build_url(&r);
// Should treat everything after first # as fragment
assert_eq!(result, "https://example.com?foo=bar#section#subsection");
}
#[tokio::test]
async fn test_text_body() {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
let result = build_text_body(&body);
match result {
Some(SendableBodyWithMeta::Bytes(bytes)) => {
assert_eq!(bytes, Bytes::from("Hello, World!"))
}
_ => panic!("Expected Some(SendableBody::Bytes)"),
}
}
#[tokio::test]
async fn test_text_body_empty() {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!(""));
let result = build_text_body(&body);
assert!(result.is_none());
}
#[tokio::test]
async fn test_text_body_missing() {
let body = BTreeMap::new();
let result = build_text_body(&body);
assert!(result.is_none());
}
#[tokio::test]
async fn test_form_urlencoded_body() -> Result<()> {
let mut body = BTreeMap::new();
body.insert(
"form".to_string(),
json!([
{ "enabled": true, "name": "basic", "value": "aaa"},
{ "enabled": true, "name": "fUnkey Stuff!$*#(", "value": "*)%&#$)@ *$#)@&"},
{ "enabled": false, "name": "disabled", "value": "won't show"},
]),
);
let result = build_form_body(&body);
match result {
Some(SendableBodyWithMeta::Bytes(bytes)) => {
let expected = "basic=aaa&fUnkey%20Stuff%21%24%2A%23%28=%2A%29%25%26%23%24%29%40%20%2A%24%23%29%40%26";
assert_eq!(bytes, Bytes::from(expected));
}
_ => panic!("Expected Some(SendableBody::Bytes)"),
}
Ok(())
}
#[tokio::test]
async fn test_form_urlencoded_body_missing_form() {
let body = BTreeMap::new();
let result = build_form_body(&body);
assert!(result.is_none());
}
#[tokio::test]
async fn test_binary_body() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("filePath".to_string(), json!("./tests/test.txt"));
let result = build_binary_body(&body).await?;
assert!(matches!(result, Some(SendableBodyWithMeta::Stream { .. })));
Ok(())
}
#[tokio::test]
async fn test_binary_body_file_not_found() {
let mut body = BTreeMap::new();
body.insert("filePath".to_string(), json!("./nonexistent/file.txt"));
let result = build_binary_body(&body).await;
assert!(result.is_err());
if let Err(e) = result {
assert!(matches!(e, RequestError(_)));
}
}
#[tokio::test]
async fn test_graphql_body_with_variables() {
let mut body = BTreeMap::new();
body.insert("query".to_string(), json!("{ user(id: $id) { name } }"));
body.insert("variables".to_string(), json!(r#"{"id": "123"}"#));
let result = build_graphql_body("POST", &body);
match result {
Some(SendableBodyWithMeta::Bytes(bytes)) => {
let expected =
r#"{"query":"{ user(id: $id) { name } }","variables":{"id": "123"}}"#;
assert_eq!(bytes, Bytes::from(expected));
}
_ => panic!("Expected Some(SendableBody::Bytes)"),
}
}
#[tokio::test]
async fn test_graphql_body_without_variables() {
let mut body = BTreeMap::new();
body.insert("query".to_string(), json!("{ users { name } }"));
body.insert("variables".to_string(), json!(""));
let result = build_graphql_body("POST", &body);
match result {
Some(SendableBodyWithMeta::Bytes(bytes)) => {
let expected = r#"{"query":"{ users { name } }"}"#;
assert_eq!(bytes, Bytes::from(expected));
}
_ => panic!("Expected Some(SendableBody::Bytes)"),
}
}
#[tokio::test]
async fn test_graphql_body_get_method() {
let mut body = BTreeMap::new();
body.insert("query".to_string(), json!("{ users { name } }"));
let result = build_graphql_body("GET", &body);
assert!(result.is_none());
}
#[tokio::test]
async fn test_multipart_body_text_fields() -> Result<()> {
let mut body = BTreeMap::new();
body.insert(
"form".to_string(),
json!([
{ "enabled": true, "name": "field1", "value": "value1", "file": "" },
{ "enabled": true, "name": "field2", "value": "value2", "file": "" },
{ "enabled": false, "name": "disabled", "value": "won't show", "file": "" },
]),
);
let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
assert!(content_type.is_some());
match result {
Some(SendableBodyWithMeta::Stream { data: mut stream, content_length }) => {
// Read the entire stream to verify content
let mut buf = Vec::new();
use tokio::io::AsyncReadExt;
stream.read_to_end(&mut buf).await.expect("Failed to read stream");
let body_str = String::from_utf8_lossy(&buf);
assert_eq!(
body_str,
"--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"field1\"\r\n\r\nvalue1\r\n--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"field2\"\r\n\r\nvalue2\r\n--------YaakFormBoundary--\r\n",
);
assert_eq!(content_length, Some(body_str.len()));
}
_ => panic!("Expected Some(SendableBody::Stream)"),
}
assert_eq!(
content_type.unwrap(),
format!("multipart/form-data; boundary={}", MULTIPART_BOUNDARY)
);
Ok(())
}
#[tokio::test]
async fn test_multipart_body_with_file() -> Result<()> {
let mut body = BTreeMap::new();
body.insert(
"form".to_string(),
json!([
{ "enabled": true, "name": "file_field", "file": "./tests/test.txt", "filename": "custom.txt", "contentType": "text/plain" },
]),
);
let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
assert!(content_type.is_some());
match result {
Some(SendableBodyWithMeta::Stream { data: mut stream, content_length }) => {
// Read the entire stream to verify content
let mut buf = Vec::new();
use tokio::io::AsyncReadExt;
stream.read_to_end(&mut buf).await.expect("Failed to read stream");
let body_str = String::from_utf8_lossy(&buf);
assert_eq!(
body_str,
"--------YaakFormBoundary\r\nContent-Disposition: form-data; name=\"file_field\"; filename=\"custom.txt\"\r\nContent-Type: text/plain\r\n\r\nThis is a test file!\n\r\n--------YaakFormBoundary--\r\n"
);
assert_eq!(content_length, Some(body_str.len()));
}
_ => panic!("Expected Some(SendableBody::Stream)"),
}
assert_eq!(
content_type.unwrap(),
format!("multipart/form-data; boundary={}", MULTIPART_BOUNDARY)
);
Ok(())
}
#[tokio::test]
async fn test_multipart_body_empty() -> Result<()> {
let body = BTreeMap::new();
let (result, content_type) = build_multipart_body(&body, &vec![]).await?;
assert!(result.is_none());
assert_eq!(content_type, None);
Ok(())
}
#[test]
fn test_extract_boundary_from_headers_with_custom_boundary() {
let headers = vec![(
"Content-Type".to_string(),
"multipart/form-data; boundary=customBoundary123".to_string(),
)];
let boundary = extract_boundary_from_headers(&headers);
assert_eq!(boundary, "customBoundary123");
}
#[test]
fn test_extract_boundary_from_headers_default() {
let headers = vec![("Accept".to_string(), "*/*".to_string())];
let boundary = extract_boundary_from_headers(&headers);
assert_eq!(boundary, MULTIPART_BOUNDARY);
}
#[test]
fn test_extract_boundary_from_headers_no_boundary_in_content_type() {
let headers = vec![("Content-Type".to_string(), "multipart/form-data".to_string())];
let boundary = extract_boundary_from_headers(&headers);
assert_eq!(boundary, MULTIPART_BOUNDARY);
}
#[test]
fn test_extract_boundary_case_insensitive() {
let headers = vec![(
"Content-Type".to_string(),
"multipart/form-data; boundary=myBoundary".to_string(),
)];
let boundary = extract_boundary_from_headers(&headers);
assert_eq!(boundary, "myBoundary");
}
#[tokio::test]
async fn test_no_content_length_with_chunked_encoding() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
// Headers with Transfer-Encoding: chunked
let headers = vec![("Transfer-Encoding".to_string(), "chunked".to_string())];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length is NOT present when Transfer-Encoding: chunked is set
let has_content_length =
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
assert!(!has_content_length, "Content-Length should not be present with chunked encoding");
// Verify that the Transfer-Encoding header is still present
let has_chunked = result_headers.iter().any(|h| {
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
});
assert!(has_chunked, "Transfer-Encoding: chunked should be preserved");
Ok(())
}
#[tokio::test]
async fn test_content_length_without_chunked_encoding() -> Result<()> {
let mut body = BTreeMap::new();
body.insert("text".to_string(), json!("Hello, World!"));
// Headers without Transfer-Encoding: chunked
let headers = vec![];
let (_, result_headers) =
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
// Verify that Content-Length IS present when Transfer-Encoding: chunked is NOT set
let content_length_header =
result_headers.iter().find(|h| h.0.to_lowercase() == "content-length");
assert!(
content_length_header.is_some(),
"Content-Length should be present without chunked encoding"
);
assert_eq!(
content_length_header.unwrap().1,
"13",
"Content-Length should match the body size"
);
Ok(())
}
}

View File

@@ -0,0 +1 @@
This is a test file!

View File

@@ -6,8 +6,6 @@ export type ActivateLicenseRequestPayload = { licenseKey: string, appVersion: st
export type ActivateLicenseResponsePayload = { activationId: string, };
export type CheckActivationResponsePayload = { active: boolean, };
export type DeactivateLicenseRequestPayload = { appVersion: string, appPlatform: string, };
export type LicenseCheckStatus = { "type": "personal_use", trial_ended: string, } | { "type": "commercial_use" } | { "type": "invalid_license" } | { "type": "trialing", end: string, };
export type LicenseCheckStatus = { "status": "personal_use", "data": { trial_ended: string, } } | { "status": "trialing", "data": { end: string, } } | { "status": "error", "data": { message: string, code: string, } } | { "status": "active", "data": { periodEnd: string, cancelAt: string | null, } } | { "status": "inactive", "data": { status: string, } } | { "status": "expired", "data": { changes: number, changesUrl: string | null, billingUrl: string, periodEnd: string, } } | { "status": "past_due", "data": { billingUrl: string, periodEnd: string, } };

View File

@@ -1,7 +1,6 @@
use tauri::{
generate_handler,
Runtime, generate_handler,
plugin::{Builder, TauriPlugin},
Runtime,
};
mod commands;

View File

@@ -1,6 +1,6 @@
use crate::error::Error::{ClientError, ServerError};
use crate::error::Error::{ClientError, JsonError, ServerError};
use crate::error::Result;
use chrono::{NaiveDateTime, Utc};
use chrono::{DateTime, Utc};
use log::{info, warn};
use serde::{Deserialize, Serialize};
use std::ops::Add;
@@ -24,13 +24,6 @@ pub struct CheckActivationRequestPayload {
pub app_platform: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "license.ts")]
pub struct CheckActivationResponsePayload {
pub active: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "license.ts")]
@@ -63,6 +56,49 @@ pub struct APIErrorResponsePayload {
pub message: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "status", content = "data")]
#[ts(export, export_to = "license.ts")]
pub enum LicenseCheckStatus {
// Local Types
PersonalUse {
trial_ended: DateTime<Utc>,
},
Trialing {
end: DateTime<Utc>,
},
Error {
message: String,
code: String,
},
// Server Types
Active {
#[serde(rename = "periodEnd")]
period_end: DateTime<Utc>,
#[serde(rename = "cancelAt")]
cancel_at: Option<DateTime<Utc>>,
},
Inactive {
status: String,
},
Expired {
changes: i32,
#[serde(rename = "changesUrl")]
changes_url: Option<String>,
#[serde(rename = "billingUrl")]
billing_url: String,
#[serde(rename = "periodEnd")]
period_end: DateTime<Utc>,
},
PastDue {
#[serde(rename = "billingUrl")]
billing_url: String,
#[serde(rename = "periodEnd")]
period_end: DateTime<Utc>,
},
}
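// Editorial note (not part of the diff): with tag = "status", content = "data", and
// rename_all = "snake_case", a value such as
//   LicenseCheckStatus::PastDue { billing_url, period_end }
// serializes to the adjacently tagged shape
//   {"status":"past_due","data":{"billingUrl":"...","periodEnd":"..."}}
// which matches the regenerated LicenseCheckStatus union in license.ts above.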
pub async fn activate_license<R: Runtime>(
window: &WebviewWindow<R>,
license_key: &str,
@@ -78,10 +114,7 @@ pub async fn activate_license<R: Runtime>(
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {
@@ -118,10 +151,7 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {
@@ -141,16 +171,6 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
Ok(())
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "license.ts")]
pub enum LicenseCheckStatus {
PersonalUse { trial_ended: NaiveDateTime },
CommercialUse,
InvalidLicense,
Trialing { end: NaiveDateTime },
}
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
let payload = CheckActivationRequestPayload {
app_platform: get_os_str().to_string(),
@@ -159,29 +179,24 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
let activation_id = get_activation_id(window.app_handle()).await;
let settings = window.db().get_settings();
let trial_end = settings.created_at.add(Duration::from_secs(TRIAL_SECONDS));
let trial_end = settings.created_at.add(Duration::from_secs(TRIAL_SECONDS)).and_utc();
let has_activation_id = !activation_id.is_empty();
let trial_period_active = Utc::now().naive_utc() < trial_end;
let trial_period_active = Utc::now() < trial_end;
match (has_activation_id, trial_period_active) {
(false, true) => Ok(LicenseCheckStatus::Trialing { end: trial_end }),
(false, false) => Ok(LicenseCheckStatus::PersonalUse {
trial_ended: trial_end,
}),
(false, false) => Ok(LicenseCheckStatus::PersonalUse { trial_ended: trial_end }),
(true, _) => {
info!("Checking license activation");
// A license has been activated, so let's check the license server
let client = yaak_api_client(window.app_handle())?;
let path = format!("/licenses/activations/{activation_id}/check");
let path = format!("/licenses/activations/{activation_id}/check-v2");
let response = client.post(build_url(&path)).json(&payload).send().await?;
if response.status().is_client_error() {
let body: APIErrorResponsePayload = response.json().await?;
return Err(ClientError {
message: body.message,
error: body.error,
});
return Err(ClientError { message: body.message, error: body.error });
}
if response.status().is_server_error() {
@@ -189,13 +204,14 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
return Err(ServerError);
}
let body: CheckActivationResponsePayload = response.json().await?;
if !body.active {
info!("Inactive License {:?}", body);
return Ok(LicenseCheckStatus::InvalidLicense);
let body_text = response.text().await?;
match serde_json::from_str::<LicenseCheckStatus>(&body_text) {
Ok(b) => Ok(b),
Err(e) => {
warn!("Failed to decode server response: {} {:?}", body_text, e);
Err(JsonError(e))
}
}
Ok(LicenseCheckStatus::CommercialUse)
}
}
}

View File

@@ -1,4 +1,4 @@
use tauri::{command, Runtime, Window};
use tauri::{Runtime, Window, command};
#[command]
pub(crate) fn set_title<R: Runtime>(window: Window<R>, title: &str) {

View File

@@ -5,7 +5,7 @@ mod mac;
use crate::commands::{set_theme, set_title};
use std::sync::atomic::AtomicBool;
use tauri::{generate_handler, plugin, plugin::TauriPlugin, Manager, Runtime};
use tauri::{Manager, Runtime, generate_handler, plugin, plugin::TauriPlugin};
pub trait AppHandleMacWindowExt {
/// Sets whether to use the native titlebar
@@ -14,7 +14,9 @@ pub trait AppHandleMacWindowExt {
impl<R: Runtime> AppHandleMacWindowExt for tauri::AppHandle<R> {
fn set_native_titlebar(&self, enable: bool) {
self.state::<PluginState>().native_titlebar.store(enable, std::sync::atomic::Ordering::Relaxed);
self.state::<PluginState>()
.native_titlebar
.store(enable, std::sync::atomic::Ordering::Relaxed);
}
}
@@ -23,17 +25,19 @@ pub(crate) struct PluginState {
}
pub fn init<R: Runtime>() -> TauriPlugin<R> {
plugin::Builder::new("yaak-mac-window")
let mut builder = plugin::Builder::new("yaak-mac-window")
.setup(move |app, _| {
app.manage(PluginState { native_titlebar: AtomicBool::new(false) });
Ok(())
})
.invoke_handler(generate_handler![set_title, set_theme])
.on_window_ready(move |window| {
#[cfg(target_os = "macos")]
{
mac::setup_traffic_light_positioner(&window);
}
})
.build()
.invoke_handler(generate_handler![set_title, set_theme]);
#[cfg(target_os = "macos")]
{
builder = builder.on_window_ready(move |window| {
mac::setup_traffic_light_positioner(&window);
});
}
builder.build()
}

View File

@@ -371,9 +371,7 @@ pub fn setup_traffic_light_positioner<R: Runtime>(window: &Window<R>) {
// Are we de-allocing this properly? (I miss safe Rust :( )
let window_label = window.label().to_string();
let app_state = WindowState {
window: window.clone(),
};
let app_state = WindowState { window: window.clone() };
let app_box = Box::into_raw(Box::new(app_state)) as *mut c_void;
let random_str: String =
rand::rng().sample_iter(&Alphanumeric).take(20).map(char::from).collect();

View File

@@ -1,6 +1,8 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AnyModel = CookieJar | Environment | Folder | GraphQlIntrospection | GrpcConnection | GrpcEvent | GrpcRequest | HttpRequest | HttpResponse | KeyValue | Plugin | Settings | SyncState | WebsocketConnection | WebsocketEvent | WebsocketRequest | Workspace | WorkspaceMeta;
export type AnyModel = CookieJar | Environment | Folder | GraphQlIntrospection | GrpcConnection | GrpcEvent | GrpcRequest | HttpRequest | HttpResponse | HttpResponseEvent | KeyValue | Plugin | Settings | SyncState | WebsocketConnection | WebsocketEvent | WebsocketRequest | Workspace | WorkspaceMeta;
export type ClientCertificate = { host: string, port: number | null, crtFile: string | null, keyFile: string | null, pfxFile: string | null, passphrase: string | null, enabled?: boolean, };
export type Cookie = { raw_cookie: string, domain: CookieDomain, expires: CookieExpires, path: [string, boolean], };
@@ -36,7 +38,16 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
/**
* Serializable representation of HTTP response events for DB storage.
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -62,7 +73,7 @@ export type ProxySetting = { "type": "enabled", http: string, https: string, aut
export type ProxySettingAuth = { user: string, password: string, };
export type Settings = { model: "settings", id: string, createdAt: string, updatedAt: string, appearance: string, coloredMethods: boolean, editorFont: string | null, editorFontSize: number, editorKeymap: EditorKeymap, editorSoftWrap: boolean, hideWindowControls: boolean, useNativeTitlebar: boolean, interfaceFont: string | null, interfaceFontSize: number, interfaceScale: number, openWorkspaceNewWindow: boolean | null, proxy: ProxySetting | null, themeDark: string, themeLight: string, updateChannel: string, hideLicenseBadge: boolean, autoupdate: boolean, autoDownloadUpdates: boolean, checkNotifications: boolean, };
export type Settings = { model: "settings", id: string, createdAt: string, updatedAt: string, appearance: string, clientCertificates: Array<ClientCertificate>, coloredMethods: boolean, editorFont: string | null, editorFontSize: number, editorKeymap: EditorKeymap, editorSoftWrap: boolean, hideWindowControls: boolean, useNativeTitlebar: boolean, interfaceFont: string | null, interfaceFontSize: number, interfaceScale: number, openWorkspaceNewWindow: boolean | null, proxy: ProxySetting | null, themeDark: string, themeLight: string, updateChannel: string, hideLicenseBadge: boolean, autoupdate: boolean, autoDownloadUpdates: boolean, checkNotifications: boolean, };
export type SyncState = { model: "sync_state", id: string, workspaceId: string, createdAt: string, updatedAt: string, flushedAt: string, modelId: string, checksum: string, relPath: string, syncDir: string, };

View File

@@ -0,0 +1,12 @@
CREATE TABLE body_chunks
(
id TEXT PRIMARY KEY,
body_id TEXT NOT NULL,
chunk_index INTEGER NOT NULL,
data BLOB NOT NULL,
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
UNIQUE (body_id, chunk_index)
);
CREATE INDEX idx_body_chunks_body_id ON body_chunks (body_id, chunk_index);

View File

@@ -15,6 +15,7 @@ export const grpcEventsAtom = createOrderedModelAtom('grpc_event', 'createdAt',
export const grpcRequestsAtom = createModelAtom('grpc_request');
export const httpRequestsAtom = createModelAtom('http_request');
export const httpResponsesAtom = createOrderedModelAtom('http_response', 'createdAt', 'desc');
export const httpResponseEventsAtom = createOrderedModelAtom('http_response_event', 'createdAt', 'asc');
export const keyValuesAtom = createModelAtom('key_value');
export const pluginsAtom = createModelAtom('plugin');
export const settingsAtom = createSingularModelAtom('settings');

View File

@@ -11,6 +11,7 @@ export function newStoreData(): ModelStoreData {
grpc_request: {},
http_request: {},
http_response: {},
http_response_event: {},
key_value: {},
plugin: {},
settings: {},

View File

@@ -0,0 +1 @@
ALTER TABLE settings ADD COLUMN client_certificates TEXT DEFAULT '[]' NOT NULL;

View File

@@ -0,0 +1,15 @@
-- Add default User-Agent header to workspaces that don't already have one (case-insensitive check)
UPDATE workspaces
SET headers = json_insert(headers, '$[#]', json('{"enabled":true,"name":"User-Agent","value":"yaak"}'))
WHERE NOT EXISTS (
SELECT 1 FROM json_each(workspaces.headers)
WHERE LOWER(json_extract(value, '$.name')) = 'user-agent'
);
-- Add default Accept header to workspaces that don't already have one (case-insensitive check)
UPDATE workspaces
SET headers = json_insert(headers, '$[#]', json('{"enabled":true,"name":"Accept","value":"*/*"}'))
WHERE NOT EXISTS (
SELECT 1 FROM json_each(workspaces.headers)
WHERE LOWER(json_extract(value, '$.name')) = 'accept'
);

View File

@@ -0,0 +1,3 @@
-- Add request_headers and content_length_compressed columns to http_responses table
ALTER TABLE http_responses ADD COLUMN request_headers TEXT NOT NULL DEFAULT '[]';
ALTER TABLE http_responses ADD COLUMN content_length_compressed INTEGER;

View File

@@ -0,0 +1,15 @@
CREATE TABLE http_response_events
(
id TEXT NOT NULL
PRIMARY KEY,
model TEXT DEFAULT 'http_response_event' NOT NULL,
workspace_id TEXT NOT NULL
REFERENCES workspaces
ON DELETE CASCADE,
response_id TEXT NOT NULL
REFERENCES http_responses
ON DELETE CASCADE,
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
updated_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL,
event TEXT NOT NULL
);

View File

@@ -0,0 +1,2 @@
ALTER TABLE http_responses
ADD COLUMN request_content_length INTEGER;

View File

@@ -0,0 +1,372 @@
use crate::error::Result;
use crate::util::generate_prefixed_id;
use include_dir::{Dir, include_dir};
use log::{debug, info};
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::{OptionalExtension, params};
use std::sync::{Arc, Mutex};
use tauri::{Manager, Runtime, State};
static BLOB_MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/blob_migrations");
/// A chunk of body data stored in the blob database.
#[derive(Debug, Clone)]
pub struct BodyChunk {
pub id: String,
pub body_id: String,
pub chunk_index: i32,
pub data: Vec<u8>,
}
impl BodyChunk {
pub fn new(body_id: impl Into<String>, chunk_index: i32, data: Vec<u8>) -> Self {
Self { id: generate_prefixed_id("bc"), body_id: body_id.into(), chunk_index, data }
}
}
/// Extension trait for accessing the blob manager from app handle.
pub trait BlobManagerExt<'a, R> {
fn blob_manager(&'a self) -> State<'a, BlobManager>;
fn blobs(&'a self) -> BlobContext;
}
impl<'a, R: Runtime, M: Manager<R>> BlobManagerExt<'a, R> for M {
fn blob_manager(&'a self) -> State<'a, BlobManager> {
self.state::<BlobManager>()
}
fn blobs(&'a self) -> BlobContext {
let manager = self.state::<BlobManager>();
manager.inner().connect()
}
}
/// Manages the blob database connection pool.
#[derive(Debug, Clone)]
pub struct BlobManager {
pool: Arc<Mutex<Pool<SqliteConnectionManager>>>,
}
impl BlobManager {
pub fn new(pool: Pool<SqliteConnectionManager>) -> Self {
Self { pool: Arc::new(Mutex::new(pool)) }
}
pub fn connect(&self) -> BlobContext {
let conn = self
.pool
.lock()
.expect("Failed to gain lock on blob DB")
.get()
.expect("Failed to get blob DB connection from pool");
BlobContext { conn }
}
}
/// Context for blob database operations.
pub struct BlobContext {
conn: r2d2::PooledConnection<SqliteConnectionManager>,
}
impl BlobContext {
/// Insert a single chunk.
pub fn insert_chunk(&self, chunk: &BodyChunk) -> Result<()> {
self.conn.execute(
"INSERT INTO body_chunks (id, body_id, chunk_index, data) VALUES (?1, ?2, ?3, ?4)",
params![chunk.id, chunk.body_id, chunk.chunk_index, chunk.data],
)?;
Ok(())
}
/// Get all chunks for a body, ordered by chunk_index.
pub fn get_chunks(&self, body_id: &str) -> Result<Vec<BodyChunk>> {
let mut stmt = self.conn.prepare(
"SELECT id, body_id, chunk_index, data FROM body_chunks
WHERE body_id = ?1 ORDER BY chunk_index ASC",
)?;
let chunks = stmt
.query_map(params![body_id], |row| {
Ok(BodyChunk {
id: row.get(0)?,
body_id: row.get(1)?,
chunk_index: row.get(2)?,
data: row.get(3)?,
})
})?
.collect::<std::result::Result<Vec<_>, _>>()?;
Ok(chunks)
}
/// Delete all chunks for a body.
pub fn delete_chunks(&self, body_id: &str) -> Result<()> {
self.conn.execute("DELETE FROM body_chunks WHERE body_id = ?1", params![body_id])?;
Ok(())
}
/// Delete all chunks matching a body_id prefix (e.g., "rs_abc123.%" to delete all bodies for a response).
pub fn delete_chunks_like(&self, body_id_prefix: &str) -> Result<()> {
self.conn
.execute("DELETE FROM body_chunks WHERE body_id LIKE ?1", params![body_id_prefix])?;
Ok(())
}
}
impl BlobContext {
/// Get the total size of a body without loading its data.
pub fn get_body_size(&self, body_id: &str) -> Result<usize> {
let size: i64 = self
.conn
.query_row(
"SELECT COALESCE(SUM(LENGTH(data)), 0) FROM body_chunks WHERE body_id = ?1",
params![body_id],
|row| row.get(0),
)
.unwrap_or(0);
Ok(size as usize)
}
/// Check if a body exists.
pub fn body_exists(&self, body_id: &str) -> Result<bool> {
let count: i64 = self
.conn
.query_row(
"SELECT COUNT(*) FROM body_chunks WHERE body_id = ?1",
params![body_id],
|row| row.get(0),
)
.unwrap_or(0);
Ok(count > 0)
}
}
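// Editorial usage sketch (not part of the diff), via the BlobManagerExt trait above
// from anything implementing tauri::Manager:
//
//   let ctx = app_handle.blobs();
//   ctx.insert_chunk(&BodyChunk::new("rs_abc.response", 0, bytes))?;
//   assert!(ctx.body_exists("rs_abc.response")?);
//   let total = ctx.get_body_size("rs_abc.response")?;
//
// The "rs_abc.response" id follows the "<response_id>.<kind>" convention implied by
// delete_chunks_like and the tests below; the exact id scheme is an assumption here.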
/// Run migrations for the blob database.
pub fn migrate_blob_db(pool: &Pool<SqliteConnectionManager>) -> Result<()> {
info!("Running blob database migrations");
// Create migrations tracking table
pool.get()?.execute(
"CREATE TABLE IF NOT EXISTS _blob_migrations (
version TEXT PRIMARY KEY,
description TEXT NOT NULL,
applied_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
)",
[],
)?;
// Read and sort all .sql files
let mut entries: Vec<_> = BLOB_MIGRATIONS_DIR
.entries()
.iter()
.filter(|e| e.path().extension().map(|ext| ext == "sql").unwrap_or(false))
.collect();
entries.sort_by_key(|e| e.path());
let mut ran_migrations = 0;
for entry in &entries {
let filename = entry.path().file_name().unwrap().to_str().unwrap();
let version = filename.split('_').next().unwrap();
// Check if already applied
let already_applied: Option<i64> = pool
.get()?
.query_row("SELECT 1 FROM _blob_migrations WHERE version = ?", [version], |r| r.get(0))
.optional()?;
if already_applied.is_some() {
debug!("Skipping already applied blob migration: {}", filename);
continue;
}
let sql =
entry.as_file().unwrap().contents_utf8().expect("Failed to read blob migration file");
info!("Applying blob migration: {}", filename);
let conn = pool.get()?;
conn.execute_batch(sql)?;
// Record migration
conn.execute(
"INSERT INTO _blob_migrations (version, description) VALUES (?, ?)",
params![version, filename],
)?;
ran_migrations += 1;
}
if ran_migrations == 0 {
info!("No blob migrations to run");
} else {
info!("Ran {} blob migration(s)", ran_migrations);
}
Ok(())
}
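// Editorial example (not part of the diff): with the versioning above, a migration
// file named (hypothetically) "0001_create_body_chunks.sql" runs once and is then
// recorded under version "0001", i.e. everything before the first underscore.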
#[cfg(test)]
mod tests {
use super::*;
fn create_test_pool() -> Pool<SqliteConnectionManager> {
let manager = SqliteConnectionManager::memory();
let pool = Pool::builder().max_size(1).build(manager).unwrap();
migrate_blob_db(&pool).unwrap();
pool
}
#[test]
fn test_insert_and_get_chunks() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
let body_id = "rs_test123.request";
let chunk1 = BodyChunk::new(body_id, 0, b"Hello, ".to_vec());
let chunk2 = BodyChunk::new(body_id, 1, b"World!".to_vec());
ctx.insert_chunk(&chunk1).unwrap();
ctx.insert_chunk(&chunk2).unwrap();
let chunks = ctx.get_chunks(body_id).unwrap();
assert_eq!(chunks.len(), 2);
assert_eq!(chunks[0].chunk_index, 0);
assert_eq!(chunks[0].data, b"Hello, ");
assert_eq!(chunks[1].chunk_index, 1);
assert_eq!(chunks[1].data, b"World!");
}
#[test]
fn test_get_chunks_ordered_by_index() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
let body_id = "rs_test123.request";
// Insert out of order
ctx.insert_chunk(&BodyChunk::new(body_id, 2, b"C".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new(body_id, 0, b"A".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new(body_id, 1, b"B".to_vec())).unwrap();
let chunks = ctx.get_chunks(body_id).unwrap();
assert_eq!(chunks.len(), 3);
assert_eq!(chunks[0].data, b"A");
assert_eq!(chunks[1].data, b"B");
assert_eq!(chunks[2].data, b"C");
}
#[test]
fn test_delete_chunks() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
let body_id = "rs_test123.request";
ctx.insert_chunk(&BodyChunk::new(body_id, 0, b"data".to_vec())).unwrap();
assert!(ctx.body_exists(body_id).unwrap());
ctx.delete_chunks(body_id).unwrap();
assert!(!ctx.body_exists(body_id).unwrap());
assert_eq!(ctx.get_chunks(body_id).unwrap().len(), 0);
}
#[test]
fn test_delete_chunks_like() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
// Insert chunks for same response but different body types
ctx.insert_chunk(&BodyChunk::new("rs_abc.request", 0, b"req".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new("rs_abc.response", 0, b"resp".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new("rs_other.request", 0, b"other".to_vec())).unwrap();
// Delete all bodies for rs_abc
ctx.delete_chunks_like("rs_abc.%").unwrap();
// rs_abc bodies should be gone
assert!(!ctx.body_exists("rs_abc.request").unwrap());
assert!(!ctx.body_exists("rs_abc.response").unwrap());
// rs_other should still exist
assert!(ctx.body_exists("rs_other.request").unwrap());
}
#[test]
fn test_get_body_size() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
let body_id = "rs_test123.request";
ctx.insert_chunk(&BodyChunk::new(body_id, 0, b"Hello".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new(body_id, 1, b"World".to_vec())).unwrap();
let size = ctx.get_body_size(body_id).unwrap();
assert_eq!(size, 10); // "Hello" + "World" = 10 bytes
}
#[test]
fn test_get_body_size_empty() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
let size = ctx.get_body_size("nonexistent").unwrap();
assert_eq!(size, 0);
}
#[test]
fn test_body_exists() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
assert!(!ctx.body_exists("rs_test.request").unwrap());
ctx.insert_chunk(&BodyChunk::new("rs_test.request", 0, b"data".to_vec())).unwrap();
assert!(ctx.body_exists("rs_test.request").unwrap());
}
#[test]
fn test_multiple_bodies_isolated() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
ctx.insert_chunk(&BodyChunk::new("body1", 0, b"data1".to_vec())).unwrap();
ctx.insert_chunk(&BodyChunk::new("body2", 0, b"data2".to_vec())).unwrap();
let chunks1 = ctx.get_chunks("body1").unwrap();
let chunks2 = ctx.get_chunks("body2").unwrap();
assert_eq!(chunks1.len(), 1);
assert_eq!(chunks1[0].data, b"data1");
assert_eq!(chunks2.len(), 1);
assert_eq!(chunks2[0].data, b"data2");
}
#[test]
fn test_large_chunk() {
let pool = create_test_pool();
let manager = BlobManager::new(pool);
let ctx = manager.connect();
// 1MB chunk
let large_data: Vec<u8> = (0..1024 * 1024).map(|i| (i % 256) as u8).collect();
let body_id = "rs_large.request";
ctx.insert_chunk(&BodyChunk::new(body_id, 0, large_data.clone())).unwrap();
let chunks = ctx.get_chunks(body_id).unwrap();
assert_eq!(chunks.len(), 1);
assert_eq!(chunks[0].data, large_data);
assert_eq!(ctx.get_body_size(body_id).unwrap(), 1024 * 1024);
}
}

View File

@@ -1,3 +1,4 @@
use crate::blob_manager::BlobManagerExt;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, WebsocketEvent};
@@ -8,6 +9,7 @@ use tauri::{AppHandle, Runtime, WebviewWindow};
#[tauri::command]
pub(crate) fn upsert<R: Runtime>(window: WebviewWindow<R>, model: AnyModel) -> Result<String> {
let db = window.db();
let blobs = window.blob_manager();
let source = &UpdateSource::from_window(&window);
let id = match model {
AnyModel::CookieJar(m) => db.upsert_cookie_jar(&m, source)?.id,
@@ -15,7 +17,7 @@ pub(crate) fn upsert<R: Runtime>(window: WebviewWindow<R>, model: AnyModel) -> R
AnyModel::Folder(m) => db.upsert_folder(&m, source)?.id,
AnyModel::GrpcRequest(m) => db.upsert_grpc_request(&m, source)?.id,
AnyModel::HttpRequest(m) => db.upsert_http_request(&m, source)?.id,
AnyModel::HttpResponse(m) => db.upsert_http_response(&m, source)?.id,
AnyModel::HttpResponse(m) => db.upsert_http_response(&m, source, &blobs)?.id,
AnyModel::KeyValue(m) => db.upsert_key_value(&m, source)?.id,
AnyModel::Plugin(m) => db.upsert_plugin(&m, source)?.id,
AnyModel::Settings(m) => db.upsert_settings(&m, source)?.id,
@@ -30,6 +32,7 @@ pub(crate) fn upsert<R: Runtime>(window: WebviewWindow<R>, model: AnyModel) -> R
#[tauri::command]
pub(crate) fn delete<R: Runtime>(window: WebviewWindow<R>, model: AnyModel) -> Result<String> {
let blobs = window.blob_manager();
// Use transaction for deletions because it might recurse
window.with_tx(|tx| {
let source = &UpdateSource::from_window(&window);
@@ -40,7 +43,7 @@ pub(crate) fn delete<R: Runtime>(window: WebviewWindow<R>, model: AnyModel) -> R
AnyModel::GrpcConnection(m) => tx.delete_grpc_connection(&m, source)?.id,
AnyModel::GrpcRequest(m) => tx.delete_grpc_request(&m, source)?.id,
AnyModel::HttpRequest(m) => tx.delete_http_request(&m, source)?.id,
AnyModel::HttpResponse(m) => tx.delete_http_response(&m, source)?.id,
AnyModel::HttpResponse(m) => tx.delete_http_response(&m, source, &blobs)?.id,
AnyModel::Plugin(m) => tx.delete_plugin(&m, source)?.id,
AnyModel::WebsocketConnection(m) => tx.delete_websocket_connection(&m, source)?.id,
AnyModel::WebsocketRequest(m) => tx.delete_websocket_request(&m, source)?.id,

View File

@@ -67,7 +67,7 @@ impl<'a> DbContext<'a> {
.expect("Failed to run find on DB")
}
pub fn find_all<'s, M>(&self) -> Result<Vec<M>>
pub(crate) fn find_all<'s, M>(&self) -> Result<Vec<M>>
where
M: Into<AnyModel> + Clone + UpsertModelInfo,
{
@@ -82,7 +82,7 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub fn find_many<'s, M>(
pub(crate) fn find_many<'s, M>(
&self,
col: impl IntoColumnRef,
value: impl Into<SimpleExpr>,
@@ -115,7 +115,7 @@ impl<'a> DbContext<'a> {
Ok(items.map(|v| v.unwrap()).collect())
}
pub fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
pub(crate) fn upsert<M>(&self, model: &M, source: &UpdateSource) -> Result<M>
where
M: Into<AnyModel> + From<AnyModel> + UpsertModelInfo + Clone,
{

View File

@@ -18,7 +18,7 @@ pub enum Error {
#[error("Model serialization error: {0}")]
ModelSerializationError(String),
#[error("Model error: {0}")]
#[error("HTTP error: {0}")]
GenericError(String),
#[error("DB Migration Failed: {0}")]

View File

@@ -1,3 +1,4 @@
use crate::blob_manager::{BlobManager, migrate_blob_db};
use crate::commands::*;
use crate::migrate::migrate_db;
use crate::query_manager::QueryManager;
@@ -14,6 +15,7 @@ use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
mod commands;
pub mod blob_manager;
mod connection_or_tx;
pub mod db_context;
pub mod error;
@@ -50,7 +52,9 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
create_dir_all(app_path.clone()).expect("Problem creating App directory!");
let db_file_path = app_path.join("db.sqlite");
let blob_db_file_path = app_path.join("blobs.sqlite");
// Main database pool
let manager = SqliteConnectionManager::file(db_file_path);
let pool = Pool::builder()
.max_size(100) // Up from 10 (just in case)
@@ -68,7 +72,26 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
return Err(Box::from(e.to_string()));
};
// Blob database pool
let blob_manager = SqliteConnectionManager::file(blob_db_file_path);
let blob_pool = Pool::builder()
.max_size(50)
.connection_timeout(Duration::from_secs(10))
.build(blob_manager)
.unwrap();
if let Err(e) = migrate_blob_db(&blob_pool) {
error!("Failed to run blob database migration {e:?}");
app_handle
.dialog()
.message(e.to_string())
.kind(MessageDialogKind::Error)
.blocking_show();
return Err(Box::from(e.to_string()));
};
app_handle.manage(SqliteConnection::new(pool.clone()));
app_handle.manage(BlobManager::new(blob_pool));
{
let (tx, rx) = mpsc::channel();

View File

@@ -52,6 +52,26 @@ pub struct ProxySettingAuth {
pub password: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct ClientCertificate {
pub host: String,
#[serde(default)]
pub port: Option<i32>,
#[serde(default)]
pub crt_file: Option<String>,
#[serde(default)]
pub key_file: Option<String>,
#[serde(default)]
pub pfx_file: Option<String>,
#[serde(default)]
pub passphrase: Option<String>,
#[serde(default = "default_true")]
#[ts(optional, as = "Option<bool>")]
pub enabled: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
@@ -106,6 +126,7 @@ pub struct Settings {
pub updated_at: NaiveDateTime,
pub appearance: String,
pub client_certificates: Vec<ClientCertificate>,
pub colored_methods: bool,
pub editor_font: Option<String>,
pub editor_font_size: i32,
@@ -158,10 +179,12 @@ impl UpsertModelInfo for Settings {
None => None,
Some(p) => Some(serde_json::to_string(&p)?),
};
let client_certificates = serde_json::to_string(&self.client_certificates)?;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
(UpdatedAt, upsert_date(source, self.updated_at)),
(Appearance, self.appearance.as_str().into()),
(ClientCertificates, client_certificates.into()),
(EditorFontSize, self.editor_font_size.into()),
(EditorKeymap, self.editor_keymap.to_string().into()),
(EditorSoftWrap, self.editor_soft_wrap.into()),
@@ -188,6 +211,7 @@ impl UpsertModelInfo for Settings {
vec![
SettingsIden::UpdatedAt,
SettingsIden::Appearance,
SettingsIden::ClientCertificates,
SettingsIden::EditorFontSize,
SettingsIden::EditorKeymap,
SettingsIden::EditorSoftWrap,
@@ -215,6 +239,7 @@ impl UpsertModelInfo for Settings {
Self: Sized,
{
let proxy: Option<String> = row.get("proxy")?;
let client_certificates: String = row.get("client_certificates")?;
let editor_keymap: String = row.get("editor_keymap")?;
Ok(Self {
id: row.get("id")?,
@@ -222,6 +247,7 @@ impl UpsertModelInfo for Settings {
created_at: row.get("created_at")?,
updated_at: row.get("updated_at")?,
appearance: row.get("appearance")?,
client_certificates: serde_json::from_str(&client_certificates).unwrap_or_default(),
editor_font_size: row.get("editor_font_size")?,
editor_font: row.get("editor_font")?,
editor_keymap: EditorKeymap::from_str(editor_keymap.as_str()).unwrap(),
@@ -1296,12 +1322,15 @@ pub struct HttpResponse {
pub request_id: String,
pub body_path: Option<String>,
pub content_length: Option<i32>,
pub content_length: Option<i64>,
pub content_length_compressed: Option<i64>,
pub elapsed: i32,
pub elapsed_headers: i32,
pub error: Option<String>,
pub headers: Vec<HttpResponseHeader>,
pub remote_addr: Option<String>,
pub request_content_length: Option<i64>,
pub request_headers: Vec<HttpResponseHeader>,
pub status: i32,
pub status_reason: Option<String>,
pub state: HttpResponseState,
@@ -1342,16 +1371,19 @@ impl UpsertModelInfo for HttpResponse {
(WorkspaceId, self.workspace_id.into()),
(BodyPath, self.body_path.into()),
(ContentLength, self.content_length.into()),
(ContentLengthCompressed, self.content_length_compressed.into()),
(Elapsed, self.elapsed.into()),
(ElapsedHeaders, self.elapsed_headers.into()),
(Error, self.error.into()),
(Headers, serde_json::to_string(&self.headers)?.into()),
(RemoteAddr, self.remote_addr.into()),
(RequestHeaders, serde_json::to_string(&self.request_headers)?.into()),
(State, serde_json::to_value(self.state)?.as_str().into()),
(Status, self.status.into()),
(StatusReason, self.status_reason.into()),
(Url, self.url.into()),
(Version, self.version.into()),
(RequestContentLength, self.request_content_length.into()),
])
}
@@ -1360,11 +1392,14 @@ impl UpsertModelInfo for HttpResponse {
HttpResponseIden::UpdatedAt,
HttpResponseIden::BodyPath,
HttpResponseIden::ContentLength,
HttpResponseIden::ContentLengthCompressed,
HttpResponseIden::Elapsed,
HttpResponseIden::ElapsedHeaders,
HttpResponseIden::Error,
HttpResponseIden::Headers,
HttpResponseIden::RemoteAddr,
HttpResponseIden::RequestContentLength,
HttpResponseIden::RequestHeaders,
HttpResponseIden::State,
HttpResponseIden::Status,
HttpResponseIden::StatusReason,
@@ -1389,6 +1424,7 @@ impl UpsertModelInfo for HttpResponse {
error: r.get("error")?,
url: r.get("url")?,
content_length: r.get("content_length")?,
content_length_compressed: r.get("content_length_compressed").unwrap_or_default(),
version: r.get("version")?,
elapsed: r.get("elapsed")?,
elapsed_headers: r.get("elapsed_headers")?,
@@ -1398,10 +1434,152 @@ impl UpsertModelInfo for HttpResponse {
state: serde_json::from_str(format!(r#""{state}""#).as_str()).unwrap(),
body_path: r.get("body_path")?,
headers: serde_json::from_str(headers.as_str()).unwrap_or_default(),
request_content_length: r.get("request_content_length").unwrap_or_default(),
request_headers: serde_json::from_str(
r.get::<_, String>("request_headers").unwrap_or_default().as_str(),
)
.unwrap_or_default(),
})
}
}
/// Serializable representation of HTTP response events for DB storage.
/// This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
/// The `From` impl is in yaak-http to avoid circular dependencies.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(tag = "type", rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
pub enum HttpResponseEventData {
Setting {
name: String,
value: String,
},
Info {
message: String,
},
Redirect {
url: String,
status: u16,
behavior: String,
},
SendUrl {
method: String,
path: String,
},
ReceiveUrl {
version: String,
status: String,
},
HeaderUp {
name: String,
value: String,
},
HeaderDown {
name: String,
value: String,
},
ChunkSent {
bytes: usize,
},
ChunkReceived {
bytes: usize,
},
}
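// Editorial note (not part of the diff): this enum is internally tagged
// (tag = "type", no content attribute), so variant fields sit beside the tag:
//   HttpResponseEventData::HeaderUp { name, value }
// serializes to
//   {"type":"header_up","name":"...","value":"..."}
// matching the generated HttpResponseEventData union in gen_models.ts above.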
impl Default for HttpResponseEventData {
fn default() -> Self {
Self::Info { message: String::new() }
}
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
#[enum_def(table_name = "http_response_events")]
pub struct HttpResponseEvent {
#[ts(type = "\"http_response_event\"")]
pub model: String,
pub id: String,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
pub workspace_id: String,
pub response_id: String,
pub event: HttpResponseEventData,
}
impl UpsertModelInfo for HttpResponseEvent {
fn table_name() -> impl IntoTableRef + IntoIden {
HttpResponseEventIden::Table
}
fn id_column() -> impl IntoIden + Eq + Clone {
HttpResponseEventIden::Id
}
fn generate_id() -> String {
generate_prefixed_id("re")
}
fn order_by() -> (impl IntoColumnRef, Order) {
(HttpResponseEventIden::CreatedAt, Order::Asc)
}
fn get_id(&self) -> String {
self.id.clone()
}
fn insert_values(
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpResponseEventIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
(UpdatedAt, upsert_date(source, self.updated_at)),
(WorkspaceId, self.workspace_id.into()),
(ResponseId, self.response_id.into()),
(Event, serde_json::to_string(&self.event)?.into()),
])
}
fn update_columns() -> Vec<impl IntoIden> {
vec![
HttpResponseEventIden::UpdatedAt,
HttpResponseEventIden::Event,
]
}
fn from_row(r: &Row) -> rusqlite::Result<Self>
where
Self: Sized,
{
let event: String = r.get("event")?;
Ok(Self {
id: r.get("id")?,
model: r.get("model")?,
workspace_id: r.get("workspace_id")?,
response_id: r.get("response_id")?,
created_at: r.get("created_at")?,
updated_at: r.get("updated_at")?,
event: serde_json::from_str(&event).unwrap_or_default(),
})
}
}
impl HttpResponseEvent {
pub fn new(response_id: &str, workspace_id: &str, event: HttpResponseEventData) -> Self {
Self {
model: "http_response_event".to_string(),
id: Self::generate_id(),
created_at: Utc::now().naive_utc(),
updated_at: Utc::now().naive_utc(),
workspace_id: workspace_id.to_string(),
response_id: response_id.to_string(),
event,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
@@ -2152,6 +2330,7 @@ define_any_model! {
GrpcRequest,
HttpRequest,
HttpResponse,
HttpResponseEvent,
KeyValue,
Plugin,
Settings,

View File

@@ -1,8 +1,6 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{
GrpcRequest, HttpRequest, WebsocketRequest,
};
use crate::models::{GrpcRequest, HttpRequest, WebsocketRequest};
pub enum AnyRequest {
HttpRequest(HttpRequest),

View File

@@ -143,11 +143,7 @@ impl<'a> DbContext<'a> {
}
        self.upsert(
-           &Environment {
-               name,
-               variables: cleaned_variables,
-               ..environment.clone()
-           },
+           &Environment { name, variables: cleaned_variables, ..environment.clone() },
            source,
        )
    }

View File

@@ -1,7 +1,10 @@
use crate::connection_or_tx::ConnectionOrTx;
use crate::db_context::DbContext;
use crate::error::Result;
-use crate::models::{Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest, HttpRequestHeader, HttpRequestIden, WebsocketRequest, WebsocketRequestIden};
+use crate::models::{
+    Environment, EnvironmentIden, Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequest,
+    HttpRequestHeader, HttpRequestIden, WebsocketRequest, WebsocketRequestIden,
+};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -69,57 +72,35 @@ impl<'a> DbContext<'a> {
        for m in self.find_many::<HttpRequest>(HttpRequestIden::FolderId, fid, None)? {
            self.upsert_http_request(
-               &HttpRequest {
-                   id: "".into(),
-                   folder_id: Some(new_folder.id.clone()),
-                   ..m
-               },
+               &HttpRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
                source,
            )?;
        }
        for m in self.find_many::<WebsocketRequest>(WebsocketRequestIden::FolderId, fid, None)? {
            self.upsert_websocket_request(
-               &WebsocketRequest {
-                   id: "".into(),
-                   folder_id: Some(new_folder.id.clone()),
-                   ..m
-               },
+               &WebsocketRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
                source,
            )?;
        }
        for m in self.find_many::<GrpcRequest>(GrpcRequestIden::FolderId, fid, None)? {
            self.upsert_grpc_request(
-               &GrpcRequest {
-                   id: "".into(),
-                   folder_id: Some(new_folder.id.clone()),
-                   ..m
-               },
+               &GrpcRequest { id: "".into(), folder_id: Some(new_folder.id.clone()), ..m },
                source,
            )?;
        }
        for m in self.find_many::<Environment>(EnvironmentIden::ParentId, fid, None)? {
            self.upsert_environment(
-               &Environment {
-                   id: "".into(),
-                   parent_id: Some(new_folder.id.clone()),
-                   ..m
-               },
+               &Environment { id: "".into(), parent_id: Some(new_folder.id.clone()), ..m },
                source,
            )?;
        }
        for m in self.find_many::<Folder>(FolderIden::FolderId, fid, None)? {
            // Recurse down
-           self.duplicate_folder(
-               &Folder {
-                   folder_id: Some(new_folder.id.clone()),
-                   ..m
-               },
-               source,
-           )?;
+           self.duplicate_folder(&Folder { folder_id: Some(new_folder.id.clone()), ..m }, source)?;
        }
        Ok(new_folder)
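The `id: "".into()` in each loop leans on the convention, visible elsewhere in this diff, that an empty id means "insert as a new row with a freshly generated id". A toy, self-contained sketch of that convention (not the crate's actual upsert):

#[derive(Clone)]
struct Folder {
    id: String,
    name: String,
    folder_id: Option<String>,
}

// Stand-in for the real upsert: an empty id triggers id generation.
fn upsert(mut f: Folder) -> Folder {
    if f.id.is_empty() {
        f.id = "fl_new".to_string(); // stand-in for generate_prefixed_id
    }
    f
}

fn main() {
    let original = Folder { id: "fl_1".into(), name: "Auth".into(), folder_id: None };
    let copy = upsert(Folder { id: "".into(), ..original.clone() });
    assert_ne!(copy.id, original.id); // the duplicate gets a fresh id
    assert_eq!(copy.name, original.name);
}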

View File

@@ -31,13 +31,9 @@ impl<'a> DbContext<'a> {
                },
                source,
            ),
-           Some(introspection) => self.upsert(
-               &GraphQlIntrospection {
-                   content,
-                   ..introspection
-               },
-               source,
-           ),
+           Some(introspection) => {
+               self.upsert(&GraphQlIntrospection { content, ..introspection }, source)
+           }
        }
    }

View File

@@ -0,0 +1,18 @@
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpResponseEvent, HttpResponseEventIden};
use crate::util::UpdateSource;

impl<'a> DbContext<'a> {
    pub fn list_http_response_events(&self, response_id: &str) -> Result<Vec<HttpResponseEvent>> {
        self.find_many(HttpResponseEventIden::ResponseId, response_id, None)
    }

    pub fn upsert_http_response_event(
        &self,
        http_response_event: &HttpResponseEvent,
        source: &UpdateSource,
    ) -> Result<HttpResponseEvent> {
        self.upsert(http_response_event, source)
    }
}
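A hypothetical read path, assuming a `db: DbContext` obtained from `QueryManager::connect()` and an existing response id:

let events = db.list_http_response_events("rs_123")?;
for e in events {
    if let HttpResponseEventData::HeaderDown { name, value } = e.event {
        println!("{name}: {value}");
    }
}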

View File

@@ -1,12 +1,13 @@
+use crate::blob_manager::BlobManager;
+use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpResponse, HttpResponseIden, HttpResponseState};
+use crate::queries::MAX_HISTORY_ITEMS;
use crate::util::UpdateSource;
use log::{debug, error};
use sea_query::{Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::fs;
-use crate::db_context::DbContext;
-use crate::queries::MAX_HISTORY_ITEMS;
impl<'a> DbContext<'a> {
    pub fn get_http_response(&self, id: &str) -> Result<HttpResponse> {
@@ -58,6 +59,7 @@ impl<'a> DbContext<'a> {
        &self,
        http_response: &HttpResponse,
        source: &UpdateSource,
+       blob_manager: &BlobManager,
    ) -> Result<HttpResponse> {
        // Delete the body file if it exists
        if let Some(p) = http_response.body_path.clone() {
@@ -66,6 +68,13 @@ impl<'a> DbContext<'a> {
            };
        }

+       // Delete request body blobs (pattern: {response_id}.request)
+       let blob_ctx = blob_manager.connect();
+       let body_id = format!("{}.request", http_response.id);
+       if let Err(e) = blob_ctx.delete_chunks(&body_id) {
+           error!("Failed to delete request body blobs: {}", e);
+       }
        Ok(self.delete(http_response, source)?)
    }
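The blob key is plain string concatenation; a minimal sketch of the id the deletion above targets ("rs_123" is a made-up response id):

fn request_body_blob_id(response_id: &str) -> String {
    format!("{response_id}.request")
}

fn main() {
    assert_eq!(request_body_blob_id("rs_123"), "rs_123.request");
}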
@@ -73,12 +82,13 @@ impl<'a> DbContext<'a> {
        &self,
        http_response: &HttpResponse,
        source: &UpdateSource,
+       blob_manager: &BlobManager,
    ) -> Result<HttpResponse> {
        let responses = self.list_http_responses_for_request(&http_response.request_id, None)?;
        for m in responses.iter().skip(MAX_HISTORY_ITEMS - 1) {
            debug!("Deleting old HTTP response {}", m.id);
-           self.delete_http_response(&m, source)?;
+           self.delete_http_response(&m, source, blob_manager)?;
        }
        self.upsert(http_response, source)
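The `skip(MAX_HISTORY_ITEMS - 1)` keeps one slot free for the response about to be upserted. A standalone sketch of that arithmetic (`MAX_HISTORY_ITEMS = 20` and newest-first ordering are assumptions here; the real constant and sort order live in yaak-models):

const MAX_HISTORY_ITEMS: usize = 20;

// Returns the ids that would be pruned before inserting one more response.
fn ids_to_prune(existing_newest_first: &[String]) -> Vec<String> {
    existing_newest_first.iter().skip(MAX_HISTORY_ITEMS - 1).cloned().collect()
}

fn main() {
    let existing: Vec<String> = (0..25).map(|i| format!("rs_{i}")).collect();
    // 25 existing - 19 kept = 6 pruned; the upcoming upsert brings the total to 20.
    assert_eq!(ids_to_prune(&existing).len(), 6);
}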
@@ -101,10 +111,6 @@ impl<'a> DbContext<'a> {
        response: &HttpResponse,
        source: &UpdateSource,
    ) -> Result<HttpResponse> {
-       if response.id.is_empty() {
-           Ok(response.clone())
-       } else {
-           self.upsert(response, source)
-       }
+       if response.id.is_empty() { Ok(response.clone()) } else { self.upsert(response, source) }
    }
}

View File

@@ -1,8 +1,8 @@
+use chrono::NaiveDateTime;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{KeyValue, KeyValueIden, UpsertModelInfo};
use crate::util::UpdateSource;
-use chrono::NaiveDateTime;
use log::error;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
@@ -39,7 +39,12 @@ impl<'a> DbContext<'a> {
        }
    }

-   pub fn get_key_value_dte(&self, namespace: &str, key: &str, default: NaiveDateTime) -> NaiveDateTime {
+   pub fn get_key_value_dte(
+       &self,
+       namespace: &str,
+       key: &str,
+       default: NaiveDateTime,
+   ) -> NaiveDateTime {
        match self.get_key_value_raw(namespace, key) {
            None => default,
            Some(v) => {
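The `Some(v)` arm is elided in this diff; one plausible shape is a parse-with-fallback (the storage format here is an assumption, not the crate's actual code):

use chrono::NaiveDateTime; // assumes chrono's "serde" feature

fn parse_or(default: NaiveDateTime, raw: &str) -> NaiveDateTime {
    // Fall back to the caller's default on any parse error.
    serde_json::from_str::<NaiveDateTime>(raw).unwrap_or(default)
}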
@@ -139,14 +144,8 @@ impl<'a> DbContext<'a> {
                true,
            ),
            Some(kv) => (
-               self.upsert_key_value(
-                   &KeyValue {
-                       value: value.to_string(),
-                       ..kv
-                   },
-                   source,
-               )
-               .expect("Failed to update key value"),
+               self.upsert_key_value(&KeyValue { value: value.to_string(), ..kv }, source)
+                   .expect("Failed to update key value"),
                false,
            ),
        }

View File

@@ -8,6 +8,7 @@ mod grpc_connections;
mod grpc_events;
mod grpc_requests;
mod http_requests;
+mod http_response_events;
mod http_responses;
mod key_values;
mod plugin_key_values;

View File

@@ -18,6 +18,7 @@ impl<'a> DbContext<'a> {
            updated_at: Default::default(),
            appearance: "system".to_string(),
            client_certificates: Vec::new(),
+           editor_font_size: 12,
            editor_font: None,
            editor_keymap: EditorKeymap::Default,

View File

@@ -1,10 +1,10 @@
+use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{SyncState, SyncStateIden, UpsertModelInfo};
use crate::util::UpdateSource;
use sea_query::{Asterisk, Cond, Expr, Query, SqliteQueryBuilder};
use sea_query_rusqlite::RusqliteBinder;
use std::path::Path;
-use crate::db_context::DbContext;

impl<'a> DbContext<'a> {
    pub fn get_sync_state(&self, id: &str) -> Result<SyncState> {

View File

@@ -1,9 +1,6 @@
use crate::db_context::DbContext;
use crate::error::Result;
-use crate::models::{
-    WebsocketEvent,
-    WebsocketEventIden,
-};
+use crate::models::{WebsocketEvent, WebsocketEventIden};
use crate::util::UpdateSource;
impl<'a> DbContext<'a> {

View File

@@ -56,7 +56,11 @@ impl<'a> DbContext<'a> {
        websocket_request: &WebsocketRequest,
    ) -> Result<(Option<String>, BTreeMap<String, Value>, String)> {
        if let Some(at) = websocket_request.authentication_type.clone() {
-           return Ok((Some(at), websocket_request.authentication.clone(), websocket_request.id.clone()));
+           return Ok((
+               Some(at),
+               websocket_request.authentication.clone(),
+               websocket_request.id.clone(),
+           ));
        }
        if let Some(folder_id) = websocket_request.folder_id.clone() {

View File

@@ -14,10 +14,7 @@ impl<'a> DbContext<'a> {
            self.find_many(WorkspaceMetaIden::WorkspaceId, workspace_id, None)?;
        if workspace_metas.is_empty() {
-           let wm = WorkspaceMeta {
-               workspace_id: workspace_id.to_string(),
-               ..Default::default()
-           };
+           let wm = WorkspaceMeta { workspace_id: workspace_id.to_string(), ..Default::default() };
            workspace_metas.push(self.upsert_workspace_meta(&wm, &UpdateSource::Background)?)
        }
@@ -30,10 +27,8 @@ impl<'a> DbContext<'a> {
            return Ok(workspace_meta);
        }
-       let workspace_meta = WorkspaceMeta {
-           workspace_id: workspace_id.to_string(),
-           ..Default::default()
-       };
+       let workspace_meta =
+           WorkspaceMeta { workspace_id: workspace_id.to_string(), ..Default::default() };
        info!("Creating WorkspaceMeta for {workspace_id}");

View File

@@ -6,7 +6,7 @@ use crate::util::ModelPayload;
use r2d2::Pool;
use r2d2_sqlite::SqliteConnectionManager;
use rusqlite::TransactionBehavior;
-use std::sync::{mpsc, Arc, Mutex};
+use std::sync::{Arc, Mutex, mpsc};
use tauri::{Manager, Runtime, State};
pub trait QueryManagerExt<'a, R> {
@@ -58,10 +58,7 @@ impl QueryManager {
        pool: Pool<SqliteConnectionManager>,
        events_tx: mpsc::Sender<ModelPayload>,
    ) -> Self {
-       QueryManager {
-           pool: Arc::new(Mutex::new(pool)),
-           events_tx,
-       }
+       QueryManager { pool: Arc::new(Mutex::new(pool)), events_tx }
    }

    pub fn connect(&self) -> DbContext<'_> {
@@ -71,10 +68,7 @@ impl QueryManager {
.expect("Failed to gain lock on DB")
.get()
.expect("Failed to get a new DB connection from the pool");
DbContext {
events_tx: self.events_tx.clone(),
conn: ConnectionOrTx::Connection(conn),
}
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
}
pub fn with_conn<F, T>(&self, func: F) -> T
@@ -88,10 +82,8 @@ impl QueryManager {
            .get()
            .expect("Failed to get new DB connection from the pool");
-       let db_context = DbContext {
-           events_tx: self.events_tx.clone(),
-           conn: ConnectionOrTx::Connection(conn),
-       };
+       let db_context =
+           DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
        func(&db_context)
    }
@@ -113,10 +105,8 @@ impl QueryManager {
            .transaction_with_behavior(TransactionBehavior::Immediate)
            .expect("Failed to start DB transaction");
-       let db_context = DbContext {
-           events_tx: self.events_tx.clone(),
-           conn: ConnectionOrTx::Transaction(&tx),
-       };
+       let db_context =
+           DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
        match func(&db_context) {
            Ok(val) => {
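`TransactionBehavior::Immediate` takes SQLite's write lock at BEGIN rather than at the first write, so a busy database fails fast instead of failing mid-transaction. A standalone rusqlite sketch of the same pattern (table name and values are made up):

use rusqlite::{Connection, TransactionBehavior};

fn main() -> rusqlite::Result<()> {
    let mut conn = Connection::open_in_memory()?;
    conn.execute_batch("CREATE TABLE http_response_events (id TEXT PRIMARY KEY)")?;

    // BEGIN IMMEDIATE: acquire the write lock up front.
    let tx = conn.transaction_with_behavior(TransactionBehavior::Immediate)?;
    tx.execute("INSERT INTO http_response_events (id) VALUES (?1)", ["re_1"])?;
    tx.commit()?; // commit on success, as the match above does on Ok(val)
    Ok(())
}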

View File

@@ -62,9 +62,7 @@ pub enum UpdateSource {
impl UpdateSource {
    pub fn from_window<R: Runtime>(window: &WebviewWindow<R>) -> Self {
-       Self::Window {
-           label: window.label().to_string(),
-       }
+       Self::Window { label: window.label().to_string() }
    }
}

Some files were not shown because too many files have changed in this diff.