mirror of
https://github.com/mountain-loop/yaak.git
synced 2026-02-23 10:16:40 -05:00
Compare commits
93 Commits
v2026.1.0
...
yaak-cli-0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a19ee9b502 | ||
|
|
0130bdee6f | ||
|
|
71ae9f41ed | ||
|
|
d06b6ce636 | ||
|
|
f5727b28c4 | ||
|
|
c62db7be06 | ||
|
|
4e56daa555 | ||
|
|
746bedf885 | ||
|
|
949c4a445a | ||
|
|
1f588d0498 | ||
|
|
4573edc1e1 | ||
|
|
5a184c1b83 | ||
|
|
7b73401dcf | ||
|
|
8571440d84 | ||
|
|
bc37a5d666 | ||
|
|
a80f2ccf9a | ||
|
|
1eaf276b75 | ||
|
|
e9559dfdfa | ||
|
|
4c2e7b8609 | ||
|
|
e638cecf07 | ||
|
|
076058da4f | ||
|
|
f1bc4aa146 | ||
|
|
773c4a24a5 | ||
|
|
6cc659e5c4 | ||
|
|
e1580210dc | ||
|
|
0a4ffde319 | ||
|
|
cc4d598af3 | ||
|
|
f5d11cb6d3 | ||
|
|
65e91aec6b | ||
|
|
ae943a5fd2 | ||
|
|
9e1a11de0b | ||
|
|
52732e12ec | ||
|
|
1127d7e3fa | ||
|
|
7d4d228236 | ||
|
|
565e053ee8 | ||
|
|
26aba6034f | ||
|
|
9a1d613034 | ||
|
|
3e4de7d3c4 | ||
|
|
b64b5ec0f8 | ||
|
|
510d1c7d17 | ||
|
|
ed13a62269 | ||
|
|
935d613959 | ||
|
|
adeaaccc45 | ||
|
|
d253093333 | ||
|
|
f265b7a572 | ||
|
|
68b2ff016f | ||
|
|
a1c6295810 | ||
|
|
76ee3fa61b | ||
|
|
7fef35ce0a | ||
|
|
654af09951 | ||
|
|
484dcfade0 | ||
|
|
fda18c5434 | ||
|
|
a8176d6e9e | ||
|
|
957d8d9d46 | ||
|
|
5f18bf25e2 | ||
|
|
66942eaf2c | ||
|
|
38796b1833 | ||
|
|
49ffa6fc45 | ||
|
|
1f56ba2eb6 | ||
|
|
f98a70ecb4 | ||
|
|
2984eb40c9 | ||
|
|
cc5d4742f0 | ||
|
|
5b8e4b98a0 | ||
|
|
8637c90a21 | ||
|
|
b88c5e71a0 | ||
|
|
1899d512ab | ||
|
|
7c31718f5e | ||
|
|
8f1463e5d0 | ||
|
|
0dc8807808 | ||
|
|
f24a159b8a | ||
|
|
0b91d3aaff | ||
|
|
431dc1c896 | ||
|
|
bc8277b56b | ||
|
|
0afed185d9 | ||
|
|
55cee00601 | ||
|
|
b41a8e04cb | ||
|
|
eff4519d91 | ||
|
|
c4ce458f79 | ||
|
|
f02ae35634 | ||
|
|
c2f068970b | ||
|
|
eec2d6bc38 | ||
|
|
efa22e470e | ||
|
|
c00d2e981f | ||
|
|
9c45254952 | ||
|
|
d031ff231a | ||
|
|
f056894ddb | ||
|
|
1b0315165f | ||
|
|
bd7e840a57 | ||
|
|
8969748c3c | ||
|
|
4e15ac10a6 | ||
|
|
47a3d44888 | ||
|
|
eb10910d20 | ||
|
|
6ba83d424d |
@@ -1,35 +1,46 @@
|
||||
---
|
||||
description: Review a PR in a new worktree
|
||||
allowed-tools: Bash(git worktree:*), Bash(gh pr:*)
|
||||
allowed-tools: Bash(git worktree:*), Bash(gh pr:*), Bash(git branch:*)
|
||||
---
|
||||
|
||||
Review a GitHub pull request in a new git worktree.
|
||||
Check out a GitHub pull request for review.
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
/review-pr <PR_NUMBER>
|
||||
/check-out-pr <PR_NUMBER>
|
||||
```
|
||||
|
||||
## What to do
|
||||
|
||||
1. List all open pull requests and ask the user to select one
|
||||
1. If no PR number is provided, list all open pull requests and ask the user to select one
|
||||
2. Get PR information using `gh pr view <PR_NUMBER> --json number,headRefName`
|
||||
3. Extract the branch name from the PR
|
||||
4. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
|
||||
5. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
|
||||
6. The post-checkout hook will automatically:
|
||||
3. **Ask the user** whether they want to:
|
||||
- **A) Check out in current directory** — simple `gh pr checkout <PR_NUMBER>`
|
||||
- **B) Create a new worktree** — isolated copy at `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||
4. Follow the appropriate path below
|
||||
|
||||
## Option A: Check out in current directory
|
||||
|
||||
1. Run `gh pr checkout <PR_NUMBER>`
|
||||
2. Inform the user which branch they're now on
|
||||
|
||||
## Option B: Create a new worktree
|
||||
|
||||
1. Create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>` using `git worktree add` with a timeout of at least 300000ms (5 minutes) since the post-checkout hook runs a bootstrap script
|
||||
2. Checkout the PR branch in the new worktree using `gh pr checkout <PR_NUMBER>`
|
||||
3. The post-checkout hook will automatically:
|
||||
- Create `.env.local` with unique ports
|
||||
- Copy editor config folders
|
||||
- Run `npm install && npm run bootstrap`
|
||||
7. Inform the user:
|
||||
4. Inform the user:
|
||||
- Where the worktree was created
|
||||
- What ports were assigned
|
||||
- How to access it (cd command)
|
||||
- How to run the dev server
|
||||
- How to remove the worktree when done
|
||||
|
||||
## Example Output
|
||||
### Example worktree output
|
||||
|
||||
```
|
||||
Created worktree for PR #123 at ../yaak-worktrees/pr-123
|
||||
|
||||
@@ -43,5 +43,7 @@ The skill generates markdown-formatted release notes following this structure:
|
||||
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
|
||||
|
||||
```bash
|
||||
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
|
||||
gh release create <tag> --draft --prerelease --title "Release <version>" --notes '<release notes>'
|
||||
```
|
||||
|
||||
**IMPORTANT**: The release title format is "Release XXXX" where XXXX is the version WITHOUT the `v` prefix. For example, tag `v2026.2.1-beta.1` gets title "Release 2026.2.1-beta.1".
|
||||
|
||||
46
.codex/skills/release-check-out-pr/SKILL.md
Normal file
46
.codex/skills/release-check-out-pr/SKILL.md
Normal file
@@ -0,0 +1,46 @@
|
||||
---
|
||||
name: release-check-out-pr
|
||||
description: Check out a GitHub pull request for review in this repo, either in the current directory or in a new isolated worktree at ../yaak-worktrees/pr-<PR_NUMBER>. Use when asked to run or replace the old Claude check-out-pr command.
|
||||
---
|
||||
|
||||
# Check Out PR
|
||||
|
||||
Check out a PR by number and let the user choose between current-directory checkout and isolated worktree checkout.
|
||||
|
||||
## Workflow
|
||||
|
||||
1. Confirm `gh` CLI is available.
|
||||
2. If no PR number is provided, list open PRs (`gh pr list`) and ask the user to choose one.
|
||||
3. Read PR metadata:
|
||||
- `gh pr view <PR_NUMBER> --json number,headRefName`
|
||||
4. Ask the user to choose:
|
||||
- Option A: check out in the current directory
|
||||
- Option B: create a new worktree at `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||
|
||||
## Option A: Current Directory
|
||||
|
||||
1. Run:
|
||||
- `gh pr checkout <PR_NUMBER>`
|
||||
2. Report the checked-out branch.
|
||||
|
||||
## Option B: New Worktree
|
||||
|
||||
1. Use path:
|
||||
- `../yaak-worktrees/pr-<PR_NUMBER>`
|
||||
2. Create the worktree with a timeout of at least 5 minutes because checkout hooks run bootstrap.
|
||||
3. In the new worktree, run:
|
||||
- `gh pr checkout <PR_NUMBER>`
|
||||
4. Report:
|
||||
- Worktree path
|
||||
- Assigned ports from `.env.local` if present
|
||||
- How to start work:
|
||||
- `cd ../yaak-worktrees/pr-<PR_NUMBER>`
|
||||
- `npm run app-dev`
|
||||
- How to remove when done:
|
||||
- `git worktree remove ../yaak-worktrees/pr-<PR_NUMBER>`
|
||||
|
||||
## Error Handling
|
||||
|
||||
- If PR does not exist, show a clear error.
|
||||
- If worktree already exists, ask whether to reuse it or remove/recreate it.
|
||||
- If `gh` is missing, instruct the user to install/authenticate it.
|
||||
48
.codex/skills/release-generate-release-notes/SKILL.md
Normal file
48
.codex/skills/release-generate-release-notes/SKILL.md
Normal file
@@ -0,0 +1,48 @@
|
||||
---
|
||||
name: release-generate-release-notes
|
||||
description: Generate Yaak release notes from git history and PR metadata, including feedback links and full changelog compare links. Use when asked to run or replace the old Claude generate-release-notes command.
|
||||
---
|
||||
|
||||
# Generate Release Notes
|
||||
|
||||
Generate formatted markdown release notes for a Yaak tag.
|
||||
|
||||
## Workflow
|
||||
|
||||
1. Determine target tag.
|
||||
2. Determine previous comparable tag:
|
||||
- Beta tag: compare against previous beta (if the root version is the same) or stable tag.
|
||||
- Stable tag: compare against previous stable tag.
|
||||
3. Collect commits in range:
|
||||
- `git log --oneline <prev_tag>..<target_tag>`
|
||||
4. For linked PRs, fetch metadata:
|
||||
- `gh pr view <PR_NUMBER> --json number,title,body,author,url`
|
||||
5. Extract useful details:
|
||||
- Feedback URLs (`feedback.yaak.app`)
|
||||
- Plugin install links or other notable context
|
||||
6. Format notes using Yaak style:
|
||||
- Changelog badge at top
|
||||
- Bulleted items with PR links where available
|
||||
- Feedback links where available
|
||||
- Full changelog compare link at bottom
|
||||
|
||||
## Formatting Rules
|
||||
|
||||
- Wrap final notes in a markdown code fence.
|
||||
- Keep a blank line before and after the code fence.
|
||||
- Output the markdown code block last.
|
||||
- Do not append `by @gschier` for PRs authored by `@gschier`.
|
||||
|
||||
## Release Creation Prompt
|
||||
|
||||
After producing notes, ask whether to create a draft GitHub release.
|
||||
|
||||
If confirmed and release does not yet exist, run:
|
||||
|
||||
`gh release create <tag> --draft --prerelease --title "Release <version_without_v>" --notes '<release notes>'`
|
||||
|
||||
If a draft release for the tag already exists, update it instead:
|
||||
|
||||
`gh release edit <tag> --title "Release <version_without_v>" --notes-file <path_to_notes>`
|
||||
|
||||
Use title format `Release <version_without_v>`, e.g. `v2026.2.1-beta.1` -> `Release 2026.2.1-beta.1`.
|
||||
37
.codex/skills/worktree-management/SKILL.md
Normal file
37
.codex/skills/worktree-management/SKILL.md
Normal file
@@ -0,0 +1,37 @@
|
||||
---
|
||||
name: worktree-management
|
||||
description: Manage Yaak git worktrees using the standard ../yaak-worktrees/<NAME> layout, including creation, removal, and expected automatic setup behavior and port assignments.
|
||||
---
|
||||
|
||||
# Worktree Management
|
||||
|
||||
Use the Yaak-standard worktree path layout and lifecycle commands.
|
||||
|
||||
## Path Convention
|
||||
|
||||
Always create worktrees under:
|
||||
|
||||
`../yaak-worktrees/<NAME>`
|
||||
|
||||
Examples:
|
||||
- `git worktree add ../yaak-worktrees/feature-auth`
|
||||
- `git worktree add ../yaak-worktrees/bugfix-login`
|
||||
- `git worktree add ../yaak-worktrees/refactor-api`
|
||||
|
||||
## Automatic Setup After Checkout
|
||||
|
||||
Project git hooks automatically:
|
||||
1. Create `.env.local` with unique `YAAK_DEV_PORT` and `YAAK_PLUGIN_MCP_SERVER_PORT`
|
||||
2. Copy gitignored editor config folders
|
||||
3. Run `npm install && npm run bootstrap`
|
||||
|
||||
## Remove Worktree
|
||||
|
||||
`git worktree remove ../yaak-worktrees/<NAME>`
|
||||
|
||||
## Port Pattern
|
||||
|
||||
- Main worktree: Vite `1420`, MCP `64343`
|
||||
- First extra worktree: `1421`, `64344`
|
||||
- Second extra worktree: `1422`, `64345`
|
||||
- Continue incrementally for additional worktrees
|
||||
18
.github/pull_request_template.md
vendored
Normal file
18
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
## Summary
|
||||
|
||||
<!-- Describe the bug and the fix in 1-3 sentences. -->
|
||||
|
||||
## Submission
|
||||
|
||||
- [ ] This PR is a bug fix or small-scope improvement.
|
||||
- [ ] If this PR is not a bug fix or small-scope improvement, I linked an approved feedback item below.
|
||||
- [ ] I have read and followed [`CONTRIBUTING.md`](CONTRIBUTING.md).
|
||||
- [ ] I tested this change locally.
|
||||
- [ ] I added or updated tests when reasonable.
|
||||
|
||||
Approved feedback item (required if not a bug fix or small-scope improvement):
|
||||
<!-- https://yaak.app/feedback/... -->
|
||||
|
||||
## Related
|
||||
|
||||
<!-- Link related issues, discussions, or feedback items. -->
|
||||
52
.github/workflows/flathub.yml
vendored
Normal file
52
.github/workflows/flathub.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
name: Update Flathub
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
update-flathub:
|
||||
name: Update Flathub manifest
|
||||
runs-on: ubuntu-latest
|
||||
# Only run for stable releases (skip betas/pre-releases)
|
||||
if: ${{ !github.event.release.prerelease }}
|
||||
steps:
|
||||
- name: Checkout app repo
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout Flathub repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: flathub/app.yaak.Yaak
|
||||
token: ${{ secrets.FLATHUB_TOKEN }}
|
||||
path: flathub-repo
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Install source generators
|
||||
run: |
|
||||
pip install flatpak-node-generator tomlkit aiohttp
|
||||
git clone --depth 1 https://github.com/flatpak/flatpak-builder-tools flatpak/flatpak-builder-tools
|
||||
|
||||
- name: Run update-manifest.sh
|
||||
run: bash flatpak/update-manifest.sh "${{ github.event.release.tag_name }}" flathub-repo
|
||||
|
||||
- name: Commit and push to Flathub
|
||||
working-directory: flathub-repo
|
||||
run: |
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add -A
|
||||
git diff --cached --quiet && echo "No changes to commit" && exit 0
|
||||
git commit -m "Update to ${{ github.event.release.tag_name }}"
|
||||
git push
|
||||
161
.github/workflows/release-cli-npm.yml
vendored
Normal file
161
.github/workflows/release-cli-npm.yml
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
name: Release CLI to NPM
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: [yaak-cli-*]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: CLI version to publish (for example 0.4.0 or v0.4.0)
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build-binaries:
|
||||
name: Build ${{ matrix.pkg }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- pkg: cli-darwin-arm64
|
||||
runner: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
binary: yaak
|
||||
- pkg: cli-darwin-x64
|
||||
runner: macos-latest
|
||||
target: x86_64-apple-darwin
|
||||
binary: yaak
|
||||
- pkg: cli-linux-arm64
|
||||
runner: ubuntu-22.04-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
binary: yaak
|
||||
- pkg: cli-linux-x64
|
||||
runner: ubuntu-22.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
binary: yaak
|
||||
- pkg: cli-win32-arm64
|
||||
runner: windows-latest
|
||||
target: aarch64-pc-windows-msvc
|
||||
binary: yaak.exe
|
||||
- pkg: cli-win32-x64
|
||||
runner: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
binary: yaak.exe
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Restore Rust cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
shared-key: release-cli-npm
|
||||
cache-on-failure: true
|
||||
|
||||
- name: Install Linux build dependencies
|
||||
if: startsWith(matrix.runner, 'ubuntu')
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y pkg-config libdbus-1-dev
|
||||
|
||||
- name: Build yaak
|
||||
run: cargo build --locked --release -p yaak-cli --bin yaak --target ${{ matrix.target }}
|
||||
|
||||
- name: Stage binary artifact
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
mkdir -p "npm/dist/${{ matrix.pkg }}"
|
||||
cp "target/${{ matrix.target }}/release/${{ matrix.binary }}" "npm/dist/${{ matrix.pkg }}/${{ matrix.binary }}"
|
||||
|
||||
- name: Upload binary artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.pkg }}
|
||||
path: npm/dist/${{ matrix.pkg }}/${{ matrix.binary }}
|
||||
if-no-files-found: error
|
||||
|
||||
publish-npm:
|
||||
name: Publish @yaakapp/cli packages
|
||||
needs: build-binaries
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: lts/*
|
||||
registry-url: https://registry.npmjs.org
|
||||
|
||||
- name: Download binary artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: cli-*
|
||||
path: npm/dist
|
||||
merge-multiple: false
|
||||
|
||||
- name: Prepare npm packages
|
||||
shell: bash
|
||||
env:
|
||||
WORKFLOW_VERSION: ${{ inputs.version }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
VERSION="$WORKFLOW_VERSION"
|
||||
else
|
||||
VERSION="${GITHUB_REF_NAME#yaak-cli-}"
|
||||
fi
|
||||
VERSION="${VERSION#v}"
|
||||
if [[ "$VERSION" == *-* ]]; then
|
||||
PRERELEASE="${VERSION#*-}"
|
||||
NPM_TAG="${PRERELEASE%%.*}"
|
||||
else
|
||||
NPM_TAG="latest"
|
||||
fi
|
||||
echo "Preparing CLI npm packages for version: $VERSION"
|
||||
echo "Publishing with npm dist-tag: $NPM_TAG"
|
||||
echo "NPM_TAG=$NPM_TAG" >> "$GITHUB_ENV"
|
||||
YAAK_CLI_VERSION="$VERSION" node npm/prepare-publish.js
|
||||
|
||||
- name: Publish @yaakapp/cli-darwin-arm64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-darwin-arm64
|
||||
|
||||
- name: Publish @yaakapp/cli-darwin-x64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-darwin-x64
|
||||
|
||||
- name: Publish @yaakapp/cli-linux-arm64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-linux-arm64
|
||||
|
||||
- name: Publish @yaakapp/cli-linux-x64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-linux-x64
|
||||
|
||||
- name: Publish @yaakapp/cli-win32-arm64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-win32-arm64
|
||||
|
||||
- name: Publish @yaakapp/cli-win32-x64
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli-win32-x64
|
||||
|
||||
- name: Publish @yaakapp/cli
|
||||
run: npm publish --provenance --access public --tag "$NPM_TAG"
|
||||
working-directory: npm/cli
|
||||
26
.github/workflows/release.yml
vendored
26
.github/workflows/release.yml
vendored
@@ -89,6 +89,8 @@ jobs:
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run bootstrap
|
||||
env:
|
||||
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
|
||||
- run: npm run lint
|
||||
- name: Run JS Tests
|
||||
run: npm test
|
||||
@@ -151,3 +153,27 @@ jobs:
|
||||
releaseDraft: true
|
||||
prerelease: true
|
||||
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
|
||||
|
||||
# Build a per-machine NSIS installer for enterprise deployment (PDQ, SCCM, Intune)
|
||||
- name: Build and upload machine-wide installer (Windows only)
|
||||
if: matrix.os == 'windows'
|
||||
shell: pwsh
|
||||
env:
|
||||
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
|
||||
run: |
|
||||
Get-ChildItem -Recurse -Path target -File -Filter "*.exe.sig" | Remove-Item -Force
|
||||
npx tauri bundle ${{ matrix.args }} --bundles nsis --config ./crates-tauri/yaak-app/tauri.release.conf.json --config '{"bundle":{"createUpdaterArtifacts":true,"windows":{"nsis":{"installMode":"perMachine"}}}}'
|
||||
$setup = Get-ChildItem -Recurse -Path target -Filter "*setup*.exe" | Select-Object -First 1
|
||||
$setupSig = "$($setup.FullName).sig"
|
||||
$dest = $setup.FullName -replace '-setup\.exe$', '-setup-machine.exe'
|
||||
$destSig = "$dest.sig"
|
||||
Copy-Item $setup.FullName $dest
|
||||
Copy-Item $setupSig $destSig
|
||||
gh release upload "${{ github.ref_name }}" "$dest" --clobber
|
||||
gh release upload "${{ github.ref_name }}" "$destSig" --clobber
|
||||
|
||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -44,3 +44,13 @@ crates-tauri/yaak-app/tauri.worktree.conf.json
|
||||
# Tauri auto-generated permission files
|
||||
**/permissions/autogenerated
|
||||
**/permissions/schemas
|
||||
|
||||
# Flatpak build artifacts
|
||||
flatpak-repo/
|
||||
.flatpak-builder/
|
||||
flatpak/flatpak-builder-tools/
|
||||
flatpak/cargo-sources.json
|
||||
flatpak/node-sources.json
|
||||
|
||||
# Local Codex desktop env state
|
||||
.codex/environments/environment.toml
|
||||
|
||||
16
CONTRIBUTING.md
Normal file
16
CONTRIBUTING.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# Contributing to Yaak
|
||||
|
||||
Yaak accepts community pull requests for:
|
||||
|
||||
- Bug fixes
|
||||
- Small-scope improvements directly tied to existing behavior
|
||||
|
||||
Pull requests that introduce broad new features, major redesigns, or large refactors are out of scope unless explicitly approved first.
|
||||
|
||||
## Approval for Non-Bugfix Changes
|
||||
|
||||
If your PR is not a bug fix or small-scope improvement, include a link to the approved [feedback item](https://yaak.app/feedback) where contribution approval was explicitly stated.
|
||||
|
||||
## Development Setup
|
||||
|
||||
For local setup and development workflows, see [`DEVELOPMENT.md`](DEVELOPMENT.md).
|
||||
2485
Cargo.lock
generated
2485
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,7 @@
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
"crates/yaak",
|
||||
# Shared crates (no Tauri dependency)
|
||||
"crates/yaak-core",
|
||||
"crates/yaak-common",
|
||||
@@ -15,6 +16,7 @@ members = [
|
||||
"crates/yaak-templates",
|
||||
"crates/yaak-tls",
|
||||
"crates/yaak-ws",
|
||||
"crates/yaak-api",
|
||||
# CLI crates
|
||||
"crates-cli/yaak-cli",
|
||||
# Tauri-specific crates
|
||||
@@ -33,6 +35,7 @@ log = "0.4.29"
|
||||
reqwest = "0.12.20"
|
||||
rustls = { version = "0.23.34", default-features = false }
|
||||
rustls-platform-verifier = "0.6.2"
|
||||
schemars = { version = "0.8.22", features = ["chrono"] }
|
||||
serde = "1.0.228"
|
||||
serde_json = "1.0.145"
|
||||
sha2 = "0.10.9"
|
||||
@@ -46,6 +49,7 @@ ts-rs = "11.1.0"
|
||||
|
||||
# Internal crates - shared
|
||||
yaak-core = { path = "crates/yaak-core" }
|
||||
yaak = { path = "crates/yaak" }
|
||||
yaak-common = { path = "crates/yaak-common" }
|
||||
yaak-crypto = { path = "crates/yaak-crypto" }
|
||||
yaak-git = { path = "crates/yaak-git" }
|
||||
@@ -58,6 +62,7 @@ yaak-sync = { path = "crates/yaak-sync" }
|
||||
yaak-templates = { path = "crates/yaak-templates" }
|
||||
yaak-tls = { path = "crates/yaak-tls" }
|
||||
yaak-ws = { path = "crates/yaak-ws" }
|
||||
yaak-api = { path = "crates/yaak-api" }
|
||||
|
||||
# Internal crates - Tauri-specific
|
||||
yaak-fonts = { path = "crates-tauri/yaak-fonts" }
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https://github.com/MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a> <a href="https://github.com/dharsanb"><img src="https://github.com/dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a> <a href="https://github.com/railwayapp"><img src="https://github.com/railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a> <a href="https://github.com/caseyamcl"><img src="https://github.com/caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a> <a href="https://github.com/bytebase"><img src="https://github.com/bytebase.png" width="80px" alt="User avatar: bytebase" /></a> <a href="https://github.com/"><img src="https://raw.githubusercontent.com/JamesIves/github-sponsors-readme-action/dev/.github/assets/placeholder.png" width="80px" alt="User avatar: " /></a> <!-- sponsors-premium -->
|
||||
</p>
|
||||
<p align="center">
|
||||
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https://github.com/seanwash.png" width="50px" alt="User avatar: seanwash" /></a> <a href="https://github.com/jerath"><img src="https://github.com/jerath.png" width="50px" alt="User avatar: jerath" /></a> <a href="https://github.com/itsa-sh"><img src="https://github.com/itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a> <a href="https://github.com/dmmulroy"><img src="https://github.com/dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a> <a href="https://github.com/timcole"><img src="https://github.com/timcole.png" width="50px" alt="User avatar: timcole" /></a> <a href="https://github.com/VLZH"><img src="https://github.com/VLZH.png" width="50px" alt="User avatar: VLZH" /></a> <a href="https://github.com/terasaka2k"><img src="https://github.com/terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a> <a href="https://github.com/andriyor"><img src="https://github.com/andriyor.png" width="50px" alt="User avatar: andriyor" /></a> <a href="https://github.com/majudhu"><img src="https://github.com/majudhu.png" width="50px" alt="User avatar: majudhu" /></a> <a href="https://github.com/axelrindle"><img src="https://github.com/axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a> <a href="https://github.com/jirizverina"><img src="https://github.com/jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a> <a href="https://github.com/chip-well"><img src="https://github.com/chip-well.png" width="50px" alt="User avatar: chip-well" /></a> <a href="https://github.com/GRAYAH"><img src="https://github.com/GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a> <!-- sponsors-base -->
|
||||
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https://github.com/seanwash.png" width="50px" alt="User avatar: seanwash" /></a> <a href="https://github.com/jerath"><img src="https://github.com/jerath.png" width="50px" alt="User avatar: jerath" /></a> <a href="https://github.com/itsa-sh"><img src="https://github.com/itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a> <a href="https://github.com/dmmulroy"><img src="https://github.com/dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a> <a href="https://github.com/timcole"><img src="https://github.com/timcole.png" width="50px" alt="User avatar: timcole" /></a> <a href="https://github.com/VLZH"><img src="https://github.com/VLZH.png" width="50px" alt="User avatar: VLZH" /></a> <a href="https://github.com/terasaka2k"><img src="https://github.com/terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a> <a href="https://github.com/andriyor"><img src="https://github.com/andriyor.png" width="50px" alt="User avatar: andriyor" /></a> <a href="https://github.com/majudhu"><img src="https://github.com/majudhu.png" width="50px" alt="User avatar: majudhu" /></a> <a href="https://github.com/axelrindle"><img src="https://github.com/axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a> <a href="https://github.com/jirizverina"><img src="https://github.com/jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a> <a href="https://github.com/chip-well"><img src="https://github.com/chip-well.png" width="50px" alt="User avatar: chip-well" /></a> <a href="https://github.com/GRAYAH"><img src="https://github.com/GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a> <a href="https://github.com/flashblaze"><img src="https://github.com/flashblaze.png" width="50px" alt="User avatar: flashblaze" /></a> <!-- sponsors-base -->
|
||||
</p>
|
||||
|
||||

|
||||
@@ -58,8 +58,10 @@ Built with [Tauri](https://tauri.app), Rust, and React, it’s fast, lightweight
|
||||
|
||||
## Contribution Policy
|
||||
|
||||
Yaak is open source but only accepting contributions for bug fixes. To get started,
|
||||
visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment.
|
||||
> [!IMPORTANT]
|
||||
> Community PRs are currently limited to bug fixes and small-scope improvements.
|
||||
> If your PR is out of scope, link an approved feedback item from [yaak.app/feedback](https://yaak.app/feedback).
|
||||
> See [`CONTRIBUTING.md`](CONTRIBUTING.md) for policy details and [`DEVELOPMENT.md`](DEVELOPMENT.md) for local setup.
|
||||
|
||||
## Useful Resources
|
||||
|
||||
|
||||
@@ -47,7 +47,8 @@
|
||||
"!src-web/vite.config.ts",
|
||||
"!src-web/routeTree.gen.ts",
|
||||
"!packages/plugin-runtime-types/lib",
|
||||
"!**/bindings"
|
||||
"!**/bindings",
|
||||
"!flatpak"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,18 +5,39 @@ edition = "2024"
|
||||
publish = false
|
||||
|
||||
[[bin]]
|
||||
name = "yaakcli"
|
||||
name = "yaak"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
base64 = "0.22"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
console = "0.15"
|
||||
dirs = "6"
|
||||
env_logger = "0.11"
|
||||
futures = "0.3"
|
||||
hex = { workspace = true }
|
||||
keyring = { workspace = true, features = ["apple-native", "windows-native", "sync-secret-service"] }
|
||||
log = { workspace = true }
|
||||
rand = "0.8"
|
||||
reqwest = { workspace = true }
|
||||
rolldown = "0.1.0"
|
||||
oxc_resolver = "=11.10.0"
|
||||
schemars = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
|
||||
sha2 = { workspace = true }
|
||||
tokio = { workspace = true, features = ["rt-multi-thread", "macros", "io-util", "net", "signal", "time"] }
|
||||
walkdir = "2"
|
||||
webbrowser = "1"
|
||||
zip = "4"
|
||||
yaak = { workspace = true }
|
||||
yaak-crypto = { workspace = true }
|
||||
yaak-http = { workspace = true }
|
||||
yaak-models = { workspace = true }
|
||||
yaak-plugins = { workspace = true }
|
||||
yaak-templates = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
predicates = "3"
|
||||
tempfile = "3"
|
||||
|
||||
87
crates-cli/yaak-cli/README.md
Normal file
87
crates-cli/yaak-cli/README.md
Normal file
@@ -0,0 +1,87 @@
|
||||
# yaak-cli
|
||||
|
||||
Command-line interface for Yaak.
|
||||
|
||||
## Command Overview
|
||||
|
||||
Current top-level commands:
|
||||
|
||||
```text
|
||||
yaakcli send <request_id>
|
||||
yaakcli workspace list
|
||||
yaakcli workspace show <workspace_id>
|
||||
yaakcli workspace create --name <name>
|
||||
yaakcli workspace create --json '{"name":"My Workspace"}'
|
||||
yaakcli workspace create '{"name":"My Workspace"}'
|
||||
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
|
||||
yaakcli workspace delete <workspace_id> [--yes]
|
||||
yaakcli request list <workspace_id>
|
||||
yaakcli request show <request_id>
|
||||
yaakcli request send <request_id>
|
||||
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
|
||||
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
|
||||
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||
yaakcli request delete <request_id> [--yes]
|
||||
yaakcli folder list <workspace_id>
|
||||
yaakcli folder show <folder_id>
|
||||
yaakcli folder create <workspace_id> --name <name>
|
||||
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
|
||||
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||
yaakcli folder delete <folder_id> [--yes]
|
||||
yaakcli environment list <workspace_id>
|
||||
yaakcli environment show <environment_id>
|
||||
yaakcli environment create <workspace_id> --name <name>
|
||||
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
|
||||
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
|
||||
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||
yaakcli environment delete <environment_id> [--yes]
|
||||
```
|
||||
|
||||
Global options:
|
||||
|
||||
- `--data-dir <path>`: use a custom data directory
|
||||
- `-e, --environment <id>`: environment to use during request rendering/sending
|
||||
- `-v, --verbose`: verbose logging and send output
|
||||
|
||||
Notes:
|
||||
|
||||
- `send` is currently a shortcut for sending an HTTP request ID.
|
||||
- `delete` commands prompt for confirmation unless `--yes` is provided.
|
||||
- In non-interactive mode, `delete` commands require `--yes`.
|
||||
- `create` and `update` commands support `--json` and positional JSON shorthand.
|
||||
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
|
||||
|
||||
## Examples
|
||||
|
||||
```bash
|
||||
yaakcli workspace list
|
||||
yaakcli workspace create --name "My Workspace"
|
||||
yaakcli workspace show wk_abc
|
||||
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
|
||||
yaakcli request list wk_abc
|
||||
yaakcli request show rq_abc
|
||||
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
|
||||
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
|
||||
yaakcli request send rq_abc -e ev_abc
|
||||
yaakcli request delete rq_abc --yes
|
||||
yaakcli folder create wk_abc --name "Auth"
|
||||
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
|
||||
yaakcli environment create wk_abc --name "Production"
|
||||
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
|
||||
```
|
||||
|
||||
## Roadmap
|
||||
|
||||
Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
|
||||
|
||||
When command behavior changes, update this README and verify with:
|
||||
|
||||
```bash
|
||||
cargo run -q -p yaak-cli -- --help
|
||||
cargo run -q -p yaak-cli -- request --help
|
||||
cargo run -q -p yaak-cli -- workspace --help
|
||||
cargo run -q -p yaak-cli -- folder --help
|
||||
cargo run -q -p yaak-cli -- environment --help
|
||||
```
|
||||
375
crates-cli/yaak-cli/src/cli.rs
Normal file
375
crates-cli/yaak-cli/src/cli.rs
Normal file
@@ -0,0 +1,375 @@
|
||||
use clap::{Args, Parser, Subcommand, ValueEnum};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "yaak")]
|
||||
#[command(about = "Yaak CLI - API client from the command line")]
|
||||
pub struct Cli {
|
||||
/// Use a custom data directory
|
||||
#[arg(long, global = true)]
|
||||
pub data_dir: Option<PathBuf>,
|
||||
|
||||
/// Environment ID to use for variable substitution
|
||||
#[arg(long, short, global = true)]
|
||||
pub environment: Option<String>,
|
||||
|
||||
/// Enable verbose logging
|
||||
#[arg(long, short, global = true)]
|
||||
pub verbose: bool,
|
||||
|
||||
#[command(subcommand)]
|
||||
pub command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum Commands {
|
||||
/// Authentication commands
|
||||
Auth(AuthArgs),
|
||||
|
||||
/// Plugin development and publishing commands
|
||||
Plugin(PluginArgs),
|
||||
|
||||
#[command(hide = true)]
|
||||
Build(PluginPathArg),
|
||||
|
||||
#[command(hide = true)]
|
||||
Dev(PluginPathArg),
|
||||
|
||||
/// Send a request, folder, or workspace by ID
|
||||
Send(SendArgs),
|
||||
|
||||
/// Workspace commands
|
||||
Workspace(WorkspaceArgs),
|
||||
|
||||
/// Request commands
|
||||
Request(RequestArgs),
|
||||
|
||||
/// Folder commands
|
||||
Folder(FolderArgs),
|
||||
|
||||
/// Environment commands
|
||||
Environment(EnvironmentArgs),
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct SendArgs {
|
||||
/// Request, folder, or workspace ID
|
||||
pub id: String,
|
||||
|
||||
/// Execute requests sequentially (default)
|
||||
#[arg(long, conflicts_with = "parallel")]
|
||||
pub sequential: bool,
|
||||
|
||||
/// Execute requests in parallel
|
||||
#[arg(long, conflicts_with = "sequential")]
|
||||
pub parallel: bool,
|
||||
|
||||
/// Stop on first request failure when sending folders/workspaces
|
||||
#[arg(long, conflicts_with = "parallel")]
|
||||
pub fail_fast: bool,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct WorkspaceArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: WorkspaceCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum WorkspaceCommands {
|
||||
/// List all workspaces
|
||||
List,
|
||||
|
||||
/// Show a workspace as JSON
|
||||
Show {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
|
||||
/// Create a workspace
|
||||
Create {
|
||||
/// Workspace name
|
||||
#[arg(short, long)]
|
||||
name: Option<String>,
|
||||
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Update a workspace
|
||||
Update {
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete a workspace
|
||||
Delete {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct RequestArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: RequestCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum RequestCommands {
|
||||
/// List requests in a workspace
|
||||
List {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
|
||||
/// Show a request as JSON
|
||||
Show {
|
||||
/// Request ID
|
||||
request_id: String,
|
||||
},
|
||||
|
||||
/// Send a request by ID
|
||||
Send {
|
||||
/// Request ID
|
||||
request_id: String,
|
||||
},
|
||||
|
||||
/// Output JSON schema for request create/update payloads
|
||||
Schema {
|
||||
#[arg(value_enum)]
|
||||
request_type: RequestSchemaType,
|
||||
},
|
||||
|
||||
/// Create a new HTTP request
|
||||
Create {
|
||||
/// Workspace ID (or positional JSON payload shorthand)
|
||||
workspace_id: Option<String>,
|
||||
|
||||
/// Request name
|
||||
#[arg(short, long)]
|
||||
name: Option<String>,
|
||||
|
||||
/// HTTP method
|
||||
#[arg(short, long)]
|
||||
method: Option<String>,
|
||||
|
||||
/// URL
|
||||
#[arg(short, long)]
|
||||
url: Option<String>,
|
||||
|
||||
/// JSON payload
|
||||
#[arg(long)]
|
||||
json: Option<String>,
|
||||
},
|
||||
|
||||
/// Update an HTTP request
|
||||
Update {
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete a request
|
||||
Delete {
|
||||
/// Request ID
|
||||
request_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, ValueEnum)]
|
||||
pub enum RequestSchemaType {
|
||||
Http,
|
||||
Grpc,
|
||||
Websocket,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct FolderArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: FolderCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum FolderCommands {
|
||||
/// List folders in a workspace
|
||||
List {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
|
||||
/// Show a folder as JSON
|
||||
Show {
|
||||
/// Folder ID
|
||||
folder_id: String,
|
||||
},
|
||||
|
||||
/// Create a folder
|
||||
Create {
|
||||
/// Workspace ID (or positional JSON payload shorthand)
|
||||
workspace_id: Option<String>,
|
||||
|
||||
/// Folder name
|
||||
#[arg(short, long)]
|
||||
name: Option<String>,
|
||||
|
||||
/// JSON payload
|
||||
#[arg(long)]
|
||||
json: Option<String>,
|
||||
},
|
||||
|
||||
/// Update a folder
|
||||
Update {
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete a folder
|
||||
Delete {
|
||||
/// Folder ID
|
||||
folder_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct EnvironmentArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: EnvironmentCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum EnvironmentCommands {
|
||||
/// List environments in a workspace
|
||||
List {
|
||||
/// Workspace ID
|
||||
workspace_id: String,
|
||||
},
|
||||
|
||||
/// Show an environment as JSON
|
||||
Show {
|
||||
/// Environment ID
|
||||
environment_id: String,
|
||||
},
|
||||
|
||||
/// Create an environment
|
||||
Create {
|
||||
/// Workspace ID (or positional JSON payload shorthand)
|
||||
workspace_id: Option<String>,
|
||||
|
||||
/// Environment name
|
||||
#[arg(short, long)]
|
||||
name: Option<String>,
|
||||
|
||||
/// JSON payload
|
||||
#[arg(long)]
|
||||
json: Option<String>,
|
||||
},
|
||||
|
||||
/// Update an environment
|
||||
Update {
|
||||
/// JSON payload
|
||||
#[arg(long, conflicts_with = "json_input")]
|
||||
json: Option<String>,
|
||||
|
||||
/// JSON payload shorthand
|
||||
#[arg(value_name = "JSON", conflicts_with = "json")]
|
||||
json_input: Option<String>,
|
||||
},
|
||||
|
||||
/// Delete an environment
|
||||
Delete {
|
||||
/// Environment ID
|
||||
environment_id: String,
|
||||
|
||||
/// Skip confirmation prompt
|
||||
#[arg(short, long)]
|
||||
yes: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct AuthArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: AuthCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum AuthCommands {
|
||||
/// Login to Yaak via web browser
|
||||
Login,
|
||||
|
||||
/// Sign out of the Yaak CLI
|
||||
Logout,
|
||||
|
||||
/// Print the current logged-in user's info
|
||||
Whoami,
|
||||
}
|
||||
|
||||
#[derive(Args)]
|
||||
pub struct PluginArgs {
|
||||
#[command(subcommand)]
|
||||
pub command: PluginCommands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub enum PluginCommands {
|
||||
/// Transpile code into a runnable plugin bundle
|
||||
Build(PluginPathArg),
|
||||
|
||||
/// Build plugin bundle continuously when the filesystem changes
|
||||
Dev(PluginPathArg),
|
||||
|
||||
/// Generate a "Hello World" Yaak plugin
|
||||
Generate(GenerateArgs),
|
||||
|
||||
/// Publish a Yaak plugin version to the plugin registry
|
||||
Publish(PluginPathArg),
|
||||
}
|
||||
|
||||
#[derive(Args, Clone)]
|
||||
pub struct PluginPathArg {
|
||||
/// Path to plugin directory (defaults to current working directory)
|
||||
pub path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
#[derive(Args, Clone)]
|
||||
pub struct GenerateArgs {
|
||||
/// Plugin name (defaults to a generated name in interactive mode)
|
||||
#[arg(long)]
|
||||
pub name: Option<String>,
|
||||
|
||||
/// Output directory for the generated plugin (defaults to ./<name> in interactive mode)
|
||||
#[arg(long)]
|
||||
pub dir: Option<PathBuf>,
|
||||
}
|
||||
556
crates-cli/yaak-cli/src/commands/auth.rs
Normal file
556
crates-cli/yaak-cli/src/commands/auth.rs
Normal file
@@ -0,0 +1,556 @@
|
||||
use crate::cli::{AuthArgs, AuthCommands};
|
||||
use crate::ui;
|
||||
use base64::Engine as _;
|
||||
use keyring::Entry;
|
||||
use rand::RngCore;
|
||||
use rand::rngs::OsRng;
|
||||
use reqwest::Url;
|
||||
use serde_json::Value;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::io::{self, IsTerminal, Write};
|
||||
use std::time::Duration;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use tokio::net::{TcpListener, TcpStream};
|
||||
|
||||
const OAUTH_CLIENT_ID: &str = "a1fe44800c2d7e803cad1b4bf07a291c";
|
||||
const KEYRING_USER: &str = "yaak";
|
||||
const AUTH_TIMEOUT: Duration = Duration::from_secs(300);
|
||||
const MAX_REQUEST_BYTES: usize = 16 * 1024;
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||
enum Environment {
|
||||
Production,
|
||||
Staging,
|
||||
Development,
|
||||
}
|
||||
|
||||
impl Environment {
|
||||
fn app_base_url(self) -> &'static str {
|
||||
match self {
|
||||
Environment::Production => "https://yaak.app",
|
||||
Environment::Staging => "https://todo.yaak.app",
|
||||
Environment::Development => "http://localhost:9444",
|
||||
}
|
||||
}
|
||||
|
||||
fn api_base_url(self) -> &'static str {
|
||||
match self {
|
||||
Environment::Production => "https://api.yaak.app",
|
||||
Environment::Staging => "https://todo.yaak.app",
|
||||
Environment::Development => "http://localhost:9444",
|
||||
}
|
||||
}
|
||||
|
||||
fn keyring_service(self) -> &'static str {
|
||||
match self {
|
||||
Environment::Production => "app.yaak.cli.Token",
|
||||
Environment::Staging => "app.yaak.cli.staging.Token",
|
||||
Environment::Development => "app.yaak.cli.dev.Token",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct OAuthFlow {
|
||||
app_base_url: String,
|
||||
auth_url: Url,
|
||||
token_url: String,
|
||||
redirect_url: String,
|
||||
state: String,
|
||||
code_verifier: String,
|
||||
}
|
||||
|
||||
pub async fn run(args: AuthArgs) -> i32 {
|
||||
let result = match args.command {
|
||||
AuthCommands::Login => login().await,
|
||||
AuthCommands::Logout => logout(),
|
||||
AuthCommands::Whoami => whoami().await,
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
ui::error(&error);
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn login() -> CommandResult {
|
||||
let environment = current_environment();
|
||||
|
||||
let listener = TcpListener::bind("127.0.0.1:0")
|
||||
.await
|
||||
.map_err(|e| format!("Failed to start OAuth callback server: {e}"))?;
|
||||
let port = listener
|
||||
.local_addr()
|
||||
.map_err(|e| format!("Failed to determine callback server port: {e}"))?
|
||||
.port();
|
||||
|
||||
let oauth = build_oauth_flow(environment, port)?;
|
||||
|
||||
ui::info(&format!("Initiating login to {}", oauth.auth_url));
|
||||
if !confirm_open_browser()? {
|
||||
ui::info("Login canceled");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Err(err) = webbrowser::open(oauth.auth_url.as_ref()) {
|
||||
ui::warning(&format!("Failed to open browser: {err}"));
|
||||
ui::info(&format!("Open this URL manually:\n{}", oauth.auth_url));
|
||||
}
|
||||
ui::info("Waiting for authentication...");
|
||||
|
||||
let code = tokio::select! {
|
||||
result = receive_oauth_code(listener, &oauth.state, &oauth.app_base_url) => result?,
|
||||
_ = tokio::signal::ctrl_c() => {
|
||||
return Err("Interrupted by user".to_string());
|
||||
}
|
||||
_ = tokio::time::sleep(AUTH_TIMEOUT) => {
|
||||
return Err("Timeout waiting for authentication".to_string());
|
||||
}
|
||||
};
|
||||
|
||||
let token = exchange_access_token(&oauth, &code).await?;
|
||||
store_auth_token(environment, &token)?;
|
||||
ui::success("Authentication successful!");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn logout() -> CommandResult {
|
||||
delete_auth_token(current_environment())?;
|
||||
ui::success("Signed out of Yaak");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn whoami() -> CommandResult {
|
||||
let environment = current_environment();
|
||||
let token = match get_auth_token(environment)? {
|
||||
Some(token) => token,
|
||||
None => {
|
||||
ui::warning("Not logged in");
|
||||
ui::info("Please run `yaak auth login`");
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
|
||||
let url = format!("{}/api/v1/whoami", environment.api_base_url());
|
||||
let response = reqwest::Client::new()
|
||||
.get(url)
|
||||
.header("X-Yaak-Session", token)
|
||||
.header(reqwest::header::USER_AGENT, user_agent())
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to call whoami endpoint: {e}"))?;
|
||||
|
||||
let status = response.status();
|
||||
let body =
|
||||
response.text().await.map_err(|e| format!("Failed to read whoami response body: {e}"))?;
|
||||
|
||||
if !status.is_success() {
|
||||
if status.as_u16() == 401 {
|
||||
let _ = delete_auth_token(environment);
|
||||
return Err(
|
||||
"Unauthorized to access CLI. Run `yaak auth login` to refresh credentials."
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
return Err(parse_api_error(status.as_u16(), &body));
|
||||
}
|
||||
|
||||
println!("{body}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn current_environment() -> Environment {
|
||||
let value = std::env::var("ENVIRONMENT").ok();
|
||||
parse_environment(value.as_deref())
|
||||
}
|
||||
|
||||
fn parse_environment(value: Option<&str>) -> Environment {
|
||||
match value {
|
||||
Some("staging") => Environment::Staging,
|
||||
Some("development") => Environment::Development,
|
||||
_ => Environment::Production,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_oauth_flow(environment: Environment, callback_port: u16) -> CommandResult<OAuthFlow> {
|
||||
let code_verifier = random_hex(32);
|
||||
let state = random_hex(24);
|
||||
let redirect_url = format!("http://127.0.0.1:{callback_port}/oauth/callback");
|
||||
|
||||
let code_challenge = base64::engine::general_purpose::URL_SAFE_NO_PAD
|
||||
.encode(Sha256::digest(code_verifier.as_bytes()));
|
||||
|
||||
let mut auth_url = Url::parse(&format!("{}/login/oauth/authorize", environment.app_base_url()))
|
||||
.map_err(|e| format!("Failed to build OAuth authorize URL: {e}"))?;
|
||||
auth_url
|
||||
.query_pairs_mut()
|
||||
.append_pair("response_type", "code")
|
||||
.append_pair("client_id", OAUTH_CLIENT_ID)
|
||||
.append_pair("redirect_uri", &redirect_url)
|
||||
.append_pair("state", &state)
|
||||
.append_pair("code_challenge_method", "S256")
|
||||
.append_pair("code_challenge", &code_challenge);
|
||||
|
||||
Ok(OAuthFlow {
|
||||
app_base_url: environment.app_base_url().to_string(),
|
||||
auth_url,
|
||||
token_url: format!("{}/login/oauth/access_token", environment.app_base_url()),
|
||||
redirect_url,
|
||||
state,
|
||||
code_verifier,
|
||||
})
|
||||
}
|
||||
|
||||
async fn receive_oauth_code(
|
||||
listener: TcpListener,
|
||||
expected_state: &str,
|
||||
app_base_url: &str,
|
||||
) -> CommandResult<String> {
|
||||
loop {
|
||||
let (mut stream, _) = listener
|
||||
.accept()
|
||||
.await
|
||||
.map_err(|e| format!("OAuth callback server accept error: {e}"))?;
|
||||
|
||||
match parse_callback_request(&mut stream).await {
|
||||
Ok((state, code)) => {
|
||||
if state != expected_state {
|
||||
let _ = write_bad_request(&mut stream, "Invalid OAuth state").await;
|
||||
continue;
|
||||
}
|
||||
|
||||
let success_redirect = format!("{app_base_url}/login/oauth/success");
|
||||
write_redirect(&mut stream, &success_redirect)
|
||||
.await
|
||||
.map_err(|e| format!("Failed responding to OAuth callback: {e}"))?;
|
||||
return Ok(code);
|
||||
}
|
||||
Err(error) => {
|
||||
let _ = write_bad_request(&mut stream, &error).await;
|
||||
if error.starts_with("OAuth provider returned error:") {
|
||||
return Err(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn parse_callback_request(stream: &mut TcpStream) -> CommandResult<(String, String)> {
|
||||
let target = read_http_target(stream).await?;
|
||||
if !target.starts_with("/oauth/callback") {
|
||||
return Err("Expected /oauth/callback path".to_string());
|
||||
}
|
||||
|
||||
let url = Url::parse(&format!("http://127.0.0.1{target}"))
|
||||
.map_err(|e| format!("Failed to parse callback URL: {e}"))?;
|
||||
let mut state: Option<String> = None;
|
||||
let mut code: Option<String> = None;
|
||||
let mut oauth_error: Option<String> = None;
|
||||
let mut oauth_error_description: Option<String> = None;
|
||||
|
||||
for (k, v) in url.query_pairs() {
|
||||
if k == "state" {
|
||||
state = Some(v.into_owned());
|
||||
} else if k == "code" {
|
||||
code = Some(v.into_owned());
|
||||
} else if k == "error" {
|
||||
oauth_error = Some(v.into_owned());
|
||||
} else if k == "error_description" {
|
||||
oauth_error_description = Some(v.into_owned());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(error) = oauth_error {
|
||||
let mut message = format!("OAuth provider returned error: {error}");
|
||||
if let Some(description) = oauth_error_description.filter(|d| !d.is_empty()) {
|
||||
message.push_str(&format!(" ({description})"));
|
||||
}
|
||||
return Err(message);
|
||||
}
|
||||
|
||||
let state = state.ok_or_else(|| "Missing 'state' query parameter".to_string())?;
|
||||
let code = code.ok_or_else(|| "Missing 'code' query parameter".to_string())?;
|
||||
|
||||
if code.is_empty() {
|
||||
return Err("Missing 'code' query parameter".to_string());
|
||||
}
|
||||
|
||||
Ok((state, code))
|
||||
}
|
||||
|
||||
async fn read_http_target(stream: &mut TcpStream) -> CommandResult<String> {
|
||||
let mut buf = vec![0_u8; MAX_REQUEST_BYTES];
|
||||
let mut total_read = 0_usize;
|
||||
|
||||
loop {
|
||||
let n = stream
|
||||
.read(&mut buf[total_read..])
|
||||
.await
|
||||
.map_err(|e| format!("Failed reading callback request: {e}"))?;
|
||||
if n == 0 {
|
||||
break;
|
||||
}
|
||||
total_read += n;
|
||||
|
||||
if buf[..total_read].windows(4).any(|w| w == b"\r\n\r\n") {
|
||||
break;
|
||||
}
|
||||
|
||||
if total_read == MAX_REQUEST_BYTES {
|
||||
return Err("OAuth callback request too large".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let req = String::from_utf8_lossy(&buf[..total_read]);
|
||||
let request_line =
|
||||
req.lines().next().ok_or_else(|| "Invalid callback request line".to_string())?;
|
||||
let mut parts = request_line.split_whitespace();
|
||||
let method = parts.next().unwrap_or_default();
|
||||
let target = parts.next().unwrap_or_default();
|
||||
|
||||
if method != "GET" {
|
||||
return Err(format!("Expected GET callback request, got '{method}'"));
|
||||
}
|
||||
if target.is_empty() {
|
||||
return Err("Missing callback request target".to_string());
|
||||
}
|
||||
|
||||
Ok(target.to_string())
|
||||
}
|
||||
|
||||
async fn write_bad_request(stream: &mut TcpStream, message: &str) -> std::io::Result<()> {
|
||||
let body = format!("Failed to authenticate: {message}");
|
||||
let response = format!(
|
||||
"HTTP/1.1 400 Bad Request\r\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{}",
|
||||
body.len(),
|
||||
body
|
||||
);
|
||||
stream.write_all(response.as_bytes()).await?;
|
||||
stream.shutdown().await
|
||||
}
|
||||
|
||||
async fn write_redirect(stream: &mut TcpStream, location: &str) -> std::io::Result<()> {
|
||||
let response = format!(
|
||||
"HTTP/1.1 302 Found\r\nLocation: {location}\r\nContent-Length: 0\r\nConnection: close\r\n\r\n"
|
||||
);
|
||||
stream.write_all(response.as_bytes()).await?;
|
||||
stream.shutdown().await
|
||||
}
|
||||
|
||||
async fn exchange_access_token(oauth: &OAuthFlow, code: &str) -> CommandResult<String> {
|
||||
let response = reqwest::Client::new()
|
||||
.post(&oauth.token_url)
|
||||
.header(reqwest::header::USER_AGENT, user_agent())
|
||||
.form(&[
|
||||
("grant_type", "authorization_code"),
|
||||
("client_id", OAUTH_CLIENT_ID),
|
||||
("code", code),
|
||||
("redirect_uri", oauth.redirect_url.as_str()),
|
||||
("code_verifier", oauth.code_verifier.as_str()),
|
||||
])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to exchange OAuth code for access token: {e}"))?;
|
||||
|
||||
let status = response.status();
|
||||
let body =
|
||||
response.text().await.map_err(|e| format!("Failed to read token response body: {e}"))?;
|
||||
|
||||
if !status.is_success() {
|
||||
return Err(format!(
|
||||
"Failed to fetch access token: status={} body={}",
|
||||
status.as_u16(),
|
||||
body
|
||||
));
|
||||
}
|
||||
|
||||
let parsed: Value =
|
||||
serde_json::from_str(&body).map_err(|e| format!("Invalid token response JSON: {e}"))?;
|
||||
let token = parsed
|
||||
.get("access_token")
|
||||
.and_then(Value::as_str)
|
||||
.filter(|s| !s.is_empty())
|
||||
.ok_or_else(|| format!("Token response missing access_token: {body}"))?;
|
||||
|
||||
Ok(token.to_string())
|
||||
}
|
||||
|
||||
fn keyring_entry(environment: Environment) -> CommandResult<Entry> {
|
||||
Entry::new(environment.keyring_service(), KEYRING_USER)
|
||||
.map_err(|e| format!("Failed to initialize auth keyring entry: {e}"))
|
||||
}
|
||||
|
||||
fn get_auth_token(environment: Environment) -> CommandResult<Option<String>> {
|
||||
let entry = keyring_entry(environment)?;
|
||||
match entry.get_password() {
|
||||
Ok(token) => Ok(Some(token)),
|
||||
Err(keyring::Error::NoEntry) => Ok(None),
|
||||
Err(err) => Err(format!("Failed to read auth token: {err}")),
|
||||
}
|
||||
}
|
||||
|
||||
fn store_auth_token(environment: Environment, token: &str) -> CommandResult {
|
||||
let entry = keyring_entry(environment)?;
|
||||
entry.set_password(token).map_err(|e| format!("Failed to store auth token: {e}"))
|
||||
}
|
||||
|
||||
fn delete_auth_token(environment: Environment) -> CommandResult {
|
||||
let entry = keyring_entry(environment)?;
|
||||
match entry.delete_credential() {
|
||||
Ok(()) | Err(keyring::Error::NoEntry) => Ok(()),
|
||||
Err(err) => Err(format!("Failed to delete auth token: {err}")),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_api_error(status: u16, body: &str) -> String {
|
||||
if let Ok(value) = serde_json::from_str::<Value>(body) {
|
||||
if let Some(message) = value.get("message").and_then(Value::as_str) {
|
||||
return message.to_string();
|
||||
}
|
||||
if let Some(error) = value.get("error").and_then(Value::as_str) {
|
||||
return error.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
format!("API error {status}: {body}")
|
||||
}
|
||||
|
||||
fn random_hex(bytes: usize) -> String {
|
||||
let mut data = vec![0_u8; bytes];
|
||||
OsRng.fill_bytes(&mut data);
|
||||
hex::encode(data)
|
||||
}
|
||||
|
||||
fn user_agent() -> String {
|
||||
format!("YaakCli/{} ({})", env!("CARGO_PKG_VERSION"), ua_platform())
|
||||
}
|
||||
|
||||
fn ua_platform() -> &'static str {
|
||||
match std::env::consts::OS {
|
||||
"windows" => "Win",
|
||||
"darwin" => "Mac",
|
||||
"linux" => "Linux",
|
||||
_ => "Unknown",
|
||||
}
|
||||
}
|
||||
|
||||
fn confirm_open_browser() -> CommandResult<bool> {
|
||||
if !io::stdin().is_terminal() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
loop {
|
||||
print!("Open default browser? [Y/n]: ");
|
||||
io::stdout().flush().map_err(|e| format!("Failed to flush stdout: {e}"))?;
|
||||
|
||||
let mut input = String::new();
|
||||
io::stdin().read_line(&mut input).map_err(|e| format!("Failed to read input: {e}"))?;
|
||||
|
||||
match input.trim().to_ascii_lowercase().as_str() {
|
||||
"" | "y" | "yes" => return Ok(true),
|
||||
"n" | "no" => return Ok(false),
|
||||
_ => ui::warning("Please answer y or n"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn environment_mapping() {
|
||||
assert_eq!(parse_environment(Some("staging")), Environment::Staging);
|
||||
assert_eq!(parse_environment(Some("development")), Environment::Development);
|
||||
assert_eq!(parse_environment(Some("production")), Environment::Production);
|
||||
assert_eq!(parse_environment(None), Environment::Production);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn parses_callback_request() {
|
||||
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
|
||||
let addr = listener.local_addr().expect("local addr");
|
||||
|
||||
let server = tokio::spawn(async move {
|
||||
let (mut stream, _) = listener.accept().await.expect("accept");
|
||||
parse_callback_request(&mut stream).await
|
||||
});
|
||||
|
||||
let mut client = TcpStream::connect(addr).await.expect("connect");
|
||||
client
|
||||
.write_all(
|
||||
b"GET /oauth/callback?code=abc123&state=xyz HTTP/1.1\r\nHost: localhost\r\n\r\n",
|
||||
)
|
||||
.await
|
||||
.expect("write");
|
||||
|
||||
let parsed = server.await.expect("join").expect("parse");
|
||||
assert_eq!(parsed.0, "xyz");
|
||||
assert_eq!(parsed.1, "abc123");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn parse_callback_request_oauth_error() {
|
||||
let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
|
||||
let addr = listener.local_addr().expect("local addr");
|
||||
|
||||
let server = tokio::spawn(async move {
|
||||
let (mut stream, _) = listener.accept().await.expect("accept");
|
||||
parse_callback_request(&mut stream).await
|
||||
});
|
||||
|
||||
let mut client = TcpStream::connect(addr).await.expect("connect");
|
||||
client
|
||||
.write_all(
|
||||
b"GET /oauth/callback?error=access_denied&error_description=User%20denied&state=xyz HTTP/1.1\r\nHost: localhost\r\n\r\n",
|
||||
)
|
||||
.await
|
||||
.expect("write");
|
||||
|
||||
let err = server.await.expect("join").expect_err("should fail");
|
||||
assert!(err.contains("OAuth provider returned error: access_denied"));
|
||||
assert!(err.contains("User denied"));
|
||||
}
|
||||
|
||||
    #[tokio::test]
    async fn receive_oauth_code_fails_fast_on_provider_error() {
        // Bind an ephemeral port so parallel tests never collide.
        let listener = TcpListener::bind("127.0.0.1:0").await.expect("bind");
        let addr = listener.local_addr().expect("local addr");

        // Exercise the full callback receiver: it must return promptly
        // instead of waiting forever for a code when the provider errors.
        let server = tokio::spawn(async move {
            receive_oauth_code(listener, "expected-state", "http://localhost:9444").await
        });

        let mut client = TcpStream::connect(addr).await.expect("connect");
        client
            .write_all(
                b"GET /oauth/callback?error=access_denied&state=expected-state HTTP/1.1\r\nHost: localhost\r\n\r\n",
            )
            .await
            .expect("write");

        // The 2-second timeout is what actually asserts the "fails fast"
        // property under test; a hang here fails the test, not the suite.
        let result = tokio::time::timeout(std::time::Duration::from_secs(2), server)
            .await
            .expect("should not timeout")
            .expect("join");
        let err = result.expect_err("should return oauth error");
        assert!(err.contains("OAuth provider returned error: access_denied"));
    }
|
||||
|
||||
    #[test]
    fn builds_oauth_flow_with_pkce() {
        let flow = build_oauth_flow(Environment::Development, 8080).expect("flow");
        // PKCE must be advertised with the S256 challenge method.
        assert!(flow.auth_url.as_str().contains("code_challenge_method=S256"));
        // The loopback redirect URI is percent-encoded into the auth URL query.
        assert!(
            flow.auth_url
                .as_str()
                .contains("redirect_uri=http%3A%2F%2F127.0.0.1%3A8080%2Foauth%2Fcallback")
        );
        assert_eq!(flow.redirect_url, "http://127.0.0.1:8080/oauth/callback");
        assert_eq!(flow.token_url, "http://localhost:9444/login/oauth/access_token");
    }
|
||||
}
|
||||
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
159
crates-cli/yaak-cli/src/commands/environment.rs
Normal file
@@ -0,0 +1,159 @@
|
||||
use crate::cli::{EnvironmentArgs, EnvironmentCommands};
|
||||
use crate::context::CliContext;
|
||||
use crate::utils::confirm::confirm_delete;
|
||||
use crate::utils::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use yaak_models::models::Environment;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
pub fn run(ctx: &CliContext, args: EnvironmentArgs) -> i32 {
|
||||
let result = match args.command {
|
||||
EnvironmentCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||
EnvironmentCommands::Show { environment_id } => show(ctx, &environment_id),
|
||||
EnvironmentCommands::Create { workspace_id, name, json } => {
|
||||
create(ctx, workspace_id, name, json)
|
||||
}
|
||||
EnvironmentCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
EnvironmentCommands::Delete { environment_id, yes } => delete(ctx, &environment_id, yes),
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Print every environment in the workspace as `id - name (parent_model)`,
/// creating the base environment first when it is missing.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let environments = ctx
        .db()
        .list_environments_ensure_base(workspace_id)
        .map_err(|e| format!("Failed to list environments: {e}"))?;

    if environments.is_empty() {
        println!("No environments found in workspace {workspace_id}");
        return Ok(());
    }
    for environment in &environments {
        println!("{} - {} ({})", environment.id, environment.name, environment.parent_model);
    }
    Ok(())
}
|
||||
|
||||
fn show(ctx: &CliContext, environment_id: &str) -> CommandResult {
|
||||
let environment = ctx
|
||||
.db()
|
||||
.get_environment(environment_id)
|
||||
.map_err(|e| format!("Failed to get environment: {e}"))?;
|
||||
let output = serde_json::to_string_pretty(&environment)
|
||||
.map_err(|e| format!("Failed to serialize environment: {e}"))?;
|
||||
println!("{output}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
workspace_id: Option<String>,
|
||||
name: Option<String>,
|
||||
json: Option<String>,
|
||||
) -> CommandResult {
|
||||
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||
return Err(
|
||||
"environment create cannot combine workspace_id with --json payload".to_string()
|
||||
);
|
||||
}
|
||||
|
||||
let payload = parse_optional_json(
|
||||
json,
|
||||
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||
"environment create",
|
||||
)?;
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() {
|
||||
return Err("environment create cannot combine --name with JSON payload".to_string());
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "environment")?;
|
||||
let mut environment: Environment = serde_json::from_value(payload)
|
||||
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
|
||||
|
||||
if environment.workspace_id.is_empty() {
|
||||
return Err("environment create JSON requires non-empty \"workspaceId\"".to_string());
|
||||
}
|
||||
|
||||
if environment.parent_model.is_empty() {
|
||||
environment.parent_model = "environment".to_string();
|
||||
}
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||
|
||||
println!("Created environment: {}", created.id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let workspace_id = workspace_id.ok_or_else(|| {
|
||||
"environment create requires workspace_id unless JSON payload is provided".to_string()
|
||||
})?;
|
||||
let name = name.ok_or_else(|| {
|
||||
"environment create requires --name unless JSON payload is provided".to_string()
|
||||
})?;
|
||||
|
||||
let environment = Environment {
|
||||
workspace_id,
|
||||
name,
|
||||
parent_model: "environment".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_environment(&environment, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to create environment: {e}"))?;
|
||||
|
||||
println!("Created environment: {}", created.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||
let patch = parse_required_json(json, json_input, "environment update")?;
|
||||
let id = require_id(&patch, "environment update")?;
|
||||
|
||||
let existing = ctx
|
||||
.db()
|
||||
.get_environment(&id)
|
||||
.map_err(|e| format!("Failed to get environment for update: {e}"))?;
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "environment update")?;
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_environment(&updated, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to update environment: {e}"))?;
|
||||
|
||||
println!("Updated environment: {}", saved.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, environment_id: &str, yes: bool) -> CommandResult {
|
||||
if !yes && !confirm_delete("environment", environment_id) {
|
||||
println!("Aborted");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_environment_by_id(environment_id, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to delete environment: {e}"))?;
|
||||
|
||||
println!("Deleted environment: {}", deleted.id);
|
||||
Ok(())
|
||||
}
|
||||
141
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
141
crates-cli/yaak-cli/src/commands/folder.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use crate::cli::{FolderArgs, FolderCommands};
|
||||
use crate::context::CliContext;
|
||||
use crate::utils::confirm::confirm_delete;
|
||||
use crate::utils::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use yaak_models::models::Folder;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
/// Dispatch a `folder` subcommand; returns 0 on success, 1 after printing
/// the error to stderr.
pub fn run(ctx: &CliContext, args: FolderArgs) -> i32 {
    let result = match args.command {
        FolderCommands::List { workspace_id } => list(ctx, &workspace_id),
        FolderCommands::Show { folder_id } => show(ctx, &folder_id),
        FolderCommands::Create { workspace_id, name, json } => {
            create(ctx, workspace_id, name, json)
        }
        FolderCommands::Update { json, json_input } => update(ctx, json, json_input),
        FolderCommands::Delete { folder_id, yes } => delete(ctx, &folder_id, yes),
    };

    match result {
        Ok(()) => 0,
        Err(error) => {
            eprintln!("Error: {error}");
            1
        }
    }
}
|
||||
|
||||
/// List the folders of a workspace as `id - name`, one per line.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let folders =
        ctx.db().list_folders(workspace_id).map_err(|e| format!("Failed to list folders: {e}"))?;
    if folders.is_empty() {
        println!("No folders found in workspace {}", workspace_id);
    } else {
        for folder in folders {
            println!("{} - {}", folder.id, folder.name);
        }
    }
    Ok(())
}
|
||||
|
||||
/// Print a single folder as pretty-printed JSON.
fn show(ctx: &CliContext, folder_id: &str) -> CommandResult {
    let folder =
        ctx.db().get_folder(folder_id).map_err(|e| format!("Failed to get folder: {e}"))?;
    let output = serde_json::to_string_pretty(&folder)
        .map_err(|e| format!("Failed to serialize folder: {e}"))?;
    println!("{output}");
    Ok(())
}
|
||||
|
||||
fn create(
|
||||
ctx: &CliContext,
|
||||
workspace_id: Option<String>,
|
||||
name: Option<String>,
|
||||
json: Option<String>,
|
||||
) -> CommandResult {
|
||||
if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
|
||||
return Err("folder create cannot combine workspace_id with --json payload".to_string());
|
||||
}
|
||||
|
||||
let payload = parse_optional_json(
|
||||
json,
|
||||
workspace_id.clone().filter(|v| is_json_shorthand(v)),
|
||||
"folder create",
|
||||
)?;
|
||||
|
||||
if let Some(payload) = payload {
|
||||
if name.is_some() {
|
||||
return Err("folder create cannot combine --name with JSON payload".to_string());
|
||||
}
|
||||
|
||||
validate_create_id(&payload, "folder")?;
|
||||
let folder: Folder = serde_json::from_value(payload)
|
||||
.map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
|
||||
|
||||
if folder.workspace_id.is_empty() {
|
||||
return Err("folder create JSON requires non-empty \"workspaceId\"".to_string());
|
||||
}
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||
|
||||
println!("Created folder: {}", created.id);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let workspace_id = workspace_id.ok_or_else(|| {
|
||||
"folder create requires workspace_id unless JSON payload is provided".to_string()
|
||||
})?;
|
||||
let name = name.ok_or_else(|| {
|
||||
"folder create requires --name unless JSON payload is provided".to_string()
|
||||
})?;
|
||||
|
||||
let folder = Folder { workspace_id, name, ..Default::default() };
|
||||
|
||||
let created = ctx
|
||||
.db()
|
||||
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to create folder: {e}"))?;
|
||||
|
||||
println!("Created folder: {}", created.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Apply a JSON merge patch (from --json or stdin) to an existing folder.
/// The patch must carry the target folder's "id".
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
    let patch = parse_required_json(json, json_input, "folder update")?;
    let id = require_id(&patch, "folder update")?;

    // Load the current record, merge the patch over it, then persist.
    let existing =
        ctx.db().get_folder(&id).map_err(|e| format!("Failed to get folder for update: {e}"))?;
    let updated = apply_merge_patch(&existing, &patch, &id, "folder update")?;

    let saved = ctx
        .db()
        .upsert_folder(&updated, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to update folder: {e}"))?;

    println!("Updated folder: {}", saved.id);
    Ok(())
}
|
||||
|
||||
/// Delete a folder by id, asking for interactive confirmation unless
/// `--yes` was passed.
fn delete(ctx: &CliContext, folder_id: &str, yes: bool) -> CommandResult {
    // confirm_delete is only invoked when --yes was not given.
    if !yes && !confirm_delete("folder", folder_id) {
        println!("Aborted");
        return Ok(());
    }

    let deleted = ctx
        .db()
        .delete_folder_by_id(folder_id, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to delete folder: {e}"))?;

    println!("Deleted folder: {}", deleted.id);
    Ok(())
}
|
||||
7
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
7
crates-cli/yaak-cli/src/commands/mod.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
// CLI subcommand implementations, one module per top-level command.
pub mod auth;
pub mod environment;
pub mod folder;
pub mod plugin;
pub mod request;
pub mod send;
pub mod workspace;
|
||||
553
crates-cli/yaak-cli/src/commands/plugin.rs
Normal file
553
crates-cli/yaak-cli/src/commands/plugin.rs
Normal file
@@ -0,0 +1,553 @@
|
||||
use crate::cli::{GenerateArgs, PluginArgs, PluginCommands, PluginPathArg};
|
||||
use crate::ui;
|
||||
use keyring::Entry;
|
||||
use rand::Rng;
|
||||
use rolldown::{
|
||||
Bundler, BundlerOptions, ExperimentalOptions, InputItem, LogLevel, OutputFormat, Platform,
|
||||
WatchOption, Watcher,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::{self, IsTerminal, Read, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use walkdir::WalkDir;
|
||||
use zip::CompressionMethod;
|
||||
use zip::write::SimpleFileOptions;
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
const KEYRING_USER: &str = "yaak";
|
||||
|
||||
/// Which Yaak backend the CLI talks to; selected at runtime via the
/// `ENVIRONMENT` env var (see `current_environment`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Environment {
    Production,
    Staging,
    Development,
}
|
||||
|
||||
impl Environment {
    /// Base URL of the Yaak API for this environment.
    fn api_base_url(self) -> &'static str {
        match self {
            Environment::Production => "https://api.yaak.app",
            // NOTE(review): the staging URL looks like a placeholder — confirm.
            Environment::Staging => "https://todo.yaak.app",
            Environment::Development => "http://localhost:9444",
        }
    }

    /// OS-keyring service name under which the CLI auth token is stored,
    /// namespaced per environment so tokens never collide.
    fn keyring_service(self) -> &'static str {
        match self {
            Environment::Production => "app.yaak.cli.Token",
            Environment::Staging => "app.yaak.cli.staging.Token",
            Environment::Development => "app.yaak.cli.dev.Token",
        }
    }
}
|
||||
|
||||
/// CLI entry for `plugin build`; maps the result onto an exit code.
pub async fn run_build(args: PluginPathArg) -> i32 {
    match build(args).await {
        Ok(()) => 0,
        Err(error) => {
            ui::error(&error);
            1
        }
    }
}
|
||||
|
||||
/// Dispatch `yaak plugin <subcommand>` to the matching runner.
pub async fn run(args: PluginArgs) -> i32 {
    match args.command {
        PluginCommands::Build(args) => run_build(args).await,
        PluginCommands::Dev(args) => run_dev(args).await,
        PluginCommands::Generate(args) => run_generate(args).await,
        PluginCommands::Publish(args) => run_publish(args).await,
    }
}
|
||||
|
||||
/// CLI entry for `plugin dev` (watch mode); maps the result onto an exit code.
pub async fn run_dev(args: PluginPathArg) -> i32 {
    match dev(args).await {
        Ok(()) => 0,
        Err(error) => {
            ui::error(&error);
            1
        }
    }
}
|
||||
|
||||
/// CLI entry for `plugin generate`. `generate` itself is synchronous; this
/// wrapper is async only for signature parity with the other subcommands.
pub async fn run_generate(args: GenerateArgs) -> i32 {
    match generate(args) {
        Ok(()) => 0,
        Err(error) => {
            ui::error(&error);
            1
        }
    }
}
|
||||
|
||||
/// CLI entry for `plugin publish`; maps the result onto an exit code.
pub async fn run_publish(args: PluginPathArg) -> i32 {
    match publish(args).await {
        Ok(()) => 0,
        Err(error) => {
            ui::error(&error);
            1
        }
    }
}
|
||||
|
||||
/// Bundle the plugin at the given (or current) directory into `build/index.js`.
async fn build(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    // Validate package.json + src/index.ts exist before invoking the bundler.
    ensure_plugin_build_inputs(&plugin_dir)?;

    ui::info(&format!("Building plugin {}...", plugin_dir.display()));
    let warnings = build_plugin_bundle(&plugin_dir).await?;
    // Warnings are non-fatal; surface each one to the user.
    for warning in warnings {
        ui::warning(&warning);
    }
    ui::success(&format!("Built plugin bundle at {}", plugin_dir.join("build/index.js").display()));
    Ok(())
}
|
||||
|
||||
/// Run the bundler in watch mode, rebuilding on source changes until the
/// watcher terminates (typically via Ctrl-C).
async fn dev(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    ensure_plugin_build_inputs(&plugin_dir)?;

    ui::info(&format!("Watching plugin {}...", plugin_dir.display()));
    ui::info("Press Ctrl-C to stop");

    // watch=true enables incremental rebuilds (see bundler_options).
    let bundler = Bundler::new(bundler_options(&plugin_dir, true))
        .map_err(|err| format!("Failed to initialize Rolldown watcher: {err}"))?;
    let watcher = Watcher::new(vec![Arc::new(Mutex::new(bundler))], None)
        .map_err(|err| format!("Failed to start Rolldown watcher: {err}"))?;

    // Blocks until the watch loop ends.
    watcher.start().await;
    Ok(())
}
|
||||
|
||||
/// Scaffold a new plugin project, prompting for name/dir when they are not
/// provided via flags (prompts fall back to defaults on non-TTY stdin).
fn generate(args: GenerateArgs) -> CommandResult {
    let default_name = random_name();
    let name = match args.name {
        Some(name) => name,
        None => prompt_with_default("Plugin name", &default_name)?,
    };

    let default_dir = format!("./{name}");
    let output_dir = match args.dir {
        Some(dir) => dir,
        None => PathBuf::from(prompt_with_default("Plugin dir", &default_dir)?),
    };

    // Refuse to overwrite anything that already exists.
    if output_dir.exists() {
        return Err(format!("Plugin directory already exists: {}", output_dir.display()));
    }

    ui::info(&format!("Generating plugin in {}", output_dir.display()));
    fs::create_dir_all(output_dir.join("src"))
        .map_err(|e| format!("Failed creating plugin directory {}: {e}", output_dir.display()))?;

    // Write templates; the "yaak-plugin-name" placeholder is substituted
    // with the chosen plugin name where relevant.
    write_file(&output_dir.join(".gitignore"), TEMPLATE_GITIGNORE)?;
    write_file(
        &output_dir.join("package.json"),
        &TEMPLATE_PACKAGE_JSON.replace("yaak-plugin-name", &name),
    )?;
    write_file(&output_dir.join("tsconfig.json"), TEMPLATE_TSCONFIG)?;
    write_file(&output_dir.join("README.md"), &TEMPLATE_README.replace("yaak-plugin-name", &name))?;
    write_file(
        &output_dir.join("src/index.ts"),
        &TEMPLATE_INDEX_TS.replace("yaak-plugin-name", &name),
    )?;
    write_file(&output_dir.join("src/index.test.ts"), TEMPLATE_INDEX_TEST_TS)?;

    ui::success("Plugin scaffold generated");
    ui::info("Next steps:");
    println!(" 1. cd {}", output_dir.display());
    println!(" 2. npm install");
    println!(" 3. yaak plugin build");
    Ok(())
}
|
||||
|
||||
/// Build, archive, and upload the plugin to the Yaak plugin registry.
///
/// Requires a prior `yaak auth login`; the session token is read from the
/// OS keyring for the current environment.
async fn publish(args: PluginPathArg) -> CommandResult {
    let plugin_dir = resolve_plugin_dir(args.path)?;
    ensure_plugin_build_inputs(&plugin_dir)?;

    let environment = current_environment();
    // Fail before doing any build work when the user is not authenticated.
    let token = get_auth_token(environment)?
        .ok_or_else(|| "Not logged in. Run `yaak auth login`.".to_string())?;

    ui::info(&format!("Building plugin {}...", plugin_dir.display()));
    let warnings = build_plugin_bundle(&plugin_dir).await?;
    for warning in warnings {
        ui::warning(&warning);
    }

    ui::info("Archiving plugin");
    let archive = create_publish_archive(&plugin_dir)?;

    ui::info("Uploading plugin");
    let url = format!("{}/api/v1/plugins/publish", environment.api_base_url());
    let response = reqwest::Client::new()
        .post(url)
        .header("X-Yaak-Session", token)
        .header(reqwest::header::USER_AGENT, user_agent())
        .header(reqwest::header::CONTENT_TYPE, "application/zip")
        .body(archive)
        .send()
        .await
        .map_err(|e| format!("Failed to upload plugin: {e}"))?;

    // Read the body before checking status so error responses can be parsed.
    let status = response.status();
    let body =
        response.text().await.map_err(|e| format!("Failed reading publish response body: {e}"))?;

    if !status.is_success() {
        return Err(parse_api_error(status.as_u16(), &body));
    }

    let published: PublishResponse = serde_json::from_str(&body)
        .map_err(|e| format!("Failed parsing publish response JSON: {e}\nResponse: {body}"))?;
    ui::success(&format!("Plugin published {}", published.version));
    println!(" -> {}", published.url);
    Ok(())
}
|
||||
|
||||
/// Subset of the publish API response surfaced to the user.
#[derive(Deserialize)]
struct PublishResponse {
    // Published plugin version reported by the registry.
    version: String,
    // Public URL of the published plugin.
    url: String,
}
|
||||
|
||||
/// Run a one-shot Rolldown build for the plugin, returning any bundler
/// warnings as strings (the build itself fails the command on error).
async fn build_plugin_bundle(plugin_dir: &Path) -> CommandResult<Vec<String>> {
    // Start from a clean build/ directory every time.
    prepare_build_output_dir(plugin_dir)?;
    let mut bundler = Bundler::new(bundler_options(plugin_dir, false))
        .map_err(|err| format!("Failed to initialize Rolldown: {err}"))?;
    // `write` bundles and emits the output files to disk.
    let output = bundler.write().await.map_err(|err| format!("Plugin build failed:\n{err}"))?;

    Ok(output.warnings.into_iter().map(|w| w.to_string()).collect())
}
|
||||
|
||||
/// Recreate `<plugin_dir>/build` as an empty directory so stale artifacts
/// from a previous build never leak into the new bundle.
fn prepare_build_output_dir(plugin_dir: &Path) -> CommandResult {
    let build_dir = plugin_dir.join("build");
    if build_dir.exists() {
        fs::remove_dir_all(&build_dir)
            .map_err(|e| format!("Failed to clean build directory {}: {e}", build_dir.display()))?;
    }
    fs::create_dir_all(&build_dir)
        .map_err(|e| format!("Failed to create build directory {}: {e}", build_dir.display()))
}
|
||||
|
||||
/// Rolldown configuration shared by `build` and `dev`: bundle `src/index.ts`
/// into a CommonJS `build/index.js` targeting Node. Watch mode additionally
/// enables incremental rebuilds.
fn bundler_options(plugin_dir: &Path, watch: bool) -> BundlerOptions {
    BundlerOptions {
        input: Some(vec![InputItem { import: "./src/index.ts".to_string(), ..Default::default() }]),
        // All paths are resolved relative to the plugin directory.
        cwd: Some(plugin_dir.to_path_buf()),
        file: Some("build/index.js".to_string()),
        format: Some(OutputFormat::Cjs),
        platform: Some(Platform::Node),
        log_level: Some(LogLevel::Info),
        // Incremental builds only make sense when watching.
        experimental: watch
            .then_some(ExperimentalOptions { incremental_build: Some(true), ..Default::default() }),
        watch: watch.then_some(WatchOption::default()),
        ..Default::default()
    }
}
|
||||
|
||||
fn resolve_plugin_dir(path: Option<PathBuf>) -> CommandResult<PathBuf> {
|
||||
let cwd =
|
||||
std::env::current_dir().map_err(|e| format!("Failed to read current directory: {e}"))?;
|
||||
let candidate = match path {
|
||||
Some(path) if path.is_absolute() => path,
|
||||
Some(path) => cwd.join(path),
|
||||
None => cwd,
|
||||
};
|
||||
|
||||
if !candidate.exists() {
|
||||
return Err(format!("Plugin directory does not exist: {}", candidate.display()));
|
||||
}
|
||||
if !candidate.is_dir() {
|
||||
return Err(format!("Plugin path is not a directory: {}", candidate.display()));
|
||||
}
|
||||
|
||||
candidate
|
||||
.canonicalize()
|
||||
.map_err(|e| format!("Failed to resolve plugin directory {}: {e}", candidate.display()))
|
||||
}
|
||||
|
||||
fn ensure_plugin_build_inputs(plugin_dir: &Path) -> CommandResult {
|
||||
let package_json = plugin_dir.join("package.json");
|
||||
if !package_json.is_file() {
|
||||
return Err(format!(
|
||||
"{} does not exist. Ensure that you are in a plugin directory.",
|
||||
package_json.display()
|
||||
));
|
||||
}
|
||||
|
||||
let entry = plugin_dir.join("src/index.ts");
|
||||
if !entry.is_file() {
|
||||
return Err(format!("Required entrypoint missing: {}", entry.display()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_publish_archive(plugin_dir: &Path) -> CommandResult<Vec<u8>> {
|
||||
let required_files = [
|
||||
"README.md",
|
||||
"package.json",
|
||||
"build/index.js",
|
||||
"src/index.ts",
|
||||
];
|
||||
let optional_files = ["package-lock.json"];
|
||||
|
||||
let mut selected = HashSet::new();
|
||||
for required in required_files {
|
||||
let required_path = plugin_dir.join(required);
|
||||
if !required_path.is_file() {
|
||||
return Err(format!("Missing required file: {required}"));
|
||||
}
|
||||
selected.insert(required.to_string());
|
||||
}
|
||||
for optional in optional_files {
|
||||
selected.insert(optional.to_string());
|
||||
}
|
||||
|
||||
let cursor = std::io::Cursor::new(Vec::new());
|
||||
let mut zip = zip::ZipWriter::new(cursor);
|
||||
let options = SimpleFileOptions::default().compression_method(CompressionMethod::Deflated);
|
||||
|
||||
for entry in WalkDir::new(plugin_dir) {
|
||||
let entry = entry.map_err(|e| format!("Failed walking plugin directory: {e}"))?;
|
||||
if !entry.file_type().is_file() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = entry.path();
|
||||
let rel = path
|
||||
.strip_prefix(plugin_dir)
|
||||
.map_err(|e| format!("Failed deriving relative path for {}: {e}", path.display()))?;
|
||||
let rel = rel.to_string_lossy().replace('\\', "/");
|
||||
|
||||
let keep = rel.starts_with("src/") || rel.starts_with("build/") || selected.contains(&rel);
|
||||
if !keep {
|
||||
continue;
|
||||
}
|
||||
|
||||
zip.start_file(rel, options).map_err(|e| format!("Failed adding file to archive: {e}"))?;
|
||||
let mut file = fs::File::open(path)
|
||||
.map_err(|e| format!("Failed opening file {}: {e}", path.display()))?;
|
||||
let mut contents = Vec::new();
|
||||
file.read_to_end(&mut contents)
|
||||
.map_err(|e| format!("Failed reading file {}: {e}", path.display()))?;
|
||||
zip.write_all(&contents).map_err(|e| format!("Failed writing archive contents: {e}"))?;
|
||||
}
|
||||
|
||||
let cursor = zip.finish().map_err(|e| format!("Failed finalizing plugin archive: {e}"))?;
|
||||
Ok(cursor.into_inner())
|
||||
}
|
||||
|
||||
fn write_file(path: &Path, contents: &str) -> CommandResult {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent)
|
||||
.map_err(|e| format!("Failed creating directory {}: {e}", parent.display()))?;
|
||||
}
|
||||
fs::write(path, contents).map_err(|e| format!("Failed writing file {}: {e}", path.display()))
|
||||
}
|
||||
|
||||
/// Prompt on stdout and read one line from stdin; returns `default` when
/// the input is empty or stdin is not a terminal (CI / piped input).
fn prompt_with_default(label: &str, default: &str) -> CommandResult<String> {
    if !io::stdin().is_terminal() {
        return Ok(default.to_string());
    }

    print!("{label} [{default}]: ");
    // `print!` does not flush, so force the prompt out before blocking on read.
    io::stdout().flush().map_err(|e| format!("Failed to flush stdout: {e}"))?;

    let mut input = String::new();
    io::stdin().read_line(&mut input).map_err(|e| format!("Failed to read input: {e}"))?;
    let trimmed = input.trim();

    if trimmed.is_empty() { Ok(default.to_string()) } else { Ok(trimmed.to_string()) }
}
|
||||
|
||||
fn current_environment() -> Environment {
|
||||
match std::env::var("ENVIRONMENT").as_deref() {
|
||||
Ok("staging") => Environment::Staging,
|
||||
Ok("development") => Environment::Development,
|
||||
_ => Environment::Production,
|
||||
}
|
||||
}
|
||||
|
||||
/// OS-keyring entry used to persist the CLI auth token for `environment`.
fn keyring_entry(environment: Environment) -> CommandResult<Entry> {
    Entry::new(environment.keyring_service(), KEYRING_USER)
        .map_err(|e| format!("Failed to initialize auth keyring entry: {e}"))
}
|
||||
|
||||
/// Read the stored auth token for `environment`; `Ok(None)` means the user
/// is not logged in.
fn get_auth_token(environment: Environment) -> CommandResult<Option<String>> {
    let entry = keyring_entry(environment)?;
    match entry.get_password() {
        Ok(token) => Ok(Some(token)),
        // A missing keyring entry is "not logged in", not a hard error.
        Err(keyring::Error::NoEntry) => Ok(None),
        Err(err) => Err(format!("Failed to read auth token: {err}")),
    }
}
|
||||
|
||||
fn parse_api_error(status: u16, body: &str) -> String {
|
||||
if let Ok(value) = serde_json::from_str::<Value>(body) {
|
||||
if let Some(message) = value.get("message").and_then(Value::as_str) {
|
||||
return message.to_string();
|
||||
}
|
||||
if let Some(error) = value.get("error").and_then(Value::as_str) {
|
||||
return error.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
format!("API error {status}: {body}")
|
||||
}
|
||||
|
||||
/// `User-Agent` header value, e.g. `YaakCli/1.2.3 (Linux)`.
fn user_agent() -> String {
    format!("YaakCli/{} ({})", env!("CARGO_PKG_VERSION"), ua_platform())
}
|
||||
|
||||
/// Short platform label for the User-Agent string.
fn ua_platform() -> &'static str {
    match std::env::consts::OS {
        "windows" => "Win",
        // BUG FIX: `std::env::consts::OS` reports macOS as "macos", never
        // "darwin" — the old arm could not match, so Macs showed "Unknown".
        "macos" => "Mac",
        "linux" => "Linux",
        _ => "Unknown",
    }
}
|
||||
|
||||
/// Generate a whimsical default plugin name like "yellow-yak"
/// (random yak-themed adjective-noun pair).
fn random_name() -> String {
    const ADJECTIVES: &[&str] = &[
        "young", "youthful", "yellow", "yielding", "yappy", "yawning", "yummy", "yucky", "yearly",
        "yester", "yeasty", "yelling",
    ];
    const NOUNS: &[&str] = &[
        "yak", "yarn", "year", "yell", "yoke", "yoga", "yam", "yacht", "yodel",
    ];

    let mut rng = rand::thread_rng();
    let adjective = ADJECTIVES[rng.gen_range(0..ADJECTIVES.len())];
    let noun = NOUNS[rng.gen_range(0..NOUNS.len())];
    format!("{adjective}-{noun}")
}
|
||||
|
||||
// --- Scaffolding templates used by `yaak plugin generate` -----------------
// The literal "yaak-plugin-name" is a placeholder substituted with the
// chosen plugin name at generation time.
// NOTE(review): whitespace inside these templates was reconstructed with
// conventional 2-space indentation — confirm against the original source.

const TEMPLATE_GITIGNORE: &str = "node_modules\n";

// package.json with build/dev scripts wired to the Yaak CLI.
const TEMPLATE_PACKAGE_JSON: &str = r#"{
  "name": "yaak-plugin-name",
  "private": true,
  "version": "0.0.1",
  "scripts": {
    "build": "yaak plugin build",
    "dev": "yaak plugin dev"
  },
  "devDependencies": {
    "@types/node": "^24.10.1",
    "typescript": "^5.9.3",
    "vitest": "^4.0.14"
  },
  "dependencies": {
    "@yaakapp/api": "^0.7.0"
  }
}
"#;

// Type-check-only TypeScript config (noEmit: bundling is done by Rolldown).
const TEMPLATE_TSCONFIG: &str = r#"{
  "compilerOptions": {
    "target": "es2021",
    "lib": ["DOM", "DOM.Iterable", "ESNext"],
    "useDefineForClassFields": true,
    "allowJs": false,
    "skipLibCheck": true,
    "esModuleInterop": false,
    "allowSyntheticDefaultImports": true,
    "strict": true,
    "noUncheckedIndexedAccess": true,
    "forceConsistentCasingInFileNames": true,
    "module": "ESNext",
    "moduleResolution": "Node",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "noEmit": true,
    "jsx": "react-jsx"
  },
  "include": ["src"]
}
"#;

// Minimal README stub.
const TEMPLATE_README: &str = r#"# yaak-plugin-name

Describe what your plugin does.
"#;

// Example plugin entrypoint demonstrating an HTTP request action.
const TEMPLATE_INDEX_TS: &str = r#"import type { PluginDefinition } from "@yaakapp/api";

export const plugin: PluginDefinition = {
  httpRequestActions: [
    {
      label: "Hello, From Plugin",
      icon: "info",
      async onSelect(ctx, args) {
        await ctx.toast.show({
          color: "success",
          message: `You clicked the request ${args.httpRequest.id}`,
        });
      },
    },
  ],
};
"#;

// Vitest smoke test for the generated plugin.
const TEMPLATE_INDEX_TEST_TS: &str = r#"import { describe, expect, test } from "vitest";
import { plugin } from "./index";

describe("Example Plugin", () => {
  test("Exports plugin object", () => {
    expect(plugin).toBeTypeOf("object");
  });
});
"#;
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::create_publish_archive;
    use std::collections::HashSet;
    use std::fs;
    use std::io::Cursor;
    use tempfile::TempDir;
    use zip::ZipArchive;

    /// The archive must contain the required metadata files, src/ and
    /// build/ contents, and package-lock.json — and nothing else.
    #[test]
    fn publish_archive_includes_required_and_optional_files() {
        let dir = TempDir::new().expect("temp dir");
        let root = dir.path();

        fs::create_dir_all(root.join("src")).expect("create src");
        fs::create_dir_all(root.join("build")).expect("create build");
        // A directory that should NOT be shipped.
        fs::create_dir_all(root.join("ignored")).expect("create ignored");

        fs::write(root.join("README.md"), "# Demo\n").expect("write README");
        fs::write(root.join("package.json"), "{}").expect("write package.json");
        fs::write(root.join("package-lock.json"), "{}").expect("write package-lock.json");
        fs::write(root.join("src/index.ts"), "export const plugin = {};\n")
            .expect("write src/index.ts");
        fs::write(root.join("build/index.js"), "exports.plugin = {};\n")
            .expect("write build/index.js");
        fs::write(root.join("ignored/secret.txt"), "do-not-ship").expect("write ignored file");

        let archive = create_publish_archive(root).expect("create archive");
        let mut zip = ZipArchive::new(Cursor::new(archive)).expect("open zip");

        // Collect all entry names for the membership assertions below.
        let mut names = HashSet::new();
        for i in 0..zip.len() {
            let file = zip.by_index(i).expect("zip entry");
            names.insert(file.name().to_string());
        }

        assert!(names.contains("README.md"));
        assert!(names.contains("package.json"));
        assert!(names.contains("package-lock.json"));
        assert!(names.contains("src/index.ts"));
        assert!(names.contains("build/index.js"));
        assert!(!names.contains("ignored/secret.txt"));
    }
}
|
||||
485
crates-cli/yaak-cli/src/commands/request.rs
Normal file
485
crates-cli/yaak-cli/src/commands/request.rs
Normal file
@@ -0,0 +1,485 @@
|
||||
use crate::cli::{RequestArgs, RequestCommands, RequestSchemaType};
|
||||
use crate::context::CliContext;
|
||||
use crate::utils::confirm::confirm_delete;
|
||||
use crate::utils::json::{
|
||||
apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
|
||||
validate_create_id,
|
||||
};
|
||||
use schemars::schema_for;
|
||||
use serde_json::{Map, Value, json};
|
||||
use std::collections::HashMap;
|
||||
use tokio::sync::mpsc;
|
||||
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
|
||||
use yaak_models::models::{GrpcRequest, HttpRequest, WebsocketRequest};
|
||||
use yaak_models::queries::any_request::AnyRequest;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{FormInput, FormInputBase, JsonPrimitive, PluginContext};
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
pub async fn run(
|
||||
ctx: &CliContext,
|
||||
args: RequestArgs,
|
||||
environment: Option<&str>,
|
||||
verbose: bool,
|
||||
) -> i32 {
|
||||
let result = match args.command {
|
||||
RequestCommands::List { workspace_id } => list(ctx, &workspace_id),
|
||||
RequestCommands::Show { request_id } => show(ctx, &request_id),
|
||||
RequestCommands::Send { request_id } => {
|
||||
return match send_request_by_id(ctx, &request_id, environment, verbose).await {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
};
|
||||
}
|
||||
RequestCommands::Schema { request_type } => {
|
||||
return match schema(ctx, request_type).await {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
};
|
||||
}
|
||||
RequestCommands::Create { workspace_id, name, method, url, json } => {
|
||||
create(ctx, workspace_id, name, method, url, json)
|
||||
}
|
||||
RequestCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
RequestCommands::Delete { request_id, yes } => delete(ctx, &request_id, yes),
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Print one `id - METHOD name` line per HTTP request in the workspace.
fn list(ctx: &CliContext, workspace_id: &str) -> CommandResult {
    let requests = ctx
        .db()
        .list_http_requests(workspace_id)
        .map_err(|e| format!("Failed to list requests: {e}"))?;

    if requests.is_empty() {
        println!("No requests found in workspace {}", workspace_id);
        return Ok(());
    }

    for request in &requests {
        println!("{} - {} {}", request.id, request.method, request.name);
    }
    Ok(())
}
|
||||
|
||||
async fn schema(ctx: &CliContext, request_type: RequestSchemaType) -> CommandResult {
|
||||
let mut schema = match request_type {
|
||||
RequestSchemaType::Http => serde_json::to_value(schema_for!(HttpRequest))
|
||||
.map_err(|e| format!("Failed to serialize HTTP request schema: {e}"))?,
|
||||
RequestSchemaType::Grpc => serde_json::to_value(schema_for!(GrpcRequest))
|
||||
.map_err(|e| format!("Failed to serialize gRPC request schema: {e}"))?,
|
||||
RequestSchemaType::Websocket => serde_json::to_value(schema_for!(WebsocketRequest))
|
||||
.map_err(|e| format!("Failed to serialize WebSocket request schema: {e}"))?,
|
||||
};
|
||||
|
||||
if let Err(error) = merge_auth_schema_from_plugins(ctx, &mut schema).await {
|
||||
eprintln!("Warning: Failed to enrich authentication schema from plugins: {error}");
|
||||
}
|
||||
|
||||
let output = serde_json::to_string_pretty(&schema)
|
||||
.map_err(|e| format!("Failed to format schema JSON: {e}"))?;
|
||||
println!("{output}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Enrich a request JSON schema's `properties.authentication` with one
/// `oneOf` variant per plugin-provided auth strategy.
///
/// Best-effort: strategies whose config fails to load are skipped with a
/// warning, and a schema without an `authentication` property is returned
/// untouched.
async fn merge_auth_schema_from_plugins(
    ctx: &CliContext,
    schema: &mut Value,
) -> Result<(), String> {
    // No workspace/window scoping is needed just to enumerate strategies.
    let plugin_context = PluginContext::new_empty();
    let plugin_manager = ctx.plugin_manager();
    let summaries = plugin_manager
        .get_http_authentication_summaries(&plugin_context)
        .await
        .map_err(|e| e.to_string())?;

    let mut auth_variants = Vec::new();
    for (_, summary) in summaries {
        // Fetch the strategy's form definition with empty values; the last
        // argument is an identifier for this CLI caller.
        let config = match plugin_manager
            .get_http_authentication_config(
                &plugin_context,
                &summary.name,
                HashMap::<String, JsonPrimitive>::new(),
                "yaakcli_request_schema",
            )
            .await
        {
            Ok(config) => config,
            Err(error) => {
                eprintln!(
                    "Warning: Failed to load auth config for strategy '{}': {}",
                    summary.name, error
                );
                continue;
            }
        };

        auth_variants.push(auth_variant_schema(&summary.name, &summary.label, &config.args));
    }

    // If the schema has no object-valued `properties`, leave it as-is.
    let Some(properties) = schema.get_mut("properties").and_then(Value::as_object_mut) else {
        return Ok(());
    };

    let Some(auth_schema) = properties.get_mut("authentication") else {
        return Ok(());
    };

    if !auth_variants.is_empty() {
        // Keep the original authentication schema as the first `oneOf`
        // alternative so existing/unknown auth objects still validate.
        let mut one_of = vec![auth_schema.clone()];
        one_of.extend(auth_variants);
        *auth_schema = json!({ "oneOf": one_of });
    }

    Ok(())
}
|
||||
|
||||
fn auth_variant_schema(auth_name: &str, auth_label: &str, args: &[FormInput]) -> Value {
|
||||
let mut properties = Map::new();
|
||||
let mut required = Vec::new();
|
||||
for input in args {
|
||||
add_input_schema(input, &mut properties, &mut required);
|
||||
}
|
||||
|
||||
let mut schema = json!({
|
||||
"title": auth_label,
|
||||
"description": format!("Authentication values for strategy '{}'", auth_name),
|
||||
"type": "object",
|
||||
"properties": properties,
|
||||
"additionalProperties": true
|
||||
});
|
||||
|
||||
if !required.is_empty() {
|
||||
schema["required"] = json!(required);
|
||||
}
|
||||
|
||||
schema
|
||||
}
|
||||
|
||||
/// Recursively translate one plugin form input into JSON-Schema properties.
///
/// Container inputs (Accordion/HStack/Banner) contribute their children
/// directly into the same property map; purely presentational inputs
/// (Markdown) contribute nothing.
fn add_input_schema(
    input: &FormInput,
    properties: &mut Map<String, Value>,
    required: &mut Vec<String>,
) {
    match input {
        // Plain text; `writeOnly` marks password fields so tooling can mask them.
        FormInput::Text(v) => add_base_schema(
            &v.base,
            json!({
                "type": "string",
                "writeOnly": v.password.unwrap_or(false),
            }),
            properties,
            required,
        ),
        // Multi-line editor; its language is carried as a vendor extension key.
        FormInput::Editor(v) => add_base_schema(
            &v.base,
            json!({
                "type": "string",
                "x-editorLanguage": v.language.clone(),
            }),
            properties,
            required,
        ),
        // Dropdown: restrict the value to the declared option values.
        FormInput::Select(v) => {
            let options: Vec<Value> =
                v.options.iter().map(|o| Value::String(o.value.clone())).collect();
            add_base_schema(
                &v.base,
                json!({
                    "type": "string",
                    "enum": options,
                }),
                properties,
                required,
            );
        }
        FormInput::Checkbox(v) => {
            add_base_schema(&v.base, json!({ "type": "boolean" }), properties, required);
        }
        // File inputs are path strings; multi-select becomes an array of them.
        FormInput::File(v) => {
            if v.multiple.unwrap_or(false) {
                add_base_schema(
                    &v.base,
                    json!({
                        "type": "array",
                        "items": { "type": "string" },
                    }),
                    properties,
                    required,
                );
            } else {
                add_base_schema(&v.base, json!({ "type": "string" }), properties, required);
            }
        }
        // Presumably stored as the referenced request's ID string — confirm.
        FormInput::HttpRequest(v) => {
            add_base_schema(&v.base, json!({ "type": "string" }), properties, required);
        }
        // Free-form key/value pairs: an open object.
        FormInput::KeyValue(v) => {
            add_base_schema(
                &v.base,
                json!({
                    "type": "object",
                    "additionalProperties": true,
                }),
                properties,
                required,
            );
        }
        // Layout containers below: flatten children into the same map.
        FormInput::Accordion(v) => {
            if let Some(children) = &v.inputs {
                for child in children {
                    add_input_schema(child, properties, required);
                }
            }
        }
        FormInput::HStack(v) => {
            if let Some(children) = &v.inputs {
                for child in children {
                    add_input_schema(child, properties, required);
                }
            }
        }
        FormInput::Banner(v) => {
            if let Some(children) = &v.inputs {
                for child in children {
                    add_input_schema(child, properties, required);
                }
            }
        }
        // Markdown is display-only and has no value to validate.
        FormInput::Markdown(_) => {}
    }
}
|
||||
|
||||
fn add_base_schema(
|
||||
base: &FormInputBase,
|
||||
mut schema: Value,
|
||||
properties: &mut Map<String, Value>,
|
||||
required: &mut Vec<String>,
|
||||
) {
|
||||
if base.hidden.unwrap_or(false) || base.name.trim().is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(description) = &base.description {
|
||||
schema["description"] = Value::String(description.clone());
|
||||
}
|
||||
if let Some(label) = &base.label {
|
||||
schema["title"] = Value::String(label.clone());
|
||||
}
|
||||
if let Some(default_value) = &base.default_value {
|
||||
schema["default"] = Value::String(default_value.clone());
|
||||
}
|
||||
|
||||
let name = base.name.clone();
|
||||
properties.insert(name.clone(), schema);
|
||||
if !base.optional.unwrap_or(false) {
|
||||
required.push(name);
|
||||
}
|
||||
}
|
||||
|
||||
/// Create an HTTP request either from simple flags (name/method/url) or from
/// a full JSON payload — never a mix of both.
///
/// The positional `workspace_id` doubles as JSON shorthand: when it looks
/// like JSON (per `is_json_shorthand`) it is treated as the payload itself.
fn create(
    ctx: &CliContext,
    workspace_id: Option<String>,
    name: Option<String>,
    method: Option<String>,
    url: Option<String>,
    json: Option<String>,
) -> CommandResult {
    // A real workspace ID plus --json would be ambiguous (the JSON carries
    // its own workspaceId), so reject that combination up front.
    if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
        return Err("request create cannot combine workspace_id with --json payload".to_string());
    }

    let payload = parse_optional_json(
        json,
        workspace_id.clone().filter(|v| is_json_shorthand(v)),
        "request create",
    )?;

    if let Some(payload) = payload {
        if name.is_some() || method.is_some() || url.is_some() {
            return Err("request create cannot combine simple flags with JSON payload".to_string());
        }

        // Presumably rejects a caller-supplied "id" so create cannot turn
        // into an implicit update — see validate_create_id.
        validate_create_id(&payload, "request")?;
        let request: HttpRequest = serde_json::from_value(payload)
            .map_err(|e| format!("Failed to parse request create JSON: {e}"))?;

        if request.workspace_id.is_empty() {
            return Err("request create JSON requires non-empty \"workspaceId\"".to_string());
        }

        let created = ctx
            .db()
            .upsert_http_request(&request, &UpdateSource::Sync)
            .map_err(|e| format!("Failed to create request: {e}"))?;

        println!("Created request: {}", created.id);
        return Ok(());
    }

    // Simple-flag path: workspace ID is mandatory, everything else defaults
    // (empty name/url, method GET, uppercased for consistency).
    let workspace_id = workspace_id.ok_or_else(|| {
        "request create requires workspace_id unless JSON payload is provided".to_string()
    })?;
    let name = name.unwrap_or_default();
    let url = url.unwrap_or_default();
    let method = method.unwrap_or_else(|| "GET".to_string());

    let request = HttpRequest {
        workspace_id,
        name,
        method: method.to_uppercase(),
        url,
        ..Default::default()
    };

    let created = ctx
        .db()
        .upsert_http_request(&request, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to create request: {e}"))?;

    println!("Created request: {}", created.id);
    Ok(())
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||
let patch = parse_required_json(json, json_input, "request update")?;
|
||||
let id = require_id(&patch, "request update")?;
|
||||
|
||||
let existing = ctx
|
||||
.db()
|
||||
.get_http_request(&id)
|
||||
.map_err(|e| format!("Failed to get request for update: {e}"))?;
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "request update")?;
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_http_request(&updated, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to update request: {e}"))?;
|
||||
|
||||
println!("Updated request: {}", saved.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn show(ctx: &CliContext, request_id: &str) -> CommandResult {
|
||||
let request =
|
||||
ctx.db().get_http_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
|
||||
let output = serde_json::to_string_pretty(&request)
|
||||
.map_err(|e| format!("Failed to serialize request: {e}"))?;
|
||||
println!("{output}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, request_id: &str, yes: bool) -> CommandResult {
|
||||
if !yes && !confirm_delete("request", request_id) {
|
||||
println!("Aborted");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_http_request_by_id(request_id, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to delete request: {e}"))?;
|
||||
println!("Deleted request: {}", deleted.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send a request by ID and print response in the same format as legacy `send`.
///
/// Resolves the ID across all request kinds; only HTTP sends are wired up,
/// so gRPC and WebSocket requests fail with an explicit error.
pub async fn send_request_by_id(
    ctx: &CliContext,
    request_id: &str,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let request =
        ctx.db().get_any_request(request_id).map_err(|e| format!("Failed to get request: {e}"))?;
    match request {
        AnyRequest::HttpRequest(http_request) => {
            send_http_request_by_id(
                ctx,
                &http_request.id,
                &http_request.workspace_id,
                environment,
                verbose,
            )
            .await
        }
        AnyRequest::GrpcRequest(_) => {
            Err("gRPC request send is not implemented yet in yaak-cli".to_string())
        }
        AnyRequest::WebsocketRequest(_) => {
            Err("WebSocket request send is not implemented yet in yaak-cli".to_string())
        }
    }
}
|
||||
|
||||
/// Send one HTTP request through the plugin pipeline and print the status
/// line, headers (verbose only), and body to stdout.
///
/// Errors are returned as human-readable strings for the CLI error path.
async fn send_http_request_by_id(
    ctx: &CliContext,
    request_id: &str,
    workspace_id: &str,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let plugin_context = PluginContext::new(None, Some(workspace_id.to_string()));

    // Drain send-progress events on a background task; they are printed only
    // in verbose mode but must be consumed either way so the sender never
    // blocks on a full channel.
    let (event_tx, mut event_rx) = mpsc::channel(100);
    let event_handle = tokio::spawn(async move {
        while let Some(event) = event_rx.recv().await {
            if verbose {
                println!("{}", event);
            }
        }
    });
    let response_dir = ctx.data_dir().join("responses");

    let result = send_http_request_by_id_with_plugins(SendHttpRequestByIdWithPluginsParams {
        query_manager: ctx.query_manager(),
        blob_manager: ctx.blob_manager(),
        request_id,
        environment_id: environment,
        update_source: UpdateSource::Sync,
        cookie_jar_id: None,
        response_dir: &response_dir,
        emit_events_to: Some(event_tx),
        plugin_manager: ctx.plugin_manager(),
        encryption_manager: ctx.encryption_manager.clone(),
        plugin_context: &plugin_context,
        cancelled_rx: None,
        connection_manager: None,
    })
    .await;

    // `event_tx` was moved into the params above, so once the send finishes
    // the channel closes and the event task exits on its own; awaiting it
    // here just flushes any remaining verbose output before printing.
    let _ = event_handle.await;
    let result = result.map_err(|e| e.to_string())?;

    // Blank line separates the verbose event stream from the response.
    if verbose {
        println!();
    }
    println!(
        "HTTP {} {}",
        result.response.status,
        result.response.status_reason.as_deref().unwrap_or("")
    );
    if verbose {
        for header in &result.response.headers {
            println!("{}: {}", header.name, header.value);
        }
        println!();
    }
    // Non-UTF-8 bodies are reported as an error rather than printed raw.
    let body = String::from_utf8(result.response_body)
        .map_err(|e| format!("Failed to read response body: {e}"))?;
    println!("{}", body);
    Ok(())
}
|
||||
184
crates-cli/yaak-cli/src/commands/send.rs
Normal file
184
crates-cli/yaak-cli/src/commands/send.rs
Normal file
@@ -0,0 +1,184 @@
|
||||
use crate::cli::SendArgs;
|
||||
use crate::commands::request;
|
||||
use crate::context::CliContext;
|
||||
use futures::future::join_all;
|
||||
|
||||
/// How a batch of requests (folder or workspace target) is dispatched.
enum ExecutionMode {
    // One at a time, in listing order; the only mode that honors fail-fast.
    Sequential,
    // All at once via join_all.
    Parallel,
}
|
||||
|
||||
pub async fn run(
|
||||
ctx: &CliContext,
|
||||
args: SendArgs,
|
||||
environment: Option<&str>,
|
||||
verbose: bool,
|
||||
) -> i32 {
|
||||
match send_target(ctx, args, environment, verbose).await {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve `args.id` — in precedence order: request, folder, workspace — and
/// send the matching request(s).
///
/// A folder or workspace target fans out to every request it contains; an
/// empty container is reported and treated as success.
async fn send_target(
    ctx: &CliContext,
    args: SendArgs,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let mode = if args.parallel { ExecutionMode::Parallel } else { ExecutionMode::Sequential };

    // A direct request ID takes precedence over folder/workspace IDs.
    if ctx.db().get_any_request(&args.id).is_ok() {
        return request::send_request_by_id(ctx, &args.id, environment, verbose).await;
    }

    if ctx.db().get_folder(&args.id).is_ok() {
        let request_ids = collect_folder_request_ids(ctx, &args.id)?;
        if request_ids.is_empty() {
            println!("No requests found in folder {}", args.id);
            return Ok(());
        }
        return send_many(ctx, request_ids, mode, args.fail_fast, environment, verbose).await;
    }

    if ctx.db().get_workspace(&args.id).is_ok() {
        let request_ids = collect_workspace_request_ids(ctx, &args.id)?;
        if request_ids.is_empty() {
            println!("No requests found in workspace {}", args.id);
            return Ok(());
        }
        return send_many(ctx, request_ids, mode, args.fail_fast, environment, verbose).await;
    }

    Err(format!("Could not resolve ID '{}' as request, folder, or workspace", args.id))
}
|
||||
|
||||
fn collect_folder_request_ids(ctx: &CliContext, folder_id: &str) -> Result<Vec<String>, String> {
|
||||
let mut ids = Vec::new();
|
||||
|
||||
let mut http_ids = ctx
|
||||
.db()
|
||||
.list_http_requests_for_folder_recursive(folder_id)
|
||||
.map_err(|e| format!("Failed to list HTTP requests in folder: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut http_ids);
|
||||
|
||||
let mut grpc_ids = ctx
|
||||
.db()
|
||||
.list_grpc_requests_for_folder_recursive(folder_id)
|
||||
.map_err(|e| format!("Failed to list gRPC requests in folder: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut grpc_ids);
|
||||
|
||||
let mut websocket_ids = ctx
|
||||
.db()
|
||||
.list_websocket_requests_for_folder_recursive(folder_id)
|
||||
.map_err(|e| format!("Failed to list WebSocket requests in folder: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut websocket_ids);
|
||||
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
fn collect_workspace_request_ids(
|
||||
ctx: &CliContext,
|
||||
workspace_id: &str,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let mut ids = Vec::new();
|
||||
|
||||
let mut http_ids = ctx
|
||||
.db()
|
||||
.list_http_requests(workspace_id)
|
||||
.map_err(|e| format!("Failed to list HTTP requests in workspace: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut http_ids);
|
||||
|
||||
let mut grpc_ids = ctx
|
||||
.db()
|
||||
.list_grpc_requests(workspace_id)
|
||||
.map_err(|e| format!("Failed to list gRPC requests in workspace: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut grpc_ids);
|
||||
|
||||
let mut websocket_ids = ctx
|
||||
.db()
|
||||
.list_websocket_requests(workspace_id)
|
||||
.map_err(|e| format!("Failed to list WebSocket requests in workspace: {e}"))?
|
||||
.into_iter()
|
||||
.map(|r| r.id)
|
||||
.collect::<Vec<_>>();
|
||||
ids.append(&mut websocket_ids);
|
||||
|
||||
Ok(ids)
|
||||
}
|
||||
|
||||
/// Send a batch of requests, print a success/failure summary, and return an
/// error if any request failed.
///
/// Sequential mode stops at the first failure when `fail_fast` is set.
/// NOTE(review): parallel mode ignores `fail_fast` — all tasks run to
/// completion regardless. Confirm whether that is intended.
async fn send_many(
    ctx: &CliContext,
    request_ids: Vec<String>,
    mode: ExecutionMode,
    fail_fast: bool,
    environment: Option<&str>,
    verbose: bool,
) -> Result<(), String> {
    let mut success_count = 0usize;
    // Each failure keeps its request ID for the per-request error listing.
    let mut failures: Vec<(String, String)> = Vec::new();

    match mode {
        ExecutionMode::Sequential => {
            for request_id in request_ids {
                match request::send_request_by_id(ctx, &request_id, environment, verbose).await {
                    Ok(()) => success_count += 1,
                    Err(error) => {
                        failures.push((request_id, error));
                        if fail_fast {
                            break;
                        }
                    }
                }
            }
        }
        ExecutionMode::Parallel => {
            // Build all futures first, then await them concurrently.
            let tasks = request_ids
                .iter()
                .map(|request_id| async move {
                    (
                        request_id.clone(),
                        request::send_request_by_id(ctx, request_id, environment, verbose).await,
                    )
                })
                .collect::<Vec<_>>();

            for (request_id, result) in join_all(tasks).await {
                match result {
                    Ok(()) => success_count += 1,
                    Err(error) => failures.push((request_id, error)),
                }
            }
        }
    }

    let failure_count = failures.len();
    println!("Send summary: {success_count} succeeded, {failure_count} failed");

    if failure_count == 0 {
        return Ok(());
    }

    for (request_id, error) in failures {
        eprintln!("  {}: {}", request_id, error);
    }
    Err("One or more requests failed".to_string())
}
|
||||
123
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
123
crates-cli/yaak-cli/src/commands/workspace.rs
Normal file
@@ -0,0 +1,123 @@
|
||||
use crate::cli::{WorkspaceArgs, WorkspaceCommands};
|
||||
use crate::context::CliContext;
|
||||
use crate::utils::confirm::confirm_delete;
|
||||
use crate::utils::json::{
|
||||
apply_merge_patch, parse_optional_json, parse_required_json, require_id, validate_create_id,
|
||||
};
|
||||
use yaak_models::models::Workspace;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
type CommandResult<T = ()> = std::result::Result<T, String>;
|
||||
|
||||
pub fn run(ctx: &CliContext, args: WorkspaceArgs) -> i32 {
|
||||
let result = match args.command {
|
||||
WorkspaceCommands::List => list(ctx),
|
||||
WorkspaceCommands::Show { workspace_id } => show(ctx, &workspace_id),
|
||||
WorkspaceCommands::Create { name, json, json_input } => create(ctx, name, json, json_input),
|
||||
WorkspaceCommands::Update { json, json_input } => update(ctx, json, json_input),
|
||||
WorkspaceCommands::Delete { workspace_id, yes } => delete(ctx, &workspace_id, yes),
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(()) => 0,
|
||||
Err(error) => {
|
||||
eprintln!("Error: {error}");
|
||||
1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Print one `id - name` line per workspace.
fn list(ctx: &CliContext) -> CommandResult {
    let workspaces =
        ctx.db().list_workspaces().map_err(|e| format!("Failed to list workspaces: {e}"))?;
    if workspaces.is_empty() {
        println!("No workspaces found");
        return Ok(());
    }
    for workspace in &workspaces {
        println!("{} - {}", workspace.id, workspace.name);
    }
    Ok(())
}
|
||||
|
||||
fn show(ctx: &CliContext, workspace_id: &str) -> CommandResult {
|
||||
let workspace = ctx
|
||||
.db()
|
||||
.get_workspace(workspace_id)
|
||||
.map_err(|e| format!("Failed to get workspace: {e}"))?;
|
||||
let output = serde_json::to_string_pretty(&workspace)
|
||||
.map_err(|e| format!("Failed to serialize workspace: {e}"))?;
|
||||
println!("{output}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a workspace either from `--name` or from a full JSON payload —
/// never both.
fn create(
    ctx: &CliContext,
    name: Option<String>,
    json: Option<String>,
    json_input: Option<String>,
) -> CommandResult {
    let payload = parse_optional_json(json, json_input, "workspace create")?;

    if let Some(payload) = payload {
        if name.is_some() {
            return Err("workspace create cannot combine --name with JSON payload".to_string());
        }

        // Presumably rejects a caller-supplied "id" so create cannot turn
        // into an implicit update — see validate_create_id.
        validate_create_id(&payload, "workspace")?;
        let workspace: Workspace = serde_json::from_value(payload)
            .map_err(|e| format!("Failed to parse workspace create JSON: {e}"))?;

        let created = ctx
            .db()
            .upsert_workspace(&workspace, &UpdateSource::Sync)
            .map_err(|e| format!("Failed to create workspace: {e}"))?;
        println!("Created workspace: {}", created.id);
        return Ok(());
    }

    // Simple path: --name is mandatory, every other field defaults.
    let name = name.ok_or_else(|| {
        "workspace create requires --name unless JSON payload is provided".to_string()
    })?;

    let workspace = Workspace { name, ..Default::default() };
    let created = ctx
        .db()
        .upsert_workspace(&workspace, &UpdateSource::Sync)
        .map_err(|e| format!("Failed to create workspace: {e}"))?;
    println!("Created workspace: {}", created.id);
    Ok(())
}
|
||||
|
||||
fn update(ctx: &CliContext, json: Option<String>, json_input: Option<String>) -> CommandResult {
|
||||
let patch = parse_required_json(json, json_input, "workspace update")?;
|
||||
let id = require_id(&patch, "workspace update")?;
|
||||
|
||||
let existing = ctx
|
||||
.db()
|
||||
.get_workspace(&id)
|
||||
.map_err(|e| format!("Failed to get workspace for update: {e}"))?;
|
||||
let updated = apply_merge_patch(&existing, &patch, &id, "workspace update")?;
|
||||
|
||||
let saved = ctx
|
||||
.db()
|
||||
.upsert_workspace(&updated, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to update workspace: {e}"))?;
|
||||
|
||||
println!("Updated workspace: {}", saved.id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn delete(ctx: &CliContext, workspace_id: &str, yes: bool) -> CommandResult {
|
||||
if !yes && !confirm_delete("workspace", workspace_id) {
|
||||
println!("Aborted");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let deleted = ctx
|
||||
.db()
|
||||
.delete_workspace_by_id(workspace_id, &UpdateSource::Sync)
|
||||
.map_err(|e| format!("Failed to delete workspace: {e}"))?;
|
||||
println!("Deleted workspace: {}", deleted.id);
|
||||
Ok(())
|
||||
}
|
||||
115
crates-cli/yaak-cli/src/context.rs
Normal file
115
crates-cli/yaak-cli/src/context.rs
Normal file
@@ -0,0 +1,115 @@
|
||||
use crate::plugin_events::CliPluginEventBridge;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::blob_manager::BlobManager;
|
||||
use yaak_models::db_context::DbContext;
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_plugins::events::PluginContext;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
/// Shared state for one CLI invocation: database handles, encryption, and
/// (optionally) a running plugin system.
pub struct CliContext {
    // Root directory holding the sqlite databases, blobs, and plugin dirs.
    data_dir: PathBuf,
    query_manager: QueryManager,
    blob_manager: BlobManager,
    pub encryption_manager: Arc<EncryptionManager>,
    // None when the command was started without plugin support.
    plugin_manager: Option<Arc<PluginManager>>,
    // Taken (and shut down) at most once, in `shutdown`.
    plugin_event_bridge: Mutex<Option<CliPluginEventBridge>>,
}
|
||||
|
||||
impl CliContext {
    /// Open the databases under `data_dir`, set up encryption, and — when
    /// `with_plugins` is set — boot the plugin runtime plus the event bridge.
    ///
    /// Panics if the database cannot be initialized.
    pub async fn initialize(data_dir: PathBuf, app_id: &str, with_plugins: bool) -> Self {
        let db_path = data_dir.join("db.sqlite");
        let blob_path = data_dir.join("blobs.sqlite");

        let (query_manager, blob_manager, _rx) = yaak_models::init_standalone(&db_path, &blob_path)
            .expect("Failed to initialize database");

        let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));

        let plugin_manager = if with_plugins {
            let vendored_plugin_dir = data_dir.join("vendored-plugins");
            let installed_plugin_dir = data_dir.join("installed-plugins");
            // assumes `node` is resolvable on PATH — TODO confirm
            let node_bin_path = PathBuf::from("node");

            // The plugin-runtime entry point can be overridden via env var;
            // otherwise fall back to the repo-relative vendored bundle.
            let plugin_runtime_main =
                std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
                    PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                        .join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
                });

            let plugin_manager = Arc::new(
                PluginManager::new(
                    vendored_plugin_dir,
                    installed_plugin_dir,
                    node_bin_path,
                    plugin_runtime_main,
                    false,
                )
                .await,
            );

            // Plugin init failures are non-fatal: warn and keep going.
            let plugins = query_manager.connect().list_plugins().unwrap_or_default();
            if !plugins.is_empty() {
                let errors = plugin_manager
                    .initialize_all_plugins(plugins, &PluginContext::new_empty())
                    .await;
                for (plugin_dir, error_msg) in errors {
                    eprintln!(
                        "Warning: Failed to initialize plugin '{}': {}",
                        plugin_dir, error_msg
                    );
                }
            }

            Some(plugin_manager)
        } else {
            None
        };

        // The event bridge only exists alongside a plugin manager.
        let plugin_event_bridge = if let Some(plugin_manager) = &plugin_manager {
            Some(CliPluginEventBridge::start(plugin_manager.clone(), query_manager.clone()).await)
        } else {
            None
        };

        Self {
            data_dir,
            query_manager,
            blob_manager,
            encryption_manager,
            plugin_manager,
            plugin_event_bridge: Mutex::new(plugin_event_bridge),
        }
    }

    /// Root data directory for this CLI invocation.
    pub fn data_dir(&self) -> &Path {
        &self.data_dir
    }

    /// Open a fresh database context.
    pub fn db(&self) -> DbContext<'_> {
        self.query_manager.connect()
    }

    pub fn query_manager(&self) -> &QueryManager {
        &self.query_manager
    }

    pub fn blob_manager(&self) -> &BlobManager {
        &self.blob_manager
    }

    /// Panics when the context was built with `with_plugins = false`.
    pub fn plugin_manager(&self) -> Arc<PluginManager> {
        self.plugin_manager.clone().expect("Plugin manager was not initialized for this command")
    }

    /// Tear down the event bridge (at most once) and terminate all plugins.
    pub async fn shutdown(&self) {
        if let Some(plugin_manager) = &self.plugin_manager {
            if let Some(plugin_event_bridge) = self.plugin_event_bridge.lock().await.take() {
                plugin_event_bridge.shutdown(plugin_manager).await;
            }
            plugin_manager.terminate().await;
        }
    }
}
|
||||
@@ -1,409 +1,92 @@
|
||||
use clap::{Parser, Subcommand};
|
||||
use log::info;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
|
||||
mod cli;
|
||||
mod commands;
|
||||
mod context;
|
||||
mod plugin_events;
|
||||
mod ui;
|
||||
mod utils;
|
||||
|
||||
// Top-level argument parser for the binary. NOTE(review): the diff context
// suggests this inline parser may be superseded by the `cli` module declared
// above — confirm it is still referenced before extending it.
// (The `///` lines below are clap help text and must stay as-is.)
#[derive(Parser)]
#[command(name = "yaakcli")]
#[command(about = "Yaak CLI - API client from the command line")]
struct Cli {
    /// Use a custom data directory
    #[arg(long, global = true)]
    data_dir: Option<PathBuf>,

    /// Environment ID to use for variable substitution
    #[arg(long, short, global = true)]
    environment: Option<String>,

    /// Enable verbose logging
    #[arg(long, short, global = true)]
    verbose: bool,

    #[command(subcommand)]
    command: Commands,
}
|
||||
|
||||
// Subcommand set for the inline parser above. NOTE(review): appears to be
// the legacy command surface; confirm against the `cli` module before
// changing. (The `///` lines are clap help text and must stay as-is.)
#[derive(Subcommand)]
enum Commands {
    /// List all workspaces
    Workspaces,
    /// List requests in a workspace
    Requests {
        /// Workspace ID
        workspace_id: String,
    },
    /// Send an HTTP request by ID
    Send {
        /// Request ID
        request_id: String,
    },
    /// Send a GET request to a URL
    Get {
        /// URL to request
        url: String,
    },
    /// Create a new HTTP request
    Create {
        /// Workspace ID
        workspace_id: String,
        /// Request name
        #[arg(short, long)]
        name: String,
        /// HTTP method
        #[arg(short, long, default_value = "GET")]
        method: String,
        /// URL
        #[arg(short, long)]
        url: String,
    },
}
|
||||
|
||||
/// Render an HTTP request with template variables and plugin functions
|
||||
async fn render_http_request(
|
||||
r: &HttpRequest,
|
||||
environment_chain: Vec<yaak_models::models::Environment>,
|
||||
cb: &PluginTemplateCallback,
|
||||
opt: &RenderOptions,
|
||||
) -> yaak_templates::error::Result<HttpRequest> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for p in r.url_parameters.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for p in r.headers.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (k, v) in r.body.clone() {
|
||||
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
match r.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(tmpl)) => {
|
||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
info!(
|
||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (k, v) in r.authentication.clone() {
|
||||
if k == "disabled" {
|
||||
auth.insert(k, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
}
|
||||
}
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
|
||||
|
||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
||||
}
|
||||
use clap::Parser;
|
||||
use cli::{Cli, Commands, RequestCommands};
|
||||
use context::CliContext;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let cli = Cli::parse();
|
||||
let Cli { data_dir, environment, verbose, command } = Cli::parse();
|
||||
|
||||
// Initialize logging
|
||||
if cli.verbose {
|
||||
if verbose {
|
||||
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
|
||||
}
|
||||
|
||||
// Use the same app_id for both data directory and keyring
|
||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||
|
||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
||||
let data_dir = data_dir.unwrap_or_else(|| {
|
||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||
});
|
||||
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
let blob_path = data_dir.join("blobs.sqlite");
|
||||
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize database");
|
||||
|
||||
let db = query_manager.connect();
|
||||
|
||||
// Initialize encryption manager for secure() template function
|
||||
// Use the same app_id as the Tauri app for keyring access
|
||||
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||
|
||||
// Initialize plugin manager for template functions
|
||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||
|
||||
// Use system node for CLI (must be in PATH)
|
||||
let node_bin_path = PathBuf::from("node");
|
||||
|
||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
||||
let plugin_runtime_main =
|
||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||
// Development fallback: look relative to crate root
|
||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||
});
|
||||
|
||||
// Create plugin manager (plugins may not be available in CLI context)
|
||||
let plugin_manager = Arc::new(
|
||||
PluginManager::new(
|
||||
vendored_plugin_dir,
|
||||
installed_plugin_dir,
|
||||
node_bin_path,
|
||||
plugin_runtime_main,
|
||||
false,
|
||||
)
|
||||
.await,
|
||||
let needs_context = matches!(
|
||||
&command,
|
||||
Commands::Send(_)
|
||||
| Commands::Workspace(_)
|
||||
| Commands::Request(_)
|
||||
| Commands::Folder(_)
|
||||
| Commands::Environment(_)
|
||||
);
|
||||
|
||||
// Initialize plugins from database
|
||||
let plugins = db.list_plugins().unwrap_or_default();
|
||||
if !plugins.is_empty() {
|
||||
let errors =
|
||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
||||
for (plugin_dir, error_msg) in errors {
|
||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
||||
}
|
||||
}
|
||||
let needs_plugins = matches!(
|
||||
&command,
|
||||
Commands::Send(_)
|
||||
| Commands::Request(cli::RequestArgs {
|
||||
command: RequestCommands::Send { .. } | RequestCommands::Schema { .. },
|
||||
})
|
||||
);
|
||||
|
||||
match cli.command {
|
||||
Commands::Workspaces => {
|
||||
let workspaces = db.list_workspaces().expect("Failed to list workspaces");
|
||||
if workspaces.is_empty() {
|
||||
println!("No workspaces found");
|
||||
} else {
|
||||
for ws in workspaces {
|
||||
println!("{} - {}", ws.id, ws.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Requests { workspace_id } => {
|
||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
||||
if requests.is_empty() {
|
||||
println!("No requests found in workspace {}", workspace_id);
|
||||
} else {
|
||||
for req in requests {
|
||||
println!("{} - {} {}", req.id, req.method, req.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Commands::Send { request_id } => {
|
||||
let request = db.get_http_request(&request_id).expect("Failed to get request");
|
||||
let context = if needs_context {
|
||||
Some(CliContext::initialize(data_dir, app_id, needs_plugins).await)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Resolve environment chain for variable substitution
|
||||
let environment_chain = db
|
||||
.resolve_environments(
|
||||
&request.workspace_id,
|
||||
request.folder_id.as_deref(),
|
||||
cli.environment.as_deref(),
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
// Create template callback with plugin support
|
||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
||||
let template_callback = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Render templates in the request
|
||||
let rendered_request = render_http_request(
|
||||
&request,
|
||||
environment_chain,
|
||||
&template_callback,
|
||||
&RenderOptions::throw(),
|
||||
let exit_code = match command {
|
||||
Commands::Auth(args) => commands::auth::run(args).await,
|
||||
Commands::Plugin(args) => commands::plugin::run(args).await,
|
||||
Commands::Build(args) => commands::plugin::run_build(args).await,
|
||||
Commands::Dev(args) => commands::plugin::run_dev(args).await,
|
||||
Commands::Send(args) => {
|
||||
commands::send::run(
|
||||
context.as_ref().expect("context initialized for send"),
|
||||
args,
|
||||
environment.as_deref(),
|
||||
verbose,
|
||||
)
|
||||
.await
|
||||
.expect("Failed to render request templates");
|
||||
|
||||
if cli.verbose {
|
||||
println!("> {} {}", rendered_request.method, rendered_request.url);
|
||||
}
|
||||
|
||||
// Convert to sendable request
|
||||
let sendable = SendableHttpRequest::from_http_request(
|
||||
&rendered_request,
|
||||
SendableHttpRequestOptions::default(),
|
||||
}
|
||||
Commands::Workspace(args) => commands::workspace::run(
|
||||
context.as_ref().expect("context initialized for workspace"),
|
||||
args,
|
||||
),
|
||||
Commands::Request(args) => {
|
||||
commands::request::run(
|
||||
context.as_ref().expect("context initialized for request"),
|
||||
args,
|
||||
environment.as_deref(),
|
||||
verbose,
|
||||
)
|
||||
.await
|
||||
.expect("Failed to build request");
|
||||
|
||||
// Create event channel for progress
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to print events if verbose
|
||||
let verbose = cli.verbose;
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
// Drain events silently
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
|
||||
// Wait for event handler to finish
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
// Print response
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!(
|
||||
"HTTP {} {}",
|
||||
response.status,
|
||||
response.status_reason.as_deref().unwrap_or("")
|
||||
);
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Print body
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Get { url } => {
|
||||
if cli.verbose {
|
||||
println!("> GET {}", url);
|
||||
}
|
||||
|
||||
// Build a simple GET request
|
||||
let sendable = SendableHttpRequest {
|
||||
url: url.clone(),
|
||||
method: "GET".to_string(),
|
||||
headers: vec![],
|
||||
body: None,
|
||||
options: SendableHttpRequestOptions::default(),
|
||||
};
|
||||
|
||||
// Create event channel for progress
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to print events if verbose
|
||||
let verbose = cli.verbose;
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
|
||||
// Print response
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!(
|
||||
"HTTP {} {}",
|
||||
response.status,
|
||||
response.status_reason.as_deref().unwrap_or("")
|
||||
);
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Print body
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
Commands::Folder(args) => {
|
||||
commands::folder::run(context.as_ref().expect("context initialized for folder"), args)
|
||||
}
|
||||
Commands::Create { workspace_id, name, method, url } => {
|
||||
let request = HttpRequest {
|
||||
workspace_id,
|
||||
name,
|
||||
method: method.to_uppercase(),
|
||||
url,
|
||||
..Default::default()
|
||||
};
|
||||
Commands::Environment(args) => commands::environment::run(
|
||||
context.as_ref().expect("context initialized for environment"),
|
||||
args,
|
||||
),
|
||||
};
|
||||
|
||||
let created = db
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to create request");
|
||||
|
||||
println!("Created request: {}", created.id);
|
||||
}
|
||||
if let Some(context) = &context {
|
||||
context.shutdown().await;
|
||||
}
|
||||
|
||||
// Terminate plugin manager gracefully
|
||||
plugin_manager.terminate().await;
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code);
|
||||
}
|
||||
}
|
||||
|
||||
212
crates-cli/yaak-cli/src/plugin_events.rs
Normal file
212
crates-cli/yaak-cli/src/plugin_events.rs
Normal file
@@ -0,0 +1,212 @@
|
||||
use std::sync::Arc;
|
||||
use tokio::task::JoinHandle;
|
||||
use yaak::plugin_events::{
|
||||
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
|
||||
};
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_plugins::events::{
|
||||
EmptyPayload, ErrorResponse, InternalEvent, InternalEventPayload, ListOpenWorkspacesResponse,
|
||||
WorkspaceInfo,
|
||||
};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
pub struct CliPluginEventBridge {
|
||||
rx_id: String,
|
||||
task: JoinHandle<()>,
|
||||
}
|
||||
|
||||
impl CliPluginEventBridge {
|
||||
pub async fn start(plugin_manager: Arc<PluginManager>, query_manager: QueryManager) -> Self {
|
||||
let (rx_id, mut rx) = plugin_manager.subscribe("cli").await;
|
||||
let rx_id_for_task = rx_id.clone();
|
||||
let pm = plugin_manager.clone();
|
||||
|
||||
let task = tokio::spawn(async move {
|
||||
while let Some(event) = rx.recv().await {
|
||||
// Events with reply IDs are replies to app-originated requests.
|
||||
if event.reply_id.is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(plugin_handle) = pm.get_plugin_by_ref_id(&event.plugin_ref_id).await
|
||||
else {
|
||||
eprintln!(
|
||||
"Warning: Ignoring plugin event with unknown plugin ref '{}'",
|
||||
event.plugin_ref_id
|
||||
);
|
||||
continue;
|
||||
};
|
||||
|
||||
let plugin_name = plugin_handle.info().name;
|
||||
let Some(reply_payload) = build_plugin_reply(&query_manager, &event, &plugin_name)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if let Err(err) = pm.reply(&event, &reply_payload).await {
|
||||
eprintln!("Warning: Failed replying to plugin event: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
pm.unsubscribe(&rx_id_for_task).await;
|
||||
});
|
||||
|
||||
Self { rx_id, task }
|
||||
}
|
||||
|
||||
pub async fn shutdown(self, plugin_manager: &PluginManager) {
|
||||
plugin_manager.unsubscribe(&self.rx_id).await;
|
||||
self.task.abort();
|
||||
let _ = self.task.await;
|
||||
}
|
||||
}
|
||||
|
||||
fn build_plugin_reply(
|
||||
query_manager: &QueryManager,
|
||||
event: &InternalEvent,
|
||||
plugin_name: &str,
|
||||
) -> Option<InternalEventPayload> {
|
||||
match handle_shared_plugin_event(
|
||||
query_manager,
|
||||
&event.payload,
|
||||
SharedPluginEventContext {
|
||||
plugin_name,
|
||||
workspace_id: event.context.workspace_id.as_deref(),
|
||||
},
|
||||
) {
|
||||
GroupedPluginEvent::Handled(payload) => payload,
|
||||
GroupedPluginEvent::ToHandle(host_request) => match host_request {
|
||||
HostRequest::ErrorResponse(resp) => {
|
||||
eprintln!("[plugin:{}] error: {}", plugin_name, resp.error);
|
||||
None
|
||||
}
|
||||
HostRequest::ReloadResponse(_) => None,
|
||||
HostRequest::ShowToast(req) => {
|
||||
eprintln!("[plugin:{}] {}", plugin_name, req.message);
|
||||
Some(InternalEventPayload::ShowToastResponse(EmptyPayload {}))
|
||||
}
|
||||
HostRequest::ListOpenWorkspaces(_) => {
|
||||
let workspaces = match query_manager.connect().list_workspaces() {
|
||||
Ok(workspaces) => workspaces
|
||||
.into_iter()
|
||||
.map(|w| WorkspaceInfo { id: w.id.clone(), name: w.name, label: w.id })
|
||||
.collect(),
|
||||
Err(err) => {
|
||||
return Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||
error: format!("Failed to list workspaces in CLI: {err}"),
|
||||
}));
|
||||
}
|
||||
};
|
||||
Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
|
||||
workspaces,
|
||||
}))
|
||||
}
|
||||
req => Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||
error: format!("Unsupported plugin request in CLI: {}", req.type_name()),
|
||||
})),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tempfile::TempDir;
|
||||
use yaak_plugins::events::{GetKeyValueRequest, PluginContext, WindowInfoRequest};
|
||||
|
||||
fn query_manager_for_test() -> (QueryManager, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let db_path = temp_dir.path().join("db.sqlite");
|
||||
let blob_path = temp_dir.path().join("blobs.sqlite");
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
|
||||
(query_manager, temp_dir)
|
||||
}
|
||||
|
||||
fn event(payload: InternalEventPayload) -> InternalEvent {
|
||||
InternalEvent {
|
||||
id: "evt_1".to_string(),
|
||||
plugin_ref_id: "plugin_ref_1".to_string(),
|
||||
plugin_name: "@yaak/test-plugin".to_string(),
|
||||
reply_id: None,
|
||||
context: PluginContext::new_empty(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn key_value_requests_round_trip() {
|
||||
let (query_manager, _temp_dir) = query_manager_for_test();
|
||||
let plugin_name = "@yaak/test-plugin";
|
||||
|
||||
let get_missing = build_plugin_reply(
|
||||
&query_manager,
|
||||
&event(InternalEventPayload::GetKeyValueRequest(GetKeyValueRequest {
|
||||
key: "missing".to_string(),
|
||||
})),
|
||||
plugin_name,
|
||||
);
|
||||
match get_missing {
|
||||
Some(InternalEventPayload::GetKeyValueResponse(r)) => assert_eq!(r.value, None),
|
||||
other => panic!("unexpected payload for missing get: {other:?}"),
|
||||
}
|
||||
|
||||
let set = build_plugin_reply(
|
||||
&query_manager,
|
||||
&event(InternalEventPayload::SetKeyValueRequest(
|
||||
yaak_plugins::events::SetKeyValueRequest {
|
||||
key: "token".to_string(),
|
||||
value: "{\"access_token\":\"abc\"}".to_string(),
|
||||
},
|
||||
)),
|
||||
plugin_name,
|
||||
);
|
||||
assert!(matches!(set, Some(InternalEventPayload::SetKeyValueResponse(_))));
|
||||
|
||||
let get_present = build_plugin_reply(
|
||||
&query_manager,
|
||||
&event(InternalEventPayload::GetKeyValueRequest(GetKeyValueRequest {
|
||||
key: "token".to_string(),
|
||||
})),
|
||||
plugin_name,
|
||||
);
|
||||
match get_present {
|
||||
Some(InternalEventPayload::GetKeyValueResponse(r)) => {
|
||||
assert_eq!(r.value, Some("{\"access_token\":\"abc\"}".to_string()))
|
||||
}
|
||||
other => panic!("unexpected payload for present get: {other:?}"),
|
||||
}
|
||||
|
||||
let delete = build_plugin_reply(
|
||||
&query_manager,
|
||||
&event(InternalEventPayload::DeleteKeyValueRequest(
|
||||
yaak_plugins::events::DeleteKeyValueRequest { key: "token".to_string() },
|
||||
)),
|
||||
plugin_name,
|
||||
);
|
||||
match delete {
|
||||
Some(InternalEventPayload::DeleteKeyValueResponse(r)) => assert!(r.deleted),
|
||||
other => panic!("unexpected payload for delete: {other:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unsupported_request_gets_error_reply() {
|
||||
let (query_manager, _temp_dir) = query_manager_for_test();
|
||||
let payload = build_plugin_reply(
|
||||
&query_manager,
|
||||
&event(InternalEventPayload::WindowInfoRequest(WindowInfoRequest {
|
||||
label: "main".to_string(),
|
||||
})),
|
||||
"@yaak/test-plugin",
|
||||
);
|
||||
|
||||
match payload {
|
||||
Some(InternalEventPayload::ErrorResponse(err)) => {
|
||||
assert!(err.error.contains("Unsupported plugin request in CLI"));
|
||||
assert!(err.error.contains("window_info_request"));
|
||||
}
|
||||
other => panic!("unexpected payload for unsupported request: {other:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
34
crates-cli/yaak-cli/src/ui.rs
Normal file
34
crates-cli/yaak-cli/src/ui.rs
Normal file
@@ -0,0 +1,34 @@
|
||||
use console::style;
|
||||
use std::io::{self, IsTerminal};
|
||||
|
||||
pub fn info(message: &str) {
|
||||
if io::stdout().is_terminal() {
|
||||
println!("{:<8} {}", style("INFO").cyan().bold(), style(message).cyan());
|
||||
} else {
|
||||
println!("INFO {message}");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn warning(message: &str) {
|
||||
if io::stdout().is_terminal() {
|
||||
println!("{:<8} {}", style("WARNING").yellow().bold(), style(message).yellow());
|
||||
} else {
|
||||
println!("WARNING {message}");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn success(message: &str) {
|
||||
if io::stdout().is_terminal() {
|
||||
println!("{:<8} {}", style("SUCCESS").green().bold(), style(message).green());
|
||||
} else {
|
||||
println!("SUCCESS {message}");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn error(message: &str) {
|
||||
if io::stderr().is_terminal() {
|
||||
eprintln!("{:<8} {}", style("ERROR").red().bold(), style(message).red());
|
||||
} else {
|
||||
eprintln!("Error: {message}");
|
||||
}
|
||||
}
|
||||
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
16
crates-cli/yaak-cli/src/utils/confirm.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use std::io::{self, IsTerminal, Write};
|
||||
|
||||
pub fn confirm_delete(resource_name: &str, resource_id: &str) -> bool {
|
||||
if !io::stdin().is_terminal() {
|
||||
eprintln!("Refusing to delete in non-interactive mode without --yes");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
print!("Delete {resource_name} {resource_id}? [y/N]: ");
|
||||
io::stdout().flush().expect("Failed to flush stdout");
|
||||
|
||||
let mut input = String::new();
|
||||
io::stdin().read_line(&mut input).expect("Failed to read confirmation");
|
||||
|
||||
matches!(input.trim().to_lowercase().as_str(), "y" | "yes")
|
||||
}
|
||||
107
crates-cli/yaak-cli/src/utils/json.rs
Normal file
107
crates-cli/yaak-cli/src/utils/json.rs
Normal file
@@ -0,0 +1,107 @@
|
||||
use serde::Serialize;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde_json::{Map, Value};
|
||||
|
||||
type JsonResult<T> = std::result::Result<T, String>;
|
||||
|
||||
pub fn is_json_shorthand(input: &str) -> bool {
|
||||
input.trim_start().starts_with('{')
|
||||
}
|
||||
|
||||
pub fn parse_json_object(raw: &str, context: &str) -> JsonResult<Value> {
|
||||
let value: Value = serde_json::from_str(raw)
|
||||
.map_err(|error| format!("Invalid JSON for {context}: {error}"))?;
|
||||
|
||||
if !value.is_object() {
|
||||
return Err(format!("JSON payload for {context} must be an object"));
|
||||
}
|
||||
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
pub fn parse_optional_json(
|
||||
json_flag: Option<String>,
|
||||
json_shorthand: Option<String>,
|
||||
context: &str,
|
||||
) -> JsonResult<Option<Value>> {
|
||||
match (json_flag, json_shorthand) {
|
||||
(Some(_), Some(_)) => {
|
||||
Err(format!("Cannot provide both --json and positional JSON for {context}"))
|
||||
}
|
||||
(Some(raw), None) => parse_json_object(&raw, context).map(Some),
|
||||
(None, Some(raw)) => parse_json_object(&raw, context).map(Some),
|
||||
(None, None) => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_required_json(
|
||||
json_flag: Option<String>,
|
||||
json_shorthand: Option<String>,
|
||||
context: &str,
|
||||
) -> JsonResult<Value> {
|
||||
parse_optional_json(json_flag, json_shorthand, context)?
|
||||
.ok_or_else(|| format!("Missing JSON payload for {context}. Use --json or positional JSON"))
|
||||
}
|
||||
|
||||
pub fn require_id(payload: &Value, context: &str) -> JsonResult<String> {
|
||||
payload
|
||||
.get("id")
|
||||
.and_then(|value| value.as_str())
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(|value| value.to_string())
|
||||
.ok_or_else(|| format!("{context} requires a non-empty \"id\" field"))
|
||||
}
|
||||
|
||||
pub fn validate_create_id(payload: &Value, context: &str) -> JsonResult<()> {
|
||||
let Some(id_value) = payload.get("id") else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
match id_value {
|
||||
Value::String(id) if id.is_empty() => Ok(()),
|
||||
_ => Err(format!("{context} create JSON must omit \"id\" or set it to an empty string")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> JsonResult<T>
|
||||
where
|
||||
T: Serialize + DeserializeOwned,
|
||||
{
|
||||
let mut base = serde_json::to_value(existing)
|
||||
.map_err(|error| format!("Failed to serialize existing model for {context}: {error}"))?;
|
||||
merge_patch(&mut base, patch);
|
||||
|
||||
let Some(base_object) = base.as_object_mut() else {
|
||||
return Err(format!("Merged payload for {context} must be an object"));
|
||||
};
|
||||
base_object.insert("id".to_string(), Value::String(id.to_string()));
|
||||
|
||||
serde_json::from_value(base)
|
||||
.map_err(|error| format!("Failed to deserialize merged payload for {context}: {error}"))
|
||||
}
|
||||
|
||||
fn merge_patch(target: &mut Value, patch: &Value) {
|
||||
match patch {
|
||||
Value::Object(patch_map) => {
|
||||
if !target.is_object() {
|
||||
*target = Value::Object(Map::new());
|
||||
}
|
||||
|
||||
let target_map =
|
||||
target.as_object_mut().expect("merge_patch target expected to be object");
|
||||
|
||||
for (key, patch_value) in patch_map {
|
||||
if patch_value.is_null() {
|
||||
target_map.remove(key);
|
||||
continue;
|
||||
}
|
||||
|
||||
let target_entry = target_map.entry(key.clone()).or_insert(Value::Null);
|
||||
merge_patch(target_entry, patch_value);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
*target = patch.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
2
crates-cli/yaak-cli/src/utils/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod confirm;
|
||||
pub mod json;
|
||||
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
42
crates-cli/yaak-cli/tests/common/http_server.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
use std::io::{Read, Write};
|
||||
use std::net::TcpListener;
|
||||
use std::thread;
|
||||
|
||||
pub struct TestHttpServer {
|
||||
pub url: String,
|
||||
handle: Option<thread::JoinHandle<()>>,
|
||||
}
|
||||
|
||||
impl TestHttpServer {
|
||||
pub fn spawn_ok(body: &'static str) -> Self {
|
||||
let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind test HTTP server");
|
||||
let addr = listener.local_addr().expect("Failed to get local addr");
|
||||
let url = format!("http://{addr}/test");
|
||||
let body_bytes = body.as_bytes().to_vec();
|
||||
|
||||
let handle = thread::spawn(move || {
|
||||
if let Ok((mut stream, _)) = listener.accept() {
|
||||
let mut request_buf = [0u8; 4096];
|
||||
let _ = stream.read(&mut request_buf);
|
||||
|
||||
let response = format!(
|
||||
"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\nContent-Length: {}\r\nConnection: close\r\n\r\n",
|
||||
body_bytes.len()
|
||||
);
|
||||
let _ = stream.write_all(response.as_bytes());
|
||||
let _ = stream.write_all(&body_bytes);
|
||||
let _ = stream.flush();
|
||||
}
|
||||
});
|
||||
|
||||
Self { url, handle: Some(handle) }
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestHttpServer {
|
||||
fn drop(&mut self) {
|
||||
if let Some(handle) = self.handle.take() {
|
||||
let _ = handle.join();
|
||||
}
|
||||
}
|
||||
}
|
||||
106
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
106
crates-cli/yaak-cli/tests/common/mod.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
#![allow(dead_code)]
|
||||
|
||||
pub mod http_server;
|
||||
|
||||
use assert_cmd::Command;
|
||||
use assert_cmd::cargo::cargo_bin_cmd;
|
||||
use std::path::Path;
|
||||
use yaak_models::models::{Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace};
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
pub fn cli_cmd(data_dir: &Path) -> Command {
|
||||
let mut cmd = cargo_bin_cmd!("yaak");
|
||||
cmd.arg("--data-dir").arg(data_dir);
|
||||
cmd
|
||||
}
|
||||
|
||||
pub fn parse_created_id(stdout: &[u8], label: &str) -> String {
|
||||
String::from_utf8_lossy(stdout)
|
||||
.trim()
|
||||
.split_once(": ")
|
||||
.map(|(_, id)| id.to_string())
|
||||
.unwrap_or_else(|| panic!("Expected id in '{label}' output"))
|
||||
}
|
||||
|
||||
pub fn query_manager(data_dir: &Path) -> QueryManager {
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
let blob_path = data_dir.join("blobs.sqlite");
|
||||
let (query_manager, _blob_manager, _rx) =
|
||||
yaak_models::init_standalone(&db_path, &blob_path).expect("Failed to initialize DB");
|
||||
query_manager
|
||||
}
|
||||
|
||||
pub fn seed_workspace(data_dir: &Path, workspace_id: &str) {
|
||||
let workspace = Workspace {
|
||||
id: workspace_id.to_string(),
|
||||
name: "Seed Workspace".to_string(),
|
||||
description: "Seeded for integration tests".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_workspace(&workspace, &UpdateSource::Sync)
|
||||
.expect("Failed to seed workspace");
|
||||
}
|
||||
|
||||
pub fn seed_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||
let request = HttpRequest {
|
||||
id: request_id.to_string(),
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: "Seeded Request".to_string(),
|
||||
method: "GET".to_string(),
|
||||
url: "https://example.com".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed request");
|
||||
}
|
||||
|
||||
pub fn seed_folder(data_dir: &Path, workspace_id: &str, folder_id: &str) {
|
||||
let folder = Folder {
|
||||
id: folder_id.to_string(),
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: "Seed Folder".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_folder(&folder, &UpdateSource::Sync)
|
||||
.expect("Failed to seed folder");
|
||||
}
|
||||
|
||||
pub fn seed_grpc_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||
let request = GrpcRequest {
|
||||
id: request_id.to_string(),
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: "Seeded gRPC Request".to_string(),
|
||||
url: "https://example.com".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_grpc_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed gRPC request");
|
||||
}
|
||||
|
||||
pub fn seed_websocket_request(data_dir: &Path, workspace_id: &str, request_id: &str) {
|
||||
let request = WebsocketRequest {
|
||||
id: request_id.to_string(),
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: "Seeded WebSocket Request".to_string(),
|
||||
url: "wss://example.com/socket".to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_websocket_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed WebSocket request");
|
||||
}
|
||||
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
80
crates-cli/yaak-cli/tests/environment_commands.rs
Normal file
@@ -0,0 +1,80 @@
|
||||
mod common;
|
||||
|
||||
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn create_list_show_delete_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "list", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("Global Variables"));
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args(["environment", "create", "wk_test", "--name", "Production"])
|
||||
.assert()
|
||||
.success();
|
||||
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "list", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(&environment_id))
|
||||
.stdout(contains("Production"));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "show", &environment_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("\"id\": \"{environment_id}\"")))
|
||||
.stdout(contains("\"parentModel\": \"environment\""));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "delete", &environment_id, "--yes"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Deleted environment: {environment_id}")));
|
||||
|
||||
assert!(query_manager(data_dir).connect().get_environment(&environment_id).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_create_and_update_merge_patch_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"environment",
|
||||
"create",
|
||||
r#"{"workspaceId":"wk_test","name":"Json Environment"}"#,
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args([
|
||||
"environment",
|
||||
"update",
|
||||
&format!(r##"{{"id":"{}","color":"#00ff00"}}"##, environment_id),
|
||||
])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Updated environment: {environment_id}")));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["environment", "show", &environment_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"Json Environment\""))
|
||||
.stdout(contains("\"color\": \"#00ff00\""));
|
||||
}
|
||||
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
74
crates-cli/yaak-cli/tests/folder_commands.rs
Normal file
@@ -0,0 +1,74 @@
|
||||
mod common;
|
||||
|
||||
use common::{cli_cmd, parse_created_id, query_manager, seed_workspace};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn create_list_show_delete_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args(["folder", "create", "wk_test", "--name", "Auth"])
|
||||
.assert()
|
||||
.success();
|
||||
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["folder", "list", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(&folder_id))
|
||||
.stdout(contains("Auth"));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["folder", "show", &folder_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("\"id\": \"{folder_id}\"")))
|
||||
.stdout(contains("\"workspaceId\": \"wk_test\""));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["folder", "delete", &folder_id, "--yes"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Deleted folder: {folder_id}")));
|
||||
|
||||
assert!(query_manager(data_dir).connect().get_folder(&folder_id).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_create_and_update_merge_patch_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"folder",
|
||||
"create",
|
||||
r#"{"workspaceId":"wk_test","name":"Json Folder"}"#,
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args([
|
||||
"folder",
|
||||
"update",
|
||||
&format!(r#"{{"id":"{}","description":"Folder Description"}}"#, folder_id),
|
||||
])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Updated folder: {folder_id}")));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["folder", "show", &folder_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"Json Folder\""))
|
||||
.stdout(contains("\"description\": \"Folder Description\""));
|
||||
}
|
||||
224
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
224
crates-cli/yaak-cli/tests/request_commands.rs
Normal file
@@ -0,0 +1,224 @@
|
||||
mod common;
|
||||
|
||||
use common::http_server::TestHttpServer;
|
||||
use common::{
|
||||
cli_cmd, parse_created_id, query_manager, seed_grpc_request, seed_request,
|
||||
seed_websocket_request, seed_workspace,
|
||||
};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
use yaak_models::models::HttpResponseState;
|
||||
|
||||
#[test]
|
||||
fn show_and_delete_yes_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"request",
|
||||
"create",
|
||||
"wk_test",
|
||||
"--name",
|
||||
"Smoke Test",
|
||||
"--url",
|
||||
"https://example.com",
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "show", &request_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("\"id\": \"{request_id}\"")))
|
||||
.stdout(contains("\"workspaceId\": \"wk_test\""));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "delete", &request_id, "--yes"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Deleted request: {request_id}")));
|
||||
|
||||
assert!(query_manager(data_dir).connect().get_http_request(&request_id).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn delete_without_yes_fails_in_non_interactive_mode() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
seed_request(data_dir, "wk_test", "rq_seed_delete_noninteractive");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "delete", "rq_seed_delete_noninteractive"])
|
||||
.assert()
|
||||
.failure()
|
||||
.code(1)
|
||||
.stderr(contains("Refusing to delete in non-interactive mode without --yes"));
|
||||
|
||||
assert!(
|
||||
query_manager(data_dir).connect().get_http_request("rq_seed_delete_noninteractive").is_ok()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_create_and_update_merge_patch_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"request",
|
||||
"create",
|
||||
r#"{"workspaceId":"wk_test","name":"Json Request","url":"https://example.com"}"#,
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args([
|
||||
"request",
|
||||
"update",
|
||||
&format!(r#"{{"id":"{}","name":"Renamed Request"}}"#, request_id),
|
||||
])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Updated request: {request_id}")));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "show", &request_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"Renamed Request\""))
|
||||
.stdout(contains("\"url\": \"https://example.com\""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn update_requires_id_in_json_payload() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "update", r#"{"name":"No ID"}"#])
|
||||
.assert()
|
||||
.failure()
|
||||
.stderr(contains("request update requires a non-empty \"id\" field"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn create_allows_workspace_only_with_empty_defaults() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir).args(["request", "create", "wk_test"]).assert().success();
|
||||
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||
|
||||
let request = query_manager(data_dir)
|
||||
.connect()
|
||||
.get_http_request(&request_id)
|
||||
.expect("Failed to load created request");
|
||||
assert_eq!(request.workspace_id, "wk_test");
|
||||
assert_eq!(request.method, "GET");
|
||||
assert_eq!(request.name, "");
|
||||
assert_eq!(request.url, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_send_persists_response_body_and_events() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let server = TestHttpServer::spawn_ok("hello from integration test");
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args([
|
||||
"request",
|
||||
"create",
|
||||
"wk_test",
|
||||
"--name",
|
||||
"Send Test",
|
||||
"--url",
|
||||
&server.url,
|
||||
])
|
||||
.assert()
|
||||
.success();
|
||||
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "send", &request_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("HTTP 200 OK"))
|
||||
.stdout(contains("hello from integration test"));
|
||||
|
||||
let qm = query_manager(data_dir);
|
||||
let db = qm.connect();
|
||||
let responses =
|
||||
db.list_http_responses_for_request(&request_id, None).expect("Failed to load responses");
|
||||
assert_eq!(responses.len(), 1, "expected exactly one persisted response");
|
||||
|
||||
let response = &responses[0];
|
||||
assert_eq!(response.status, 200);
|
||||
assert!(matches!(response.state, HttpResponseState::Closed));
|
||||
assert!(response.error.is_none());
|
||||
|
||||
let body_path =
|
||||
response.body_path.as_ref().expect("expected persisted response body path").to_string();
|
||||
let body = std::fs::read_to_string(&body_path).expect("Failed to read response body file");
|
||||
assert_eq!(body, "hello from integration test");
|
||||
|
||||
let events =
|
||||
db.list_http_response_events(&response.id).expect("Failed to load response events");
|
||||
assert!(!events.is_empty(), "expected at least one persisted response event");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_schema_http_outputs_json_schema() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "schema", "http"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"type\": \"object\""))
|
||||
.stdout(contains("\"authentication\""));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_send_grpc_returns_explicit_nyi_error() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
seed_grpc_request(data_dir, "wk_test", "gr_seed_nyi");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "send", "gr_seed_nyi"])
|
||||
.assert()
|
||||
.failure()
|
||||
.code(1)
|
||||
.stderr(contains("gRPC request send is not implemented yet in yaak-cli"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn request_send_websocket_returns_explicit_nyi_error() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
seed_websocket_request(data_dir, "wk_test", "wr_seed_nyi");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["request", "send", "wr_seed_nyi"])
|
||||
.assert()
|
||||
.failure()
|
||||
.code(1)
|
||||
.stderr(contains("WebSocket request send is not implemented yet in yaak-cli"));
|
||||
}
|
||||
81
crates-cli/yaak-cli/tests/send_commands.rs
Normal file
81
crates-cli/yaak-cli/tests/send_commands.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
mod common;
|
||||
|
||||
use common::http_server::TestHttpServer;
|
||||
use common::{cli_cmd, query_manager, seed_folder, seed_workspace};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
use yaak_models::models::HttpRequest;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
#[test]
|
||||
fn top_level_send_workspace_sends_http_requests_and_prints_summary() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
|
||||
let server = TestHttpServer::spawn_ok("workspace bulk send");
|
||||
let request = HttpRequest {
|
||||
id: "rq_workspace_send".to_string(),
|
||||
workspace_id: "wk_test".to_string(),
|
||||
name: "Workspace Send".to_string(),
|
||||
method: "GET".to_string(),
|
||||
url: server.url.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed workspace request");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["send", "wk_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("HTTP 200 OK"))
|
||||
.stdout(contains("workspace bulk send"))
|
||||
.stdout(contains("Send summary: 1 succeeded, 0 failed"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn top_level_send_folder_sends_http_requests_and_prints_summary() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
seed_workspace(data_dir, "wk_test");
|
||||
seed_folder(data_dir, "wk_test", "fl_test");
|
||||
|
||||
let server = TestHttpServer::spawn_ok("folder bulk send");
|
||||
let request = HttpRequest {
|
||||
id: "rq_folder_send".to_string(),
|
||||
workspace_id: "wk_test".to_string(),
|
||||
folder_id: Some("fl_test".to_string()),
|
||||
name: "Folder Send".to_string(),
|
||||
method: "GET".to_string(),
|
||||
url: server.url.clone(),
|
||||
..Default::default()
|
||||
};
|
||||
query_manager(data_dir)
|
||||
.connect()
|
||||
.upsert_http_request(&request, &UpdateSource::Sync)
|
||||
.expect("Failed to seed folder request");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["send", "fl_test"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("HTTP 200 OK"))
|
||||
.stdout(contains("folder bulk send"))
|
||||
.stdout(contains("Send summary: 1 succeeded, 0 failed"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn top_level_send_unknown_id_fails_with_clear_error() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["send", "does_not_exist"])
|
||||
.assert()
|
||||
.failure()
|
||||
.code(1)
|
||||
.stderr(contains("Could not resolve ID 'does_not_exist' as request, folder, or workspace"));
|
||||
}
|
||||
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
59
crates-cli/yaak-cli/tests/workspace_commands.rs
Normal file
@@ -0,0 +1,59 @@
|
||||
mod common;
|
||||
|
||||
use common::{cli_cmd, parse_created_id, query_manager};
|
||||
use predicates::str::contains;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
|
||||
fn create_show_delete_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
|
||||
let create_assert =
|
||||
cli_cmd(data_dir).args(["workspace", "create", "--name", "WS One"]).assert().success();
|
||||
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["workspace", "show", &workspace_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("\"id\": \"{workspace_id}\"")))
|
||||
.stdout(contains("\"name\": \"WS One\""));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["workspace", "delete", &workspace_id, "--yes"])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Deleted workspace: {workspace_id}")));
|
||||
|
||||
assert!(query_manager(data_dir).connect().get_workspace(&workspace_id).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn json_create_and_update_merge_patch_round_trip() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp dir");
|
||||
let data_dir = temp_dir.path();
|
||||
|
||||
let create_assert = cli_cmd(data_dir)
|
||||
.args(["workspace", "create", r#"{"name":"Json Workspace"}"#])
|
||||
.assert()
|
||||
.success();
|
||||
let workspace_id = parse_created_id(&create_assert.get_output().stdout, "workspace create");
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args([
|
||||
"workspace",
|
||||
"update",
|
||||
&format!(r#"{{"id":"{}","description":"Updated via JSON"}}"#, workspace_id),
|
||||
])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains(format!("Updated workspace: {workspace_id}")));
|
||||
|
||||
cli_cmd(data_dir)
|
||||
.args(["workspace", "show", &workspace_id])
|
||||
.assert()
|
||||
.success()
|
||||
.stdout(contains("\"name\": \"Json Workspace\""))
|
||||
.stdout(contains("\"description\": \"Updated via JSON\""));
|
||||
}
|
||||
@@ -57,9 +57,11 @@ url = "2"
|
||||
tokio-util = { version = "0.7", features = ["codec"] }
|
||||
ts-rs = { workspace = true }
|
||||
uuid = "1.12.1"
|
||||
yaak-api = { workspace = true }
|
||||
yaak-common = { workspace = true }
|
||||
yaak-tauri-utils = { workspace = true }
|
||||
yaak-core = { workspace = true }
|
||||
yaak = { workspace = true }
|
||||
yaak-crypto = { workspace = true }
|
||||
yaak-fonts = { workspace = true }
|
||||
yaak-git = { workspace = true }
|
||||
|
||||
@@ -2,7 +2,6 @@ use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::models::HttpRequestHeader;
|
||||
use yaak_models::queries::workspaces::default_headers;
|
||||
@@ -23,20 +22,6 @@ impl<'a, R: Runtime, M: Manager<R>> EncryptionManagerExt<'a, R> for M {
|
||||
}
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_show_workspace_key<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
let key = window.crypto().reveal_workspace_key(workspace_id)?;
|
||||
window
|
||||
.dialog()
|
||||
.message(format!("Your workspace key is \n\n{}", key))
|
||||
.kind(MessageDialogKind::Info)
|
||||
.show(|_v| {});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_decrypt_template<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
@@ -100,6 +85,15 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
window.crypto().disable_encryption(workspace_id)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
|
||||
default_headers()
|
||||
|
||||
@@ -36,7 +36,7 @@ pub enum Error {
|
||||
PluginError(#[from] yaak_plugins::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
|
||||
ApiError(#[from] yaak_api::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
ClipboardError(#[from] tauri_plugin_clipboard_manager::Error),
|
||||
|
||||
@@ -6,32 +6,47 @@ use crate::error::Result;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::command;
|
||||
use yaak_git::{
|
||||
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult, git_add, git_add_credential,
|
||||
git_add_remote, git_checkout_branch, git_commit, git_create_branch, git_delete_branch,
|
||||
git_fetch_all, git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes,
|
||||
git_rm_remote, git_status, git_unstage,
|
||||
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
|
||||
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
|
||||
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
|
||||
git_init, git_log, git_merge_branch, git_pull, git_pull_force_reset, git_pull_merge, git_push,
|
||||
git_remotes, git_rename_branch, git_reset_changes, git_rm_remote, git_status, git_unstage,
|
||||
};
|
||||
|
||||
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
|
||||
Ok(git_checkout_branch(dir, branch, force)?)
|
||||
Ok(git_checkout_branch(dir, branch, force).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch)?)
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch, base).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_branch(dir, branch)?)
|
||||
pub async fn cmd_git_delete_branch(
|
||||
dir: &Path,
|
||||
branch: &str,
|
||||
force: Option<bool>,
|
||||
) -> Result<BranchDeleteResult> {
|
||||
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch, force)?)
|
||||
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_remote_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
Ok(git_rename_branch(dir, old_name, new_name).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -49,6 +64,11 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
|
||||
Ok(git_init(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
|
||||
Ok(git_clone(url, dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
|
||||
Ok(git_commit(dir, message).await?)
|
||||
@@ -69,6 +89,20 @@ pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
|
||||
Ok(git_pull(dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_pull_force_reset(
|
||||
dir: &Path,
|
||||
remote: &str,
|
||||
branch: &str,
|
||||
) -> Result<PullResult> {
|
||||
Ok(git_pull_force_reset(dir, remote, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||
Ok(git_pull_merge(dir, remote, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_add(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()> {
|
||||
for path in rela_paths {
|
||||
@@ -85,14 +119,18 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_reset_changes(dir: &Path) -> Result<()> {
|
||||
Ok(git_reset_changes(dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_add_credential(
|
||||
dir: &Path,
|
||||
remote_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<()> {
|
||||
Ok(git_add_credential(dir, remote_url, username, password).await?)
|
||||
Ok(git_add_credential(remote_url, username, password).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
|
||||
@@ -3,45 +3,18 @@ use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::render::render_http_request;
|
||||
use log::{debug, warn};
|
||||
use std::pin::Pin;
|
||||
use log::warn;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicI32, Ordering};
|
||||
use std::time::{Duration, Instant};
|
||||
use std::time::Instant;
|
||||
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
||||
use tokio::fs::{File, create_dir_all};
|
||||
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt};
|
||||
use tokio::sync::watch::Receiver;
|
||||
use tokio_util::bytes::Bytes;
|
||||
use yaak::send::{SendHttpRequestWithPluginsParams, send_http_request_with_plugins};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_http::client::{
|
||||
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
||||
};
|
||||
use yaak_http::cookies::CookieStore;
|
||||
use yaak_http::manager::{CachedClient, HttpConnectionManager};
|
||||
use yaak_http::sender::ReqwestSender;
|
||||
use yaak_http::tee_reader::TeeReader;
|
||||
use yaak_http::transaction::HttpTransaction;
|
||||
use yaak_http::types::{
|
||||
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
||||
};
|
||||
use yaak_models::blob_manager::BodyChunk;
|
||||
use yaak_models::models::{
|
||||
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
|
||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
||||
};
|
||||
use yaak_http::manager::HttpConnectionManager;
|
||||
use yaak_models::models::{CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseState};
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{
|
||||
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
||||
};
|
||||
use yaak_plugins::events::PluginContext;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::RenderOptions;
|
||||
use yaak_tls::find_client_certificate;
|
||||
|
||||
/// Chunk size for storing request bodies (1MB)
|
||||
const REQUEST_BODY_CHUNK_SIZE: usize = 1024 * 1024;
|
||||
|
||||
/// Context for managing response state during HTTP transactions.
|
||||
/// Handles both persisted responses (stored in DB) and ephemeral responses (in-memory only).
|
||||
@@ -168,135 +141,30 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let connection_manager = app_handle.state::<HttpConnectionManager>();
|
||||
let settings = window.db().get_settings();
|
||||
let workspace_id = &unrendered_request.workspace_id;
|
||||
let folder_id = unrendered_request.folder_id.as_deref();
|
||||
let environment_id = environment.map(|e| e.id);
|
||||
let workspace = window.db().get_workspace(workspace_id)?;
|
||||
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
let env_chain =
|
||||
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
|
||||
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
|
||||
let cookie_jar_id = cookie_jar.as_ref().map(|jar| jar.id.clone());
|
||||
|
||||
// Build the sendable request using the new SendableHttpRequest type
|
||||
let options = SendableHttpRequestOptions {
|
||||
follow_redirects: workspace.setting_follow_redirects,
|
||||
timeout: if workspace.setting_request_timeout > 0 {
|
||||
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
};
|
||||
let mut sendable_request = SendableHttpRequest::from_http_request(&request, options).await?;
|
||||
|
||||
debug!("Sending request to {} {}", sendable_request.method, sendable_request.url);
|
||||
|
||||
let proxy_setting = match settings.proxy {
|
||||
None => HttpConnectionProxySetting::System,
|
||||
Some(ProxySetting::Disabled) => HttpConnectionProxySetting::Disabled,
|
||||
Some(ProxySetting::Enabled { http, https, auth, bypass, disabled }) => {
|
||||
if disabled {
|
||||
HttpConnectionProxySetting::System
|
||||
} else {
|
||||
HttpConnectionProxySetting::Enabled {
|
||||
http,
|
||||
https,
|
||||
bypass,
|
||||
auth: match auth {
|
||||
None => None,
|
||||
Some(ProxySettingAuth { user, password }) => {
|
||||
Some(HttpConnectionProxySettingAuth { user, password })
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let client_certificate =
|
||||
find_client_certificate(&sendable_request.url, &settings.client_certificates);
|
||||
|
||||
// Create cookie store if a cookie jar is specified
|
||||
let maybe_cookie_store = match cookie_jar.clone() {
|
||||
Some(CookieJar { id, .. }) => {
|
||||
// NOTE: We need to refetch the cookie jar because a chained request might have
|
||||
// updated cookies when we rendered the request.
|
||||
let cj = window.db().get_cookie_jar(&id)?;
|
||||
let cookie_store = CookieStore::from_cookies(cj.cookies.clone());
|
||||
Some((cookie_store, cj))
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let cached_client = connection_manager
|
||||
.get_client(&HttpConnectionOptions {
|
||||
id: plugin_context.id.clone(),
|
||||
validate_certificates: workspace.setting_validate_certificates,
|
||||
proxy: proxy_setting,
|
||||
client_certificate,
|
||||
dns_overrides: workspace.setting_dns_overrides.clone(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
// Apply authentication to the request
|
||||
apply_authentication(
|
||||
&window,
|
||||
&mut sendable_request,
|
||||
&request,
|
||||
auth_context_id,
|
||||
&plugin_manager,
|
||||
let response_dir = app_handle.path().app_data_dir()?.join("responses");
|
||||
let result = send_http_request_with_plugins(SendHttpRequestWithPluginsParams {
|
||||
query_manager: app_handle.db_manager().inner(),
|
||||
blob_manager: app_handle.blob_manager().inner(),
|
||||
request: unrendered_request.clone(),
|
||||
environment_id: environment_id.as_deref(),
|
||||
update_source: response_ctx.update_source.clone(),
|
||||
cookie_jar_id,
|
||||
response_dir: &response_dir,
|
||||
emit_events_to: None,
|
||||
existing_response: Some(response_ctx.response().clone()),
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
plugin_context,
|
||||
)
|
||||
.await?;
|
||||
cancelled_rx: Some(cancelled_rx.clone()),
|
||||
connection_manager: Some(connection_manager.inner()),
|
||||
})
|
||||
.await
|
||||
.map_err(|e| GenericError(e.to_string()))?;
|
||||
|
||||
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
|
||||
let result = execute_transaction(
|
||||
cached_client,
|
||||
sendable_request,
|
||||
response_ctx,
|
||||
cancelled_rx.clone(),
|
||||
cookie_store,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Wait for blob writing to complete and check for errors
|
||||
let final_result = match result {
|
||||
Ok((response, maybe_blob_write_handle)) => {
|
||||
// Check if blob writing failed
|
||||
if let Some(handle) = maybe_blob_write_handle {
|
||||
if let Ok(Err(e)) = handle.await {
|
||||
// Update response with the storage error
|
||||
let _ = response_ctx.update(|r| {
|
||||
let error_msg =
|
||||
format!("Request succeeded but failed to store request body: {}", e);
|
||||
r.error = Some(match &r.error {
|
||||
Some(existing) => format!("{}; {}", existing, error_msg),
|
||||
None => error_msg,
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(response)
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
};
|
||||
|
||||
// Persist cookies back to the database after the request completes
|
||||
if let Some((cookie_store, mut cj)) = maybe_cookie_store {
|
||||
let cookies = cookie_store.get_all_cookies();
|
||||
cj.cookies = cookies;
|
||||
if let Err(e) = window.db().upsert_cookie_jar(&cj, &UpdateSource::Background) {
|
||||
warn!("Failed to persist cookies to database: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
final_result
|
||||
Ok(result.response)
|
||||
}
|
||||
|
||||
pub fn resolve_http_request<R: Runtime>(
|
||||
@@ -315,395 +183,3 @@ pub fn resolve_http_request<R: Runtime>(
|
||||
|
||||
Ok((new_request, authentication_context_id))
|
||||
}
|
||||
|
||||
async fn execute_transaction<R: Runtime>(
|
||||
cached_client: CachedClient,
|
||||
mut sendable_request: SendableHttpRequest,
|
||||
response_ctx: &mut ResponseContext<R>,
|
||||
mut cancelled_rx: Receiver<bool>,
|
||||
cookie_store: Option<CookieStore>,
|
||||
) -> Result<(HttpResponse, Option<tauri::async_runtime::JoinHandle<Result<()>>>)> {
|
||||
let app_handle = &response_ctx.app_handle.clone();
|
||||
let response_id = response_ctx.response().id.clone();
|
||||
let workspace_id = response_ctx.response().workspace_id.clone();
|
||||
let is_persisted = response_ctx.is_persisted();
|
||||
|
||||
// Keep a reference to the resolver for DNS timing events
|
||||
let resolver = cached_client.resolver.clone();
|
||||
|
||||
let sender = ReqwestSender::with_client(cached_client.client);
|
||||
let transaction = match cookie_store {
|
||||
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
|
||||
None => HttpTransaction::new(sender),
|
||||
};
|
||||
let start = Instant::now();
|
||||
|
||||
// Capture request headers before sending
|
||||
let request_headers: Vec<HttpResponseHeader> = sendable_request
|
||||
.headers
|
||||
.iter()
|
||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
||||
.collect();
|
||||
|
||||
// Update response with headers info
|
||||
response_ctx.update(|r| {
|
||||
r.url = sendable_request.url.clone();
|
||||
r.request_headers = request_headers;
|
||||
})?;
|
||||
|
||||
// Create bounded channel for receiving events and spawn a task to store them in DB
|
||||
// Buffer size of 100 events provides back pressure if DB writes are slow
|
||||
let (event_tx, mut event_rx) =
|
||||
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
|
||||
|
||||
// Set the event sender on the DNS resolver so it can emit DNS timing events
|
||||
resolver.set_event_sender(Some(event_tx.clone())).await;
|
||||
|
||||
// Shared state to capture DNS timing from the event processing task
|
||||
let dns_elapsed = Arc::new(AtomicI32::new(0));
|
||||
|
||||
// Write events to DB in a task (only for persisted responses)
|
||||
if is_persisted {
|
||||
let response_id = response_id.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let update_source = response_ctx.update_source.clone();
|
||||
let workspace_id = workspace_id.clone();
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
// Capture DNS timing when we see a DNS event
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
|
||||
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// For ephemeral responses, just drain the events but still capture DNS timing
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Capture request body as it's sent (only for persisted responses)
|
||||
let body_id = format!("{}.request", response_id);
|
||||
let maybe_blob_write_handle = match sendable_request.body {
|
||||
Some(SendableBody::Bytes(bytes)) => {
|
||||
if is_persisted {
|
||||
write_bytes_to_db_sync(response_ctx, &body_id, bytes.clone())?;
|
||||
}
|
||||
sendable_request.body = Some(SendableBody::Bytes(bytes));
|
||||
None
|
||||
}
|
||||
Some(SendableBody::Stream(stream)) => {
|
||||
// Wrap stream with TeeReader to capture data as it's read
|
||||
// Use unbounded channel to ensure all data is captured without blocking the HTTP request
|
||||
let (body_chunk_tx, body_chunk_rx) = tokio::sync::mpsc::unbounded_channel::<Vec<u8>>();
|
||||
let tee_reader = TeeReader::new(stream, body_chunk_tx);
|
||||
let pinned: Pin<Box<dyn AsyncRead + Send + 'static>> = Box::pin(tee_reader);
|
||||
|
||||
let handle = if is_persisted {
|
||||
// Spawn task to write request body chunks to blob DB
|
||||
let app_handle = app_handle.clone();
|
||||
let response_id = response_id.clone();
|
||||
let workspace_id = workspace_id.clone();
|
||||
let body_id = body_id.clone();
|
||||
let update_source = response_ctx.update_source.clone();
|
||||
Some(tauri::async_runtime::spawn(async move {
|
||||
write_stream_chunks_to_db(
|
||||
app_handle,
|
||||
&body_id,
|
||||
&workspace_id,
|
||||
&response_id,
|
||||
&update_source,
|
||||
body_chunk_rx,
|
||||
)
|
||||
.await
|
||||
}))
|
||||
} else {
|
||||
// For ephemeral responses, just drain the body chunks
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut rx = body_chunk_rx;
|
||||
while rx.recv().await.is_some() {}
|
||||
});
|
||||
None
|
||||
};
|
||||
|
||||
sendable_request.body = Some(SendableBody::Stream(pinned));
|
||||
handle
|
||||
}
|
||||
None => {
|
||||
sendable_request.body = None;
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
// Execute the transaction with cancellation support
|
||||
// This returns the response with headers, but body is not yet consumed
|
||||
// Events (headers, settings, chunks) are sent through the channel
|
||||
let mut http_response = transaction
|
||||
.execute_with_cancellation(sendable_request, cancelled_rx.clone(), event_tx)
|
||||
.await?;
|
||||
|
||||
// Prepare the response path before consuming the body
|
||||
let body_path = if response_id.is_empty() {
|
||||
// Ephemeral responses: use OS temp directory for automatic cleanup
|
||||
let temp_dir = std::env::temp_dir().join("yaak-ephemeral-responses");
|
||||
create_dir_all(&temp_dir).await?;
|
||||
temp_dir.join(uuid::Uuid::new_v4().to_string())
|
||||
} else {
|
||||
// Persisted responses: use app data directory
|
||||
let dir = app_handle.path().app_data_dir()?;
|
||||
let base_dir = dir.join("responses");
|
||||
create_dir_all(&base_dir).await?;
|
||||
base_dir.join(&response_id)
|
||||
};
|
||||
|
||||
// Extract metadata before consuming the body (headers are available immediately)
|
||||
// Url might change, so update again
|
||||
response_ctx.update(|r| {
|
||||
r.body_path = Some(body_path.to_string_lossy().to_string());
|
||||
r.elapsed_headers = start.elapsed().as_millis() as i32;
|
||||
r.status = http_response.status as i32;
|
||||
r.status_reason = http_response.status_reason.clone();
|
||||
r.url = http_response.url.clone();
|
||||
r.remote_addr = http_response.remote_addr.clone();
|
||||
r.version = http_response.version.clone();
|
||||
r.headers = http_response
|
||||
.headers
|
||||
.iter()
|
||||
.map(|(name, value)| HttpResponseHeader { name: name.clone(), value: value.clone() })
|
||||
.collect();
|
||||
r.content_length = http_response.content_length.map(|l| l as i32);
|
||||
r.state = HttpResponseState::Connected;
|
||||
r.request_headers = http_response
|
||||
.request_headers
|
||||
.iter()
|
||||
.map(|(n, v)| HttpResponseHeader { name: n.clone(), value: v.clone() })
|
||||
.collect();
|
||||
})?;
|
||||
|
||||
// Get the body stream for manual consumption
|
||||
let mut body_stream = http_response.into_body_stream()?;
|
||||
|
||||
// Open file for writing
|
||||
let mut file = File::options()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.open(&body_path)
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("Failed to open file: {}", e)))?;
|
||||
|
||||
// Stream body to file, with throttled DB updates to avoid excessive writes
|
||||
let mut written_bytes: usize = 0;
|
||||
let mut last_update_time = start;
|
||||
let mut buf = [0u8; 8192];
|
||||
|
||||
// Throttle settings: update DB at most every 100ms
|
||||
const UPDATE_INTERVAL_MS: u128 = 100;
|
||||
|
||||
loop {
|
||||
// Check for cancellation. If we already have headers/body, just close cleanly without error
|
||||
if *cancelled_rx.borrow() {
|
||||
break;
|
||||
}
|
||||
|
||||
// Use select! to race between reading and cancellation, so cancellation is immediate
|
||||
let read_result = tokio::select! {
|
||||
biased;
|
||||
_ = cancelled_rx.changed() => {
|
||||
break;
|
||||
}
|
||||
result = body_stream.read(&mut buf) => result,
|
||||
};
|
||||
|
||||
match read_result {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(n) => {
|
||||
file.write_all(&buf[..n])
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("Failed to write to file: {}", e)))?;
|
||||
file.flush()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("Failed to flush file: {}", e)))?;
|
||||
written_bytes += n;
|
||||
|
||||
// Throttle DB updates: only update if enough time has passed
|
||||
let now = Instant::now();
|
||||
let elapsed_since_update = now.duration_since(last_update_time).as_millis();
|
||||
|
||||
if elapsed_since_update >= UPDATE_INTERVAL_MS {
|
||||
response_ctx.update(|r| {
|
||||
r.elapsed = start.elapsed().as_millis() as i32;
|
||||
r.content_length = Some(written_bytes as i32);
|
||||
})?;
|
||||
last_update_time = now;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(GenericError(format!("Failed to read response body: {}", e)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Final update with closed state and accurate byte count
|
||||
response_ctx.update(|r| {
|
||||
r.elapsed = start.elapsed().as_millis() as i32;
|
||||
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
|
||||
r.content_length = Some(written_bytes as i32);
|
||||
r.state = HttpResponseState::Closed;
|
||||
})?;
|
||||
|
||||
// Clear the event sender from the resolver since this request is done
|
||||
resolver.set_event_sender(None).await;
|
||||
|
||||
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
|
||||
}
|
||||
|
||||
fn write_bytes_to_db_sync<R: Runtime>(
|
||||
response_ctx: &mut ResponseContext<R>,
|
||||
body_id: &str,
|
||||
data: Bytes,
|
||||
) -> Result<()> {
|
||||
if data.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Write in chunks if data is large
|
||||
let mut offset = 0;
|
||||
let mut chunk_index = 0;
|
||||
while offset < data.len() {
|
||||
let end = std::cmp::min(offset + REQUEST_BODY_CHUNK_SIZE, data.len());
|
||||
let chunk_data = data.slice(offset..end).to_vec();
|
||||
let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
|
||||
response_ctx.app_handle.blobs().insert_chunk(&chunk)?;
|
||||
offset = end;
|
||||
chunk_index += 1;
|
||||
}
|
||||
|
||||
// Update the response with the total request body size
|
||||
response_ctx.update(|r| {
|
||||
r.request_content_length = Some(data.len() as i32);
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn write_stream_chunks_to_db<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
body_id: &str,
|
||||
workspace_id: &str,
|
||||
response_id: &str,
|
||||
update_source: &UpdateSource,
|
||||
mut rx: tokio::sync::mpsc::UnboundedReceiver<Vec<u8>>,
|
||||
) -> Result<()> {
|
||||
let mut buffer = Vec::with_capacity(REQUEST_BODY_CHUNK_SIZE);
|
||||
let mut chunk_index = 0;
|
||||
let mut total_bytes: usize = 0;
|
||||
|
||||
while let Some(data) = rx.recv().await {
|
||||
total_bytes += data.len();
|
||||
buffer.extend_from_slice(&data);
|
||||
|
||||
// Flush when buffer reaches chunk size
|
||||
while buffer.len() >= REQUEST_BODY_CHUNK_SIZE {
|
||||
debug!("Writing chunk {chunk_index} to DB");
|
||||
let chunk_data: Vec<u8> = buffer.drain(..REQUEST_BODY_CHUNK_SIZE).collect();
|
||||
let chunk = BodyChunk::new(body_id, chunk_index, chunk_data);
|
||||
app_handle.blobs().insert_chunk(&chunk)?;
|
||||
app_handle.db().upsert_http_response_event(
|
||||
&HttpResponseEvent::new(
|
||||
response_id,
|
||||
workspace_id,
|
||||
yaak_http::sender::HttpResponseEvent::ChunkSent {
|
||||
bytes: REQUEST_BODY_CHUNK_SIZE,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
update_source,
|
||||
)?;
|
||||
chunk_index += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Flush remaining data
|
||||
if !buffer.is_empty() {
|
||||
let chunk = BodyChunk::new(body_id, chunk_index, buffer);
|
||||
debug!("Flushing remaining data {chunk_index} {}", chunk.data.len());
|
||||
app_handle.blobs().insert_chunk(&chunk)?;
|
||||
app_handle.db().upsert_http_response_event(
|
||||
&HttpResponseEvent::new(
|
||||
response_id,
|
||||
workspace_id,
|
||||
yaak_http::sender::HttpResponseEvent::ChunkSent { bytes: chunk.data.len() }.into(),
|
||||
),
|
||||
update_source,
|
||||
)?;
|
||||
}
|
||||
|
||||
// Update the response with the total request body size
|
||||
app_handle.with_tx(|tx| {
|
||||
debug!("Updating final body length {total_bytes}");
|
||||
if let Ok(mut response) = tx.get_http_response(&response_id) {
|
||||
response.request_content_length = Some(total_bytes as i32);
|
||||
tx.update_http_response_if_id(&response, update_source)?;
|
||||
}
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn apply_authentication<R: Runtime>(
|
||||
_window: &WebviewWindow<R>,
|
||||
sendable_request: &mut SendableHttpRequest,
|
||||
request: &HttpRequest,
|
||||
auth_context_id: String,
|
||||
plugin_manager: &PluginManager,
|
||||
plugin_context: &PluginContext,
|
||||
) -> Result<()> {
|
||||
match &request.authentication_type {
|
||||
None => {
|
||||
// No authentication found. Not even inherited
|
||||
}
|
||||
Some(authentication_type) if authentication_type == "none" => {
|
||||
// Explicitly no authentication
|
||||
}
|
||||
Some(authentication_type) => {
|
||||
let req = CallHttpAuthenticationRequest {
|
||||
context_id: format!("{:x}", md5::compute(auth_context_id)),
|
||||
values: serde_json::from_value(serde_json::to_value(&request.authentication)?)?,
|
||||
url: sendable_request.url.clone(),
|
||||
method: sendable_request.method.clone(),
|
||||
headers: sendable_request
|
||||
.headers
|
||||
.iter()
|
||||
.map(|(name, value)| HttpHeader {
|
||||
name: name.to_string(),
|
||||
value: value.to_string(),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
let plugin_result = plugin_manager
|
||||
.call_http_authentication(plugin_context, &authentication_type, req)
|
||||
.await?;
|
||||
|
||||
for header in plugin_result.set_headers.unwrap_or_default() {
|
||||
sendable_request.insert_header((header.name, header.value));
|
||||
}
|
||||
|
||||
if let Some(params) = plugin_result.set_query_parameters {
|
||||
let params = params.into_iter().map(|p| (p.name, p.value)).collect::<Vec<_>>();
|
||||
sendable_request.url = append_query_params(&sendable_request.url, params);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -37,8 +37,8 @@ use yaak_grpc::{Code, ServiceDefinition, serialize_message};
|
||||
use yaak_mac_window::AppHandleMacWindowExt;
|
||||
use yaak_models::models::{
|
||||
AnyModel, CookieJar, Environment, GrpcConnection, GrpcConnectionState, GrpcEvent,
|
||||
GrpcEventType, GrpcRequest, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState,
|
||||
Plugin, Workspace, WorkspaceMeta,
|
||||
GrpcEventType, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseState, Plugin,
|
||||
Workspace, WorkspaceMeta,
|
||||
};
|
||||
use yaak_models::util::{BatchUpsertResult, UpdateSource, get_workspace_export_resources};
|
||||
use yaak_plugins::events::{
|
||||
@@ -101,6 +101,7 @@ struct AppMetaData {
|
||||
app_data_dir: String,
|
||||
app_log_dir: String,
|
||||
vendored_plugin_dir: String,
|
||||
default_project_dir: String,
|
||||
feature_updater: bool,
|
||||
feature_license: bool,
|
||||
}
|
||||
@@ -111,6 +112,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
let app_log_dir = app_handle.path().app_log_dir()?;
|
||||
let vendored_plugin_dir =
|
||||
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
|
||||
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
|
||||
Ok(AppMetaData {
|
||||
is_dev: is_dev(),
|
||||
version: app_handle.package_info().version.to_string(),
|
||||
@@ -118,6 +120,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
app_data_dir: app_data_dir.to_string_lossy().to_string(),
|
||||
app_log_dir: app_log_dir.to_string_lossy().to_string(),
|
||||
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
|
||||
default_project_dir: default_project_dir.to_string_lossy().to_string(),
|
||||
feature_license: cfg!(feature = "license"),
|
||||
feature_updater: cfg!(feature = "updater"),
|
||||
})
|
||||
@@ -1093,7 +1096,8 @@ async fn cmd_get_http_authentication_config<R: Runtime>(
|
||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||
let rendered_json =
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::return_empty())
|
||||
.await?;
|
||||
|
||||
// Convert back to HashMap<String, JsonPrimitive>
|
||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||
@@ -1268,35 +1272,6 @@ async fn cmd_save_response<R: Runtime>(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn cmd_send_folder<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
environment_id: Option<String>,
|
||||
cookie_jar_id: Option<String>,
|
||||
folder_id: &str,
|
||||
) -> YaakResult<()> {
|
||||
let requests = app_handle.db().list_http_requests_for_folder_recursive(folder_id)?;
|
||||
for request in requests {
|
||||
let app_handle = app_handle.clone();
|
||||
let window = window.clone();
|
||||
let environment_id = environment_id.clone();
|
||||
let cookie_jar_id = cookie_jar_id.clone();
|
||||
tokio::spawn(async move {
|
||||
let _ = cmd_send_http_request(
|
||||
app_handle,
|
||||
window,
|
||||
environment_id.as_deref(),
|
||||
cookie_jar_id.as_deref(),
|
||||
request,
|
||||
)
|
||||
.await;
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn cmd_send_http_request<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
@@ -1393,27 +1368,6 @@ async fn cmd_install_plugin<R: Runtime>(
|
||||
Ok(plugin)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn cmd_create_grpc_request<R: Runtime>(
|
||||
workspace_id: &str,
|
||||
name: &str,
|
||||
sort_priority: f64,
|
||||
folder_id: Option<&str>,
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> YaakResult<GrpcRequest> {
|
||||
Ok(app_handle.db().upsert_grpc_request(
|
||||
&GrpcRequest {
|
||||
workspace_id: workspace_id.to_string(),
|
||||
name: name.to_string(),
|
||||
folder_id: folder_id.map(|s| s.to_string()),
|
||||
sort_priority,
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn cmd_reload_plugins<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
@@ -1676,7 +1630,6 @@ pub fn run() {
|
||||
cmd_call_folder_action,
|
||||
cmd_call_grpc_request_action,
|
||||
cmd_check_for_updates,
|
||||
cmd_create_grpc_request,
|
||||
cmd_curl_to_request,
|
||||
cmd_delete_all_grpc_connections,
|
||||
cmd_delete_all_http_responses,
|
||||
@@ -1710,7 +1663,6 @@ pub fn run() {
|
||||
cmd_save_response,
|
||||
cmd_send_ephemeral_request,
|
||||
cmd_send_http_request,
|
||||
cmd_send_folder,
|
||||
cmd_template_function_config,
|
||||
cmd_template_function_summaries,
|
||||
cmd_template_tokens_to_string,
|
||||
@@ -1719,12 +1671,12 @@ pub fn run() {
|
||||
// Migrated commands
|
||||
crate::commands::cmd_decrypt_template,
|
||||
crate::commands::cmd_default_headers,
|
||||
crate::commands::cmd_disable_encryption,
|
||||
crate::commands::cmd_enable_encryption,
|
||||
crate::commands::cmd_get_themes,
|
||||
crate::commands::cmd_reveal_workspace_key,
|
||||
crate::commands::cmd_secure_template,
|
||||
crate::commands::cmd_set_workspace_key,
|
||||
crate::commands::cmd_show_workspace_key,
|
||||
//
|
||||
// Models commands
|
||||
models_ext::models_delete,
|
||||
@@ -1747,16 +1699,22 @@ pub fn run() {
|
||||
git_ext::cmd_git_checkout,
|
||||
git_ext::cmd_git_branch,
|
||||
git_ext::cmd_git_delete_branch,
|
||||
git_ext::cmd_git_delete_remote_branch,
|
||||
git_ext::cmd_git_merge_branch,
|
||||
git_ext::cmd_git_rename_branch,
|
||||
git_ext::cmd_git_status,
|
||||
git_ext::cmd_git_log,
|
||||
git_ext::cmd_git_initialize,
|
||||
git_ext::cmd_git_clone,
|
||||
git_ext::cmd_git_commit,
|
||||
git_ext::cmd_git_fetch_all,
|
||||
git_ext::cmd_git_push,
|
||||
git_ext::cmd_git_pull,
|
||||
git_ext::cmd_git_pull_force_reset,
|
||||
git_ext::cmd_git_pull_merge,
|
||||
git_ext::cmd_git_add,
|
||||
git_ext::cmd_git_unstage,
|
||||
git_ext::cmd_git_reset_changes,
|
||||
git_ext::cmd_git_add_credential,
|
||||
git_ext::cmd_git_remotes,
|
||||
git_ext::cmd_git_add_remote,
|
||||
@@ -1770,14 +1728,7 @@ pub fn run() {
|
||||
plugins_ext::cmd_plugins_update_all,
|
||||
//
|
||||
// WebSocket commands
|
||||
ws_ext::cmd_ws_upsert_request,
|
||||
ws_ext::cmd_ws_duplicate_request,
|
||||
ws_ext::cmd_ws_delete_request,
|
||||
ws_ext::cmd_ws_delete_connection,
|
||||
ws_ext::cmd_ws_delete_connections,
|
||||
ws_ext::cmd_ws_list_events,
|
||||
ws_ext::cmd_ws_list_requests,
|
||||
ws_ext::cmd_ws_list_connections,
|
||||
ws_ext::cmd_ws_send,
|
||||
ws_ext::cmd_ws_close,
|
||||
ws_ext::cmd_ws_connect,
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
//! This module provides the Tauri plugin initialization and extension traits
|
||||
//! that allow accessing QueryManager and BlobManager from Tauri's Manager types.
|
||||
|
||||
use chrono::Utc;
|
||||
use log::error;
|
||||
use std::time::Duration;
|
||||
use tauri::plugin::TauriPlugin;
|
||||
use tauri::{Emitter, Manager, Runtime, State};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||
@@ -13,6 +16,74 @@ use yaak_models::models::{AnyModel, GraphQlIntrospection, GrpcEvent, Settings, W
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
const MODEL_CHANGES_RETENTION_HOURS: i64 = 1;
|
||||
const MODEL_CHANGES_POLL_INTERVAL_MS: u64 = 1000;
|
||||
const MODEL_CHANGES_POLL_BATCH_SIZE: usize = 200;
|
||||
|
||||
struct ModelChangeCursor {
|
||||
created_at: String,
|
||||
id: i64,
|
||||
}
|
||||
|
||||
impl ModelChangeCursor {
|
||||
fn from_launch_time() -> Self {
|
||||
Self {
|
||||
created_at: Utc::now().naive_utc().format("%Y-%m-%d %H:%M:%S%.3f").to_string(),
|
||||
id: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn drain_model_changes_batch<R: Runtime>(
|
||||
query_manager: &QueryManager,
|
||||
app_handle: &tauri::AppHandle<R>,
|
||||
cursor: &mut ModelChangeCursor,
|
||||
) -> bool {
|
||||
let changes = match query_manager.connect().list_model_changes_since(
|
||||
&cursor.created_at,
|
||||
cursor.id,
|
||||
MODEL_CHANGES_POLL_BATCH_SIZE,
|
||||
) {
|
||||
Ok(changes) => changes,
|
||||
Err(err) => {
|
||||
error!("Failed to poll model_changes rows: {err:?}");
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
if changes.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let fetched_count = changes.len();
|
||||
for change in changes {
|
||||
cursor.created_at = change.created_at;
|
||||
cursor.id = change.id;
|
||||
|
||||
// Local window-originated writes are forwarded immediately from the
|
||||
// in-memory model event channel.
|
||||
if matches!(change.payload.update_source, UpdateSource::Window { .. }) {
|
||||
continue;
|
||||
}
|
||||
if let Err(err) = app_handle.emit("model_write", change.payload) {
|
||||
error!("Failed to emit model_write event: {err:?}");
|
||||
}
|
||||
}
|
||||
|
||||
fetched_count == MODEL_CHANGES_POLL_BATCH_SIZE
|
||||
}
|
||||
|
||||
async fn run_model_change_poller<R: Runtime>(
|
||||
query_manager: QueryManager,
|
||||
app_handle: tauri::AppHandle<R>,
|
||||
mut cursor: ModelChangeCursor,
|
||||
) {
|
||||
loop {
|
||||
while drain_model_changes_batch(&query_manager, &app_handle, &mut cursor) {}
|
||||
tokio::time::sleep(Duration::from_millis(MODEL_CHANGES_POLL_INTERVAL_MS)).await;
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||
pub trait QueryManagerExt<'a, R> {
|
||||
fn db_manager(&'a self) -> State<'a, QueryManager>;
|
||||
@@ -262,14 +333,37 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
||||
}
|
||||
};
|
||||
|
||||
let db = query_manager.connect();
|
||||
if let Err(err) = db.prune_model_changes_older_than_hours(MODEL_CHANGES_RETENTION_HOURS)
|
||||
{
|
||||
error!("Failed to prune model_changes rows on startup: {err:?}");
|
||||
}
|
||||
// Only stream writes that happen after this app launch.
|
||||
let cursor = ModelChangeCursor::from_launch_time();
|
||||
|
||||
let poll_query_manager = query_manager.clone();
|
||||
|
||||
app_handle.manage(query_manager);
|
||||
app_handle.manage(blob_manager);
|
||||
|
||||
// Forward model change events to the frontend
|
||||
let app_handle = app_handle.clone();
|
||||
// Poll model_changes so all writers (including external CLI processes) update the UI.
|
||||
let app_handle_poll = app_handle.clone();
|
||||
let query_manager = poll_query_manager;
|
||||
tauri::async_runtime::spawn(async move {
|
||||
run_model_change_poller(query_manager, app_handle_poll, cursor).await;
|
||||
});
|
||||
|
||||
// Fast path for local app writes initiated by frontend windows. This keeps the
|
||||
// current sync-model UX snappy, while DB polling handles external writers (CLI).
|
||||
let app_handle_local = app_handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
for payload in rx {
|
||||
app_handle.emit("model_write", payload).unwrap();
|
||||
if !matches!(payload.update_source, UpdateSource::Window { .. }) {
|
||||
continue;
|
||||
}
|
||||
if let Err(err) = app_handle_local.emit("model_write", payload) {
|
||||
error!("Failed to emit local model_write event: {err:?}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@ use serde::{Deserialize, Serialize};
|
||||
use std::time::Instant;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
||||
use ts_rs::TS;
|
||||
use yaak_api::yaak_api_client;
|
||||
use yaak_common::platform::get_os_str;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
// Check for updates every hour
|
||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||
@@ -101,7 +101,8 @@ impl YaakNotifier {
|
||||
let license_check = "disabled".to_string();
|
||||
|
||||
let launch_info = get_or_upsert_launch_info(app_handle);
|
||||
let req = yaak_api_client(app_handle)?
|
||||
let app_version = app_handle.package_info().version.to_string();
|
||||
let req = yaak_api_client(&app_version)?
|
||||
.request(Method::GET, "https://notify.yaak.app/notifications")
|
||||
.query(&[
|
||||
("version", &launch_info.current_version),
|
||||
|
||||
@@ -12,21 +12,23 @@ use chrono::Utc;
|
||||
use cookie::Cookie;
|
||||
use log::error;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime};
|
||||
use tauri::{AppHandle, Emitter, Listener, Manager, Runtime};
|
||||
use tauri_plugin_clipboard_manager::ClipboardExt;
|
||||
use tauri_plugin_opener::OpenerExt;
|
||||
use yaak::plugin_events::{
|
||||
GroupedPluginEvent, HostRequest, SharedPluginEventContext, handle_shared_plugin_event,
|
||||
};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
|
||||
use yaak_models::queries::any_request::AnyRequest;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::error::Error::PluginErr;
|
||||
use yaak_plugins::events::{
|
||||
Color, DeleteKeyValueResponse, EmptyPayload, ErrorResponse, FindHttpResponsesResponse,
|
||||
GetCookieValueResponse, GetHttpRequestByIdResponse, GetKeyValueResponse, Icon, InternalEvent,
|
||||
InternalEventPayload, ListCookieNamesResponse, ListHttpRequestsResponse,
|
||||
ListWorkspacesResponse, RenderGrpcRequestResponse, RenderHttpRequestResponse,
|
||||
SendHttpRequestResponse, SetKeyValueResponse, ShowToastRequest, TemplateRenderResponse,
|
||||
WindowInfoResponse, WindowNavigateEvent, WorkspaceInfo,
|
||||
Color, EmptyPayload, ErrorResponse, FindHttpResponsesResponse, GetCookieValueResponse, Icon,
|
||||
InternalEvent, InternalEventPayload, ListCookieNamesResponse, ListOpenWorkspacesResponse,
|
||||
RenderGrpcRequestResponse, RenderHttpRequestResponse, SendHttpRequestResponse,
|
||||
ShowToastRequest, TemplateRenderResponse, WindowInfoResponse, WindowNavigateEvent,
|
||||
WorkspaceInfo,
|
||||
};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::plugin_handle::PluginHandle;
|
||||
@@ -41,27 +43,154 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
) -> Result<Option<InternalEventPayload>> {
|
||||
// log::debug!("Got event to app {event:?}");
|
||||
let plugin_context = event.context.to_owned();
|
||||
match event.clone().payload {
|
||||
InternalEventPayload::CopyTextRequest(req) => {
|
||||
let plugin_name = plugin_handle.info().name;
|
||||
let fallback_workspace_id = plugin_context.workspace_id.clone().or_else(|| {
|
||||
plugin_context
|
||||
.label
|
||||
.as_ref()
|
||||
.and_then(|label| app_handle.get_webview_window(label))
|
||||
.and_then(|window| workspace_from_window(&window).map(|workspace| workspace.id))
|
||||
});
|
||||
|
||||
match handle_shared_plugin_event(
|
||||
app_handle.db_manager().inner(),
|
||||
&event.payload,
|
||||
SharedPluginEventContext {
|
||||
plugin_name: &plugin_name,
|
||||
workspace_id: fallback_workspace_id.as_deref(),
|
||||
},
|
||||
) {
|
||||
GroupedPluginEvent::Handled(payload) => Ok(payload),
|
||||
GroupedPluginEvent::ToHandle(host_request) => {
|
||||
handle_host_plugin_request(
|
||||
app_handle,
|
||||
event,
|
||||
plugin_handle,
|
||||
&plugin_context,
|
||||
host_request,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_host_plugin_request<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
event: &InternalEvent,
|
||||
plugin_handle: &PluginHandle,
|
||||
plugin_context: &yaak_plugins::events::PluginContext,
|
||||
host_request: HostRequest<'_>,
|
||||
) -> Result<Option<InternalEventPayload>> {
|
||||
match host_request {
|
||||
HostRequest::ErrorResponse(resp) => {
|
||||
error!("Plugin error: {}: {:?}", resp.error, resp);
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
plugin_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!(
|
||||
"Plugin error from {}: {}",
|
||||
plugin_handle.info().name,
|
||||
resp.error
|
||||
),
|
||||
color: Some(Color::Danger),
|
||||
timeout: Some(30000),
|
||||
..Default::default()
|
||||
}),
|
||||
None,
|
||||
);
|
||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||
}
|
||||
HostRequest::ReloadResponse(req) => {
|
||||
let plugins = app_handle.db().list_plugins()?;
|
||||
for plugin in plugins {
|
||||
if plugin.directory != plugin_handle.dir {
|
||||
continue;
|
||||
}
|
||||
|
||||
let new_plugin = Plugin { updated_at: Utc::now().naive_utc(), ..plugin };
|
||||
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
|
||||
}
|
||||
|
||||
if !req.silent {
|
||||
let info = plugin_handle.info();
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
plugin_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!("Reloaded plugin {}@{}", info.name, info.version),
|
||||
icon: Some(Icon::Info),
|
||||
timeout: Some(3000),
|
||||
..Default::default()
|
||||
}),
|
||||
None,
|
||||
);
|
||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
HostRequest::CopyText(req) => {
|
||||
app_handle.clipboard().write_text(req.text.as_str())?;
|
||||
Ok(Some(InternalEventPayload::CopyTextResponse(EmptyPayload {})))
|
||||
}
|
||||
InternalEventPayload::ShowToastRequest(req) => {
|
||||
match plugin_context.label {
|
||||
HostRequest::ShowToast(req) => {
|
||||
match &plugin_context.label {
|
||||
Some(label) => app_handle.emit_to(label, "show_toast", req)?,
|
||||
None => app_handle.emit("show_toast", req)?,
|
||||
};
|
||||
Ok(Some(InternalEventPayload::ShowToastResponse(EmptyPayload {})))
|
||||
}
|
||||
InternalEventPayload::PromptTextRequest(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::PromptText(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
Ok(call_frontend(&window, event).await)
|
||||
}
|
||||
InternalEventPayload::PromptFormRequest(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
Ok(call_frontend(&window, event).await)
|
||||
HostRequest::PromptForm(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
if event.reply_id.is_some() {
|
||||
window.emit_to(window.label(), "plugin_event", event.clone())?;
|
||||
Ok(None)
|
||||
} else {
|
||||
window.emit_to(window.label(), "plugin_event", event.clone()).unwrap();
|
||||
|
||||
let event_id = event.id.clone();
|
||||
let plugin_handle = plugin_handle.clone();
|
||||
let plugin_context = plugin_context.clone();
|
||||
let window = window.clone();
|
||||
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let (tx, mut rx) = tokio::sync::mpsc::channel::<InternalEvent>(128);
|
||||
|
||||
let listener_id = window.listen(event_id, move |ev: tauri::Event| {
|
||||
let resp: InternalEvent = serde_json::from_str(ev.payload()).unwrap();
|
||||
let _ = tx.try_send(resp);
|
||||
});
|
||||
|
||||
while let Some(resp) = rx.recv().await {
|
||||
let is_done = matches!(
|
||||
&resp.payload,
|
||||
InternalEventPayload::PromptFormResponse(r) if r.done.unwrap_or(false)
|
||||
);
|
||||
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&plugin_context,
|
||||
&resp.payload,
|
||||
Some(resp.reply_id.unwrap_or_default()),
|
||||
);
|
||||
if let Err(e) = plugin_handle.send(&event_to_send).await {
|
||||
log::warn!("Failed to forward form response to plugin: {:?}", e);
|
||||
}
|
||||
|
||||
if is_done {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
window.unlisten(listener_id);
|
||||
});
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
InternalEventPayload::FindHttpResponsesRequest(req) => {
|
||||
HostRequest::FindHttpResponses(req) => {
|
||||
let http_responses = app_handle
|
||||
.db()
|
||||
.list_http_responses_for_request(&req.request_id, req.limit.map(|l| l as u64))
|
||||
@@ -70,32 +199,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
http_responses,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::ListHttpRequestsRequest(req) => {
|
||||
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
let workspace = workspace_from_window(&w)
|
||||
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
|
||||
|
||||
let http_requests = if let Some(folder_id) = req.folder_id {
|
||||
app_handle.db().list_http_requests_for_folder_recursive(&folder_id)?
|
||||
} else {
|
||||
app_handle.db().list_http_requests(&workspace.id)?
|
||||
};
|
||||
|
||||
Ok(Some(InternalEventPayload::ListHttpRequestsResponse(ListHttpRequestsResponse {
|
||||
http_requests,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::ListFoldersRequest(_req) => {
|
||||
let w = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
let workspace = workspace_from_window(&w)
|
||||
.ok_or(PluginErr("Failed to get workspace from window".into()))?;
|
||||
let folders = app_handle.db().list_folders(&workspace.id)?;
|
||||
|
||||
Ok(Some(InternalEventPayload::ListFoldersResponse(
|
||||
yaak_plugins::events::ListFoldersResponse { folders },
|
||||
)))
|
||||
}
|
||||
InternalEventPayload::UpsertModelRequest(req) => {
|
||||
HostRequest::UpsertModel(req) => {
|
||||
use AnyModel::*;
|
||||
let model = match &req.model {
|
||||
HttpRequest(m) => {
|
||||
@@ -123,7 +227,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
yaak_plugins::events::UpsertModelResponse { model },
|
||||
)))
|
||||
}
|
||||
InternalEventPayload::DeleteModelRequest(req) => {
|
||||
HostRequest::DeleteModel(req) => {
|
||||
let model = match req.model.as_str() {
|
||||
"http_request" => AnyModel::HttpRequest(
|
||||
app_handle.db().delete_http_request_by_id(&req.id, &UpdateSource::Plugin)?,
|
||||
@@ -151,14 +255,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
yaak_plugins::events::DeleteModelResponse { model },
|
||||
)))
|
||||
}
|
||||
InternalEventPayload::GetHttpRequestByIdRequest(req) => {
|
||||
let http_request = app_handle.db().get_http_request(&req.id).ok();
|
||||
Ok(Some(InternalEventPayload::GetHttpRequestByIdResponse(GetHttpRequestByIdResponse {
|
||||
http_request,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::RenderGrpcRequestRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::RenderGrpcRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
@@ -173,8 +271,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
plugin_context,
|
||||
req.purpose.clone(),
|
||||
);
|
||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let grpc_request =
|
||||
@@ -183,8 +281,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
grpc_request,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::RenderHttpRequestRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::RenderHttpRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
@@ -199,18 +297,18 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
plugin_context,
|
||||
req.purpose.clone(),
|
||||
);
|
||||
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let http_request =
|
||||
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
|
||||
render_http_request(&req.http_request, environment_chain, &cb, opt).await?;
|
||||
Ok(Some(InternalEventPayload::RenderHttpRequestResponse(RenderHttpRequestResponse {
|
||||
http_request,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::TemplateRenderRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::TemplateRender(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
@@ -235,65 +333,16 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
plugin_context,
|
||||
req.purpose.clone(),
|
||||
);
|
||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
|
||||
let data = render_json_value(req.data.clone(), environment_chain, &cb, &opt).await?;
|
||||
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
|
||||
}
|
||||
InternalEventPayload::ErrorResponse(resp) => {
|
||||
error!("Plugin error: {}: {:?}", resp.error, resp);
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
&plugin_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!(
|
||||
"Plugin error from {}: {}",
|
||||
plugin_handle.info().name,
|
||||
resp.error
|
||||
),
|
||||
color: Some(Color::Danger),
|
||||
timeout: Some(30000),
|
||||
..Default::default()
|
||||
}),
|
||||
None,
|
||||
);
|
||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||
}
|
||||
InternalEventPayload::ReloadResponse(req) => {
|
||||
let plugins = app_handle.db().list_plugins()?;
|
||||
for plugin in plugins {
|
||||
if plugin.directory != plugin_handle.dir {
|
||||
continue;
|
||||
}
|
||||
|
||||
let new_plugin = Plugin {
|
||||
updated_at: Utc::now().naive_utc(), // TODO: Add reloaded_at field to use instead
|
||||
..plugin
|
||||
};
|
||||
app_handle.db().upsert_plugin(&new_plugin, &UpdateSource::Plugin)?;
|
||||
}
|
||||
|
||||
if !req.silent {
|
||||
let info = plugin_handle.info();
|
||||
let toast_event = plugin_handle.build_event_to_send(
|
||||
&plugin_context,
|
||||
&InternalEventPayload::ShowToastRequest(ShowToastRequest {
|
||||
message: format!("Reloaded plugin {}@{}", info.name, info.version),
|
||||
icon: Some(Icon::Info),
|
||||
timeout: Some(3000),
|
||||
..Default::default()
|
||||
}),
|
||||
None,
|
||||
);
|
||||
Box::pin(handle_plugin_event(app_handle, &toast_event, plugin_handle)).await
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
InternalEventPayload::SendHttpRequestRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
let mut http_request = req.http_request;
|
||||
HostRequest::SendHttpRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
let mut http_request = req.http_request.clone();
|
||||
let workspace =
|
||||
workspace_from_window(&window).expect("Failed to get workspace_id from window URL");
|
||||
let cookie_jar = cookie_jar_from_window(&window);
|
||||
@@ -324,8 +373,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
&http_response,
|
||||
environment,
|
||||
cookie_jar,
|
||||
&mut tokio::sync::watch::channel(false).1, // No-op cancel channel
|
||||
&plugin_context,
|
||||
&mut tokio::sync::watch::channel(false).1,
|
||||
plugin_context,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -333,7 +382,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
http_response,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::OpenWindowRequest(req) => {
|
||||
HostRequest::OpenWindow(req) => {
|
||||
let (navigation_tx, mut navigation_rx) = tokio::sync::mpsc::channel(128);
|
||||
let (close_tx, mut close_rx) = tokio::sync::mpsc::channel(128);
|
||||
let win_config = CreateWindowConfig {
|
||||
@@ -348,7 +397,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
};
|
||||
if let Err(e) = create_window(app_handle, win_config) {
|
||||
let error_event = plugin_handle.build_event_to_send(
|
||||
&plugin_context,
|
||||
plugin_context,
|
||||
&InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||
error: format!("Failed to create window: {:?}", e),
|
||||
}),
|
||||
@@ -366,7 +415,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
while let Some(url) = navigation_rx.recv().await {
|
||||
let url = url.to_string();
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&plugin_context, // NOTE: Sending existing context on purpose here
|
||||
&plugin_context,
|
||||
&InternalEventPayload::WindowNavigateEvent(WindowNavigateEvent { url }),
|
||||
Some(event_id.clone()),
|
||||
);
|
||||
@@ -380,7 +429,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let plugin_handle = plugin_handle.clone();
|
||||
let plugin_context = plugin_context.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
while let Some(_) = close_rx.recv().await {
|
||||
while close_rx.recv().await.is_some() {
|
||||
let event_to_send = plugin_handle.build_event_to_send(
|
||||
&plugin_context,
|
||||
&InternalEventPayload::WindowCloseEvent,
|
||||
@@ -393,35 +442,33 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
InternalEventPayload::CloseWindowRequest(req) => {
|
||||
HostRequest::CloseWindow(req) => {
|
||||
if let Some(window) = app_handle.webview_windows().get(&req.label) {
|
||||
window.close()?;
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
InternalEventPayload::OpenExternalUrlRequest(req) => {
|
||||
HostRequest::OpenExternalUrl(req) => {
|
||||
app_handle.opener().open_url(&req.url, None::<&str>)?;
|
||||
Ok(Some(InternalEventPayload::OpenExternalUrlResponse(EmptyPayload {})))
|
||||
}
|
||||
InternalEventPayload::SetKeyValueRequest(req) => {
|
||||
let name = plugin_handle.info().name;
|
||||
app_handle.db().set_plugin_key_value(&name, &req.key, &req.value);
|
||||
Ok(Some(InternalEventPayload::SetKeyValueResponse(SetKeyValueResponse {})))
|
||||
}
|
||||
InternalEventPayload::GetKeyValueRequest(req) => {
|
||||
let name = plugin_handle.info().name;
|
||||
let value = app_handle.db().get_plugin_key_value(&name, &req.key).map(|v| v.value);
|
||||
Ok(Some(InternalEventPayload::GetKeyValueResponse(GetKeyValueResponse { value })))
|
||||
}
|
||||
InternalEventPayload::DeleteKeyValueRequest(req) => {
|
||||
let name = plugin_handle.info().name;
|
||||
let deleted = app_handle.db().delete_plugin_key_value(&name, &req.key)?;
|
||||
Ok(Some(InternalEventPayload::DeleteKeyValueResponse(DeleteKeyValueResponse {
|
||||
deleted,
|
||||
HostRequest::ListOpenWorkspaces(_) => {
|
||||
let mut workspaces = Vec::new();
|
||||
for (_, window) in app_handle.webview_windows() {
|
||||
if let Some(workspace) = workspace_from_window(&window) {
|
||||
workspaces.push(WorkspaceInfo {
|
||||
id: workspace.id.clone(),
|
||||
name: workspace.name.clone(),
|
||||
label: window.label().to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(Some(InternalEventPayload::ListOpenWorkspacesResponse(ListOpenWorkspacesResponse {
|
||||
workspaces,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::ListCookieNamesRequest(_req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::ListCookieNames(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
let names = match cookie_jar_from_window(&window) {
|
||||
None => Vec::new(),
|
||||
Some(j) => j
|
||||
@@ -434,8 +481,8 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
names,
|
||||
})))
|
||||
}
|
||||
InternalEventPayload::GetCookieValueRequest(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
HostRequest::GetCookieValue(req) => {
|
||||
let window = get_window_from_plugin_context(app_handle, plugin_context)?;
|
||||
let value = match cookie_jar_from_window(&window) {
|
||||
None => None,
|
||||
Some(j) => j.cookies.into_iter().find_map(|c| match Cookie::parse(c.raw_cookie) {
|
||||
@@ -447,12 +494,11 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
};
|
||||
Ok(Some(InternalEventPayload::GetCookieValueResponse(GetCookieValueResponse { value })))
|
||||
}
|
||||
InternalEventPayload::WindowInfoRequest(req) => {
|
||||
HostRequest::WindowInfo(req) => {
|
||||
let w = app_handle
|
||||
.get_webview_window(&req.label)
|
||||
.ok_or(PluginErr(format!("Failed to find window for {}", req.label)))?;
|
||||
|
||||
// Actually look up the data so we never return an invalid ID
|
||||
let environment_id = environment_from_window(&w).map(|m| m.id);
|
||||
let workspace_id = workspace_from_window(&w).map(|m| m.id);
|
||||
let request_id =
|
||||
@@ -470,25 +516,13 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
environment_id,
|
||||
})))
|
||||
}
|
||||
|
||||
InternalEventPayload::ListWorkspacesRequest(_) => {
|
||||
let mut workspaces = Vec::new();
|
||||
|
||||
for (_, window) in app_handle.webview_windows() {
|
||||
if let Some(workspace) = workspace_from_window(&window) {
|
||||
workspaces.push(WorkspaceInfo {
|
||||
id: workspace.id.clone(),
|
||||
name: workspace.name.clone(),
|
||||
label: window.label().to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Some(InternalEventPayload::ListWorkspacesResponse(ListWorkspacesResponse {
|
||||
workspaces,
|
||||
HostRequest::OtherRequest(req) => {
|
||||
Ok(Some(InternalEventPayload::ErrorResponse(ErrorResponse {
|
||||
error: format!(
|
||||
"Unsupported plugin request in app host handler: {}",
|
||||
req.type_name()
|
||||
),
|
||||
})))
|
||||
}
|
||||
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ use tauri::{
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
use ts_rs::TS;
|
||||
use yaak_api::yaak_api_client;
|
||||
use yaak_models::models::Plugin;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::api::{
|
||||
@@ -31,7 +32,6 @@ use yaak_plugins::events::{Color, Icon, PluginContext, ShowToastRequest};
|
||||
use yaak_plugins::install::{delete_and_uninstall, download_and_install};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::plugin_meta::get_plugin_meta;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
static EXITING: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
@@ -72,7 +72,8 @@ impl PluginUpdater {
|
||||
|
||||
info!("Checking for plugin updates");
|
||||
|
||||
let http_client = yaak_api_client(window.app_handle())?;
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
let plugins = window.app_handle().db().list_plugins()?;
|
||||
let updates = check_plugin_updates(&http_client, plugins.clone()).await?;
|
||||
|
||||
@@ -136,7 +137,8 @@ pub async fn cmd_plugins_search<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
query: &str,
|
||||
) -> Result<PluginSearchResponse> {
|
||||
let http_client = yaak_api_client(&app_handle)?;
|
||||
let app_version = app_handle.package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
Ok(search_plugins(&http_client, query).await?)
|
||||
}
|
||||
|
||||
@@ -147,7 +149,8 @@ pub async fn cmd_plugins_install<R: Runtime>(
|
||||
version: Option<String>,
|
||||
) -> Result<()> {
|
||||
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
||||
let http_client = yaak_api_client(window.app_handle())?;
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
let query_manager = window.state::<yaak_models::query_manager::QueryManager>();
|
||||
let plugin_context = window.plugin_context();
|
||||
download_and_install(
|
||||
@@ -177,7 +180,8 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
|
||||
pub async fn cmd_plugins_updates<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<PluginUpdatesResponse> {
|
||||
let http_client = yaak_api_client(&app_handle)?;
|
||||
let app_version = app_handle.package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
let plugins = app_handle.db().list_plugins()?;
|
||||
Ok(check_plugin_updates(&http_client, plugins).await?)
|
||||
}
|
||||
@@ -186,7 +190,8 @@ pub async fn cmd_plugins_updates<R: Runtime>(
|
||||
pub async fn cmd_plugins_update_all<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<Vec<PluginNameVersion>> {
|
||||
let http_client = yaak_api_client(window.app_handle())?;
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
let plugins = window.db().list_plugins()?;
|
||||
|
||||
// Get list of available updates (already filtered to only registry plugins)
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
use log::info;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_models::models::{
|
||||
Environment, GrpcRequest, HttpRequest, HttpRequestHeader, HttpUrlParameter,
|
||||
};
|
||||
pub use yaak::render::render_http_request;
|
||||
use yaak_models::models::{Environment, GrpcRequest, HttpRequestHeader};
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_templates::{RenderOptions, TemplateCallback, parse_and_render, render_json_value_raw};
|
||||
|
||||
@@ -38,6 +36,9 @@ pub async fn render_grpc_request<T: TemplateCallback>(
|
||||
|
||||
let mut metadata = Vec::new();
|
||||
for p in r.metadata.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
metadata.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
||||
@@ -82,82 +83,3 @@ pub async fn render_grpc_request<T: TemplateCallback>(
|
||||
|
||||
Ok(GrpcRequest { url, metadata, authentication, ..r.to_owned() })
|
||||
}
|
||||
|
||||
pub async fn render_http_request<T: TemplateCallback>(
|
||||
r: &HttpRequest,
|
||||
environment_chain: Vec<Environment>,
|
||||
cb: &T,
|
||||
opt: &RenderOptions,
|
||||
) -> yaak_templates::error::Result<HttpRequest> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for p in r.url_parameters.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for p in r.headers.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, &opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, &opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (k, v) in r.body.clone() {
|
||||
body.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
match r.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(tmpl)) => {
|
||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, &opt)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
info!(
|
||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (k, v) in r.authentication.clone() {
|
||||
if k == "disabled" {
|
||||
auth.insert(k, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(k, render_json_value_raw(v, vars, cb, &opt).await?);
|
||||
}
|
||||
}
|
||||
}
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, &opt).await?;
|
||||
|
||||
// This doesn't fit perfectly with the concept of "rendering" but it kind of does
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
||||
}
|
||||
|
||||
@@ -15,6 +15,9 @@ use ts_rs::TS;
|
||||
use yaak_models::util::generate_id;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
use url::Url;
|
||||
use yaak_api::get_system_proxy_url;
|
||||
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::is_dev;
|
||||
|
||||
@@ -87,8 +90,13 @@ impl YaakUpdater {
|
||||
info!("Checking for updates mode={} autodl={}", mode, auto_download);
|
||||
|
||||
let w = window.clone();
|
||||
let update_check_result = w
|
||||
.updater_builder()
|
||||
let mut updater_builder = w.updater_builder();
|
||||
if let Some(proxy_url) = get_system_proxy_url() {
|
||||
if let Ok(url) = Url::parse(&proxy_url) {
|
||||
updater_builder = updater_builder.proxy(url);
|
||||
}
|
||||
}
|
||||
let update_check_result = updater_builder
|
||||
.on_before_exit(move || {
|
||||
// Kill plugin manager before exit or NSIS installer will fail to replace sidecar
|
||||
// while it's running.
|
||||
@@ -111,6 +119,7 @@ impl YaakUpdater {
|
||||
UpdateTrigger::User => "user",
|
||||
},
|
||||
)?
|
||||
.header("X-Install-Mode", detect_install_mode().unwrap_or("unknown"))?
|
||||
.build()?
|
||||
.check()
|
||||
.await;
|
||||
@@ -353,6 +362,22 @@ pub async fn download_update_idempotent<R: Runtime>(
|
||||
Ok(dl_path)
|
||||
}
|
||||
|
||||
/// Detect the installer type so the update server can serve the correct artifact.
|
||||
fn detect_install_mode() -> Option<&'static str> {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Ok(exe) = std::env::current_exe() {
|
||||
let path = exe.to_string_lossy().to_lowercase();
|
||||
if path.starts_with(r"c:\program files") {
|
||||
return Some("nsis-machine");
|
||||
}
|
||||
}
|
||||
return Some("nsis");
|
||||
}
|
||||
#[allow(unreachable_code)]
|
||||
None
|
||||
}
|
||||
|
||||
pub async fn install_update_maybe_download<R: Runtime>(
|
||||
window: &WebviewWindow<R>,
|
||||
update: &Update,
|
||||
|
||||
@@ -8,11 +8,11 @@ use std::fs;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
||||
use yaak_api::yaak_api_client;
|
||||
use yaak_models::util::generate_id;
|
||||
use yaak_plugins::events::{Color, ShowToastRequest};
|
||||
use yaak_plugins::install::download_and_install;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
@@ -46,7 +46,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||
|
||||
let plugin_manager = Arc::new((*window.state::<PluginManager>()).clone());
|
||||
let query_manager = app_handle.db_manager();
|
||||
let http_client = yaak_api_client(app_handle)?;
|
||||
let app_version = app_handle.package_info().version.to_string();
|
||||
let http_client = yaak_api_client(&app_version)?;
|
||||
let plugin_context = window.plugin_context();
|
||||
let pv = download_and_install(
|
||||
plugin_manager,
|
||||
@@ -86,7 +87,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let resp = yaak_api_client(app_handle)?.get(file_url).send().await?;
|
||||
let app_version = app_handle.package_info().version.to_string();
|
||||
let resp = yaak_api_client(&app_version)?.get(file_url).send().await?;
|
||||
let json = resp.bytes().await?;
|
||||
let p = app_handle
|
||||
.path()
|
||||
|
||||
@@ -162,11 +162,16 @@ pub(crate) fn create_window<R: Runtime>(
|
||||
"dev.reset_size" => webview_window
|
||||
.set_size(LogicalSize::new(DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT))
|
||||
.unwrap(),
|
||||
"dev.reset_size_record" => {
|
||||
"dev.reset_size_16x9" => {
|
||||
let width = webview_window.outer_size().unwrap().width;
|
||||
let height = width * 9 / 16;
|
||||
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
|
||||
}
|
||||
"dev.reset_size_16x10" => {
|
||||
let width = webview_window.outer_size().unwrap().width;
|
||||
let height = width * 10 / 16;
|
||||
webview_window.set_size(PhysicalSize::new(width, height)).unwrap()
|
||||
}
|
||||
"dev.refresh" => webview_window.eval("location.reload()").unwrap(),
|
||||
"dev.generate_theme_css" => {
|
||||
w.emit("generate_theme_css", true).unwrap();
|
||||
|
||||
@@ -153,9 +153,11 @@ pub fn app_menu<R: Runtime>(app_handle: &AppHandle<R>) -> tauri::Result<Menu<R>>
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.reset_size".to_string(), "Reset Size")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id("dev.reset_size_16x9".to_string(), "Resize to 16x9")
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id(
|
||||
"dev.reset_size_record".to_string(),
|
||||
"Reset Size 16x9",
|
||||
"dev.reset_size_16x10".to_string(),
|
||||
"Resize to 16x10",
|
||||
)
|
||||
.build(app_handle)?,
|
||||
&MenuItemBuilder::with_id(
|
||||
|
||||
@@ -28,52 +28,6 @@ use yaak_templates::{RenderErrorBehavior, RenderOptions};
|
||||
use yaak_tls::find_client_certificate;
|
||||
use yaak_ws::{WebsocketManager, render_websocket_request};
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_upsert_request<R: Runtime>(
|
||||
request: WebsocketRequest,
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketRequest> {
|
||||
Ok(app_handle
|
||||
.db()
|
||||
.upsert_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_duplicate_request<R: Runtime>(
|
||||
request_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketRequest> {
|
||||
let db = app_handle.db();
|
||||
let request = db.get_websocket_request(request_id)?;
|
||||
Ok(db.duplicate_websocket_request(&request, &UpdateSource::from_window_label(window.label()))?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_delete_request<R: Runtime>(
|
||||
request_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketRequest> {
|
||||
Ok(app_handle.db().delete_websocket_request_by_id(
|
||||
request_id,
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_delete_connection<R: Runtime>(
|
||||
connection_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketConnection> {
|
||||
Ok(app_handle.db().delete_websocket_connection_by_id(
|
||||
connection_id,
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_delete_connections<R: Runtime>(
|
||||
request_id: &str,
|
||||
@@ -86,30 +40,6 @@ pub async fn cmd_ws_delete_connections<R: Runtime>(
|
||||
)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_list_events<R: Runtime>(
|
||||
connection_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<WebsocketEvent>> {
|
||||
Ok(app_handle.db().list_websocket_events(connection_id)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_list_requests<R: Runtime>(
|
||||
workspace_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<WebsocketRequest>> {
|
||||
Ok(app_handle.db().list_websocket_requests(workspace_id)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_list_connections<R: Runtime>(
|
||||
workspace_id: &str,
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<Vec<WebsocketConnection>> {
|
||||
Ok(app_handle.db().list_websocket_connections(workspace_id)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_ws_send<R: Runtime>(
|
||||
connection_id: &str,
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
{
|
||||
"build": {
|
||||
"features": [
|
||||
"updater",
|
||||
"license"
|
||||
]
|
||||
"features": ["updater", "license"]
|
||||
},
|
||||
"app": {
|
||||
"security": {
|
||||
@@ -11,12 +8,8 @@
|
||||
"default",
|
||||
{
|
||||
"identifier": "release",
|
||||
"windows": [
|
||||
"*"
|
||||
],
|
||||
"permissions": [
|
||||
"yaak-license:default"
|
||||
]
|
||||
"windows": ["*"],
|
||||
"permissions": ["yaak-license:default"]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -39,14 +32,7 @@
|
||||
"createUpdaterArtifacts": true,
|
||||
"longDescription": "A cross-platform desktop app for interacting with REST, GraphQL, and gRPC",
|
||||
"shortDescription": "Play with APIs, intuitively",
|
||||
"targets": [
|
||||
"app",
|
||||
"appimage",
|
||||
"deb",
|
||||
"dmg",
|
||||
"nsis",
|
||||
"rpm"
|
||||
],
|
||||
"targets": ["app", "appimage", "deb", "dmg", "nsis", "rpm"],
|
||||
"macOS": {
|
||||
"minimumSystemVersion": "13.0",
|
||||
"exceptionDomain": "",
|
||||
@@ -58,10 +44,16 @@
|
||||
},
|
||||
"linux": {
|
||||
"deb": {
|
||||
"desktopTemplate": "./template.desktop"
|
||||
"desktopTemplate": "./template.desktop",
|
||||
"files": {
|
||||
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
|
||||
}
|
||||
},
|
||||
"rpm": {
|
||||
"desktopTemplate": "./template.desktop"
|
||||
"desktopTemplate": "./template.desktop",
|
||||
"files": {
|
||||
"/usr/share/metainfo/app.yaak.Yaak.metainfo.xml": "../../flatpak/app.yaak.Yaak.metainfo.xml"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ thiserror = { workspace = true }
|
||||
ts-rs = { workspace = true }
|
||||
yaak-common = { workspace = true }
|
||||
yaak-models = { workspace = true }
|
||||
yaak-tauri-utils = { workspace = true }
|
||||
yaak-api = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
tauri-plugin = { workspace = true, features = ["build"] }
|
||||
|
||||
@@ -16,7 +16,7 @@ pub enum Error {
|
||||
ModelError(#[from] yaak_models::error::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
TauriUtilsError(#[from] yaak_tauri_utils::error::Error),
|
||||
ApiError(#[from] yaak_api::Error),
|
||||
|
||||
#[error("Internal server error")]
|
||||
ServerError,
|
||||
|
||||
@@ -7,11 +7,11 @@ use std::ops::Add;
|
||||
use std::time::Duration;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
||||
use ts_rs::TS;
|
||||
use yaak_api::yaak_api_client;
|
||||
use yaak_common::platform::get_os_str;
|
||||
use yaak_models::db_context::DbContext;
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
||||
@@ -118,11 +118,12 @@ pub async fn activate_license<R: Runtime>(
|
||||
license_key: &str,
|
||||
) -> Result<()> {
|
||||
info!("Activating license {}", license_key);
|
||||
let client = reqwest::Client::new();
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let client = yaak_api_client(&app_version)?;
|
||||
let payload = ActivateLicenseRequestPayload {
|
||||
license_key: license_key.to_string(),
|
||||
app_platform: get_os_str().to_string(),
|
||||
app_version: window.app_handle().package_info().version.to_string(),
|
||||
app_version,
|
||||
};
|
||||
let response = client.post(build_url("/licenses/activate")).json(&payload).send().await?;
|
||||
|
||||
@@ -155,12 +156,11 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
||||
let app_handle = window.app_handle();
|
||||
let activation_id = get_activation_id(app_handle).await;
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let client = yaak_api_client(&app_version)?;
|
||||
let path = format!("/licenses/activations/{}/deactivate", activation_id);
|
||||
let payload = DeactivateLicenseRequestPayload {
|
||||
app_platform: get_os_str().to_string(),
|
||||
app_version: window.app_handle().package_info().version.to_string(),
|
||||
};
|
||||
let payload =
|
||||
DeactivateLicenseRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
||||
|
||||
if response.status().is_client_error() {
|
||||
@@ -186,10 +186,9 @@ pub async fn deactivate_license<R: Runtime>(window: &WebviewWindow<R>) -> Result
|
||||
}
|
||||
|
||||
pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<LicenseCheckStatus> {
|
||||
let payload = CheckActivationRequestPayload {
|
||||
app_platform: get_os_str().to_string(),
|
||||
app_version: window.package_info().version.to_string(),
|
||||
};
|
||||
let app_version = window.app_handle().package_info().version.to_string();
|
||||
let payload =
|
||||
CheckActivationRequestPayload { app_platform: get_os_str().to_string(), app_version };
|
||||
let activation_id = get_activation_id(window.app_handle()).await;
|
||||
|
||||
let settings = window.db().get_settings();
|
||||
@@ -204,7 +203,7 @@ pub async fn check_license<R: Runtime>(window: &WebviewWindow<R>) -> Result<Lice
|
||||
(true, _) => {
|
||||
info!("Checking license activation");
|
||||
// A license has been activated, so let's check the license server
|
||||
let client = yaak_api_client(window.app_handle())?;
|
||||
let client = yaak_api_client(&payload.app_version)?;
|
||||
let path = format!("/licenses/activations/{activation_id}/check-v2");
|
||||
let response = client.post(build_url(&path)).json(&payload).send().await?;
|
||||
|
||||
|
||||
@@ -6,8 +6,4 @@ publish = false
|
||||
|
||||
[dependencies]
|
||||
tauri = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["gzip"] }
|
||||
thiserror = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
regex = "1.11.0"
|
||||
yaak-common = { workspace = true }
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
use crate::error::Result;
|
||||
use reqwest::Client;
|
||||
use std::time::Duration;
|
||||
use tauri::http::{HeaderMap, HeaderValue};
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use yaak_common::platform::{get_ua_arch, get_ua_platform};
|
||||
|
||||
pub fn yaak_api_client<R: Runtime>(app_handle: &AppHandle<R>) -> Result<Client> {
|
||||
let platform = get_ua_platform();
|
||||
let version = app_handle.package_info().version.clone();
|
||||
let arch = get_ua_arch();
|
||||
let ua = format!("Yaak/{version} ({platform}; {arch})");
|
||||
let mut default_headers = HeaderMap::new();
|
||||
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
|
||||
|
||||
let client = reqwest::ClientBuilder::new()
|
||||
.timeout(Duration::from_secs(20))
|
||||
.default_headers(default_headers)
|
||||
.gzip(true)
|
||||
.user_agent(ua)
|
||||
.build()?;
|
||||
|
||||
Ok(client)
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
use serde::{Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
ReqwestError(#[from] reqwest::Error),
|
||||
}
|
||||
|
||||
impl Serialize for Error {
|
||||
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(self.to_string().as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
@@ -1,3 +1 @@
|
||||
pub mod api_client;
|
||||
pub mod error;
|
||||
pub mod window;
|
||||
|
||||
12
crates/yaak-api/Cargo.toml
Normal file
12
crates/yaak-api/Cargo.toml
Normal file
@@ -0,0 +1,12 @@
|
||||
[package]
|
||||
name = "yaak-api"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
|
||||
[dependencies]
|
||||
log = { workspace = true }
|
||||
reqwest = { workspace = true, features = ["gzip"] }
|
||||
sysproxy = "0.3"
|
||||
thiserror = { workspace = true }
|
||||
yaak-common = { workspace = true }
|
||||
9
crates/yaak-api/src/error.rs
Normal file
9
crates/yaak-api/src/error.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
ReqwestError(#[from] reqwest::Error),
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
70
crates/yaak-api/src/lib.rs
Normal file
70
crates/yaak-api/src/lib.rs
Normal file
@@ -0,0 +1,70 @@
|
||||
mod error;
|
||||
|
||||
pub use error::{Error, Result};
|
||||
|
||||
use log::{debug, warn};
|
||||
use reqwest::Client;
|
||||
use reqwest::header::{HeaderMap, HeaderValue};
|
||||
use std::time::Duration;
|
||||
use yaak_common::platform::{get_ua_arch, get_ua_platform};
|
||||
|
||||
/// Build a reqwest Client configured for Yaak's own API calls.
|
||||
///
|
||||
/// Includes a custom User-Agent, JSON accept header, 20s timeout, gzip,
|
||||
/// and automatic OS-level proxy detection via sysproxy.
|
||||
pub fn yaak_api_client(version: &str) -> Result<Client> {
|
||||
let platform = get_ua_platform();
|
||||
let arch = get_ua_arch();
|
||||
let ua = format!("Yaak/{version} ({platform}; {arch})");
|
||||
|
||||
let mut default_headers = HeaderMap::new();
|
||||
default_headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
|
||||
|
||||
let mut builder = reqwest::ClientBuilder::new()
|
||||
.timeout(Duration::from_secs(20))
|
||||
.default_headers(default_headers)
|
||||
.gzip(true)
|
||||
.user_agent(ua);
|
||||
|
||||
if let Some(sys) = get_enabled_system_proxy() {
|
||||
let proxy_url = format!("http://{}:{}", sys.host, sys.port);
|
||||
match reqwest::Proxy::all(&proxy_url) {
|
||||
Ok(p) => {
|
||||
let p = if !sys.bypass.is_empty() {
|
||||
p.no_proxy(reqwest::NoProxy::from_string(&sys.bypass))
|
||||
} else {
|
||||
p
|
||||
};
|
||||
builder = builder.proxy(p);
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to configure system proxy: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(builder.build()?)
|
||||
}
|
||||
|
||||
/// Returns the system proxy URL if one is enabled, e.g. `http://host:port`.
|
||||
pub fn get_system_proxy_url() -> Option<String> {
|
||||
let sys = get_enabled_system_proxy()?;
|
||||
Some(format!("http://{}:{}", sys.host, sys.port))
|
||||
}
|
||||
|
||||
fn get_enabled_system_proxy() -> Option<sysproxy::Sysproxy> {
|
||||
match sysproxy::Sysproxy::get_system_proxy() {
|
||||
Ok(sys) if sys.enable => {
|
||||
debug!("Detected system proxy: http://{}:{}", sys.host, sys.port);
|
||||
Some(sys)
|
||||
}
|
||||
Ok(_) => {
|
||||
debug!("System proxy detected but not enabled");
|
||||
None
|
||||
}
|
||||
Err(e) => {
|
||||
debug!("Could not detect system proxy: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -11,3 +11,7 @@ export function revealWorkspaceKey(workspaceId: string) {
|
||||
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
|
||||
return invoke<void>('cmd_set_workspace_key', args);
|
||||
}
|
||||
|
||||
export function disableEncryption(workspaceId: string) {
|
||||
return invoke<void>('cmd_disable_encryption', { workspaceId });
|
||||
}
|
||||
|
||||
@@ -115,6 +115,35 @@ impl EncryptionManager {
|
||||
self.set_workspace_key(workspace_id, &wkey)
|
||||
}
|
||||
|
||||
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
|
||||
info!("Disabling encryption for {workspace_id}");
|
||||
|
||||
self.query_manager.with_tx::<(), Error>(|tx| {
|
||||
let workspace = tx.get_workspace(workspace_id)?;
|
||||
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
|
||||
|
||||
// Clear encryption challenge on workspace
|
||||
tx.upsert_workspace(
|
||||
&Workspace { encryption_key_challenge: None, ..workspace },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
// Clear encryption key on workspace meta
|
||||
tx.upsert_workspace_meta(
|
||||
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Remove from cache
|
||||
let mut cache = self.cached_workspace_keys.lock().unwrap();
|
||||
cache.remove(workspace_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
|
||||
{
|
||||
let cache = self.cached_workspace_keys.lock().unwrap();
|
||||
|
||||
@@ -6,7 +6,7 @@ publish = false
|
||||
|
||||
[dependencies]
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
git2 = { version = "0.20.0", features = ["vendored-libgit2", "vendored-openssl"] }
|
||||
git2 = { version = "0.20.4", features = ["vendored-libgit2", "vendored-openssl"] }
|
||||
log = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
|
||||
8
crates/yaak-git/bindings/gen_git.ts
generated
8
crates/yaak-git/bindings/gen_git.ts
generated
@@ -1,6 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { SyncModel } from "./gen_models";
|
||||
|
||||
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
|
||||
|
||||
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||
|
||||
export type GitAuthor = { name: string | null, email: string | null, };
|
||||
|
||||
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };
|
||||
@@ -11,8 +15,8 @@ export type GitStatus = "untracked" | "conflict" | "current" | "modified" | "rem
|
||||
|
||||
export type GitStatusEntry = { relaPath: string, status: GitStatus, staged: boolean, prev: SyncModel | null, next: SyncModel | null, };
|
||||
|
||||
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, };
|
||||
export type GitStatusSummary = { path: string, headRef: string | null, headRefShorthand: string | null, entries: Array<GitStatusEntry>, origins: Array<string>, localBranches: Array<string>, remoteBranches: Array<string>, ahead: number, behind: number, };
|
||||
|
||||
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||
export type PullResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, } | { "type": "diverged", remote: string, branch: string, } | { "type": "uncommitted_changes" };
|
||||
|
||||
export type PushResult = { "type": "success", message: string, } | { "type": "up_to_date" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||
|
||||
@@ -3,40 +3,59 @@ import { invoke } from '@tauri-apps/api/core';
|
||||
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
|
||||
import { queryClient } from '@yaakapp/app/lib/queryClient';
|
||||
import { useMemo } from 'react';
|
||||
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
import { showToast } from '@yaakapp/app/lib/toast';
|
||||
|
||||
export * from './bindings/gen_git';
|
||||
export * from './bindings/gen_models';
|
||||
|
||||
export interface GitCredentials {
|
||||
username: string;
|
||||
password: string;
|
||||
}
|
||||
|
||||
export type DivergedStrategy = 'force_reset' | 'merge' | 'cancel';
|
||||
|
||||
export type UncommittedChangesStrategy = 'reset' | 'cancel';
|
||||
|
||||
export interface GitCallbacks {
|
||||
addRemote: () => Promise<GitRemote | null>;
|
||||
promptCredentials: (
|
||||
result: Extract<PushResult, { type: 'needs_credentials' }>,
|
||||
) => Promise<GitCredentials | null>;
|
||||
promptDiverged: (
|
||||
result: Extract<PullResult, { type: 'diverged' }>,
|
||||
) => Promise<DivergedStrategy>;
|
||||
promptUncommittedChanges: () => Promise<UncommittedChangesStrategy>;
|
||||
forceSync: () => Promise<void>;
|
||||
}
|
||||
|
||||
const onSuccess = () => queryClient.invalidateQueries({ queryKey: ['git'] });
|
||||
|
||||
export function useGit(dir: string, callbacks: GitCallbacks) {
|
||||
export function useGit(dir: string, callbacks: GitCallbacks, refreshKey?: string) {
|
||||
const mutations = useMemo(() => gitMutations(dir, callbacks), [dir, callbacks]);
|
||||
const fetchAll = useQuery<void, string>({
|
||||
queryKey: ['git', 'fetch_all', dir, refreshKey],
|
||||
queryFn: () => invoke('cmd_git_fetch_all', { dir }),
|
||||
refetchInterval: 10 * 60_000,
|
||||
});
|
||||
return [
|
||||
{
|
||||
remotes: useQuery<GitRemote[], string>({
|
||||
queryKey: ['git', 'remotes', dir],
|
||||
queryKey: ['git', 'remotes', dir, refreshKey],
|
||||
queryFn: () => getRemotes(dir),
|
||||
placeholderData: (prev) => prev,
|
||||
}),
|
||||
log: useQuery<GitCommit[], string>({
|
||||
queryKey: ['git', 'log', dir],
|
||||
queryKey: ['git', 'log', dir, refreshKey],
|
||||
queryFn: () => invoke('cmd_git_log', { dir }),
|
||||
placeholderData: (prev) => prev,
|
||||
}),
|
||||
status: useQuery<GitStatusSummary, string>({
|
||||
refetchOnMount: true,
|
||||
queryKey: ['git', 'status', dir],
|
||||
queryKey: ['git', 'status', dir, refreshKey, fetchAll.dataUpdatedAt],
|
||||
queryFn: () => invoke('cmd_git_status', { dir }),
|
||||
placeholderData: (prev) => prev,
|
||||
}),
|
||||
},
|
||||
mutations,
|
||||
@@ -59,7 +78,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
@@ -69,6 +87,15 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
return invoke<PushResult>('cmd_git_push', { dir });
|
||||
};
|
||||
|
||||
const handleError = (err: unknown) => {
|
||||
showToast({
|
||||
id: `${err}`,
|
||||
message: `${err}`,
|
||||
color: 'danger',
|
||||
timeout: 5000,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
init: createFastMutation<void, string, void>({
|
||||
mutationKey: ['git', 'init'],
|
||||
@@ -90,21 +117,31 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
branch: createFastMutation<void, string, { branch: string }>({
|
||||
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
|
||||
mutationKey: ['git', 'branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
|
||||
mergeBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'merge', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
|
||||
mutationKey: ['git', 'delete-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'delete-remote-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
|
||||
mutationKey: ['git', 'rename-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
|
||||
mutationKey: ['git', 'checkout', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
|
||||
@@ -123,11 +160,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
},
|
||||
onSuccess,
|
||||
}),
|
||||
fetchAll: createFastMutation<string, string, void>({
|
||||
mutationKey: ['git', 'checkout', dir],
|
||||
mutationFn: () => invoke('cmd_git_fetch_all', { dir }),
|
||||
onSuccess,
|
||||
}),
|
||||
|
||||
push: createFastMutation<PushResult, string, void>({
|
||||
mutationKey: ['git', 'push', dir],
|
||||
mutationFn: push,
|
||||
@@ -137,21 +170,51 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
mutationKey: ['git', 'pull', dir],
|
||||
async mutationFn() {
|
||||
const result = await invoke<PullResult>('cmd_git_pull', { dir });
|
||||
if (result.type !== 'needs_credentials') return result;
|
||||
|
||||
// Needs credentials, prompt for them
|
||||
const creds = await callbacks.promptCredentials(result);
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
if (result.type === 'needs_credentials') {
|
||||
const creds = await callbacks.promptCredentials(result);
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
});
|
||||
await invoke('cmd_git_add_credential', {
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
});
|
||||
|
||||
// Pull again
|
||||
return invoke<PullResult>('cmd_git_pull', { dir });
|
||||
// Pull again after credentials
|
||||
return invoke<PullResult>('cmd_git_pull', { dir });
|
||||
}
|
||||
|
||||
if (result.type === 'uncommitted_changes') {
|
||||
callbacks.promptUncommittedChanges().then(async (strategy) => {
|
||||
if (strategy === 'cancel') return;
|
||||
|
||||
await invoke('cmd_git_reset_changes', { dir });
|
||||
return invoke<PullResult>('cmd_git_pull', { dir });
|
||||
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
|
||||
}
|
||||
|
||||
if (result.type === 'diverged') {
|
||||
callbacks.promptDiverged(result).then((strategy) => {
|
||||
if (strategy === 'cancel') return;
|
||||
|
||||
if (strategy === 'force_reset') {
|
||||
return invoke<PullResult>('cmd_git_pull_force_reset', {
|
||||
dir,
|
||||
remote: result.remote,
|
||||
branch: result.branch,
|
||||
});
|
||||
}
|
||||
|
||||
return invoke<PullResult>('cmd_git_pull_merge', {
|
||||
dir,
|
||||
remote: result.remote,
|
||||
branch: result.branch,
|
||||
});
|
||||
}).then(async () => { onSuccess(); await callbacks.forceSync(); }, handleError);
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
onSuccess,
|
||||
}),
|
||||
@@ -160,9 +223,39 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
mutationFn: (args) => invoke('cmd_git_unstage', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
resetChanges: createFastMutation<void, string, void>({
|
||||
mutationKey: ['git', 'reset-changes', dir],
|
||||
mutationFn: () => invoke('cmd_git_reset_changes', { dir }),
|
||||
onSuccess,
|
||||
}),
|
||||
} as const;
|
||||
};
|
||||
|
||||
async function getRemotes(dir: string) {
|
||||
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone a git repository, prompting for credentials if needed.
|
||||
*/
|
||||
export async function gitClone(
|
||||
url: string,
|
||||
dir: string,
|
||||
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
|
||||
): Promise<CloneResult> {
|
||||
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
if (result.type !== 'needs_credentials') return result;
|
||||
|
||||
// Prompt for credentials
|
||||
const creds = await promptCredentials({ url: result.url, error: result.error });
|
||||
if (creds == null) return {type: 'cancelled'};
|
||||
|
||||
// Store credentials and retry
|
||||
await invoke('cmd_git_add_credential', {
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
});
|
||||
|
||||
return invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
}
|
||||
|
||||
@@ -5,7 +5,15 @@ use std::process::Stdio;
|
||||
use tokio::process::Command;
|
||||
use yaak_common::command::new_xplatform_command;
|
||||
|
||||
/// Create a git command that runs in the specified directory
|
||||
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
let mut cmd = new_binary_command_global().await?;
|
||||
cmd.arg("-C").arg(dir);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
/// Create a git command without a specific directory (for global operations)
|
||||
pub(crate) async fn new_binary_command_global() -> Result<Command> {
|
||||
// 1. Probe that `git` exists and is runnable
|
||||
let mut probe = new_xplatform_command("git");
|
||||
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
|
||||
@@ -17,8 +25,6 @@ pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
}
|
||||
|
||||
// 2. Build the reusable git command
|
||||
let mut cmd = new_xplatform_command("git");
|
||||
cmd.arg("-C").arg(dir);
|
||||
|
||||
let cmd = new_xplatform_command("git");
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
@@ -1,99 +1,153 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::merge::do_merge;
|
||||
use crate::repository::open_repo;
|
||||
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
|
||||
use git2::BranchType;
|
||||
use git2::build::CheckoutBuilder;
|
||||
use log::info;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
if branch_name.starts_with("origin/") {
|
||||
return git_checkout_remote_branch(dir, branch_name, force);
|
||||
}
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "snake_case", tag = "type")]
|
||||
#[ts(export, export_to = "gen_git.ts")]
|
||||
pub enum BranchDeleteResult {
|
||||
Success { message: String },
|
||||
NotFullyMerged,
|
||||
}
|
||||
|
||||
let repo = open_repo(dir)?;
|
||||
let branch = get_branch_by_name(&repo, branch_name)?;
|
||||
let branch_ref = branch.into_reference();
|
||||
let branch_tree = branch_ref.peel_to_tree()?;
|
||||
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
|
||||
let mut options = CheckoutBuilder::default();
|
||||
let mut args = vec!["checkout"];
|
||||
if force {
|
||||
options.force();
|
||||
args.push("--force");
|
||||
}
|
||||
args.push(branch_name);
|
||||
|
||||
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
|
||||
repo.set_head(branch_ref.name().unwrap())?;
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(&args)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(branch_name.to_string())
|
||||
}
|
||||
|
||||
pub(crate) fn git_checkout_remote_branch(
|
||||
dir: &Path,
|
||||
branch_name: &str,
|
||||
force: bool,
|
||||
) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
let repo = open_repo(dir)?;
|
||||
|
||||
let refname = format!("refs/remotes/origin/{}", branch_name);
|
||||
let remote_ref = repo.find_reference(&refname)?;
|
||||
let commit = remote_ref.peel_to_commit()?;
|
||||
|
||||
let mut new_branch = repo.branch(branch_name, &commit, false)?;
|
||||
let upstream_name = format!("origin/{}", branch_name);
|
||||
new_branch.set_upstream(Some(&upstream_name))?;
|
||||
|
||||
git_checkout_branch(dir, branch_name, force)
|
||||
}
|
||||
|
||||
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let head = match repo.head() {
|
||||
Ok(h) => h,
|
||||
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
|
||||
let msg = "Cannot create branch when there are no commits";
|
||||
return Err(GenericError(msg.into()));
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
let head = head.peel_to_commit()?;
|
||||
|
||||
repo.branch(name, &head, false)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let mut branch = get_branch_by_name(&repo, name)?;
|
||||
|
||||
if branch.is_head() {
|
||||
info!("Deleting head branch");
|
||||
let branches = repo.branches(Some(BranchType::Local))?;
|
||||
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
|
||||
let other_branch = match other_branch {
|
||||
None => return Err(GenericError("Cannot delete only branch".into())),
|
||||
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
|
||||
};
|
||||
|
||||
git_checkout_branch(dir, &other_branch, true)?;
|
||||
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
cmd.arg("branch").arg(name);
|
||||
if let Some(base_branch) = base {
|
||||
cmd.arg(base_branch);
|
||||
}
|
||||
|
||||
branch.delete()?;
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let local_branch = get_current_branch(&repo)?.unwrap();
|
||||
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
|
||||
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
|
||||
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
|
||||
let out =
|
||||
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
|
||||
|
||||
do_merge(&repo, &local_branch, &commit_to_merge)?;
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
|
||||
return Ok(BranchDeleteResult::NotFullyMerged);
|
||||
}
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(BranchDeleteResult::Success { message: combined })
|
||||
}
|
||||
|
||||
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["merge", name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
// Check for merge conflicts
|
||||
if combined.to_lowercase().contains("conflict") {
|
||||
return Err(GenericError(
|
||||
"Merge conflicts detected. Please resolve them manually.".to_string(),
|
||||
));
|
||||
}
|
||||
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
// Remote branch names come in as "origin/branch-name", extract the branch name
|
||||
let branch_name = name.trim_start_matches("origin/");
|
||||
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["push", "origin", "--delete", branch_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["branch", "-m", old_name, new_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
53
crates/yaak-git/src/clone.rs
Normal file
53
crates/yaak-git/src/clone.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use ts_rs::TS;
|
||||
|
||||
/// Outcome of a `git clone` attempt, serialized for the frontend
/// (exported to TypeScript via ts-rs into `gen_git.ts`, tagged by `type`).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
    /// The clone completed successfully.
    Success,
    /// The clone was cancelled — presumably produced by a caller rather than
    /// `git_clone` itself, which never returns it here; verify against callers.
    Cancelled,
    /// The clone failed in a way that suggests missing or invalid credentials;
    /// the caller should prompt for credentials for `url`. `error` carries
    /// git's combined output when available.
    NeedsCredentials { url: String, error: Option<String> },
}
||||
|
||||
pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
|
||||
let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
|
||||
fs::create_dir_all(parent)
|
||||
.map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
|
||||
let mut cmd = new_binary_command(parent).await?;
|
||||
cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");
|
||||
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
let combined_lower = combined.to_lowercase();
|
||||
|
||||
info!("Cloned status={}: {combined}", out.status);
|
||||
|
||||
if !out.status.success() {
|
||||
// Check for credentials error
|
||||
if combined_lower.contains("could not read") {
|
||||
return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
|
||||
}
|
||||
if combined_lower.contains("unable to access")
|
||||
|| combined_lower.contains("authentication failed")
|
||||
{
|
||||
return Ok(CloneResult::NeedsCredentials {
|
||||
url: url.to_string(),
|
||||
error: Some(combined.to_string()),
|
||||
});
|
||||
}
|
||||
return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(CloneResult::Success)
|
||||
}
|
||||
@@ -1,24 +1,18 @@
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::binary::new_binary_command_global;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use url::Url;
|
||||
|
||||
pub async fn git_add_credential(
|
||||
dir: &Path,
|
||||
remote_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<()> {
|
||||
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
|
||||
let url = Url::parse(remote_url)
|
||||
.map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
|
||||
let protocol = url.scheme();
|
||||
let host = url.host_str().unwrap();
|
||||
let path = Some(url.path());
|
||||
|
||||
let mut child = new_binary_command(dir)
|
||||
let mut child = new_binary_command_global()
|
||||
.await?
|
||||
.args(["credential", "approve"])
|
||||
.stdin(Stdio::piped())
|
||||
|
||||
@@ -1,31 +1,38 @@
|
||||
mod add;
|
||||
mod binary;
|
||||
mod branch;
|
||||
mod clone;
|
||||
mod commit;
|
||||
mod credential;
|
||||
pub mod error;
|
||||
mod fetch;
|
||||
mod init;
|
||||
mod log;
|
||||
mod merge;
|
||||
|
||||
mod pull;
|
||||
mod push;
|
||||
mod remotes;
|
||||
mod repository;
|
||||
mod reset;
|
||||
mod status;
|
||||
mod unstage;
|
||||
mod util;
|
||||
|
||||
// Re-export all git functions for external use
|
||||
pub use add::git_add;
|
||||
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
|
||||
pub use branch::{
|
||||
BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
|
||||
git_delete_remote_branch, git_merge_branch, git_rename_branch,
|
||||
};
|
||||
pub use clone::{CloneResult, git_clone};
|
||||
pub use commit::git_commit;
|
||||
pub use credential::git_add_credential;
|
||||
pub use fetch::git_fetch_all;
|
||||
pub use init::git_init;
|
||||
pub use log::{GitCommit, git_log};
|
||||
pub use pull::{PullResult, git_pull};
|
||||
pub use pull::{PullResult, git_pull, git_pull_force_reset, git_pull_merge};
|
||||
pub use push::{PushResult, git_push};
|
||||
pub use remotes::{GitRemote, git_add_remote, git_remotes, git_rm_remote};
|
||||
pub use reset::git_reset_changes;
|
||||
pub use status::{GitStatusSummary, git_status};
|
||||
pub use unstage::git_unstage;
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
use crate::error::Error::MergeConflicts;
|
||||
use crate::util::bytes_to_string;
|
||||
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
|
||||
use log::{debug, info};
|
||||
|
||||
/// Merge `commit_to_merge` into `local_branch`: fast-forward when possible,
/// create a merge commit when histories diverged, and do nothing when the
/// branch is already up to date.
pub(crate) fn do_merge(
    repo: &Repository,
    local_branch: &Branch,
    commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
    debug!("Merging remote branches");
    // Ask libgit2 how this merge could proceed (fast-forward / normal / none).
    let analysis = repo.merge_analysis(&[&commit_to_merge])?;

    if analysis.0.is_fast_forward() {
        let refname = bytes_to_string(local_branch.get().name_bytes())?;
        match repo.find_reference(&refname) {
            Ok(mut r) => {
                // Reference exists: move it forward to the incoming commit.
                merge_fast_forward(repo, &mut r, &commit_to_merge)?;
            }
            Err(_) => {
                // The branch doesn't exist, so set the reference to the commit directly. Usually
                // this is because you are pulling into an empty repository.
                repo.reference(
                    &refname,
                    commit_to_merge.id(),
                    true,
                    &format!("Setting {} to {}", refname, commit_to_merge.id()),
                )?;
                repo.set_head(&refname)?;
                repo.checkout_head(Some(
                    git2::build::CheckoutBuilder::default()
                        .allow_conflicts(true)
                        .conflict_style_merge(true)
                        .force(),
                ))?;
            }
        };
    } else if analysis.0.is_normal() {
        // Histories diverged: perform a true three-way merge against HEAD.
        let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
        merge_normal(repo, &head_commit, commit_to_merge)?;
    } else {
        debug!("Skipping merge. Nothing to do")
    }

    Ok(())
}
|
||||
|
||||
/// Fast-forward `local_reference` to `remote_commit` and update HEAD and the
/// working directory to match.
pub(crate) fn merge_fast_forward(
    repo: &Repository,
    local_reference: &mut Reference,
    remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing fast forward");
    // Reference names are usually valid UTF-8; fall back to a lossy decode of
    // the raw bytes when they are not.
    let name = match local_reference.name() {
        Some(s) => s.to_string(),
        None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
    };
    let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
    local_reference.set_target(remote_commit.id(), &msg)?;
    repo.set_head(&name)?;
    repo.checkout_head(Some(
        git2::build::CheckoutBuilder::default()
            // For some reason, the force is required to make the working directory actually get
            // updated I suspect we should be adding some logic to handle dirty working directory
            // states, but this is just an example so maybe not.
            .force(),
    ))?;
    Ok(())
}
|
||||
|
||||
/// Three-way merge `remote` into `local`, creating a merge commit on HEAD.
/// Fails with `MergeConflicts` when the merged index has conflicts (each
/// conflicted path is printed first).
pub(crate) fn merge_normal(
    repo: &Repository,
    local: &AnnotatedCommit,
    remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing normal merge");
    let local_tree = repo.find_commit(local.id())?.tree()?;
    let remote_tree = repo.find_commit(remote.id())?.tree()?;
    // The merge base of the two commits supplies the common-ancestor tree for
    // the three-way merge.
    let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;

    let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;

    if idx.has_conflicts() {
        // Report each conflicted path before bailing out.
        let conflicts = idx.conflicts()?;
        for conflict in conflicts {
            if let Ok(conflict) = conflict {
                print_conflict(&conflict);
            }
        }
        return Err(MergeConflicts);
    }

    // Write the merged index out as a tree owned by this repository.
    let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
    // now create the merge commit
    let msg = format!("Merge: {} into {}", remote.id(), local.id());
    let sig = repo.signature()?;
    let local_commit = repo.find_commit(local.id())?;
    let remote_commit = repo.find_commit(remote.id())?;

    // Do our merge commit and set current branch head to that commit.
    let _merge_commit = repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        &msg,
        &result_tree,
        &[&local_commit, &remote_commit],
    )?;

    // Set working tree to match head.
    repo.checkout_head(None)?;

    Ok(())
}
|
||||
|
||||
fn print_conflict(conflict: &git2::IndexConflict) {
|
||||
let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
|
||||
let ours = conflict.our.as_ref().map(path_from_index_entry);
|
||||
let theirs = conflict.their.as_ref().map(path_from_index_entry);
|
||||
|
||||
println!("Conflict detected:");
|
||||
if let Some(path) = ancestor {
|
||||
println!(" Common ancestor: {:?}", path);
|
||||
}
|
||||
if let Some(path) = ours {
|
||||
println!(" Ours: {:?}", path);
|
||||
}
|
||||
if let Some(path) = theirs {
|
||||
println!(" Theirs: {:?}", path);
|
||||
}
|
||||
}
|
||||
|
||||
fn path_from_index_entry(entry: &IndexEntry) -> String {
|
||||
String::from_utf8_lossy(entry.path.as_slice()).into_owned()
|
||||
}
|
||||
@@ -15,9 +15,23 @@ pub enum PullResult {
|
||||
Success { message: String },
|
||||
UpToDate,
|
||||
NeedsCredentials { url: String, error: Option<String> },
|
||||
Diverged { remote: String, branch: String },
|
||||
UncommittedChanges,
|
||||
}
|
||||
|
||||
fn has_uncommitted_changes(dir: &Path) -> Result<bool> {
|
||||
let repo = open_repo(dir)?;
|
||||
let mut opts = git2::StatusOptions::new();
|
||||
opts.include_ignored(false).include_untracked(false);
|
||||
let statuses = repo.statuses(Some(&mut opts))?;
|
||||
Ok(statuses.iter().any(|e| e.status() != git2::Status::CURRENT))
|
||||
}
|
||||
|
||||
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
||||
if has_uncommitted_changes(dir)? {
|
||||
return Ok(PullResult::UncommittedChanges);
|
||||
}
|
||||
|
||||
// Extract all git2 data before any await points (git2 types are not Send)
|
||||
let (branch_name, remote_name, remote_url) = {
|
||||
let repo = open_repo(dir)?;
|
||||
@@ -30,42 +44,130 @@ pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
||||
(branch_name, remote_name, remote_url)
|
||||
};
|
||||
|
||||
let out = new_binary_command(dir)
|
||||
// Step 1: fetch the specific branch
|
||||
// NOTE: We use fetch + merge instead of `git pull` to avoid conflicts with
|
||||
// global git config (e.g. pull.ff=only) and the background fetch --all.
|
||||
let fetch_out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["pull", &remote_name, &branch_name])
|
||||
.args(["fetch", &remote_name, &branch_name])
|
||||
.env("GIT_TERMINAL_PROMPT", "0")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
|
||||
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = stdout + stderr;
|
||||
let fetch_stdout = String::from_utf8_lossy(&fetch_out.stdout);
|
||||
let fetch_stderr = String::from_utf8_lossy(&fetch_out.stderr);
|
||||
let fetch_combined = format!("{fetch_stdout}{fetch_stderr}");
|
||||
|
||||
info!("Pulled status={} {combined}", out.status);
|
||||
info!("Fetched status={} {fetch_combined}", fetch_out.status);
|
||||
|
||||
if combined.to_lowercase().contains("could not read") {
|
||||
if fetch_combined.to_lowercase().contains("could not read") {
|
||||
return Ok(PullResult::NeedsCredentials { url: remote_url.to_string(), error: None });
|
||||
}
|
||||
|
||||
if combined.to_lowercase().contains("unable to access") {
|
||||
if fetch_combined.to_lowercase().contains("unable to access") {
|
||||
return Ok(PullResult::NeedsCredentials {
|
||||
url: remote_url.to_string(),
|
||||
error: Some(combined.to_string()),
|
||||
error: Some(fetch_combined.to_string()),
|
||||
});
|
||||
}
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to pull {combined}")));
|
||||
if !fetch_out.status.success() {
|
||||
return Err(GenericError(format!("Failed to fetch: {fetch_combined}")));
|
||||
}
|
||||
|
||||
if combined.to_lowercase().contains("up to date") {
|
||||
// Step 2: merge the fetched branch
|
||||
let ref_name = format!("{}/{}", remote_name, branch_name);
|
||||
let merge_out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["merge", "--ff-only", &ref_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
|
||||
|
||||
let merge_stdout = String::from_utf8_lossy(&merge_out.stdout);
|
||||
let merge_stderr = String::from_utf8_lossy(&merge_out.stderr);
|
||||
let merge_combined = format!("{merge_stdout}{merge_stderr}");
|
||||
|
||||
info!("Merged status={} {merge_combined}", merge_out.status);
|
||||
|
||||
if !merge_out.status.success() {
|
||||
let merge_lower = merge_combined.to_lowercase();
|
||||
if merge_lower.contains("cannot fast-forward")
|
||||
|| merge_lower.contains("not possible to fast-forward")
|
||||
|| merge_lower.contains("diverged")
|
||||
{
|
||||
return Ok(PullResult::Diverged { remote: remote_name, branch: branch_name });
|
||||
}
|
||||
return Err(GenericError(format!("Failed to merge: {merge_combined}")));
|
||||
}
|
||||
|
||||
if merge_combined.to_lowercase().contains("up to date") {
|
||||
return Ok(PullResult::UpToDate);
|
||||
}
|
||||
|
||||
Ok(PullResult::Success { message: format!("Pulled from {}/{}", remote_name, branch_name) })
|
||||
}
|
||||
|
||||
pub async fn git_pull_force_reset(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||
// Step 1: fetch the remote
|
||||
let fetch_out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["fetch", remote])
|
||||
.env("GIT_TERMINAL_PROMPT", "0")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git fetch: {e}")))?;
|
||||
|
||||
if !fetch_out.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&fetch_out.stderr);
|
||||
return Err(GenericError(format!("Failed to fetch: {stderr}")));
|
||||
}
|
||||
|
||||
// Step 2: reset --hard to remote/branch
|
||||
let ref_name = format!("{}/{}", remote, branch);
|
||||
let reset_out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["reset", "--hard", &ref_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
|
||||
|
||||
if !reset_out.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&reset_out.stderr);
|
||||
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
|
||||
}
|
||||
|
||||
Ok(PullResult::Success { message: format!("Reset to {}/{}", remote, branch) })
|
||||
}
|
||||
|
||||
pub async fn git_pull_merge(dir: &Path, remote: &str, branch: &str) -> Result<PullResult> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["pull", "--no-rebase", remote, branch])
|
||||
.env("GIT_TERMINAL_PROMPT", "0")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git pull --no-rebase: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
info!("Pull merge status={} {combined}", out.status);
|
||||
|
||||
if !out.status.success() {
|
||||
if combined.to_lowercase().contains("conflict") {
|
||||
return Err(GenericError(
|
||||
"Merge conflicts detected. Please resolve them manually.".to_string(),
|
||||
));
|
||||
}
|
||||
return Err(GenericError(format!("Failed to merge pull: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(PullResult::Success { message: format!("Merged from {}/{}", remote, branch) })
|
||||
}
|
||||
|
||||
// pub(crate) fn git_pull_old(dir: &Path) -> Result<PullResult> {
|
||||
// let repo = open_repo(dir)?;
|
||||
//
|
||||
|
||||
20
crates/yaak-git/src/reset.rs
Normal file
20
crates/yaak-git/src/reset.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
|
||||
pub async fn git_reset_changes(dir: &Path) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["reset", "--hard", "HEAD"])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git reset: {e}")))?;
|
||||
|
||||
if !out.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
return Err(GenericError(format!("Failed to reset: {}", stderr.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -18,6 +18,8 @@ pub struct GitStatusSummary {
|
||||
pub origins: Vec<String>,
|
||||
pub local_branches: Vec<String>,
|
||||
pub remote_branches: Vec<String>,
|
||||
pub ahead: u32,
|
||||
pub behind: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||
@@ -160,6 +162,18 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
|
||||
let local_branches = local_branch_names(&repo)?;
|
||||
let remote_branches = remote_branch_names(&repo)?;
|
||||
|
||||
// Compute ahead/behind relative to remote tracking branch
|
||||
let (ahead, behind) = (|| -> Option<(usize, usize)> {
|
||||
let head = repo.head().ok()?;
|
||||
let local_oid = head.target()?;
|
||||
let branch_name = head.shorthand()?;
|
||||
let upstream_ref =
|
||||
repo.find_branch(&format!("origin/{branch_name}"), git2::BranchType::Remote).ok()?;
|
||||
let upstream_oid = upstream_ref.get().target()?;
|
||||
repo.graph_ahead_behind(local_oid, upstream_oid).ok()
|
||||
})()
|
||||
.unwrap_or((0, 0));
|
||||
|
||||
Ok(GitStatusSummary {
|
||||
entries,
|
||||
origins,
|
||||
@@ -168,5 +182,7 @@ pub fn git_status(dir: &Path) -> crate::error::Result<GitStatusSummary> {
|
||||
head_ref_shorthand,
|
||||
local_branches,
|
||||
remote_branches,
|
||||
ahead: ahead as u32,
|
||||
behind: behind as u32,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -47,10 +47,6 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
|
||||
Ok(branches)
|
||||
}
|
||||
|
||||
pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
|
||||
Ok(repo.find_branch(name, BranchType::Local)?)
|
||||
}
|
||||
|
||||
pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
|
||||
Ok(String::from_utf8(bytes.to_vec())?)
|
||||
}
|
||||
|
||||
@@ -55,6 +55,7 @@ mod tests {
|
||||
|
||||
let mut out = Vec::new();
|
||||
super::collect_any_types(json, &mut out);
|
||||
out.sort();
|
||||
assert_eq!(out, vec!["foo.bar", "mount_source.MountSourceRBDVolume"]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,10 +8,11 @@ publish = false
|
||||
async-compression = { version = "0.4", features = ["tokio", "gzip", "deflate", "brotli", "zstd"] }
|
||||
async-trait = "0.1"
|
||||
brotli = "7"
|
||||
bytes = "1.5.0"
|
||||
bytes = "1.11.1"
|
||||
cookie = "0.18.1"
|
||||
flate2 = "1"
|
||||
futures-util = "0.3"
|
||||
http-body = "1"
|
||||
url = "2"
|
||||
zstd = "0.13"
|
||||
hyper-util = { version = "0.1.17", default-features = false, features = ["client-legacy"] }
|
||||
|
||||
@@ -2,7 +2,9 @@ use crate::decompress::{ContentEncoding, streaming_decoder};
|
||||
use crate::error::{Error, Result};
|
||||
use crate::types::{SendableBody, SendableHttpRequest};
|
||||
use async_trait::async_trait;
|
||||
use bytes::Bytes;
|
||||
use futures_util::StreamExt;
|
||||
use http_body::{Body as HttpBody, Frame, SizeHint};
|
||||
use reqwest::{Client, Method, Version};
|
||||
use std::fmt::Display;
|
||||
use std::pin::Pin;
|
||||
@@ -31,7 +33,14 @@ pub enum HttpResponseEvent {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
scheme: String,
|
||||
username: String,
|
||||
password: String,
|
||||
host: String,
|
||||
port: u16,
|
||||
path: String,
|
||||
query: String,
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: Version,
|
||||
@@ -65,7 +74,32 @@ impl Display for HttpResponseEvent {
|
||||
};
|
||||
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
|
||||
}
|
||||
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
|
||||
HttpResponseEvent::SendUrl {
|
||||
method,
|
||||
scheme,
|
||||
username,
|
||||
password,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
fragment,
|
||||
} => {
|
||||
let auth_str = if username.is_empty() && password.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("{}:{}@", username, password)
|
||||
};
|
||||
let query_str =
|
||||
if query.is_empty() { String::new() } else { format!("?{}", query) };
|
||||
let fragment_str =
|
||||
if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
|
||||
write!(
|
||||
f,
|
||||
"> {} {}://{}{}:{}{}{}{}",
|
||||
method, scheme, auth_str, host, port, path, query_str, fragment_str
|
||||
)
|
||||
}
|
||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||
write!(f, "< {} {}", version_to_str(version), status)
|
||||
}
|
||||
@@ -104,7 +138,19 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
|
||||
RedirectBehavior::DropBody => "drop_body".to_string(),
|
||||
},
|
||||
},
|
||||
HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
|
||||
HttpResponseEvent::SendUrl {
|
||||
method,
|
||||
scheme,
|
||||
username,
|
||||
password,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
fragment,
|
||||
} => {
|
||||
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
|
||||
}
|
||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||
D::ReceiveUrl { version: format!("{:?}", version), status }
|
||||
}
|
||||
@@ -395,10 +441,16 @@ impl HttpSender for ReqwestSender {
|
||||
Some(SendableBody::Bytes(bytes)) => {
|
||||
req_builder = req_builder.body(bytes);
|
||||
}
|
||||
Some(SendableBody::Stream(stream)) => {
|
||||
// Convert AsyncRead stream to reqwest Body
|
||||
let stream = tokio_util::io::ReaderStream::new(stream);
|
||||
let body = reqwest::Body::wrap_stream(stream);
|
||||
Some(SendableBody::Stream { data, content_length }) => {
|
||||
// Convert AsyncRead stream to reqwest Body. If content length is
|
||||
// known, wrap with a SizedBody so hyper can set Content-Length
|
||||
// automatically (for both HTTP/1.1 and HTTP/2).
|
||||
let stream = tokio_util::io::ReaderStream::new(data);
|
||||
let body = if let Some(len) = content_length {
|
||||
reqwest::Body::wrap(SizedBody::new(stream, len))
|
||||
} else {
|
||||
reqwest::Body::wrap_stream(stream)
|
||||
};
|
||||
req_builder = req_builder.body(body);
|
||||
}
|
||||
}
|
||||
@@ -415,8 +467,15 @@ impl HttpSender for ReqwestSender {
|
||||
));
|
||||
|
||||
send_event(HttpResponseEvent::SendUrl {
|
||||
path: sendable_req.url().path().to_string(),
|
||||
method: sendable_req.method().to_string(),
|
||||
scheme: sendable_req.url().scheme().to_string(),
|
||||
username: sendable_req.url().username().to_string(),
|
||||
password: sendable_req.url().password().unwrap_or_default().to_string(),
|
||||
host: sendable_req.url().host_str().unwrap_or_default().to_string(),
|
||||
port: sendable_req.url().port_or_known_default().unwrap_or(0),
|
||||
path: sendable_req.url().path().to_string(),
|
||||
query: sendable_req.url().query().unwrap_or_default().to_string(),
|
||||
fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
|
||||
});
|
||||
|
||||
let mut request_headers = Vec::new();
|
||||
@@ -495,6 +554,54 @@ impl HttpSender for ReqwestSender {
|
||||
}
|
||||
}
|
||||
|
||||
/// A wrapper around a byte stream that reports a known content length via
/// `size_hint()`. This lets hyper set the `Content-Length` header
/// automatically based on the body size, without us having to add it as an
/// explicit header — which can cause duplicate `Content-Length` headers and
/// break HTTP/2.
struct SizedBody<S> {
    // The underlying chunk stream. NOTE(review): `poll_frame` already gets
    // `&mut Self` via `get_mut()`, so the Mutex looks redundant for
    // exclusivity — presumably it exists to make the type `Sync`; confirm
    // before removing.
    stream: std::sync::Mutex<S>,
    // Bytes still expected from the stream; decremented as chunks are
    // yielded and reported through `size_hint()`.
    remaining: u64,
}

impl<S> SizedBody<S> {
    // Build a sized body claiming exactly `content_length` bytes remain.
    fn new(stream: S, content_length: u64) -> Self {
        Self { stream: std::sync::Mutex::new(stream), remaining: content_length }
    }
}
|
||||
|
||||
impl<S> HttpBody for SizedBody<S>
where
    S: futures_util::Stream<Item = std::result::Result<Bytes, std::io::Error>>
        + Send
        + Unpin
        + 'static,
{
    type Data = Bytes;
    type Error = std::io::Error;

    // Forward the next chunk from the inner stream, shrinking `remaining` so
    // subsequent `size_hint()` calls reflect what is still expected.
    fn poll_frame(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<std::result::Result<Frame<Self::Data>, Self::Error>>> {
        let this = self.get_mut();
        let mut stream = this.stream.lock().unwrap();
        match stream.poll_next_unpin(cx) {
            Poll::Ready(Some(Ok(chunk))) => {
                // saturating_sub guards against a stream that yields more
                // bytes than the declared content length.
                this.remaining = this.remaining.saturating_sub(chunk.len() as u64);
                Poll::Ready(Some(Ok(Frame::data(chunk))))
            }
            Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e))),
            Poll::Ready(None) => Poll::Ready(None),
            Poll::Pending => Poll::Pending,
        }
    }

    // Reporting an exact size lets hyper emit Content-Length itself without
    // buffering the body.
    fn size_hint(&self) -> SizeHint {
        SizeHint::with_exact(self.remaining)
    }
}
|
||||
|
||||
fn version_to_str(version: &Version) -> String {
|
||||
match *version {
|
||||
Version::HTTP_09 => "HTTP/0.9".to_string(),
|
||||
|
||||
@@ -168,6 +168,7 @@ impl<S: HttpSender> HttpTransaction<S> {
|
||||
response.drain().await?;
|
||||
|
||||
// Update the request URL
|
||||
let previous_url = current_url.clone();
|
||||
current_url = if location.starts_with("http://") || location.starts_with("https://") {
|
||||
// Absolute URL
|
||||
location
|
||||
@@ -181,6 +182,8 @@ impl<S: HttpSender> HttpTransaction<S> {
|
||||
format!("{}/{}", base_path, location)
|
||||
};
|
||||
|
||||
Self::remove_sensitive_headers(&mut current_headers, &previous_url, ¤t_url);
|
||||
|
||||
// Determine redirect behavior based on status code and method
|
||||
let behavior = if status == 303 {
|
||||
// 303 See Other always changes to GET
|
||||
@@ -220,6 +223,33 @@ impl<S: HttpSender> HttpTransaction<S> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Remove sensitive headers when redirecting to a different host.
|
||||
/// This matches reqwest's `remove_sensitive_headers()` behavior and prevents
|
||||
/// credentials from being forwarded to third-party servers (e.g., an
|
||||
/// Authorization header sent from an API redirect to an S3 bucket).
|
||||
fn remove_sensitive_headers(
|
||||
headers: &mut Vec<(String, String)>,
|
||||
previous_url: &str,
|
||||
next_url: &str,
|
||||
) {
|
||||
let previous_host = Url::parse(previous_url).ok().and_then(|u| {
|
||||
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
|
||||
});
|
||||
let next_host = Url::parse(next_url).ok().and_then(|u| {
|
||||
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
|
||||
});
|
||||
if previous_host != next_host {
|
||||
headers.retain(|h| {
|
||||
let name_lower = h.0.to_lowercase();
|
||||
name_lower != "authorization"
|
||||
&& name_lower != "cookie"
|
||||
&& name_lower != "cookie2"
|
||||
&& name_lower != "proxy-authorization"
|
||||
&& name_lower != "www-authenticate"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a status code indicates a redirect
fn is_redirect(status: u16) -> bool {
    const REDIRECT_CODES: [u16; 5] = [301, 302, 303, 307, 308];
    REDIRECT_CODES.contains(&status)
}
|
||||
@@ -269,9 +299,20 @@ mod tests {
|
||||
use tokio::io::AsyncRead;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
/// Captured request metadata for test assertions
|
||||
#[derive(Debug, Clone)]
|
||||
#[allow(dead_code)]
|
||||
struct CapturedRequest {
|
||||
url: String,
|
||||
method: String,
|
||||
headers: Vec<(String, String)>,
|
||||
}
|
||||
|
||||
/// Mock sender for testing
|
||||
struct MockSender {
|
||||
responses: Arc<Mutex<Vec<MockResponse>>>,
|
||||
/// Captured requests for assertions
|
||||
captured_requests: Arc<Mutex<Vec<CapturedRequest>>>,
|
||||
}
|
||||
|
||||
struct MockResponse {
|
||||
@@ -282,7 +323,10 @@ mod tests {
|
||||
|
||||
impl MockSender {
|
||||
fn new(responses: Vec<MockResponse>) -> Self {
|
||||
Self { responses: Arc::new(Mutex::new(responses)) }
|
||||
Self {
|
||||
responses: Arc::new(Mutex::new(responses)),
|
||||
captured_requests: Arc::new(Mutex::new(Vec::new())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -290,9 +334,16 @@ mod tests {
|
||||
impl HttpSender for MockSender {
|
||||
async fn send(
|
||||
&self,
|
||||
_request: SendableHttpRequest,
|
||||
request: SendableHttpRequest,
|
||||
_event_tx: mpsc::Sender<HttpResponseEvent>,
|
||||
) -> Result<HttpResponse> {
|
||||
// Capture the request metadata for later assertions
|
||||
self.captured_requests.lock().await.push(CapturedRequest {
|
||||
url: request.url.clone(),
|
||||
method: request.method.clone(),
|
||||
headers: request.headers.clone(),
|
||||
});
|
||||
|
||||
let mut responses = self.responses.lock().await;
|
||||
if responses.is_empty() {
|
||||
Err(crate::error::Error::RequestError("No more mock responses".to_string()))
|
||||
@@ -726,4 +777,116 @@ mod tests {
|
||||
assert!(result.is_ok());
|
||||
assert_eq!(request_count.load(Ordering::SeqCst), 2);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_cross_origin_redirect_strips_auth_headers() {
|
||||
// Redirect from api.example.com -> s3.amazonaws.com should strip Authorization
|
||||
let responses = vec![
|
||||
MockResponse {
|
||||
status: 302,
|
||||
headers: vec![(
|
||||
"Location".to_string(),
|
||||
"https://s3.amazonaws.com/bucket/file.pdf".to_string(),
|
||||
)],
|
||||
body: vec![],
|
||||
},
|
||||
MockResponse { status: 200, headers: Vec::new(), body: b"PDF content".to_vec() },
|
||||
];
|
||||
|
||||
let sender = MockSender::new(responses);
|
||||
let captured = sender.captured_requests.clone();
|
||||
let transaction = HttpTransaction::new(sender);
|
||||
|
||||
let request = SendableHttpRequest {
|
||||
url: "https://api.example.com/download".to_string(),
|
||||
method: "GET".to_string(),
|
||||
headers: vec![
|
||||
("Authorization".to_string(), "Basic dXNlcjpwYXNz".to_string()),
|
||||
("Accept".to_string(), "application/pdf".to_string()),
|
||||
],
|
||||
options: crate::types::SendableHttpRequestOptions {
|
||||
follow_redirects: true,
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let (_tx, rx) = tokio::sync::watch::channel(false);
|
||||
let (event_tx, _event_rx) = mpsc::channel(100);
|
||||
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
|
||||
assert_eq!(result.status, 200);
|
||||
|
||||
let requests = captured.lock().await;
|
||||
assert_eq!(requests.len(), 2);
|
||||
|
||||
// First request should have the Authorization header
|
||||
assert!(
|
||||
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
|
||||
"First request should have Authorization header"
|
||||
);
|
||||
|
||||
// Second request (to different host) should NOT have the Authorization header
|
||||
assert!(
|
||||
!requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
|
||||
"Redirected request to different host should NOT have Authorization header"
|
||||
);
|
||||
|
||||
// Non-sensitive headers should still be present
|
||||
assert!(
|
||||
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("accept")),
|
||||
"Non-sensitive headers should be preserved across cross-origin redirects"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_same_origin_redirect_preserves_auth_headers() {
|
||||
// Redirect within the same host should keep Authorization
|
||||
let responses = vec![
|
||||
MockResponse {
|
||||
status: 302,
|
||||
headers: vec![(
|
||||
"Location".to_string(),
|
||||
"https://api.example.com/v2/download".to_string(),
|
||||
)],
|
||||
body: vec![],
|
||||
},
|
||||
MockResponse { status: 200, headers: Vec::new(), body: b"OK".to_vec() },
|
||||
];
|
||||
|
||||
let sender = MockSender::new(responses);
|
||||
let captured = sender.captured_requests.clone();
|
||||
let transaction = HttpTransaction::new(sender);
|
||||
|
||||
let request = SendableHttpRequest {
|
||||
url: "https://api.example.com/v1/download".to_string(),
|
||||
method: "GET".to_string(),
|
||||
headers: vec![
|
||||
("Authorization".to_string(), "Bearer token123".to_string()),
|
||||
("Accept".to_string(), "application/json".to_string()),
|
||||
],
|
||||
options: crate::types::SendableHttpRequestOptions {
|
||||
follow_redirects: true,
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let (_tx, rx) = tokio::sync::watch::channel(false);
|
||||
let (event_tx, _event_rx) = mpsc::channel(100);
|
||||
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
|
||||
assert_eq!(result.status, 200);
|
||||
|
||||
let requests = captured.lock().await;
|
||||
assert_eq!(requests.len(), 2);
|
||||
|
||||
// Both requests should have the Authorization header (same host)
|
||||
assert!(
|
||||
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
|
||||
"First request should have Authorization header"
|
||||
);
|
||||
assert!(
|
||||
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
|
||||
"Redirected request to same host should preserve Authorization header"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,13 @@ pub(crate) const MULTIPART_BOUNDARY: &str = "------YaakFormBoundary";
|
||||
|
||||
pub enum SendableBody {
|
||||
Bytes(Bytes),
|
||||
Stream(Pin<Box<dyn AsyncRead + Send + 'static>>),
|
||||
Stream {
|
||||
data: Pin<Box<dyn AsyncRead + Send + 'static>>,
|
||||
/// Known content length for the stream, if available. This is used by
|
||||
/// the sender to set the body size hint so that hyper can set
|
||||
/// Content-Length automatically for both HTTP/1.1 and HTTP/2.
|
||||
content_length: Option<u64>,
|
||||
},
|
||||
}
|
||||
|
||||
enum SendableBodyWithMeta {
|
||||
@@ -31,7 +37,9 @@ impl From<SendableBodyWithMeta> for SendableBody {
|
||||
fn from(value: SendableBodyWithMeta) -> Self {
|
||||
match value {
|
||||
SendableBodyWithMeta::Bytes(b) => SendableBody::Bytes(b),
|
||||
SendableBodyWithMeta::Stream { data, .. } => SendableBody::Stream(data),
|
||||
SendableBodyWithMeta::Stream { data, content_length } => {
|
||||
SendableBody::Stream { data, content_length: content_length.map(|l| l as u64) }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -186,23 +194,11 @@ async fn build_body(
|
||||
}
|
||||
}
|
||||
|
||||
// Check if Transfer-Encoding: chunked is already set
|
||||
let has_chunked_encoding = headers.iter().any(|h| {
|
||||
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
|
||||
});
|
||||
|
||||
// Add a Content-Length header only if chunked encoding is not being used
|
||||
if !has_chunked_encoding {
|
||||
let content_length = match body {
|
||||
Some(SendableBodyWithMeta::Bytes(ref bytes)) => Some(bytes.len()),
|
||||
Some(SendableBodyWithMeta::Stream { content_length, .. }) => content_length,
|
||||
None => None,
|
||||
};
|
||||
|
||||
if let Some(cl) = content_length {
|
||||
headers.push(("Content-Length".to_string(), cl.to_string()));
|
||||
}
|
||||
}
|
||||
// NOTE: Content-Length is NOT set as an explicit header here. Instead, the
|
||||
// body's content length is carried via SendableBody::Stream { content_length }
|
||||
// and used by the sender to set the body size hint. This lets hyper handle
|
||||
// Content-Length automatically for both HTTP/1.1 and HTTP/2, avoiding the
|
||||
// duplicate Content-Length that breaks HTTP/2 servers.
|
||||
|
||||
Ok((body.map(|b| b.into()), headers))
|
||||
}
|
||||
@@ -928,7 +924,27 @@ mod tests {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_no_content_length_with_chunked_encoding() -> Result<()> {
|
||||
async fn test_no_content_length_header_added_by_build_body() -> Result<()> {
|
||||
let mut body = BTreeMap::new();
|
||||
body.insert("text".to_string(), json!("Hello, World!"));
|
||||
|
||||
let headers = vec![];
|
||||
|
||||
let (_, result_headers) =
|
||||
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
|
||||
|
||||
// Content-Length should NOT be set as an explicit header. Instead, the
|
||||
// sender uses the body's size_hint to let hyper set it automatically,
|
||||
// which works correctly for both HTTP/1.1 and HTTP/2.
|
||||
let has_content_length =
|
||||
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
|
||||
assert!(!has_content_length, "Content-Length should not be set as an explicit header");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_chunked_encoding_header_preserved() -> Result<()> {
|
||||
let mut body = BTreeMap::new();
|
||||
body.insert("text".to_string(), json!("Hello, World!"));
|
||||
|
||||
@@ -938,11 +954,6 @@ mod tests {
|
||||
let (_, result_headers) =
|
||||
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
|
||||
|
||||
// Verify that Content-Length is NOT present when Transfer-Encoding: chunked is set
|
||||
let has_content_length =
|
||||
result_headers.iter().any(|h| h.0.to_lowercase() == "content-length");
|
||||
assert!(!has_content_length, "Content-Length should not be present with chunked encoding");
|
||||
|
||||
// Verify that the Transfer-Encoding header is still present
|
||||
let has_chunked = result_headers.iter().any(|h| {
|
||||
h.0.to_lowercase() == "transfer-encoding" && h.1.to_lowercase().contains("chunked")
|
||||
@@ -951,31 +962,4 @@ mod tests {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_content_length_without_chunked_encoding() -> Result<()> {
|
||||
let mut body = BTreeMap::new();
|
||||
body.insert("text".to_string(), json!("Hello, World!"));
|
||||
|
||||
// Headers without Transfer-Encoding: chunked
|
||||
let headers = vec![];
|
||||
|
||||
let (_, result_headers) =
|
||||
build_body("POST", &Some("text/plain".to_string()), &body, headers).await?;
|
||||
|
||||
// Verify that Content-Length IS present when Transfer-Encoding: chunked is NOT set
|
||||
let content_length_header =
|
||||
result_headers.iter().find(|h| h.0.to_lowercase() == "content-length");
|
||||
assert!(
|
||||
content_length_header.is_some(),
|
||||
"Content-Length should be present without chunked encoding"
|
||||
);
|
||||
assert_eq!(
|
||||
content_length_header.unwrap().1,
|
||||
"13",
|
||||
"Content-Length should match the body size"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ sea-query = { version = "0.32.1", features = ["with-chrono", "attr"] }
|
||||
sea-query-rusqlite = { version = "0.7.0", features = ["with-chrono"] }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
schemars = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
|
||||
|
||||
2
crates/yaak-models/bindings/gen_models.ts
generated
2
crates/yaak-models/bindings/gen_models.ts
generated
@@ -49,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
|
||||
5
crates/yaak-models/build.rs
Normal file
5
crates/yaak-models/build.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
fn main() {
|
||||
// Migrations are embedded with include_dir!, so trigger rebuilds when SQL files change.
|
||||
println!("cargo:rerun-if-changed=migrations");
|
||||
println!("cargo:rerun-if-changed=blob_migrations");
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
CREATE TABLE model_changes
|
||||
(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
model TEXT NOT NULL,
|
||||
model_id TEXT NOT NULL,
|
||||
change TEXT NOT NULL,
|
||||
update_source TEXT NOT NULL,
|
||||
payload TEXT NOT NULL,
|
||||
created_at DATETIME DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX idx_model_changes_created_at ON model_changes (created_at);
|
||||
@@ -0,0 +1,3 @@
|
||||
-- Remove stale plugin rows left over from the brief period when faker shipped as bundled.
|
||||
DELETE FROM plugins
|
||||
WHERE directory LIKE '%template-function-faker';
|
||||
@@ -3,8 +3,7 @@ use crate::error::Error::ModelNotFound;
|
||||
use crate::error::Result;
|
||||
use crate::models::{AnyModel, UpsertModelInfo};
|
||||
use crate::util::{ModelChangeEvent, ModelPayload, UpdateSource};
|
||||
use log::error;
|
||||
use rusqlite::OptionalExtension;
|
||||
use rusqlite::{OptionalExtension, params};
|
||||
use sea_query::{
|
||||
Asterisk, Expr, Func, IntoColumnRef, IntoIden, IntoTableRef, OnConflict, Query, SimpleExpr,
|
||||
SqliteQueryBuilder,
|
||||
@@ -14,7 +13,7 @@ use std::fmt::Debug;
|
||||
use std::sync::mpsc;
|
||||
|
||||
pub struct DbContext<'a> {
|
||||
pub(crate) events_tx: mpsc::Sender<ModelPayload>,
|
||||
pub(crate) _events_tx: mpsc::Sender<ModelPayload>,
|
||||
pub(crate) conn: ConnectionOrTx<'a>,
|
||||
}
|
||||
|
||||
@@ -180,9 +179,8 @@ impl<'a> DbContext<'a> {
|
||||
change: ModelChangeEvent::Upsert { created },
|
||||
};
|
||||
|
||||
if let Err(e) = self.events_tx.send(payload.clone()) {
|
||||
error!("Failed to send model change {source:?}: {e:?}");
|
||||
}
|
||||
self.record_model_change(&payload)?;
|
||||
let _ = self._events_tx.send(payload);
|
||||
|
||||
Ok(m)
|
||||
}
|
||||
@@ -203,9 +201,31 @@ impl<'a> DbContext<'a> {
|
||||
change: ModelChangeEvent::Delete,
|
||||
};
|
||||
|
||||
if let Err(e) = self.events_tx.send(payload) {
|
||||
error!("Failed to send model change {source:?}: {e:?}");
|
||||
}
|
||||
self.record_model_change(&payload)?;
|
||||
let _ = self._events_tx.send(payload);
|
||||
|
||||
Ok(m.clone())
|
||||
}
|
||||
|
||||
fn record_model_change(&self, payload: &ModelPayload) -> Result<()> {
|
||||
let payload_json = serde_json::to_string(payload)?;
|
||||
let source_json = serde_json::to_string(&payload.update_source)?;
|
||||
let change_json = serde_json::to_string(&payload.change)?;
|
||||
|
||||
self.conn.resolve().execute(
|
||||
r#"
|
||||
INSERT INTO model_changes (model, model_id, change, update_source, payload)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||
"#,
|
||||
params![
|
||||
payload.model.model(),
|
||||
payload.model.id(),
|
||||
change_json,
|
||||
source_json,
|
||||
payload_json,
|
||||
],
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ use crate::models::HttpRequestIden::{
|
||||
use crate::util::{UpdateSource, generate_prefixed_id};
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
use rusqlite::Row;
|
||||
use schemars::JsonSchema;
|
||||
use sea_query::Order::Desc;
|
||||
use sea_query::{IntoColumnRef, IntoIden, IntoTableRef, Order, SimpleExpr, enum_def};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
@@ -824,7 +825,7 @@ impl UpsertModelInfo for Folder {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
pub struct HttpRequestHeader {
|
||||
@@ -837,7 +838,7 @@ pub struct HttpRequestHeader {
|
||||
pub id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
pub struct HttpUrlParameter {
|
||||
@@ -850,7 +851,7 @@ pub struct HttpUrlParameter {
|
||||
pub id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
#[enum_def(table_name = "http_requests")]
|
||||
@@ -1095,7 +1096,7 @@ impl Default for WebsocketMessageType {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
#[enum_def(table_name = "websocket_requests")]
|
||||
@@ -1495,7 +1496,21 @@ pub enum HttpResponseEventData {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
#[serde(default)]
|
||||
scheme: String,
|
||||
#[serde(default)]
|
||||
username: String,
|
||||
#[serde(default)]
|
||||
password: String,
|
||||
#[serde(default)]
|
||||
host: String,
|
||||
#[serde(default)]
|
||||
port: u16,
|
||||
path: String,
|
||||
#[serde(default)]
|
||||
query: String,
|
||||
#[serde(default)]
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: String,
|
||||
@@ -1690,7 +1705,7 @@ impl UpsertModelInfo for GraphQlIntrospection {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, JsonSchema, TS)]
|
||||
#[serde(default, rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
#[enum_def(table_name = "grpc_requests")]
|
||||
@@ -2333,6 +2348,15 @@ macro_rules! define_any_model {
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn model(&self) -> &str {
|
||||
match self {
|
||||
$(
|
||||
AnyModel::$type(inner) => &inner.model,
|
||||
)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$(
|
||||
@@ -2386,30 +2410,29 @@ impl<'de> Deserialize<'de> for AnyModel {
|
||||
{
|
||||
let value = Value::deserialize(deserializer)?;
|
||||
let model = value.as_object().unwrap();
|
||||
use AnyModel::*;
|
||||
use serde_json::from_value as fv;
|
||||
|
||||
let model = match model.get("model") {
|
||||
Some(m) if m == "cookie_jar" => AnyModel::CookieJar(fv(value).unwrap()),
|
||||
Some(m) if m == "environment" => AnyModel::Environment(fv(value).unwrap()),
|
||||
Some(m) if m == "folder" => AnyModel::Folder(fv(value).unwrap()),
|
||||
Some(m) if m == "graphql_introspection" => {
|
||||
AnyModel::GraphQlIntrospection(fv(value).unwrap())
|
||||
}
|
||||
Some(m) if m == "grpc_connection" => AnyModel::GrpcConnection(fv(value).unwrap()),
|
||||
Some(m) if m == "grpc_event" => AnyModel::GrpcEvent(fv(value).unwrap()),
|
||||
Some(m) if m == "grpc_request" => AnyModel::GrpcRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "http_request" => AnyModel::HttpRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "http_response" => AnyModel::HttpResponse(fv(value).unwrap()),
|
||||
Some(m) if m == "key_value" => AnyModel::KeyValue(fv(value).unwrap()),
|
||||
Some(m) if m == "plugin" => AnyModel::Plugin(fv(value).unwrap()),
|
||||
Some(m) if m == "settings" => AnyModel::Settings(fv(value).unwrap()),
|
||||
Some(m) if m == "websocket_connection" => {
|
||||
AnyModel::WebsocketConnection(fv(value).unwrap())
|
||||
}
|
||||
Some(m) if m == "websocket_event" => AnyModel::WebsocketEvent(fv(value).unwrap()),
|
||||
Some(m) if m == "websocket_request" => AnyModel::WebsocketRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "workspace" => AnyModel::Workspace(fv(value).unwrap()),
|
||||
Some(m) if m == "workspace_meta" => AnyModel::WorkspaceMeta(fv(value).unwrap()),
|
||||
Some(m) if m == "cookie_jar" => CookieJar(fv(value).unwrap()),
|
||||
Some(m) if m == "environment" => Environment(fv(value).unwrap()),
|
||||
Some(m) if m == "folder" => Folder(fv(value).unwrap()),
|
||||
Some(m) if m == "graphql_introspection" => GraphQlIntrospection(fv(value).unwrap()),
|
||||
Some(m) if m == "grpc_connection" => GrpcConnection(fv(value).unwrap()),
|
||||
Some(m) if m == "grpc_event" => GrpcEvent(fv(value).unwrap()),
|
||||
Some(m) if m == "grpc_request" => GrpcRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "http_request" => HttpRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "http_response" => HttpResponse(fv(value).unwrap()),
|
||||
Some(m) if m == "http_response_event" => HttpResponseEvent(fv(value).unwrap()),
|
||||
Some(m) if m == "key_value" => KeyValue(fv(value).unwrap()),
|
||||
Some(m) if m == "plugin" => Plugin(fv(value).unwrap()),
|
||||
Some(m) if m == "settings" => Settings(fv(value).unwrap()),
|
||||
Some(m) if m == "sync_state" => SyncState(fv(value).unwrap()),
|
||||
Some(m) if m == "websocket_connection" => WebsocketConnection(fv(value).unwrap()),
|
||||
Some(m) if m == "websocket_event" => WebsocketEvent(fv(value).unwrap()),
|
||||
Some(m) if m == "websocket_request" => WebsocketRequest(fv(value).unwrap()),
|
||||
Some(m) if m == "workspace" => Workspace(fv(value).unwrap()),
|
||||
Some(m) if m == "workspace_meta" => WorkspaceMeta(fv(value).unwrap()),
|
||||
Some(m) => {
|
||||
return Err(serde::de::Error::custom(format!(
|
||||
"Failed to deserialize AnyModel {}",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use super::dedupe_headers;
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
|
||||
use crate::models::{Folder, FolderIden, GrpcRequest, GrpcRequestIden, HttpRequestHeader};
|
||||
use crate::util::UpdateSource;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
@@ -15,6 +15,20 @@ impl<'a> DbContext<'a> {
|
||||
self.find_many(GrpcRequestIden::WorkspaceId, workspace_id, None)
|
||||
}
|
||||
|
||||
pub fn list_grpc_requests_for_folder_recursive(
|
||||
&self,
|
||||
folder_id: &str,
|
||||
) -> Result<Vec<GrpcRequest>> {
|
||||
let mut children = Vec::new();
|
||||
for folder in self.find_many::<Folder>(FolderIden::FolderId, folder_id, None)? {
|
||||
children.extend(self.list_grpc_requests_for_folder_recursive(&folder.id)?);
|
||||
}
|
||||
for request in self.find_many::<GrpcRequest>(GrpcRequestIden::FolderId, folder_id, None)? {
|
||||
children.push(request);
|
||||
}
|
||||
Ok(children)
|
||||
}
|
||||
|
||||
pub fn delete_grpc_request(
|
||||
&self,
|
||||
m: &GrpcRequest,
|
||||
|
||||
@@ -11,6 +11,7 @@ mod http_requests;
|
||||
mod http_response_events;
|
||||
mod http_responses;
|
||||
mod key_values;
|
||||
mod model_changes;
|
||||
mod plugin_key_values;
|
||||
mod plugins;
|
||||
mod settings;
|
||||
@@ -20,6 +21,7 @@ mod websocket_events;
|
||||
mod websocket_requests;
|
||||
mod workspace_metas;
|
||||
pub mod workspaces;
|
||||
pub use model_changes::PersistedModelChange;
|
||||
|
||||
const MAX_HISTORY_ITEMS: usize = 20;
|
||||
|
||||
|
||||
289
crates/yaak-models/src/queries/model_changes.rs
Normal file
289
crates/yaak-models/src/queries/model_changes.rs
Normal file
@@ -0,0 +1,289 @@
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::util::ModelPayload;
|
||||
use rusqlite::params;
|
||||
use rusqlite::types::Type;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PersistedModelChange {
|
||||
pub id: i64,
|
||||
pub created_at: String,
|
||||
pub payload: ModelPayload,
|
||||
}
|
||||
|
||||
impl<'a> DbContext<'a> {
|
||||
pub fn list_model_changes_after(
|
||||
&self,
|
||||
after_id: i64,
|
||||
limit: usize,
|
||||
) -> Result<Vec<PersistedModelChange>> {
|
||||
let mut stmt = self.conn.prepare(
|
||||
r#"
|
||||
SELECT id, created_at, payload
|
||||
FROM model_changes
|
||||
WHERE id > ?1
|
||||
ORDER BY id ASC
|
||||
LIMIT ?2
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let items = stmt.query_map(params![after_id, limit as i64], |row| {
|
||||
let id: i64 = row.get(0)?;
|
||||
let created_at: String = row.get(1)?;
|
||||
let payload_raw: String = row.get(2)?;
|
||||
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
|
||||
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
|
||||
})?;
|
||||
Ok(PersistedModelChange { id, created_at, payload })
|
||||
})?;
|
||||
|
||||
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
|
||||
}
|
||||
|
||||
pub fn list_model_changes_since(
|
||||
&self,
|
||||
since_created_at: &str,
|
||||
since_id: i64,
|
||||
limit: usize,
|
||||
) -> Result<Vec<PersistedModelChange>> {
|
||||
let mut stmt = self.conn.prepare(
|
||||
r#"
|
||||
SELECT id, created_at, payload
|
||||
FROM model_changes
|
||||
WHERE created_at > ?1
|
||||
OR (created_at = ?1 AND id > ?2)
|
||||
ORDER BY created_at ASC, id ASC
|
||||
LIMIT ?3
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let items = stmt.query_map(params![since_created_at, since_id, limit as i64], |row| {
|
||||
let id: i64 = row.get(0)?;
|
||||
let created_at: String = row.get(1)?;
|
||||
let payload_raw: String = row.get(2)?;
|
||||
let payload = serde_json::from_str::<ModelPayload>(&payload_raw).map_err(|e| {
|
||||
rusqlite::Error::FromSqlConversionFailure(2, Type::Text, Box::new(e))
|
||||
})?;
|
||||
Ok(PersistedModelChange { id, created_at, payload })
|
||||
})?;
|
||||
|
||||
Ok(items.collect::<std::result::Result<Vec<_>, rusqlite::Error>>()?)
|
||||
}
|
||||
|
||||
pub fn prune_model_changes_older_than_days(&self, days: i64) -> Result<usize> {
|
||||
let offset = format!("-{days} days");
|
||||
Ok(self.conn.resolve().execute(
|
||||
r#"
|
||||
DELETE FROM model_changes
|
||||
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
|
||||
"#,
|
||||
params![offset],
|
||||
)?)
|
||||
}
|
||||
|
||||
pub fn prune_model_changes_older_than_hours(&self, hours: i64) -> Result<usize> {
|
||||
let offset = format!("-{hours} hours");
|
||||
Ok(self.conn.resolve().execute(
|
||||
r#"
|
||||
DELETE FROM model_changes
|
||||
WHERE created_at < STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', ?1)
|
||||
"#,
|
||||
params![offset],
|
||||
)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::init_in_memory;
|
||||
use crate::models::Workspace;
|
||||
use crate::util::{ModelChangeEvent, UpdateSource};
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn records_model_changes_for_upsert_and_delete() {
|
||||
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||
let db = query_manager.connect();
|
||||
|
||||
let workspace = db
|
||||
.upsert_workspace(
|
||||
&Workspace {
|
||||
name: "Changes Test".to_string(),
|
||||
setting_follow_redirects: true,
|
||||
setting_validate_certificates: true,
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::Sync,
|
||||
)
|
||||
.expect("Failed to upsert workspace");
|
||||
|
||||
let created_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(created_changes.len(), 1);
|
||||
assert_eq!(created_changes[0].payload.model.id(), workspace.id);
|
||||
assert_eq!(created_changes[0].payload.model.model(), "workspace");
|
||||
assert!(matches!(
|
||||
created_changes[0].payload.change,
|
||||
ModelChangeEvent::Upsert { created: true }
|
||||
));
|
||||
assert!(matches!(created_changes[0].payload.update_source, UpdateSource::Sync));
|
||||
|
||||
db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete workspace");
|
||||
|
||||
let all_changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(all_changes.len(), 2);
|
||||
assert!(matches!(all_changes[1].payload.change, ModelChangeEvent::Delete));
|
||||
assert!(all_changes[1].id > all_changes[0].id);
|
||||
|
||||
let changes_after_first = db
|
||||
.list_model_changes_after(all_changes[0].id, 10)
|
||||
.expect("Failed to list changes after cursor");
|
||||
assert_eq!(changes_after_first.len(), 1);
|
||||
assert!(matches!(changes_after_first[0].payload.change, ModelChangeEvent::Delete));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prunes_old_model_changes() {
|
||||
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||
let db = query_manager.connect();
|
||||
|
||||
db.upsert_workspace(
|
||||
&Workspace {
|
||||
name: "Prune Test".to_string(),
|
||||
setting_follow_redirects: true,
|
||||
setting_validate_certificates: true,
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::Sync,
|
||||
)
|
||||
.expect("Failed to upsert workspace");
|
||||
|
||||
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(changes.len(), 1);
|
||||
|
||||
db.conn
|
||||
.resolve()
|
||||
.execute(
|
||||
"UPDATE model_changes SET created_at = '2000-01-01 00:00:00.000' WHERE id = ?1",
|
||||
params![changes[0].id],
|
||||
)
|
||||
.expect("Failed to age model change row");
|
||||
|
||||
let pruned =
|
||||
db.prune_model_changes_older_than_days(30).expect("Failed to prune model changes");
|
||||
assert_eq!(pruned, 1);
|
||||
assert!(db.list_model_changes_after(0, 10).expect("Failed to list changes").is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_model_changes_since_uses_timestamp_with_id_tiebreaker() {
|
||||
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||
let db = query_manager.connect();
|
||||
|
||||
let workspace = db
|
||||
.upsert_workspace(
|
||||
&Workspace {
|
||||
name: "Cursor Test".to_string(),
|
||||
setting_follow_redirects: true,
|
||||
setting_validate_certificates: true,
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::Sync,
|
||||
)
|
||||
.expect("Failed to upsert workspace");
|
||||
db.delete_workspace_by_id(&workspace.id, &UpdateSource::Sync)
|
||||
.expect("Failed to delete workspace");
|
||||
|
||||
let all = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(all.len(), 2);
|
||||
|
||||
let fixed_ts = "2026-02-16 00:00:00.000";
|
||||
db.conn
|
||||
.resolve()
|
||||
.execute("UPDATE model_changes SET created_at = ?1", params![fixed_ts])
|
||||
.expect("Failed to normalize timestamps");
|
||||
|
||||
let after_first =
|
||||
db.list_model_changes_since(fixed_ts, all[0].id, 10).expect("Failed to query cursor");
|
||||
assert_eq!(after_first.len(), 1);
|
||||
assert_eq!(after_first[0].id, all[1].id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prunes_old_model_changes_by_hours() {
|
||||
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||
let db = query_manager.connect();
|
||||
|
||||
db.upsert_workspace(
|
||||
&Workspace {
|
||||
name: "Prune Hour Test".to_string(),
|
||||
setting_follow_redirects: true,
|
||||
setting_validate_certificates: true,
|
||||
..Default::default()
|
||||
},
|
||||
&UpdateSource::Sync,
|
||||
)
|
||||
.expect("Failed to upsert workspace");
|
||||
|
||||
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(changes.len(), 1);
|
||||
|
||||
db.conn
|
||||
.resolve()
|
||||
.execute(
|
||||
"UPDATE model_changes SET created_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW', '-2 hours') WHERE id = ?1",
|
||||
params![changes[0].id],
|
||||
)
|
||||
.expect("Failed to age model change row");
|
||||
|
||||
let pruned =
|
||||
db.prune_model_changes_older_than_hours(1).expect("Failed to prune model changes");
|
||||
assert_eq!(pruned, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn list_model_changes_deserializes_http_response_event_payload() {
|
||||
let (query_manager, _blob_manager, _rx) = init_in_memory().expect("Failed to init DB");
|
||||
let db = query_manager.connect();
|
||||
|
||||
let payload = json!({
|
||||
"model": {
|
||||
"model": "http_response_event",
|
||||
"id": "re_test",
|
||||
"createdAt": "2026-02-16T21:01:34.809162",
|
||||
"updatedAt": "2026-02-16T21:01:34.809163",
|
||||
"workspaceId": "wk_test",
|
||||
"responseId": "rs_test",
|
||||
"event": {
|
||||
"type": "info",
|
||||
"message": "hello"
|
||||
}
|
||||
},
|
||||
"updateSource": { "type": "sync" },
|
||||
"change": { "type": "upsert", "created": false }
|
||||
});
|
||||
|
||||
db.conn
|
||||
.resolve()
|
||||
.execute(
|
||||
r#"
|
||||
INSERT INTO model_changes (model, model_id, change, update_source, payload)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5)
|
||||
"#,
|
||||
params![
|
||||
"http_response_event",
|
||||
"re_test",
|
||||
r#"{"type":"upsert","created":false}"#,
|
||||
r#"{"type":"sync"}"#,
|
||||
payload.to_string(),
|
||||
],
|
||||
)
|
||||
.expect("Failed to insert model change row");
|
||||
|
||||
let changes = db.list_model_changes_after(0, 10).expect("Failed to list changes");
|
||||
assert_eq!(changes.len(), 1);
|
||||
assert_eq!(changes[0].payload.model.model(), "http_response_event");
|
||||
assert_eq!(changes[0].payload.model.id(), "re_test");
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,9 @@
|
||||
use super::dedupe_headers;
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
|
||||
use crate::models::{
|
||||
Folder, FolderIden, HttpRequestHeader, WebsocketRequest, WebsocketRequestIden,
|
||||
};
|
||||
use crate::util::UpdateSource;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
@@ -15,6 +17,22 @@ impl<'a> DbContext<'a> {
|
||||
self.find_many(WebsocketRequestIden::WorkspaceId, workspace_id, None)
|
||||
}
|
||||
|
||||
pub fn list_websocket_requests_for_folder_recursive(
|
||||
&self,
|
||||
folder_id: &str,
|
||||
) -> Result<Vec<WebsocketRequest>> {
|
||||
let mut children = Vec::new();
|
||||
for folder in self.find_many::<Folder>(FolderIden::FolderId, folder_id, None)? {
|
||||
children.extend(self.list_websocket_requests_for_folder_recursive(&folder.id)?);
|
||||
}
|
||||
for request in
|
||||
self.find_many::<WebsocketRequest>(WebsocketRequestIden::FolderId, folder_id, None)?
|
||||
{
|
||||
children.push(request);
|
||||
}
|
||||
Ok(children)
|
||||
}
|
||||
|
||||
pub fn delete_websocket_request(
|
||||
&self,
|
||||
websocket_request: &WebsocketRequest,
|
||||
|
||||
@@ -25,7 +25,7 @@ impl QueryManager {
|
||||
.expect("Failed to gain lock on DB")
|
||||
.get()
|
||||
.expect("Failed to get a new DB connection from the pool");
|
||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
|
||||
DbContext { _events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) }
|
||||
}
|
||||
|
||||
pub fn with_conn<F, T>(&self, func: F) -> T
|
||||
@@ -39,8 +39,10 @@ impl QueryManager {
|
||||
.get()
|
||||
.expect("Failed to get new DB connection from the pool");
|
||||
|
||||
let db_context =
|
||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Connection(conn) };
|
||||
let db_context = DbContext {
|
||||
_events_tx: self.events_tx.clone(),
|
||||
conn: ConnectionOrTx::Connection(conn),
|
||||
};
|
||||
|
||||
func(&db_context)
|
||||
}
|
||||
@@ -62,8 +64,10 @@ impl QueryManager {
|
||||
.transaction_with_behavior(TransactionBehavior::Immediate)
|
||||
.expect("Failed to start DB transaction");
|
||||
|
||||
let db_context =
|
||||
DbContext { events_tx: self.events_tx.clone(), conn: ConnectionOrTx::Transaction(&tx) };
|
||||
let db_context = DbContext {
|
||||
_events_tx: self.events_tx.clone(),
|
||||
conn: ConnectionOrTx::Transaction(&tx),
|
||||
};
|
||||
|
||||
match func(&db_context) {
|
||||
Ok(val) => {
|
||||
|
||||
20
crates/yaak-plugins/bindings/gen_events.ts
generated
20
crates/yaak-plugins/bindings/gen_events.ts
generated
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user