Mirror of https://github.com/mountain-loop/yaak.git (synced 2026-01-31 09:42:10 -05:00)
Compare commits
53 Commits
omnara/pre … v2026.2.0-
| Author | SHA1 | Date |
|---|---|---|
| | c2f068970b | |
| | eec2d6bc38 | |
| | efa22e470e | |
| | c00d2e981f | |
| | 9c45254952 | |
| | d031ff231a | |
| | f056894ddb | |
| | 1b0315165f | |
| | bd7e840a57 | |
| | 8969748c3c | |
| | 4e15ac10a6 | |
| | 47a3d44888 | |
| | eb10910d20 | |
| | 6ba83d424d | |
| | beb47a6b6a | |
| | 1893b8f8dd | |
| | 7a5bca7aae | |
| | 9a75bc2ae7 | |
| | 65514e3882 | |
| | 9ddaafb79f | |
| | de47ee19ec | |
| | ea730d0184 | |
| | fe706998d4 | |
| | 99209e088f | |
| | 3eb29ff2fe | |
| | b759003c83 | |
| | 6cba38ac89 | |
| | ba8f85baaf | |
| | 9970d5fa6f | |
| | d550b42ca3 | |
| | 2e1f0cb53f | |
| | eead422ada | |
| | b5753da3b7 | |
| | ae2f2459e9 | |
| | 306e6f358a | |
| | 822d52a57e | |
| | e665ce04df | |
| | e4828e1b17 | |
| | 42143249a2 | |
| | 72a7e6963d | |
| | 494e9efb64 | |
| | 9fe077f598 | |
| | a6eca1cf2e | |
| | 31edd1013f | |
| | 28e9657ea5 | |
| | ff084a224a | |
| | bbcae34575 | |
| | 2a5587c128 | |
| | c41e173a63 | |
| | 2b43407ddf | |
| | 4d75b8ef06 | |
| | aa79fb05f9 | |
| | fe01796536 | |
@@ -37,3 +37,11 @@ The skill generates markdown-formatted release notes following this structure:

**IMPORTANT**: Always add blank lines around the markdown code fence and output the markdown code block last

**IMPORTANT**: PRs by `@gschier` should not mention the @username

## After Generating Release Notes

After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:

```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
```
96 .github/workflows/release.yml (vendored)
@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [ v* ]
tags: [v*]

jobs:
build-artifacts:

@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
# Windows ARM64
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:

@@ -88,6 +88,9 @@ jobs:
& $exe --version

- run: npm ci
- run: npm run bootstrap
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
- run: npm run lint
- name: Run JS Tests
run: npm test

@@ -99,6 +102,29 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}

- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH

# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH

# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true

- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}

@@ -121,9 +147,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
releaseDraft: true
prerelease: true
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
4 Cargo.lock (generated)
@@ -8075,6 +8075,7 @@ name = "yaak-common"
version = "0.1.0"
dependencies = [
"serde_json",
"tokio",
]

[[package]]

@@ -8121,8 +8122,10 @@ dependencies = [
"serde_json",
"serde_yaml",
"thiserror 2.0.17",
"tokio",
"ts-rs",
"url",
"yaak-common",
"yaak-models",
"yaak-sync",
]

@@ -8149,6 +8152,7 @@ dependencies = [
"tonic",
"tonic-reflection",
"uuid",
"yaak-common",
"yaak-tls",
]
@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
</a>
</p>

@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources

- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://feedback.yaak.app/help)
- [Documentation](https://yaak.app/docs)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)
@@ -15,7 +15,7 @@ use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};

#[derive(Parser)]
#[command(name = "yaakcli")]

@@ -149,14 +149,7 @@ async fn render_http_request(
// Apply path placeholders (e.g., /users/:id -> /users/123)
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);

Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}

#[tokio::main]

@@ -169,16 +162,10 @@ async fn main() {
}

// Use the same app_id for both data directory and keyring
let app_id = if cfg!(debug_assertions) {
"app.yaak.desktop.dev"
} else {
"app.yaak.desktop"
};
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };

let data_dir = cli.data_dir.unwrap_or_else(|| {
dirs::data_dir()
.expect("Could not determine data directory")
.join(app_id)
dirs::data_dir().expect("Could not determine data directory").join(app_id)
});

let db_path = data_dir.join("db.sqlite");

@@ -191,9 +178,7 @@ async fn main() {

// Initialize encryption manager for secure() template function
// Use the same app_id as the Tauri app for keyring access
let encryption_manager = Arc::new(
EncryptionManager::new(query_manager.clone(), app_id),
);
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));

// Initialize plugin manager for template functions
let vendored_plugin_dir = data_dir.join("vendored-plugins");

@@ -203,9 +188,8 @@ async fn main() {
let node_bin_path = PathBuf::from("node");

// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
// Development fallback: look relative to crate root
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")

@@ -226,14 +210,10 @@ async fn main() {
// Initialize plugins from database
let plugins = db.list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
let errors =
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
for (plugin_dir, error_msg) in errors {
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
}
}

@@ -249,9 +229,7 @@ async fn main() {
}
}
Commands::Requests { workspace_id } => {
let requests = db
.list_http_requests(&workspace_id)
.expect("Failed to list requests");
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {

@@ -261,9 +239,7 @@ async fn main() {
}
}
Commands::Send { request_id } => {
let request = db
.get_http_request(&request_id)
.expect("Failed to get request");
let request = db.get_http_request(&request_id).expect("Failed to get request");

// Resolve environment chain for variable substitution
let environment_chain = db

@@ -318,18 +294,13 @@ async fn main() {
}))
} else {
// Drain events silently
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};

// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");

// Wait for event handler to finish
if let Some(handle) = verbose_handle {

@@ -383,18 +354,13 @@ async fn main() {
}
}))
} else {
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};

// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");

if let Some(handle) = verbose_handle {
let _ = handle.await;

@@ -421,12 +387,7 @@ async fn main() {
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Create {
workspace_id,
name,
method,
url,
} => {
Commands::Create { workspace_id, name, method, url } => {
let request = HttpRequest {
workspace_id,
name,
@@ -2,14 +2,6 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS execution -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>

<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>

<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
13 crates-tauri/yaak-app/macos/entitlements.yaaknode.plist (new file)
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS/V8 JIT compiler -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>

<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>
@@ -1,9 +1,11 @@
use crate::error::Result;
use crate::PluginContextExt;
use crate::error::Result;
use std::sync::Arc;
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManager;
use yaak_models::models::HttpRequestHeader;
use yaak_models::queries::workspaces::default_headers;
use yaak_plugins::events::GetThemesResponse;
use yaak_plugins::manager::PluginManager;
use yaak_plugins::native_template_functions::{

@@ -54,7 +56,12 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let plugin_context = window.plugin_context();
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
Ok(encrypt_secure_template_function(
plugin_manager,
encryption_manager,
&plugin_context,
template,
)?)
}

#[command]

@@ -92,3 +99,17 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
window.crypto().set_human_key(workspace_id, key)?;
Ok(())
}

#[command]
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
window.crypto().disable_encryption(workspace_id)?;
Ok(())
}

#[command]
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
default_headers()
}
@@ -6,33 +6,47 @@ use crate::error::Result;
use std::path::{Path, PathBuf};
use tauri::command;
use yaak_git::{
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
git_unstage,
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
git_rm_remote, git_status, git_unstage,
};

// NOTE: All of these commands are async to prevent blocking work from locking up the UI

#[command]
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
Ok(git_checkout_branch(dir, branch, force)?)
Ok(git_checkout_branch(dir, branch, force).await?)
}

#[command]
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_create_branch(dir, branch)?)
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
Ok(git_create_branch(dir, branch, base).await?)
}

#[command]
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_branch(dir, branch)?)
pub async fn cmd_git_delete_branch(
dir: &Path,
branch: &str,
force: Option<bool>,
) -> Result<BranchDeleteResult> {
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
}

#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
Ok(git_merge_branch(dir, branch, force)?)
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_remote_branch(dir, branch).await?)
}

#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_merge_branch(dir, branch).await?)
}

#[command]
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
Ok(git_rename_branch(dir, old_name, new_name).await?)
}

#[command]

@@ -50,24 +64,29 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
Ok(git_init(dir)?)
}

#[command]
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
Ok(git_clone(url, dir).await?)
}

#[command]
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
Ok(git_commit(dir, message)?)
Ok(git_commit(dir, message).await?)
}

#[command]
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
Ok(git_fetch_all(dir)?)
Ok(git_fetch_all(dir).await?)
}

#[command]
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
Ok(git_push(dir)?)
Ok(git_push(dir).await?)
}

#[command]
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
Ok(git_pull(dir)?)
Ok(git_pull(dir).await?)
}

#[command]

@@ -88,12 +107,11 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>

#[command]
pub async fn cmd_git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
Ok(git_add_credential(dir, remote_url, username, password).await?)
Ok(git_add_credential(remote_url, username, password).await?)
}

#[command]
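A side note on the `// NOTE` in the hunk above: the commands stay `async` and now `.await` the `yaak_git` helpers so slow repository work never stalls the UI. Below is a minimal, hedged sketch of that shape only — the helper name is hypothetical, and it assumes the blocking git work is pushed onto a blocking thread (e.g. via `tokio::task::spawn_blocking`), which may or may not be how the real `yaak_git` crate does it internally.

```rust
use std::path::{Path, PathBuf};

// Illustrative stand-ins only; not the real yaak_git API or error type.
type Result<T> = std::result::Result<T, String>;

async fn git_status_async(dir: &Path) -> Result<String> {
    let dir: PathBuf = dir.to_path_buf();
    // Push blocking repository work onto a dedicated blocking thread so the
    // async runtime (and the UI it drives) is never stalled.
    tokio::task::spawn_blocking(move || -> Result<String> {
        // ... open the repository and compute status here (blocking) ...
        Ok(format!("status of {}", dir.display()))
    })
    .await
    .map_err(|e| e.to_string())?
}

// The command layer then simply awaits the async helper.
pub async fn cmd_git_status_sketch(dir: &Path) -> Result<String> {
    git_status_async(dir).await
}
```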
@@ -1,12 +1,12 @@
use std::collections::BTreeMap;

use crate::error::Result;
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use KeyAndValueRef::{Ascii, Binary};
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_grpc::{KeyAndValueRef, MetadataMap};
use yaak_models::models::GrpcRequest;
use crate::models_ext::QueryManagerExt;
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
use yaak_plugins::manager::PluginManager;
@@ -1,8 +1,8 @@
use crate::models_ext::QueryManagerExt;
use chrono::{NaiveDateTime, Utc};
use log::debug;
use std::sync::OnceLock;
use tauri::{AppHandle, Runtime};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;

const NAMESPACE: &str = "analytics";
@@ -1,9 +1,13 @@
use crate::PluginContextExt;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::render_http_request;
use log::{debug, warn};
use std::pin::Pin;
use std::sync::Arc;
use std::sync::atomic::{AtomicI32, Ordering};
use std::time::{Duration, Instant};
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::{File, create_dir_all};

@@ -15,22 +19,19 @@ use yaak_http::client::{
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
};
use yaak_http::cookies::CookieStore;
use yaak_http::manager::HttpConnectionManager;
use yaak_http::manager::{CachedClient, HttpConnectionManager};
use yaak_http::sender::ReqwestSender;
use yaak_http::tee_reader::TeeReader;
use yaak_http::transaction::HttpTransaction;
use yaak_http::types::{
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
};
use crate::models_ext::BlobManagerExt;
use yaak_models::blob_manager::BodyChunk;
use yaak_models::models::{
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use crate::PluginContextExt;
use yaak_plugins::events::{
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
};

@@ -173,7 +174,12 @@ async fn send_http_request_inner<R: Runtime>(
let environment_id = environment.map(|e| e.id);
let workspace = window.db().get_workspace(workspace_id)?;
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
let cb = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
let env_chain =
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;

@@ -228,12 +234,13 @@ async fn send_http_request_inner<R: Runtime>(
None => None,
};

let client = connection_manager
let cached_client = connection_manager
.get_client(&HttpConnectionOptions {
id: plugin_context.id.clone(),
validate_certificates: workspace.setting_validate_certificates,
proxy: proxy_setting,
client_certificate,
dns_overrides: workspace.setting_dns_overrides.clone(),
})
.await?;

@@ -250,7 +257,7 @@ async fn send_http_request_inner<R: Runtime>(

let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
let result = execute_transaction(
client,
cached_client,
sendable_request,
response_ctx,
cancelled_rx.clone(),

@@ -310,7 +317,7 @@ pub fn resolve_http_request<R: Runtime>(
}

async fn execute_transaction<R: Runtime>(
client: reqwest::Client,
cached_client: CachedClient,
mut sendable_request: SendableHttpRequest,
response_ctx: &mut ResponseContext<R>,
mut cancelled_rx: Receiver<bool>,

@@ -321,7 +328,10 @@ async fn execute_transaction<R: Runtime>(
let workspace_id = response_ctx.response().workspace_id.clone();
let is_persisted = response_ctx.is_persisted();

let sender = ReqwestSender::with_client(client);
// Keep a reference to the resolver for DNS timing events
let resolver = cached_client.resolver.clone();

let sender = ReqwestSender::with_client(cached_client.client);
let transaction = match cookie_store {
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
None => HttpTransaction::new(sender),

@@ -346,21 +356,39 @@ async fn execute_transaction<R: Runtime>(
let (event_tx, mut event_rx) =
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);

// Set the event sender on the DNS resolver so it can emit DNS timing events
resolver.set_event_sender(Some(event_tx.clone())).await;

// Shared state to capture DNS timing from the event processing task
let dns_elapsed = Arc::new(AtomicI32::new(0));

// Write events to DB in a task (only for persisted responses)
if is_persisted {
let response_id = response_id.clone();
let app_handle = app_handle.clone();
let update_source = response_ctx.update_source.clone();
let workspace_id = workspace_id.clone();
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
// Capture DNS timing when we see a DNS event
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
}
});
} else {
// For ephemeral responses, just drain the events
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
// For ephemeral responses, just drain the events but still capture DNS timing
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
}
});
};

// Capture request body as it's sent (only for persisted responses)

@@ -528,10 +556,14 @@ async fn execute_transaction<R: Runtime>(
// Final update with closed state and accurate byte count
response_ctx.update(|r| {
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
r.content_length = Some(written_bytes as i32);
r.state = HttpResponseState::Closed;
})?;

// Clear the event sender from the resolver since this request is done
resolver.set_event_sender(None).await;

Ok((response_ctx.response().clone(), maybe_blob_write_handle))
}
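The DNS-timing change above shares one `Arc<AtomicI32>` between the event-draining task and the code that finalizes the response. The sketch below strips that sharing pattern down to its essentials; the event and channel types are simplified placeholders, not the app's real `HttpResponseEvent` types.

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicI32, Ordering};

#[tokio::main]
async fn main() {
    // A channel standing in for the HTTP event stream (payload = DNS duration in ms).
    let (tx, mut rx) = tokio::sync::mpsc::channel::<u128>(100);
    let dns_elapsed = Arc::new(AtomicI32::new(0));

    // The drain task records the last observed DNS duration as it goes by.
    let recorder = dns_elapsed.clone();
    let drain = tokio::spawn(async move {
        while let Some(duration_ms) = rx.recv().await {
            recorder.store(duration_ms as i32, Ordering::SeqCst);
        }
    });

    // Elsewhere, the resolver side would send timings as they happen.
    tx.send(42).await.unwrap();
    drop(tx); // close the channel so the drain task finishes
    drain.await.unwrap();

    // The request finalizer reads whatever was captured.
    assert_eq!(dns_elapsed.load(Ordering::SeqCst), 42);
}
```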
@@ -1,17 +1,17 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::info;
use std::collections::BTreeMap;
use std::fs::read_to_string;
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_core::WorkspaceContext;
use yaak_models::models::{
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
};
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;

pub(crate) async fn import_data<R: Runtime>(
window: &WebviewWindow<R>,
@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
use crate::import::import_data;
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
use crate::notifications::YaakNotifier;
use crate::render::{render_grpc_request, render_template};
use crate::render::{render_grpc_request, render_json_value, render_template};
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
use crate::uri_scheme::handle_deep_link;
use error::Result as YaakResult;

@@ -101,6 +101,7 @@ struct AppMetaData {
app_data_dir: String,
app_log_dir: String,
vendored_plugin_dir: String,
default_project_dir: String,
feature_updater: bool,
feature_license: bool,
}

@@ -111,6 +112,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
let app_log_dir = app_handle.path().app_log_dir()?;
let vendored_plugin_dir =
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
Ok(AppMetaData {
is_dev: is_dev(),
version: app_handle.package_info().version.to_string(),

@@ -118,6 +120,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
app_data_dir: app_data_dir.to_string_lossy().to_string(),
app_log_dir: app_log_dir.to_string_lossy().to_string(),
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
default_project_dir: default_project_dir.to_string_lossy().to_string(),
feature_license: cfg!(feature = "license"),
feature_updater: cfg!(feature = "updater"),
})

@@ -189,7 +192,6 @@ async fn cmd_grpc_reflect<R: Runtime>(
request_id: &str,
environment_id: Option<&str>,
proto_files: Vec<String>,
skip_cache: Option<bool>,
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
grpc_handle: State<'_, Mutex<GrpcHandle>>,

@@ -224,18 +226,21 @@ async fn cmd_grpc_reflect<R: Runtime>(
let settings = window.db().get_settings();
let client_certificate =
find_client_certificate(req.url.as_str(), &settings.client_certificates);
let proto_files: Vec<PathBuf> =
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();

Ok(grpc_handle
.lock()
.await
// Always invalidate cached pool when this command is called, to force re-reflection
let mut handle = grpc_handle.lock().await;
handle.invalidate_pool(&req.id, &uri, &proto_files);

Ok(handle
.services(
&req.id,
&uri,
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&proto_files,
&metadata,
workspace.setting_validate_certificates,
client_certificate,
skip_cache.unwrap_or(false),
)
.await
.map_err(|e| GenericError(e.to_string()))?)

@@ -360,10 +365,8 @@ async fn cmd_grpc_go<R: Runtime>(

let cb = {
let cancelled_rx = cancelled_rx.clone();
let app_handle = app_handle.clone();
let environment_chain = environment_chain.clone();
let window = window.clone();
let base_msg = base_msg.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();

@@ -385,14 +388,12 @@ async fn cmd_grpc_go<R: Runtime>(
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
Ok(IncomingMsg::Message(msg)) => {
let window = window.clone();
let app_handle = app_handle.clone();
let base_msg = base_msg.clone();
let environment_chain = environment_chain.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
let msg = block_in_place(|| {
tauri::async_runtime::block_on(async {
render_template(
let result = render_template(
msg.as_str(),
environment_chain,
&PluginTemplateCallback::new(

@@ -406,24 +407,11 @@ async fn cmd_grpc_go<R: Runtime>(
),
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await
.expect("Failed to render template")
.await;
result.expect("Failed to render template")
})
});
in_msg_tx.try_send(msg.clone()).unwrap();
tauri::async_runtime::spawn(async move {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_msg.clone()
},
&UpdateSource::from_window_label(window.label()),
)
.unwrap();
});
}
Ok(IncomingMsg::Commit) => {
maybe_in_msg_tx.take();

@@ -470,12 +458,48 @@ async fn cmd_grpc_go<R: Runtime>(
)?;

async move {
// Create callback for streaming methods that handles both success and error
let on_message = {
let app_handle = app_handle.clone();
let base_event = base_event.clone();
let window_label = window.label().to_string();
move |result: std::result::Result<String, String>| match result {
Ok(msg) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
Err(error) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: format!("Failed to send message: {}", error),
event_type: GrpcEventType::Error,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
}
};

let (maybe_stream, maybe_msg) =
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
(true, true) => (
Some(
connection
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
.streaming(
&service,
&method,
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),
None,

@@ -490,6 +514,7 @@ async fn cmd_grpc_go<R: Runtime>(
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),

@@ -1035,14 +1060,54 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
#[tauri::command]
async fn cmd_get_http_authentication_config<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
_environment_id: Option<&str>,
environment_id: Option<&str>,
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
};

// Resolve environment chain and render the values for token lookup
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Preview,
);

// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;

// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;

Ok(plugin_manager
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
.get_http_authentication_config(
&window.plugin_context(),
auth_name,
rendered_values,
model.id(),
)
.await?)
}
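The authentication-config command above (and the action command in the next hunk) renders user-supplied values by bouncing them through `serde_json::Value`. Here is a minimal sketch of that round-trip with a plain string map standing in for the app's `JsonPrimitive` values and a no-op where the template renderer would run; names and types are illustrative only.

```rust
use std::collections::HashMap;
use serde_json::Value;

fn render_roundtrip(values: HashMap<String, String>) -> serde_json::Result<HashMap<String, String>> {
    // HashMap -> serde_json::Value so a generic renderer can walk it
    let as_value: Value = serde_json::to_value(&values)?;

    // ... a template renderer would rewrite string leaves of `as_value` here ...
    let rendered = as_value;

    // Value -> HashMap again for the strongly-typed plugin call
    serde_json::from_value(rendered)
}

fn main() {
    let mut values = HashMap::new();
    values.insert("token".to_string(), "{{ secret }}".to_string());
    let back = render_roundtrip(values).unwrap();
    assert_eq!(back["token"], "{{ secret }}");
}
```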
@@ -1089,19 +1154,54 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
#[tauri::command]
async fn cmd_call_http_authentication_action<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
action_index: i32,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
_environment_id: Option<&str>,
environment_id: Option<&str>,
) -> YaakResult<()> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
};

// Resolve environment chain and render the values
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Send,
);

// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;

// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;

Ok(plugin_manager
.call_http_authentication_action(
&window.plugin_context(),
auth_name,
action_index,
values,
rendered_values,
&model.id(),
)
.await?)

@@ -1621,6 +1721,8 @@ pub fn run() {
//
// Migrated commands
crate::commands::cmd_decrypt_template,
crate::commands::cmd_default_headers,
crate::commands::cmd_disable_encryption,
crate::commands::cmd_enable_encryption,
crate::commands::cmd_get_themes,
crate::commands::cmd_reveal_workspace_key,

@@ -1649,10 +1751,13 @@ pub fn run() {
git_ext::cmd_git_checkout,
git_ext::cmd_git_branch,
git_ext::cmd_git_delete_branch,
git_ext::cmd_git_delete_remote_branch,
git_ext::cmd_git_merge_branch,
git_ext::cmd_git_rename_branch,
git_ext::cmd_git_status,
git_ext::cmd_git_log,
git_ext::cmd_git_initialize,
git_ext::cmd_git_clone,
git_ext::cmd_git_commit,
git_ext::cmd_git_fetch_all,
git_ext::cmd_git_push,

@@ -1664,6 +1769,13 @@ pub fn run() {
git_ext::cmd_git_add_remote,
git_ext::cmd_git_rm_remote,
//
// Plugin commands
plugins_ext::cmd_plugins_search,
plugins_ext::cmd_plugins_install,
plugins_ext::cmd_plugins_uninstall,
plugins_ext::cmd_plugins_updates,
plugins_ext::cmd_plugins_update_all,
//
// WebSocket commands
ws_ext::cmd_ws_upsert_request,
ws_ext::cmd_ws_duplicate_request,
@@ -1,5 +1,6 @@
use crate::error::Result;
use crate::history::get_or_upsert_launch_info;
use crate::models_ext::QueryManagerExt;
use chrono::{DateTime, Utc};
use log::{debug, info};
use reqwest::Method;

@@ -8,9 +9,8 @@ use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;

// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
@@ -1,5 +1,7 @@
use crate::error::Result;
use crate::http_request::send_http_request_with_context;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::{render_grpc_request, render_http_request, render_json_value};
use crate::window::{CreateWindowConfig, create_window};
use crate::{

@@ -14,11 +16,8 @@ use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak_crypto::manager::EncryptionManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use crate::models_ext::BlobManagerExt;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{

@@ -32,6 +31,7 @@ use yaak_plugins::events::{
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_templates::{RenderErrorBehavior, RenderOptions};

pub(crate) async fn handle_plugin_event<R: Runtime>(

@@ -57,6 +57,10 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = app_handle
.db()

@@ -166,7 +170,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;

@@ -187,7 +196,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;

@@ -218,7 +232,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
@@ -17,7 +17,7 @@ use tauri::path::BaseDirectory;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
generate_handler, is_dev,
is_dev,
};
use tokio::sync::Mutex;
use ts_rs::TS;

@@ -132,7 +132,7 @@ impl PluginUpdater {
// ============================================================================

#[command]
pub(crate) async fn cmd_plugins_search<R: Runtime>(
pub async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {

@@ -141,7 +141,7 @@ pub(crate) async fn cmd_plugins_search<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_install<R: Runtime>(
pub async fn cmd_plugins_install<R: Runtime>(
window: WebviewWindow<R>,
name: &str,
version: Option<String>,

@@ -163,7 +163,7 @@ pub(crate) async fn cmd_plugins_install<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
pub async fn cmd_plugins_uninstall<R: Runtime>(
plugin_id: &str,
window: WebviewWindow<R>,
) -> Result<Plugin> {

@@ -174,7 +174,7 @@ pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
pub async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let http_client = yaak_api_client(&app_handle)?;

@@ -183,7 +183,7 @@ pub(crate) async fn cmd_plugins_updates<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
pub async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let http_client = yaak_api_client(window.app_handle())?;

@@ -233,13 +233,6 @@ pub(crate) async fn cmd_plugins_update_all<R: Runtime>(

pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-plugins")
.invoke_handler(generate_handler![
cmd_plugins_search,
cmd_plugins_install,
cmd_plugins_uninstall,
cmd_plugins_updates,
cmd_plugins_update_all
])
.setup(|app_handle, _| {
// Resolve paths for plugin manager
let vendored_plugin_dir = app_handle
@@ -3,6 +3,7 @@ use std::path::PathBuf;
use std::time::{Duration, Instant};

use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};

@@ -11,7 +12,6 @@ use tauri_plugin_updater::{Update, UpdaterExt};
use tokio::task::block_in_place;
use tokio::time::sleep;
use ts_rs::TS;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;
@@ -1,18 +1,18 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::import::import_data;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::{info, warn};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;

pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,

@@ -55,7 +55,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
&plugin_context,
name,
version,
).await?;
)
.await?;
app_handle.emit(
"show_toast",
ShowToastRequest {
@@ -1,4 +1,5 @@
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;

@@ -8,7 +9,6 @@ use tauri::{
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;
use crate::models_ext::QueryManagerExt;

const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;
@@ -1,9 +1,9 @@
//! WebSocket Tauri command wrappers
//! These wrap the core yaak-ws functionality for Tauri IPC.

use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use http::HeaderMap;
use log::{debug, info, warn};
use std::str::FromStr;

@@ -56,9 +56,10 @@ pub async fn cmd_ws_delete_request<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle
.db()
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
Ok(app_handle.db().delete_websocket_request_by_id(
request_id,
&UpdateSource::from_window_label(window.label()),
)?)
}

#[command]

@@ -67,12 +68,10 @@ pub async fn cmd_ws_delete_connection<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketConnection> {
Ok(app_handle
.db()
.delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
Ok(app_handle.db().delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
}

#[command]

@@ -296,8 +295,10 @@ pub async fn cmd_ws_connect<R: Runtime>(
)
.await?;
for header in plugin_result.set_headers.unwrap_or_default() {
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
{
match (
http::HeaderName::from_str(&header.name),
HeaderValue::from_str(&header.value),
) {
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}
@@ -44,8 +44,8 @@
"vendored/protoc/include",
"vendored/plugins",
"vendored/plugin-runtime",
"vendored/node/yaaknode",
"vendored/protoc/yaakprotoc"
"vendored/node/yaaknode*",
"vendored/protoc/yaakprotoc*"
]
}
}
@@ -8,10 +8,10 @@ use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;

/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.
@@ -6,3 +6,4 @@ publish = false

[dependencies]
serde_json = { workspace = true }
tokio = { workspace = true, features = ["process"] }
16 crates/yaak-common/src/command.rs (new file)
@@ -0,0 +1,16 @@
use std::ffi::OsStr;

#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;

/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
    #[allow(unused_mut)]
    let mut cmd = tokio::process::Command::new(program);
    #[cfg(target_os = "windows")]
    {
        use std::os::windows::process::CommandExt;
        cmd.creation_flags(CREATE_NO_WINDOW);
    }
    cmd
}
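A minimal usage sketch for the new helper above (not part of the diff; the `git --version` invocation is illustrative only):

```rust
use yaak_common::command::new_xplatform_command;

// Hypothetical caller: run `git --version` without flashing a console window on Windows.
async fn git_version() -> std::io::Result<String> {
    let mut cmd = new_xplatform_command("git");
    cmd.arg("--version");
    let out = cmd.output().await?;
    Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
}
```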
@@ -1,2 +1,3 @@
pub mod command;
pub mod platform;
pub mod serde;
@@ -11,3 +11,7 @@ export function revealWorkspaceKey(workspaceId: string) {
|
||||
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
|
||||
return invoke<void>('cmd_set_workspace_key', args);
|
||||
}
|
||||
|
||||
export function disableEncryption(workspaceId: string) {
|
||||
return invoke<void>('cmd_disable_encryption', { workspaceId });
|
||||
}
|
||||
|
||||
@@ -115,6 +115,35 @@ impl EncryptionManager {
|
||||
self.set_workspace_key(workspace_id, &wkey)
|
||||
}
|
||||
|
||||
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
|
||||
info!("Disabling encryption for {workspace_id}");
|
||||
|
||||
self.query_manager.with_tx::<(), Error>(|tx| {
|
||||
let workspace = tx.get_workspace(workspace_id)?;
|
||||
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
|
||||
|
||||
// Clear encryption challenge on workspace
|
||||
tx.upsert_workspace(
|
||||
&Workspace { encryption_key_challenge: None, ..workspace },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
// Clear encryption key on workspace meta
|
||||
tx.upsert_workspace_meta(
|
||||
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Remove from cache
|
||||
let mut cache = self.cached_workspace_keys.lock().unwrap();
|
||||
cache.remove(workspace_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
|
||||
{
|
||||
let cache = self.cached_workspace_keys.lock().unwrap();
|
||||
|
||||
@@ -12,7 +12,9 @@ serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
serde_yaml = "0.9.34"
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true, features = ["io-util"] }
|
||||
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
|
||||
url = "2"
|
||||
yaak-common = { workspace = true }
|
||||
yaak-models = { workspace = true }
|
||||
yaak-sync = { workspace = true }
|
||||
|
||||
4 crates/yaak-git/bindings/gen_git.ts generated
@@ -1,6 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { SyncModel } from "./gen_models";
|
||||
|
||||
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
|
||||
|
||||
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||
|
||||
export type GitAuthor = { name: string | null, email: string | null, };
|
||||
|
||||
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };
|
||||
|
||||
4 crates/yaak-git/bindings/gen_models.ts generated
@@ -1,5 +1,7 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
|
||||
|
||||
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
@@ -18,4 +20,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
@@ -3,9 +3,10 @@ import { invoke } from '@tauri-apps/api/core';
|
||||
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
|
||||
import { queryClient } from '@yaakapp/app/lib/queryClient';
|
||||
import { useMemo } from 'react';
|
||||
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
|
||||
export * from './bindings/gen_git';
|
||||
export * from './bindings/gen_models';
|
||||
|
||||
export interface GitCredentials {
|
||||
username: string;
|
||||
@@ -59,7 +60,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
@@ -90,21 +90,31 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
branch: createFastMutation<void, string, { branch: string }>({
|
||||
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
|
||||
mutationKey: ['git', 'branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
|
||||
mergeBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'merge', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
|
||||
mutationKey: ['git', 'delete-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'delete-remote-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
|
||||
mutationKey: ['git', 'rename-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
|
||||
mutationKey: ['git', 'checkout', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
|
||||
@@ -144,7 +154,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
@@ -166,3 +175,28 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
async function getRemotes(dir: string) {
|
||||
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone a git repository, prompting for credentials if needed.
|
||||
*/
|
||||
export async function gitClone(
|
||||
url: string,
|
||||
dir: string,
|
||||
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
|
||||
): Promise<CloneResult> {
|
||||
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
if (result.type !== 'needs_credentials') return result;
|
||||
|
||||
// Prompt for credentials
|
||||
const creds = await promptCredentials({ url: result.url, error: result.error });
|
||||
if (creds == null) return {type: 'cancelled'};
|
||||
|
||||
// Store credentials and retry
|
||||
await invoke('cmd_git_add_credential', {
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
});
|
||||
|
||||
return invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
}
|
||||
|
||||
@@ -1,38 +1,30 @@
|
||||
use crate::error::Error::GitNotFound;
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::process::Stdio;
|
||||
use tokio::process::Command;
|
||||
use yaak_common::command::new_xplatform_command;
|
||||
|
||||
use crate::error::Error::GitNotFound;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
/// Create a git command that runs in the specified directory
|
||||
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
let mut cmd = new_binary_command_global().await?;
|
||||
cmd.arg("-C").arg(dir);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
|
||||
|
||||
pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
/// Create a git command without a specific directory (for global operations)
|
||||
pub(crate) async fn new_binary_command_global() -> Result<Command> {
|
||||
// 1. Probe that `git` exists and is runnable
|
||||
let mut probe = Command::new("git");
|
||||
let mut probe = new_xplatform_command("git");
|
||||
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
probe.creation_flags(CREATE_NO_WINDOW);
|
||||
}
|
||||
|
||||
let status = probe.status().map_err(|_| GitNotFound)?;
|
||||
let status = probe.status().await.map_err(|_| GitNotFound)?;
|
||||
|
||||
if !status.success() {
|
||||
return Err(GitNotFound);
|
||||
}
|
||||
|
||||
// 2. Build the reusable git command
|
||||
let mut cmd = Command::new("git");
|
||||
cmd.arg("-C").arg(dir);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
cmd.creation_flags(CREATE_NO_WINDOW);
|
||||
}
|
||||
|
||||
let cmd = new_xplatform_command("git");
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
@@ -1,99 +1,153 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::merge::do_merge;
|
||||
use crate::repository::open_repo;
|
||||
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
|
||||
use git2::BranchType;
|
||||
use git2::build::CheckoutBuilder;
|
||||
use log::info;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
if branch_name.starts_with("origin/") {
|
||||
return git_checkout_remote_branch(dir, branch_name, force);
|
||||
}
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "snake_case", tag = "type")]
|
||||
#[ts(export, export_to = "gen_git.ts")]
|
||||
pub enum BranchDeleteResult {
|
||||
Success { message: String },
|
||||
NotFullyMerged,
|
||||
}
|
||||
|
||||
let repo = open_repo(dir)?;
|
||||
let branch = get_branch_by_name(&repo, branch_name)?;
|
||||
let branch_ref = branch.into_reference();
|
||||
let branch_tree = branch_ref.peel_to_tree()?;
|
||||
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
|
||||
let mut options = CheckoutBuilder::default();
|
||||
let mut args = vec!["checkout"];
|
||||
if force {
|
||||
options.force();
|
||||
args.push("--force");
|
||||
}
|
||||
args.push(branch_name);
|
||||
|
||||
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
|
||||
repo.set_head(branch_ref.name().unwrap())?;
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(&args)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(branch_name.to_string())
|
||||
}
|
||||
|
||||
pub(crate) fn git_checkout_remote_branch(
|
||||
dir: &Path,
|
||||
branch_name: &str,
|
||||
force: bool,
|
||||
) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
let repo = open_repo(dir)?;
|
||||
|
||||
let refname = format!("refs/remotes/origin/{}", branch_name);
|
||||
let remote_ref = repo.find_reference(&refname)?;
|
||||
let commit = remote_ref.peel_to_commit()?;
|
||||
|
||||
let mut new_branch = repo.branch(branch_name, &commit, false)?;
|
||||
let upstream_name = format!("origin/{}", branch_name);
|
||||
new_branch.set_upstream(Some(&upstream_name))?;
|
||||
|
||||
git_checkout_branch(dir, branch_name, force)
|
||||
}
|
||||
|
||||
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let head = match repo.head() {
|
||||
Ok(h) => h,
|
||||
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
|
||||
let msg = "Cannot create branch when there are no commits";
|
||||
return Err(GenericError(msg.into()));
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
let head = head.peel_to_commit()?;
|
||||
|
||||
repo.branch(name, &head, false)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let mut branch = get_branch_by_name(&repo, name)?;
|
||||
|
||||
if branch.is_head() {
|
||||
info!("Deleting head branch");
|
||||
let branches = repo.branches(Some(BranchType::Local))?;
|
||||
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
|
||||
let other_branch = match other_branch {
|
||||
None => return Err(GenericError("Cannot delete only branch".into())),
|
||||
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
|
||||
};
|
||||
|
||||
git_checkout_branch(dir, &other_branch, true)?;
|
||||
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
cmd.arg("branch").arg(name);
|
||||
if let Some(base_branch) = base {
|
||||
cmd.arg(base_branch);
|
||||
}
|
||||
|
||||
branch.delete()?;
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let local_branch = get_current_branch(&repo)?.unwrap();
|
||||
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
|
||||
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
|
||||
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
|
||||
let out =
|
||||
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
|
||||
|
||||
do_merge(&repo, &local_branch, &commit_to_merge)?;
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
|
||||
return Ok(BranchDeleteResult::NotFullyMerged);
|
||||
}
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(BranchDeleteResult::Success { message: combined })
|
||||
}
|
||||
|
||||
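A hedged sketch of handling the `NotFullyMerged` case returned above (illustrative only; a real caller would confirm with the user before retrying with `force`, as the frontend's optional `force` flag suggests):

```rust
use std::path::Path;
use yaak_git::{BranchDeleteResult, git_delete_branch};

// Illustrative only: retry with force when the first attempt reports "not fully merged".
async fn delete_branch_or_force(dir: &Path, name: &str) -> yaak_git::error::Result<()> {
    match git_delete_branch(dir, name, false).await? {
        BranchDeleteResult::Success { message } => println!("{message}"),
        BranchDeleteResult::NotFullyMerged => {
            git_delete_branch(dir, name, true).await?;
        }
    }
    Ok(())
}
```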
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["merge", name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
// Check for merge conflicts
|
||||
if combined.to_lowercase().contains("conflict") {
|
||||
return Err(GenericError(
|
||||
"Merge conflicts detected. Please resolve them manually.".to_string(),
|
||||
));
|
||||
}
|
||||
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
// Remote branch names come in as "origin/branch-name", extract the branch name
|
||||
let branch_name = name.trim_start_matches("origin/");
|
||||
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["push", "origin", "--delete", branch_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["branch", "-m", old_name, new_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
53 crates/yaak-git/src/clone.rs Normal file
@@ -0,0 +1,53 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
    Success,
    Cancelled,
    NeedsCredentials { url: String, error: Option<String> },
}

pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
    let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
    fs::create_dir_all(parent)
        .map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
    let mut cmd = new_binary_command(parent).await?;
    cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");

    let out =
        cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);
    let combined_lower = combined.to_lowercase();

    info!("Cloned status={}: {combined}", out.status);

    if !out.status.success() {
        // Check for credentials error
        if combined_lower.contains("could not read") {
            return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
        }
        if combined_lower.contains("unable to access")
            || combined_lower.contains("authentication failed")
        {
            return Ok(CloneResult::NeedsCredentials {
                url: url.to_string(),
                error: Some(combined.to_string()),
            });
        }
        return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
    }

    Ok(CloneResult::Success)
}
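A hedged sketch of how a caller might consume `git_clone` (paths, URL, and logging are placeholders; the credential retry mirrors the TypeScript `gitClone` helper earlier in the diff):

```rust
use std::path::Path;
use yaak_git::{CloneResult, git_clone};

// Illustrative only: clone and surface the needs_credentials case to the UI layer.
async fn clone_or_prompt(url: &str, dir: &Path) -> yaak_git::error::Result<()> {
    match git_clone(url, dir).await? {
        CloneResult::Success => println!("cloned into {}", dir.display()),
        CloneResult::Cancelled => println!("clone cancelled"),
        CloneResult::NeedsCredentials { url, error } => {
            // A real caller would prompt for credentials, store them via
            // git_add_credential, and retry, as the frontend helper does.
            println!("credentials required for {url}: {error:?}");
        }
    }
    Ok(())
}
```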
@@ -3,8 +3,9 @@ use crate::error::Error::GenericError;
|
||||
use log::info;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
|
||||
let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;
|
||||
pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
|
||||
let out =
|
||||
new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
|
||||
@@ -1,24 +1,19 @@
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::binary::new_binary_command_global;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::process::Stdio;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use url::Url;
|
||||
|
||||
pub async fn git_add_credential(
|
||||
dir: &Path,
|
||||
remote_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<()> {
|
||||
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
|
||||
let url = Url::parse(remote_url)
|
||||
.map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
|
||||
let protocol = url.scheme();
|
||||
let host = url.host_str().unwrap();
|
||||
let path = Some(url.path());
|
||||
|
||||
let mut child = new_binary_command(dir)?
|
||||
let mut child = new_binary_command_global()
|
||||
.await?
|
||||
.args(["credential", "approve"])
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::null())
|
||||
@@ -26,19 +21,21 @@ pub async fn git_add_credential(
|
||||
|
||||
{
|
||||
let stdin = child.stdin.as_mut().unwrap();
|
||||
writeln!(stdin, "protocol={}", protocol)?;
|
||||
writeln!(stdin, "host={}", host)?;
|
||||
stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
|
||||
stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
|
||||
if let Some(path) = path {
|
||||
if !path.is_empty() {
|
||||
writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
|
||||
stdin
|
||||
.write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
writeln!(stdin, "username={}", username)?;
|
||||
writeln!(stdin, "password={}", password)?;
|
||||
writeln!(stdin)?; // blank line terminator
|
||||
stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
|
||||
stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
|
||||
stdin.write_all(b"\n").await?; // blank line terminator
|
||||
}
|
||||
|
||||
let status = child.wait()?;
|
||||
let status = child.wait().await?;
|
||||
if !status.success() {
|
||||
return Err(GenericError("Failed to approve git credential".to_string()));
|
||||
}
|
||||
|
||||
@@ -3,10 +3,12 @@ use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn git_fetch_all(dir: &Path) -> Result<()> {
|
||||
let out = new_binary_command(dir)?
|
||||
pub async fn git_fetch_all(dir: &Path) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["fetch", "--all", "--prune", "--tags"])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
mod add;
|
||||
mod binary;
|
||||
mod branch;
|
||||
mod clone;
|
||||
mod commit;
|
||||
mod credential;
|
||||
pub mod error;
|
||||
mod fetch;
|
||||
mod init;
|
||||
mod log;
|
||||
mod merge;
|
||||
|
||||
mod pull;
|
||||
mod push;
|
||||
mod remotes;
|
||||
@@ -18,7 +19,11 @@ mod util;
|
||||
|
||||
// Re-export all git functions for external use
|
||||
pub use add::git_add;
|
||||
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
|
||||
pub use branch::{
|
||||
BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
|
||||
git_delete_remote_branch, git_merge_branch, git_rename_branch,
|
||||
};
|
||||
pub use clone::{CloneResult, git_clone};
|
||||
pub use commit::git_commit;
|
||||
pub use credential::git_add_credential;
|
||||
pub use fetch::git_fetch_all;
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
use crate::error::Error::MergeConflicts;
|
||||
use crate::util::bytes_to_string;
|
||||
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
|
||||
use log::{debug, info};
|
||||
|
||||
pub(crate) fn do_merge(
|
||||
repo: &Repository,
|
||||
local_branch: &Branch,
|
||||
commit_to_merge: &AnnotatedCommit,
|
||||
) -> crate::error::Result<()> {
|
||||
debug!("Merging remote branches");
|
||||
let analysis = repo.merge_analysis(&[&commit_to_merge])?;
|
||||
|
||||
if analysis.0.is_fast_forward() {
|
||||
let refname = bytes_to_string(local_branch.get().name_bytes())?;
|
||||
match repo.find_reference(&refname) {
|
||||
Ok(mut r) => {
|
||||
merge_fast_forward(repo, &mut r, &commit_to_merge)?;
|
||||
}
|
||||
Err(_) => {
|
||||
// The branch doesn't exist, so set the reference to the commit directly. Usually
|
||||
// this is because you are pulling into an empty repository.
|
||||
repo.reference(
|
||||
&refname,
|
||||
commit_to_merge.id(),
|
||||
true,
|
||||
&format!("Setting {} to {}", refname, commit_to_merge.id()),
|
||||
)?;
|
||||
repo.set_head(&refname)?;
|
||||
repo.checkout_head(Some(
|
||||
git2::build::CheckoutBuilder::default()
|
||||
.allow_conflicts(true)
|
||||
.conflict_style_merge(true)
|
||||
.force(),
|
||||
))?;
|
||||
}
|
||||
};
|
||||
} else if analysis.0.is_normal() {
|
||||
let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
|
||||
merge_normal(repo, &head_commit, commit_to_merge)?;
|
||||
} else {
|
||||
debug!("Skipping merge. Nothing to do")
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn merge_fast_forward(
|
||||
repo: &Repository,
|
||||
local_reference: &mut Reference,
|
||||
remote_commit: &AnnotatedCommit,
|
||||
) -> crate::error::Result<()> {
|
||||
info!("Performing fast forward");
|
||||
let name = match local_reference.name() {
|
||||
Some(s) => s.to_string(),
|
||||
None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
|
||||
};
|
||||
let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
|
||||
local_reference.set_target(remote_commit.id(), &msg)?;
|
||||
repo.set_head(&name)?;
|
||||
repo.checkout_head(Some(
|
||||
git2::build::CheckoutBuilder::default()
|
||||
// For some reason, the force is required to make the working directory actually get
|
||||
// updated I suspect we should be adding some logic to handle dirty working directory
|
||||
// states, but this is just an example so maybe not.
|
||||
.force(),
|
||||
))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn merge_normal(
|
||||
repo: &Repository,
|
||||
local: &AnnotatedCommit,
|
||||
remote: &AnnotatedCommit,
|
||||
) -> crate::error::Result<()> {
|
||||
info!("Performing normal merge");
|
||||
let local_tree = repo.find_commit(local.id())?.tree()?;
|
||||
let remote_tree = repo.find_commit(remote.id())?.tree()?;
|
||||
let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;
|
||||
|
||||
let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;
|
||||
|
||||
if idx.has_conflicts() {
|
||||
let conflicts = idx.conflicts()?;
|
||||
for conflict in conflicts {
|
||||
if let Ok(conflict) = conflict {
|
||||
print_conflict(&conflict);
|
||||
}
|
||||
}
|
||||
return Err(MergeConflicts);
|
||||
}
|
||||
|
||||
let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
|
||||
// now create the merge commit
|
||||
let msg = format!("Merge: {} into {}", remote.id(), local.id());
|
||||
let sig = repo.signature()?;
|
||||
let local_commit = repo.find_commit(local.id())?;
|
||||
let remote_commit = repo.find_commit(remote.id())?;
|
||||
|
||||
// Do our merge commit and set current branch head to that commit.
|
||||
let _merge_commit = repo.commit(
|
||||
Some("HEAD"),
|
||||
&sig,
|
||||
&sig,
|
||||
&msg,
|
||||
&result_tree,
|
||||
&[&local_commit, &remote_commit],
|
||||
)?;
|
||||
|
||||
// Set working tree to match head.
|
||||
repo.checkout_head(None)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn print_conflict(conflict: &git2::IndexConflict) {
|
||||
let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
|
||||
let ours = conflict.our.as_ref().map(path_from_index_entry);
|
||||
let theirs = conflict.their.as_ref().map(path_from_index_entry);
|
||||
|
||||
println!("Conflict detected:");
|
||||
if let Some(path) = ancestor {
|
||||
println!(" Common ancestor: {:?}", path);
|
||||
}
|
||||
if let Some(path) = ours {
|
||||
println!(" Ours: {:?}", path);
|
||||
}
|
||||
if let Some(path) = theirs {
|
||||
println!(" Theirs: {:?}", path);
|
||||
}
|
||||
}
|
||||
|
||||
fn path_from_index_entry(entry: &IndexEntry) -> String {
|
||||
String::from_utf8_lossy(entry.path.as_slice()).into_owned()
|
||||
}
|
||||
@@ -17,17 +17,25 @@ pub enum PullResult {
|
||||
NeedsCredentials { url: String, error: Option<String> },
|
||||
}
|
||||
|
||||
pub fn git_pull(dir: &Path) -> Result<PullResult> {
|
||||
let repo = open_repo(dir)?;
|
||||
let branch_name = get_current_branch_name(&repo)?;
|
||||
let remote = get_default_remote_in_repo(&repo)?;
|
||||
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
|
||||
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
|
||||
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
|
||||
// Extract all git2 data before any await points (git2 types are not Send)
|
||||
let (branch_name, remote_name, remote_url) = {
|
||||
let repo = open_repo(dir)?;
|
||||
let branch_name = get_current_branch_name(&repo)?;
|
||||
let remote = get_default_remote_in_repo(&repo)?;
|
||||
let remote_name =
|
||||
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
|
||||
let remote_url =
|
||||
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
|
||||
(branch_name, remote_name, remote_url)
|
||||
};
|
||||
|
||||
let out = new_binary_command(dir)?
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["pull", &remote_name, &branch_name])
|
||||
.env("GIT_TERMINAL_PROMPT", "0")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
|
||||
@@ -17,17 +17,25 @@ pub enum PushResult {
|
||||
NeedsCredentials { url: String, error: Option<String> },
|
||||
}
|
||||
|
||||
pub fn git_push(dir: &Path) -> Result<PushResult> {
|
||||
let repo = open_repo(dir)?;
|
||||
let branch_name = get_current_branch_name(&repo)?;
|
||||
let remote = get_default_remote_for_push_in_repo(&repo)?;
|
||||
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
|
||||
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
|
||||
pub async fn git_push(dir: &Path) -> Result<PushResult> {
|
||||
// Extract all git2 data before any await points (git2 types are not Send)
|
||||
let (branch_name, remote_name, remote_url) = {
|
||||
let repo = open_repo(dir)?;
|
||||
let branch_name = get_current_branch_name(&repo)?;
|
||||
let remote = get_default_remote_for_push_in_repo(&repo)?;
|
||||
let remote_name =
|
||||
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
|
||||
let remote_url =
|
||||
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
|
||||
(branch_name, remote_name, remote_url)
|
||||
};
|
||||
|
||||
let out = new_binary_command(dir)?
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["push", &remote_name, &branch_name])
|
||||
.env("GIT_TERMINAL_PROMPT", "0")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git push: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
|
||||
@@ -47,10 +47,6 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
|
||||
Ok(branches)
|
||||
}
|
||||
|
||||
pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
|
||||
Ok(repo.find_branch(name, BranchType::Local)?)
|
||||
}
|
||||
|
||||
pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
|
||||
Ok(String::from_utf8(bytes.to_vec())?)
|
||||
}
|
||||
|
||||
@@ -22,5 +22,6 @@ tokio-stream = "0.1.14"
|
||||
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
|
||||
tonic-reflection = "0.12.3"
|
||||
uuid = { version = "1.7.0", features = ["v4"] }
|
||||
yaak-common = { workspace = true }
|
||||
yaak-tls = { workspace = true }
|
||||
thiserror = "2.0.17"
|
||||
|
||||
@@ -115,14 +115,18 @@ impl GrpcConnection {
|
||||
Ok(client.unary(req, path, codec).await?)
|
||||
}
|
||||
|
||||
pub async fn streaming(
|
||||
pub async fn streaming<F>(
|
||||
&self,
|
||||
service: &str,
|
||||
method: &str,
|
||||
stream: ReceiverStream<String>,
|
||||
metadata: &BTreeMap<String, String>,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
) -> Result<Response<Streaming<DynamicMessage>>> {
|
||||
on_message: F,
|
||||
) -> Result<Response<Streaming<DynamicMessage>>>
|
||||
where
|
||||
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
|
||||
{
|
||||
let method = &self.method(&service, &method).await?;
|
||||
let mapped_stream = {
|
||||
let input_message = method.input();
|
||||
@@ -131,31 +135,39 @@ impl GrpcConnection {
|
||||
let md = metadata.clone();
|
||||
let use_reflection = self.use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
stream.filter_map(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
stream
|
||||
.then(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
let on_message = on_message.clone();
|
||||
let json_clone = json.clone();
|
||||
async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => Some(m),
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
None
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => {
|
||||
on_message(Ok(json_clone));
|
||||
Some(m)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
on_message(Err(e.to_string()));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.filter_map(|x| x)
|
||||
};
|
||||
|
||||
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
|
||||
@@ -169,14 +181,18 @@ impl GrpcConnection {
|
||||
Ok(client.streaming(req, path, codec).await?)
|
||||
}
|
||||
|
||||
pub async fn client_streaming(
|
||||
pub async fn client_streaming<F>(
|
||||
&self,
|
||||
service: &str,
|
||||
method: &str,
|
||||
stream: ReceiverStream<String>,
|
||||
metadata: &BTreeMap<String, String>,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
) -> Result<Response<DynamicMessage>> {
|
||||
on_message: F,
|
||||
) -> Result<Response<DynamicMessage>>
|
||||
where
|
||||
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
|
||||
{
|
||||
let method = &self.method(&service, &method).await?;
|
||||
let mapped_stream = {
|
||||
let input_message = method.input();
|
||||
@@ -185,31 +201,39 @@ impl GrpcConnection {
|
||||
let md = metadata.clone();
|
||||
let use_reflection = self.use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
stream.filter_map(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
stream
|
||||
.then(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
let on_message = on_message.clone();
|
||||
let json_clone = json.clone();
|
||||
async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => Some(m),
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
None
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => {
|
||||
on_message(Ok(json_clone));
|
||||
Some(m)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
on_message(Err(e.to_string()));
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.filter_map(|x| x)
|
||||
};
|
||||
|
||||
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
|
||||
@@ -316,10 +340,9 @@ impl GrpcHandle {
|
||||
metadata: &BTreeMap<String, String>,
|
||||
validate_certificates: bool,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
skip_cache: bool,
|
||||
) -> Result<Vec<ServiceDefinition>> {
|
||||
// Ensure we have a pool; reflect only if missing
|
||||
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
|
||||
if self.get_pool(id, uri, proto_files).is_none() {
|
||||
info!("Reflecting gRPC services for {} at {}", id, uri);
|
||||
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
|
||||
.await?;
|
||||
|
||||
@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use tokio::fs;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::RwLock;
|
||||
use tonic::codegen::http::uri::PathAndQuery;
|
||||
use tonic::transport::Uri;
|
||||
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
|
||||
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
|
||||
use yaak_common::command::new_xplatform_command;
|
||||
use yaak_tls::ClientCertificateConfig;
|
||||
|
||||
pub async fn fill_pool_from_files(
|
||||
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(
|
||||
|
||||
info!("Invoking protoc with {}", args.join(" "));
|
||||
|
||||
let out = Command::new(&config.protoc_bin_path)
|
||||
.args(&args)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
|
||||
let mut cmd = new_xplatform_command(&config.protoc_bin_path);
|
||||
cmd.args(&args);
|
||||
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!(
|
||||
|
||||
@@ -2,6 +2,8 @@ use crate::dns::LocalhostResolver;
|
||||
use crate::error::Result;
|
||||
use log::{debug, info, warn};
|
||||
use reqwest::{Client, Proxy, redirect};
|
||||
use std::sync::Arc;
|
||||
use yaak_models::models::DnsOverride;
|
||||
use yaak_tls::{ClientCertificateConfig, get_tls_config};
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -28,10 +30,14 @@ pub struct HttpConnectionOptions {
|
||||
pub validate_certificates: bool,
|
||||
pub proxy: HttpConnectionProxySetting,
|
||||
pub client_certificate: Option<ClientCertificateConfig>,
|
||||
pub dns_overrides: Vec<DnsOverride>,
|
||||
}
|
||||
|
||||
impl HttpConnectionOptions {
|
||||
pub(crate) fn build_client(&self) -> Result<Client> {
|
||||
/// Build a reqwest Client and return it along with the DNS resolver.
|
||||
/// The resolver is returned separately so it can be configured per-request
|
||||
/// to emit DNS timing events to the appropriate channel.
|
||||
pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
|
||||
let mut client = Client::builder()
|
||||
.connection_verbose(true)
|
||||
.redirect(redirect::Policy::none())
|
||||
@@ -40,15 +46,19 @@ impl HttpConnectionOptions {
|
||||
.no_brotli()
|
||||
.no_deflate()
|
||||
.referer(false)
|
||||
.tls_info(true);
|
||||
.tls_info(true)
|
||||
// Disable connection pooling to ensure DNS resolution happens on each request
|
||||
// This is needed so we can emit DNS timing events for each request
|
||||
.pool_max_idle_per_host(0);
|
||||
|
||||
// Configure TLS with optional client certificate
|
||||
let config =
|
||||
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
|
||||
client = client.use_preconfigured_tls(config);
|
||||
|
||||
// Configure DNS resolver
|
||||
client = client.dns_resolver(LocalhostResolver::new());
|
||||
// Configure DNS resolver - keep a reference to configure per-request
|
||||
let resolver = LocalhostResolver::new(self.dns_overrides.clone());
|
||||
client = client.dns_resolver(resolver.clone());
|
||||
|
||||
// Configure proxy
|
||||
match self.proxy.clone() {
|
||||
@@ -69,7 +79,7 @@ impl HttpConnectionOptions {
|
||||
self.client_certificate.is_some()
|
||||
);
|
||||
|
||||
Ok(client.build()?)
|
||||
Ok((client.build()?, resolver))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,53 +1,185 @@
|
||||
use crate::sender::HttpResponseEvent;
|
||||
use hyper_util::client::legacy::connect::dns::{
|
||||
GaiResolver as HyperGaiResolver, Name as HyperName,
|
||||
};
|
||||
use log::info;
|
||||
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
|
||||
use std::collections::HashMap;
|
||||
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::{RwLock, mpsc};
|
||||
use tower_service::Service;
|
||||
use yaak_models::models::DnsOverride;
|
||||
|
||||
/// Stores resolved addresses for a hostname override
|
||||
#[derive(Clone)]
|
||||
pub struct ResolvedOverride {
|
||||
pub ipv4: Vec<Ipv4Addr>,
|
||||
pub ipv6: Vec<Ipv6Addr>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LocalhostResolver {
|
||||
fallback: HyperGaiResolver,
|
||||
event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
|
||||
overrides: Arc<HashMap<String, ResolvedOverride>>,
|
||||
}
|
||||
|
||||
impl LocalhostResolver {
|
||||
pub fn new() -> Arc<Self> {
|
||||
pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
|
||||
let resolver = HyperGaiResolver::new();
|
||||
Arc::new(Self { fallback: resolver })
|
||||
|
||||
// Pre-parse DNS overrides into a lookup map
|
||||
let mut overrides = HashMap::new();
|
||||
for o in dns_overrides {
|
||||
if !o.enabled {
|
||||
continue;
|
||||
}
|
||||
let hostname = o.hostname.to_lowercase();
|
||||
|
||||
let ipv4: Vec<Ipv4Addr> =
|
||||
o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();
|
||||
|
||||
let ipv6: Vec<Ipv6Addr> =
|
||||
o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();
|
||||
|
||||
// Only add if at least one address is valid
|
||||
if !ipv4.is_empty() || !ipv6.is_empty() {
|
||||
overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
|
||||
}
|
||||
}
|
||||
|
||||
Arc::new(Self {
|
||||
fallback: resolver,
|
||||
event_tx: Arc::new(RwLock::new(None)),
|
||||
overrides: Arc::new(overrides),
|
||||
})
|
||||
}
|
||||
|
||||
/// Set the event sender for the current request.
|
||||
/// This should be called before each request to direct DNS events
|
||||
/// to the appropriate channel.
|
||||
pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
|
||||
let mut guard = self.event_tx.write().await;
|
||||
*guard = tx;
|
||||
}
|
||||
}
|
||||
|
||||
impl Resolve for LocalhostResolver {
|
||||
fn resolve(&self, name: Name) -> Resolving {
|
||||
let host = name.as_str().to_lowercase();
|
||||
let event_tx = self.event_tx.clone();
|
||||
let overrides = self.overrides.clone();
|
||||
|
||||
info!("DNS resolve called for: {}", host);
|
||||
|
||||
// Check for DNS override first
|
||||
if let Some(resolved) = overrides.get(&host) {
|
||||
log::debug!("DNS override found for: {}", host);
|
||||
let hostname = host.clone();
|
||||
let mut addrs: Vec<SocketAddr> = Vec::new();
|
||||
|
||||
// Add IPv4 addresses
|
||||
for ip in &resolved.ipv4 {
|
||||
addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
|
||||
}
|
||||
|
||||
// Add IPv6 addresses
|
||||
for ip in &resolved.ipv6 {
|
||||
addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
|
||||
}
|
||||
|
||||
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
return Box::pin(async move {
|
||||
// Emit DNS event for override
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration: 0,
|
||||
overridden: true,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
|
||||
});
|
||||
}
|
||||
|
||||
// Check for .localhost suffix
|
||||
let is_localhost = host.ends_with(".localhost");
|
||||
if is_localhost {
|
||||
let hostname = host.clone();
|
||||
// Port 0 is fine; reqwest replaces it with the URL's explicit
|
||||
// port or the scheme’s default (80/443, etc.).
|
||||
// (See docs note below.)
|
||||
// port or the scheme's default (80/443, etc.).
|
||||
let addrs: Vec<SocketAddr> = vec![
|
||||
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
|
||||
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
|
||||
];
|
||||
|
||||
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
return Box::pin(async move {
|
||||
// Emit DNS event for localhost resolution
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration: 0,
|
||||
overridden: false,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
|
||||
});
|
||||
}
|
||||
|
||||
// Fall back to system DNS
|
||||
let mut fallback = self.fallback.clone();
|
||||
let name_str = name.as_str().to_string();
|
||||
let hostname = host.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
match HyperName::from_str(&name_str) {
|
||||
Ok(n) => fallback
|
||||
.call(n)
|
||||
.await
|
||||
.map(|addrs| Box::new(addrs) as Addrs)
|
||||
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
|
||||
Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
|
||||
let start = Instant::now();
|
||||
|
||||
let result = match HyperName::from_str(&name_str) {
|
||||
Ok(n) => fallback.call(n).await,
|
||||
Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
|
||||
};
|
||||
|
||||
let duration = start.elapsed().as_millis() as u64;
|
||||
|
||||
match result {
|
||||
Ok(addrs) => {
|
||||
// Collect addresses for event emission
|
||||
let addr_vec: Vec<SocketAddr> = addrs.collect();
|
||||
let addresses: Vec<String> =
|
||||
addr_vec.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
// Emit DNS event
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration,
|
||||
overridden: false,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok(Box::new(addr_vec.into_iter()) as Addrs)
|
||||
}
|
||||
Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
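A minimal sketch of wiring the resolver above into a `reqwest` client, written as if it lived in the same crate (the hostname and address are made up; the `DnsOverride` fields follow the generated bindings earlier in the diff):

```rust
use crate::dns::LocalhostResolver;
use yaak_models::models::DnsOverride;

// Illustrative only: route api.internal to a fixed address, fall back to system DNS otherwise.
fn build_client_with_override() -> reqwest::Result<reqwest::Client> {
    let overrides = vec![DnsOverride {
        hostname: "api.internal".into(),
        ipv4: vec!["10.0.0.5".into()],
        ipv6: vec![],
        enabled: true,
    }];
    let resolver = LocalhostResolver::new(overrides);
    reqwest::Client::builder().dns_resolver(resolver).build()
}
```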
@@ -1,4 +1,5 @@
|
||||
use crate::client::HttpConnectionOptions;
|
||||
use crate::dns::LocalhostResolver;
|
||||
use crate::error::Result;
|
||||
use log::info;
|
||||
use reqwest::Client;
|
||||
@@ -7,8 +8,15 @@ use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// A cached HTTP client along with its DNS resolver.
|
||||
/// The resolver is needed to set the event sender per-request.
|
||||
pub struct CachedClient {
|
||||
pub client: Client,
|
||||
pub resolver: Arc<LocalhostResolver>,
|
||||
}
|
||||
|
||||
pub struct HttpConnectionManager {
|
||||
connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
|
||||
connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
|
||||
ttl: Duration,
|
||||
}
|
||||
|
||||
@@ -20,21 +28,26 @@ impl HttpConnectionManager {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
|
||||
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
|
||||
let mut connections = self.connections.write().await;
|
||||
let id = opt.id.clone();
|
||||
|
||||
// Clean old connections
|
||||
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
|
||||
|
||||
if let Some((c, last_used)) = connections.get_mut(&id) {
|
||||
if let Some((cached, last_used)) = connections.get_mut(&id) {
|
||||
info!("Re-using HTTP client {id}");
|
||||
*last_used = Instant::now();
|
||||
return Ok(c.clone());
|
||||
return Ok(CachedClient {
|
||||
client: cached.client.clone(),
|
||||
resolver: cached.resolver.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
let c = opt.build_client()?;
|
||||
connections.insert(id.into(), (c.clone(), Instant::now()));
|
||||
Ok(c)
|
||||
let (client, resolver) = opt.build_client()?;
|
||||
let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
|
||||
connections.insert(id.into(), (cached, Instant::now()));
|
||||
|
||||
Ok(CachedClient { client, resolver })
|
||||
}
|
||||
}
|
||||
|
||||
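A hedged usage sketch for the cached-client lookup above (module paths and the surrounding plumbing are assumptions; only `get_client` and `set_event_sender` come from the diff):

```rust
use crate::client::HttpConnectionOptions;
use crate::sender::HttpResponseEvent;
use tokio::sync::mpsc;

// Illustrative only: reuse (or build) a client, then point its resolver's DNS events
// at this request's event channel before sending.
async fn client_for_request(
    manager: &crate::HttpConnectionManager, // assumed re-export location
    opt: &HttpConnectionOptions,
    event_tx: mpsc::Sender<HttpResponseEvent>,
) -> crate::error::Result<reqwest::Client> {
    let cached = manager.get_client(opt).await?;
    cached.resolver.set_event_sender(Some(event_tx)).await;
    Ok(cached.client)
}
```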
@@ -31,7 +31,14 @@ pub enum HttpResponseEvent {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
scheme: String,
|
||||
username: String,
|
||||
password: String,
|
||||
host: String,
|
||||
port: u16,
|
||||
path: String,
|
||||
query: String,
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: Version,
|
||||
@@ -45,6 +52,12 @@ pub enum HttpResponseEvent {
|
||||
ChunkReceived {
|
||||
bytes: usize,
|
||||
},
|
||||
DnsResolved {
|
||||
hostname: String,
|
||||
addresses: Vec<String>,
|
||||
duration: u64,
|
||||
overridden: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Display for HttpResponseEvent {
|
||||
@@ -59,7 +72,16 @@ impl Display for HttpResponseEvent {
|
||||
};
|
||||
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
|
||||
}
|
||||
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
|
||||
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
|
||||
let auth_str = if username.is_empty() && password.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("{}:{}@", username, password)
|
||||
};
|
||||
let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
|
||||
let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
|
||||
write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
|
||||
}
|
||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||
write!(f, "< {} {}", version_to_str(version), status)
|
||||
}
|
||||
@@ -67,6 +89,19 @@ impl Display for HttpResponseEvent {
|
||||
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
|
||||
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
|
||||
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
|
||||
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
|
||||
if *overridden {
|
||||
write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"* DNS resolved {} to {} ({}ms)",
|
||||
hostname,
|
||||
addresses.join(", "),
|
||||
duration
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -85,7 +120,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
                RedirectBehavior::DropBody => "drop_body".to_string(),
            },
        },
-       HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
+       HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
+           D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
+       }
        HttpResponseEvent::ReceiveUrl { version, status } => {
            D::ReceiveUrl { version: format!("{:?}", version), status }
        }
@@ -93,6 +130,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
        HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
        HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
        HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
+       HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
+           D::DnsResolved { hostname, addresses, duration, overridden }
+       }
        }
    }
 }
@@ -354,6 +394,9 @@ impl HttpSender for ReqwestSender {
 
        // Add headers
        for header in request.headers {
+           if header.0.is_empty() {
+               continue;
+           }
            req_builder = req_builder.header(&header.0, &header.1);
        }
 
@@ -390,8 +433,15 @@ impl HttpSender for ReqwestSender {
        ));
 
        send_event(HttpResponseEvent::SendUrl {
-           path: sendable_req.url().path().to_string(),
            method: sendable_req.method().to_string(),
+           scheme: sendable_req.url().scheme().to_string(),
+           username: sendable_req.url().username().to_string(),
+           password: sendable_req.url().password().unwrap_or_default().to_string(),
+           host: sendable_req.url().host_str().unwrap_or_default().to_string(),
+           port: sendable_req.url().port_or_known_default().unwrap_or(0),
+           path: sendable_req.url().path().to_string(),
+           query: sendable_req.url().query().unwrap_or_default().to_string(),
+           fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
        });
 
        let mut request_headers = Vec::new();
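
To make the per-component breakdown concrete, this is roughly what one emitted event looks like on the TypeScript side, as a sample object matching the regenerated `send_url` shape in `gen_models.ts` (URL and values are made up for illustration):

```ts
import type { HttpResponseEventData } from './bindings/gen_models';

// Sample event for GET https://api.example.com/v1/items?limit=10#top
const event: HttpResponseEventData = {
  type: 'send_url',
  method: 'GET',
  scheme: 'https',
  username: '',
  password: '',
  host: 'api.example.com',
  port: 443, // port_or_known_default() fills in the scheme default when no port is set
  path: '/v1/items',
  query: 'limit=10',
  fragment: 'top',
};
```
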
@@ -342,7 +342,8 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_transaction_single_redirect() {
|
||||
let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
|
||||
let redirect_headers =
|
||||
vec![("Location".to_string(), "https://example.com/new".to_string())];
|
||||
|
||||
let responses = vec![
|
||||
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
|
||||
@@ -373,7 +374,8 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_transaction_max_redirects_exceeded() {
|
||||
let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
|
||||
let redirect_headers =
|
||||
vec![("Location".to_string(), "https://example.com/loop".to_string())];
|
||||
|
||||
// Create more redirects than allowed
|
||||
let responses: Vec<MockResponse> = (0..12)
|
||||
@@ -525,7 +527,8 @@ mod tests {
|
||||
_request: SendableHttpRequest,
|
||||
_event_tx: mpsc::Sender<HttpResponseEvent>,
|
||||
) -> Result<HttpResponse> {
|
||||
let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
|
||||
let headers =
|
||||
vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
|
||||
|
||||
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
|
||||
Box::pin(std::io::Cursor::new(vec![]));
|
||||
@@ -584,7 +587,10 @@ mod tests {
|
||||
let headers = vec![
|
||||
("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
|
||||
("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
|
||||
("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
|
||||
(
|
||||
"set-cookie".to_string(),
|
||||
"preferences=dark; Path=/; Max-Age=86400".to_string(),
|
||||
),
|
||||
];
|
||||
|
||||
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
|
||||
|
||||
crates/yaak-models/bindings/gen_models.ts (generated, 8 changes)
@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -91,6 +93,6 @@ export type WebsocketMessageType = "text" | "binary";
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -206,6 +206,34 @@ export function replaceModelsInStore<
   });
 }
 
+export function mergeModelsInStore<
+  M extends AnyModel['model'],
+  T extends Extract<AnyModel, { model: M }>,
+>(model: M, models: T[], filter?: (model: T) => boolean) {
+  mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
+    const existingModels = { ...prev[model] } as Record<string, T>;
+
+    // Merge in new models first
+    for (const m of models) {
+      existingModels[m.id] = m;
+    }
+
+    // Then filter out unwanted models
+    if (filter) {
+      for (const [id, m] of Object.entries(existingModels)) {
+        if (!filter(m)) {
+          delete existingModels[id];
+        }
+      }
+    }
+
+    return {
+      ...prev,
+      [model]: existingModels,
+    };
+  });
+}
+
 function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
   // Never ignore updates from non-user sources
   if (updateSource.type !== 'window') {
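
A hypothetical call site for the `mergeModelsInStore` helper added above (the import path and surrounding variables are illustrative, not part of this diff):

```ts
import type { HttpResponse } from './bindings/gen_models';
import { mergeModelsInStore } from './store'; // illustrative path

declare const fetchedResponses: HttpResponse[]; // e.g. results of a list/sync call
declare const activeRequestId: string;

// Merge freshly-fetched responses into the store, then drop any cached responses
// that no longer belong to the active request (the filter runs after the merge).
mergeModelsInStore(
  'http_response',
  fetchedResponses,
  (m) => m.requestId === activeRequestId,
);
```
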
@@ -0,0 +1,2 @@
+-- Add DNS resolution timing to http_responses
+ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;
@@ -0,0 +1,2 @@
+-- Add DNS overrides setting to workspaces
+ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;
@@ -0,0 +1,12 @@
+-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
+-- keeping any other custom headers the user may have added.
+UPDATE workspaces
+SET headers = (
+    SELECT json_group_array(json(value))
+    FROM json_each(headers)
+    WHERE NOT (
+        (LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
+        OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
+    )
+)
+WHERE json_array_length(headers) > 0;
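
The same keep/drop rule, expressed as a TypeScript sketch over a workspace's headers array (illustrative only; the real cleanup happens in the SQL migration above):

```ts
import type { HttpRequestHeader } from './bindings/gen_models';

// Drop only the two hardcoded defaults; any other header the user added is kept.
function stripDefaultWorkspaceHeaders(headers: HttpRequestHeader[]): HttpRequestHeader[] {
  return headers.filter((h) => {
    const name = h.name.toLowerCase();
    const isDefaultUserAgent = name === 'user-agent' && h.value === 'yaak';
    const isDefaultAccept = name === 'accept' && h.value === '*/*';
    return !(isDefaultUserAgent || isDefaultAccept);
  });
}

// Example: [{ name: 'User-Agent', value: 'yaak' }, { name: 'X-Team', value: 'qa' }]
// keeps only the X-Team header.
```
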
@@ -73,6 +73,20 @@ pub struct ClientCertificate {
     pub enabled: bool,
 }
 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
+#[serde(rename_all = "camelCase")]
+#[ts(export, export_to = "gen_models.ts")]
+pub struct DnsOverride {
+    pub hostname: String,
+    #[serde(default)]
+    pub ipv4: Vec<String>,
+    #[serde(default)]
+    pub ipv6: Vec<String>,
+    #[serde(default = "default_true")]
+    #[ts(optional, as = "Option<bool>")]
+    pub enabled: bool,
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize, TS)]
 #[serde(rename_all = "snake_case")]
 #[ts(export, export_to = "gen_models.ts")]
@@ -303,6 +317,8 @@ pub struct Workspace {
     #[serde(default = "default_true")]
     pub setting_follow_redirects: bool,
     pub setting_request_timeout: i32,
+    #[serde(default)]
+    pub setting_dns_overrides: Vec<DnsOverride>,
 }
 
 impl UpsertModelInfo for Workspace {
@@ -343,6 +359,7 @@ impl UpsertModelInfo for Workspace {
            (SettingFollowRedirects, self.setting_follow_redirects.into()),
            (SettingRequestTimeout, self.setting_request_timeout.into()),
            (SettingValidateCertificates, self.setting_validate_certificates.into()),
+           (SettingDnsOverrides, serde_json::to_string(&self.setting_dns_overrides)?.into()),
        ])
    }
 
@@ -359,6 +376,7 @@ impl UpsertModelInfo for Workspace {
            WorkspaceIden::SettingFollowRedirects,
            WorkspaceIden::SettingRequestTimeout,
            WorkspaceIden::SettingValidateCertificates,
+           WorkspaceIden::SettingDnsOverrides,
        ]
    }
 
@@ -368,6 +386,7 @@ impl UpsertModelInfo for Workspace {
    {
        let headers: String = row.get("headers")?;
        let authentication: String = row.get("authentication")?;
+       let setting_dns_overrides: String = row.get("setting_dns_overrides")?;
        Ok(Self {
            id: row.get("id")?,
            model: row.get("model")?,
@@ -382,6 +401,7 @@ impl UpsertModelInfo for Workspace {
            setting_follow_redirects: row.get("setting_follow_redirects")?,
            setting_request_timeout: row.get("setting_request_timeout")?,
            setting_validate_certificates: row.get("setting_validate_certificates")?,
+           setting_dns_overrides: serde_json::from_str(&setting_dns_overrides).unwrap_or_default(),
        })
    }
 }
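
From the app side, a DNS override is just an entry in the workspace's new `settingDnsOverrides` array. A minimal sketch using the regenerated bindings (values and the update pattern are illustrative):

```ts
import type { DnsOverride, Workspace } from './bindings/gen_models';

// Route api.example.com to a local test server instead of its public DNS record.
const override: DnsOverride = {
  hostname: 'api.example.com',
  ipv4: ['127.0.0.1'],
  ipv6: [],
  enabled: true, // optional; serde defaults it to true when omitted
};

declare const workspace: Workspace; // settingDnsOverrides defaults to [] via the new migration

const updated: Workspace = {
  ...workspace,
  settingDnsOverrides: [...workspace.settingDnsOverrides, override],
};
```
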
@@ -1333,6 +1353,7 @@ pub struct HttpResponse {
|
||||
pub content_length_compressed: Option<i32>,
|
||||
pub elapsed: i32,
|
||||
pub elapsed_headers: i32,
|
||||
pub elapsed_dns: i32,
|
||||
pub error: Option<String>,
|
||||
pub headers: Vec<HttpResponseHeader>,
|
||||
pub remote_addr: Option<String>,
|
||||
@@ -1381,6 +1402,7 @@ impl UpsertModelInfo for HttpResponse {
|
||||
(ContentLengthCompressed, self.content_length_compressed.into()),
|
||||
(Elapsed, self.elapsed.into()),
|
||||
(ElapsedHeaders, self.elapsed_headers.into()),
|
||||
(ElapsedDns, self.elapsed_dns.into()),
|
||||
(Error, self.error.into()),
|
||||
(Headers, serde_json::to_string(&self.headers)?.into()),
|
||||
(RemoteAddr, self.remote_addr.into()),
|
||||
@@ -1402,6 +1424,7 @@ impl UpsertModelInfo for HttpResponse {
|
||||
HttpResponseIden::ContentLengthCompressed,
|
||||
HttpResponseIden::Elapsed,
|
||||
HttpResponseIden::ElapsedHeaders,
|
||||
HttpResponseIden::ElapsedDns,
|
||||
HttpResponseIden::Error,
|
||||
HttpResponseIden::Headers,
|
||||
HttpResponseIden::RemoteAddr,
|
||||
@@ -1435,6 +1458,7 @@ impl UpsertModelInfo for HttpResponse {
|
||||
version: r.get("version")?,
|
||||
elapsed: r.get("elapsed")?,
|
||||
elapsed_headers: r.get("elapsed_headers")?,
|
||||
elapsed_dns: r.get("elapsed_dns").unwrap_or_default(),
|
||||
remote_addr: r.get("remote_addr")?,
|
||||
status: r.get("status")?,
|
||||
status_reason: r.get("status_reason")?,
|
||||
@@ -1471,7 +1495,21 @@ pub enum HttpResponseEventData {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
#[serde(default)]
|
||||
scheme: String,
|
||||
#[serde(default)]
|
||||
username: String,
|
||||
#[serde(default)]
|
||||
password: String,
|
||||
#[serde(default)]
|
||||
host: String,
|
||||
#[serde(default)]
|
||||
port: u16,
|
||||
path: String,
|
||||
#[serde(default)]
|
||||
query: String,
|
||||
#[serde(default)]
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: String,
|
||||
@@ -1491,6 +1529,12 @@ pub enum HttpResponseEventData {
|
||||
ChunkReceived {
|
||||
bytes: usize,
|
||||
},
|
||||
DnsResolved {
|
||||
hostname: String,
|
||||
addresses: Vec<String>,
|
||||
duration: u64,
|
||||
overridden: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Default for HttpResponseEventData {
|
||||
|
||||
@@ -1,3 +1,4 @@
+use super::dedupe_headers;
 use crate::db_context::DbContext;
 use crate::error::Result;
 use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
@@ -87,6 +88,6 @@ impl<'a> DbContext<'a> {
 
        metadata.append(&mut grpc_request.metadata.clone());
 
-       Ok(metadata)
+       Ok(dedupe_headers(metadata))
    }
 }
@@ -1,3 +1,4 @@
+use super::dedupe_headers;
 use crate::db_context::DbContext;
 use crate::error::Result;
 use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
@@ -87,7 +88,7 @@ impl<'a> DbContext<'a> {
 
        headers.append(&mut http_request.headers.clone());
 
-       Ok(headers)
+       Ok(dedupe_headers(headers))
    }
 
    pub fn list_http_requests_for_folder_recursive(
@@ -19,6 +19,26 @@ mod websocket_connections;
 mod websocket_events;
 mod websocket_requests;
 mod workspace_metas;
-mod workspaces;
+pub mod workspaces;
 
 const MAX_HISTORY_ITEMS: usize = 20;
 
+use crate::models::HttpRequestHeader;
+use std::collections::HashMap;
+
+/// Deduplicate headers by name (case-insensitive), keeping the latest (most specific) value.
+/// Preserves the order of first occurrence for each header name.
+pub(crate) fn dedupe_headers(headers: Vec<HttpRequestHeader>) -> Vec<HttpRequestHeader> {
+    let mut index_by_name: HashMap<String, usize> = HashMap::new();
+    let mut deduped: Vec<HttpRequestHeader> = Vec::new();
+    for header in headers {
+        let key = header.name.to_lowercase();
+        if let Some(&idx) = index_by_name.get(&key) {
+            deduped[idx] = header;
+        } else {
+            index_by_name.insert(key, deduped.len());
+            deduped.push(header);
+        }
+    }
+    deduped
+}
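
A small worked example of the merge-then-dedupe behaviour, written as data against the generated header type (illustrative; the real implementation is the Rust `dedupe_headers` above):

```ts
import type { HttpRequestHeader } from './bindings/gen_models';

// Inheritance chain flattened in order: workspace -> folder -> request.
const merged: HttpRequestHeader[] = [
  { name: 'User-Agent', value: 'yaak' },         // global default
  { name: 'Accept', value: '*/*' },              // global default
  { name: 'accept', value: 'application/json' }, // request-level override (case-insensitive match)
];

// After dedupe: the later, more specific value wins, but the header keeps the
// position of its first occurrence in the list.
const expected: HttpRequestHeader[] = [
  { name: 'User-Agent', value: 'yaak' },
  { name: 'accept', value: 'application/json' },
];
```
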
@@ -1,3 +1,4 @@
+use super::dedupe_headers;
 use crate::db_context::DbContext;
 use crate::error::Result;
 use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
@@ -95,6 +96,6 @@ impl<'a> DbContext<'a> {
 
        headers.append(&mut websocket_request.headers.clone());
 
-       Ok(headers)
+       Ok(dedupe_headers(headers))
    }
 }
@@ -65,28 +65,7 @@ impl<'a> DbContext<'a> {
    }
 
    pub fn upsert_workspace(&self, w: &Workspace, source: &UpdateSource) -> Result<Workspace> {
-       let mut workspace = w.clone();
-
-       // Add default headers only for NEW workspaces (empty ID means insert, not update)
-       // This prevents re-adding headers if a user intentionally removes all headers
-       if workspace.id.is_empty() && workspace.headers.is_empty() {
-           workspace.headers = vec![
-               HttpRequestHeader {
-                   enabled: true,
-                   name: "User-Agent".to_string(),
-                   value: "yaak".to_string(),
-                   id: None,
-               },
-               HttpRequestHeader {
-                   enabled: true,
-                   name: "Accept".to_string(),
-                   value: "*/*".to_string(),
-                   id: None,
-               },
-           ];
-       }
-
-       self.upsert(&workspace, source)
+       self.upsert(w, source)
    }
 
    pub fn resolve_auth_for_workspace(
@@ -101,6 +80,28 @@ impl<'a> DbContext<'a> {
    }
 
    pub fn resolve_headers_for_workspace(&self, workspace: &Workspace) -> Vec<HttpRequestHeader> {
-       workspace.headers.clone()
+       let mut headers = default_headers();
+       headers.extend(workspace.headers.clone());
+       headers
    }
 }
 
+/// Global default headers that are always sent with requests unless overridden.
+/// These are prepended to the inheritance chain so workspace/folder/request headers
+/// can override or disable them.
+pub fn default_headers() -> Vec<HttpRequestHeader> {
+    vec![
+        HttpRequestHeader {
+            enabled: true,
+            name: "User-Agent".to_string(),
+            value: "yaak".to_string(),
+            id: None,
+        },
+        HttpRequestHeader {
+            enabled: true,
+            name: "Accept".to_string(),
+            value: "*/*".to_string(),
+            id: None,
+        },
+    ]
+}
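
How the new flow composes, as a data-level TypeScript sketch (values are illustrative): the defaults are prepended, workspace headers follow, and the later duplicate wins once the per-request resolvers run `dedupe_headers`.

```ts
import type { HttpRequestHeader } from './bindings/gen_models';

const defaults: HttpRequestHeader[] = [
  { enabled: true, name: 'User-Agent', value: 'yaak' },
  { enabled: true, name: 'Accept', value: '*/*' },
];
const workspaceHeaders: HttpRequestHeader[] = [
  { enabled: true, name: 'User-Agent', value: 'my-service/1.0' },
];

// resolve_headers_for_workspace returns defaults followed by workspace headers;
// after downstream dedupe the request goes out with User-Agent: my-service/1.0
// and Accept: */*.
const resolved = [...defaults, ...workspaceHeaders];
```
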
crates/yaak-plugins/bindings/gen_events.ts (generated, 6 changes — file diff suppressed because one or more lines are too long)
crates/yaak-plugins/bindings/gen_models.ts (generated, 8 changes)
@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -77,6 +79,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -80,10 +80,7 @@ pub async fn check_plugin_updates(
|
||||
}
|
||||
|
||||
/// Search for plugins in the registry.
|
||||
pub async fn search_plugins(
|
||||
http_client: &Client,
|
||||
query: &str,
|
||||
) -> Result<PluginSearchResponse> {
|
||||
pub async fn search_plugins(http_client: &Client, query: &str) -> Result<PluginSearchResponse> {
|
||||
let mut url = build_url("/search");
|
||||
{
|
||||
let mut query_pairs = url.query_pairs_mut();
|
||||
|
||||
@@ -157,6 +157,9 @@ pub enum InternalEventPayload {
    PromptTextRequest(PromptTextRequest),
    PromptTextResponse(PromptTextResponse),
 
+   PromptFormRequest(PromptFormRequest),
+   PromptFormResponse(PromptFormResponse),
+
    WindowInfoRequest(WindowInfoRequest),
    WindowInfoResponse(WindowInfoResponse),
 
@@ -571,6 +574,28 @@ pub struct PromptTextResponse {
    pub value: Option<String>,
 }
 
+#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
+#[serde(default, rename_all = "camelCase")]
+#[ts(export, export_to = "gen_events.ts")]
+pub struct PromptFormRequest {
+    pub id: String,
+    pub title: String,
+    #[ts(optional)]
+    pub description: Option<String>,
+    pub inputs: Vec<FormInput>,
+    #[ts(optional)]
+    pub confirm_text: Option<String>,
+    #[ts(optional)]
+    pub cancel_text: Option<String>,
+}
+
+#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
+#[serde(default, rename_all = "camelCase")]
+#[ts(export, export_to = "gen_events.ts")]
+pub struct PromptFormResponse {
+    pub values: Option<HashMap<String, JsonPrimitive>>,
+}
+
 #[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
 #[serde(default, rename_all = "camelCase")]
 #[ts(export, export_to = "gen_events.ts")]
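
From a plugin, the new form prompt is reached via `ctx.prompt.form` (see the Context interface change later in this diff). A hedged sketch of a call site; the `FormInput` entries are only illustrative because the `FormInput` type itself is not shown here, and the `Context` import is assumed to come from `@yaakapp/api`:

```ts
import type { Context } from '@yaakapp/api';

export async function promptForCredentials(ctx: Context) {
  const values = await ctx.prompt.form({
    id: 'login-form',
    title: 'Log in',
    description: 'Used to fetch an access token',
    confirmText: 'Log in',
    cancelText: 'Cancel',
    inputs: [
      { text: { name: 'username', label: 'Username' } },
      { text: { name: 'password', label: 'Password', password: true } },
    ] as any, // assumed shape — see gen_events.ts for the real FormInput type
  });

  // `values` is empty/null when the user cancels the dialog
  if (values == null) return null;
  return { username: values.username, password: values.password };
}
```
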
@@ -378,7 +378,8 @@ impl PluginManager {
|
||||
plugins: Vec<PluginHandle>,
|
||||
timeout_duration: Duration,
|
||||
) -> Result<Vec<InternalEvent>> {
|
||||
let label = format!("wait[{}.{}]", plugins.len(), payload.type_name());
|
||||
let event_type = payload.type_name();
|
||||
let label = format!("wait[{}.{}]", plugins.len(), event_type);
|
||||
let (rx_id, mut rx) = self.subscribe(label.as_str()).await;
|
||||
|
||||
// 1. Build the events with IDs and everything
|
||||
@@ -412,10 +413,21 @@ impl PluginManager {
|
||||
|
||||
// Timeout to prevent hanging forever if plugin doesn't respond
|
||||
if timeout(timeout_duration, collect_events).await.is_err() {
|
||||
let responded_ids: Vec<&String> =
|
||||
found_events.iter().filter_map(|e| e.reply_id.as_ref()).collect();
|
||||
let non_responding: Vec<&str> = events_to_send
|
||||
.iter()
|
||||
.filter(|e| !responded_ids.contains(&&e.id))
|
||||
.map(|e| e.plugin_name.as_str())
|
||||
.collect();
|
||||
warn!(
|
||||
"Timeout waiting for plugin responses. Got {}/{} responses",
|
||||
"Timeout ({:?}) waiting for {} responses. Got {}/{} responses. \
|
||||
Non-responding plugins: [{}]",
|
||||
timeout_duration,
|
||||
event_type,
|
||||
found_events.len(),
|
||||
events_to_send.len()
|
||||
events_to_send.len(),
|
||||
non_responding.join(", ")
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -196,7 +196,11 @@ pub fn decrypt_secure_template_function(
|
||||
}
|
||||
}
|
||||
new_tokens.push(Token::Raw {
|
||||
text: template_function_secure_run(encryption_manager, args_map, plugin_context)?,
|
||||
text: template_function_secure_run(
|
||||
encryption_manager,
|
||||
args_map,
|
||||
plugin_context,
|
||||
)?,
|
||||
});
|
||||
}
|
||||
t => {
|
||||
@@ -216,7 +220,8 @@ pub fn encrypt_secure_template_function(
|
||||
plugin_context: &PluginContext,
|
||||
template: &str,
|
||||
) -> Result<String> {
|
||||
let decrypted = decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
|
||||
let decrypted =
|
||||
decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
|
||||
let tokens = Tokens {
|
||||
tokens: vec. Do not edit this file manually.
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
|
||||
|
||||
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
@@ -20,4 +22,4 @@ export type SyncState = { model: "sync_state", id: string, workspaceId: string,
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
@@ -296,11 +296,7 @@ pub fn compute_sync_ops(
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn workspace_models(
|
||||
db: &DbContext,
|
||||
version: &str,
|
||||
workspace_id: &str,
|
||||
) -> Result<Vec<SyncModel>> {
|
||||
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
|
||||
// We want to include private environments here so that we can take them into account during
|
||||
// the sync process. Otherwise, they would be treated as deleted.
|
||||
let include_private_environments = true;
|
||||
|
||||
@@ -2,6 +2,7 @@ use crate::connect::ws_connect;
|
||||
use crate::error::Result;
|
||||
use futures_util::stream::SplitSink;
|
||||
use futures_util::{SinkExt, StreamExt};
|
||||
use http::HeaderMap;
|
||||
use log::{debug, info, warn};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
@@ -10,7 +11,6 @@ use tokio::net::TcpStream;
|
||||
use tokio::sync::{Mutex, mpsc};
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
use tokio_tungstenite::tungstenite::handshake::client::Response;
|
||||
use http::HeaderMap;
|
||||
use tokio_tungstenite::tungstenite::http::HeaderValue;
|
||||
use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
|
||||
use yaak_tls::ClientCertificateConfig;
|
||||
|
||||
123
package-lock.json
generated
123
package-lock.json
generated
@@ -63,7 +63,7 @@
|
||||
"src-web"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^2.3.10",
|
||||
"@biomejs/biome": "^2.3.13",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@yaakapp/cli": "^0.3.4",
|
||||
"dotenv-cli": "^11.0.0",
|
||||
@@ -501,9 +501,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/biome": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.11.tgz",
|
||||
"integrity": "sha512-/zt+6qazBWguPG6+eWmiELqO+9jRsMZ/DBU3lfuU2ngtIQYzymocHhKiZRyrbra4aCOoyTg/BmY+6WH5mv9xmQ==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.13.tgz",
|
||||
"integrity": "sha512-Fw7UsV0UAtWIBIm0M7g5CRerpu1eKyKAXIazzxhbXYUyMkwNrkX/KLkGI7b+uVDQ5cLUMfOC9vR60q9IDYDstA==",
|
||||
"dev": true,
|
||||
"license": "MIT OR Apache-2.0",
|
||||
"bin": {
|
||||
@@ -517,20 +517,20 @@
|
||||
"url": "https://opencollective.com/biome"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@biomejs/cli-darwin-arm64": "2.3.11",
|
||||
"@biomejs/cli-darwin-x64": "2.3.11",
|
||||
"@biomejs/cli-linux-arm64": "2.3.11",
|
||||
"@biomejs/cli-linux-arm64-musl": "2.3.11",
|
||||
"@biomejs/cli-linux-x64": "2.3.11",
|
||||
"@biomejs/cli-linux-x64-musl": "2.3.11",
|
||||
"@biomejs/cli-win32-arm64": "2.3.11",
|
||||
"@biomejs/cli-win32-x64": "2.3.11"
|
||||
"@biomejs/cli-darwin-arm64": "2.3.13",
|
||||
"@biomejs/cli-darwin-x64": "2.3.13",
|
||||
"@biomejs/cli-linux-arm64": "2.3.13",
|
||||
"@biomejs/cli-linux-arm64-musl": "2.3.13",
|
||||
"@biomejs/cli-linux-x64": "2.3.13",
|
||||
"@biomejs/cli-linux-x64-musl": "2.3.13",
|
||||
"@biomejs/cli-win32-arm64": "2.3.13",
|
||||
"@biomejs/cli-win32-x64": "2.3.13"
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-darwin-arm64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-/uXXkBcPKVQY7rc9Ys2CrlirBJYbpESEDme7RKiBD6MmqR2w3j0+ZZXRIL2xiaNPsIMMNhP1YnA+jRRxoOAFrA==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-0OCwP0/BoKzyJHnFdaTk/i7hIP9JHH9oJJq6hrSCPmJPo8JWcJhprK4gQlhFzrwdTBAW4Bjt/RmCf3ZZe59gwQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -545,9 +545,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-darwin-x64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-fh7nnvbweDPm2xEmFjfmq7zSUiox88plgdHF9OIW4i99WnXrAC3o2P3ag9judoUMv8FCSUnlwJCM1B64nO5Fbg==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-AGr8OoemT/ejynbIu56qeil2+F2WLkIjn2d8jGK1JkchxnMUhYOfnqc9sVzcRxpG9Ycvw4weQ5sprRvtb7Yhcw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -562,9 +562,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-arm64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-l4xkGa9E7Uc0/05qU2lMYfN1H+fzzkHgaJoy98wO+b/7Gl78srbCRRgwYSW+BTLixTBrM6Ede5NSBwt7rd/i6g==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-xvOiFkrDNu607MPMBUQ6huHmBG1PZLOrqhtK6pXJW3GjfVqJg0Z/qpTdhXfcqWdSZHcT+Nct2fOgewZvytESkw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -579,9 +579,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-arm64-musl": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.11.tgz",
|
||||
"integrity": "sha512-XPSQ+XIPZMLaZ6zveQdwNjbX+QdROEd1zPgMwD47zvHV+tCGB88VH+aynyGxAHdzL+Tm/+DtKST5SECs4iwCLg==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.13.tgz",
|
||||
"integrity": "sha512-TUdDCSY+Eo/EHjhJz7P2GnWwfqet+lFxBZzGHldrvULr59AgahamLs/N85SC4+bdF86EhqDuuw9rYLvLFWWlXA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -596,9 +596,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-x64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-/1s9V/H3cSe0r0Mv/Z8JryF5x9ywRxywomqZVLHAoa/uN0eY7F8gEngWKNS5vbbN/BsfpCG5yeBT5ENh50Frxg==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-s+YsZlgiXNq8XkgHs6xdvKDFOj/bwTEevqEY6rC2I3cBHbxXYU1LOZstH3Ffw9hE5tE1sqT7U23C00MzkXztMw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -613,9 +613,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-x64-musl": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.11.tgz",
|
||||
"integrity": "sha512-vU7a8wLs5C9yJ4CB8a44r12aXYb8yYgBn+WeyzbMjaCMklzCv1oXr8x+VEyWodgJt9bDmhiaW/I0RHbn7rsNmw==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.13.tgz",
|
||||
"integrity": "sha512-0bdwFVSbbM//Sds6OjtnmQGp4eUjOTt6kHvR/1P0ieR9GcTUAlPNvPC3DiavTqq302W34Ae2T6u5VVNGuQtGlQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -630,9 +630,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-win32-arm64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-PZQ6ElCOnkYapSsysiTy0+fYX+agXPlWugh6+eQ6uPKI3vKAqNp6TnMhoM3oY2NltSB89hz59o8xIfOdyhi9Iw==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-QweDxY89fq0VvrxME+wS/BXKmqMrOTZlN9SqQ79kQSIc3FrEwvW/PvUegQF6XIVaekncDykB5dzPqjbwSKs9DA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -647,9 +647,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-win32-x64": {
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-43VrG813EW+b5+YbDbz31uUsheX+qFKCpXeY9kfdAx+ww3naKxeVkTD9zLIWxUPfJquANMHrmW3wbe/037G0Qg==",
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-trDw2ogdM2lyav9WFQsdsfdVy1dvZALymRpgmWsvSez0BJzBjulhOT/t+wyKeh3pZWvwP3VMs1SoOKwO3wecMQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -807,6 +807,21 @@
|
||||
"@lezer/xml": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/lang-yaml": {
|
||||
"version": "6.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/lang-yaml/-/lang-yaml-6.1.2.tgz",
|
||||
"integrity": "sha512-dxrfG8w5Ce/QbT7YID7mWZFKhdhsaTNOYjOkSIMt1qmC4VQnXSDSYVHHHn8k6kJUfIhtLo8t1JJgltlxWdsITw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/autocomplete": "^6.0.0",
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@lezer/common": "^1.2.0",
|
||||
"@lezer/highlight": "^1.2.0",
|
||||
"@lezer/lr": "^1.0.0",
|
||||
"@lezer/yaml": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/language": {
|
||||
"version": "6.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.12.1.tgz",
|
||||
@@ -832,6 +847,19 @@
|
||||
"crelt": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/merge": {
|
||||
"version": "6.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/merge/-/merge-6.11.2.tgz",
|
||||
"integrity": "sha512-NO5EJd2rLRbwVWLgMdhIntDIhfDtMOKYEZgqV5WnkNUS2oXOCVWLPjG/kgl/Jth2fGiOuG947bteqxP9nBXmMg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.17.0",
|
||||
"@lezer/highlight": "^1.0.0",
|
||||
"style-mod": "^4.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/search": {
|
||||
"version": "6.5.11",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.11.tgz",
|
||||
@@ -1614,6 +1642,17 @@
|
||||
"@lezer/lr": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@lezer/yaml": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@lezer/yaml/-/yaml-1.0.3.tgz",
|
||||
"integrity": "sha512-GuBLekbw9jDBDhGur82nuwkxKQ+a3W5H0GfaAthDXcAu+XdpS43VlnxA9E9hllkpSP5ellRDKjLLj7Lu9Wr6xA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@lezer/common": "^1.2.0",
|
||||
"@lezer/highlight": "^1.0.0",
|
||||
"@lezer/lr": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@marijn/find-cluster-break": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz",
|
||||
@@ -7811,9 +7850,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/hono": {
|
||||
"version": "4.11.3",
|
||||
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.3.tgz",
|
||||
"integrity": "sha512-PmQi306+M/ct/m5s66Hrg+adPnkD5jiO6IjA7WhWw0gSBSo1EcRegwuI1deZ+wd5pzCGynCcn2DprnE4/yEV4w==",
|
||||
"version": "4.11.7",
|
||||
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz",
|
||||
"integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=16.9.0"
|
||||
@@ -15721,7 +15760,7 @@
|
||||
},
|
||||
"packages/plugin-runtime-types": {
|
||||
"name": "@yaakapp/api",
|
||||
"version": "0.7.1",
|
||||
"version": "0.8.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^24.0.13"
|
||||
},
|
||||
@@ -15743,7 +15782,7 @@
|
||||
"@hono/mcp": "^0.2.3",
|
||||
"@hono/node-server": "^1.19.7",
|
||||
"@modelcontextprotocol/sdk": "^1.25.2",
|
||||
"hono": "^4.11.3",
|
||||
"hono": "^4.11.7",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -15984,7 +16023,9 @@
|
||||
"@codemirror/lang-json": "^6.0.1",
|
||||
"@codemirror/lang-markdown": "^6.3.2",
|
||||
"@codemirror/lang-xml": "^6.1.0",
|
||||
"@codemirror/lang-yaml": "^6.1.2",
|
||||
"@codemirror/language": "^6.11.0",
|
||||
"@codemirror/merge": "^6.11.2",
|
||||
"@codemirror/search": "^6.5.11",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@gilbarbara/deep-equal": "^0.3.1",
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"js-yaml": "^4.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^2.3.10",
|
||||
"@biomejs/biome": "^2.3.13",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@yaakapp/cli": "^0.3.4",
|
||||
"dotenv-cli": "^11.0.0",
|
||||
|
||||
@@ -17,7 +17,7 @@ npx @yaakapp/cli generate
 ```
 
 For more details on creating plugins, check out
-the [Quick Start Guide](https://feedback.yaak.app/help/articles/6911763-plugins-quick-start)
+the [Quick Start Guide](https://yaak.app/docs/plugin-development/plugins-quick-start)
 
 ## Installation
 
@@ -1,6 +1,6 @@
 {
   "name": "@yaakapp/api",
-  "version": "0.7.1",
+  "version": "0.8.0",
   "keywords": [
     "api-client",
     "insomnia-alternative",
 
File diff suppressed because one or more lines are too long
@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -77,6 +79,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -11,6 +11,8 @@ import type {
   ListHttpRequestsRequest,
   ListHttpRequestsResponse,
   OpenWindowRequest,
+  PromptFormRequest,
+  PromptFormResponse,
   PromptTextRequest,
   PromptTextResponse,
   RenderGrpcRequestRequest,
@@ -23,7 +25,7 @@ import type {
   TemplateRenderRequest,
   WorkspaceInfo,
 } from '../bindings/gen_events.ts';
-import type { HttpRequest } from '../bindings/gen_models.ts';
+import type { Folder, HttpRequest } from '../bindings/gen_models.ts';
 import type { JsonValue } from '../bindings/serde_json/JsonValue';
 
 export type WorkspaceHandle = Pick<WorkspaceInfo, 'id' | 'name'>;
@@ -37,6 +39,7 @@ export interface Context {
   };
   prompt: {
     text(args: PromptTextRequest): Promise<PromptTextResponse['value']>;
+    form(args: PromptFormRequest): Promise<PromptFormResponse['values']>;
   };
   store: {
     set<T>(key: string, value: T): Promise<void>;
@@ -79,6 +82,15 @@ export interface Context {
   };
+  folder: {
+    list(args?: ListFoldersRequest): Promise<ListFoldersResponse['folders']>;
+    getById(args: { id: string }): Promise<Folder | null>;
+    create(
+      args: Omit<Partial<Folder>, 'id' | 'model' | 'createdAt' | 'updatedAt'> &
+        Pick<Folder, 'workspaceId' | 'name'>,
+    ): Promise<Folder>;
+    update(
+      args: Omit<Partial<Folder>, 'model' | 'createdAt' | 'updatedAt'> & Pick<Folder, 'id'>,
+    ): Promise<Folder>;
+    delete(args: { id: string }): Promise<Folder>;
+  };
   httpResponse: {
     find(args: FindHttpResponsesRequest): Promise<FindHttpResponsesResponse['httpResponses']>;
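
A hedged usage sketch of the new `ctx.folder` API for plugin authors; the helper name, folder name, and `@yaakapp/api` import are illustrative, but the call shapes follow the interface above:

```ts
import type { Context } from '@yaakapp/api';

// Ensure a "Smoke Tests" folder exists in a workspace, then update its description.
export async function ensureSmokeTestFolder(ctx: Context, workspaceId: string) {
  const folders = await ctx.folder.list();
  let folder =
    folders.find((f) => f.workspaceId === workspaceId && f.name === 'Smoke Tests') ?? null;

  if (folder == null) {
    // create() requires at least workspaceId and name
    folder = await ctx.folder.create({ workspaceId, name: 'Smoke Tests' });
  }

  // update() is a partial update keyed by id
  folder = await ctx.folder.update({ id: folder.id, description: 'Created by plugin' });
  return folder;
}
```
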
@@ -11,6 +11,7 @@ import type {
|
||||
DeleteKeyValueResponse,
|
||||
DeleteModelResponse,
|
||||
FindHttpResponsesResponse,
|
||||
Folder,
|
||||
GetCookieValueRequest,
|
||||
GetCookieValueResponse,
|
||||
GetHttpRequestByIdResponse,
|
||||
@@ -28,6 +29,7 @@ import type {
|
||||
ListHttpRequestsResponse,
|
||||
ListWorkspacesResponse,
|
||||
PluginContext,
|
||||
PromptFormResponse,
|
||||
PromptTextResponse,
|
||||
RenderGrpcRequestResponse,
|
||||
RenderHttpRequestResponse,
|
||||
@@ -661,6 +663,13 @@ export class PluginInstance {
|
||||
});
|
||||
return reply.value;
|
||||
},
|
||||
form: async (args) => {
|
||||
const reply: PromptFormResponse = await this.#sendForReply(context, {
|
||||
type: 'prompt_form_request',
|
||||
...args,
|
||||
});
|
||||
return reply.values;
|
||||
},
|
||||
},
|
||||
httpResponse: {
|
||||
find: async (args) => {
|
||||
@@ -774,6 +783,44 @@ export class PluginInstance {
|
||||
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
|
||||
return folders;
|
||||
},
|
||||
getById: async (args: { id: string }) => {
|
||||
const payload = { type: 'list_folders_request' } as const;
|
||||
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
|
||||
return folders.find((f) => f.id === args.id) ?? null;
|
||||
},
|
||||
create: async (args) => {
|
||||
const payload = {
|
||||
type: 'upsert_model_request',
|
||||
model: {
|
||||
name: '',
|
||||
...args,
|
||||
id: '',
|
||||
model: 'folder',
|
||||
},
|
||||
} as InternalEventPayload;
|
||||
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
|
||||
return response.model as Folder;
|
||||
},
|
||||
update: async (args) => {
|
||||
const payload = {
|
||||
type: 'upsert_model_request',
|
||||
model: {
|
||||
model: 'folder',
|
||||
...args,
|
||||
},
|
||||
} as InternalEventPayload;
|
||||
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
|
||||
return response.model as Folder;
|
||||
},
|
||||
delete: async (args: { id: string }) => {
|
||||
const payload = {
|
||||
type: 'delete_model_request',
|
||||
model: 'folder',
|
||||
id: args.id,
|
||||
} as InternalEventPayload;
|
||||
const response = await this.#sendForReply<DeleteModelResponse>(context, payload);
|
||||
return response.model as Folder;
|
||||
},
|
||||
},
|
||||
cookies: {
|
||||
getValue: async (args: GetCookieValueRequest) => {
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"@hono/mcp": "^0.2.3",
|
||||
"@hono/node-server": "^1.19.7",
|
||||
"@modelcontextprotocol/sdk": "^1.25.2",
|
||||
"hono": "^4.11.3",
|
||||
"hono": "^4.11.7",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -2,6 +2,12 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import * as z from 'zod';
|
||||
import type { McpServerContext } from '../types.js';
|
||||
import { getWorkspaceContext } from './helpers.js';
|
||||
import {
|
||||
authenticationSchema,
|
||||
authenticationTypeSchema,
|
||||
headersSchema,
|
||||
workspaceIdSchema,
|
||||
} from './schemas.js';
|
||||
|
||||
export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
|
||||
server.registerTool(
|
||||
@@ -10,10 +16,7 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
|
||||
title: 'List Folders',
|
||||
description: 'List all folders in a workspace',
|
||||
inputSchema: {
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
},
|
||||
},
|
||||
async ({ workspaceId }) => {
|
||||
@@ -30,4 +33,116 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
server.registerTool(
|
||||
'get_folder',
|
||||
{
|
||||
title: 'Get Folder',
|
||||
description: 'Get details of a specific folder by ID',
|
||||
inputSchema: {
|
||||
id: z.string().describe('The folder ID'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId }) => {
|
||||
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
|
||||
const folder = await workspaceCtx.yaak.folder.getById({ id });
|
||||
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: JSON.stringify(folder, null, 2),
|
||||
},
|
||||
],
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
server.registerTool(
|
||||
'create_folder',
|
||||
{
|
||||
title: 'Create Folder',
|
||||
description: 'Create a new folder in a workspace',
|
||||
inputSchema: {
|
||||
workspaceId: workspaceIdSchema,
|
||||
name: z.string().describe('Folder name'),
|
||||
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
|
||||
description: z.string().optional().describe('Folder description'),
|
||||
sortPriority: z.number().optional().describe('Sort priority for ordering'),
|
||||
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
},
|
||||
},
|
||||
async ({ workspaceId: ogWorkspaceId, ...args }) => {
|
||||
const workspaceCtx = await getWorkspaceContext(ctx, ogWorkspaceId);
|
||||
const workspaceId = await workspaceCtx.yaak.window.workspaceId();
|
||||
if (!workspaceId) {
|
||||
throw new Error('No workspace is open');
|
||||
}
|
||||
|
||||
const folder = await workspaceCtx.yaak.folder.create({
|
||||
workspaceId: workspaceId,
|
||||
...args,
|
||||
});
|
||||
|
||||
return {
|
||||
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
server.registerTool(
|
||||
'update_folder',
|
||||
{
|
||||
title: 'Update Folder',
|
||||
description: 'Update an existing folder',
|
||||
inputSchema: {
|
||||
id: z.string().describe('Folder ID to update'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
name: z.string().optional().describe('Folder name'),
|
||||
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
|
||||
description: z.string().optional().describe('Folder description'),
|
||||
sortPriority: z.number().optional().describe('Sort priority for ordering'),
|
||||
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId, ...updates }) => {
|
||||
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
|
||||
// Fetch existing folder to merge with updates
|
||||
const existing = await workspaceCtx.yaak.folder.getById({ id });
|
||||
if (!existing) {
|
||||
throw new Error(`Folder with ID ${id} not found`);
|
||||
}
|
||||
// Merge existing fields with updates
|
||||
const folder = await workspaceCtx.yaak.folder.update({
|
||||
...existing,
|
||||
...updates,
|
||||
id,
|
||||
});
|
||||
return {
|
||||
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
|
||||
};
|
||||
},
|
||||
);
|
||||
|
||||
server.registerTool(
|
||||
'delete_folder',
|
||||
{
|
||||
title: 'Delete Folder',
|
||||
description: 'Delete a folder by ID',
|
||||
inputSchema: {
|
||||
id: z.string().describe('Folder ID to delete'),
|
||||
},
|
||||
},
|
||||
async ({ id }) => {
|
||||
const folder = await ctx.yaak.folder.delete({ id });
|
||||
return {
|
||||
content: [{ type: 'text' as const, text: `Deleted: ${folder.name} (${folder.id})` }],
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
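The folder tools above validate their input with the shared Zod schemas, so a client call is just a plain argument object. A hedged illustration of what an MCP client might send to `create_folder` (IDs and values are made up, not taken from this change):

```ts
// Hypothetical `create_folder` arguments. Only `name` is required; `workspaceId`
// may be omitted when a single workspace is open.
const createFolderArgs = {
  workspaceId: 'wk_123',
  name: 'Auth endpoints',
  description: 'Requests related to login and tokens',
  headers: [{ name: 'X-Env', value: 'staging', enabled: true }],
  authenticationType: 'bearer',
  authentication: { token: 'abc123', prefix: 'Bearer' },
};
```
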
@@ -2,6 +2,15 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import * as z from 'zod';
|
||||
import type { McpServerContext } from '../types.js';
|
||||
import { getWorkspaceContext } from './helpers.js';
|
||||
import {
|
||||
authenticationSchema,
|
||||
authenticationTypeSchema,
|
||||
bodySchema,
|
||||
bodyTypeSchema,
|
||||
headersSchema,
|
||||
urlParametersSchema,
|
||||
workspaceIdSchema,
|
||||
} from './schemas.js';
|
||||
|
||||
export function registerHttpRequestTools(server: McpServer, ctx: McpServerContext) {
|
||||
server.registerTool(
|
||||
@@ -10,10 +19,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
title: 'List HTTP Requests',
|
||||
description: 'List all HTTP requests in a workspace',
|
||||
inputSchema: {
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
},
|
||||
},
|
||||
async ({ workspaceId }) => {
|
||||
@@ -38,10 +44,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
description: 'Get details of a specific HTTP request by ID',
|
||||
inputSchema: {
|
||||
id: z.string().describe('The HTTP request ID'),
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId }) => {
|
||||
@@ -67,10 +70,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
inputSchema: {
|
||||
id: z.string().describe('The HTTP request ID to send'),
|
||||
environmentId: z.string().optional().describe('Optional environment ID to use'),
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId }) => {
|
||||
@@ -99,10 +99,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
title: 'Create HTTP Request',
|
||||
description: 'Create a new HTTP request',
|
||||
inputSchema: {
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
name: z
|
||||
.string()
|
||||
.optional()
|
||||
@@ -111,62 +108,12 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
method: z.string().optional().describe('HTTP method (defaults to GET)'),
|
||||
folderId: z.string().optional().describe('Parent folder ID'),
|
||||
description: z.string().optional().describe('Request description'),
|
||||
headers: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('Request headers'),
|
||||
urlParameters: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('URL query parameters'),
|
||||
bodyType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
|
||||
),
|
||||
body: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Body content object. Structure varies by bodyType:\n' +
|
||||
'- "binary": { filePath: "/path/to/file" }\n' +
|
||||
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
|
||||
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
|
||||
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
|
||||
'- text-based (application/json, etc.): { text: "raw body content" }',
|
||||
),
|
||||
authenticationType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
|
||||
),
|
||||
authentication: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication configuration object. Structure varies by authenticationType:\n' +
|
||||
'- "basic": { username: "user", password: "pass" }\n' +
|
||||
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
|
||||
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
|
||||
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
|
||||
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
|
||||
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
|
||||
'- "none": {}',
|
||||
),
|
||||
headers: headersSchema.describe('Request headers'),
|
||||
urlParameters: urlParametersSchema,
|
||||
bodyType: bodyTypeSchema,
|
||||
body: bodySchema,
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
},
|
||||
},
|
||||
async ({ workspaceId: ogWorkspaceId, ...args }) => {
|
||||
@@ -194,68 +141,18 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
description: 'Update an existing HTTP request',
|
||||
inputSchema: {
|
||||
id: z.string().describe('HTTP request ID to update'),
|
||||
workspaceId: z.string().describe('Workspace ID'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
name: z.string().optional().describe('Request name'),
|
||||
url: z.string().optional().describe('Request URL'),
|
||||
method: z.string().optional().describe('HTTP method'),
|
||||
folderId: z.string().optional().describe('Parent folder ID'),
|
||||
description: z.string().optional().describe('Request description'),
|
||||
headers: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('Request headers'),
|
||||
urlParameters: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('URL query parameters'),
|
||||
bodyType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
|
||||
),
|
||||
body: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Body content object. Structure varies by bodyType:\n' +
|
||||
'- "binary": { filePath: "/path/to/file" }\n' +
|
||||
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
|
||||
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
|
||||
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
|
||||
'- text-based (application/json, etc.): { text: "raw body content" }',
|
||||
),
|
||||
authenticationType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
|
||||
),
|
||||
authentication: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication configuration object. Structure varies by authenticationType:\n' +
|
||||
'- "basic": { username: "user", password: "pass" }\n' +
|
||||
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
|
||||
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
|
||||
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
|
||||
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
|
||||
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
|
||||
'- "none": {}',
|
||||
),
|
||||
headers: headersSchema.describe('Request headers'),
|
||||
urlParameters: urlParametersSchema,
|
||||
bodyType: bodyTypeSchema,
|
||||
body: bodySchema,
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId, ...updates }) => {
|
||||
|
||||
67
plugins-external/mcp-server/src/tools/schemas.ts
Normal file
@@ -0,0 +1,67 @@
import * as z from 'zod';

export const workspaceIdSchema = z
  .string()
  .optional()
  .describe('Workspace ID (required if multiple workspaces are open)');

export const headersSchema = z
  .array(
    z.object({
      name: z.string(),
      value: z.string(),
      enabled: z.boolean().default(true),
    }),
  )
  .optional();

export const urlParametersSchema = z
  .array(
    z.object({
      name: z.string(),
      value: z.string(),
      enabled: z.boolean().default(true),
    }),
  )
  .optional()
  .describe('URL query parameters');

export const bodyTypeSchema = z
  .string()
  .optional()
  .describe(
    'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
  );

export const bodySchema = z
  .record(z.string(), z.any())
  .optional()
  .describe(
    'Body content object. Structure varies by bodyType:\n' +
      '- "binary": { filePath: "/path/to/file" }\n' +
      '- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
      '- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
      '- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
      '- text-based (application/json, etc.): { text: "raw body content" }',
  );

export const authenticationTypeSchema = z
  .string()
  .optional()
  .describe(
    'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent.',
  );

export const authenticationSchema = z
  .record(z.string(), z.any())
  .optional()
  .describe(
    'Authentication configuration object. Structure varies by authenticationType:\n' +
      '- "basic": { username: "user", password: "pass" }\n' +
      '- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
      '- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
      '- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
      '- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
      '- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
      '- "none": {}',
  );

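These shared definitions are what let the tool registrations above collapse to one-line schema references. A minimal sketch of how another tool could reuse them; the tool name, handler, and the `server` instance are illustrative, not part of this change:

```ts
import * as z from 'zod';
import { headersSchema, workspaceIdSchema } from './schemas.js';

// Hypothetical tool registration that follows the same pattern as the
// folder and HTTP-request tools in this diff.
server.registerTool(
  'example_tool',
  {
    title: 'Example Tool',
    description: 'Illustrative only',
    inputSchema: {
      workspaceId: workspaceIdSchema,
      name: z.string().describe('A name'),
      headers: headersSchema.describe('Headers to apply'),
    },
  },
  async ({ workspaceId, name, headers }) => ({
    content: [{ type: 'text' as const, text: JSON.stringify({ workspaceId, name, headers }, null, 2) }],
  }),
);
```
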
@@ -11,6 +11,7 @@
  "version": "0.1.0",
  "scripts": {
    "build": "yaakcli build",
    "dev": "yaakcli dev"
    "dev": "yaakcli dev",
    "test": "vitest --run tests"
  }
}

@@ -21,7 +21,8 @@ export const plugin: PluginDefinition = {
    },
  ],
  async onApply(_ctx, { values }) {
    const { username, password } = values;
    const username = values.username ?? '';
    const password = values.password ?? '';
    const value = `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`;
    return { setHeaders: [{ name: 'Authorization', value }] };
  },

77
plugins/auth-basic/tests/index.test.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { plugin } from '../src';
|
||||
|
||||
const ctx = {} as Context;
|
||||
|
||||
describe('auth-basic', () => {
|
||||
test('Both username and password', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'user', password: 'pass' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('user:pass').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Empty password', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'apikey', password: '' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Missing password (undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'apikey' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Missing username (undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { password: 'secret' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':secret').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('No values (both undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: {},
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
});
|
||||
335
plugins/auth-oauth2/src/callbackServer.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import http from 'node:http';
|
||||
import type { Context } from '@yaakapp/api';
|
||||
|
||||
export const HOSTED_CALLBACK_URL = 'https://oauth.yaak.app/redirect';
|
||||
export const DEFAULT_LOCALHOST_PORT = 8765;
|
||||
const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
/** Singleton: only one callback server runs at a time across all OAuth flows. */
|
||||
let activeServer: CallbackServerResult | null = null;
|
||||
|
||||
export interface CallbackServerResult {
|
||||
/** The port the server is listening on */
|
||||
port: number;
|
||||
/** The full redirect URI to register with the OAuth provider */
|
||||
redirectUri: string;
|
||||
/** Promise that resolves with the callback URL when received */
|
||||
waitForCallback: () => Promise<string>;
|
||||
/** Stop the server */
|
||||
stop: () => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a local HTTP server to receive OAuth callbacks.
|
||||
* Only one server runs at a time — if a previous server is still active,
|
||||
* it is stopped before starting the new one.
|
||||
* Returns the port, redirect URI, and a promise that resolves when the callback is received.
|
||||
*/
|
||||
export function startCallbackServer(options: {
|
||||
/** Specific port to use, or 0 for random available port */
|
||||
port?: number;
|
||||
/** Path for the callback endpoint */
|
||||
path?: string;
|
||||
/** Timeout in milliseconds (default 5 minutes) */
|
||||
timeoutMs?: number;
|
||||
}): Promise<CallbackServerResult> {
|
||||
// Stop any previously active server before starting a new one
|
||||
if (activeServer) {
|
||||
console.log('[oauth2] Stopping previous callback server before starting new one');
|
||||
activeServer.stop();
|
||||
activeServer = null;
|
||||
}
|
||||
|
||||
const { port = 0, path = '/callback', timeoutMs = CALLBACK_TIMEOUT_MS } = options;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let callbackResolve: ((url: string) => void) | null = null;
|
||||
let callbackReject: ((err: Error) => void) | null = null;
|
||||
let timeoutHandle: ReturnType<typeof setTimeout> | null = null;
|
||||
let stopped = false;
|
||||
|
||||
const server = http.createServer((req: IncomingMessage, res: ServerResponse) => {
|
||||
const reqUrl = new URL(req.url ?? '/', `http://${req.headers.host}`);
|
||||
|
||||
// Only handle the callback path
|
||||
if (reqUrl.pathname !== path && reqUrl.pathname !== `${path}/`) {
|
||||
res.writeHead(404, { 'Content-Type': 'text/plain' });
|
||||
res.end('Not Found');
|
||||
return;
|
||||
}
|
||||
|
||||
if (req.method === 'POST') {
|
||||
// POST: read JSON body with the final callback URL and resolve
|
||||
let body = '';
|
||||
req.on('data', (chunk: Buffer) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', () => {
|
||||
try {
|
||||
const { url: callbackUrl } = JSON.parse(body);
|
||||
if (!callbackUrl || typeof callbackUrl !== 'string') {
|
||||
res.writeHead(400, { 'Content-Type': 'text/plain' });
|
||||
res.end('Missing url in request body');
|
||||
return;
|
||||
}
|
||||
|
||||
// Send success response
|
||||
res.writeHead(200, { 'Content-Type': 'text/plain' });
|
||||
res.end('OK');
|
||||
|
||||
// Resolve the callback promise
|
||||
if (callbackResolve) {
|
||||
callbackResolve(callbackUrl);
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
|
||||
// Stop the server after a short delay to ensure response is sent
|
||||
setTimeout(() => stopServer(), 100);
|
||||
} catch {
|
||||
res.writeHead(400, { 'Content-Type': 'text/plain' });
|
||||
res.end('Invalid JSON');
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// GET: serve intermediate page that reads the fragment and POSTs back
|
||||
res.writeHead(200, { 'Content-Type': 'text/html' });
|
||||
res.end(getFragmentForwardingHtml());
|
||||
});
|
||||
|
||||
server.on('error', (err: Error) => {
|
||||
if (!stopped) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
const stopServer = () => {
|
||||
if (stopped) return;
|
||||
stopped = true;
|
||||
|
||||
// Clear the singleton reference
|
||||
if (activeServer?.stop === stopServer) {
|
||||
activeServer = null;
|
||||
}
|
||||
|
||||
if (timeoutHandle) {
|
||||
clearTimeout(timeoutHandle);
|
||||
timeoutHandle = null;
|
||||
}
|
||||
|
||||
server.close();
|
||||
|
||||
if (callbackReject) {
|
||||
callbackReject(new Error('Callback server stopped'));
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
};
|
||||
|
||||
server.listen(port, '127.0.0.1', () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === 'string') {
|
||||
reject(new Error('Failed to get server address'));
|
||||
return;
|
||||
}
|
||||
|
||||
const actualPort = address.port;
|
||||
const redirectUri = `http://127.0.0.1:${actualPort}${path}`;
|
||||
|
||||
console.log(`[oauth2] Callback server listening on ${redirectUri}`);
|
||||
|
||||
const result: CallbackServerResult = {
|
||||
port: actualPort,
|
||||
redirectUri,
|
||||
waitForCallback: () => {
|
||||
return new Promise<string>((res, rej) => {
|
||||
if (stopped) {
|
||||
rej(new Error('Callback server already stopped'));
|
||||
return;
|
||||
}
|
||||
|
||||
callbackResolve = res;
|
||||
callbackReject = rej;
|
||||
|
||||
// Set timeout
|
||||
timeoutHandle = setTimeout(() => {
|
||||
if (callbackReject) {
|
||||
callbackReject(new Error('Authorization timed out'));
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
stopServer();
|
||||
}, timeoutMs);
|
||||
});
|
||||
},
|
||||
stop: stopServer,
|
||||
};
|
||||
|
||||
activeServer = result;
|
||||
resolve(result);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the redirect URI for the hosted callback page.
|
||||
* The hosted page will redirect to the local server with the OAuth response.
|
||||
*/
|
||||
export function buildHostedCallbackRedirectUri(localPort: number, localPath: string): string {
|
||||
const localRedirectUri = `http://127.0.0.1:${localPort}${localPath}`;
|
||||
// The hosted callback page will read params and redirect to the local server
|
||||
return `${HOSTED_CALLBACK_URL}?redirect_to=${encodeURIComponent(localRedirectUri)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open an authorization URL in the system browser, start a local callback server,
|
||||
* and wait for the OAuth provider to redirect back.
|
||||
*
|
||||
* Returns the raw callback URL and the redirect URI that was registered with the
|
||||
* OAuth provider (needed for token exchange).
|
||||
*/
|
||||
export async function getRedirectUrlViaExternalBrowser(
|
||||
ctx: Context,
|
||||
authorizationUrl: URL,
|
||||
options: {
|
||||
callbackType: 'localhost' | 'hosted';
|
||||
callbackPort?: number;
|
||||
},
|
||||
): Promise<{ callbackUrl: string; redirectUri: string }> {
|
||||
const { callbackType, callbackPort } = options;
|
||||
|
||||
// Determine port based on callback type:
|
||||
// - localhost: use specified port or default stable port
|
||||
// - hosted: use random port (0) since hosted page redirects to local
|
||||
const port = callbackType === 'localhost' ? (callbackPort ?? DEFAULT_LOCALHOST_PORT) : 0;
|
||||
|
||||
console.log(
|
||||
`[oauth2] Starting callback server (type: ${callbackType}, port: ${port || 'random'})`,
|
||||
);
|
||||
|
||||
const server = await startCallbackServer({
|
||||
port,
|
||||
path: '/callback',
|
||||
});
|
||||
|
||||
try {
|
||||
// Determine the redirect URI to send to the OAuth provider
|
||||
let oauthRedirectUri: string;
|
||||
|
||||
if (callbackType === 'hosted') {
|
||||
oauthRedirectUri = buildHostedCallbackRedirectUri(server.port, '/callback');
|
||||
console.log('[oauth2] Using hosted callback redirect:', oauthRedirectUri);
|
||||
} else {
|
||||
oauthRedirectUri = server.redirectUri;
|
||||
console.log('[oauth2] Using localhost callback redirect:', oauthRedirectUri);
|
||||
}
|
||||
|
||||
// Set the redirect URI on the authorization URL
|
||||
authorizationUrl.searchParams.set('redirect_uri', oauthRedirectUri);
|
||||
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Opening external browser:', authorizationUrlStr);
|
||||
|
||||
// Show toast to inform user
|
||||
await ctx.toast.show({
|
||||
message: 'Opening browser for authorization...',
|
||||
icon: 'info',
|
||||
timeout: 3000,
|
||||
});
|
||||
|
||||
// Open the system browser
|
||||
await ctx.window.openExternalUrl(authorizationUrlStr);
|
||||
|
||||
// Wait for the callback
|
||||
console.log('[oauth2] Waiting for callback on', server.redirectUri);
|
||||
const callbackUrl = await server.waitForCallback();
|
||||
|
||||
console.log('[oauth2] Received callback:', callbackUrl);
|
||||
|
||||
return { callbackUrl, redirectUri: oauthRedirectUri };
|
||||
} finally {
|
||||
server.stop();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Intermediate HTML page that reads the URL fragment and _fragment query param,
|
||||
* reconstructs a proper OAuth callback URL, and POSTs it back to the server.
|
||||
*
|
||||
* Handles three cases:
|
||||
* - Localhost implicit: fragment is in location.hash (e.g. #access_token=...)
|
||||
* - Hosted implicit: fragment was converted to ?_fragment=... by the hosted redirect page
|
||||
* - Auth code: no fragment, code is already in query params
|
||||
*/
|
||||
function getFragmentForwardingHtml(): string {
|
||||
return `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Yaak</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
min-height: 100vh;
|
||||
background: hsl(244,23%,14%);
|
||||
color: hsl(245,23%,85%);
|
||||
}
|
||||
.container { text-align: center; }
|
||||
.logo { display: block; width: 100px; height: 100px; margin: 0 auto 32px; border-radius: 50%; }
|
||||
h1 { font-size: 28px; font-weight: 600; margin-bottom: 12px; }
|
||||
p { font-size: 16px; color: hsl(245,18%,58%); }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<svg class="logo" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="g" x1="0" y1="0" x2="1" y2="0" gradientUnits="userSpaceOnUse" gradientTransform="matrix(649.94,712.03,-712.03,649.94,179.25,220.59)"><stop offset="0" stop-color="#4cc48c"/><stop offset=".5" stop-color="#476cc9"/><stop offset="1" stop-color="#ba1ab7"/></linearGradient></defs><rect x="0" y="0" width="1024" height="1024" fill="url(#g)"/><g transform="matrix(0.822,0,0,0.822,91.26,91.26)"><path d="M766.775,105.176C902.046,190.129 992.031,340.639 992.031,512C992.031,706.357 876.274,873.892 710,949.361C684.748,838.221 632.417,791.074 538.602,758.96C536.859,790.593 545.561,854.983 522.327,856.611C477.951,859.719 321.557,782.368 310.75,710.135C300.443,641.237 302.536,535.834 294.475,482.283C86.974,483.114 245.65,303.256 245.65,303.256L261.925,368.357L294.475,368.357C294.475,368.357 298.094,296.03 310.75,286.981C326.511,275.713 366.457,254.592 473.502,254.431C519.506,190.629 692.164,133.645 766.775,105.176ZM603.703,352.082C598.577,358.301 614.243,384.787 623.39,401.682C639.967,432.299 672.34,459.32 760.231,456.739C780.796,456.135 808.649,456.743 831.555,448.316C919.689,369.191 665.548,260.941 652.528,270.706C629.157,288.235 677.433,340.481 685.079,352.082C663.595,350.818 630.521,352.121 603.703,352.082ZM515.817,516.822C491.026,516.822 470.898,536.949 470.898,561.741C470.898,586.532 491.026,606.66 515.817,606.66C540.609,606.66 560.736,586.532 560.736,561.741C560.736,536.949 540.609,516.822 515.817,516.822ZM656.608,969.83C610.979,984.25 562.391,992.031 512,992.031C247.063,992.031 31.969,776.937 31.969,512C31.969,247.063 247.063,31.969 512,31.969C581.652,31.969 647.859,46.835 707.634,73.574C674.574,86.913 627.224,104.986 620,103.081C343.573,30.201 98.64,283.528 98.64,511.993C98.64,761.842 376.244,989.043 627.831,910C637.21,907.053 645.743,936.753 656.608,969.83Z" fill="#fff"/></g></svg>
|
||||
<h1 id="title">Authorizing...</h1>
|
||||
<p id="message">Please wait</p>
|
||||
</div>
|
||||
<script>
|
||||
(function() {
|
||||
var title = document.getElementById('title');
|
||||
var message = document.getElementById('message');
|
||||
var url = new URL(window.location.href);
|
||||
var fragment = window.location.hash;
|
||||
var fragmentParam = url.searchParams.get('_fragment');
|
||||
|
||||
// Build the final callback URL:
|
||||
// 1. If _fragment query param exists (from hosted redirect), convert it back to a real fragment
|
||||
// 2. If location.hash exists (direct localhost implicit), use it as-is
|
||||
// 3. Otherwise (auth code flow), use the URL as-is with query params
|
||||
if (fragmentParam) {
|
||||
url.searchParams.delete('_fragment');
|
||||
url.hash = fragmentParam;
|
||||
} else if (fragment && fragment.length > 1) {
|
||||
url.hash = fragment;
|
||||
}
|
||||
|
||||
// POST the final URL back to the callback server
|
||||
fetch(url.pathname, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ url: url.toString() })
|
||||
}).then(function(res) {
|
||||
if (res.ok) {
|
||||
title.textContent = 'Authorization Complete';
|
||||
message.textContent = 'You may close this tab and return to Yaak';
|
||||
} else {
|
||||
title.textContent = 'Authorization Failed';
|
||||
message.textContent = 'Something went wrong. Please try again.';
|
||||
}
|
||||
}).catch(function() {
|
||||
title.textContent = 'Authorization Failed';
|
||||
message.textContent = 'Something went wrong. Please try again.';
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>`;
|
||||
}
|
||||
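`getRedirectUrlViaExternalBrowser` wraps the whole round trip: start the listener, open the system browser, wait for the redirect, then shut the server down. A condensed caller sketch based on the signature above; the `ctx` value is assumed to be in scope and the authorization URL is made up:

```ts
// Sketch only: drive the external-browser flow and pull the auth code back out.
const authUrl = new URL('https://auth.example.com/oauth/authorize?client_id=abc');
const { callbackUrl, redirectUri } = await getRedirectUrlViaExternalBrowser(ctx, authUrl, {
  callbackType: 'localhost',
  callbackPort: DEFAULT_LOCALHOST_PORT,
});
const code = new URL(callbackUrl).searchParams.get('code');
// `redirectUri` is returned so the token exchange can send the exact value that
// was registered with the provider.
```
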
@@ -1,5 +1,6 @@
|
||||
import { createHash, randomBytes } from 'node:crypto';
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { getRedirectUrlViaExternalBrowser } from '../callbackServer';
|
||||
import { fetchAccessToken } from '../fetchAccessToken';
|
||||
import { getOrRefreshAccessToken } from '../getOrRefreshAccessToken';
|
||||
import type { AccessToken, TokenStoreArgs } from '../store';
|
||||
@@ -10,6 +11,15 @@ export const PKCE_SHA256 = 'S256';
|
||||
export const PKCE_PLAIN = 'plain';
|
||||
export const DEFAULT_PKCE_METHOD = PKCE_SHA256;
|
||||
|
||||
export type CallbackType = 'localhost' | 'hosted';
|
||||
|
||||
export interface ExternalBrowserOptions {
|
||||
useExternalBrowser: boolean;
|
||||
callbackType: CallbackType;
|
||||
/** Port for localhost callback (only used when callbackType is 'localhost') */
|
||||
callbackPort?: number;
|
||||
}
|
||||
|
||||
export async function getAuthorizationCode(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
@@ -25,6 +35,7 @@ export async function getAuthorizationCode(
|
||||
credentialsInBody,
|
||||
pkce,
|
||||
tokenName,
|
||||
externalBrowser,
|
||||
}: {
|
||||
authorizationUrl: string;
|
||||
accessTokenUrl: string;
|
||||
@@ -40,6 +51,7 @@ export async function getAuthorizationCode(
|
||||
codeVerifier: string;
|
||||
} | null;
|
||||
tokenName: 'access_token' | 'id_token';
|
||||
externalBrowser?: ExternalBrowserOptions;
|
||||
},
|
||||
): Promise<AccessToken> {
|
||||
const tokenArgs: TokenStoreArgs = {
|
||||
@@ -68,7 +80,6 @@ export async function getAuthorizationCode(
|
||||
}
|
||||
authorizationUrl.searchParams.set('response_type', 'code');
|
||||
authorizationUrl.searchParams.set('client_id', clientId);
|
||||
if (redirectUri) authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
if (scope) authorizationUrl.searchParams.set('scope', scope);
|
||||
if (state) authorizationUrl.searchParams.set('state', state);
|
||||
if (audience) authorizationUrl.searchParams.set('audience', audience);
|
||||
@@ -80,12 +91,65 @@ export async function getAuthorizationCode(
|
||||
authorizationUrl.searchParams.set('code_challenge_method', pkce.challengeMethod);
|
||||
}
|
||||
|
||||
let code: string;
|
||||
let actualRedirectUri: string | null = redirectUri;
|
||||
|
||||
// Use external browser flow if enabled
|
||||
if (externalBrowser?.useExternalBrowser) {
|
||||
const result = await getRedirectUrlViaExternalBrowser(ctx, authorizationUrl, {
|
||||
callbackType: externalBrowser.callbackType,
|
||||
callbackPort: externalBrowser.callbackPort,
|
||||
});
|
||||
// Pass null to skip redirect URI matching — the callback came from our own local server
|
||||
const extractedCode = extractCode(result.callbackUrl, null);
|
||||
if (!extractedCode) {
|
||||
throw new Error('No authorization code found in callback URL');
|
||||
}
|
||||
code = extractedCode;
|
||||
actualRedirectUri = result.redirectUri;
|
||||
} else {
|
||||
// Use embedded browser flow (original behavior)
|
||||
if (redirectUri) {
|
||||
authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
}
|
||||
code = await getCodeViaEmbeddedBrowser(ctx, contextId, authorizationUrl, redirectUri);
|
||||
}
|
||||
|
||||
console.log('[oauth2] Code found');
|
||||
const response = await fetchAccessToken(ctx, {
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl,
|
||||
clientId,
|
||||
clientSecret,
|
||||
scope,
|
||||
audience,
|
||||
credentialsInBody,
|
||||
params: [
|
||||
{ name: 'code', value: code },
|
||||
...(pkce ? [{ name: 'code_verifier', value: pkce.codeVerifier }] : []),
|
||||
...(actualRedirectUri ? [{ name: 'redirect_uri', value: actualRedirectUri }] : []),
|
||||
],
|
||||
});
|
||||
|
||||
return storeToken(ctx, tokenArgs, response, tokenName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authorization code using the embedded browser window.
|
||||
* This is the original flow that monitors navigation events.
|
||||
*/
|
||||
async function getCodeViaEmbeddedBrowser(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
authorizationUrl: URL,
|
||||
redirectUri: string | null,
|
||||
): Promise<string> {
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Authorizing', authorizationUrlStr);
|
||||
console.log('[oauth2] Authorizing via embedded browser', authorizationUrlStr);
|
||||
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: none
|
||||
const code = await new Promise<string>(async (resolve, reject) => {
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: Required for this pattern
|
||||
return new Promise<string>(async (resolve, reject) => {
|
||||
let foundCode = false;
|
||||
const { close } = await ctx.window.openUrl({
|
||||
dataDirKey,
|
||||
@@ -110,31 +174,12 @@ export async function getAuthorizationCode(
|
||||
return;
|
||||
}
|
||||
|
||||
// Close the window here, because we don't need it anymore!
|
||||
foundCode = true;
|
||||
close();
|
||||
resolve(code);
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
console.log('[oauth2] Code found');
|
||||
const response = await fetchAccessToken(ctx, {
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl,
|
||||
clientId,
|
||||
clientSecret,
|
||||
scope,
|
||||
audience,
|
||||
credentialsInBody,
|
||||
params: [
|
||||
{ name: 'code', value: code },
|
||||
...(pkce ? [{ name: 'code_verifier', value: pkce.codeVerifier }] : []),
|
||||
...(redirectUri ? [{ name: 'redirect_uri', value: redirectUri }] : []),
|
||||
],
|
||||
});
|
||||
|
||||
return storeToken(ctx, tokenArgs, response, tokenName);
|
||||
}
|
||||
|
||||
export function genPkceCodeVerifier() {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { getRedirectUrlViaExternalBrowser } from '../callbackServer';
|
||||
import type { AccessToken, AccessTokenRawResponse } from '../store';
|
||||
import { getDataDirKey, getToken, storeToken } from '../store';
|
||||
import { isTokenExpired } from '../util';
|
||||
import type { ExternalBrowserOptions } from './authorizationCode';
|
||||
|
||||
export async function getImplicit(
|
||||
ctx: Context,
|
||||
@@ -15,6 +17,7 @@ export async function getImplicit(
|
||||
state,
|
||||
audience,
|
||||
tokenName,
|
||||
externalBrowser,
|
||||
}: {
|
||||
authorizationUrl: string;
|
||||
responseType: string;
|
||||
@@ -24,6 +27,7 @@ export async function getImplicit(
|
||||
state: string | null;
|
||||
audience: string | null;
|
||||
tokenName: 'access_token' | 'id_token';
|
||||
externalBrowser?: ExternalBrowserOptions;
|
||||
},
|
||||
): Promise<AccessToken> {
|
||||
const tokenArgs = {
|
||||
@@ -43,9 +47,8 @@ export async function getImplicit(
|
||||
} catch {
|
||||
throw new Error(`Invalid authorization URL "${authorizationUrlRaw}"`);
|
||||
}
|
||||
authorizationUrl.searchParams.set('response_type', 'token');
|
||||
authorizationUrl.searchParams.set('response_type', responseType);
|
||||
authorizationUrl.searchParams.set('client_id', clientId);
|
||||
if (redirectUri) authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
if (scope) authorizationUrl.searchParams.set('scope', scope);
|
||||
if (state) authorizationUrl.searchParams.set('state', state);
|
||||
if (audience) authorizationUrl.searchParams.set('audience', audience);
|
||||
@@ -56,11 +59,55 @@ export async function getImplicit(
|
||||
);
|
||||
}
|
||||
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: none
|
||||
const newToken = await new Promise<AccessToken>(async (resolve, reject) => {
|
||||
let newToken: AccessToken;
|
||||
|
||||
// Use external browser flow if enabled
|
||||
if (externalBrowser?.useExternalBrowser) {
|
||||
const result = await getRedirectUrlViaExternalBrowser(ctx, authorizationUrl, {
|
||||
callbackType: externalBrowser.callbackType,
|
||||
callbackPort: externalBrowser.callbackPort,
|
||||
});
|
||||
newToken = await extractImplicitToken(ctx, result.callbackUrl, tokenArgs, tokenName);
|
||||
} else {
|
||||
// Use embedded browser flow (original behavior)
|
||||
if (redirectUri) {
|
||||
authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
}
|
||||
newToken = await getTokenViaEmbeddedBrowser(
|
||||
ctx,
|
||||
contextId,
|
||||
authorizationUrl,
|
||||
tokenArgs,
|
||||
tokenName,
|
||||
);
|
||||
}
|
||||
|
||||
return newToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get token using the embedded browser window.
|
||||
* This is the original flow that monitors navigation events.
|
||||
*/
|
||||
async function getTokenViaEmbeddedBrowser(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
authorizationUrl: URL,
|
||||
tokenArgs: {
|
||||
contextId: string;
|
||||
clientId: string;
|
||||
accessTokenUrl: null;
|
||||
authorizationUrl: string;
|
||||
},
|
||||
tokenName: 'access_token' | 'id_token',
|
||||
): Promise<AccessToken> {
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Authorizing via embedded browser (implicit)', authorizationUrlStr);
|
||||
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: Required for this pattern
|
||||
return new Promise<AccessToken>(async (resolve, reject) => {
|
||||
let foundAccessToken = false;
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const { close } = await ctx.window.openUrl({
|
||||
dataDirKey,
|
||||
url: authorizationUrlStr,
|
||||
@@ -97,6 +144,56 @@ export async function getImplicit(
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
return newToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the implicit grant token from a callback URL and store it.
|
||||
*/
|
||||
async function extractImplicitToken(
|
||||
ctx: Context,
|
||||
callbackUrl: string,
|
||||
tokenArgs: {
|
||||
contextId: string;
|
||||
clientId: string;
|
||||
accessTokenUrl: null;
|
||||
authorizationUrl: string;
|
||||
},
|
||||
tokenName: 'access_token' | 'id_token',
|
||||
): Promise<AccessToken> {
|
||||
const url = new URL(callbackUrl);
|
||||
|
||||
// Check for errors
|
||||
if (url.searchParams.has('error')) {
|
||||
throw new Error(`Failed to authorize: ${url.searchParams.get('error')}`);
|
||||
}
|
||||
|
||||
// Extract token from fragment
|
||||
const hash = url.hash.slice(1);
|
||||
const params = new URLSearchParams(hash);
|
||||
|
||||
// Also check query params (in case fragment was converted)
|
||||
const accessToken = params.get(tokenName) ?? url.searchParams.get(tokenName);
|
||||
if (!accessToken) {
|
||||
throw new Error(`No ${tokenName} found in callback URL`);
|
||||
}
|
||||
|
||||
// Build response from params (prefer fragment, fall back to query)
|
||||
const response: AccessTokenRawResponse = {
|
||||
access_token: params.get('access_token') ?? url.searchParams.get('access_token') ?? '',
|
||||
token_type: params.get('token_type') ?? url.searchParams.get('token_type') ?? undefined,
|
||||
expires_in: params.has('expires_in')
|
||||
? parseInt(params.get('expires_in') ?? '0', 10)
|
||||
: url.searchParams.has('expires_in')
|
||||
? parseInt(url.searchParams.get('expires_in') ?? '0', 10)
|
||||
: undefined,
|
||||
scope: params.get('scope') ?? url.searchParams.get('scope') ?? undefined,
|
||||
};
|
||||
|
||||
// Include id_token if present
|
||||
const idToken = params.get('id_token') ?? url.searchParams.get('id_token');
|
||||
if (idToken) {
|
||||
response.id_token = idToken;
|
||||
}
|
||||
|
||||
return storeToken(ctx, tokenArgs, response);
|
||||
}
|
||||
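Because implicit-grant tokens arrive in the URL fragment, the callback can reach `extractImplicitToken` in two shapes. A worked example with made-up values:

```ts
// A localhost implicit redirect carries the token directly in the fragment:
const localCallback =
  'http://127.0.0.1:8765/callback#access_token=abc123&token_type=bearer&expires_in=3600';
// The hosted redirect page instead forwards it as a `_fragment` query parameter:
const hostedCallback =
  'http://127.0.0.1:8765/callback?_fragment=access_token%3Dabc123%26token_type%3Dbearer%26expires_in%3D3600';
// The intermediate HTML page converts `_fragment` back into a real fragment before
// POSTing the URL to the local server, so extractImplicitToken reads `access_token`
// from the fragment in both cases (with a plain query-string fallback as a safety net).
```
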
|
||||
@@ -5,7 +5,9 @@ import type {
|
||||
JsonPrimitive,
|
||||
PluginDefinition,
|
||||
} from '@yaakapp/api';
|
||||
import { DEFAULT_LOCALHOST_PORT, HOSTED_CALLBACK_URL } from './callbackServer';
|
||||
import {
|
||||
type CallbackType,
|
||||
DEFAULT_PKCE_METHOD,
|
||||
genPkceCodeVerifier,
|
||||
getAuthorizationCode,
|
||||
@@ -134,8 +136,6 @@ export const plugin: PluginDefinition = {
|
||||
defaultValue: defaultGrantType,
|
||||
options: grantTypes,
|
||||
},
|
||||
|
||||
// Always-present fields
|
||||
{
|
||||
type: 'text',
|
||||
name: 'clientId',
|
||||
@@ -169,11 +169,105 @@ export const plugin: PluginDefinition = {
|
||||
completionOptions: accessTokenUrls.map((url) => ({ label: url, value: url })),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'redirectUri',
|
||||
label: 'Redirect URI',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
type: 'banner',
|
||||
inputs: [
|
||||
{
|
||||
type: 'checkbox',
|
||||
name: 'useExternalBrowser',
|
||||
label: 'Use External Browser',
|
||||
description:
|
||||
'Open authorization URL in your system browser instead of the embedded browser. ' +
|
||||
'Useful when the OAuth provider blocks embedded browsers or you need existing browser sessions.',
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'redirectUri',
|
||||
label: 'Redirect URI',
|
||||
description:
|
||||
'URI the OAuth provider redirects to after authorization. Yaak intercepts this automatically in its embedded browser so any valid URI will work.',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser }) => !useExternalBrowser,
|
||||
),
|
||||
},
|
||||
{
|
||||
type: 'h_stack',
|
||||
inputs: [
|
||||
{
|
||||
type: 'select',
|
||||
name: 'callbackType',
|
||||
label: 'Callback Type',
|
||||
description:
|
||||
'"Hosted Redirect" uses an external Yaak-hosted endpoint. "Localhost" starts a local server to receive the callback.',
|
||||
defaultValue: 'hosted',
|
||||
options: [
|
||||
{ label: 'Hosted Redirect', value: 'hosted' },
|
||||
{ label: 'Localhost', value: 'localhost' },
|
||||
],
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser }) => !!useExternalBrowser,
|
||||
),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'callbackPort',
|
||||
label: 'Callback Port',
|
||||
placeholder: `${DEFAULT_LOCALHOST_PORT}`,
|
||||
description:
|
||||
'Port for the local callback server. Defaults to ' +
|
||||
DEFAULT_LOCALHOST_PORT +
|
||||
' if empty.',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser, callbackType }) =>
|
||||
!!useExternalBrowser && callbackType === 'localhost',
|
||||
),
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'banner',
|
||||
color: 'info',
|
||||
inputs: [
|
||||
{
|
||||
type: 'markdown',
|
||||
content: 'Redirect URI to Register',
|
||||
async dynamic(_ctx, { values }) {
|
||||
const grantType = String(values.grantType ?? defaultGrantType);
|
||||
const useExternalBrowser = !!values.useExternalBrowser;
|
||||
const callbackType = (stringArg(values, 'callbackType') ||
|
||||
'localhost') as CallbackType;
|
||||
|
||||
// Only show for authorization_code and implicit with external browser enabled
|
||||
if (
|
||||
!['authorization_code', 'implicit'].includes(grantType) ||
|
||||
!useExternalBrowser
|
||||
) {
|
||||
return { hidden: true };
|
||||
}
|
||||
|
||||
// Compute the redirect URI based on callback type
|
||||
let redirectUri: string;
|
||||
if (callbackType === 'hosted') {
|
||||
redirectUri = HOSTED_CALLBACK_URL;
|
||||
} else {
|
||||
const port = intArg(values, 'callbackPort') || DEFAULT_LOCALHOST_PORT;
|
||||
redirectUri = `http://127.0.0.1:${port}/callback`;
|
||||
}
|
||||
|
||||
return {
|
||||
hidden: false,
|
||||
content: `Register \`${redirectUri}\` as a redirect URI with your OAuth provider.`,
|
||||
};
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
@@ -182,12 +276,8 @@ export const plugin: PluginDefinition = {
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'audience',
|
||||
label: 'Audience',
|
||||
optional: true,
|
||||
},
|
||||
{ type: 'text', name: 'scope', label: 'Scope', optional: true },
|
||||
{ type: 'text', name: 'audience', label: 'Audience', optional: true },
|
||||
{
|
||||
type: 'select',
|
||||
name: 'tokenName',
|
||||
@@ -203,44 +293,54 @@ export const plugin: PluginDefinition = {
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'checkbox',
|
||||
name: 'usePkce',
|
||||
label: 'Use PKCE',
|
||||
dynamic: hiddenIfNot(['authorization_code']),
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
name: 'pkceChallengeMethod',
|
||||
label: 'Code Challenge Method',
|
||||
options: [
|
||||
{ label: 'SHA-256', value: PKCE_SHA256 },
|
||||
{ label: 'Plain', value: PKCE_PLAIN },
|
||||
type: 'banner',
|
||||
inputs: [
|
||||
{
|
||||
type: 'checkbox',
|
||||
name: 'usePkce',
|
||||
label: 'Use PKCE',
|
||||
dynamic: hiddenIfNot(['authorization_code']),
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
name: 'pkceChallengeMethod',
|
||||
label: 'Code Challenge Method',
|
||||
options: [
|
||||
{ label: 'SHA-256', value: PKCE_SHA256 },
|
||||
{ label: 'Plain', value: PKCE_PLAIN },
|
||||
],
|
||||
defaultValue: DEFAULT_PKCE_METHOD,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'pkceCodeChallenge',
|
||||
label: 'Code Verifier',
|
||||
placeholder: 'Automatically generated when not set',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
],
|
||||
defaultValue: DEFAULT_PKCE_METHOD,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'pkceCodeChallenge',
|
||||
label: 'Code Verifier',
|
||||
placeholder: 'Automatically generated when not set',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'username',
|
||||
label: 'Username',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'password',
|
||||
label: 'Password',
|
||||
password: true,
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
type: 'h_stack',
|
||||
inputs: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'username',
|
||||
label: 'Username',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'password',
|
||||
label: 'Password',
|
||||
password: true,
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
@@ -258,7 +358,6 @@ export const plugin: PluginDefinition = {
|
||||
type: 'accordion',
|
||||
label: 'Advanced',
|
||||
inputs: [
|
||||
{ type: 'text', name: 'scope', label: 'Scope', optional: true },
|
||||
{
|
||||
type: 'text',
|
||||
name: 'headerName',
|
||||
@@ -321,6 +420,16 @@ export const plugin: PluginDefinition = {
|
||||
const credentialsInBody = values.credentials === 'body';
|
||||
const tokenName = values.tokenName === 'id_token' ? 'id_token' : 'access_token';
|
||||
|
||||
// Build external browser options if enabled
|
||||
const useExternalBrowser = !!values.useExternalBrowser;
|
||||
const externalBrowserOptions = useExternalBrowser
|
||||
? {
|
||||
useExternalBrowser: true,
|
||||
callbackType: (stringArg(values, 'callbackType') || 'localhost') as CallbackType,
|
||||
callbackPort: intArg(values, 'callbackPort') ?? undefined,
|
||||
}
|
||||
: undefined;
|
||||
|
||||
let token: AccessToken;
|
||||
if (grantType === 'authorization_code') {
|
||||
const authorizationUrl = stringArg(values, 'authorizationUrl');
|
||||
@@ -348,6 +457,7 @@ export const plugin: PluginDefinition = {
|
||||
}
|
||||
: null,
|
||||
tokenName: tokenName,
|
||||
externalBrowser: externalBrowserOptions,
|
||||
});
|
||||
} else if (grantType === 'implicit') {
|
||||
const authorizationUrl = stringArg(values, 'authorizationUrl');
|
||||
@@ -362,6 +472,7 @@ export const plugin: PluginDefinition = {
|
||||
audience: stringArgOrNull(values, 'audience'),
|
||||
state: stringArgOrNull(values, 'state'),
|
||||
tokenName: tokenName,
|
||||
externalBrowser: externalBrowserOptions,
|
||||
});
|
||||
} else if (grantType === 'client_credentials') {
|
||||
const accessTokenUrl = stringArg(values, 'accessTokenUrl');
|
||||
@@ -414,3 +525,10 @@ function stringArg(values: Record<string, JsonPrimitive | undefined>, name: stri
|
||||
if (!arg) return '';
|
||||
return arg;
|
||||
}
|
||||
|
||||
function intArg(values: Record<string, JsonPrimitive | undefined>, name: string): number | null {
|
||||
const arg = values[name];
|
||||
if (arg == null || arg === '') return null;
|
||||
const num = parseInt(`${arg}`, 10);
|
||||
return Number.isNaN(num) ? null : num;
|
||||
}
|
||||
|
||||
@@ -19,9 +19,6 @@ export const synthwave84: Theme = {
    danger: 'hsl(340, 100%, 65%)',
  },
  components: {
    dialog: {
      surface: 'hsl(253, 45%, 12%)',
    },
    sidebar: {
      surface: 'hsl(253, 42%, 18%)',
      border: 'hsl(253, 40%, 22%)',

161
src-web/components/CloneGitRepositoryDialog.tsx
Normal file
@@ -0,0 +1,161 @@
|
||||
import { open } from '@tauri-apps/plugin-dialog';
|
||||
import { gitClone } from '@yaakapp-internal/git';
|
||||
import { useState } from 'react';
|
||||
import { openWorkspaceFromSyncDir } from '../commands/openWorkspaceFromSyncDir';
|
||||
import { appInfo } from '../lib/appInfo';
|
||||
import { showErrorToast } from '../lib/toast';
|
||||
import { Banner } from './core/Banner';
|
||||
import { Button } from './core/Button';
|
||||
import { Checkbox } from './core/Checkbox';
|
||||
import { IconButton } from './core/IconButton';
|
||||
import { PlainInput } from './core/PlainInput';
|
||||
import { VStack } from './core/Stacks';
|
||||
import { promptCredentials } from './git/credentials';
|
||||
|
||||
interface Props {
|
||||
hide: () => void;
|
||||
}
|
||||
|
||||
// Detect path separator from an existing path (defaults to /)
|
||||
function getPathSeparator(path: string): string {
|
||||
return path.includes('\\') ? '\\' : '/';
|
||||
}
|
||||
|
||||
export function CloneGitRepositoryDialog({ hide }: Props) {
|
  const [url, setUrl] = useState<string>('');
  const [baseDirectory, setBaseDirectory] = useState<string>(appInfo.defaultProjectDir);
  const [directoryOverride, setDirectoryOverride] = useState<string | null>(null);
  const [hasSubdirectory, setHasSubdirectory] = useState(false);
  const [subdirectory, setSubdirectory] = useState<string>('');
  const [isCloning, setIsCloning] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const repoName = extractRepoName(url);
  const sep = getPathSeparator(baseDirectory);
  const computedDirectory = repoName ? `${baseDirectory}${sep}${repoName}` : baseDirectory;
  const directory = directoryOverride ?? computedDirectory;
  const workspaceDirectory =
    hasSubdirectory && subdirectory ? `${directory}${sep}${subdirectory}` : directory;

  const handleSelectDirectory = async () => {
    const dir = await open({
      title: 'Select Directory',
      directory: true,
      multiple: false,
    });
    if (dir != null) {
      setBaseDirectory(dir);
      setDirectoryOverride(null);
    }
  };

  const handleClone = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!url || !directory) return;

    setIsCloning(true);
    setError(null);

    try {
      const result = await gitClone(url, directory, promptCredentials);

      if (result.type === 'needs_credentials') {
        setError(
          result.error ?? 'Authentication failed. Please check your credentials and try again.',
        );
        return;
      }

      // Open the workspace from the cloned directory (or subdirectory)
      await openWorkspaceFromSyncDir.mutateAsync(workspaceDirectory);

      hide();
    } catch (err) {
      setError(String(err));
      showErrorToast({
        id: 'git-clone-error',
        title: 'Clone Failed',
        message: String(err),
      });
    } finally {
      setIsCloning(false);
    }
  };

  return (
    <VStack as="form" space={3} alignItems="start" className="pb-3" onSubmit={handleClone}>
      {error && (
        <Banner color="danger" className="w-full">
          {error}
        </Banner>
      )}

      <PlainInput
        required
        label="Repository URL"
        placeholder="https://github.com/user/repo.git"
        defaultValue={url}
        onChange={setUrl}
      />

      <PlainInput
        label="Directory"
        placeholder={appInfo.defaultProjectDir}
        defaultValue={directory}
        onChange={setDirectoryOverride}
        rightSlot={
          <IconButton
            size="xs"
            className="mr-0.5 !h-auto my-0.5"
            icon="folder"
            title="Browse"
            onClick={handleSelectDirectory}
          />
        }
      />

      <Checkbox
        checked={hasSubdirectory}
        onChange={setHasSubdirectory}
        title="Workspace is in a subdirectory"
        help="Enable if the Yaak workspace files are not at the root of the repository"
      />

      {hasSubdirectory && (
        <PlainInput
          label="Subdirectory"
          placeholder="path/to/workspace"
          defaultValue={subdirectory}
          onChange={setSubdirectory}
        />
      )}

      <Button
        type="submit"
        color="primary"
        className="w-full mt-3"
        disabled={!url || !directory || isCloning}
        isLoading={isCloning}
      >
        {isCloning ? 'Cloning...' : 'Clone Repository'}
      </Button>
    </VStack>
  );
}

function extractRepoName(url: string): string {
  // Handle various Git URL formats:
  // https://github.com/user/repo.git
  // git@github.com:user/repo.git
  // https://github.com/user/repo
  const match = url.match(/\/([^/]+?)(\.git)?$/);
  if (match?.[1]) {
    return match[1];
  }
  // Fallback for SSH-style URLs
  const sshMatch = url.match(/:([^/]+?)(\.git)?$/);
  if (sshMatch?.[1]) {
    return sshMatch[1];
  }
  return '';
}
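
For reference, a quick sketch of how `extractRepoName` behaves for the URL shapes listed in its comments (illustrative calls only, not part of the diff):

```ts
console.log(extractRepoName('https://github.com/user/repo.git')); // "repo"
console.log(extractRepoName('git@github.com:user/repo.git'));     // "repo" (the first regex matches "/repo.git")
console.log(extractRepoName('https://github.com/user/repo'));     // "repo"
console.log(extractRepoName('git@host:repo.git'));                // "repo" (via the SSH-style fallback)
console.log(extractRepoName('not-a-git-url'));                    // "" (neither pattern matches)
```
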
181
src-web/components/DnsOverridesEditor.tsx
Normal file
@@ -0,0 +1,181 @@
import type { DnsOverride, Workspace } from '@yaakapp-internal/models';
import { patchModel } from '@yaakapp-internal/models';
import { useCallback, useId, useMemo } from 'react';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { HStack, VStack } from './core/Stacks';
import { Table, TableBody, TableCell, TableHead, TableHeaderCell, TableRow } from './core/Table';

interface Props {
  workspace: Workspace;
}

interface DnsOverrideWithId extends DnsOverride {
  _id: string;
}

export function DnsOverridesEditor({ workspace }: Props) {
  const reactId = useId();

  // Ensure each override has an internal ID for React keys
  const overridesWithIds = useMemo<DnsOverrideWithId[]>(() => {
    return workspace.settingDnsOverrides.map((override, index) => ({
      ...override,
      _id: `${reactId}-${index}`,
    }));
  }, [workspace.settingDnsOverrides, reactId]);

  const handleChange = useCallback(
    (overrides: DnsOverride[]) => {
      patchModel(workspace, { settingDnsOverrides: overrides });
    },
    [workspace],
  );

  const handleAdd = useCallback(() => {
    const newOverride: DnsOverride = {
      hostname: '',
      ipv4: [''],
      ipv6: [],
      enabled: true,
    };
    handleChange([...workspace.settingDnsOverrides, newOverride]);
  }, [workspace.settingDnsOverrides, handleChange]);

  const handleUpdate = useCallback(
    (index: number, update: Partial<DnsOverride>) => {
      const updated = workspace.settingDnsOverrides.map((o, i) =>
        i === index ? { ...o, ...update } : o,
      );
      handleChange(updated);
    },
    [workspace.settingDnsOverrides, handleChange],
  );

  const handleDelete = useCallback(
    (index: number) => {
      const updated = workspace.settingDnsOverrides.filter((_, i) => i !== index);
      handleChange(updated);
    },
    [workspace.settingDnsOverrides, handleChange],
  );

  return (
    <VStack space={3} className="pb-3">
      <div className="text-text-subtle text-sm">
        Override DNS resolution for specific hostnames. This works like{' '}
        <code className="text-text-subtlest bg-surface-highlight px-1 rounded">/etc/hosts</code>{' '}
        but only for requests made from this workspace.
      </div>

      {overridesWithIds.length > 0 && (
        <Table>
          <TableHead>
            <TableRow>
              <TableHeaderCell className="w-8" />
              <TableHeaderCell>Hostname</TableHeaderCell>
              <TableHeaderCell>IPv4 Address</TableHeaderCell>
              <TableHeaderCell>IPv6 Address</TableHeaderCell>
              <TableHeaderCell className="w-10" />
            </TableRow>
          </TableHead>
          <TableBody>
            {overridesWithIds.map((override, index) => (
              <DnsOverrideRow
                key={override._id}
                override={override}
                onUpdate={(update) => handleUpdate(index, update)}
                onDelete={() => handleDelete(index)}
              />
            ))}
          </TableBody>
        </Table>
      )}

      <HStack>
        <Button size="xs" color="secondary" variant="border" onClick={handleAdd}>
          Add DNS Override
        </Button>
      </HStack>
    </VStack>
  );
}

interface DnsOverrideRowProps {
  override: DnsOverride;
  onUpdate: (update: Partial<DnsOverride>) => void;
  onDelete: () => void;
}

function DnsOverrideRow({ override, onUpdate, onDelete }: DnsOverrideRowProps) {
  const ipv4Value = override.ipv4.join(', ');
  const ipv6Value = override.ipv6.join(', ');

  return (
    <TableRow>
      <TableCell>
        <Checkbox
          hideLabel
          title={override.enabled ? 'Disable override' : 'Enable override'}
          checked={override.enabled ?? true}
          onChange={(enabled) => onUpdate({ enabled })}
        />
      </TableCell>
      <TableCell>
        <PlainInput
          size="sm"
          hideLabel
          label="Hostname"
          placeholder="api.example.com"
          defaultValue={override.hostname}
          onChange={(hostname) => onUpdate({ hostname })}
        />
      </TableCell>
      <TableCell>
        <PlainInput
          size="sm"
          hideLabel
          label="IPv4 addresses"
          placeholder="127.0.0.1"
          defaultValue={ipv4Value}
          onChange={(value) =>
            onUpdate({
              ipv4: value
                .split(',')
                .map((s) => s.trim())
                .filter(Boolean),
            })
          }
        />
      </TableCell>
      <TableCell>
        <PlainInput
          size="sm"
          hideLabel
          label="IPv6 addresses"
          placeholder="::1"
          defaultValue={ipv6Value}
          onChange={(value) =>
            onUpdate({
              ipv6: value
                .split(',')
                .map((s) => s.trim())
                .filter(Boolean),
            })
          }
        />
      </TableCell>
      <TableCell>
        <IconButton
          size="xs"
          iconSize="sm"
          icon="trash"
          title="Delete override"
          onClick={onDelete}
        />
      </TableCell>
    </TableRow>
  );
}
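
As a rough illustration of what this editor writes back to the workspace (the hostnames and addresses below are made up), `settingDnsOverrides` ends up holding entries shaped like the `DnsOverride` fields used above, with the comma-separated IP inputs split and trimmed into arrays:

```ts
// Hypothetical value of workspace.settingDnsOverrides after adding two rows:
const settingDnsOverrides: DnsOverride[] = [
  { hostname: 'api.example.com', ipv4: ['127.0.0.1'], ipv6: [], enabled: true },
  // "10.0.0.5, 10.0.0.6" typed into the IPv4 column becomes ['10.0.0.5', '10.0.0.6']
  { hostname: 'staging.internal', ipv4: ['10.0.0.5', '10.0.0.6'], ipv6: ['::1'], enabled: false },
];
```
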
@@ -83,7 +83,7 @@ export function DynamicForm<T extends Record<string, JsonPrimitive>>({
function FormInputsStack<T extends Record<string, JsonPrimitive>>({
  className,
  ...props
}: FormInputsProps<T> & { className?: string}) {
}: FormInputsProps<T> & { className?: string }) {
  return (
    <VStack
      space={3}
@@ -198,6 +198,9 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
        />
      );
    case 'accordion':
      if (!hasVisibleInputs(input.inputs)) {
        return null;
      }
      return (
        <div key={i + stateKey}>
          <DetailsBanner
@@ -219,6 +222,9 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
        </div>
      );
    case 'h_stack':
      if (!hasVisibleInputs(input.inputs)) {
        return null;
      }
      return (
        <div className="flex flex-wrap sm:flex-nowrap gap-3 items-end" key={i + stateKey}>
          <FormInputs
@@ -233,6 +239,9 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
        </div>
      );
    case 'banner':
      if (!hasVisibleInputs(input.inputs)) {
        return null;
      }
      return (
        <Banner
          key={i + stateKey}
@@ -603,3 +612,8 @@ function KeyValueArg({
    </div>
  );
}

function hasVisibleInputs(inputs: FormInput[] | undefined): boolean {
  if (!inputs) return false;
  return inputs.some((i) => !i.hidden);
}
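
The hunks above make `accordion`, `h_stack`, and `banner` containers render nothing when every child input is hidden. A self-contained sketch of that guard (the `FormInput` type is reduced here to just the `hidden` flag it checks):

```ts
type MaybeHiddenInput = { hidden?: boolean };

// Same logic as hasVisibleInputs() above, with a simplified input type
function hasVisibleInputsSketch(inputs: MaybeHiddenInput[] | undefined): boolean {
  if (!inputs) return false;
  return inputs.some((i) => !i.hidden);
}

console.log(hasVisibleInputsSketch([{ hidden: true }, { hidden: true }])); // false → container is skipped
console.log(hasVisibleInputsSketch([{ hidden: true }, {}]));               // true  → container renders
console.log(hasVisibleInputsSketch(undefined));                            // false
```
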
@@ -1,6 +1,6 @@
import { createWorkspaceModel, foldersAtom, patchModel } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useMemo, useState } from 'react';
import { useMemo } from 'react';
import { useAuthTab } from '../hooks/useAuthTab';
import { useEnvironmentsBreakdown } from '../hooks/useEnvironmentsBreakdown';
import { useHeadersTab } from '../hooks/useHeadersTab';
@@ -37,7 +37,6 @@ export type FolderSettingsTab =
export function FolderSettingsDialog({ folderId, tab }: Props) {
  const folders = useAtomValue(foldersAtom);
  const folder = folders.find((f) => f.id === folderId) ?? null;
  const [activeTab, setActiveTab] = useState<string>(tab ?? TAB_GENERAL);
  const authTab = useAuthTab(TAB_AUTH, folder);
  const headersTab = useHeadersTab(TAB_HEADERS, folder);
  const inheritedHeaders = useInheritedHeaders(folder);
@@ -69,8 +68,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {

  return (
    <Tabs
      value={activeTab}
      onChangeValue={setActiveTab}
      defaultValue={tab ?? TAB_GENERAL}
      label="Folder Settings"
      className="pt-2 pb-2 pl-3 pr-1"
      layout="horizontal"
@@ -113,7 +111,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
          <VStack alignItems="center" space={1.5}>
            <p>
              Override{' '}
              <Link href="https://feedback.yaak.app/help/articles/3284139-environments-and-variables">
              <Link href="https://yaak.app/docs/using-yaak/environments-and-variables">
                Variables
              </Link>{' '}
              for requests within this folder.

@@ -10,7 +10,7 @@ import {
  stateExtensions,
  updateSchema,
} from 'codemirror-json-schema';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { showAlert } from '../lib/alert';
import { showDialog } from '../lib/dialog';
@@ -39,15 +39,15 @@ export function GrpcEditor({
  protoFiles,
  ...extraEditorProps
}: Props) {
  const editorViewRef = useRef<EditorView>(null);
  const [editorView, setEditorView] = useState<EditorView | null>(null);
  const handleInitEditorViewRef = useCallback((h: EditorView | null) => {
    editorViewRef.current = h;
    setEditorView(h);
  }, []);

  // Find the schema for the selected service and method and update the editor
  useEffect(() => {
    if (
      editorViewRef.current == null ||
      editorView == null ||
      services === null ||
      request.service === null ||
      request.method === null
@@ -91,7 +91,7 @@ export function GrpcEditor({
    }

    try {
      updateSchema(editorViewRef.current, JSON.parse(schema));
      updateSchema(editorView, JSON.parse(schema));
    } catch (err) {
      showAlert({
        id: 'grpc-parse-schema-error',
@@ -107,7 +107,7 @@ export function GrpcEditor({
        ),
      });
    }
  }, [services, request.method, request.service]);
  }, [editorView, services, request.method, request.service]);

  const extraExtensions = useMemo(
    () => [
@@ -118,7 +118,7 @@ export function GrpcEditor({
      jsonLanguage.data.of({
        autocomplete: jsonCompletion(),
      }),
      stateExtensions(/** Init with empty schema **/),
      stateExtensions({}),
    ],
    [],
  );

@@ -7,7 +7,6 @@ import { useContainerSize } from '../hooks/useContainerQuery';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
import { resolvedModelName } from '../lib/resolvedModelName';
import { Button } from './core/Button';
@@ -69,11 +68,6 @@ export function GrpcRequestPane({
  const authTab = useAuthTab(TAB_AUTH, activeRequest);
  const metadataTab = useHeadersTab(TAB_METADATA, activeRequest, 'Metadata');
  const inheritedHeaders = useInheritedHeaders(activeRequest);
  const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
    namespace: 'no_sync',
    key: 'grpcRequestActiveTabs',
    fallback: {},
  });
  const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);

  const urlContainerEl = useRef<HTMLDivElement>(null);
@@ -145,14 +139,6 @@ export function GrpcRequestPane({
    [activeRequest.description, authTab, metadataTab],
  );

  const activeTab = activeTabs?.[activeRequest.id];
  const setActiveTab = useCallback(
    async (tab: string) => {
      await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
    },
    [activeRequest.id, setActiveTabs],
  );

  const handleMetadataChange = useCallback(
    (metadata: HttpRequestHeader[]) => patchModel(activeRequest, { metadata }),
    [activeRequest],
@@ -265,12 +251,11 @@ export function GrpcRequestPane({
        </HStack>
      </div>
      <Tabs
        value={activeTab}
        label="Request"
        onChangeValue={setActiveTab}
        tabs={tabs}
        tabListClassName="mt-1 !mb-1.5"
        storageKey="grpc_request_tabs_order"
        storageKey="grpc_request_tabs"
        activeTabKey={activeRequest.id}
      >
        <TabContent value="message">
          <GrpcEditor
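
One pattern worth calling out from the GrpcEditor hunks above: the editor handle is now mirrored into React state rather than living only in a ref, so the schema-update effect re-runs once the view actually mounts. A minimal, self-contained sketch of that idea (the hook name and props here are illustrative; `updateSchema` and `EditorView` match the imports shown in the diff, with the `@codemirror/view` source assumed):

```ts
import { useCallback, useEffect, useState } from 'react';
import type { EditorView } from '@codemirror/view'; // assumed source of the EditorView type
import { updateSchema } from 'codemirror-json-schema';

// Returns an init callback to hand to the editor component. Because the view is kept in
// state, the effect below fires again when the editor mounts, unlike a plain useRef.
export function useSchemaSync(schemaJson: string | null) {
  const [editorView, setEditorView] = useState<EditorView | null>(null);

  const handleInitEditorView = useCallback((view: EditorView | null) => {
    setEditorView(view);
  }, []);

  useEffect(() => {
    if (editorView == null || schemaJson == null) return;
    // Apply the parsed JSON Schema to the mounted editor
    updateSchema(editorView, JSON.parse(schemaJson));
  }, [editorView, schemaJson]);

  return handleInitEditorView;
}
```
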
Some files were not shown because too many files have changed in this diff.