Mirror of https://github.com/mountain-loop/yaak.git, synced 2026-02-02 02:32:07 -05:00

Compare commits (1 commit): fix-redire ... omnara/rep

Commit fa3e6e6508
@@ -37,11 +37,3 @@ The skill generates markdown-formatted release notes following this structure:

**IMPORTANT**: Always add a blank lines around the markdown code fence and output the markdown code block last
**IMPORTANT**: PRs by `@gschier` should not mention the @username

## After Generating Release Notes

After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:

```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
```
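For reference, a minimal invocation of the command described in the removed instructions, with hypothetical values (tag `v1.2.3`, notes read from a local `notes.md`) substituted for the placeholders:

```bash
# Hypothetical example only: replace v1.2.3 and notes.md with the real tag and notes.
gh release create v1.2.3 --draft --prerelease --title "v1.2.3" --notes "$(cat notes.md)"
```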
96  .github/workflows/release.yml  (vendored)
@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [v*]
tags: [ v* ]

jobs:
build-artifacts:
@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
# Windows ARM64
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:
@@ -88,9 +88,6 @@ jobs:
& $exe --version

- run: npm ci
- run: npm run bootstrap
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
- run: npm run lint
- name: Run JS Tests
run: npm test
@@ -102,29 +99,6 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}

- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH

# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH

# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true

- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
@@ -147,9 +121,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
releaseDraft: true
prerelease: true
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'
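A side note on the signing step removed above: once a vendored binary such as `crates-tauri/yaak-app/vendored/node/yaaknode` has been signed, the result can be checked locally with `codesign`. A minimal sketch, assuming a macOS machine and the paths shown in the workflow:

```bash
# Verify the signature, show signing details, and print the embedded entitlements.
# Paths are taken from the workflow above; adjust them to your checkout.
codesign --verify --verbose=2 crates-tauri/yaak-app/vendored/node/yaaknode
codesign --display --verbose=2 crates-tauri/yaak-app/vendored/node/yaaknode
codesign --display --entitlements - crates-tauri/yaak-app/vendored/node/yaaknode
```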
4  Cargo.lock  (generated)
@@ -8075,7 +8075,6 @@ name = "yaak-common"
version = "0.1.0"
dependencies = [
"serde_json",
"tokio",
]

[[package]]
@@ -8122,10 +8121,8 @@ dependencies = [
"serde_json",
"serde_yaml",
"thiserror 2.0.17",
"tokio",
"ts-rs",
"url",
"yaak-common",
"yaak-models",
"yaak-sync",
]
@@ -8152,7 +8149,6 @@ dependencies = [
"tonic",
"tonic-reflection",
"uuid",
"yaak-common",
"yaak-tls",
]

@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
</a>
</p>

@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources

- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://yaak.app/docs)
- [Documentation](https://feedback.yaak.app/help)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)
@@ -15,7 +15,7 @@ use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
|
||||
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "yaakcli")]
|
||||
@@ -149,7 +149,14 @@ async fn render_http_request(
|
||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
|
||||
Ok(HttpRequest {
|
||||
url,
|
||||
url_parameters,
|
||||
headers,
|
||||
body,
|
||||
authentication,
|
||||
..r.to_owned()
|
||||
})
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
@@ -162,10 +169,16 @@ async fn main() {
|
||||
}
|
||||
|
||||
// Use the same app_id for both data directory and keyring
|
||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||
let app_id = if cfg!(debug_assertions) {
|
||||
"app.yaak.desktop.dev"
|
||||
} else {
|
||||
"app.yaak.desktop"
|
||||
};
|
||||
|
||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||
dirs::data_dir()
|
||||
.expect("Could not determine data directory")
|
||||
.join(app_id)
|
||||
});
|
||||
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
@@ -178,7 +191,9 @@ async fn main() {
|
||||
|
||||
// Initialize encryption manager for secure() template function
|
||||
// Use the same app_id as the Tauri app for keyring access
|
||||
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
|
||||
let encryption_manager = Arc::new(
|
||||
EncryptionManager::new(query_manager.clone(), app_id),
|
||||
);
|
||||
|
||||
// Initialize plugin manager for template functions
|
||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||
@@ -188,8 +203,9 @@ async fn main() {
|
||||
let node_bin_path = PathBuf::from("node");
|
||||
|
||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
||||
let plugin_runtime_main =
|
||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| {
|
||||
// Development fallback: look relative to crate root
|
||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||
@@ -210,10 +226,14 @@ async fn main() {
|
||||
// Initialize plugins from database
|
||||
let plugins = db.list_plugins().unwrap_or_default();
|
||||
if !plugins.is_empty() {
|
||||
let errors =
|
||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
||||
let errors = plugin_manager
|
||||
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
||||
.await;
|
||||
for (plugin_dir, error_msg) in errors {
|
||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
||||
eprintln!(
|
||||
"Warning: Failed to initialize plugin '{}': {}",
|
||||
plugin_dir, error_msg
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -229,7 +249,9 @@ async fn main() {
|
||||
}
|
||||
}
|
||||
Commands::Requests { workspace_id } => {
|
||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
||||
let requests = db
|
||||
.list_http_requests(&workspace_id)
|
||||
.expect("Failed to list requests");
|
||||
if requests.is_empty() {
|
||||
println!("No requests found in workspace {}", workspace_id);
|
||||
} else {
|
||||
@@ -239,7 +261,9 @@ async fn main() {
|
||||
}
|
||||
}
|
||||
Commands::Send { request_id } => {
|
||||
let request = db.get_http_request(&request_id).expect("Failed to get request");
|
||||
let request = db
|
||||
.get_http_request(&request_id)
|
||||
.expect("Failed to get request");
|
||||
|
||||
// Resolve environment chain for variable substitution
|
||||
let environment_chain = db
|
||||
@@ -294,13 +318,18 @@ async fn main() {
|
||||
}))
|
||||
} else {
|
||||
// Drain events silently
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
tokio::spawn(async move {
|
||||
while event_rx.recv().await.is_some() {}
|
||||
});
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.expect("Failed to send request");
|
||||
|
||||
// Wait for event handler to finish
|
||||
if let Some(handle) = verbose_handle {
|
||||
@@ -354,13 +383,18 @@ async fn main() {
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
tokio::spawn(async move {
|
||||
while event_rx.recv().await.is_some() {}
|
||||
});
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.expect("Failed to send request");
|
||||
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
@@ -387,7 +421,12 @@ async fn main() {
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Create { workspace_id, name, method, url } => {
|
||||
Commands::Create {
|
||||
workspace_id,
|
||||
name,
|
||||
method,
|
||||
url,
|
||||
} => {
|
||||
let request = HttpRequest {
|
||||
workspace_id,
|
||||
name,
|
||||
|
||||
@@ -2,6 +2,14 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Enable for NodeJS execution -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
|
||||
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
|
||||
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
|
||||
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Enable for NodeJS/V8 JIT compiler -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,6 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,11 +1,9 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::PluginContextExt;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::models::HttpRequestHeader;
|
||||
use yaak_models::queries::workspaces::default_headers;
|
||||
use yaak_plugins::events::GetThemesResponse;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::native_template_functions::{
|
||||
@@ -56,12 +54,7 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let plugin_context = window.plugin_context();
|
||||
Ok(encrypt_secure_template_function(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
template,
|
||||
)?)
|
||||
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -99,17 +92,3 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
|
||||
window.crypto().set_human_key(workspace_id, key)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
window.crypto().disable_encryption(workspace_id)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
|
||||
default_headers()
|
||||
}
|
||||
|
||||
@@ -6,47 +6,33 @@ use crate::error::Result;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::command;
|
||||
use yaak_git::{
|
||||
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
|
||||
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
|
||||
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
|
||||
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
|
||||
git_rm_remote, git_status, git_unstage,
|
||||
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
|
||||
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
|
||||
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
|
||||
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
|
||||
git_unstage,
|
||||
};
|
||||
|
||||
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
|
||||
Ok(git_checkout_branch(dir, branch, force).await?)
|
||||
Ok(git_checkout_branch(dir, branch, force)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch, base).await?)
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_delete_branch(
|
||||
dir: &Path,
|
||||
branch: &str,
|
||||
force: Option<bool>,
|
||||
) -> Result<BranchDeleteResult> {
|
||||
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
|
||||
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_branch(dir, branch)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_remote_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
Ok(git_rename_branch(dir, old_name, new_name).await?)
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch, force)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -64,29 +50,24 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
|
||||
Ok(git_init(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
|
||||
Ok(git_clone(url, dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
|
||||
Ok(git_commit(dir, message).await?)
|
||||
Ok(git_commit(dir, message)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
|
||||
Ok(git_fetch_all(dir).await?)
|
||||
Ok(git_fetch_all(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
|
||||
Ok(git_push(dir).await?)
|
||||
Ok(git_push(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
|
||||
Ok(git_pull(dir).await?)
|
||||
Ok(git_pull(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -107,11 +88,12 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_add_credential(
|
||||
dir: &Path,
|
||||
remote_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<()> {
|
||||
Ok(git_add_credential(remote_url, username, password).await?)
|
||||
Ok(git_add_credential(dir, remote_url, username, password).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::PluginContextExt;
|
||||
use KeyAndValueRef::{Ascii, Binary};
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use yaak_grpc::{KeyAndValueRef, MetadataMap};
|
||||
use yaak_models::models::GrpcRequest;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
use log::debug;
|
||||
use std::sync::OnceLock;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
const NAMESPACE: &str = "analytics";
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::render::render_http_request;
|
||||
use log::{debug, warn};
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicI32, Ordering};
|
||||
use std::time::{Duration, Instant};
|
||||
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
||||
use tokio::fs::{File, create_dir_all};
|
||||
@@ -19,19 +15,22 @@ use yaak_http::client::{
|
||||
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
||||
};
|
||||
use yaak_http::cookies::CookieStore;
|
||||
use yaak_http::manager::{CachedClient, HttpConnectionManager};
|
||||
use yaak_http::manager::HttpConnectionManager;
|
||||
use yaak_http::sender::ReqwestSender;
|
||||
use yaak_http::tee_reader::TeeReader;
|
||||
use yaak_http::transaction::HttpTransaction;
|
||||
use yaak_http::types::{
|
||||
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
||||
};
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use yaak_models::blob_manager::BodyChunk;
|
||||
use yaak_models::models::{
|
||||
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
|
||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
||||
};
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use crate::PluginContextExt;
|
||||
use yaak_plugins::events::{
|
||||
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
||||
};
|
||||
@@ -174,12 +173,7 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
let environment_id = environment.map(|e| e.id);
|
||||
let workspace = window.db().get_workspace(workspace_id)?;
|
||||
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
|
||||
let env_chain =
|
||||
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
|
||||
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
|
||||
@@ -234,13 +228,12 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
None => None,
|
||||
};
|
||||
|
||||
let cached_client = connection_manager
|
||||
let client = connection_manager
|
||||
.get_client(&HttpConnectionOptions {
|
||||
id: plugin_context.id.clone(),
|
||||
validate_certificates: workspace.setting_validate_certificates,
|
||||
proxy: proxy_setting,
|
||||
client_certificate,
|
||||
dns_overrides: workspace.setting_dns_overrides.clone(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -257,7 +250,7 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
|
||||
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
|
||||
let result = execute_transaction(
|
||||
cached_client,
|
||||
client,
|
||||
sendable_request,
|
||||
response_ctx,
|
||||
cancelled_rx.clone(),
|
||||
@@ -317,7 +310,7 @@ pub fn resolve_http_request<R: Runtime>(
|
||||
}
|
||||
|
||||
async fn execute_transaction<R: Runtime>(
|
||||
cached_client: CachedClient,
|
||||
client: reqwest::Client,
|
||||
mut sendable_request: SendableHttpRequest,
|
||||
response_ctx: &mut ResponseContext<R>,
|
||||
mut cancelled_rx: Receiver<bool>,
|
||||
@@ -328,10 +321,7 @@ async fn execute_transaction<R: Runtime>(
|
||||
let workspace_id = response_ctx.response().workspace_id.clone();
|
||||
let is_persisted = response_ctx.is_persisted();
|
||||
|
||||
// Keep a reference to the resolver for DNS timing events
|
||||
let resolver = cached_client.resolver.clone();
|
||||
|
||||
let sender = ReqwestSender::with_client(cached_client.client);
|
||||
let sender = ReqwestSender::with_client(client);
|
||||
let transaction = match cookie_store {
|
||||
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
|
||||
None => HttpTransaction::new(sender),
|
||||
@@ -356,39 +346,21 @@ async fn execute_transaction<R: Runtime>(
|
||||
let (event_tx, mut event_rx) =
|
||||
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
|
||||
|
||||
// Set the event sender on the DNS resolver so it can emit DNS timing events
|
||||
resolver.set_event_sender(Some(event_tx.clone())).await;
|
||||
|
||||
// Shared state to capture DNS timing from the event processing task
|
||||
let dns_elapsed = Arc::new(AtomicI32::new(0));
|
||||
|
||||
// Write events to DB in a task (only for persisted responses)
|
||||
if is_persisted {
|
||||
let response_id = response_id.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let update_source = response_ctx.update_source.clone();
|
||||
let workspace_id = workspace_id.clone();
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
// Capture DNS timing when we see a DNS event
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
|
||||
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// For ephemeral responses, just drain the events but still capture DNS timing
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
});
|
||||
// For ephemeral responses, just drain the events
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
};
|
||||
|
||||
// Capture request body as it's sent (only for persisted responses)
|
||||
@@ -556,14 +528,10 @@ async fn execute_transaction<R: Runtime>(
|
||||
// Final update with closed state and accurate byte count
|
||||
response_ctx.update(|r| {
|
||||
r.elapsed = start.elapsed().as_millis() as i32;
|
||||
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
|
||||
r.content_length = Some(written_bytes as i32);
|
||||
r.state = HttpResponseState::Closed;
|
||||
})?;
|
||||
|
||||
// Clear the event sender from the resolver since this request is done
|
||||
resolver.set_event_sender(None).await;
|
||||
|
||||
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::PluginContextExt;
|
||||
use log::info;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs::read_to_string;
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
use yaak_core::WorkspaceContext;
|
||||
use yaak_models::models::{
|
||||
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
|
||||
};
|
||||
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
|
||||
pub(crate) async fn import_data<R: Runtime>(
|
||||
window: &WebviewWindow<R>,
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
|
||||
use crate::import::import_data;
|
||||
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
|
||||
use crate::notifications::YaakNotifier;
|
||||
use crate::render::{render_grpc_request, render_json_value, render_template};
|
||||
use crate::render::{render_grpc_request, render_template};
|
||||
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
|
||||
use crate::uri_scheme::handle_deep_link;
|
||||
use error::Result as YaakResult;
|
||||
@@ -101,7 +101,6 @@ struct AppMetaData {
|
||||
app_data_dir: String,
|
||||
app_log_dir: String,
|
||||
vendored_plugin_dir: String,
|
||||
default_project_dir: String,
|
||||
feature_updater: bool,
|
||||
feature_license: bool,
|
||||
}
|
||||
@@ -112,7 +111,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
let app_log_dir = app_handle.path().app_log_dir()?;
|
||||
let vendored_plugin_dir =
|
||||
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
|
||||
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
|
||||
Ok(AppMetaData {
|
||||
is_dev: is_dev(),
|
||||
version: app_handle.package_info().version.to_string(),
|
||||
@@ -120,7 +118,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
app_data_dir: app_data_dir.to_string_lossy().to_string(),
|
||||
app_log_dir: app_log_dir.to_string_lossy().to_string(),
|
||||
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
|
||||
default_project_dir: default_project_dir.to_string_lossy().to_string(),
|
||||
feature_license: cfg!(feature = "license"),
|
||||
feature_updater: cfg!(feature = "updater"),
|
||||
})
|
||||
@@ -192,6 +189,7 @@ async fn cmd_grpc_reflect<R: Runtime>(
|
||||
request_id: &str,
|
||||
environment_id: Option<&str>,
|
||||
proto_files: Vec<String>,
|
||||
skip_cache: Option<bool>,
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
grpc_handle: State<'_, Mutex<GrpcHandle>>,
|
||||
@@ -226,21 +224,18 @@ async fn cmd_grpc_reflect<R: Runtime>(
|
||||
let settings = window.db().get_settings();
|
||||
let client_certificate =
|
||||
find_client_certificate(req.url.as_str(), &settings.client_certificates);
|
||||
let proto_files: Vec<PathBuf> =
|
||||
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();
|
||||
|
||||
// Always invalidate cached pool when this command is called, to force re-reflection
|
||||
let mut handle = grpc_handle.lock().await;
|
||||
handle.invalidate_pool(&req.id, &uri, &proto_files);
|
||||
|
||||
Ok(handle
|
||||
Ok(grpc_handle
|
||||
.lock()
|
||||
.await
|
||||
.services(
|
||||
&req.id,
|
||||
&uri,
|
||||
&proto_files,
|
||||
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
|
||||
&metadata,
|
||||
workspace.setting_validate_certificates,
|
||||
client_certificate,
|
||||
skip_cache.unwrap_or(false),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| GenericError(e.to_string()))?)
|
||||
@@ -365,8 +360,10 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
|
||||
let cb = {
|
||||
let cancelled_rx = cancelled_rx.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let environment_chain = environment_chain.clone();
|
||||
let window = window.clone();
|
||||
let base_msg = base_msg.clone();
|
||||
let plugin_manager = plugin_manager.clone();
|
||||
let encryption_manager = encryption_manager.clone();
|
||||
|
||||
@@ -388,12 +385,14 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
|
||||
Ok(IncomingMsg::Message(msg)) => {
|
||||
let window = window.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let base_msg = base_msg.clone();
|
||||
let environment_chain = environment_chain.clone();
|
||||
let plugin_manager = plugin_manager.clone();
|
||||
let encryption_manager = encryption_manager.clone();
|
||||
let msg = block_in_place(|| {
|
||||
tauri::async_runtime::block_on(async {
|
||||
let result = render_template(
|
||||
render_template(
|
||||
msg.as_str(),
|
||||
environment_chain,
|
||||
&PluginTemplateCallback::new(
|
||||
@@ -407,11 +406,24 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
),
|
||||
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
|
||||
)
|
||||
.await;
|
||||
result.expect("Failed to render template")
|
||||
.await
|
||||
.expect("Failed to render template")
|
||||
})
|
||||
});
|
||||
in_msg_tx.try_send(msg.clone()).unwrap();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
app_handle
|
||||
.db()
|
||||
.upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: msg,
|
||||
event_type: GrpcEventType::ClientMessage,
|
||||
..base_msg.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
}
|
||||
Ok(IncomingMsg::Commit) => {
|
||||
maybe_in_msg_tx.take();
|
||||
@@ -458,48 +470,12 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
)?;
|
||||
|
||||
async move {
|
||||
// Create callback for streaming methods that handles both success and error
|
||||
let on_message = {
|
||||
let app_handle = app_handle.clone();
|
||||
let base_event = base_event.clone();
|
||||
let window_label = window.label().to_string();
|
||||
move |result: std::result::Result<String, String>| match result {
|
||||
Ok(msg) => {
|
||||
let _ = app_handle.db().upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: msg,
|
||||
event_type: GrpcEventType::ClientMessage,
|
||||
..base_event.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(&window_label),
|
||||
);
|
||||
}
|
||||
Err(error) => {
|
||||
let _ = app_handle.db().upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: format!("Failed to send message: {}", error),
|
||||
event_type: GrpcEventType::Error,
|
||||
..base_event.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(&window_label),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (maybe_stream, maybe_msg) =
|
||||
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
|
||||
(true, true) => (
|
||||
Some(
|
||||
connection
|
||||
.streaming(
|
||||
&service,
|
||||
&method,
|
||||
in_msg_stream,
|
||||
&metadata,
|
||||
client_cert,
|
||||
on_message.clone(),
|
||||
)
|
||||
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
|
||||
.await,
|
||||
),
|
||||
None,
|
||||
@@ -514,7 +490,6 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
in_msg_stream,
|
||||
&metadata,
|
||||
client_cert,
|
||||
on_message.clone(),
|
||||
)
|
||||
.await,
|
||||
),
|
||||
@@ -1060,54 +1035,14 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
|
||||
#[tauri::command]
|
||||
async fn cmd_get_http_authentication_config<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
plugin_manager: State<'_, PluginManager>,
|
||||
encryption_manager: State<'_, EncryptionManager>,
|
||||
auth_name: &str,
|
||||
values: HashMap<String, JsonPrimitive>,
|
||||
model: AnyModel,
|
||||
environment_id: Option<&str>,
|
||||
_environment_id: Option<&str>,
|
||||
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
|
||||
// Extract workspace_id and folder_id from the model to resolve the environment chain
|
||||
let (workspace_id, folder_id) = match &model {
|
||||
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
|
||||
AnyModel::Workspace(w) => (w.id.clone(), None),
|
||||
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
|
||||
};
|
||||
|
||||
// Resolve environment chain and render the values for token lookup
|
||||
let environment_chain = app_handle.db().resolve_environments(
|
||||
&workspace_id,
|
||||
folder_id.as_deref(),
|
||||
environment_id,
|
||||
)?;
|
||||
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
|
||||
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager_arc,
|
||||
encryption_manager_arc,
|
||||
&window.plugin_context(),
|
||||
RenderPurpose::Preview,
|
||||
);
|
||||
|
||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||
let rendered_json =
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
||||
|
||||
// Convert back to HashMap<String, JsonPrimitive>
|
||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||
|
||||
Ok(plugin_manager
|
||||
.get_http_authentication_config(
|
||||
&window.plugin_context(),
|
||||
auth_name,
|
||||
rendered_values,
|
||||
model.id(),
|
||||
)
|
||||
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -1154,54 +1089,19 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
|
||||
#[tauri::command]
|
||||
async fn cmd_call_http_authentication_action<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
plugin_manager: State<'_, PluginManager>,
|
||||
encryption_manager: State<'_, EncryptionManager>,
|
||||
auth_name: &str,
|
||||
action_index: i32,
|
||||
values: HashMap<String, JsonPrimitive>,
|
||||
model: AnyModel,
|
||||
environment_id: Option<&str>,
|
||||
_environment_id: Option<&str>,
|
||||
) -> YaakResult<()> {
|
||||
// Extract workspace_id and folder_id from the model to resolve the environment chain
|
||||
let (workspace_id, folder_id) = match &model {
|
||||
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
|
||||
AnyModel::Workspace(w) => (w.id.clone(), None),
|
||||
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
|
||||
};
|
||||
|
||||
// Resolve environment chain and render the values
|
||||
let environment_chain = app_handle.db().resolve_environments(
|
||||
&workspace_id,
|
||||
folder_id.as_deref(),
|
||||
environment_id,
|
||||
)?;
|
||||
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
|
||||
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager_arc,
|
||||
encryption_manager_arc,
|
||||
&window.plugin_context(),
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||
let rendered_json =
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
||||
|
||||
// Convert back to HashMap<String, JsonPrimitive>
|
||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||
|
||||
Ok(plugin_manager
|
||||
.call_http_authentication_action(
|
||||
&window.plugin_context(),
|
||||
auth_name,
|
||||
action_index,
|
||||
rendered_values,
|
||||
values,
|
||||
&model.id(),
|
||||
)
|
||||
.await?)
|
||||
@@ -1721,8 +1621,6 @@ pub fn run() {
|
||||
//
|
||||
// Migrated commands
|
||||
crate::commands::cmd_decrypt_template,
|
||||
crate::commands::cmd_default_headers,
|
||||
crate::commands::cmd_disable_encryption,
|
||||
crate::commands::cmd_enable_encryption,
|
||||
crate::commands::cmd_get_themes,
|
||||
crate::commands::cmd_reveal_workspace_key,
|
||||
@@ -1751,13 +1649,10 @@ pub fn run() {
|
||||
git_ext::cmd_git_checkout,
|
||||
git_ext::cmd_git_branch,
|
||||
git_ext::cmd_git_delete_branch,
|
||||
git_ext::cmd_git_delete_remote_branch,
|
||||
git_ext::cmd_git_merge_branch,
|
||||
git_ext::cmd_git_rename_branch,
|
||||
git_ext::cmd_git_status,
|
||||
git_ext::cmd_git_log,
|
||||
git_ext::cmd_git_initialize,
|
||||
git_ext::cmd_git_clone,
|
||||
git_ext::cmd_git_commit,
|
||||
git_ext::cmd_git_fetch_all,
|
||||
git_ext::cmd_git_push,
|
||||
@@ -1769,13 +1664,6 @@ pub fn run() {
|
||||
git_ext::cmd_git_add_remote,
|
||||
git_ext::cmd_git_rm_remote,
|
||||
//
|
||||
// Plugin commands
|
||||
plugins_ext::cmd_plugins_search,
|
||||
plugins_ext::cmd_plugins_install,
|
||||
plugins_ext::cmd_plugins_uninstall,
|
||||
plugins_ext::cmd_plugins_updates,
|
||||
plugins_ext::cmd_plugins_update_all,
|
||||
//
|
||||
// WebSocket commands
|
||||
ws_ext::cmd_ws_upsert_request,
|
||||
ws_ext::cmd_ws_duplicate_request,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use crate::error::Result;
|
||||
use crate::history::get_or_upsert_launch_info;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use chrono::{DateTime, Utc};
|
||||
use log::{debug, info};
|
||||
use reqwest::Method;
|
||||
@@ -9,8 +8,9 @@ use std::time::Instant;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
||||
use ts_rs::TS;
|
||||
use yaak_common::platform::get_os_str;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
// Check for updates every hour
|
||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
use crate::error::Result;
|
||||
use crate::http_request::send_http_request_with_context;
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::render::{render_grpc_request, render_http_request, render_json_value};
|
||||
use crate::window::{CreateWindowConfig, create_window};
|
||||
use crate::{
|
||||
@@ -16,8 +14,11 @@ use tauri::{AppHandle, Emitter, Manager, Runtime};
|
||||
use tauri_plugin_clipboard_manager::ClipboardExt;
|
||||
use tauri_plugin_opener::OpenerExt;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
|
||||
use yaak_models::queries::any_request::AnyRequest;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::error::Error::PluginErr;
|
||||
use yaak_plugins::events::{
|
||||
@@ -31,7 +32,6 @@ use yaak_plugins::events::{
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::plugin_handle::PluginHandle;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
use yaak_templates::{RenderErrorBehavior, RenderOptions};
|
||||
|
||||
pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
@@ -57,10 +57,6 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
Ok(call_frontend(&window, event).await)
|
||||
}
|
||||
InternalEventPayload::PromptFormRequest(_) => {
|
||||
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
|
||||
Ok(call_frontend(&window, event).await)
|
||||
}
|
||||
InternalEventPayload::FindHttpResponsesRequest(req) => {
|
||||
let http_responses = app_handle
|
||||
.db()
|
||||
@@ -170,12 +166,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
)?;
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
);
|
||||
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
|
||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let grpc_request =
|
||||
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
|
||||
@@ -196,12 +187,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
)?;
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
);
|
||||
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
|
||||
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let http_request =
|
||||
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
|
||||
@@ -232,12 +218,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
|
||||
)?;
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
req.purpose,
|
||||
);
|
||||
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
|
||||
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
|
||||
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
|
||||
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))
|
||||
|
||||
@@ -17,7 +17,7 @@ use tauri::path::BaseDirectory;
|
||||
use tauri::plugin::{Builder, TauriPlugin};
|
||||
use tauri::{
|
||||
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
|
||||
is_dev,
|
||||
generate_handler, is_dev,
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
use ts_rs::TS;
|
||||
@@ -132,7 +132,7 @@ impl PluginUpdater {
|
||||
// ============================================================================
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_plugins_search<R: Runtime>(
|
||||
pub(crate) async fn cmd_plugins_search<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
query: &str,
|
||||
) -> Result<PluginSearchResponse> {
|
||||
@@ -141,7 +141,7 @@ pub async fn cmd_plugins_search<R: Runtime>(
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_plugins_install<R: Runtime>(
|
||||
pub(crate) async fn cmd_plugins_install<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
name: &str,
|
||||
version: Option<String>,
|
||||
@@ -163,7 +163,7 @@ pub async fn cmd_plugins_install<R: Runtime>(
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_plugins_uninstall<R: Runtime>(
|
||||
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
|
||||
plugin_id: &str,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<Plugin> {
|
||||
@@ -174,7 +174,7 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_plugins_updates<R: Runtime>(
|
||||
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
) -> Result<PluginUpdatesResponse> {
|
||||
let http_client = yaak_api_client(&app_handle)?;
|
||||
@@ -183,7 +183,7 @@ pub async fn cmd_plugins_updates<R: Runtime>(
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_plugins_update_all<R: Runtime>(
|
||||
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<Vec<PluginNameVersion>> {
|
||||
let http_client = yaak_api_client(window.app_handle())?;
|
||||
@@ -233,6 +233,13 @@ pub async fn cmd_plugins_update_all<R: Runtime>(
|
||||
|
||||
pub fn init<R: Runtime>() -> TauriPlugin<R> {
|
||||
Builder::new("yaak-plugins")
|
||||
.invoke_handler(generate_handler![
|
||||
cmd_plugins_search,
|
||||
cmd_plugins_install,
|
||||
cmd_plugins_uninstall,
|
||||
cmd_plugins_updates,
|
||||
cmd_plugins_update_all
|
||||
])
|
||||
.setup(|app_handle, _| {
|
||||
// Resolve paths for plugin manager
|
||||
let vendored_plugin_dir = app_handle
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::path::PathBuf;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use log::{debug, error, info, warn};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};
|
||||
@@ -12,6 +11,7 @@ use tauri_plugin_updater::{Update, UpdaterExt};
|
||||
use tokio::task::block_in_place;
|
||||
use tokio::time::sleep;
|
||||
use ts_rs::TS;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::generate_id;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::import::import_data;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::PluginContextExt;
|
||||
use log::{info, warn};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
use yaak_models::util::generate_id;
|
||||
use yaak_plugins::events::{Color, ShowToastRequest};
|
||||
use yaak_plugins::install::download_and_install;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||
app_handle: &AppHandle<R>,
|
||||
@@ -55,8 +55,7 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
|
||||
&plugin_context,
|
||||
name,
|
||||
version,
|
||||
)
|
||||
.await?;
|
||||
).await?;
|
||||
app_handle.emit(
|
||||
"show_toast",
|
||||
ShowToastRequest {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::window_menu::app_menu;
|
||||
use log::{info, warn};
|
||||
use rand::random;
|
||||
@@ -9,6 +8,7 @@ use tauri::{
|
||||
};
|
||||
use tauri_plugin_opener::OpenerExt;
|
||||
use tokio::sync::mpsc;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
|
||||
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
|
||||
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
//! WebSocket Tauri command wrappers
|
||||
//! These wrap the core yaak-ws functionality for Tauri IPC.
|
||||
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::PluginContextExt;
|
||||
use http::HeaderMap;
|
||||
use log::{debug, info, warn};
|
||||
use std::str::FromStr;
|
||||
@@ -56,10 +56,9 @@ pub async fn cmd_ws_delete_request<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketRequest> {
|
||||
Ok(app_handle.db().delete_websocket_request_by_id(
|
||||
request_id,
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
Ok(app_handle
|
||||
.db()
|
||||
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -68,10 +67,12 @@ pub async fn cmd_ws_delete_connection<R: Runtime>(
|
||||
app_handle: AppHandle<R>,
|
||||
window: WebviewWindow<R>,
|
||||
) -> Result<WebsocketConnection> {
|
||||
Ok(app_handle.db().delete_websocket_connection_by_id(
|
||||
connection_id,
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
Ok(app_handle
|
||||
.db()
|
||||
.delete_websocket_connection_by_id(
|
||||
connection_id,
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -295,10 +296,8 @@ pub async fn cmd_ws_connect<R: Runtime>(
|
||||
)
|
||||
.await?;
|
||||
for header in plugin_result.set_headers.unwrap_or_default() {
|
||||
match (
|
||||
http::HeaderName::from_str(&header.name),
|
||||
HeaderValue::from_str(&header.value),
|
||||
) {
|
||||
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
|
||||
{
|
||||
(Ok(name), Ok(value)) => {
|
||||
headers.insert(name, value);
|
||||
}
|
||||
|
||||
@@ -44,8 +44,8 @@
|
||||
"vendored/protoc/include",
|
||||
"vendored/plugins",
|
||||
"vendored/plugin-runtime",
|
||||
"vendored/node/yaaknode*",
|
||||
"vendored/protoc/yaakprotoc*"
|
||||
"vendored/node/yaaknode",
|
||||
"vendored/protoc/yaakprotoc"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,10 +8,10 @@ use std::time::Duration;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
|
||||
use ts_rs::TS;
|
||||
use yaak_common::platform::get_os_str;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
use yaak_models::db_context::DbContext;
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
/// Extension trait for accessing the QueryManager from Tauri Manager types.
|
||||
/// This is needed temporarily until all crates are refactored to not use Tauri.
|
||||
|
||||
@@ -6,4 +6,3 @@ publish = false

[dependencies]
serde_json = { workspace = true }
tokio = { workspace = true, features = ["process"] }

@@ -1,16 +0,0 @@
use std::ffi::OsStr;

#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;

/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
#[allow(unused_mut)]
let mut cmd = tokio::process::Command::new(program);
#[cfg(target_os = "windows")]
{
use std::os::windows::process::CommandExt;
cmd.creation_flags(CREATE_NO_WINDOW);
}
cmd
}
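For context, the `new_xplatform_command` helper removed above would be called in place of `tokio::process::Command::new`. A hypothetical call site, not taken from the diff (the `"node"` program name is only an illustration):

```rust
// Hypothetical usage of the removed helper: spawn a vendored binary and capture
// its output without flashing a console window on Windows.
async fn print_node_version() -> std::io::Result<()> {
    let output = new_xplatform_command("node").arg("--version").output().await?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```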
@@ -1,3 +1,2 @@
pub mod command;
pub mod platform;
pub mod serde;
@@ -11,7 +11,3 @@ export function revealWorkspaceKey(workspaceId: string) {
|
||||
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
|
||||
return invoke<void>('cmd_set_workspace_key', args);
|
||||
}
|
||||
|
||||
export function disableEncryption(workspaceId: string) {
|
||||
return invoke<void>('cmd_disable_encryption', { workspaceId });
|
||||
}
|
||||
|
||||
@@ -115,35 +115,6 @@ impl EncryptionManager {
        self.set_workspace_key(workspace_id, &wkey)
    }

    pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
        info!("Disabling encryption for {workspace_id}");

        self.query_manager.with_tx::<(), Error>(|tx| {
            let workspace = tx.get_workspace(workspace_id)?;
            let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;

            // Clear encryption challenge on workspace
            tx.upsert_workspace(
                &Workspace { encryption_key_challenge: None, ..workspace },
                &UpdateSource::Background,
            )?;

            // Clear encryption key on workspace meta
            tx.upsert_workspace_meta(
                &WorkspaceMeta { encryption_key: None, ..workspace_meta },
                &UpdateSource::Background,
            )?;

            Ok(())
        })?;

        // Remove from cache
        let mut cache = self.cached_workspace_keys.lock().unwrap();
        cache.remove(workspace_id);

        Ok(())
    }

    fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
        {
            let cache = self.cached_workspace_keys.lock().unwrap();
@@ -12,9 +12,7 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = "0.9.34"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["io-util"] }
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
url = "2"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-sync = { workspace = true }
crates/yaak-git/bindings/gen_git.ts (generated, 4 changes)
@@ -1,10 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncModel } from "./gen_models";

export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };

export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };

export type GitAuthor = { name: string | null, email: string | null, };

export type GitCommit = { author: GitAuthor, when: string, message: string | null, };
crates/yaak-git/bindings/gen_models.ts (generated, 4 changes)
@@ -1,7 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.

export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };

export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };

export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };

@@ -20,4 +18,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ

export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
@@ -3,10 +3,9 @@ import { invoke } from '@tauri-apps/api/core';
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
import { queryClient } from '@yaakapp/app/lib/queryClient';
import { useMemo } from 'react';
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';

export * from './bindings/gen_git';
export * from './bindings/gen_models';

export interface GitCredentials {
  username: string;

@@ -60,6 +59,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
      if (creds == null) throw new Error('Canceled');

      await invoke('cmd_git_add_credential', {
        dir,
        remoteUrl: result.url,
        username: creds.username,
        password: creds.password,

@@ -90,31 +90,21 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
      mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
      onSuccess,
    }),
    createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
    branch: createFastMutation<void, string, { branch: string }>({
      mutationKey: ['git', 'branch', dir],
      mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
      onSuccess,
    }),
    mergeBranch: createFastMutation<void, string, { branch: string }>({
    mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
      mutationKey: ['git', 'merge', dir],
      mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
      onSuccess,
    }),
    deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
    deleteBranch: createFastMutation<void, string, { branch: string }>({
      mutationKey: ['git', 'delete-branch', dir],
      mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
      onSuccess,
    }),
    deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
      mutationKey: ['git', 'delete-remote-branch', dir],
      mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
      onSuccess,
    }),
    renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
      mutationKey: ['git', 'rename-branch', dir],
      mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
      onSuccess,
    }),
    checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
      mutationKey: ['git', 'checkout', dir],
      mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),

@@ -154,6 +144,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
      if (creds == null) throw new Error('Canceled');

      await invoke('cmd_git_add_credential', {
        dir,
        remoteUrl: result.url,
        username: creds.username,
        password: creds.password,

@@ -175,28 +166,3 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
async function getRemotes(dir: string) {
  return invoke<GitRemote[]>('cmd_git_remotes', { dir });
}

/**
 * Clone a git repository, prompting for credentials if needed.
 */
export async function gitClone(
  url: string,
  dir: string,
  promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
): Promise<CloneResult> {
  const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
  if (result.type !== 'needs_credentials') return result;

  // Prompt for credentials
  const creds = await promptCredentials({ url: result.url, error: result.error });
  if (creds == null) return {type: 'cancelled'};

  // Store credentials and retry
  await invoke('cmd_git_add_credential', {
    remoteUrl: result.url,
    username: creds.username,
    password: creds.password,
  });

  return invoke<CloneResult>('cmd_git_clone', { url, dir });
}
@@ -1,30 +1,38 @@
use crate::error::Error::GitNotFound;
use crate::error::Result;
use std::path::Path;
use std::process::Stdio;
use tokio::process::Command;
use yaak_common::command::new_xplatform_command;
use std::process::{Command, Stdio};

/// Create a git command that runs in the specified directory
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
    let mut cmd = new_binary_command_global().await?;
    cmd.arg("-C").arg(dir);
    Ok(cmd)
}
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;

/// Create a git command without a specific directory (for global operations)
pub(crate) async fn new_binary_command_global() -> Result<Command> {
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;

pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
    // 1. Probe that `git` exists and is runnable
    let mut probe = new_xplatform_command("git");
    let mut probe = Command::new("git");
    probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());

    let status = probe.status().await.map_err(|_| GitNotFound)?;
    #[cfg(target_os = "windows")]
    {
        probe.creation_flags(CREATE_NO_WINDOW);
    }

    let status = probe.status().map_err(|_| GitNotFound)?;

    if !status.success() {
        return Err(GitNotFound);
    }

    // 2. Build the reusable git command
    let cmd = new_xplatform_command("git");
    let mut cmd = Command::new("git");
    cmd.arg("-C").arg(dir);

    #[cfg(target_os = "windows")]
    {
        cmd.creation_flags(CREATE_NO_WINDOW);
    }

    Ok(cmd)
}
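The hunk above interleaves the async (`tokio`) and blocking (`std::process`) variants of the same probe-then-build pattern. A condensed standalone sketch of the blocking variant (illustrative, not part of the commit; the error type is simplified to `std::io::Error`):

```rust
use std::path::Path;
use std::process::{Command, Stdio};

// Probe that `git` is runnable, then return a command scoped to `dir`.
fn git_command(dir: &Path) -> std::io::Result<Command> {
    let status = Command::new("git")
        .arg("--version")
        .stdin(Stdio::null())
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()?;
    if !status.success() {
        return Err(std::io::Error::new(std::io::ErrorKind::NotFound, "git not found"));
    }
    let mut cmd = Command::new("git");
    cmd.arg("-C").arg(dir);
    Ok(cmd)
}
```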
@@ -1,153 +1,99 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;

use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::merge::do_merge;
use crate::repository::open_repo;
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
use git2::BranchType;
use git2::build::CheckoutBuilder;
use log::info;
use std::path::Path;

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum BranchDeleteResult {
    Success { message: String },
    NotFullyMerged,
}
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
    if branch_name.starts_with("origin/") {
        return git_checkout_remote_branch(dir, branch_name, force);
    }

pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
    let branch_name = branch_name.trim_start_matches("origin/");
    let repo = open_repo(dir)?;
    let branch = get_branch_by_name(&repo, branch_name)?;
    let branch_ref = branch.into_reference();
    let branch_tree = branch_ref.peel_to_tree()?;

    let mut args = vec!["checkout"];
    let mut options = CheckoutBuilder::default();
    if force {
        args.push("--force");
        options.force();
    }
    args.push(branch_name);

    let out = new_binary_command(dir)
        .await?
        .args(&args)
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);

    if !out.status.success() {
        return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
    }
    repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
    repo.set_head(branch_ref.name().unwrap())?;

    Ok(branch_name.to_string())
}

pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
    let mut cmd = new_binary_command(dir).await?;
    cmd.arg("branch").arg(name);
    if let Some(base_branch) = base {
        cmd.arg(base_branch);
    }
pub(crate) fn git_checkout_remote_branch(
    dir: &Path,
    branch_name: &str,
    force: bool,
) -> Result<String> {
    let branch_name = branch_name.trim_start_matches("origin/");
    let repo = open_repo(dir)?;

    let out =
        cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
    let refname = format!("refs/remotes/origin/{}", branch_name);
    let remote_ref = repo.find_reference(&refname)?;
    let commit = remote_ref.peel_to_commit()?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);
    let mut new_branch = repo.branch(branch_name, &commit, false)?;
    let upstream_name = format!("origin/{}", branch_name);
    new_branch.set_upstream(Some(&upstream_name))?;

    if !out.status.success() {
        return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
    }

    Ok(())
    git_checkout_branch(dir, branch_name, force)
}

pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
    let mut cmd = new_binary_command(dir).await?;

    let out =
        if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
            .output()
            .await
            .map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);

    if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
        return Ok(BranchDeleteResult::NotFullyMerged);
    }

    if !out.status.success() {
        return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
    }

    Ok(BranchDeleteResult::Success { message: combined })
}

pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
    let out = new_binary_command(dir)
        .await?
        .args(["merge", name])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);

    if !out.status.success() {
        // Check for merge conflicts
        if combined.to_lowercase().contains("conflict") {
            return Err(GenericError(
                "Merge conflicts detected. Please resolve them manually.".to_string(),
            ));
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
    let repo = open_repo(dir)?;
    let head = match repo.head() {
        Ok(h) => h,
        Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
            let msg = "Cannot create branch when there are no commits";
            return Err(GenericError(msg.into()));
        }
        return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
    }
        Err(e) => return Err(e.into()),
    };
    let head = head.peel_to_commit()?;

    repo.branch(name, &head, false)?;

    Ok(())
}

pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
    // Remote branch names come in as "origin/branch-name", extract the branch name
    let branch_name = name.trim_start_matches("origin/");
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
    let repo = open_repo(dir)?;
    let mut branch = get_branch_by_name(&repo, name)?;

    let out = new_binary_command(dir)
        .await?
        .args(["push", "origin", "--delete", branch_name])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
    if branch.is_head() {
        info!("Deleting head branch");
        let branches = repo.branches(Some(BranchType::Local))?;
        let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
        let other_branch = match other_branch {
            None => return Err(GenericError("Cannot delete only branch".into())),
            Some(b) => bytes_to_string(b.0.name_bytes()?)?,
        };

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);

    if !out.status.success() {
        return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
        git_checkout_branch(dir, &other_branch, true)?;
    }

    branch.delete()?;

    Ok(())
}

pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
    let out = new_binary_command(dir)
        .await?
        .args(["branch", "-m", old_name, new_name])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
    let repo = open_repo(dir)?;
    let local_branch = get_current_branch(&repo)?.unwrap();

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);
    let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
    let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;

    if !out.status.success() {
        return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
    }
    do_merge(&repo, &local_branch, &commit_to_merge)?;

    Ok(())
}
@@ -1,53 +0,0 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
    Success,
    Cancelled,
    NeedsCredentials { url: String, error: Option<String> },
}

pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
    let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
    fs::create_dir_all(parent)
        .map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
    let mut cmd = new_binary_command(parent).await?;
    cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");

    let out =
        cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);
    let combined_lower = combined.to_lowercase();

    info!("Cloned status={}: {combined}", out.status);

    if !out.status.success() {
        // Check for credentials error
        if combined_lower.contains("could not read") {
            return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
        }
        if combined_lower.contains("unable to access")
            || combined_lower.contains("authentication failed")
        {
            return Ok(CloneResult::NeedsCredentials {
                url: url.to_string(),
                error: Some(combined.to_string()),
            });
        }
        return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
    }

    Ok(CloneResult::Success)
}
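A minimal sketch of how a caller might branch on the `CloneResult` enum defined above (illustrative, not part of the commit; `prompt_for_credentials` is a hypothetical UI hook, and `Result`, `git_clone`, and `CloneResult` are assumed to be the crate's own items):

```rust
use std::path::Path;

// Hypothetical prompt; a real app would show a credentials dialog here.
fn prompt_for_credentials(url: &str, error: Option<&str>) -> Option<(String, String)> {
    let _ = (url, error);
    None
}

async fn clone_with_prompt(url: &str, dir: &Path) -> Result<bool> {
    Ok(match git_clone(url, dir).await? {
        CloneResult::Success => true,
        CloneResult::Cancelled => false,
        CloneResult::NeedsCredentials { url, error } => {
            // A real caller would store the credentials (see `git_add_credential`
            // below) and call `git_clone` again, mirroring the TypeScript
            // `gitClone` helper earlier in this diff.
            prompt_for_credentials(&url, error.as_deref()).is_some()
        }
    })
}
```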
@@ -3,9 +3,8 @@ use crate::error::Error::GenericError;
use log::info;
use std::path::Path;

pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
    let out =
        new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;
pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
    let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
@@ -1,19 +1,24 @@
use crate::binary::new_binary_command_global;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::io::Write;
use std::path::Path;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use url::Url;

pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
pub async fn git_add_credential(
    dir: &Path,
    remote_url: &str,
    username: &str,
    password: &str,
) -> Result<()> {
    let url = Url::parse(remote_url)
        .map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
    let protocol = url.scheme();
    let host = url.host_str().unwrap();
    let path = Some(url.path());

    let mut child = new_binary_command_global()
        .await?
    let mut child = new_binary_command(dir)?
        .args(["credential", "approve"])
        .stdin(Stdio::piped())
        .stdout(Stdio::null())

@@ -21,21 +26,19 @@ pub async fn git_add_credential(remote_url: &str, username: &str, password: &str

    {
        let stdin = child.stdin.as_mut().unwrap();
        stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
        stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
        writeln!(stdin, "protocol={}", protocol)?;
        writeln!(stdin, "host={}", host)?;
        if let Some(path) = path {
            if !path.is_empty() {
                stdin
                    .write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
                    .await?;
                writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
            }
        }
        stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
        stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
        stdin.write_all(b"\n").await?; // blank line terminator
        writeln!(stdin, "username={}", username)?;
        writeln!(stdin, "password={}", password)?;
        writeln!(stdin)?; // blank line terminator
    }

    let status = child.wait().await?;
    let status = child.wait()?;
    if !status.success() {
        return Err(GenericError("Failed to approve git credential".to_string()));
    }
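Both variants above feed `git credential approve` the same key=value payload on stdin, terminated by a blank line. For illustration only (placeholder values, not part of the commit):

```rust
// Example of the payload `git credential approve` expects on stdin,
// terminated by a blank line (all values are placeholders).
const EXAMPLE_CREDENTIAL_PAYLOAD: &str =
    "protocol=https\nhost=github.com\npath=org/repo.git\nusername=alice\npassword=s3cret\n\n";
```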
@@ -3,12 +3,10 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;

pub async fn git_fetch_all(dir: &Path) -> Result<()> {
    let out = new_binary_command(dir)
        .await?
pub fn git_fetch_all(dir: &Path) -> Result<()> {
    let out = new_binary_command(dir)?
        .args(["fetch", "--all", "--prune", "--tags"])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
@@ -1,14 +1,13 @@
mod add;
mod binary;
mod branch;
mod clone;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;

mod merge;
mod pull;
mod push;
mod remotes;

@@ -19,11 +18,7 @@ mod util;

// Re-export all git functions for external use
pub use add::git_add;
pub use branch::{
    BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
    git_delete_remote_branch, git_merge_branch, git_rename_branch,
};
pub use clone::{CloneResult, git_clone};
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
pub use commit::git_commit;
pub use credential::git_add_credential;
pub use fetch::git_fetch_all;
crates/yaak-git/src/merge.rs (new file, 135 lines)
@@ -0,0 +1,135 @@
use crate::error::Error::MergeConflicts;
use crate::util::bytes_to_string;
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
use log::{debug, info};

pub(crate) fn do_merge(
    repo: &Repository,
    local_branch: &Branch,
    commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
    debug!("Merging remote branches");
    let analysis = repo.merge_analysis(&[&commit_to_merge])?;

    if analysis.0.is_fast_forward() {
        let refname = bytes_to_string(local_branch.get().name_bytes())?;
        match repo.find_reference(&refname) {
            Ok(mut r) => {
                merge_fast_forward(repo, &mut r, &commit_to_merge)?;
            }
            Err(_) => {
                // The branch doesn't exist, so set the reference to the commit directly. Usually
                // this is because you are pulling into an empty repository.
                repo.reference(
                    &refname,
                    commit_to_merge.id(),
                    true,
                    &format!("Setting {} to {}", refname, commit_to_merge.id()),
                )?;
                repo.set_head(&refname)?;
                repo.checkout_head(Some(
                    git2::build::CheckoutBuilder::default()
                        .allow_conflicts(true)
                        .conflict_style_merge(true)
                        .force(),
                ))?;
            }
        };
    } else if analysis.0.is_normal() {
        let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
        merge_normal(repo, &head_commit, commit_to_merge)?;
    } else {
        debug!("Skipping merge. Nothing to do")
    }

    Ok(())
}

pub(crate) fn merge_fast_forward(
    repo: &Repository,
    local_reference: &mut Reference,
    remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing fast forward");
    let name = match local_reference.name() {
        Some(s) => s.to_string(),
        None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
    };
    let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
    local_reference.set_target(remote_commit.id(), &msg)?;
    repo.set_head(&name)?;
    repo.checkout_head(Some(
        git2::build::CheckoutBuilder::default()
            // For some reason, the force is required to make the working directory actually get
            // updated I suspect we should be adding some logic to handle dirty working directory
            // states, but this is just an example so maybe not.
            .force(),
    ))?;
    Ok(())
}

pub(crate) fn merge_normal(
    repo: &Repository,
    local: &AnnotatedCommit,
    remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing normal merge");
    let local_tree = repo.find_commit(local.id())?.tree()?;
    let remote_tree = repo.find_commit(remote.id())?.tree()?;
    let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;

    let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;

    if idx.has_conflicts() {
        let conflicts = idx.conflicts()?;
        for conflict in conflicts {
            if let Ok(conflict) = conflict {
                print_conflict(&conflict);
            }
        }
        return Err(MergeConflicts);
    }

    let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
    // now create the merge commit
    let msg = format!("Merge: {} into {}", remote.id(), local.id());
    let sig = repo.signature()?;
    let local_commit = repo.find_commit(local.id())?;
    let remote_commit = repo.find_commit(remote.id())?;

    // Do our merge commit and set current branch head to that commit.
    let _merge_commit = repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        &msg,
        &result_tree,
        &[&local_commit, &remote_commit],
    )?;

    // Set working tree to match head.
    repo.checkout_head(None)?;

    Ok(())
}

fn print_conflict(conflict: &git2::IndexConflict) {
    let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
    let ours = conflict.our.as_ref().map(path_from_index_entry);
    let theirs = conflict.their.as_ref().map(path_from_index_entry);

    println!("Conflict detected:");
    if let Some(path) = ancestor {
        println!(" Common ancestor: {:?}", path);
    }
    if let Some(path) = ours {
        println!(" Ours: {:?}", path);
    }
    if let Some(path) = theirs {
        println!(" Theirs: {:?}", path);
    }
}

fn path_from_index_entry(entry: &IndexEntry) -> String {
    String::from_utf8_lossy(entry.path.as_slice()).into_owned()
}
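The `do_merge` helper above is driven by `git_merge_branch` in branch.rs (shown earlier in this diff). A condensed sketch of that call path, assuming the crate's `open_repo`, `get_current_branch`, `get_branch_by_name`, and `GenericError` are in scope (illustrative, not part of the commit):

```rust
fn merge_named_branch(dir: &std::path::Path, name: &str) -> crate::error::Result<()> {
    let repo = open_repo(dir)?;
    // The diff's git_merge_branch unwraps here; an explicit error is used for clarity.
    let local_branch = get_current_branch(&repo)?
        .ok_or_else(|| GenericError("No current branch".into()))?;
    let to_merge = get_branch_by_name(&repo, name)?.into_reference();
    let to_merge = repo.reference_to_annotated_commit(&to_merge)?;
    do_merge(&repo, &local_branch, &to_merge)
}
```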
@@ -17,25 +17,17 @@ pub enum PullResult {
    NeedsCredentials { url: String, error: Option<String> },
}

pub async fn git_pull(dir: &Path) -> Result<PullResult> {
    // Extract all git2 data before any await points (git2 types are not Send)
    let (branch_name, remote_name, remote_url) = {
        let repo = open_repo(dir)?;
        let branch_name = get_current_branch_name(&repo)?;
        let remote = get_default_remote_in_repo(&repo)?;
        let remote_name =
            remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
        let remote_url =
            remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
        (branch_name, remote_name, remote_url)
    };
pub fn git_pull(dir: &Path) -> Result<PullResult> {
    let repo = open_repo(dir)?;
    let branch_name = get_current_branch_name(&repo)?;
    let remote = get_default_remote_in_repo(&repo)?;
    let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
    let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;

    let out = new_binary_command(dir)
        .await?
    let out = new_binary_command(dir)?
        .args(["pull", &remote_name, &branch_name])
        .env("GIT_TERMINAL_PROMPT", "0")
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
@@ -17,25 +17,17 @@ pub enum PushResult {
    NeedsCredentials { url: String, error: Option<String> },
}

pub async fn git_push(dir: &Path) -> Result<PushResult> {
    // Extract all git2 data before any await points (git2 types are not Send)
    let (branch_name, remote_name, remote_url) = {
        let repo = open_repo(dir)?;
        let branch_name = get_current_branch_name(&repo)?;
        let remote = get_default_remote_for_push_in_repo(&repo)?;
        let remote_name =
            remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
        let remote_url =
            remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
        (branch_name, remote_name, remote_url)
    };
pub fn git_push(dir: &Path) -> Result<PushResult> {
    let repo = open_repo(dir)?;
    let branch_name = get_current_branch_name(&repo)?;
    let remote = get_default_remote_for_push_in_repo(&repo)?;
    let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
    let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;

    let out = new_binary_command(dir)
        .await?
    let out = new_binary_command(dir)?
        .args(["push", &remote_name, &branch_name])
        .env("GIT_TERMINAL_PROMPT", "0")
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git push: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
@@ -47,6 +47,10 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
    Ok(branches)
}

pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
    Ok(repo.find_branch(name, BranchType::Local)?)
}

pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
    Ok(String::from_utf8(bytes.to_vec())?)
}
@@ -22,6 +22,5 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-common = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"
@@ -115,18 +115,14 @@ impl GrpcConnection {
|
||||
Ok(client.unary(req, path, codec).await?)
|
||||
}
|
||||
|
||||
pub async fn streaming<F>(
|
||||
pub async fn streaming(
|
||||
&self,
|
||||
service: &str,
|
||||
method: &str,
|
||||
stream: ReceiverStream<String>,
|
||||
metadata: &BTreeMap<String, String>,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
on_message: F,
|
||||
) -> Result<Response<Streaming<DynamicMessage>>>
|
||||
where
|
||||
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
|
||||
{
|
||||
) -> Result<Response<Streaming<DynamicMessage>>> {
|
||||
let method = &self.method(&service, &method).await?;
|
||||
let mapped_stream = {
|
||||
let input_message = method.input();
|
||||
@@ -135,39 +131,31 @@ impl GrpcConnection {
|
||||
let md = metadata.clone();
|
||||
let use_reflection = self.use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
stream
|
||||
.then(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
let on_message = on_message.clone();
|
||||
let json_clone = json.clone();
|
||||
async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
stream.filter_map(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => {
|
||||
on_message(Ok(json_clone));
|
||||
Some(m)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
on_message(Err(e.to_string()));
|
||||
None
|
||||
}
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => Some(m),
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.filter_map(|x| x)
|
||||
})
|
||||
};
|
||||
|
||||
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
|
||||
@@ -181,18 +169,14 @@ impl GrpcConnection {
|
||||
Ok(client.streaming(req, path, codec).await?)
|
||||
}
|
||||
|
||||
pub async fn client_streaming<F>(
|
||||
pub async fn client_streaming(
|
||||
&self,
|
||||
service: &str,
|
||||
method: &str,
|
||||
stream: ReceiverStream<String>,
|
||||
metadata: &BTreeMap<String, String>,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
on_message: F,
|
||||
) -> Result<Response<DynamicMessage>>
|
||||
where
|
||||
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
|
||||
{
|
||||
) -> Result<Response<DynamicMessage>> {
|
||||
let method = &self.method(&service, &method).await?;
|
||||
let mapped_stream = {
|
||||
let input_message = method.input();
|
||||
@@ -201,39 +185,31 @@ impl GrpcConnection {
|
||||
let md = metadata.clone();
|
||||
let use_reflection = self.use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
stream
|
||||
.then(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
let on_message = on_message.clone();
|
||||
let json_clone = json.clone();
|
||||
async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
stream.filter_map(move |json| {
|
||||
let pool = pool.clone();
|
||||
let uri = uri.clone();
|
||||
let input_message = input_message.clone();
|
||||
let md = md.clone();
|
||||
let use_reflection = use_reflection.clone();
|
||||
let client_cert = client_cert.clone();
|
||||
tokio::runtime::Handle::current().block_on(async move {
|
||||
if use_reflection {
|
||||
if let Err(e) =
|
||||
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
|
||||
{
|
||||
warn!("Failed to resolve Any types: {e}");
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => {
|
||||
on_message(Ok(json_clone));
|
||||
Some(m)
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
on_message(Err(e.to_string()));
|
||||
None
|
||||
}
|
||||
}
|
||||
let mut de = Deserializer::from_str(&json);
|
||||
match DynamicMessage::deserialize(input_message, &mut de) {
|
||||
Ok(m) => Some(m),
|
||||
Err(e) => {
|
||||
warn!("Failed to deserialize message: {e}");
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
.filter_map(|x| x)
|
||||
})
|
||||
};
|
||||
|
||||
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
|
||||
@@ -340,9 +316,10 @@ impl GrpcHandle {
|
||||
metadata: &BTreeMap<String, String>,
|
||||
validate_certificates: bool,
|
||||
client_cert: Option<ClientCertificateConfig>,
|
||||
skip_cache: bool,
|
||||
) -> Result<Vec<ServiceDefinition>> {
|
||||
// Ensure we have a pool; reflect only if missing
|
||||
if self.get_pool(id, uri, proto_files).is_none() {
|
||||
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
|
||||
info!("Reflecting gRPC services for {} at {}", id, uri);
|
||||
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
|
||||
.await?;
|
||||
|
||||
@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use tokio::fs;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::RwLock;
|
||||
use tonic::codegen::http::uri::PathAndQuery;
|
||||
use tonic::transport::Uri;
|
||||
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
|
||||
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
|
||||
use yaak_common::command::new_xplatform_command;
|
||||
use yaak_tls::ClientCertificateConfig;
|
||||
|
||||
pub async fn fill_pool_from_files(
|
||||
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(
|
||||
|
||||
info!("Invoking protoc with {}", args.join(" "));
|
||||
|
||||
let mut cmd = new_xplatform_command(&config.protoc_bin_path);
|
||||
cmd.args(&args);
|
||||
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
|
||||
let out = Command::new(&config.protoc_bin_path)
|
||||
.args(&args)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!(
|
||||
|
||||
@@ -2,8 +2,6 @@ use crate::dns::LocalhostResolver;
|
||||
use crate::error::Result;
|
||||
use log::{debug, info, warn};
|
||||
use reqwest::{Client, Proxy, redirect};
|
||||
use std::sync::Arc;
|
||||
use yaak_models::models::DnsOverride;
|
||||
use yaak_tls::{ClientCertificateConfig, get_tls_config};
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -30,14 +28,10 @@ pub struct HttpConnectionOptions {
|
||||
pub validate_certificates: bool,
|
||||
pub proxy: HttpConnectionProxySetting,
|
||||
pub client_certificate: Option<ClientCertificateConfig>,
|
||||
pub dns_overrides: Vec<DnsOverride>,
|
||||
}
|
||||
|
||||
impl HttpConnectionOptions {
|
||||
/// Build a reqwest Client and return it along with the DNS resolver.
|
||||
/// The resolver is returned separately so it can be configured per-request
|
||||
/// to emit DNS timing events to the appropriate channel.
|
||||
pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
|
||||
pub(crate) fn build_client(&self) -> Result<Client> {
|
||||
let mut client = Client::builder()
|
||||
.connection_verbose(true)
|
||||
.redirect(redirect::Policy::none())
|
||||
@@ -46,19 +40,15 @@ impl HttpConnectionOptions {
|
||||
.no_brotli()
|
||||
.no_deflate()
|
||||
.referer(false)
|
||||
.tls_info(true)
|
||||
// Disable connection pooling to ensure DNS resolution happens on each request
|
||||
// This is needed so we can emit DNS timing events for each request
|
||||
.pool_max_idle_per_host(0);
|
||||
.tls_info(true);
|
||||
|
||||
// Configure TLS with optional client certificate
|
||||
let config =
|
||||
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
|
||||
client = client.use_preconfigured_tls(config);
|
||||
|
||||
// Configure DNS resolver - keep a reference to configure per-request
|
||||
let resolver = LocalhostResolver::new(self.dns_overrides.clone());
|
||||
client = client.dns_resolver(resolver.clone());
|
||||
// Configure DNS resolver
|
||||
client = client.dns_resolver(LocalhostResolver::new());
|
||||
|
||||
// Configure proxy
|
||||
match self.proxy.clone() {
|
||||
@@ -79,7 +69,7 @@ impl HttpConnectionOptions {
|
||||
self.client_certificate.is_some()
|
||||
);
|
||||
|
||||
Ok((client.build()?, resolver))
|
||||
Ok(client.build()?)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,185 +1,53 @@
|
||||
use crate::sender::HttpResponseEvent;
|
||||
use hyper_util::client::legacy::connect::dns::{
|
||||
GaiResolver as HyperGaiResolver, Name as HyperName,
|
||||
};
|
||||
use log::info;
|
||||
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
|
||||
use std::collections::HashMap;
|
||||
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::{RwLock, mpsc};
|
||||
use tower_service::Service;
|
||||
use yaak_models::models::DnsOverride;
|
||||
|
||||
/// Stores resolved addresses for a hostname override
|
||||
#[derive(Clone)]
|
||||
pub struct ResolvedOverride {
|
||||
pub ipv4: Vec<Ipv4Addr>,
|
||||
pub ipv6: Vec<Ipv6Addr>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LocalhostResolver {
|
||||
fallback: HyperGaiResolver,
|
||||
event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
|
||||
overrides: Arc<HashMap<String, ResolvedOverride>>,
|
||||
}
|
||||
|
||||
impl LocalhostResolver {
|
||||
pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
|
||||
pub fn new() -> Arc<Self> {
|
||||
let resolver = HyperGaiResolver::new();
|
||||
|
||||
// Pre-parse DNS overrides into a lookup map
|
||||
let mut overrides = HashMap::new();
|
||||
for o in dns_overrides {
|
||||
if !o.enabled {
|
||||
continue;
|
||||
}
|
||||
let hostname = o.hostname.to_lowercase();
|
||||
|
||||
let ipv4: Vec<Ipv4Addr> =
|
||||
o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();
|
||||
|
||||
let ipv6: Vec<Ipv6Addr> =
|
||||
o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();
|
||||
|
||||
// Only add if at least one address is valid
|
||||
if !ipv4.is_empty() || !ipv6.is_empty() {
|
||||
overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
|
||||
}
|
||||
}
|
||||
|
||||
Arc::new(Self {
|
||||
fallback: resolver,
|
||||
event_tx: Arc::new(RwLock::new(None)),
|
||||
overrides: Arc::new(overrides),
|
||||
})
|
||||
}
|
||||
|
||||
/// Set the event sender for the current request.
|
||||
/// This should be called before each request to direct DNS events
|
||||
/// to the appropriate channel.
|
||||
pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
|
||||
let mut guard = self.event_tx.write().await;
|
||||
*guard = tx;
|
||||
Arc::new(Self { fallback: resolver })
|
||||
}
|
||||
}
|
||||
|
||||
impl Resolve for LocalhostResolver {
|
||||
fn resolve(&self, name: Name) -> Resolving {
|
||||
let host = name.as_str().to_lowercase();
|
||||
let event_tx = self.event_tx.clone();
|
||||
let overrides = self.overrides.clone();
|
||||
|
||||
info!("DNS resolve called for: {}", host);
|
||||
|
||||
// Check for DNS override first
|
||||
if let Some(resolved) = overrides.get(&host) {
|
||||
log::debug!("DNS override found for: {}", host);
|
||||
let hostname = host.clone();
|
||||
let mut addrs: Vec<SocketAddr> = Vec::new();
|
||||
|
||||
// Add IPv4 addresses
|
||||
for ip in &resolved.ipv4 {
|
||||
addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
|
||||
}
|
||||
|
||||
// Add IPv6 addresses
|
||||
for ip in &resolved.ipv6 {
|
||||
addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
|
||||
}
|
||||
|
||||
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
return Box::pin(async move {
|
||||
// Emit DNS event for override
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration: 0,
|
||||
overridden: true,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
|
||||
});
|
||||
}
|
||||
|
||||
// Check for .localhost suffix
|
||||
let is_localhost = host.ends_with(".localhost");
|
||||
if is_localhost {
|
||||
let hostname = host.clone();
|
||||
// Port 0 is fine; reqwest replaces it with the URL's explicit
|
||||
// port or the scheme's default (80/443, etc.).
|
||||
// port or the scheme’s default (80/443, etc.).
|
||||
// (See docs note below.)
|
||||
let addrs: Vec<SocketAddr> = vec![
|
||||
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
|
||||
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
|
||||
];
|
||||
|
||||
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
return Box::pin(async move {
|
||||
// Emit DNS event for localhost resolution
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration: 0,
|
||||
overridden: false,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
|
||||
});
|
||||
}
|
||||
|
||||
// Fall back to system DNS
|
||||
let mut fallback = self.fallback.clone();
|
||||
let name_str = name.as_str().to_string();
|
||||
let hostname = host.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
let start = Instant::now();
|
||||
|
||||
let result = match HyperName::from_str(&name_str) {
|
||||
Ok(n) => fallback.call(n).await,
|
||||
Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
|
||||
};
|
||||
|
||||
let duration = start.elapsed().as_millis() as u64;
|
||||
|
||||
match result {
|
||||
Ok(addrs) => {
|
||||
// Collect addresses for event emission
|
||||
let addr_vec: Vec<SocketAddr> = addrs.collect();
|
||||
let addresses: Vec<String> =
|
||||
addr_vec.iter().map(|a| a.ip().to_string()).collect();
|
||||
|
||||
// Emit DNS event
|
||||
let guard = event_tx.read().await;
|
||||
if let Some(tx) = guard.as_ref() {
|
||||
let _ = tx
|
||||
.send(HttpResponseEvent::DnsResolved {
|
||||
hostname,
|
||||
addresses,
|
||||
duration,
|
||||
overridden: false,
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok(Box::new(addr_vec.into_iter()) as Addrs)
|
||||
}
|
||||
Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
|
||||
match HyperName::from_str(&name_str) {
|
||||
Ok(n) => fallback
|
||||
.call(n)
|
||||
.await
|
||||
.map(|addrs| Box::new(addrs) as Addrs)
|
||||
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
|
||||
Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::client::HttpConnectionOptions;
|
||||
use crate::dns::LocalhostResolver;
|
||||
use crate::error::Result;
|
||||
use log::info;
|
||||
use reqwest::Client;
|
||||
@@ -8,15 +7,8 @@ use std::sync::Arc;
|
||||
use std::time::{Duration, Instant};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// A cached HTTP client along with its DNS resolver.
|
||||
/// The resolver is needed to set the event sender per-request.
|
||||
pub struct CachedClient {
|
||||
pub client: Client,
|
||||
pub resolver: Arc<LocalhostResolver>,
|
||||
}
|
||||
|
||||
pub struct HttpConnectionManager {
|
||||
connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
|
||||
connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
|
||||
ttl: Duration,
|
||||
}
|
||||
|
||||
@@ -28,26 +20,21 @@ impl HttpConnectionManager {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
|
||||
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
|
||||
let mut connections = self.connections.write().await;
|
||||
let id = opt.id.clone();
|
||||
|
||||
// Clean old connections
|
||||
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
|
||||
|
||||
if let Some((cached, last_used)) = connections.get_mut(&id) {
|
||||
if let Some((c, last_used)) = connections.get_mut(&id) {
|
||||
info!("Re-using HTTP client {id}");
|
||||
*last_used = Instant::now();
|
||||
return Ok(CachedClient {
|
||||
client: cached.client.clone(),
|
||||
resolver: cached.resolver.clone(),
|
||||
});
|
||||
return Ok(c.clone());
|
||||
}
|
||||
|
||||
let (client, resolver) = opt.build_client()?;
|
||||
let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
|
||||
connections.insert(id.into(), (cached, Instant::now()));
|
||||
|
||||
Ok(CachedClient { client, resolver })
|
||||
let c = opt.build_client()?;
|
||||
connections.insert(id.into(), (c.clone(), Instant::now()));
|
||||
Ok(c)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,14 +31,7 @@ pub enum HttpResponseEvent {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
scheme: String,
|
||||
username: String,
|
||||
password: String,
|
||||
host: String,
|
||||
port: u16,
|
||||
path: String,
|
||||
query: String,
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: Version,
|
||||
@@ -52,12 +45,6 @@ pub enum HttpResponseEvent {
|
||||
ChunkReceived {
|
||||
bytes: usize,
|
||||
},
|
||||
DnsResolved {
|
||||
hostname: String,
|
||||
addresses: Vec<String>,
|
||||
duration: u64,
|
||||
overridden: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Display for HttpResponseEvent {
|
||||
@@ -72,16 +59,7 @@ impl Display for HttpResponseEvent {
|
||||
};
|
||||
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
|
||||
}
|
||||
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
|
||||
let auth_str = if username.is_empty() && password.is_empty() {
|
||||
String::new()
|
||||
} else {
|
||||
format!("{}:{}@", username, password)
|
||||
};
|
||||
let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
|
||||
let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
|
||||
write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
|
||||
}
|
||||
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
|
||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||
write!(f, "< {} {}", version_to_str(version), status)
|
||||
}
|
||||
@@ -89,19 +67,6 @@ impl Display for HttpResponseEvent {
|
||||
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
|
||||
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
|
||||
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
|
||||
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
|
||||
if *overridden {
|
||||
write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"* DNS resolved {} to {} ({}ms)",
|
||||
hostname,
|
||||
addresses.join(", "),
|
||||
duration
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -120,9 +85,7 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
|
||||
RedirectBehavior::DropBody => "drop_body".to_string(),
|
||||
},
|
||||
},
|
||||
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
|
||||
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
|
||||
}
|
||||
HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
|
||||
HttpResponseEvent::ReceiveUrl { version, status } => {
|
||||
D::ReceiveUrl { version: format!("{:?}", version), status }
|
||||
}
|
||||
@@ -130,9 +93,6 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
|
||||
HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
|
||||
HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
|
||||
HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
|
||||
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
|
||||
D::DnsResolved { hostname, addresses, duration, overridden }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -394,9 +354,6 @@ impl HttpSender for ReqwestSender {
|
||||
|
||||
// Add headers
|
||||
for header in request.headers {
|
||||
if header.0.is_empty() {
|
||||
continue;
|
||||
}
|
||||
req_builder = req_builder.header(&header.0, &header.1);
|
||||
}
|
||||
|
||||
@@ -433,15 +390,8 @@ impl HttpSender for ReqwestSender {
|
||||
));
|
||||
|
||||
send_event(HttpResponseEvent::SendUrl {
|
||||
method: sendable_req.method().to_string(),
|
||||
scheme: sendable_req.url().scheme().to_string(),
|
||||
username: sendable_req.url().username().to_string(),
|
||||
password: sendable_req.url().password().unwrap_or_default().to_string(),
|
||||
host: sendable_req.url().host_str().unwrap_or_default().to_string(),
|
||||
port: sendable_req.url().port_or_known_default().unwrap_or(0),
|
||||
path: sendable_req.url().path().to_string(),
|
||||
query: sendable_req.url().query().unwrap_or_default().to_string(),
|
||||
fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
|
||||
method: sendable_req.method().to_string(),
|
||||
});
|
||||
|
||||
let mut request_headers = Vec::new();
|
||||
|
||||
@@ -168,7 +168,6 @@ impl<S: HttpSender> HttpTransaction<S> {
response.drain().await?;

// Update the request URL
let previous_url = current_url.clone();
current_url = if location.starts_with("http://") || location.starts_with("https://") {
// Absolute URL
location
@@ -182,8 +181,6 @@ impl<S: HttpSender> HttpTransaction<S> {
format!("{}/{}", base_path, location)
};

Self::remove_sensitive_headers(&mut current_headers, &previous_url, &current_url);

// Determine redirect behavior based on status code and method
let behavior = if status == 303 {
// 303 See Other always changes to GET
@@ -223,33 +220,6 @@ impl<S: HttpSender> HttpTransaction<S> {
}
}

/// Remove sensitive headers when redirecting to a different host.
/// This matches reqwest's `remove_sensitive_headers()` behavior and prevents
/// credentials from being forwarded to third-party servers (e.g., an
/// Authorization header sent from an API redirect to an S3 bucket).
fn remove_sensitive_headers(
headers: &mut Vec<(String, String)>,
previous_url: &str,
next_url: &str,
) {
let previous_host = Url::parse(previous_url).ok().and_then(|u| {
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
});
let next_host = Url::parse(next_url).ok().and_then(|u| {
u.host_str().map(|h| format!("{}:{}", h, u.port_or_known_default().unwrap_or(0)))
});
if previous_host != next_host {
headers.retain(|h| {
let name_lower = h.0.to_lowercase();
name_lower != "authorization"
&& name_lower != "cookie"
&& name_lower != "cookie2"
&& name_lower != "proxy-authorization"
&& name_lower != "www-authenticate"
});
}
}

/// Check if a status code indicates a redirect
fn is_redirect(status: u16) -> bool {
matches!(status, 301 | 302 | 303 | 307 | 308)
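The cross-host rule above is easier to see in isolation. Below is a minimal, standalone sketch of the same idea; it is not the crate's API, just an illustration that assumes the `url` crate and a plain `(name, value)` header list, with a hypothetical `host_key` helper:

```rust
use url::Url;

/// Hypothetical helper: normalize a URL to "host:port" for origin comparison.
fn host_key(u: &str) -> Option<String> {
    let u = Url::parse(u).ok()?;
    Some(format!("{}:{}", u.host_str()?, u.port_or_known_default().unwrap_or(0)))
}

/// Drop credential-bearing headers when the redirect target is a different host.
fn strip_sensitive_on_cross_host(headers: &mut Vec<(String, String)>, from: &str, to: &str) {
    const SENSITIVE: [&str; 5] =
        ["authorization", "cookie", "cookie2", "proxy-authorization", "www-authenticate"];
    if host_key(from) != host_key(to) {
        headers.retain(|(name, _)| !SENSITIVE.contains(&name.to_lowercase().as_str()));
    }
}

fn main() {
    let mut headers = vec![
        ("Authorization".to_string(), "Basic dXNlcjpwYXNz".to_string()),
        ("Accept".to_string(), "application/pdf".to_string()),
    ];
    strip_sensitive_on_cross_host(
        &mut headers,
        "https://api.example.com/download",
        "https://s3.amazonaws.com/bucket/file.pdf",
    );
    // Only the non-sensitive Accept header survives the cross-host hop.
    assert_eq!(headers, vec![("Accept".to_string(), "application/pdf".to_string())]);
}
```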
@@ -299,20 +269,9 @@ mod tests {
|
||||
use tokio::io::AsyncRead;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
/// Captured request metadata for test assertions
|
||||
#[derive(Debug, Clone)]
|
||||
#[allow(dead_code)]
|
||||
struct CapturedRequest {
|
||||
url: String,
|
||||
method: String,
|
||||
headers: Vec<(String, String)>,
|
||||
}
|
||||
|
||||
/// Mock sender for testing
|
||||
struct MockSender {
|
||||
responses: Arc<Mutex<Vec<MockResponse>>>,
|
||||
/// Captured requests for assertions
|
||||
captured_requests: Arc<Mutex<Vec<CapturedRequest>>>,
|
||||
}
|
||||
|
||||
struct MockResponse {
|
||||
@@ -323,10 +282,7 @@ mod tests {
|
||||
|
||||
impl MockSender {
|
||||
fn new(responses: Vec<MockResponse>) -> Self {
|
||||
Self {
|
||||
responses: Arc::new(Mutex::new(responses)),
|
||||
captured_requests: Arc::new(Mutex::new(Vec::new())),
|
||||
}
|
||||
Self { responses: Arc::new(Mutex::new(responses)) }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -334,16 +290,9 @@ mod tests {
|
||||
impl HttpSender for MockSender {
|
||||
async fn send(
|
||||
&self,
|
||||
request: SendableHttpRequest,
|
||||
_request: SendableHttpRequest,
|
||||
_event_tx: mpsc::Sender<HttpResponseEvent>,
|
||||
) -> Result<HttpResponse> {
|
||||
// Capture the request metadata for later assertions
|
||||
self.captured_requests.lock().await.push(CapturedRequest {
|
||||
url: request.url.clone(),
|
||||
method: request.method.clone(),
|
||||
headers: request.headers.clone(),
|
||||
});
|
||||
|
||||
let mut responses = self.responses.lock().await;
|
||||
if responses.is_empty() {
|
||||
Err(crate::error::Error::RequestError("No more mock responses".to_string()))
|
||||
@@ -393,8 +342,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_transaction_single_redirect() {
|
||||
let redirect_headers =
|
||||
vec![("Location".to_string(), "https://example.com/new".to_string())];
|
||||
let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
|
||||
|
||||
let responses = vec![
|
||||
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
|
||||
@@ -425,8 +373,7 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_transaction_max_redirects_exceeded() {
|
||||
let redirect_headers =
|
||||
vec![("Location".to_string(), "https://example.com/loop".to_string())];
|
||||
let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
|
||||
|
||||
// Create more redirects than allowed
|
||||
let responses: Vec<MockResponse> = (0..12)
|
||||
@@ -578,8 +525,7 @@ mod tests {
|
||||
_request: SendableHttpRequest,
|
||||
_event_tx: mpsc::Sender<HttpResponseEvent>,
|
||||
) -> Result<HttpResponse> {
|
||||
let headers =
|
||||
vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
|
||||
let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
|
||||
|
||||
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
|
||||
Box::pin(std::io::Cursor::new(vec![]));
|
||||
@@ -638,10 +584,7 @@ mod tests {
|
||||
let headers = vec![
|
||||
("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
|
||||
("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
|
||||
(
|
||||
"set-cookie".to_string(),
|
||||
"preferences=dark; Path=/; Max-Age=86400".to_string(),
|
||||
),
|
||||
("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
|
||||
];
|
||||
|
||||
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
|
||||
@@ -777,116 +720,4 @@ mod tests {
assert!(result.is_ok());
assert_eq!(request_count.load(Ordering::SeqCst), 2);
}

#[tokio::test]
async fn test_cross_origin_redirect_strips_auth_headers() {
// Redirect from api.example.com -> s3.amazonaws.com should strip Authorization
let responses = vec![
MockResponse {
status: 302,
headers: vec![(
"Location".to_string(),
"https://s3.amazonaws.com/bucket/file.pdf".to_string(),
)],
body: vec![],
},
MockResponse { status: 200, headers: Vec::new(), body: b"PDF content".to_vec() },
];

let sender = MockSender::new(responses);
let captured = sender.captured_requests.clone();
let transaction = HttpTransaction::new(sender);

let request = SendableHttpRequest {
url: "https://api.example.com/download".to_string(),
method: "GET".to_string(),
headers: vec![
("Authorization".to_string(), "Basic dXNlcjpwYXNz".to_string()),
("Accept".to_string(), "application/pdf".to_string()),
],
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};

let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);

let requests = captured.lock().await;
assert_eq!(requests.len(), 2);

// First request should have the Authorization header
assert!(
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"First request should have Authorization header"
);

// Second request (to different host) should NOT have the Authorization header
assert!(
!requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"Redirected request to different host should NOT have Authorization header"
);

// Non-sensitive headers should still be present
assert!(
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("accept")),
"Non-sensitive headers should be preserved across cross-origin redirects"
);
}

#[tokio::test]
async fn test_same_origin_redirect_preserves_auth_headers() {
// Redirect within the same host should keep Authorization
let responses = vec![
MockResponse {
status: 302,
headers: vec![(
"Location".to_string(),
"https://api.example.com/v2/download".to_string(),
)],
body: vec![],
},
MockResponse { status: 200, headers: Vec::new(), body: b"OK".to_vec() },
];

let sender = MockSender::new(responses);
let captured = sender.captured_requests.clone();
let transaction = HttpTransaction::new(sender);

let request = SendableHttpRequest {
url: "https://api.example.com/v1/download".to_string(),
method: "GET".to_string(),
headers: vec![
("Authorization".to_string(), "Bearer token123".to_string()),
("Accept".to_string(), "application/json".to_string()),
],
options: crate::types::SendableHttpRequestOptions {
follow_redirects: true,
..Default::default()
},
..Default::default()
};

let (_tx, rx) = tokio::sync::watch::channel(false);
let (event_tx, _event_rx) = mpsc::channel(100);
let result = transaction.execute_with_cancellation(request, rx, event_tx).await.unwrap();
assert_eq!(result.status, 200);

let requests = captured.lock().await;
assert_eq!(requests.len(), 2);

// Both requests should have the Authorization header (same host)
assert!(
requests[0].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"First request should have Authorization header"
);
assert!(
requests[1].headers.iter().any(|(k, _)| k.eq_ignore_ascii_case("authorization")),
"Redirected request to same host should preserve Authorization header"
);
}
}
crates/yaak-models/bindings/gen_models.ts — 8 changes (generated)
@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -93,6 +91,6 @@ export type WebsocketMessageType = "text" | "binary";
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -206,34 +206,6 @@ export function replaceModelsInStore<
|
||||
});
|
||||
}
|
||||
|
||||
export function mergeModelsInStore<
|
||||
M extends AnyModel['model'],
|
||||
T extends Extract<AnyModel, { model: M }>,
|
||||
>(model: M, models: T[], filter?: (model: T) => boolean) {
|
||||
mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
|
||||
const existingModels = { ...prev[model] } as Record<string, T>;
|
||||
|
||||
// Merge in new models first
|
||||
for (const m of models) {
|
||||
existingModels[m.id] = m;
|
||||
}
|
||||
|
||||
// Then filter out unwanted models
|
||||
if (filter) {
|
||||
for (const [id, m] of Object.entries(existingModels)) {
|
||||
if (!filter(m)) {
|
||||
delete existingModels[id];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
...prev,
|
||||
[model]: existingModels,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
|
||||
// Never ignore updates from non-user sources
|
||||
if (updateSource.type !== 'window') {
|
||||
|
||||
@@ -1,2 +0,0 @@
-- Add DNS resolution timing to http_responses
ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;
@@ -1,2 +0,0 @@
-- Add DNS overrides setting to workspaces
ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;
@@ -1,12 +0,0 @@
-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
-- keeping any other custom headers the user may have added.
UPDATE workspaces
SET headers = (
SELECT json_group_array(json(value))
FROM json_each(headers)
WHERE NOT (
(LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
)
)
WHERE json_array_length(headers) > 0;
@@ -73,20 +73,6 @@ pub struct ClientCertificate {
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
pub struct DnsOverride {
|
||||
pub hostname: String,
|
||||
#[serde(default)]
|
||||
pub ipv4: Vec<String>,
|
||||
#[serde(default)]
|
||||
pub ipv6: Vec<String>,
|
||||
#[serde(default = "default_true")]
|
||||
#[ts(optional, as = "Option<bool>")]
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
#[ts(export, export_to = "gen_models.ts")]
|
||||
@@ -317,8 +303,6 @@ pub struct Workspace {
|
||||
#[serde(default = "default_true")]
|
||||
pub setting_follow_redirects: bool,
|
||||
pub setting_request_timeout: i32,
|
||||
#[serde(default)]
|
||||
pub setting_dns_overrides: Vec<DnsOverride>,
|
||||
}
|
||||
|
||||
impl UpsertModelInfo for Workspace {
|
||||
@@ -359,7 +343,6 @@ impl UpsertModelInfo for Workspace {
|
||||
(SettingFollowRedirects, self.setting_follow_redirects.into()),
|
||||
(SettingRequestTimeout, self.setting_request_timeout.into()),
|
||||
(SettingValidateCertificates, self.setting_validate_certificates.into()),
|
||||
(SettingDnsOverrides, serde_json::to_string(&self.setting_dns_overrides)?.into()),
|
||||
])
|
||||
}
|
||||
|
||||
@@ -376,7 +359,6 @@ impl UpsertModelInfo for Workspace {
|
||||
WorkspaceIden::SettingFollowRedirects,
|
||||
WorkspaceIden::SettingRequestTimeout,
|
||||
WorkspaceIden::SettingValidateCertificates,
|
||||
WorkspaceIden::SettingDnsOverrides,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -386,7 +368,6 @@ impl UpsertModelInfo for Workspace {
|
||||
{
|
||||
let headers: String = row.get("headers")?;
|
||||
let authentication: String = row.get("authentication")?;
|
||||
let setting_dns_overrides: String = row.get("setting_dns_overrides")?;
|
||||
Ok(Self {
|
||||
id: row.get("id")?,
|
||||
model: row.get("model")?,
|
||||
@@ -401,7 +382,6 @@ impl UpsertModelInfo for Workspace {
|
||||
setting_follow_redirects: row.get("setting_follow_redirects")?,
|
||||
setting_request_timeout: row.get("setting_request_timeout")?,
|
||||
setting_validate_certificates: row.get("setting_validate_certificates")?,
|
||||
setting_dns_overrides: serde_json::from_str(&setting_dns_overrides).unwrap_or_default(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1353,7 +1333,6 @@ pub struct HttpResponse {
|
||||
pub content_length_compressed: Option<i32>,
|
||||
pub elapsed: i32,
|
||||
pub elapsed_headers: i32,
|
||||
pub elapsed_dns: i32,
|
||||
pub error: Option<String>,
|
||||
pub headers: Vec<HttpResponseHeader>,
|
||||
pub remote_addr: Option<String>,
|
||||
@@ -1402,7 +1381,6 @@ impl UpsertModelInfo for HttpResponse {
|
||||
(ContentLengthCompressed, self.content_length_compressed.into()),
|
||||
(Elapsed, self.elapsed.into()),
|
||||
(ElapsedHeaders, self.elapsed_headers.into()),
|
||||
(ElapsedDns, self.elapsed_dns.into()),
|
||||
(Error, self.error.into()),
|
||||
(Headers, serde_json::to_string(&self.headers)?.into()),
|
||||
(RemoteAddr, self.remote_addr.into()),
|
||||
@@ -1424,7 +1402,6 @@ impl UpsertModelInfo for HttpResponse {
|
||||
HttpResponseIden::ContentLengthCompressed,
|
||||
HttpResponseIden::Elapsed,
|
||||
HttpResponseIden::ElapsedHeaders,
|
||||
HttpResponseIden::ElapsedDns,
|
||||
HttpResponseIden::Error,
|
||||
HttpResponseIden::Headers,
|
||||
HttpResponseIden::RemoteAddr,
|
||||
@@ -1458,7 +1435,6 @@ impl UpsertModelInfo for HttpResponse {
|
||||
version: r.get("version")?,
|
||||
elapsed: r.get("elapsed")?,
|
||||
elapsed_headers: r.get("elapsed_headers")?,
|
||||
elapsed_dns: r.get("elapsed_dns").unwrap_or_default(),
|
||||
remote_addr: r.get("remote_addr")?,
|
||||
status: r.get("status")?,
|
||||
status_reason: r.get("status_reason")?,
|
||||
@@ -1495,21 +1471,7 @@ pub enum HttpResponseEventData {
|
||||
},
|
||||
SendUrl {
|
||||
method: String,
|
||||
#[serde(default)]
|
||||
scheme: String,
|
||||
#[serde(default)]
|
||||
username: String,
|
||||
#[serde(default)]
|
||||
password: String,
|
||||
#[serde(default)]
|
||||
host: String,
|
||||
#[serde(default)]
|
||||
port: u16,
|
||||
path: String,
|
||||
#[serde(default)]
|
||||
query: String,
|
||||
#[serde(default)]
|
||||
fragment: String,
|
||||
},
|
||||
ReceiveUrl {
|
||||
version: String,
|
||||
@@ -1529,12 +1491,6 @@ pub enum HttpResponseEventData {
|
||||
ChunkReceived {
|
||||
bytes: usize,
|
||||
},
|
||||
DnsResolved {
|
||||
hostname: String,
|
||||
addresses: Vec<String>,
|
||||
duration: u64,
|
||||
overridden: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Default for HttpResponseEventData {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use super::dedupe_headers;
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
|
||||
@@ -88,6 +87,6 @@ impl<'a> DbContext<'a> {
|
||||
|
||||
metadata.append(&mut grpc_request.metadata.clone());
|
||||
|
||||
Ok(dedupe_headers(metadata))
|
||||
Ok(metadata)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use super::dedupe_headers;
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
|
||||
@@ -88,7 +87,7 @@ impl<'a> DbContext<'a> {
|
||||
|
||||
headers.append(&mut http_request.headers.clone());
|
||||
|
||||
Ok(dedupe_headers(headers))
|
||||
Ok(headers)
|
||||
}
|
||||
|
||||
pub fn list_http_requests_for_folder_recursive(
|
||||
|
||||
@@ -19,26 +19,6 @@ mod websocket_connections;
mod websocket_events;
mod websocket_requests;
mod workspace_metas;
pub mod workspaces;
mod workspaces;

const MAX_HISTORY_ITEMS: usize = 20;

use crate::models::HttpRequestHeader;
use std::collections::HashMap;

/// Deduplicate headers by name (case-insensitive), keeping the latest (most specific) value.
/// Preserves the order of first occurrence for each header name.
pub(crate) fn dedupe_headers(headers: Vec<HttpRequestHeader>) -> Vec<HttpRequestHeader> {
let mut index_by_name: HashMap<String, usize> = HashMap::new();
let mut deduped: Vec<HttpRequestHeader> = Vec::new();
for header in headers {
let key = header.name.to_lowercase();
if let Some(&idx) = index_by_name.get(&key) {
deduped[idx] = header;
} else {
index_by_name.insert(key, deduped.len());
deduped.push(header);
}
}
deduped
}
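As a quick illustration of the last-one-wins, order-preserving behaviour documented above, here is a standalone sketch of the same logic using plain `(name, value)` tuples instead of `HttpRequestHeader` (the tuple form is just for the example):

```rust
use std::collections::HashMap;

fn dedupe(headers: Vec<(String, String)>) -> Vec<(String, String)> {
    let mut index_by_name: HashMap<String, usize> = HashMap::new();
    let mut deduped: Vec<(String, String)> = Vec::new();
    for header in headers {
        let key = header.0.to_lowercase();
        if let Some(&idx) = index_by_name.get(&key) {
            // A later, more specific value replaces the earlier one in place.
            deduped[idx] = header;
        } else {
            index_by_name.insert(key, deduped.len());
            deduped.push(header);
        }
    }
    deduped
}

fn main() {
    let headers = vec![
        ("Accept".to_string(), "*/*".to_string()),              // inherited default
        ("X-Trace".to_string(), "on".to_string()),
        ("accept".to_string(), "application/json".to_string()), // request-level override
    ];
    let out = dedupe(headers);
    // The Accept slot keeps its original position but holds the latest value.
    assert_eq!(out[0], ("accept".to_string(), "application/json".to_string()));
    assert_eq!(out.len(), 2);
}
```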
@@ -1,4 +1,3 @@
|
||||
use super::dedupe_headers;
|
||||
use crate::db_context::DbContext;
|
||||
use crate::error::Result;
|
||||
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
|
||||
@@ -96,6 +95,6 @@ impl<'a> DbContext<'a> {
|
||||
|
||||
headers.append(&mut websocket_request.headers.clone());
|
||||
|
||||
Ok(dedupe_headers(headers))
|
||||
Ok(headers)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,28 +80,6 @@ impl<'a> DbContext<'a> {
}

pub fn resolve_headers_for_workspace(&self, workspace: &Workspace) -> Vec<HttpRequestHeader> {
let mut headers = default_headers();
headers.extend(workspace.headers.clone());
headers
workspace.headers.clone()
}
}

/// Global default headers that are always sent with requests unless overridden.
/// These are prepended to the inheritance chain so workspace/folder/request headers
/// can override or disable them.
pub fn default_headers() -> Vec<HttpRequestHeader> {
vec![
HttpRequestHeader {
enabled: true,
name: "User-Agent".to_string(),
value: "yaak".to_string(),
id: None,
},
HttpRequestHeader {
enabled: true,
name: "Accept".to_string(),
value: "*/*".to_string(),
id: None,
},
]
}
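To make the override behaviour concrete, here is a small hedged sketch (tuples and header values are illustrative, not the crate's types) of how a workspace-level `User-Agent` wins over the prepended default once the chain is resolved and deduplicated:

```rust
fn main() {
    // Defaults come first, then workspace headers, mirroring the chain above.
    let defaults = vec![("User-Agent", "yaak"), ("Accept", "*/*")];
    let workspace = vec![("user-agent", "my-client/1.0")];

    let mut resolved: Vec<(String, String)> = Vec::new();
    for (name, value) in defaults.into_iter().chain(workspace) {
        match resolved.iter().position(|(n, _)| n.eq_ignore_ascii_case(name)) {
            Some(i) => resolved[i] = (name.to_string(), value.to_string()),
            None => resolved.push((name.to_string(), value.to_string())),
        }
    }

    // The workspace value overrides the default User-Agent; Accept is untouched.
    assert_eq!(resolved[0], ("user-agent".to_string(), "my-client/1.0".to_string()));
    assert_eq!(resolved[1], ("Accept".to_string(), "*/*".to_string()));
}
```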
crates/yaak-plugins/bindings/gen_models.ts — 8 changes (generated)
@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -79,6 +77,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -80,7 +80,10 @@ pub async fn check_plugin_updates(
|
||||
}
|
||||
|
||||
/// Search for plugins in the registry.
|
||||
pub async fn search_plugins(http_client: &Client, query: &str) -> Result<PluginSearchResponse> {
|
||||
pub async fn search_plugins(
|
||||
http_client: &Client,
|
||||
query: &str,
|
||||
) -> Result<PluginSearchResponse> {
|
||||
let mut url = build_url("/search");
|
||||
{
|
||||
let mut query_pairs = url.query_pairs_mut();
|
||||
|
||||
@@ -378,8 +378,7 @@ impl PluginManager {
|
||||
plugins: Vec<PluginHandle>,
|
||||
timeout_duration: Duration,
|
||||
) -> Result<Vec<InternalEvent>> {
|
||||
let event_type = payload.type_name();
|
||||
let label = format!("wait[{}.{}]", plugins.len(), event_type);
|
||||
let label = format!("wait[{}.{}]", plugins.len(), payload.type_name());
|
||||
let (rx_id, mut rx) = self.subscribe(label.as_str()).await;
|
||||
|
||||
// 1. Build the events with IDs and everything
|
||||
@@ -413,21 +412,10 @@ impl PluginManager {
|
||||
|
||||
// Timeout to prevent hanging forever if plugin doesn't respond
|
||||
if timeout(timeout_duration, collect_events).await.is_err() {
|
||||
let responded_ids: Vec<&String> =
|
||||
found_events.iter().filter_map(|e| e.reply_id.as_ref()).collect();
|
||||
let non_responding: Vec<&str> = events_to_send
|
||||
.iter()
|
||||
.filter(|e| !responded_ids.contains(&&e.id))
|
||||
.map(|e| e.plugin_name.as_str())
|
||||
.collect();
|
||||
warn!(
|
||||
"Timeout ({:?}) waiting for {} responses. Got {}/{} responses. \
|
||||
Non-responding plugins: [{}]",
|
||||
timeout_duration,
|
||||
event_type,
|
||||
"Timeout waiting for plugin responses. Got {}/{} responses",
|
||||
found_events.len(),
|
||||
events_to_send.len(),
|
||||
non_responding.join(", ")
|
||||
events_to_send.len()
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -196,11 +196,7 @@ pub fn decrypt_secure_template_function(
|
||||
}
|
||||
}
|
||||
new_tokens.push(Token::Raw {
|
||||
text: template_function_secure_run(
|
||||
encryption_manager,
|
||||
args_map,
|
||||
plugin_context,
|
||||
)?,
|
||||
text: template_function_secure_run(encryption_manager, args_map, plugin_context)?,
|
||||
});
|
||||
}
|
||||
t => {
|
||||
@@ -220,8 +216,7 @@ pub fn encrypt_secure_template_function(
|
||||
plugin_context: &PluginContext,
|
||||
template: &str,
|
||||
) -> Result<String> {
|
||||
let decrypted =
|
||||
decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
|
||||
let decrypted = decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
|
||||
let tokens = Tokens {
|
||||
tokens: vec
Do not edit this file manually.
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
|
||||
|
||||
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
@@ -22,4 +20,4 @@ export type SyncState = { model: "sync_state", id: string, workspaceId: string,
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
|
||||
@@ -296,7 +296,11 @@ pub fn compute_sync_ops(
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
|
||||
fn workspace_models(
|
||||
db: &DbContext,
|
||||
version: &str,
|
||||
workspace_id: &str,
|
||||
) -> Result<Vec<SyncModel>> {
|
||||
// We want to include private environments here so that we can take them into account during
|
||||
// the sync process. Otherwise, they would be treated as deleted.
|
||||
let include_private_environments = true;
|
||||
|
||||
@@ -2,7 +2,6 @@ use crate::connect::ws_connect;
|
||||
use crate::error::Result;
|
||||
use futures_util::stream::SplitSink;
|
||||
use futures_util::{SinkExt, StreamExt};
|
||||
use http::HeaderMap;
|
||||
use log::{debug, info, warn};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
@@ -11,6 +10,7 @@ use tokio::net::TcpStream;
|
||||
use tokio::sync::{Mutex, mpsc};
|
||||
use tokio_tungstenite::tungstenite::Message;
|
||||
use tokio_tungstenite::tungstenite::handshake::client::Response;
|
||||
use http::HeaderMap;
|
||||
use tokio_tungstenite::tungstenite::http::HeaderValue;
|
||||
use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
|
||||
use yaak_tls::ClientCertificateConfig;
|
||||
|
||||
package-lock.json — 123 changes (generated)
@@ -63,7 +63,7 @@
|
||||
"src-web"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^2.3.13",
|
||||
"@biomejs/biome": "^2.3.10",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@yaakapp/cli": "^0.3.4",
|
||||
"dotenv-cli": "^11.0.0",
|
||||
@@ -501,9 +501,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/biome": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.13.tgz",
|
||||
"integrity": "sha512-Fw7UsV0UAtWIBIm0M7g5CRerpu1eKyKAXIazzxhbXYUyMkwNrkX/KLkGI7b+uVDQ5cLUMfOC9vR60q9IDYDstA==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.3.11.tgz",
|
||||
"integrity": "sha512-/zt+6qazBWguPG6+eWmiELqO+9jRsMZ/DBU3lfuU2ngtIQYzymocHhKiZRyrbra4aCOoyTg/BmY+6WH5mv9xmQ==",
|
||||
"dev": true,
|
||||
"license": "MIT OR Apache-2.0",
|
||||
"bin": {
|
||||
@@ -517,20 +517,20 @@
|
||||
"url": "https://opencollective.com/biome"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@biomejs/cli-darwin-arm64": "2.3.13",
|
||||
"@biomejs/cli-darwin-x64": "2.3.13",
|
||||
"@biomejs/cli-linux-arm64": "2.3.13",
|
||||
"@biomejs/cli-linux-arm64-musl": "2.3.13",
|
||||
"@biomejs/cli-linux-x64": "2.3.13",
|
||||
"@biomejs/cli-linux-x64-musl": "2.3.13",
|
||||
"@biomejs/cli-win32-arm64": "2.3.13",
|
||||
"@biomejs/cli-win32-x64": "2.3.13"
|
||||
"@biomejs/cli-darwin-arm64": "2.3.11",
|
||||
"@biomejs/cli-darwin-x64": "2.3.11",
|
||||
"@biomejs/cli-linux-arm64": "2.3.11",
|
||||
"@biomejs/cli-linux-arm64-musl": "2.3.11",
|
||||
"@biomejs/cli-linux-x64": "2.3.11",
|
||||
"@biomejs/cli-linux-x64-musl": "2.3.11",
|
||||
"@biomejs/cli-win32-arm64": "2.3.11",
|
||||
"@biomejs/cli-win32-x64": "2.3.11"
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-darwin-arm64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-0OCwP0/BoKzyJHnFdaTk/i7hIP9JHH9oJJq6hrSCPmJPo8JWcJhprK4gQlhFzrwdTBAW4Bjt/RmCf3ZZe59gwQ==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-/uXXkBcPKVQY7rc9Ys2CrlirBJYbpESEDme7RKiBD6MmqR2w3j0+ZZXRIL2xiaNPsIMMNhP1YnA+jRRxoOAFrA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -545,9 +545,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-darwin-x64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-AGr8OoemT/ejynbIu56qeil2+F2WLkIjn2d8jGK1JkchxnMUhYOfnqc9sVzcRxpG9Ycvw4weQ5sprRvtb7Yhcw==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-fh7nnvbweDPm2xEmFjfmq7zSUiox88plgdHF9OIW4i99WnXrAC3o2P3ag9judoUMv8FCSUnlwJCM1B64nO5Fbg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -562,9 +562,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-arm64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-xvOiFkrDNu607MPMBUQ6huHmBG1PZLOrqhtK6pXJW3GjfVqJg0Z/qpTdhXfcqWdSZHcT+Nct2fOgewZvytESkw==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-l4xkGa9E7Uc0/05qU2lMYfN1H+fzzkHgaJoy98wO+b/7Gl78srbCRRgwYSW+BTLixTBrM6Ede5NSBwt7rd/i6g==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -579,9 +579,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-arm64-musl": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.13.tgz",
|
||||
"integrity": "sha512-TUdDCSY+Eo/EHjhJz7P2GnWwfqet+lFxBZzGHldrvULr59AgahamLs/N85SC4+bdF86EhqDuuw9rYLvLFWWlXA==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.3.11.tgz",
|
||||
"integrity": "sha512-XPSQ+XIPZMLaZ6zveQdwNjbX+QdROEd1zPgMwD47zvHV+tCGB88VH+aynyGxAHdzL+Tm/+DtKST5SECs4iwCLg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -596,9 +596,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-x64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-s+YsZlgiXNq8XkgHs6xdvKDFOj/bwTEevqEY6rC2I3cBHbxXYU1LOZstH3Ffw9hE5tE1sqT7U23C00MzkXztMw==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-/1s9V/H3cSe0r0Mv/Z8JryF5x9ywRxywomqZVLHAoa/uN0eY7F8gEngWKNS5vbbN/BsfpCG5yeBT5ENh50Frxg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -613,9 +613,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-linux-x64-musl": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.13.tgz",
|
||||
"integrity": "sha512-0bdwFVSbbM//Sds6OjtnmQGp4eUjOTt6kHvR/1P0ieR9GcTUAlPNvPC3DiavTqq302W34Ae2T6u5VVNGuQtGlQ==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.3.11.tgz",
|
||||
"integrity": "sha512-vU7a8wLs5C9yJ4CB8a44r12aXYb8yYgBn+WeyzbMjaCMklzCv1oXr8x+VEyWodgJt9bDmhiaW/I0RHbn7rsNmw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -630,9 +630,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-win32-arm64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.13.tgz",
|
||||
"integrity": "sha512-QweDxY89fq0VvrxME+wS/BXKmqMrOTZlN9SqQ79kQSIc3FrEwvW/PvUegQF6XIVaekncDykB5dzPqjbwSKs9DA==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.3.11.tgz",
|
||||
"integrity": "sha512-PZQ6ElCOnkYapSsysiTy0+fYX+agXPlWugh6+eQ6uPKI3vKAqNp6TnMhoM3oY2NltSB89hz59o8xIfOdyhi9Iw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -647,9 +647,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@biomejs/cli-win32-x64": {
|
||||
"version": "2.3.13",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.13.tgz",
|
||||
"integrity": "sha512-trDw2ogdM2lyav9WFQsdsfdVy1dvZALymRpgmWsvSez0BJzBjulhOT/t+wyKeh3pZWvwP3VMs1SoOKwO3wecMQ==",
|
||||
"version": "2.3.11",
|
||||
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.3.11.tgz",
|
||||
"integrity": "sha512-43VrG813EW+b5+YbDbz31uUsheX+qFKCpXeY9kfdAx+ww3naKxeVkTD9zLIWxUPfJquANMHrmW3wbe/037G0Qg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -807,21 +807,6 @@
|
||||
"@lezer/xml": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/lang-yaml": {
|
||||
"version": "6.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/lang-yaml/-/lang-yaml-6.1.2.tgz",
|
||||
"integrity": "sha512-dxrfG8w5Ce/QbT7YID7mWZFKhdhsaTNOYjOkSIMt1qmC4VQnXSDSYVHHHn8k6kJUfIhtLo8t1JJgltlxWdsITw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/autocomplete": "^6.0.0",
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@lezer/common": "^1.2.0",
|
||||
"@lezer/highlight": "^1.2.0",
|
||||
"@lezer/lr": "^1.0.0",
|
||||
"@lezer/yaml": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/language": {
|
||||
"version": "6.12.1",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.12.1.tgz",
|
||||
@@ -847,19 +832,6 @@
|
||||
"crelt": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/merge": {
|
||||
"version": "6.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/merge/-/merge-6.11.2.tgz",
|
||||
"integrity": "sha512-NO5EJd2rLRbwVWLgMdhIntDIhfDtMOKYEZgqV5WnkNUS2oXOCVWLPjG/kgl/Jth2fGiOuG947bteqxP9nBXmMg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.17.0",
|
||||
"@lezer/highlight": "^1.0.0",
|
||||
"style-mod": "^4.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/search": {
|
||||
"version": "6.5.11",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.11.tgz",
|
||||
@@ -1642,17 +1614,6 @@
|
||||
"@lezer/lr": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@lezer/yaml": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@lezer/yaml/-/yaml-1.0.3.tgz",
|
||||
"integrity": "sha512-GuBLekbw9jDBDhGur82nuwkxKQ+a3W5H0GfaAthDXcAu+XdpS43VlnxA9E9hllkpSP5ellRDKjLLj7Lu9Wr6xA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@lezer/common": "^1.2.0",
|
||||
"@lezer/highlight": "^1.0.0",
|
||||
"@lezer/lr": "^1.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@marijn/find-cluster-break": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz",
|
||||
@@ -7850,9 +7811,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/hono": {
|
||||
"version": "4.11.7",
|
||||
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz",
|
||||
"integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==",
|
||||
"version": "4.11.3",
|
||||
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.3.tgz",
|
||||
"integrity": "sha512-PmQi306+M/ct/m5s66Hrg+adPnkD5jiO6IjA7WhWw0gSBSo1EcRegwuI1deZ+wd5pzCGynCcn2DprnE4/yEV4w==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=16.9.0"
|
||||
@@ -15760,7 +15721,7 @@
|
||||
},
|
||||
"packages/plugin-runtime-types": {
|
||||
"name": "@yaakapp/api",
|
||||
"version": "0.8.0",
|
||||
"version": "0.7.1",
|
||||
"dependencies": {
|
||||
"@types/node": "^24.0.13"
|
||||
},
|
||||
@@ -15782,7 +15743,7 @@
|
||||
"@hono/mcp": "^0.2.3",
|
||||
"@hono/node-server": "^1.19.7",
|
||||
"@modelcontextprotocol/sdk": "^1.25.2",
|
||||
"hono": "^4.11.7",
|
||||
"hono": "^4.11.3",
|
||||
"zod": "^3.25.76"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -16023,9 +15984,7 @@
|
||||
"@codemirror/lang-json": "^6.0.1",
|
||||
"@codemirror/lang-markdown": "^6.3.2",
|
||||
"@codemirror/lang-xml": "^6.1.0",
|
||||
"@codemirror/lang-yaml": "^6.1.2",
|
||||
"@codemirror/language": "^6.11.0",
|
||||
"@codemirror/merge": "^6.11.2",
|
||||
"@codemirror/search": "^6.5.11",
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@gilbarbara/deep-equal": "^0.3.1",
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"js-yaml": "^4.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "^2.3.13",
|
||||
"@biomejs/biome": "^2.3.10",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@yaakapp/cli": "^0.3.4",
|
||||
"dotenv-cli": "^11.0.0",
|
||||
|
||||
@@ -17,7 +17,7 @@ npx @yaakapp/cli generate
|
||||
```
|
||||
|
||||
For more details on creating plugins, check out
|
||||
the [Quick Start Guide](https://yaak.app/docs/plugin-development/plugins-quick-start)
|
||||
the [Quick Start Guide](https://feedback.yaak.app/help/articles/6911763-plugins-quick-start)
|
||||
|
||||
## Installation
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@yaakapp/api",
|
||||
"version": "0.8.0",
|
||||
"version": "0.7.1",
|
||||
"keywords": [
|
||||
"api-client",
|
||||
"insomnia-alternative",
|
||||
|
||||
@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
|
||||
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
|
||||
* The `From` impl is in yaak-http to avoid circular dependencies.
|
||||
*/
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
|
||||
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
|
||||
|
||||
export type HttpResponseHeader = { name: string, value: string, };
|
||||
|
||||
@@ -79,6 +77,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
|
||||
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
|
||||
|
||||
@@ -25,7 +25,7 @@ import type {
|
||||
TemplateRenderRequest,
|
||||
WorkspaceInfo,
|
||||
} from '../bindings/gen_events.ts';
|
||||
import type { Folder, HttpRequest } from '../bindings/gen_models.ts';
|
||||
import type { HttpRequest } from '../bindings/gen_models.ts';
|
||||
import type { JsonValue } from '../bindings/serde_json/JsonValue';
|
||||
|
||||
export type WorkspaceHandle = Pick<WorkspaceInfo, 'id' | 'name'>;
|
||||
@@ -82,15 +82,6 @@ export interface Context {
|
||||
};
|
||||
folder: {
|
||||
list(args?: ListFoldersRequest): Promise<ListFoldersResponse['folders']>;
|
||||
getById(args: { id: string }): Promise<Folder | null>;
|
||||
create(
|
||||
args: Omit<Partial<Folder>, 'id' | 'model' | 'createdAt' | 'updatedAt'> &
|
||||
Pick<Folder, 'workspaceId' | 'name'>,
|
||||
): Promise<Folder>;
|
||||
update(
|
||||
args: Omit<Partial<Folder>, 'model' | 'createdAt' | 'updatedAt'> & Pick<Folder, 'id'>,
|
||||
): Promise<Folder>;
|
||||
delete(args: { id: string }): Promise<Folder>;
|
||||
};
|
||||
httpResponse: {
|
||||
find(args: FindHttpResponsesRequest): Promise<FindHttpResponsesResponse['httpResponses']>;
|
||||
|
||||
@@ -11,7 +11,6 @@ import type {
  DeleteKeyValueResponse,
  DeleteModelResponse,
  FindHttpResponsesResponse,
  Folder,
  GetCookieValueRequest,
  GetCookieValueResponse,
  GetHttpRequestByIdResponse,
@@ -338,8 +337,8 @@ export class PluginInstance {
    if (payload.type === 'call_http_authentication_request' && this.#mod?.authentication) {
      const auth = this.#mod.authentication;
      if (typeof auth?.onApply === 'function') {
        const resolvedArgs = await applyDynamicFormInput(ctx, auth.args, payload);
        payload.values = applyFormInputDefaults(resolvedArgs, payload.values);
        auth.args = await applyDynamicFormInput(ctx, auth.args, payload);
        payload.values = applyFormInputDefaults(auth.args, payload.values);
        this.#sendPayload(
          context,
          {
@@ -783,44 +782,6 @@ export class PluginInstance {
        const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
        return folders;
      },
      getById: async (args: { id: string }) => {
        const payload = { type: 'list_folders_request' } as const;
        const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
        return folders.find((f) => f.id === args.id) ?? null;
      },
      create: async (args) => {
        const payload = {
          type: 'upsert_model_request',
          model: {
            name: '',
            ...args,
            id: '',
            model: 'folder',
          },
        } as InternalEventPayload;
        const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
        return response.model as Folder;
      },
      update: async (args) => {
        const payload = {
          type: 'upsert_model_request',
          model: {
            model: 'folder',
            ...args,
          },
        } as InternalEventPayload;
        const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
        return response.model as Folder;
      },
      delete: async (args: { id: string }) => {
        const payload = {
          type: 'delete_model_request',
          model: 'folder',
          id: args.id,
        } as InternalEventPayload;
        const response = await this.#sendForReply<DeleteModelResponse>(context, payload);
        return response.model as Folder;
      },
    },
    cookies: {
      getValue: async (args: GetCookieValueRequest) => {

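Taken together, the two hunks above declare a `folder` API on the plugin `Context` and back it with `list_folders_request`, `upsert_model_request`, and `delete_model_request` payloads. A minimal sketch of how a plugin could drive that API, assuming only the `Context` shape shown in the diff (the helper name and folder lookup logic here are illustrative, not part of the change):

```ts
import type { Context } from '@yaakapp/api';

// Hypothetical helper: ensure a folder with the given name exists in a workspace,
// using only the ctx.folder methods declared in the Context interface above.
async function ensureFolder(ctx: Context, workspaceId: string, name: string) {
  const folders = await ctx.folder.list();          // issues list_folders_request
  const existing = folders.find((f) => f.name === name && f.workspaceId === workspaceId);
  if (existing) return existing;
  return ctx.folder.create({ workspaceId, name });  // upsert_model_request with id: ''
}
```
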
@@ -18,7 +18,7 @@
    "@hono/mcp": "^0.2.3",
    "@hono/node-server": "^1.19.7",
    "@modelcontextprotocol/sdk": "^1.25.2",
    "hono": "^4.11.7",
    "hono": "^4.11.3",
    "zod": "^3.25.76"
  },
  "devDependencies": {

@@ -2,12 +2,6 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import type { McpServerContext } from '../types.js';
import { getWorkspaceContext } from './helpers.js';
import {
  authenticationSchema,
  authenticationTypeSchema,
  headersSchema,
  workspaceIdSchema,
} from './schemas.js';

export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
  server.registerTool(
@@ -16,7 +10,10 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
      title: 'List Folders',
      description: 'List all folders in a workspace',
      inputSchema: {
        workspaceId: workspaceIdSchema,
        workspaceId: z
          .string()
          .optional()
          .describe('Workspace ID (required if multiple workspaces are open)'),
      },
    },
    async ({ workspaceId }) => {
@@ -33,116 +30,4 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
      };
    },
  );

  server.registerTool(
    'get_folder',
    {
      title: 'Get Folder',
      description: 'Get details of a specific folder by ID',
      inputSchema: {
        id: z.string().describe('The folder ID'),
        workspaceId: workspaceIdSchema,
      },
    },
    async ({ id, workspaceId }) => {
      const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
      const folder = await workspaceCtx.yaak.folder.getById({ id });

      return {
        content: [
          {
            type: 'text' as const,
            text: JSON.stringify(folder, null, 2),
          },
        ],
      };
    },
  );

  server.registerTool(
    'create_folder',
    {
      title: 'Create Folder',
      description: 'Create a new folder in a workspace',
      inputSchema: {
        workspaceId: workspaceIdSchema,
        name: z.string().describe('Folder name'),
        folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
        description: z.string().optional().describe('Folder description'),
        sortPriority: z.number().optional().describe('Sort priority for ordering'),
        headers: headersSchema.describe('Default headers to apply to requests in this folder'),
        authenticationType: authenticationTypeSchema,
        authentication: authenticationSchema,
      },
    },
    async ({ workspaceId: ogWorkspaceId, ...args }) => {
      const workspaceCtx = await getWorkspaceContext(ctx, ogWorkspaceId);
      const workspaceId = await workspaceCtx.yaak.window.workspaceId();
      if (!workspaceId) {
        throw new Error('No workspace is open');
      }

      const folder = await workspaceCtx.yaak.folder.create({
        workspaceId: workspaceId,
        ...args,
      });

      return {
        content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
      };
    },
  );

  server.registerTool(
    'update_folder',
    {
      title: 'Update Folder',
      description: 'Update an existing folder',
      inputSchema: {
        id: z.string().describe('Folder ID to update'),
        workspaceId: workspaceIdSchema,
        name: z.string().optional().describe('Folder name'),
        folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
        description: z.string().optional().describe('Folder description'),
        sortPriority: z.number().optional().describe('Sort priority for ordering'),
        headers: headersSchema.describe('Default headers to apply to requests in this folder'),
        authenticationType: authenticationTypeSchema,
        authentication: authenticationSchema,
      },
    },
    async ({ id, workspaceId, ...updates }) => {
      const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
      // Fetch existing folder to merge with updates
      const existing = await workspaceCtx.yaak.folder.getById({ id });
      if (!existing) {
        throw new Error(`Folder with ID ${id} not found`);
      }
      // Merge existing fields with updates
      const folder = await workspaceCtx.yaak.folder.update({
        ...existing,
        ...updates,
        id,
      });
      return {
        content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
      };
    },
  );

  server.registerTool(
    'delete_folder',
    {
      title: 'Delete Folder',
      description: 'Delete a folder by ID',
      inputSchema: {
        id: z.string().describe('Folder ID to delete'),
      },
    },
    async ({ id }) => {
      const folder = await ctx.yaak.folder.delete({ id });
      return {
        content: [{ type: 'text' as const, text: `Deleted: ${folder.name} (${folder.id})` }],
      };
    },
  );
}

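The `update_folder` handler above deliberately re-fetches the folder and spreads the existing model underneath the caller's updates, because the `ctx.folder.update` signature shown in the earlier `Context` hunk expects a near-complete `Folder` plus its `id`. A short sketch of that fetch-merge-update pattern pulled out into a helper; the helper name is hypothetical, and `Folder` being re-exported from `@yaakapp/api` alongside `Context` is an assumption:

```ts
import type { Context, Folder } from '@yaakapp/api';

// Hypothetical helper mirroring the fetch-merge-update pattern used by update_folder.
async function patchFolder(
  yaak: Context,
  id: string,
  updates: Partial<Omit<Folder, 'id' | 'model' | 'createdAt' | 'updatedAt'>>,
): Promise<Folder> {
  const existing = await yaak.folder.getById({ id });
  if (!existing) throw new Error(`Folder with ID ${id} not found`);
  // Existing fields first, caller overrides second, id pinned last.
  return yaak.folder.update({ ...existing, ...updates, id });
}
```
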
@@ -2,15 +2,6 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
||||
import * as z from 'zod';
|
||||
import type { McpServerContext } from '../types.js';
|
||||
import { getWorkspaceContext } from './helpers.js';
|
||||
import {
|
||||
authenticationSchema,
|
||||
authenticationTypeSchema,
|
||||
bodySchema,
|
||||
bodyTypeSchema,
|
||||
headersSchema,
|
||||
urlParametersSchema,
|
||||
workspaceIdSchema,
|
||||
} from './schemas.js';
|
||||
|
||||
export function registerHttpRequestTools(server: McpServer, ctx: McpServerContext) {
|
||||
server.registerTool(
|
||||
@@ -19,7 +10,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
title: 'List HTTP Requests',
|
||||
description: 'List all HTTP requests in a workspace',
|
||||
inputSchema: {
|
||||
workspaceId: workspaceIdSchema,
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
},
|
||||
},
|
||||
async ({ workspaceId }) => {
|
||||
@@ -44,7 +38,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
description: 'Get details of a specific HTTP request by ID',
|
||||
inputSchema: {
|
||||
id: z.string().describe('The HTTP request ID'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId }) => {
|
||||
@@ -70,7 +67,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
inputSchema: {
|
||||
id: z.string().describe('The HTTP request ID to send'),
|
||||
environmentId: z.string().optional().describe('Optional environment ID to use'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId }) => {
|
||||
@@ -99,7 +99,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
title: 'Create HTTP Request',
|
||||
description: 'Create a new HTTP request',
|
||||
inputSchema: {
|
||||
workspaceId: workspaceIdSchema,
|
||||
workspaceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe('Workspace ID (required if multiple workspaces are open)'),
|
||||
name: z
|
||||
.string()
|
||||
.optional()
|
||||
@@ -108,12 +111,62 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
method: z.string().optional().describe('HTTP method (defaults to GET)'),
|
||||
folderId: z.string().optional().describe('Parent folder ID'),
|
||||
description: z.string().optional().describe('Request description'),
|
||||
headers: headersSchema.describe('Request headers'),
|
||||
urlParameters: urlParametersSchema,
|
||||
bodyType: bodyTypeSchema,
|
||||
body: bodySchema,
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
headers: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('Request headers'),
|
||||
urlParameters: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('URL query parameters'),
|
||||
bodyType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
|
||||
),
|
||||
body: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Body content object. Structure varies by bodyType:\n' +
|
||||
'- "binary": { filePath: "/path/to/file" }\n' +
|
||||
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
|
||||
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
|
||||
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
|
||||
'- text-based (application/json, etc.): { text: "raw body content" }',
|
||||
),
|
||||
authenticationType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
|
||||
),
|
||||
authentication: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication configuration object. Structure varies by authenticationType:\n' +
|
||||
'- "basic": { username: "user", password: "pass" }\n' +
|
||||
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
|
||||
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
|
||||
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
|
||||
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
|
||||
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
|
||||
'- "none": {}',
|
||||
),
|
||||
},
|
||||
},
|
||||
async ({ workspaceId: ogWorkspaceId, ...args }) => {
|
||||
@@ -141,18 +194,68 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
|
||||
description: 'Update an existing HTTP request',
|
||||
inputSchema: {
|
||||
id: z.string().describe('HTTP request ID to update'),
|
||||
workspaceId: workspaceIdSchema,
|
||||
workspaceId: z.string().describe('Workspace ID'),
|
||||
name: z.string().optional().describe('Request name'),
|
||||
url: z.string().optional().describe('Request URL'),
|
||||
method: z.string().optional().describe('HTTP method'),
|
||||
folderId: z.string().optional().describe('Parent folder ID'),
|
||||
description: z.string().optional().describe('Request description'),
|
||||
headers: headersSchema.describe('Request headers'),
|
||||
urlParameters: urlParametersSchema,
|
||||
bodyType: bodyTypeSchema,
|
||||
body: bodySchema,
|
||||
authenticationType: authenticationTypeSchema,
|
||||
authentication: authenticationSchema,
|
||||
headers: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('Request headers'),
|
||||
urlParameters: z
|
||||
.array(
|
||||
z.object({
|
||||
name: z.string(),
|
||||
value: z.string(),
|
||||
enabled: z.boolean().default(true),
|
||||
}),
|
||||
)
|
||||
.optional()
|
||||
.describe('URL query parameters'),
|
||||
bodyType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
|
||||
),
|
||||
body: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Body content object. Structure varies by bodyType:\n' +
|
||||
'- "binary": { filePath: "/path/to/file" }\n' +
|
||||
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
|
||||
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
|
||||
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
|
||||
'- text-based (application/json, etc.): { text: "raw body content" }',
|
||||
),
|
||||
authenticationType: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
|
||||
),
|
||||
authentication: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.describe(
|
||||
'Authentication configuration object. Structure varies by authenticationType:\n' +
|
||||
'- "basic": { username: "user", password: "pass" }\n' +
|
||||
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
|
||||
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
|
||||
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
|
||||
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
|
||||
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
|
||||
'- "none": {}',
|
||||
),
|
||||
},
|
||||
},
|
||||
async ({ id, workspaceId, ...updates }) => {
|
||||
|
||||
@@ -1,67 +0,0 @@
import * as z from 'zod';

export const workspaceIdSchema = z
  .string()
  .optional()
  .describe('Workspace ID (required if multiple workspaces are open)');

export const headersSchema = z
  .array(
    z.object({
      name: z.string(),
      value: z.string(),
      enabled: z.boolean().default(true),
    }),
  )
  .optional();

export const urlParametersSchema = z
  .array(
    z.object({
      name: z.string(),
      value: z.string(),
      enabled: z.boolean().default(true),
    }),
  )
  .optional()
  .describe('URL query parameters');

export const bodyTypeSchema = z
  .string()
  .optional()
  .describe(
    'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
  );

export const bodySchema = z
  .record(z.string(), z.any())
  .optional()
  .describe(
    'Body content object. Structure varies by bodyType:\n' +
      '- "binary": { filePath: "/path/to/file" }\n' +
      '- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
      '- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
      '- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
      '- text-based (application/json, etc.): { text: "raw body content" }',
  );

export const authenticationTypeSchema = z
  .string()
  .optional()
  .describe(
    'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent.',
  );

export const authenticationSchema = z
  .record(z.string(), z.any())
  .optional()
  .describe(
    'Authentication configuration object. Structure varies by authenticationType:\n' +
      '- "basic": { username: "user", password: "pass" }\n' +
      '- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
      '- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
      '- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
      '- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
      '- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
      '- "none": {}',
  );

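These shared schema fragments exist so every MCP tool describes `workspaceId`, headers, body, and authentication the same way instead of repeating the inline `z.string().optional()...` definitions visible in the other hunks. A short sketch of the intended reuse, assuming the module layout shown in the diff (`./schemas.js` next to the tool files); the tool name and fields are illustrative:

```ts
import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import { headersSchema, workspaceIdSchema } from './schemas.js';

// Hypothetical tool registration reusing the shared input-schema fragments.
export function registerExampleTool(server: McpServer) {
  server.registerTool(
    'example_tool',
    {
      title: 'Example',
      description: 'Demonstrates the shared input schema fragments',
      inputSchema: {
        workspaceId: workspaceIdSchema,
        headers: headersSchema.describe('Headers to send'),
        name: z.string().describe('A tool-specific field'),
      },
    },
    async ({ name }) => ({
      content: [{ type: 'text' as const, text: `Hello ${name}` }],
    }),
  );
}
```
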
@@ -11,7 +11,6 @@
  "version": "0.1.0",
  "scripts": {
    "build": "yaakcli build",
    "dev": "yaakcli dev",
    "test": "vitest --run tests"
    "dev": "yaakcli dev"
  }
}

@@ -21,8 +21,7 @@ export const plugin: PluginDefinition = {
      },
    ],
    async onApply(_ctx, { values }) {
      const username = values.username ?? '';
      const password = values.password ?? '';
      const { username, password } = values;
      const value = `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`;
      return { setHeaders: [{ name: 'Authorization', value }] };
    },

@@ -1,77 +0,0 @@
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { describe, expect, test } from 'vitest';
|
||||
import { plugin } from '../src';
|
||||
|
||||
const ctx = {} as Context;
|
||||
|
||||
describe('auth-basic', () => {
|
||||
test('Both username and password', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'user', password: 'pass' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('user:pass').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Empty password', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'apikey', password: '' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Missing password (undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { username: 'apikey' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('Missing username (undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: { password: 'secret' },
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':secret').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
|
||||
test('No values (both undefined)', async () => {
|
||||
expect(
|
||||
await plugin.authentication?.onApply(ctx, {
|
||||
values: {},
|
||||
headers: [],
|
||||
url: 'https://yaak.app',
|
||||
method: 'POST',
|
||||
contextId: '111',
|
||||
}),
|
||||
).toEqual({
|
||||
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':').toString('base64')}` }],
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,335 +0,0 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import http from 'node:http';
|
||||
import type { Context } from '@yaakapp/api';
|
||||
|
||||
export const HOSTED_CALLBACK_URL = 'https://oauth.yaak.app/redirect';
|
||||
export const DEFAULT_LOCALHOST_PORT = 8765;
|
||||
const CALLBACK_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
/** Singleton: only one callback server runs at a time across all OAuth flows. */
|
||||
let activeServer: CallbackServerResult | null = null;
|
||||
|
||||
export interface CallbackServerResult {
|
||||
/** The port the server is listening on */
|
||||
port: number;
|
||||
/** The full redirect URI to register with the OAuth provider */
|
||||
redirectUri: string;
|
||||
/** Promise that resolves with the callback URL when received */
|
||||
waitForCallback: () => Promise<string>;
|
||||
/** Stop the server */
|
||||
stop: () => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a local HTTP server to receive OAuth callbacks.
|
||||
* Only one server runs at a time — if a previous server is still active,
|
||||
* it is stopped before starting the new one.
|
||||
* Returns the port, redirect URI, and a promise that resolves when the callback is received.
|
||||
*/
|
||||
export function startCallbackServer(options: {
|
||||
/** Specific port to use, or 0 for random available port */
|
||||
port?: number;
|
||||
/** Path for the callback endpoint */
|
||||
path?: string;
|
||||
/** Timeout in milliseconds (default 5 minutes) */
|
||||
timeoutMs?: number;
|
||||
}): Promise<CallbackServerResult> {
|
||||
// Stop any previously active server before starting a new one
|
||||
if (activeServer) {
|
||||
console.log('[oauth2] Stopping previous callback server before starting new one');
|
||||
activeServer.stop();
|
||||
activeServer = null;
|
||||
}
|
||||
|
||||
const { port = 0, path = '/callback', timeoutMs = CALLBACK_TIMEOUT_MS } = options;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let callbackResolve: ((url: string) => void) | null = null;
|
||||
let callbackReject: ((err: Error) => void) | null = null;
|
||||
let timeoutHandle: ReturnType<typeof setTimeout> | null = null;
|
||||
let stopped = false;
|
||||
|
||||
const server = http.createServer((req: IncomingMessage, res: ServerResponse) => {
|
||||
const reqUrl = new URL(req.url ?? '/', `http://${req.headers.host}`);
|
||||
|
||||
// Only handle the callback path
|
||||
if (reqUrl.pathname !== path && reqUrl.pathname !== `${path}/`) {
|
||||
res.writeHead(404, { 'Content-Type': 'text/plain' });
|
||||
res.end('Not Found');
|
||||
return;
|
||||
}
|
||||
|
||||
if (req.method === 'POST') {
|
||||
// POST: read JSON body with the final callback URL and resolve
|
||||
let body = '';
|
||||
req.on('data', (chunk: Buffer) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', () => {
|
||||
try {
|
||||
const { url: callbackUrl } = JSON.parse(body);
|
||||
if (!callbackUrl || typeof callbackUrl !== 'string') {
|
||||
res.writeHead(400, { 'Content-Type': 'text/plain' });
|
||||
res.end('Missing url in request body');
|
||||
return;
|
||||
}
|
||||
|
||||
// Send success response
|
||||
res.writeHead(200, { 'Content-Type': 'text/plain' });
|
||||
res.end('OK');
|
||||
|
||||
// Resolve the callback promise
|
||||
if (callbackResolve) {
|
||||
callbackResolve(callbackUrl);
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
|
||||
// Stop the server after a short delay to ensure response is sent
|
||||
setTimeout(() => stopServer(), 100);
|
||||
} catch {
|
||||
res.writeHead(400, { 'Content-Type': 'text/plain' });
|
||||
res.end('Invalid JSON');
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// GET: serve intermediate page that reads the fragment and POSTs back
|
||||
res.writeHead(200, { 'Content-Type': 'text/html' });
|
||||
res.end(getFragmentForwardingHtml());
|
||||
});
|
||||
|
||||
server.on('error', (err: Error) => {
|
||||
if (!stopped) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
const stopServer = () => {
|
||||
if (stopped) return;
|
||||
stopped = true;
|
||||
|
||||
// Clear the singleton reference
|
||||
if (activeServer?.stop === stopServer) {
|
||||
activeServer = null;
|
||||
}
|
||||
|
||||
if (timeoutHandle) {
|
||||
clearTimeout(timeoutHandle);
|
||||
timeoutHandle = null;
|
||||
}
|
||||
|
||||
server.close();
|
||||
|
||||
if (callbackReject) {
|
||||
callbackReject(new Error('Callback server stopped'));
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
};
|
||||
|
||||
server.listen(port, '127.0.0.1', () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === 'string') {
|
||||
reject(new Error('Failed to get server address'));
|
||||
return;
|
||||
}
|
||||
|
||||
const actualPort = address.port;
|
||||
const redirectUri = `http://127.0.0.1:${actualPort}${path}`;
|
||||
|
||||
console.log(`[oauth2] Callback server listening on ${redirectUri}`);
|
||||
|
||||
const result: CallbackServerResult = {
|
||||
port: actualPort,
|
||||
redirectUri,
|
||||
waitForCallback: () => {
|
||||
return new Promise<string>((res, rej) => {
|
||||
if (stopped) {
|
||||
rej(new Error('Callback server already stopped'));
|
||||
return;
|
||||
}
|
||||
|
||||
callbackResolve = res;
|
||||
callbackReject = rej;
|
||||
|
||||
// Set timeout
|
||||
timeoutHandle = setTimeout(() => {
|
||||
if (callbackReject) {
|
||||
callbackReject(new Error('Authorization timed out'));
|
||||
callbackResolve = null;
|
||||
callbackReject = null;
|
||||
}
|
||||
stopServer();
|
||||
}, timeoutMs);
|
||||
});
|
||||
},
|
||||
stop: stopServer,
|
||||
};
|
||||
|
||||
activeServer = result;
|
||||
resolve(result);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the redirect URI for the hosted callback page.
|
||||
* The hosted page will redirect to the local server with the OAuth response.
|
||||
*/
|
||||
export function buildHostedCallbackRedirectUri(localPort: number, localPath: string): string {
|
||||
const localRedirectUri = `http://127.0.0.1:${localPort}${localPath}`;
|
||||
// The hosted callback page will read params and redirect to the local server
|
||||
return `${HOSTED_CALLBACK_URL}?redirect_to=${encodeURIComponent(localRedirectUri)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Open an authorization URL in the system browser, start a local callback server,
|
||||
* and wait for the OAuth provider to redirect back.
|
||||
*
|
||||
* Returns the raw callback URL and the redirect URI that was registered with the
|
||||
* OAuth provider (needed for token exchange).
|
||||
*/
|
||||
export async function getRedirectUrlViaExternalBrowser(
|
||||
ctx: Context,
|
||||
authorizationUrl: URL,
|
||||
options: {
|
||||
callbackType: 'localhost' | 'hosted';
|
||||
callbackPort?: number;
|
||||
},
|
||||
): Promise<{ callbackUrl: string; redirectUri: string }> {
|
||||
const { callbackType, callbackPort } = options;
|
||||
|
||||
// Determine port based on callback type:
|
||||
// - localhost: use specified port or default stable port
|
||||
// - hosted: use random port (0) since hosted page redirects to local
|
||||
const port = callbackType === 'localhost' ? (callbackPort ?? DEFAULT_LOCALHOST_PORT) : 0;
|
||||
|
||||
console.log(
|
||||
`[oauth2] Starting callback server (type: ${callbackType}, port: ${port || 'random'})`,
|
||||
);
|
||||
|
||||
const server = await startCallbackServer({
|
||||
port,
|
||||
path: '/callback',
|
||||
});
|
||||
|
||||
try {
|
||||
// Determine the redirect URI to send to the OAuth provider
|
||||
let oauthRedirectUri: string;
|
||||
|
||||
if (callbackType === 'hosted') {
|
||||
oauthRedirectUri = buildHostedCallbackRedirectUri(server.port, '/callback');
|
||||
console.log('[oauth2] Using hosted callback redirect:', oauthRedirectUri);
|
||||
} else {
|
||||
oauthRedirectUri = server.redirectUri;
|
||||
console.log('[oauth2] Using localhost callback redirect:', oauthRedirectUri);
|
||||
}
|
||||
|
||||
// Set the redirect URI on the authorization URL
|
||||
authorizationUrl.searchParams.set('redirect_uri', oauthRedirectUri);
|
||||
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Opening external browser:', authorizationUrlStr);
|
||||
|
||||
// Show toast to inform user
|
||||
await ctx.toast.show({
|
||||
message: 'Opening browser for authorization...',
|
||||
icon: 'info',
|
||||
timeout: 3000,
|
||||
});
|
||||
|
||||
// Open the system browser
|
||||
await ctx.window.openExternalUrl(authorizationUrlStr);
|
||||
|
||||
// Wait for the callback
|
||||
console.log('[oauth2] Waiting for callback on', server.redirectUri);
|
||||
const callbackUrl = await server.waitForCallback();
|
||||
|
||||
console.log('[oauth2] Received callback:', callbackUrl);
|
||||
|
||||
return { callbackUrl, redirectUri: oauthRedirectUri };
|
||||
} finally {
|
||||
server.stop();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Intermediate HTML page that reads the URL fragment and _fragment query param,
|
||||
* reconstructs a proper OAuth callback URL, and POSTs it back to the server.
|
||||
*
|
||||
* Handles three cases:
|
||||
* - Localhost implicit: fragment is in location.hash (e.g. #access_token=...)
|
||||
* - Hosted implicit: fragment was converted to ?_fragment=... by the hosted redirect page
|
||||
* - Auth code: no fragment, code is already in query params
|
||||
*/
|
||||
function getFragmentForwardingHtml(): string {
|
||||
return `<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Yaak</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
min-height: 100vh;
|
||||
background: hsl(244,23%,14%);
|
||||
color: hsl(245,23%,85%);
|
||||
}
|
||||
.container { text-align: center; }
|
||||
.logo { display: block; width: 100px; height: 100px; margin: 0 auto 32px; border-radius: 50%; }
|
||||
h1 { font-size: 28px; font-weight: 600; margin-bottom: 12px; }
|
||||
p { font-size: 16px; color: hsl(245,18%,58%); }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<svg class="logo" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg"><defs><linearGradient id="g" x1="0" y1="0" x2="1" y2="0" gradientUnits="userSpaceOnUse" gradientTransform="matrix(649.94,712.03,-712.03,649.94,179.25,220.59)"><stop offset="0" stop-color="#4cc48c"/><stop offset=".5" stop-color="#476cc9"/><stop offset="1" stop-color="#ba1ab7"/></linearGradient></defs><rect x="0" y="0" width="1024" height="1024" fill="url(#g)"/><g transform="matrix(0.822,0,0,0.822,91.26,91.26)"><path d="M766.775,105.176C902.046,190.129 992.031,340.639 992.031,512C992.031,706.357 876.274,873.892 710,949.361C684.748,838.221 632.417,791.074 538.602,758.96C536.859,790.593 545.561,854.983 522.327,856.611C477.951,859.719 321.557,782.368 310.75,710.135C300.443,641.237 302.536,535.834 294.475,482.283C86.974,483.114 245.65,303.256 245.65,303.256L261.925,368.357L294.475,368.357C294.475,368.357 298.094,296.03 310.75,286.981C326.511,275.713 366.457,254.592 473.502,254.431C519.506,190.629 692.164,133.645 766.775,105.176ZM603.703,352.082C598.577,358.301 614.243,384.787 623.39,401.682C639.967,432.299 672.34,459.32 760.231,456.739C780.796,456.135 808.649,456.743 831.555,448.316C919.689,369.191 665.548,260.941 652.528,270.706C629.157,288.235 677.433,340.481 685.079,352.082C663.595,350.818 630.521,352.121 603.703,352.082ZM515.817,516.822C491.026,516.822 470.898,536.949 470.898,561.741C470.898,586.532 491.026,606.66 515.817,606.66C540.609,606.66 560.736,586.532 560.736,561.741C560.736,536.949 540.609,516.822 515.817,516.822ZM656.608,969.83C610.979,984.25 562.391,992.031 512,992.031C247.063,992.031 31.969,776.937 31.969,512C31.969,247.063 247.063,31.969 512,31.969C581.652,31.969 647.859,46.835 707.634,73.574C674.574,86.913 627.224,104.986 620,103.081C343.573,30.201 98.64,283.528 98.64,511.993C98.64,761.842 376.244,989.043 627.831,910C637.21,907.053 645.743,936.753 656.608,969.83Z" fill="#fff"/></g></svg>
|
||||
<h1 id="title">Authorizing...</h1>
|
||||
<p id="message">Please wait</p>
|
||||
</div>
|
||||
<script>
|
||||
(function() {
|
||||
var title = document.getElementById('title');
|
||||
var message = document.getElementById('message');
|
||||
var url = new URL(window.location.href);
|
||||
var fragment = window.location.hash;
|
||||
var fragmentParam = url.searchParams.get('_fragment');
|
||||
|
||||
// Build the final callback URL:
|
||||
// 1. If _fragment query param exists (from hosted redirect), convert it back to a real fragment
|
||||
// 2. If location.hash exists (direct localhost implicit), use it as-is
|
||||
// 3. Otherwise (auth code flow), use the URL as-is with query params
|
||||
if (fragmentParam) {
|
||||
url.searchParams.delete('_fragment');
|
||||
url.hash = fragmentParam;
|
||||
} else if (fragment && fragment.length > 1) {
|
||||
url.hash = fragment;
|
||||
}
|
||||
|
||||
// POST the final URL back to the callback server
|
||||
fetch(url.pathname, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ url: url.toString() })
|
||||
}).then(function(res) {
|
||||
if (res.ok) {
|
||||
title.textContent = 'Authorization Complete';
|
||||
message.textContent = 'You may close this tab and return to Yaak';
|
||||
} else {
|
||||
title.textContent = 'Authorization Failed';
|
||||
message.textContent = 'Something went wrong. Please try again.';
|
||||
}
|
||||
}).catch(function() {
|
||||
title.textContent = 'Authorization Failed';
|
||||
message.textContent = 'Something went wrong. Please try again.';
|
||||
});
|
||||
})();
|
||||
</script>
|
||||
</body>
|
||||
</html>`;
|
||||
}
|
||||
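The `callbackServer.ts` module above documents its own flow in the doc comments: start a singleton local HTTP server, hand the resulting `redirectUri` to the OAuth provider, and resolve once the intermediate page POSTs the final callback URL back. A minimal sketch of how a grant-type flow is expected to drive it, assuming only the exported names and option shapes shown in the diff (error handling and the token exchange are elided):

```ts
import { startCallbackServer } from './callbackServer';

// Minimal sketch of the localhost callback flow described above.
async function receiveOAuthCallback(): Promise<string> {
  const server = await startCallbackServer({ port: 8765, path: '/callback' });
  try {
    // Register server.redirectUri with the OAuth provider, open the browser,
    // then wait for the intermediate page to POST the final callback URL back.
    console.log('[oauth2] Redirect URI:', server.redirectUri);
    return await server.waitForCallback();
  } finally {
    server.stop();
  }
}
```
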
@@ -1,6 +1,5 @@
|
||||
import { createHash, randomBytes } from 'node:crypto';
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { getRedirectUrlViaExternalBrowser } from '../callbackServer';
|
||||
import { fetchAccessToken } from '../fetchAccessToken';
|
||||
import { getOrRefreshAccessToken } from '../getOrRefreshAccessToken';
|
||||
import type { AccessToken, TokenStoreArgs } from '../store';
|
||||
@@ -11,15 +10,6 @@ export const PKCE_SHA256 = 'S256';
|
||||
export const PKCE_PLAIN = 'plain';
|
||||
export const DEFAULT_PKCE_METHOD = PKCE_SHA256;
|
||||
|
||||
export type CallbackType = 'localhost' | 'hosted';
|
||||
|
||||
export interface ExternalBrowserOptions {
|
||||
useExternalBrowser: boolean;
|
||||
callbackType: CallbackType;
|
||||
/** Port for localhost callback (only used when callbackType is 'localhost') */
|
||||
callbackPort?: number;
|
||||
}
|
||||
|
||||
export async function getAuthorizationCode(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
@@ -35,7 +25,6 @@ export async function getAuthorizationCode(
|
||||
credentialsInBody,
|
||||
pkce,
|
||||
tokenName,
|
||||
externalBrowser,
|
||||
}: {
|
||||
authorizationUrl: string;
|
||||
accessTokenUrl: string;
|
||||
@@ -51,7 +40,6 @@ export async function getAuthorizationCode(
|
||||
codeVerifier: string;
|
||||
} | null;
|
||||
tokenName: 'access_token' | 'id_token';
|
||||
externalBrowser?: ExternalBrowserOptions;
|
||||
},
|
||||
): Promise<AccessToken> {
|
||||
const tokenArgs: TokenStoreArgs = {
|
||||
@@ -80,6 +68,7 @@ export async function getAuthorizationCode(
|
||||
}
|
||||
authorizationUrl.searchParams.set('response_type', 'code');
|
||||
authorizationUrl.searchParams.set('client_id', clientId);
|
||||
if (redirectUri) authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
if (scope) authorizationUrl.searchParams.set('scope', scope);
|
||||
if (state) authorizationUrl.searchParams.set('state', state);
|
||||
if (audience) authorizationUrl.searchParams.set('audience', audience);
|
||||
@@ -91,65 +80,12 @@ export async function getAuthorizationCode(
|
||||
authorizationUrl.searchParams.set('code_challenge_method', pkce.challengeMethod);
|
||||
}
|
||||
|
||||
let code: string;
|
||||
let actualRedirectUri: string | null = redirectUri;
|
||||
|
||||
// Use external browser flow if enabled
|
||||
if (externalBrowser?.useExternalBrowser) {
|
||||
const result = await getRedirectUrlViaExternalBrowser(ctx, authorizationUrl, {
|
||||
callbackType: externalBrowser.callbackType,
|
||||
callbackPort: externalBrowser.callbackPort,
|
||||
});
|
||||
// Pass null to skip redirect URI matching — the callback came from our own local server
|
||||
const extractedCode = extractCode(result.callbackUrl, null);
|
||||
if (!extractedCode) {
|
||||
throw new Error('No authorization code found in callback URL');
|
||||
}
|
||||
code = extractedCode;
|
||||
actualRedirectUri = result.redirectUri;
|
||||
} else {
|
||||
// Use embedded browser flow (original behavior)
|
||||
if (redirectUri) {
|
||||
authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
}
|
||||
code = await getCodeViaEmbeddedBrowser(ctx, contextId, authorizationUrl, redirectUri);
|
||||
}
|
||||
|
||||
console.log('[oauth2] Code found');
|
||||
const response = await fetchAccessToken(ctx, {
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl,
|
||||
clientId,
|
||||
clientSecret,
|
||||
scope,
|
||||
audience,
|
||||
credentialsInBody,
|
||||
params: [
|
||||
{ name: 'code', value: code },
|
||||
...(pkce ? [{ name: 'code_verifier', value: pkce.codeVerifier }] : []),
|
||||
...(actualRedirectUri ? [{ name: 'redirect_uri', value: actualRedirectUri }] : []),
|
||||
],
|
||||
});
|
||||
|
||||
return storeToken(ctx, tokenArgs, response, tokenName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authorization code using the embedded browser window.
|
||||
* This is the original flow that monitors navigation events.
|
||||
*/
|
||||
async function getCodeViaEmbeddedBrowser(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
authorizationUrl: URL,
|
||||
redirectUri: string | null,
|
||||
): Promise<string> {
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Authorizing via embedded browser', authorizationUrlStr);
|
||||
console.log('[oauth2] Authorizing', authorizationUrlStr);
|
||||
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: Required for this pattern
|
||||
return new Promise<string>(async (resolve, reject) => {
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: none
|
||||
const code = await new Promise<string>(async (resolve, reject) => {
|
||||
let foundCode = false;
|
||||
const { close } = await ctx.window.openUrl({
|
||||
dataDirKey,
|
||||
@@ -174,12 +110,31 @@ async function getCodeViaEmbeddedBrowser(
|
||||
return;
|
||||
}
|
||||
|
||||
// Close the window here, because we don't need it anymore!
|
||||
foundCode = true;
|
||||
close();
|
||||
resolve(code);
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
console.log('[oauth2] Code found');
|
||||
const response = await fetchAccessToken(ctx, {
|
||||
grantType: 'authorization_code',
|
||||
accessTokenUrl,
|
||||
clientId,
|
||||
clientSecret,
|
||||
scope,
|
||||
audience,
|
||||
credentialsInBody,
|
||||
params: [
|
||||
{ name: 'code', value: code },
|
||||
...(pkce ? [{ name: 'code_verifier', value: pkce.codeVerifier }] : []),
|
||||
...(redirectUri ? [{ name: 'redirect_uri', value: redirectUri }] : []),
|
||||
],
|
||||
});
|
||||
|
||||
return storeToken(ctx, tokenArgs, response, tokenName);
|
||||
}
|
||||
|
||||
export function genPkceCodeVerifier() {
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import type { Context } from '@yaakapp/api';
|
||||
import { getRedirectUrlViaExternalBrowser } from '../callbackServer';
|
||||
import type { AccessToken, AccessTokenRawResponse } from '../store';
|
||||
import { getDataDirKey, getToken, storeToken } from '../store';
|
||||
import { isTokenExpired } from '../util';
|
||||
import type { ExternalBrowserOptions } from './authorizationCode';
|
||||
|
||||
export async function getImplicit(
|
||||
ctx: Context,
|
||||
@@ -17,7 +15,6 @@ export async function getImplicit(
|
||||
state,
|
||||
audience,
|
||||
tokenName,
|
||||
externalBrowser,
|
||||
}: {
|
||||
authorizationUrl: string;
|
||||
responseType: string;
|
||||
@@ -27,7 +24,6 @@ export async function getImplicit(
|
||||
state: string | null;
|
||||
audience: string | null;
|
||||
tokenName: 'access_token' | 'id_token';
|
||||
externalBrowser?: ExternalBrowserOptions;
|
||||
},
|
||||
): Promise<AccessToken> {
|
||||
const tokenArgs = {
|
||||
@@ -47,8 +43,9 @@ export async function getImplicit(
|
||||
} catch {
|
||||
throw new Error(`Invalid authorization URL "${authorizationUrlRaw}"`);
|
||||
}
|
||||
authorizationUrl.searchParams.set('response_type', responseType);
|
||||
authorizationUrl.searchParams.set('response_type', 'token');
|
||||
authorizationUrl.searchParams.set('client_id', clientId);
|
||||
if (redirectUri) authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
if (scope) authorizationUrl.searchParams.set('scope', scope);
|
||||
if (state) authorizationUrl.searchParams.set('state', state);
|
||||
if (audience) authorizationUrl.searchParams.set('audience', audience);
|
||||
@@ -59,55 +56,11 @@ export async function getImplicit(
|
||||
);
|
||||
}
|
||||
|
||||
let newToken: AccessToken;
|
||||
|
||||
// Use external browser flow if enabled
|
||||
if (externalBrowser?.useExternalBrowser) {
|
||||
const result = await getRedirectUrlViaExternalBrowser(ctx, authorizationUrl, {
|
||||
callbackType: externalBrowser.callbackType,
|
||||
callbackPort: externalBrowser.callbackPort,
|
||||
});
|
||||
newToken = await extractImplicitToken(ctx, result.callbackUrl, tokenArgs, tokenName);
|
||||
} else {
|
||||
// Use embedded browser flow (original behavior)
|
||||
if (redirectUri) {
|
||||
authorizationUrl.searchParams.set('redirect_uri', redirectUri);
|
||||
}
|
||||
newToken = await getTokenViaEmbeddedBrowser(
|
||||
ctx,
|
||||
contextId,
|
||||
authorizationUrl,
|
||||
tokenArgs,
|
||||
tokenName,
|
||||
);
|
||||
}
|
||||
|
||||
return newToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get token using the embedded browser window.
|
||||
* This is the original flow that monitors navigation events.
|
||||
*/
|
||||
async function getTokenViaEmbeddedBrowser(
|
||||
ctx: Context,
|
||||
contextId: string,
|
||||
authorizationUrl: URL,
|
||||
tokenArgs: {
|
||||
contextId: string;
|
||||
clientId: string;
|
||||
accessTokenUrl: null;
|
||||
authorizationUrl: string;
|
||||
},
|
||||
tokenName: 'access_token' | 'id_token',
|
||||
): Promise<AccessToken> {
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
console.log('[oauth2] Authorizing via embedded browser (implicit)', authorizationUrlStr);
|
||||
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: Required for this pattern
|
||||
return new Promise<AccessToken>(async (resolve, reject) => {
|
||||
// biome-ignore lint/suspicious/noAsyncPromiseExecutor: none
|
||||
const newToken = await new Promise<AccessToken>(async (resolve, reject) => {
|
||||
let foundAccessToken = false;
|
||||
const authorizationUrlStr = authorizationUrl.toString();
|
||||
const dataDirKey = await getDataDirKey(ctx, contextId);
|
||||
const { close } = await ctx.window.openUrl({
|
||||
dataDirKey,
|
||||
url: authorizationUrlStr,
|
||||
@@ -144,56 +97,6 @@ async function getTokenViaEmbeddedBrowser(
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the implicit grant token from a callback URL and store it.
|
||||
*/
|
||||
async function extractImplicitToken(
|
||||
ctx: Context,
|
||||
callbackUrl: string,
|
||||
tokenArgs: {
|
||||
contextId: string;
|
||||
clientId: string;
|
||||
accessTokenUrl: null;
|
||||
authorizationUrl: string;
|
||||
},
|
||||
tokenName: 'access_token' | 'id_token',
|
||||
): Promise<AccessToken> {
|
||||
const url = new URL(callbackUrl);
|
||||
|
||||
// Check for errors
|
||||
if (url.searchParams.has('error')) {
|
||||
throw new Error(`Failed to authorize: ${url.searchParams.get('error')}`);
|
||||
}
|
||||
|
||||
// Extract token from fragment
|
||||
const hash = url.hash.slice(1);
|
||||
const params = new URLSearchParams(hash);
|
||||
|
||||
// Also check query params (in case fragment was converted)
|
||||
const accessToken = params.get(tokenName) ?? url.searchParams.get(tokenName);
|
||||
if (!accessToken) {
|
||||
throw new Error(`No ${tokenName} found in callback URL`);
|
||||
}
|
||||
|
||||
// Build response from params (prefer fragment, fall back to query)
|
||||
const response: AccessTokenRawResponse = {
|
||||
access_token: params.get('access_token') ?? url.searchParams.get('access_token') ?? '',
|
||||
token_type: params.get('token_type') ?? url.searchParams.get('token_type') ?? undefined,
|
||||
expires_in: params.has('expires_in')
|
||||
? parseInt(params.get('expires_in') ?? '0', 10)
|
||||
: url.searchParams.has('expires_in')
|
||||
? parseInt(url.searchParams.get('expires_in') ?? '0', 10)
|
||||
: undefined,
|
||||
scope: params.get('scope') ?? url.searchParams.get('scope') ?? undefined,
|
||||
};
|
||||
|
||||
// Include id_token if present
|
||||
const idToken = params.get('id_token') ?? url.searchParams.get('id_token');
|
||||
if (idToken) {
|
||||
response.id_token = idToken;
|
||||
}
|
||||
|
||||
return storeToken(ctx, tokenArgs, response);
|
||||
|
||||
return newToken;
|
||||
}
|
||||
|
||||
@@ -5,9 +5,7 @@ import type {
|
||||
JsonPrimitive,
|
||||
PluginDefinition,
|
||||
} from '@yaakapp/api';
|
||||
import { DEFAULT_LOCALHOST_PORT, HOSTED_CALLBACK_URL } from './callbackServer';
|
||||
import {
|
||||
type CallbackType,
|
||||
DEFAULT_PKCE_METHOD,
|
||||
genPkceCodeVerifier,
|
||||
getAuthorizationCode,
|
||||
@@ -136,6 +134,8 @@ export const plugin: PluginDefinition = {
|
||||
defaultValue: defaultGrantType,
|
||||
options: grantTypes,
|
||||
},
|
||||
|
||||
// Always-present fields
|
||||
{
|
||||
type: 'text',
|
||||
name: 'clientId',
|
||||
@@ -169,105 +169,11 @@ export const plugin: PluginDefinition = {
|
||||
completionOptions: accessTokenUrls.map((url) => ({ label: url, value: url })),
|
||||
},
|
||||
{
|
||||
type: 'banner',
|
||||
inputs: [
|
||||
{
|
||||
type: 'checkbox',
|
||||
name: 'useExternalBrowser',
|
||||
label: 'Use External Browser',
|
||||
description:
|
||||
'Open authorization URL in your system browser instead of the embedded browser. ' +
|
||||
'Useful when the OAuth provider blocks embedded browsers or you need existing browser sessions.',
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'redirectUri',
|
||||
label: 'Redirect URI',
|
||||
description:
|
||||
'URI the OAuth provider redirects to after authorization. Yaak intercepts this automatically in its embedded browser so any valid URI will work.',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser }) => !useExternalBrowser,
|
||||
),
|
||||
},
|
||||
{
|
||||
type: 'h_stack',
|
||||
inputs: [
|
||||
{
|
||||
type: 'select',
|
||||
name: 'callbackType',
|
||||
label: 'Callback Type',
|
||||
description:
|
||||
'"Hosted Redirect" uses an external Yaak-hosted endpoint. "Localhost" starts a local server to receive the callback.',
|
||||
defaultValue: 'hosted',
|
||||
options: [
|
||||
{ label: 'Hosted Redirect', value: 'hosted' },
|
||||
{ label: 'Localhost', value: 'localhost' },
|
||||
],
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser }) => !!useExternalBrowser,
|
||||
),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'callbackPort',
|
||||
label: 'Callback Port',
|
||||
placeholder: `${DEFAULT_LOCALHOST_PORT}`,
|
||||
description:
|
||||
'Port for the local callback server. Defaults to ' +
|
||||
DEFAULT_LOCALHOST_PORT +
|
||||
' if empty.',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(
|
||||
['authorization_code', 'implicit'],
|
||||
({ useExternalBrowser, callbackType }) =>
|
||||
!!useExternalBrowser && callbackType === 'localhost',
|
||||
),
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'banner',
|
||||
color: 'info',
|
||||
inputs: [
|
||||
{
|
||||
type: 'markdown',
|
||||
content: 'Redirect URI to Register',
|
||||
async dynamic(_ctx, { values }) {
|
||||
const grantType = String(values.grantType ?? defaultGrantType);
|
||||
const useExternalBrowser = !!values.useExternalBrowser;
|
||||
const callbackType = (stringArg(values, 'callbackType') ||
|
||||
'localhost') as CallbackType;
|
||||
|
||||
// Only show for authorization_code and implicit with external browser enabled
|
||||
if (
|
||||
!['authorization_code', 'implicit'].includes(grantType) ||
|
||||
!useExternalBrowser
|
||||
) {
|
||||
return { hidden: true };
|
||||
}
|
||||
|
||||
// Compute the redirect URI based on callback type
|
||||
let redirectUri: string;
|
||||
if (callbackType === 'hosted') {
|
||||
redirectUri = HOSTED_CALLBACK_URL;
|
||||
} else {
|
||||
const port = intArg(values, 'callbackPort') || DEFAULT_LOCALHOST_PORT;
|
||||
redirectUri = `http://127.0.0.1:${port}/callback`;
|
||||
}
|
||||
|
||||
return {
|
||||
hidden: false,
|
||||
content: `Register \`${redirectUri}\` as a redirect URI with your OAuth provider.`,
|
||||
};
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
type: 'text',
|
||||
name: 'redirectUri',
|
||||
label: 'Redirect URI',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
@@ -276,8 +182,12 @@ export const plugin: PluginDefinition = {
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{ type: 'text', name: 'scope', label: 'Scope', optional: true },
|
||||
{ type: 'text', name: 'audience', label: 'Audience', optional: true },
|
||||
{
|
||||
type: 'text',
|
||||
name: 'audience',
|
||||
label: 'Audience',
|
||||
optional: true,
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
name: 'tokenName',
|
||||
@@ -293,54 +203,44 @@ export const plugin: PluginDefinition = {
|
||||
dynamic: hiddenIfNot(['authorization_code', 'implicit']),
|
||||
},
|
||||
{
|
||||
type: 'banner',
|
||||
inputs: [
|
||||
{
|
||||
type: 'checkbox',
|
||||
name: 'usePkce',
|
||||
label: 'Use PKCE',
|
||||
dynamic: hiddenIfNot(['authorization_code']),
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
name: 'pkceChallengeMethod',
|
||||
label: 'Code Challenge Method',
|
||||
options: [
|
||||
{ label: 'SHA-256', value: PKCE_SHA256 },
|
||||
{ label: 'Plain', value: PKCE_PLAIN },
|
||||
],
|
||||
defaultValue: DEFAULT_PKCE_METHOD,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'pkceCodeChallenge',
|
||||
label: 'Code Verifier',
|
||||
placeholder: 'Automatically generated when not set',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
],
|
||||
type: 'checkbox',
|
||||
name: 'usePkce',
|
||||
label: 'Use PKCE',
|
||||
dynamic: hiddenIfNot(['authorization_code']),
|
||||
},
|
||||
{
|
||||
type: 'h_stack',
|
||||
inputs: [
|
||||
{
|
||||
type: 'text',
|
||||
name: 'username',
|
||||
label: 'Username',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'password',
|
||||
label: 'Password',
|
||||
password: true,
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
type: 'select',
|
||||
name: 'pkceChallengeMethod',
|
||||
label: 'Code Challenge Method',
|
||||
options: [
|
||||
{ label: 'SHA-256', value: PKCE_SHA256 },
|
||||
{ label: 'Plain', value: PKCE_PLAIN },
|
||||
],
|
||||
defaultValue: DEFAULT_PKCE_METHOD,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'pkceCodeChallenge',
|
||||
label: 'Code Verifier',
|
||||
placeholder: 'Automatically generated when not set',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['authorization_code'], ({ usePkce }) => !!usePkce),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'username',
|
||||
label: 'Username',
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
{
|
||||
type: 'text',
|
||||
name: 'password',
|
||||
label: 'Password',
|
||||
password: true,
|
||||
optional: true,
|
||||
dynamic: hiddenIfNot(['password']),
|
||||
},
|
||||
{
|
||||
type: 'select',
|
||||
@@ -358,6 +258,7 @@ export const plugin: PluginDefinition = {
|
||||
type: 'accordion',
|
||||
label: 'Advanced',
|
||||
inputs: [
|
||||
{ type: 'text', name: 'scope', label: 'Scope', optional: true },
|
||||
{
|
||||
type: 'text',
|
||||
name: 'headerName',
|
||||
@@ -420,16 +321,6 @@ export const plugin: PluginDefinition = {
|
||||
const credentialsInBody = values.credentials === 'body';
|
||||
const tokenName = values.tokenName === 'id_token' ? 'id_token' : 'access_token';
|
||||
|
||||
// Build external browser options if enabled
|
||||
const useExternalBrowser = !!values.useExternalBrowser;
|
||||
const externalBrowserOptions = useExternalBrowser
|
||||
? {
|
||||
useExternalBrowser: true,
|
||||
callbackType: (stringArg(values, 'callbackType') || 'localhost') as CallbackType,
|
||||
callbackPort: intArg(values, 'callbackPort') ?? undefined,
|
||||
}
|
||||
: undefined;
|
||||
|
||||
let token: AccessToken;
|
||||
if (grantType === 'authorization_code') {
|
||||
const authorizationUrl = stringArg(values, 'authorizationUrl');
|
||||
@@ -457,7 +348,6 @@ export const plugin: PluginDefinition = {
|
||||
}
|
||||
: null,
|
||||
tokenName: tokenName,
|
||||
externalBrowser: externalBrowserOptions,
|
||||
});
|
||||
} else if (grantType === 'implicit') {
|
||||
const authorizationUrl = stringArg(values, 'authorizationUrl');
|
||||
@@ -472,7 +362,6 @@ export const plugin: PluginDefinition = {
|
||||
audience: stringArgOrNull(values, 'audience'),
|
||||
state: stringArgOrNull(values, 'state'),
|
||||
tokenName: tokenName,
|
||||
externalBrowser: externalBrowserOptions,
|
||||
});
|
||||
} else if (grantType === 'client_credentials') {
|
||||
const accessTokenUrl = stringArg(values, 'accessTokenUrl');
|
||||
@@ -525,10 +414,3 @@ function stringArg(values: Record<string, JsonPrimitive | undefined>, name: stri
|
||||
if (!arg) return '';
|
||||
return arg;
|
||||
}
|
||||
|
||||
function intArg(values: Record<string, JsonPrimitive | undefined>, name: string): number | null {
|
||||
const arg = values[name];
|
||||
if (arg == null || arg === '') return null;
|
||||
const num = parseInt(`${arg}`, 10);
|
||||
return Number.isNaN(num) ? null : num;
|
||||
}
|
||||
|
||||
@@ -19,6 +19,9 @@ export const synthwave84: Theme = {
    danger: 'hsl(340, 100%, 65%)',
  },
  components: {
    dialog: {
      surface: 'hsl(253, 45%, 12%)',
    },
    sidebar: {
      surface: 'hsl(253, 42%, 18%)',
      border: 'hsl(253, 40%, 22%)',

@@ -1,161 +0,0 @@
|
||||
import { open } from '@tauri-apps/plugin-dialog';
|
||||
import { gitClone } from '@yaakapp-internal/git';
|
||||
import { useState } from 'react';
|
||||
import { openWorkspaceFromSyncDir } from '../commands/openWorkspaceFromSyncDir';
|
||||
import { appInfo } from '../lib/appInfo';
|
||||
import { showErrorToast } from '../lib/toast';
|
||||
import { Banner } from './core/Banner';
|
||||
import { Button } from './core/Button';
|
||||
import { Checkbox } from './core/Checkbox';
|
||||
import { IconButton } from './core/IconButton';
|
||||
import { PlainInput } from './core/PlainInput';
|
||||
import { VStack } from './core/Stacks';
|
||||
import { promptCredentials } from './git/credentials';
|
||||
|
||||
interface Props {
|
||||
hide: () => void;
|
||||
}
|
||||
|
||||
// Detect path separator from an existing path (defaults to /)
|
||||
function getPathSeparator(path: string): string {
|
||||
return path.includes('\\') ? '\\' : '/';
|
||||
}
|
||||
|
||||
export function CloneGitRepositoryDialog({ hide }: Props) {
|
||||
const [url, setUrl] = useState<string>('');
|
||||
const [baseDirectory, setBaseDirectory] = useState<string>(appInfo.defaultProjectDir);
|
||||
const [directoryOverride, setDirectoryOverride] = useState<string | null>(null);
|
||||
const [hasSubdirectory, setHasSubdirectory] = useState(false);
|
||||
const [subdirectory, setSubdirectory] = useState<string>('');
|
||||
const [isCloning, setIsCloning] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const repoName = extractRepoName(url);
|
||||
const sep = getPathSeparator(baseDirectory);
|
||||
const computedDirectory = repoName ? `${baseDirectory}${sep}${repoName}` : baseDirectory;
|
||||
const directory = directoryOverride ?? computedDirectory;
|
||||
const workspaceDirectory =
|
||||
hasSubdirectory && subdirectory ? `${directory}${sep}${subdirectory}` : directory;
|
||||
|
||||
const handleSelectDirectory = async () => {
|
||||
const dir = await open({
|
||||
title: 'Select Directory',
|
||||
directory: true,
|
||||
multiple: false,
|
||||
});
|
||||
if (dir != null) {
|
||||
setBaseDirectory(dir);
|
||||
setDirectoryOverride(null);
|
||||
}
|
||||
};
|
||||
|
||||
const handleClone = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
if (!url || !directory) return;
|
||||
|
||||
setIsCloning(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const result = await gitClone(url, directory, promptCredentials);
|
||||
|
||||
if (result.type === 'needs_credentials') {
|
||||
setError(
|
||||
result.error ?? 'Authentication failed. Please check your credentials and try again.',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Open the workspace from the cloned directory (or subdirectory)
|
||||
await openWorkspaceFromSyncDir.mutateAsync(workspaceDirectory);
|
||||
|
||||
hide();
|
||||
} catch (err) {
|
||||
setError(String(err));
|
||||
showErrorToast({
|
||||
id: 'git-clone-error',
|
||||
title: 'Clone Failed',
|
||||
message: String(err),
|
||||
});
|
||||
} finally {
|
||||
setIsCloning(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<VStack as="form" space={3} alignItems="start" className="pb-3" onSubmit={handleClone}>
|
||||
{error && (
|
||||
<Banner color="danger" className="w-full">
|
||||
{error}
|
||||
</Banner>
|
||||
)}
|
||||
|
||||
<PlainInput
|
||||
required
|
||||
label="Repository URL"
|
||||
placeholder="https://github.com/user/repo.git"
|
||||
defaultValue={url}
|
||||
onChange={setUrl}
|
||||
/>
|
||||
|
||||
<PlainInput
|
||||
label="Directory"
|
||||
placeholder={appInfo.defaultProjectDir}
|
||||
defaultValue={directory}
|
||||
onChange={setDirectoryOverride}
|
||||
rightSlot={
|
||||
<IconButton
|
||||
size="xs"
|
||||
className="mr-0.5 !h-auto my-0.5"
|
||||
icon="folder"
|
||||
title="Browse"
|
||||
onClick={handleSelectDirectory}
|
||||
/>
|
||||
}
|
||||
/>
|
||||
|
||||
<Checkbox
|
||||
checked={hasSubdirectory}
|
||||
onChange={setHasSubdirectory}
|
||||
title="Workspace is in a subdirectory"
|
||||
help="Enable if the Yaak workspace files are not at the root of the repository"
|
||||
/>
|
||||
|
||||
{hasSubdirectory && (
|
||||
<PlainInput
|
||||
label="Subdirectory"
|
||||
placeholder="path/to/workspace"
|
||||
defaultValue={subdirectory}
|
||||
onChange={setSubdirectory}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
color="primary"
|
||||
className="w-full mt-3"
|
||||
disabled={!url || !directory || isCloning}
|
||||
isLoading={isCloning}
|
||||
>
|
||||
{isCloning ? 'Cloning...' : 'Clone Repository'}
|
||||
</Button>
|
||||
</VStack>
|
||||
);
|
||||
}
|
||||
|
||||
function extractRepoName(url: string): string {
|
||||
// Handle various Git URL formats:
|
||||
// https://github.com/user/repo.git
|
||||
// git@github.com:user/repo.git
|
||||
// https://github.com/user/repo
|
||||
const match = url.match(/\/([^/]+?)(\.git)?$/);
|
||||
if (match?.[1]) {
|
||||
return match[1];
|
||||
}
|
||||
// Fallback for SSH-style URLs
|
||||
const sshMatch = url.match(/:([^/]+?)(\.git)?$/);
|
||||
if (sshMatch?.[1]) {
|
||||
return sshMatch[1];
|
||||
}
|
||||
return '';
|
||||
}
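To make the two regexes above concrete, a few illustrative calls covering the URL formats listed in the comment (these example calls are added here for clarity and are not part of the removed file):

```ts
extractRepoName('https://github.com/user/repo.git'); // 'repo'
extractRepoName('https://github.com/user/repo'); // 'repo'
extractRepoName('git@github.com:user/repo.git'); // 'repo' (first regex matches via the '/')
extractRepoName('git@host:repo.git'); // 'repo' (SSH fallback, no '/' in the path)
extractRepoName('not-a-url'); // '' (neither regex matches)
```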

@@ -1,181 +0,0 @@
import type { DnsOverride, Workspace } from '@yaakapp-internal/models';
import { patchModel } from '@yaakapp-internal/models';
import { useCallback, useId, useMemo } from 'react';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { HStack, VStack } from './core/Stacks';
import { Table, TableBody, TableCell, TableHead, TableHeaderCell, TableRow } from './core/Table';

interface Props {
workspace: Workspace;
}

interface DnsOverrideWithId extends DnsOverride {
_id: string;
}

export function DnsOverridesEditor({ workspace }: Props) {
const reactId = useId();

// Ensure each override has an internal ID for React keys
const overridesWithIds = useMemo<DnsOverrideWithId[]>(() => {
return workspace.settingDnsOverrides.map((override, index) => ({
...override,
_id: `${reactId}-${index}`,
}));
}, [workspace.settingDnsOverrides, reactId]);

const handleChange = useCallback(
(overrides: DnsOverride[]) => {
patchModel(workspace, { settingDnsOverrides: overrides });
},
[workspace],
);

const handleAdd = useCallback(() => {
const newOverride: DnsOverride = {
hostname: '',
ipv4: [''],
ipv6: [],
enabled: true,
};
handleChange([...workspace.settingDnsOverrides, newOverride]);
}, [workspace.settingDnsOverrides, handleChange]);

const handleUpdate = useCallback(
(index: number, update: Partial<DnsOverride>) => {
const updated = workspace.settingDnsOverrides.map((o, i) =>
i === index ? { ...o, ...update } : o,
);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);

const handleDelete = useCallback(
(index: number) => {
const updated = workspace.settingDnsOverrides.filter((_, i) => i !== index);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);

return (
<VStack space={3} className="pb-3">
<div className="text-text-subtle text-sm">
Override DNS resolution for specific hostnames. This works like{' '}
<code className="text-text-subtlest bg-surface-highlight px-1 rounded">/etc/hosts</code>{' '}
but only for requests made from this workspace.
</div>

{overridesWithIds.length > 0 && (
<Table>
<TableHead>
<TableRow>
<TableHeaderCell className="w-8" />
<TableHeaderCell>Hostname</TableHeaderCell>
<TableHeaderCell>IPv4 Address</TableHeaderCell>
<TableHeaderCell>IPv6 Address</TableHeaderCell>
<TableHeaderCell className="w-10" />
</TableRow>
</TableHead>
<TableBody>
{overridesWithIds.map((override, index) => (
<DnsOverrideRow
key={override._id}
override={override}
onUpdate={(update) => handleUpdate(index, update)}
onDelete={() => handleDelete(index)}
/>
))}
</TableBody>
</Table>
)}

<HStack>
<Button size="xs" color="secondary" variant="border" onClick={handleAdd}>
Add DNS Override
</Button>
</HStack>
</VStack>
);
}

interface DnsOverrideRowProps {
override: DnsOverride;
onUpdate: (update: Partial<DnsOverride>) => void;
onDelete: () => void;
}

function DnsOverrideRow({ override, onUpdate, onDelete }: DnsOverrideRowProps) {
const ipv4Value = override.ipv4.join(', ');
const ipv6Value = override.ipv6.join(', ');

return (
<TableRow>
<TableCell>
<Checkbox
hideLabel
title={override.enabled ? 'Disable override' : 'Enable override'}
checked={override.enabled ?? true}
onChange={(enabled) => onUpdate({ enabled })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="Hostname"
placeholder="api.example.com"
defaultValue={override.hostname}
onChange={(hostname) => onUpdate({ hostname })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv4 addresses"
placeholder="127.0.0.1"
defaultValue={ipv4Value}
onChange={(value) =>
onUpdate({
ipv4: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv6 addresses"
placeholder="::1"
defaultValue={ipv6Value}
onChange={(value) =>
onUpdate({
ipv6: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<IconButton
size="xs"
iconSize="sm"
icon="trash"
title="Delete override"
onClick={onDelete}
/>
</TableCell>
</TableRow>
);
}
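The IPv4/IPv6 inputs in the row above store comma-separated text as string arrays. A minimal sketch of that transform; `parseAddresses` is a hypothetical helper name used only for this example:

```ts
// Mirrors the onChange handlers above: split on commas, trim, drop empties.
const parseAddresses = (value: string): string[] =>
  value
    .split(',')
    .map((s) => s.trim())
    .filter(Boolean);

parseAddresses('127.0.0.1, 10.0.0.1'); // ['127.0.0.1', '10.0.0.1']
parseAddresses('::1'); // ['::1']
parseAddresses(' , '); // []
```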

@@ -83,7 +83,7 @@ export function DynamicForm<T extends Record<string, JsonPrimitive>>({
function FormInputsStack<T extends Record<string, JsonPrimitive>>({
className,
...props
}: FormInputsProps<T> & { className?: string }) {
}: FormInputsProps<T> & { className?: string}) {
return (
<VStack
space={3}
@@ -198,9 +198,6 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
/>
);
case 'accordion':
if (!hasVisibleInputs(input.inputs)) {
return null;
}
return (
<div key={i + stateKey}>
<DetailsBanner
@@ -222,9 +219,6 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
</div>
);
case 'h_stack':
if (!hasVisibleInputs(input.inputs)) {
return null;
}
return (
<div className="flex flex-wrap sm:flex-nowrap gap-3 items-end" key={i + stateKey}>
<FormInputs
@@ -239,9 +233,6 @@ function FormInputs<T extends Record<string, JsonPrimitive>>({
</div>
);
case 'banner':
if (!hasVisibleInputs(input.inputs)) {
return null;
}
return (
<Banner
key={i + stateKey}
@@ -612,8 +603,3 @@ function KeyValueArg({
</div>
);
}

function hasVisibleInputs(inputs: FormInput[] | undefined): boolean {
if (!inputs) return false;
return inputs.some((i) => !i.hidden);
}
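The `hasVisibleInputs` guard lets the accordion, h_stack, and banner cases above render nothing when every child input is hidden. A few schematic example values (illustration only; the casts stand in for fully-populated `FormInput` objects):

```ts
hasVisibleInputs(undefined); // false
hasVisibleInputs([]); // false
hasVisibleInputs([{ hidden: true }] as FormInput[]); // false
hasVisibleInputs([{ hidden: true }, { hidden: false }] as FormInput[]); // true
```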

@@ -1,6 +1,6 @@
import { createWorkspaceModel, foldersAtom, patchModel } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { useAuthTab } from '../hooks/useAuthTab';
import { useEnvironmentsBreakdown } from '../hooks/useEnvironmentsBreakdown';
import { useHeadersTab } from '../hooks/useHeadersTab';
@@ -37,6 +37,7 @@ export type FolderSettingsTab =
export function FolderSettingsDialog({ folderId, tab }: Props) {
const folders = useAtomValue(foldersAtom);
const folder = folders.find((f) => f.id === folderId) ?? null;
const [activeTab, setActiveTab] = useState<string>(tab ?? TAB_GENERAL);
const authTab = useAuthTab(TAB_AUTH, folder);
const headersTab = useHeadersTab(TAB_HEADERS, folder);
const inheritedHeaders = useInheritedHeaders(folder);
@@ -68,7 +69,8 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {

return (
<Tabs
defaultValue={tab ?? TAB_GENERAL}
value={activeTab}
onChangeValue={setActiveTab}
label="Folder Settings"
className="pt-2 pb-2 pl-3 pr-1"
layout="horizontal"
@@ -111,7 +113,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
<VStack alignItems="center" space={1.5}>
<p>
Override{' '}
<Link href="https://yaak.app/docs/using-yaak/environments-and-variables">
<Link href="https://feedback.yaak.app/help/articles/3284139-environments-and-variables">
Variables
</Link>{' '}
for requests within this folder.

@@ -10,7 +10,7 @@ import {
stateExtensions,
updateSchema,
} from 'codemirror-json-schema';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { showAlert } from '../lib/alert';
import { showDialog } from '../lib/dialog';
@@ -39,15 +39,15 @@ export function GrpcEditor({
protoFiles,
...extraEditorProps
}: Props) {
const [editorView, setEditorView] = useState<EditorView | null>(null);
const editorViewRef = useRef<EditorView>(null);
const handleInitEditorViewRef = useCallback((h: EditorView | null) => {
setEditorView(h);
editorViewRef.current = h;
}, []);

// Find the schema for the selected service and method and update the editor
useEffect(() => {
if (
editorView == null ||
editorViewRef.current == null ||
services === null ||
request.service === null ||
request.method === null
@@ -91,7 +91,7 @@ export function GrpcEditor({
}

try {
updateSchema(editorView, JSON.parse(schema));
updateSchema(editorViewRef.current, JSON.parse(schema));
} catch (err) {
showAlert({
id: 'grpc-parse-schema-error',
@@ -107,7 +107,7 @@ export function GrpcEditor({
),
});
}
}, [editorView, services, request.method, request.service]);
}, [services, request.method, request.service]);

const extraExtensions = useMemo(
() => [
@@ -118,7 +118,7 @@ export function GrpcEditor({
jsonLanguage.data.of({
autocomplete: jsonCompletion(),
}),
stateExtensions({}),
stateExtensions(/** Init with empty schema **/),
],
[],
);

@@ -7,6 +7,7 @@ import { useContainerSize } from '../hooks/useContainerQuery';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
import { resolvedModelName } from '../lib/resolvedModelName';
import { Button } from './core/Button';
@@ -68,6 +69,11 @@ export function GrpcRequestPane({
const authTab = useAuthTab(TAB_AUTH, activeRequest);
const metadataTab = useHeadersTab(TAB_METADATA, activeRequest, 'Metadata');
const inheritedHeaders = useInheritedHeaders(activeRequest);
const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
namespace: 'no_sync',
key: 'grpcRequestActiveTabs',
fallback: {},
});
const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);

const urlContainerEl = useRef<HTMLDivElement>(null);
@@ -139,6 +145,14 @@ export function GrpcRequestPane({
[activeRequest.description, authTab, metadataTab],
);

const activeTab = activeTabs?.[activeRequest.id];
const setActiveTab = useCallback(
async (tab: string) => {
await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
},
[activeRequest.id, setActiveTabs],
);

const handleMetadataChange = useCallback(
(metadata: HttpRequestHeader[]) => patchModel(activeRequest, { metadata }),
[activeRequest],
@@ -251,11 +265,12 @@ export function GrpcRequestPane({
</HStack>
</div>
<Tabs
value={activeTab}
label="Request"
onChangeValue={setActiveTab}
tabs={tabs}
tabListClassName="mt-1 !mb-1.5"
storageKey="grpc_request_tabs"
activeTabKey={activeRequest.id}
storageKey="grpc_request_tabs_order"
>
<TabContent value="message">
<GrpcEditor

@@ -1,7 +1,9 @@
import type { GrpcEvent, GrpcRequest } from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { useAtomValue, useSetAtom } from 'jotai';
import type { CSSProperties } from 'react';
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useMemo, useRef, useState } from 'react';
import {
activeGrpcConnectionAtom,
activeGrpcConnections,
@@ -9,14 +11,18 @@ import {
useGrpcEvents,
} from '../hooks/usePinnedGrpcConnection';
import { useStateWithDeps } from '../hooks/useStateWithDeps';
import { copyToClipboard } from '../lib/copy';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { Editor } from './core/Editor/LazyEditor';
import { EventDetailHeader, EventViewer } from './core/EventViewer';
import { EventViewerRow } from './core/EventViewerRow';
import { HotkeyList } from './core/HotkeyList';
import { Icon, type IconProps } from './core/Icon';
import { Icon } from './core/Icon';
import { IconButton } from './core/IconButton';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import { LoadingIcon } from './core/LoadingIcon';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
import { HStack, VStack } from './core/Stacks';
import { EmptyStateText } from './EmptyStateText';
import { ErrorBoundary } from './ErrorBoundary';
@@ -36,7 +42,7 @@ interface Props {
}

export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
const [activeEventId, setActiveEventId] = useState<string | null>(null);
const [showLarge, setShowLarge] = useStateWithDeps<boolean>(false, [activeRequest.id]);
const [showingLarge, setShowingLarge] = useState<boolean>(false);
const connections = useAtomValue(activeGrpcConnections);
@@ -45,8 +51,8 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const setPinnedGrpcConnectionId = useSetAtom(pinnedGrpcConnectionIdAtom);

const activeEvent = useMemo(
() => (activeEventIndex != null ? events[activeEventIndex] : null),
[activeEventIndex, events],
() => events.find((m) => m.id === activeEventId) ?? null,
[activeEventId, events],
);

// Set the active message to the first message received if unary
@@ -55,196 +61,223 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
if (events.length === 0 || activeEvent != null || methodType !== 'unary') {
return;
}
const firstServerMessageIndex = events.findIndex((m) => m.eventType === 'server_message');
if (firstServerMessageIndex !== -1) {
setActiveEventIndex(firstServerMessageIndex);
}
setActiveEventId(events.find((m) => m.eventType === 'server_message')?.id ?? null);
}, [events.length]);

if (activeConnection == null) {
return (
<HotkeyList hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']} />
);
}

const header = (
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
);

return (
<div style={style} className="h-full">
<ErrorBoundary name="GRPC Events">
<EventViewer
events={events}
getEventKey={(event) => event.id}
error={activeConnection.error}
header={header}
splitLayoutName="grpc_events"
defaultRatio={0.4}
renderRow={({ event, isActive, onClick }) => (
<GrpcEventRow event={event} isActive={isActive} onClick={onClick} />
)}
renderDetail={({ event, onClose }) => (
<GrpcEventDetail
event={event}
showLarge={showLarge}
showingLarge={showingLarge}
setShowLarge={setShowLarge}
setShowingLarge={setShowingLarge}
onClose={onClose}
/>
)}
/>
</ErrorBoundary>
</div>
);
}

function GrpcEventRow({
event,
isActive,
onClick,
}: {
event: GrpcEvent;
isActive: boolean;
onClick: () => void;
}) {
const { eventType, status, content, error } = event;
const display = getEventDisplay(eventType, status);

return (
<EventViewerRow
isActive={isActive}
onClick={onClick}
icon={<Icon color={display.color} title={display.title} icon={display.icon} />}
content={
<span className="text-xs">
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</span>
<SplitLayout
layout="vertical"
style={style}
name="grpc_events"
defaultRatio={0.4}
minHeightPx={20}
firstSlot={() =>
activeConnection == null ? (
<HotkeyList
hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']}
/>
) : (
<div className="w-full grid grid-rows-[auto_minmax(0,1fr)] grid-cols-1 items-center">
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
<ErrorBoundary name="GRPC Events">
<AutoScroller
data={events}
header={
activeConnection.error && (
<Banner color="danger" className="m-3">
{activeConnection.error}
</Banner>
)
}
render={(event) => (
<EventRow
key={event.id}
event={event}
isActive={event.id === activeEventId}
onClick={() => {
if (event.id === activeEventId) setActiveEventId(null);
else setActiveEventId(event.id);
}}
/>
)}
/>
</ErrorBoundary>
</div>
)
}
secondSlot={
activeEvent != null && activeConnection != null
? () => (
<div className="grid grid-rows-[auto_minmax(0,1fr)]">
<div className="pb-3 px-2">
<Separator />
</div>
<div className="h-full pl-2 overflow-y-auto grid grid-rows-[auto_minmax(0,1fr)] ">
{activeEvent.eventType === 'client_message' ||
activeEvent.eventType === 'server_message' ? (
<>
<div className="mb-2 select-text cursor-text grid grid-cols-[minmax(0,1fr)_auto] items-center">
<div className="font-semibold">
Message {activeEvent.eventType === 'client_message' ? 'Sent' : 'Received'}
</div>
<IconButton
title="Copy message"
icon="copy"
size="xs"
onClick={() => copyToClipboard(activeEvent.content)}
/>
</div>
{!showLarge && activeEvent.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={activeEvent.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
</>
) : (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<div>
<div className="select-text cursor-text font-semibold">
{activeEvent.content}
</div>
{activeEvent.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{activeEvent.error}
</div>
)}
</div>
<div className="py-2 h-full">
{Object.keys(activeEvent.metadata).length === 0 ? (
<EmptyStateText>
No{' '}
{activeEvent.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(activeEvent.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
)}
</div>
</div>
)}
</div>
</div>
)
: null
}
timestamp={event.createdAt}
/>
);
}

function GrpcEventDetail({
function EventRow({
onClick,
isActive,
event,
showLarge,
showingLarge,
setShowLarge,
setShowingLarge,
onClose,
}: {
onClick?: () => void;
isActive?: boolean;
event: GrpcEvent;
showLarge: boolean;
showingLarge: boolean;
setShowLarge: (v: boolean) => void;
setShowingLarge: (v: boolean) => void;
onClose: () => void;
}) {
if (event.eventType === 'client_message' || event.eventType === 'server_message') {
const title = `Message ${event.eventType === 'client_message' ? 'Sent' : 'Received'}`;
const { eventType, status, createdAt, content, error } = event;
const ref = useRef<HTMLDivElement>(null);

return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader
title={title}
timestamp={event.createdAt}
copyText={event.content}
onClose={onClose}
/>
{!showLarge && event.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={event.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
</div>
);
}

// Error or connection_end - show metadata/trailers
return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader title={event.content} timestamp={event.createdAt} onClose={onClose} />
{event.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{event.error}
</div>
)}
<div className="py-2 h-full">
{Object.keys(event.metadata).length === 0 ? (
<EmptyStateText>
No {event.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(event.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
<div className="px-1" ref={ref}>
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,3fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
)}
</div>
>
<Icon
color={
eventType === 'server_message'
? 'info'
: eventType === 'client_message'
? 'primary'
: eventType === 'error' || (status != null && status > 0)
? 'danger'
: eventType === 'connection_end'
? 'success'
: undefined
}
title={
eventType === 'server_message'
? 'Server message'
: eventType === 'client_message'
? 'Client message'
: eventType === 'error' || (status != null && status > 0)
? 'Error'
: eventType === 'connection_end'
? 'Connection response'
: undefined
}
icon={
eventType === 'server_message'
? 'arrow_big_down_dash'
: eventType === 'client_message'
? 'arrow_big_up_dash'
: eventType === 'error' || (status != null && status > 0)
? 'alert_triangle'
: eventType === 'connection_end'
? 'check'
: 'info'
}
/>
<div className={classNames('w-full truncate text-xs')}>
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</div>
<div className={classNames('opacity-50 text-xs')}>
{format(`${createdAt}Z`, 'HH:mm:ss.SSS')}
</div>
</button>
</div>
);
}

function getEventDisplay(
eventType: GrpcEvent['eventType'],
status: GrpcEvent['status'],
): { icon: IconProps['icon']; color: IconProps['color']; title: string } {
if (eventType === 'server_message') {
return { icon: 'arrow_big_down_dash', color: 'info', title: 'Server message' };
}
if (eventType === 'client_message') {
return { icon: 'arrow_big_up_dash', color: 'primary', title: 'Client message' };
}
if (eventType === 'error' || (status != null && status > 0)) {
return { icon: 'alert_triangle', color: 'danger', title: 'Error' };
}
if (eventType === 'connection_end') {
return { icon: 'check', color: 'success', title: 'Connection response' };
}
return { icon: 'info', color: undefined, title: 'Event' };
}

@@ -19,7 +19,6 @@ type Props = {
forceUpdateKey: string;
headers: HttpRequestHeader[];
inheritedHeaders?: HttpRequestHeader[];
inheritedHeadersLabel?: string;
stateKey: string;
onChange: (headers: HttpRequestHeader[]) => void;
label?: string;
@@ -29,36 +28,20 @@ export function HeadersEditor({
stateKey,
headers,
inheritedHeaders,
inheritedHeadersLabel = 'Inherited',
onChange,
forceUpdateKey,
}: Props) {
// Get header names defined at current level (case-insensitive)
const currentHeaderNames = new Set(
headers.filter((h) => h.name).map((h) => h.name.toLowerCase()),
);
// Filter inherited headers: must be enabled, have content, and not be overridden by current level
const validInheritedHeaders =
inheritedHeaders?.filter(
(pair) =>
pair.enabled && (pair.name || pair.value) && !currentHeaderNames.has(pair.name.toLowerCase()),
) ?? [];
const hasInheritedHeaders = validInheritedHeaders.length > 0;
inheritedHeaders?.filter((pair) => pair.enabled && (pair.name || pair.value)) ?? [];
return (
<div
className={
hasInheritedHeaders
? '@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5'
: '@container w-full h-full'
}
>
{hasInheritedHeaders && (
<div className="@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5">
{validInheritedHeaders.length > 0 ? (
<DetailsBanner
color="secondary"
className="text-sm"
summary={
<HStack>
{inheritedHeadersLabel} <CountBadge count={validInheritedHeaders.length} />
Inherited <CountBadge count={validInheritedHeaders.length} />
</HStack>
}
>
@@ -80,6 +63,8 @@ export function HeadersEditor({
))}
</div>
</DetailsBanner>
) : (
<span />
)}
<PairOrBulkEditor
forceUpdateKey={forceUpdateKey}

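To make the removed override check concrete: the filter that includes `!currentHeaderNames.has(...)` hides an inherited header when the current level redefines the same name (case-insensitively), while the replacement filter keeps it. A schematic sketch with invented data, not taken from the diff:

```ts
const headers = [{ name: 'X-Api-Key', value: 'local', enabled: true }];
const inheritedHeaders = [
  { name: 'x-api-key', value: 'parent', enabled: true }, // redefined at the current level
  { name: 'Accept', value: 'application/json', enabled: true }, // unique to the parent
  { name: 'X-Debug', value: '1', enabled: false }, // disabled, always dropped
];

const currentHeaderNames = new Set(headers.filter((h) => h.name).map((h) => h.name.toLowerCase()));

const withOverrideCheck = inheritedHeaders.filter(
  (pair) =>
    pair.enabled && (pair.name || pair.value) && !currentHeaderNames.has(pair.name.toLowerCase()),
); // -> only 'Accept'

const withoutOverrideCheck = inheritedHeaders.filter(
  (pair) => pair.enabled && (pair.name || pair.value),
); // -> 'x-api-key' and 'Accept'
```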
@@ -62,7 +62,9 @@ export function HttpAuthenticationEditor({ model }: Props) {
<p>
Apply auth to all requests in <strong>{resolvedModelName(model)}</strong>
</p>
<Link href="https://yaak.app/docs/using-yaak/request-inheritance">Documentation</Link>
<Link href="https://feedback.yaak.app/help/articles/2112119-request-inheritance">
Documentation
</Link>
</EmptyStateText>
);
}
@@ -138,12 +140,7 @@ export function HttpAuthenticationEditor({ model }: Props) {
}),
)}
>
<IconButton
title="Authentication Actions"
icon="settings"
size="xs"
className="!text-secondary"
/>
<IconButton title="Authentication Actions" icon="settings" size="xs" />
</Dropdown>
)}
</HStack>

Some files were not shown because too many files have changed in this diff.