Compare commits

..

2 Commits

Author: Gregory Schier
SHA1: 0a52032988
Message: Merge branch 'main' into omnara/premium-deviator
Date: 2026-01-09 20:23:20 -08:00

Author: Gregory Schier
SHA1: 4b7497a908
Message: feat: implement layered settings system for HTTP requests and folders
Add support for settings overrides at folder and HTTP request levels. Introduces nullable settings columns to database tables and implements resolution logic to merge workspace, folder, and request-level settings with proper precedence.
Date: 2026-01-09 20:22:53 -08:00
144 changed files with 2057 additions and 4027 deletions
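The second commit's message describes a precedence chain: request-level settings override folder-level settings, which in turn override workspace defaults, with each override layer stored as nullable columns. The following Rust sketch shows one way such a merge could work. It is a minimal illustration only; the struct and field names are assumptions (the diff further down only shows `resolved_settings.follow_redirects`, `resolved_settings.validate_certificates`, and `resolved_settings.request_timeout` being consumed by the HTTP sender), not the actual yaak-models implementation.

```rust
/// Hypothetical sketch of layered settings resolution. Names are illustrative,
/// not the real yaak-models API. Each override layer stores nullable values and
/// the most specific non-null value wins: request > folder > workspace.
#[derive(Clone, Copy)]
struct SettingsOverride {
    follow_redirects: Option<bool>,
    validate_certificates: Option<bool>,
    request_timeout: Option<i64>, // milliseconds; <= 0 means "no timeout"
}

struct ResolvedSettings {
    follow_redirects: bool,
    validate_certificates: bool,
    request_timeout: i64,
}

fn resolve_settings(
    workspace: ResolvedSettings,   // workspace-level defaults (always concrete)
    folders: &[SettingsOverride],  // ancestor folders, outermost first
    request: SettingsOverride,     // request-level overrides
) -> ResolvedSettings {
    // Apply folder overrides from outermost to innermost, then the request,
    // so later (more specific) layers take precedence.
    let mut resolved = workspace;
    for layer in folders.iter().copied().chain(std::iter::once(request)) {
        if let Some(v) = layer.follow_redirects {
            resolved.follow_redirects = v;
        }
        if let Some(v) = layer.validate_certificates {
            resolved.validate_certificates = v;
        }
        if let Some(v) = layer.request_timeout {
            resolved.request_timeout = v;
        }
    }
    resolved
}

fn main() {
    let workspace = ResolvedSettings {
        follow_redirects: true,
        validate_certificates: true,
        request_timeout: 0,
    };
    let folder = SettingsOverride {
        follow_redirects: None,
        validate_certificates: Some(false),
        request_timeout: None,
    };
    let request = SettingsOverride {
        follow_redirects: Some(false),
        validate_certificates: None,
        request_timeout: Some(30_000),
    };
    let resolved = resolve_settings(workspace, &[folder], request);
    // Request wins for follow_redirects, folder wins for validate_certificates,
    // and the request supplies the timeout.
    assert!(!resolved.follow_redirects);
    assert!(!resolved.validate_certificates);
    assert_eq!(resolved.request_timeout, 30_000);
}
```

Whatever the real resolution logic looks like, the http_request.rs hunk below shows the consumption side: reads of `workspace.setting_follow_redirects`, `workspace.setting_request_timeout`, and `workspace.setting_validate_certificates` are replaced by values from `resolve_settings_for_http_request`.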

View File

@@ -37,11 +37,3 @@ The skill generates markdown-formatted release notes following this structure:
**IMPORTANT**: Always add a blank lines around the markdown code fence and output the markdown code block last
**IMPORTANT**: PRs by `@gschier` should not mention the @username
## After Generating Release Notes
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
```

View File

@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [v*]
tags: [ v* ]
jobs:
build-artifacts:
@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
# Windows ARM64
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:
@@ -88,7 +88,6 @@ jobs:
& $exe --version
- run: npm ci
- run: npm run bootstrap
- run: npm run lint
- name: Run JS Tests
run: npm test
@@ -100,29 +99,6 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}
- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH
# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true
- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
@@ -145,9 +121,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
releaseDraft: true
prerelease: true
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'

Cargo.lock generated
View File

@@ -8075,7 +8075,6 @@ name = "yaak-common"
version = "0.1.0"
dependencies = [
"serde_json",
"tokio",
]
[[package]]
@@ -8122,10 +8121,8 @@ dependencies = [
"serde_json",
"serde_yaml",
"thiserror 2.0.17",
"tokio",
"ts-rs",
"url",
"yaak-common",
"yaak-models",
"yaak-sync",
]
@@ -8152,7 +8149,6 @@ dependencies = [
"tonic",
"tonic-reflection",
"uuid",
"yaak-common",
"yaak-tls",
]

View File

@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
</a>
</p>
@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources
- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://yaak.app/docs)
- [Documentation](https://feedback.yaak.app/help)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)

View File

@@ -15,7 +15,7 @@ use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
#[derive(Parser)]
#[command(name = "yaakcli")]
@@ -149,7 +149,14 @@ async fn render_http_request(
// Apply path placeholders (e.g., /users/:id -> /users/123)
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
}
#[tokio::main]
@@ -162,10 +169,16 @@ async fn main() {
}
// Use the same app_id for both data directory and keyring
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
let app_id = if cfg!(debug_assertions) {
"app.yaak.desktop.dev"
} else {
"app.yaak.desktop"
};
let data_dir = cli.data_dir.unwrap_or_else(|| {
dirs::data_dir().expect("Could not determine data directory").join(app_id)
dirs::data_dir()
.expect("Could not determine data directory")
.join(app_id)
});
let db_path = data_dir.join("db.sqlite");
@@ -178,7 +191,9 @@ async fn main() {
// Initialize encryption manager for secure() template function
// Use the same app_id as the Tauri app for keyring access
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
let encryption_manager = Arc::new(
EncryptionManager::new(query_manager.clone(), app_id),
);
// Initialize plugin manager for template functions
let vendored_plugin_dir = data_dir.join("vendored-plugins");
@@ -188,8 +203,9 @@ async fn main() {
let node_bin_path = PathBuf::from("node");
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
// Development fallback: look relative to crate root
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
@@ -210,10 +226,14 @@ async fn main() {
// Initialize plugins from database
let plugins = db.list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors =
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
for (plugin_dir, error_msg) in errors {
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
}
}
@@ -229,7 +249,9 @@ async fn main() {
}
}
Commands::Requests { workspace_id } => {
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
let requests = db
.list_http_requests(&workspace_id)
.expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {
@@ -239,7 +261,9 @@ async fn main() {
}
}
Commands::Send { request_id } => {
let request = db.get_http_request(&request_id).expect("Failed to get request");
let request = db
.get_http_request(&request_id)
.expect("Failed to get request");
// Resolve environment chain for variable substitution
let environment_chain = db
@@ -294,13 +318,18 @@ async fn main() {
}))
} else {
// Drain events silently
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
// Wait for event handler to finish
if let Some(handle) = verbose_handle {
@@ -354,13 +383,18 @@ async fn main() {
}
}))
} else {
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
if let Some(handle) = verbose_handle {
let _ = handle.await;
@@ -387,7 +421,12 @@ async fn main() {
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Create { workspace_id, name, method, url } => {
Commands::Create {
workspace_id,
name,
method,
url,
} => {
let request = HttpRequest {
workspace_id,
name,

View File

@@ -2,6 +2,14 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS execution -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS/V8 JIT compiler -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>

View File

@@ -1,11 +1,9 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::PluginContextExt;
use std::sync::Arc;
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManager;
use yaak_models::models::HttpRequestHeader;
use yaak_models::queries::workspaces::default_headers;
use yaak_plugins::events::GetThemesResponse;
use yaak_plugins::manager::PluginManager;
use yaak_plugins::native_template_functions::{
@@ -56,12 +54,7 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let plugin_context = window.plugin_context();
Ok(encrypt_secure_template_function(
plugin_manager,
encryption_manager,
&plugin_context,
template,
)?)
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
}
#[command]
@@ -99,17 +92,3 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
window.crypto().set_human_key(workspace_id, key)?;
Ok(())
}
#[command]
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
window.crypto().disable_encryption(workspace_id)?;
Ok(())
}
#[command]
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
default_headers()
}

View File

@@ -6,47 +6,33 @@ use crate::error::Result;
use std::path::{Path, PathBuf};
use tauri::command;
use yaak_git::{
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
git_rm_remote, git_status, git_unstage,
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
git_unstage,
};
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
#[command]
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
Ok(git_checkout_branch(dir, branch, force).await?)
Ok(git_checkout_branch(dir, branch, force)?)
}
#[command]
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
Ok(git_create_branch(dir, branch, base).await?)
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_create_branch(dir, branch)?)
}
#[command]
pub async fn cmd_git_delete_branch(
dir: &Path,
branch: &str,
force: Option<bool>,
) -> Result<BranchDeleteResult> {
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_branch(dir, branch)?)
}
#[command]
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_remote_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_merge_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
Ok(git_rename_branch(dir, old_name, new_name).await?)
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
Ok(git_merge_branch(dir, branch, force)?)
}
#[command]
@@ -64,29 +50,24 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
Ok(git_init(dir)?)
}
#[command]
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
Ok(git_clone(url, dir).await?)
}
#[command]
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
Ok(git_commit(dir, message).await?)
Ok(git_commit(dir, message)?)
}
#[command]
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
Ok(git_fetch_all(dir).await?)
Ok(git_fetch_all(dir)?)
}
#[command]
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
Ok(git_push(dir).await?)
Ok(git_push(dir)?)
}
#[command]
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
Ok(git_pull(dir).await?)
Ok(git_pull(dir)?)
}
#[command]
@@ -107,11 +88,12 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
#[command]
pub async fn cmd_git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
Ok(git_add_credential(remote_url, username, password).await?)
Ok(git_add_credential(dir, remote_url, username, password).await?)
}
#[command]

View File

@@ -1,12 +1,12 @@
use std::collections::BTreeMap;
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use KeyAndValueRef::{Ascii, Binary};
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_grpc::{KeyAndValueRef, MetadataMap};
use yaak_models::models::GrpcRequest;
use crate::models_ext::QueryManagerExt;
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
use yaak_plugins::manager::PluginManager;

View File

@@ -1,8 +1,8 @@
use crate::models_ext::QueryManagerExt;
use chrono::{NaiveDateTime, Utc};
use log::debug;
use std::sync::OnceLock;
use tauri::{AppHandle, Runtime};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
const NAMESPACE: &str = "analytics";

View File

@@ -1,13 +1,9 @@
use crate::PluginContextExt;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::render_http_request;
use log::{debug, warn};
use std::pin::Pin;
use std::sync::Arc;
use std::sync::atomic::{AtomicI32, Ordering};
use std::time::{Duration, Instant};
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::{File, create_dir_all};
@@ -19,19 +15,22 @@ use yaak_http::client::{
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
};
use yaak_http::cookies::CookieStore;
use yaak_http::manager::{CachedClient, HttpConnectionManager};
use yaak_http::manager::HttpConnectionManager;
use yaak_http::sender::ReqwestSender;
use yaak_http::tee_reader::TeeReader;
use yaak_http::transaction::HttpTransaction;
use yaak_http::types::{
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
};
use crate::models_ext::BlobManagerExt;
use yaak_models::blob_manager::BodyChunk;
use yaak_models::models::{
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use crate::PluginContextExt;
use yaak_plugins::events::{
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
};
@@ -174,21 +173,19 @@ async fn send_http_request_inner<R: Runtime>(
let environment_id = environment.map(|e| e.id);
let workspace = window.db().get_workspace(workspace_id)?;
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
let cb = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
let env_chain =
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
// Resolve inherited settings for this request
let resolved_settings = window.db().resolve_settings_for_http_request(&resolved)?;
// Build the sendable request using the new SendableHttpRequest type
let options = SendableHttpRequestOptions {
follow_redirects: workspace.setting_follow_redirects,
timeout: if workspace.setting_request_timeout > 0 {
Some(Duration::from_millis(workspace.setting_request_timeout.unsigned_abs() as u64))
follow_redirects: resolved_settings.follow_redirects,
timeout: if resolved_settings.request_timeout > 0 {
Some(Duration::from_millis(resolved_settings.request_timeout.unsigned_abs() as u64))
} else {
None
},
@@ -234,13 +231,12 @@ async fn send_http_request_inner<R: Runtime>(
None => None,
};
let cached_client = connection_manager
let client = connection_manager
.get_client(&HttpConnectionOptions {
id: plugin_context.id.clone(),
validate_certificates: workspace.setting_validate_certificates,
validate_certificates: resolved_settings.validate_certificates,
proxy: proxy_setting,
client_certificate,
dns_overrides: workspace.setting_dns_overrides.clone(),
})
.await?;
@@ -257,7 +253,7 @@ async fn send_http_request_inner<R: Runtime>(
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
let result = execute_transaction(
cached_client,
client,
sendable_request,
response_ctx,
cancelled_rx.clone(),
@@ -317,7 +313,7 @@ pub fn resolve_http_request<R: Runtime>(
}
async fn execute_transaction<R: Runtime>(
cached_client: CachedClient,
client: reqwest::Client,
mut sendable_request: SendableHttpRequest,
response_ctx: &mut ResponseContext<R>,
mut cancelled_rx: Receiver<bool>,
@@ -328,10 +324,7 @@ async fn execute_transaction<R: Runtime>(
let workspace_id = response_ctx.response().workspace_id.clone();
let is_persisted = response_ctx.is_persisted();
// Keep a reference to the resolver for DNS timing events
let resolver = cached_client.resolver.clone();
let sender = ReqwestSender::with_client(cached_client.client);
let sender = ReqwestSender::with_client(client);
let transaction = match cookie_store {
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
None => HttpTransaction::new(sender),
@@ -356,39 +349,21 @@ async fn execute_transaction<R: Runtime>(
let (event_tx, mut event_rx) =
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
// Set the event sender on the DNS resolver so it can emit DNS timing events
resolver.set_event_sender(Some(event_tx.clone())).await;
// Shared state to capture DNS timing from the event processing task
let dns_elapsed = Arc::new(AtomicI32::new(0));
// Write events to DB in a task (only for persisted responses)
if is_persisted {
let response_id = response_id.clone();
let app_handle = app_handle.clone();
let update_source = response_ctx.update_source.clone();
let workspace_id = workspace_id.clone();
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
// Capture DNS timing when we see a DNS event
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
}
});
} else {
// For ephemeral responses, just drain the events but still capture DNS timing
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
}
});
// For ephemeral responses, just drain the events
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
};
// Capture request body as it's sent (only for persisted responses)
@@ -556,14 +531,10 @@ async fn execute_transaction<R: Runtime>(
// Final update with closed state and accurate byte count
response_ctx.update(|r| {
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
r.content_length = Some(written_bytes as i32);
r.state = HttpResponseState::Closed;
})?;
// Clear the event sender from the resolver since this request is done
resolver.set_event_sender(None).await;
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
}

View File

@@ -1,17 +1,17 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::info;
use std::collections::BTreeMap;
use std::fs::read_to_string;
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_core::WorkspaceContext;
use yaak_models::models::{
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
};
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
pub(crate) async fn import_data<R: Runtime>(
window: &WebviewWindow<R>,

View File

@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
use crate::import::import_data;
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
use crate::notifications::YaakNotifier;
use crate::render::{render_grpc_request, render_json_value, render_template};
use crate::render::{render_grpc_request, render_template};
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
use crate::uri_scheme::handle_deep_link;
use error::Result as YaakResult;
@@ -101,7 +101,6 @@ struct AppMetaData {
app_data_dir: String,
app_log_dir: String,
vendored_plugin_dir: String,
default_project_dir: String,
feature_updater: bool,
feature_license: bool,
}
@@ -112,7 +111,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
let app_log_dir = app_handle.path().app_log_dir()?;
let vendored_plugin_dir =
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
Ok(AppMetaData {
is_dev: is_dev(),
version: app_handle.package_info().version.to_string(),
@@ -120,7 +118,6 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
app_data_dir: app_data_dir.to_string_lossy().to_string(),
app_log_dir: app_log_dir.to_string_lossy().to_string(),
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
default_project_dir: default_project_dir.to_string_lossy().to_string(),
feature_license: cfg!(feature = "license"),
feature_updater: cfg!(feature = "updater"),
})
@@ -192,6 +189,7 @@ async fn cmd_grpc_reflect<R: Runtime>(
request_id: &str,
environment_id: Option<&str>,
proto_files: Vec<String>,
skip_cache: Option<bool>,
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
grpc_handle: State<'_, Mutex<GrpcHandle>>,
@@ -226,21 +224,18 @@ async fn cmd_grpc_reflect<R: Runtime>(
let settings = window.db().get_settings();
let client_certificate =
find_client_certificate(req.url.as_str(), &settings.client_certificates);
let proto_files: Vec<PathBuf> =
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();
// Always invalidate cached pool when this command is called, to force re-reflection
let mut handle = grpc_handle.lock().await;
handle.invalidate_pool(&req.id, &uri, &proto_files);
Ok(handle
Ok(grpc_handle
.lock()
.await
.services(
&req.id,
&uri,
&proto_files,
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_certificate,
skip_cache.unwrap_or(false),
)
.await
.map_err(|e| GenericError(e.to_string()))?)
@@ -332,7 +327,7 @@ async fn cmd_grpc_go<R: Runtime>(
uri.as_str(),
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&metadata,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_cert.clone(),
)
.await;
@@ -365,8 +360,10 @@ async fn cmd_grpc_go<R: Runtime>(
let cb = {
let cancelled_rx = cancelled_rx.clone();
let app_handle = app_handle.clone();
let environment_chain = environment_chain.clone();
let window = window.clone();
let base_msg = base_msg.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
@@ -388,12 +385,14 @@ async fn cmd_grpc_go<R: Runtime>(
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
Ok(IncomingMsg::Message(msg)) => {
let window = window.clone();
let app_handle = app_handle.clone();
let base_msg = base_msg.clone();
let environment_chain = environment_chain.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
let msg = block_in_place(|| {
tauri::async_runtime::block_on(async {
let result = render_template(
render_template(
msg.as_str(),
environment_chain,
&PluginTemplateCallback::new(
@@ -407,11 +406,24 @@ async fn cmd_grpc_go<R: Runtime>(
),
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await;
result.expect("Failed to render template")
.await
.expect("Failed to render template")
})
});
in_msg_tx.try_send(msg.clone()).unwrap();
tauri::async_runtime::spawn(async move {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_msg.clone()
},
&UpdateSource::from_window_label(window.label()),
)
.unwrap();
});
}
Ok(IncomingMsg::Commit) => {
maybe_in_msg_tx.take();
@@ -458,48 +470,12 @@ async fn cmd_grpc_go<R: Runtime>(
)?;
async move {
// Create callback for streaming methods that handles both success and error
let on_message = {
let app_handle = app_handle.clone();
let base_event = base_event.clone();
let window_label = window.label().to_string();
move |result: std::result::Result<String, String>| match result {
Ok(msg) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
Err(error) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: format!("Failed to send message: {}", error),
event_type: GrpcEventType::Error,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
}
};
let (maybe_stream, maybe_msg) =
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
(true, true) => (
Some(
connection
.streaming(
&service,
&method,
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
.await,
),
None,
@@ -514,7 +490,6 @@ async fn cmd_grpc_go<R: Runtime>(
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),
@@ -1060,54 +1035,14 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
#[tauri::command]
async fn cmd_get_http_authentication_config<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
environment_id: Option<&str>,
_environment_id: Option<&str>,
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
};
// Resolve environment chain and render the values for token lookup
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Preview,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.get_http_authentication_config(
&window.plugin_context(),
auth_name,
rendered_values,
model.id(),
)
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
.await?)
}
@@ -1154,54 +1089,19 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
#[tauri::command]
async fn cmd_call_http_authentication_action<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
action_index: i32,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
environment_id: Option<&str>,
_environment_id: Option<&str>,
) -> YaakResult<()> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
};
// Resolve environment chain and render the values
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Send,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.call_http_authentication_action(
&window.plugin_context(),
auth_name,
action_index,
rendered_values,
values,
&model.id(),
)
.await?)
@@ -1721,8 +1621,6 @@ pub fn run() {
//
// Migrated commands
crate::commands::cmd_decrypt_template,
crate::commands::cmd_default_headers,
crate::commands::cmd_disable_encryption,
crate::commands::cmd_enable_encryption,
crate::commands::cmd_get_themes,
crate::commands::cmd_reveal_workspace_key,
@@ -1751,13 +1649,10 @@ pub fn run() {
git_ext::cmd_git_checkout,
git_ext::cmd_git_branch,
git_ext::cmd_git_delete_branch,
git_ext::cmd_git_delete_remote_branch,
git_ext::cmd_git_merge_branch,
git_ext::cmd_git_rename_branch,
git_ext::cmd_git_status,
git_ext::cmd_git_log,
git_ext::cmd_git_initialize,
git_ext::cmd_git_clone,
git_ext::cmd_git_commit,
git_ext::cmd_git_fetch_all,
git_ext::cmd_git_push,
@@ -1769,13 +1664,6 @@ pub fn run() {
git_ext::cmd_git_add_remote,
git_ext::cmd_git_rm_remote,
//
// Plugin commands
plugins_ext::cmd_plugins_search,
plugins_ext::cmd_plugins_install,
plugins_ext::cmd_plugins_uninstall,
plugins_ext::cmd_plugins_updates,
plugins_ext::cmd_plugins_update_all,
//
// WebSocket commands
ws_ext::cmd_ws_upsert_request,
ws_ext::cmd_ws_duplicate_request,

View File

@@ -1,6 +1,5 @@
use crate::error::Result;
use crate::history::get_or_upsert_launch_info;
use crate::models_ext::QueryManagerExt;
use chrono::{DateTime, Utc};
use log::{debug, info};
use reqwest::Method;
@@ -9,8 +8,9 @@ use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;

View File

@@ -1,7 +1,5 @@
use crate::error::Result;
use crate::http_request::send_http_request_with_context;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::{render_grpc_request, render_http_request, render_json_value};
use crate::window::{CreateWindowConfig, create_window};
use crate::{
@@ -16,8 +14,11 @@ use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak_crypto::manager::EncryptionManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use crate::models_ext::BlobManagerExt;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{
@@ -31,7 +32,6 @@ use yaak_plugins::events::{
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_templates::{RenderErrorBehavior, RenderOptions};
pub(crate) async fn handle_plugin_event<R: Runtime>(
@@ -57,10 +57,6 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = app_handle
.db()
@@ -170,12 +166,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
@@ -196,12 +187,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
@@ -232,12 +218,7 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))

View File

@@ -17,7 +17,7 @@ use tauri::path::BaseDirectory;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
is_dev,
generate_handler, is_dev,
};
use tokio::sync::Mutex;
use ts_rs::TS;
@@ -132,7 +132,7 @@ impl PluginUpdater {
// ============================================================================
#[command]
pub async fn cmd_plugins_search<R: Runtime>(
pub(crate) async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {
@@ -141,7 +141,7 @@ pub async fn cmd_plugins_search<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_install<R: Runtime>(
pub(crate) async fn cmd_plugins_install<R: Runtime>(
window: WebviewWindow<R>,
name: &str,
version: Option<String>,
@@ -163,7 +163,7 @@ pub async fn cmd_plugins_install<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_uninstall<R: Runtime>(
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
plugin_id: &str,
window: WebviewWindow<R>,
) -> Result<Plugin> {
@@ -174,7 +174,7 @@ pub async fn cmd_plugins_uninstall<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_updates<R: Runtime>(
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let http_client = yaak_api_client(&app_handle)?;
@@ -183,7 +183,7 @@ pub async fn cmd_plugins_updates<R: Runtime>(
}
#[command]
pub async fn cmd_plugins_update_all<R: Runtime>(
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let http_client = yaak_api_client(window.app_handle())?;
@@ -233,6 +233,13 @@ pub async fn cmd_plugins_update_all<R: Runtime>(
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-plugins")
.invoke_handler(generate_handler![
cmd_plugins_search,
cmd_plugins_install,
cmd_plugins_uninstall,
cmd_plugins_updates,
cmd_plugins_update_all
])
.setup(|app_handle, _| {
// Resolve paths for plugin manager
let vendored_plugin_dir = app_handle

View File

@@ -3,7 +3,6 @@ use std::path::PathBuf;
use std::time::{Duration, Instant};
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};
@@ -12,6 +11,7 @@ use tauri_plugin_updater::{Update, UpdaterExt};
use tokio::task::block_in_place;
use tokio::time::sleep;
use ts_rs::TS;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;

View File

@@ -1,18 +1,18 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::import::import_data;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::{info, warn};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;
pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,
@@ -55,8 +55,7 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
&plugin_context,
name,
version,
)
.await?;
).await?;
app_handle.emit(
"show_toast",
ShowToastRequest {

View File

@@ -1,5 +1,4 @@
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
@@ -9,6 +8,7 @@ use tauri::{
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;
use crate::models_ext::QueryManagerExt;
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;

View File

@@ -1,9 +1,9 @@
//! WebSocket Tauri command wrappers
//! These wrap the core yaak-ws functionality for Tauri IPC.
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use http::HeaderMap;
use log::{debug, info, warn};
use std::str::FromStr;
@@ -56,10 +56,9 @@ pub async fn cmd_ws_delete_request<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle.db().delete_websocket_request_by_id(
request_id,
&UpdateSource::from_window_label(window.label()),
)?)
Ok(app_handle
.db()
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
}
#[command]
@@ -68,10 +67,12 @@ pub async fn cmd_ws_delete_connection<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketConnection> {
Ok(app_handle.db().delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
Ok(app_handle
.db()
.delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
}
#[command]
@@ -295,10 +296,8 @@ pub async fn cmd_ws_connect<R: Runtime>(
)
.await?;
for header in plugin_result.set_headers.unwrap_or_default() {
match (
http::HeaderName::from_str(&header.name),
HeaderValue::from_str(&header.value),
) {
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
{
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}
@@ -356,7 +355,7 @@ pub async fn cmd_ws_connect<R: Runtime>(
url.as_str(),
headers,
receive_tx,
workspace.setting_validate_certificates,
workspace.setting_validate_certificates.unwrap_or(true),
client_cert,
)
.await

View File

@@ -44,8 +44,8 @@
"vendored/protoc/include",
"vendored/plugins",
"vendored/plugin-runtime",
"vendored/node/yaaknode*",
"vendored/protoc/yaakprotoc*"
"vendored/node/yaaknode",
"vendored/protoc/yaakprotoc"
]
}
}

View File

@@ -8,10 +8,10 @@ use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.

View File

@@ -6,4 +6,3 @@ publish = false
[dependencies]
serde_json = { workspace = true }
tokio = { workspace = true, features = ["process"] }

View File

@@ -1,16 +0,0 @@
use std::ffi::OsStr;
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
#[allow(unused_mut)]
let mut cmd = tokio::process::Command::new(program);
#[cfg(target_os = "windows")]
{
use std::os::windows::process::CommandExt;
cmd.creation_flags(CREATE_NO_WINDOW);
}
cmd
}

View File

@@ -1,3 +1,2 @@
pub mod command;
pub mod platform;
pub mod serde;

View File

@@ -11,7 +11,3 @@ export function revealWorkspaceKey(workspaceId: string) {
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
return invoke<void>('cmd_set_workspace_key', args);
}
export function disableEncryption(workspaceId: string) {
return invoke<void>('cmd_disable_encryption', { workspaceId });
}

View File

@@ -115,35 +115,6 @@ impl EncryptionManager {
self.set_workspace_key(workspace_id, &wkey)
}
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
info!("Disabling encryption for {workspace_id}");
self.query_manager.with_tx::<(), Error>(|tx| {
let workspace = tx.get_workspace(workspace_id)?;
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
// Clear encryption challenge on workspace
tx.upsert_workspace(
&Workspace { encryption_key_challenge: None, ..workspace },
&UpdateSource::Background,
)?;
// Clear encryption key on workspace meta
tx.upsert_workspace_meta(
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
&UpdateSource::Background,
)?;
Ok(())
})?;
// Remove from cache
let mut cache = self.cached_workspace_keys.lock().unwrap();
cache.remove(workspace_id);
Ok(())
}
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
{
let cache = self.cached_workspace_keys.lock().unwrap();

View File

@@ -12,9 +12,7 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = "0.9.34"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["io-util"] }
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
url = "2"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-sync = { workspace = true }

View File

@@ -1,10 +1,6 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncModel } from "./gen_models";
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
export type GitAuthor = { name: string | null, email: string | null, };
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };

View File

@@ -1,7 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -20,4 +18,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };

View File

@@ -3,7 +3,7 @@ import { invoke } from '@tauri-apps/api/core';
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
import { queryClient } from '@yaakapp/app/lib/queryClient';
import { useMemo } from 'react';
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
export * from './bindings/gen_git';
@@ -59,6 +59,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
@@ -89,31 +90,21 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
onSuccess,
}),
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
branch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'branch', dir],
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
onSuccess,
}),
mergeBranch: createFastMutation<void, string, { branch: string }>({
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
mutationKey: ['git', 'merge', dir],
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
onSuccess,
}),
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
deleteBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'delete-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
onSuccess,
}),
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'delete-remote-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
onSuccess,
}),
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
mutationKey: ['git', 'rename-branch', dir],
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
onSuccess,
}),
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
mutationKey: ['git', 'checkout', dir],
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
@@ -153,6 +144,7 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
@@ -174,28 +166,3 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
async function getRemotes(dir: string) {
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
}
/**
* Clone a git repository, prompting for credentials if needed.
*/
export async function gitClone(
url: string,
dir: string,
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
): Promise<CloneResult> {
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
if (result.type !== 'needs_credentials') return result;
// Prompt for credentials
const creds = await promptCredentials({ url: result.url, error: result.error });
if (creds == null) return {type: 'cancelled'};
// Store credentials and retry
await invoke('cmd_git_add_credential', {
remoteUrl: result.url,
username: creds.username,
password: creds.password,
});
return invoke<CloneResult>('cmd_git_clone', { url, dir });
}

View File

@@ -1,30 +1,38 @@
use crate::error::Error::GitNotFound;
use crate::error::Result;
use std::path::Path;
use std::process::Stdio;
use tokio::process::Command;
use yaak_common::command::new_xplatform_command;
use std::process::{Command, Stdio};
/// Create a git command that runs in the specified directory
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
let mut cmd = new_binary_command_global().await?;
cmd.arg("-C").arg(dir);
Ok(cmd)
}
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
/// Create a git command without a specific directory (for global operations)
pub(crate) async fn new_binary_command_global() -> Result<Command> {
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
// 1. Probe that `git` exists and is runnable
let mut probe = new_xplatform_command("git");
let mut probe = Command::new("git");
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
let status = probe.status().await.map_err(|_| GitNotFound)?;
#[cfg(target_os = "windows")]
{
probe.creation_flags(CREATE_NO_WINDOW);
}
let status = probe.status().map_err(|_| GitNotFound)?;
if !status.success() {
return Err(GitNotFound);
}
// 2. Build the reusable git command
let cmd = new_xplatform_command("git");
let mut cmd = Command::new("git");
cmd.arg("-C").arg(dir);
#[cfg(target_os = "windows")]
{
cmd.creation_flags(CREATE_NO_WINDOW);
}
Ok(cmd)
}

View File

@@ -1,153 +1,99 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::merge::do_merge;
use crate::repository::open_repo;
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
use git2::BranchType;
use git2::build::CheckoutBuilder;
use log::info;
use std::path::Path;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum BranchDeleteResult {
Success { message: String },
NotFullyMerged,
}
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
if branch_name.starts_with("origin/") {
return git_checkout_remote_branch(dir, branch_name, force);
}
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let repo = open_repo(dir)?;
let branch = get_branch_by_name(&repo, branch_name)?;
let branch_ref = branch.into_reference();
let branch_tree = branch_ref.peel_to_tree()?;
let mut args = vec!["checkout"];
let mut options = CheckoutBuilder::default();
if force {
args.push("--force");
options.force();
}
args.push(branch_name);
let out = new_binary_command(dir)
.await?
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
}
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
repo.set_head(branch_ref.name().unwrap())?;
Ok(branch_name.to_string())
}
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
let mut cmd = new_binary_command(dir).await?;
cmd.arg("branch").arg(name);
if let Some(base_branch) = base {
cmd.arg(base_branch);
}
pub(crate) fn git_checkout_remote_branch(
dir: &Path,
branch_name: &str,
force: bool,
) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let repo = open_repo(dir)?;
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
let refname = format!("refs/remotes/origin/{}", branch_name);
let remote_ref = repo.find_reference(&refname)?;
let commit = remote_ref.peel_to_commit()?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let mut new_branch = repo.branch(branch_name, &commit, false)?;
let upstream_name = format!("origin/{}", branch_name);
new_branch.set_upstream(Some(&upstream_name))?;
if !out.status.success() {
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
}
Ok(())
git_checkout_branch(dir, branch_name, force)
}
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
let mut cmd = new_binary_command(dir).await?;
let out =
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
return Ok(BranchDeleteResult::NotFullyMerged);
}
if !out.status.success() {
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
}
Ok(BranchDeleteResult::Success { message: combined })
}
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["merge", name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
// Check for merge conflicts
if combined.to_lowercase().contains("conflict") {
return Err(GenericError(
"Merge conflicts detected. Please resolve them manually.".to_string(),
));
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let head = match repo.head() {
Ok(h) => h,
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
let msg = "Cannot create branch when there are no commits";
return Err(GenericError(msg.into()));
}
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
}
Err(e) => return Err(e.into()),
};
let head = head.peel_to_commit()?;
repo.branch(name, &head, false)?;
Ok(())
}
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
// Remote branch names come in as "origin/branch-name", extract the branch name
let branch_name = name.trim_start_matches("origin/");
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let mut branch = get_branch_by_name(&repo, name)?;
let out = new_binary_command(dir)
.await?
.args(["push", "origin", "--delete", branch_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
if branch.is_head() {
info!("Deleting head branch");
let branches = repo.branches(Some(BranchType::Local))?;
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
let other_branch = match other_branch {
None => return Err(GenericError("Cannot delete only branch".into())),
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
};
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
git_checkout_branch(dir, &other_branch, true)?;
}
branch.delete()?;
Ok(())
}
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["branch", "-m", old_name, new_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
let repo = open_repo(dir)?;
let local_branch = get_current_branch(&repo)?.unwrap();
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
if !out.status.success() {
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
}
do_merge(&repo, &local_branch, &commit_to_merge)?;
Ok(())
}
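
`git_delete_branch` surfaces the "not fully merged" case as its own `BranchDeleteResult::NotFullyMerged` variant instead of an error, so a caller can offer a forced retry. A hedged sketch of how a caller might branch on the two variants (the confirmation callback is a placeholder, not part of this diff):

```rust
// Illustrative only: mirrors the BranchDeleteResult shape from the diff,
// but the handling logic and confirm_force callback are assumptions.
enum BranchDeleteResult {
    Success { message: String },
    NotFullyMerged,
}

fn handle(result: BranchDeleteResult, confirm_force: impl Fn() -> bool) -> Result<(), String> {
    match result {
        BranchDeleteResult::Success { message } => {
            println!("{message}");
            Ok(())
        }
        // The branch has unmerged commits; a caller could retry with force=true
        // (i.e. `git branch -D`) after confirmation.
        BranchDeleteResult::NotFullyMerged if confirm_force() => {
            println!("retrying delete with force=true");
            Ok(())
        }
        BranchDeleteResult::NotFullyMerged => Err("delete cancelled".to_string()),
    }
}

fn main() {
    assert!(handle(BranchDeleteResult::Success { message: "Deleted branch".into() }, || false).is_ok());
    assert!(handle(BranchDeleteResult::NotFullyMerged, || false).is_err());
}
```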

View File

@@ -1,53 +0,0 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
Success,
Cancelled,
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
fs::create_dir_all(parent)
.map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
let mut cmd = new_binary_command(parent).await?;
cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let combined_lower = combined.to_lowercase();
info!("Cloned status={}: {combined}", out.status);
if !out.status.success() {
// Check for credentials error
if combined_lower.contains("could not read") {
return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
}
if combined_lower.contains("unable to access")
|| combined_lower.contains("authentication failed")
{
return Ok(CloneResult::NeedsCredentials {
url: url.to_string(),
error: Some(combined.to_string()),
});
}
return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
}
Ok(CloneResult::Success)
}

View File

@@ -3,9 +3,8 @@ use crate::error::Error::GenericError;
use log::info;
use std::path::Path;
pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out =
new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;
pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,19 +1,24 @@
use crate::binary::new_binary_command_global;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::io::Write;
use std::path::Path;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use url::Url;
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
pub async fn git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
let url = Url::parse(remote_url)
.map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
let protocol = url.scheme();
let host = url.host_str().unwrap();
let path = Some(url.path());
let mut child = new_binary_command_global()
.await?
let mut child = new_binary_command(dir)?
.args(["credential", "approve"])
.stdin(Stdio::piped())
.stdout(Stdio::null())
@@ -21,21 +26,19 @@ pub async fn git_add_credential(remote_url: &str, username: &str, password: &str
{
let stdin = child.stdin.as_mut().unwrap();
stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
writeln!(stdin, "protocol={}", protocol)?;
writeln!(stdin, "host={}", host)?;
if let Some(path) = path {
if !path.is_empty() {
stdin
.write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
.await?;
writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
}
}
stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
stdin.write_all(b"\n").await?; // blank line terminator
writeln!(stdin, "username={}", username)?;
writeln!(stdin, "password={}", password)?;
writeln!(stdin)?; // blank line terminator
}
let status = child.wait().await?;
let status = child.wait()?;
if !status.success() {
return Err(GenericError("Failed to approve git credential".to_string()));
}
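
The credential helper above streams a key/value description to `git credential approve` and terminates it with a blank line. For reference, a minimal sketch of the text it writes (the host, path, and credentials below are placeholders, not values from the diff):

```rust
// Builds the key/value description piped to `git credential approve`.
// The keys (protocol, host, path, username, password) follow git's
// credential protocol; the values here are illustrative.
fn credential_description(protocol: &str, host: &str, path: &str, user: &str, pass: &str) -> String {
    let mut s = String::new();
    s.push_str(&format!("protocol={protocol}\n"));
    s.push_str(&format!("host={host}\n"));
    if !path.is_empty() {
        s.push_str(&format!("path={}\n", path.trim_start_matches('/')));
    }
    s.push_str(&format!("username={user}\npassword={pass}\n"));
    s.push('\n'); // blank line terminates the entry
    s
}

fn main() {
    print!("{}", credential_description("https", "github.com", "/acme/repo.git", "alice", "s3cr3t"));
}
```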

View File

@@ -3,12 +3,10 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;
pub async fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)
.await?
pub fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)?
.args(["fetch", "--all", "--prune", "--tags"])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,14 +1,13 @@
mod add;
mod binary;
mod branch;
mod clone;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
mod merge;
mod pull;
mod push;
mod remotes;
@@ -19,11 +18,7 @@ mod util;
// Re-export all git functions for external use
pub use add::git_add;
pub use branch::{
BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
git_delete_remote_branch, git_merge_branch, git_rename_branch,
};
pub use clone::{CloneResult, git_clone};
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
pub use commit::git_commit;
pub use credential::git_add_credential;
pub use fetch::git_fetch_all;

View File

@@ -0,0 +1,135 @@
use crate::error::Error::MergeConflicts;
use crate::util::bytes_to_string;
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
use log::{debug, info};
pub(crate) fn do_merge(
repo: &Repository,
local_branch: &Branch,
commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
debug!("Merging remote branches");
let analysis = repo.merge_analysis(&[&commit_to_merge])?;
if analysis.0.is_fast_forward() {
let refname = bytes_to_string(local_branch.get().name_bytes())?;
match repo.find_reference(&refname) {
Ok(mut r) => {
merge_fast_forward(repo, &mut r, &commit_to_merge)?;
}
Err(_) => {
// The branch doesn't exist, so set the reference to the commit directly. Usually
// this is because you are pulling into an empty repository.
repo.reference(
&refname,
commit_to_merge.id(),
true,
&format!("Setting {} to {}", refname, commit_to_merge.id()),
)?;
repo.set_head(&refname)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
.allow_conflicts(true)
.conflict_style_merge(true)
.force(),
))?;
}
};
} else if analysis.0.is_normal() {
let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
merge_normal(repo, &head_commit, commit_to_merge)?;
} else {
debug!("Skipping merge. Nothing to do")
}
Ok(())
}
pub(crate) fn merge_fast_forward(
repo: &Repository,
local_reference: &mut Reference,
remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing fast forward");
let name = match local_reference.name() {
Some(s) => s.to_string(),
None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
};
let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
local_reference.set_target(remote_commit.id(), &msg)?;
repo.set_head(&name)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
// For some reason, the force is required to make the working directory actually get
// updated. I suspect we should be adding some logic to handle dirty working directory
// states, but this is just an example so maybe not.
.force(),
))?;
Ok(())
}
pub(crate) fn merge_normal(
repo: &Repository,
local: &AnnotatedCommit,
remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing normal merge");
let local_tree = repo.find_commit(local.id())?.tree()?;
let remote_tree = repo.find_commit(remote.id())?.tree()?;
let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;
let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;
if idx.has_conflicts() {
let conflicts = idx.conflicts()?;
for conflict in conflicts {
if let Ok(conflict) = conflict {
print_conflict(&conflict);
}
}
return Err(MergeConflicts);
}
let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
// now create the merge commit
let msg = format!("Merge: {} into {}", remote.id(), local.id());
let sig = repo.signature()?;
let local_commit = repo.find_commit(local.id())?;
let remote_commit = repo.find_commit(remote.id())?;
// Do our merge commit and set current branch head to that commit.
let _merge_commit = repo.commit(
Some("HEAD"),
&sig,
&sig,
&msg,
&result_tree,
&[&local_commit, &remote_commit],
)?;
// Set working tree to match head.
repo.checkout_head(None)?;
Ok(())
}
fn print_conflict(conflict: &git2::IndexConflict) {
let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
let ours = conflict.our.as_ref().map(path_from_index_entry);
let theirs = conflict.their.as_ref().map(path_from_index_entry);
println!("Conflict detected:");
if let Some(path) = ancestor {
println!(" Common ancestor: {:?}", path);
}
if let Some(path) = ours {
println!(" Ours: {:?}", path);
}
if let Some(path) = theirs {
println!(" Theirs: {:?}", path);
}
}
fn path_from_index_entry(entry: &IndexEntry) -> String {
String::from_utf8_lossy(entry.path.as_slice()).into_owned()
}

View File

@@ -17,25 +17,17 @@ pub enum PullResult {
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
pub fn git_pull(dir: &Path) -> Result<PullResult> {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
let out = new_binary_command(dir)
.await?
let out = new_binary_command(dir)?
.args(["pull", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);

View File

@@ -17,25 +17,17 @@ pub enum PushResult {
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_push(dir: &Path) -> Result<PushResult> {
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
pub fn git_push(dir: &Path) -> Result<PushResult> {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
let out = new_binary_command(dir)
.await?
let out = new_binary_command(dir)?
.args(["push", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);

View File

@@ -47,6 +47,10 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
Ok(branches)
}
pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
Ok(repo.find_branch(name, BranchType::Local)?)
}
pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
Ok(String::from_utf8(bytes.to_vec())?)
}

View File

@@ -22,6 +22,5 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-common = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"

View File

@@ -115,18 +115,14 @@ impl GrpcConnection {
Ok(client.unary(req, path, codec).await?)
}
pub async fn streaming<F>(
pub async fn streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
on_message: F,
) -> Result<Response<Streaming<DynamicMessage>>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
) -> Result<Response<Streaming<DynamicMessage>>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -135,39 +131,31 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
}
}
})
.filter_map(|x| x)
})
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -181,18 +169,14 @@ impl GrpcConnection {
Ok(client.streaming(req, path, codec).await?)
}
pub async fn client_streaming<F>(
pub async fn client_streaming(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
on_message: F,
) -> Result<Response<DynamicMessage>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
) -> Result<Response<DynamicMessage>> {
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -201,39 +185,31 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
}
}
})
.filter_map(|x| x)
})
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -340,9 +316,10 @@ impl GrpcHandle {
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
skip_cache: bool,
) -> Result<Vec<ServiceDefinition>> {
// Ensure we have a pool; reflect only if missing
if self.get_pool(id, uri, proto_files).is_none() {
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
info!("Reflecting gRPC services for {} at {}", id, uri);
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
.await?;

View File

@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use tokio::fs;
use tokio::process::Command;
use tokio::sync::RwLock;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
use yaak_common::command::new_xplatform_command;
use yaak_tls::ClientCertificateConfig;
pub async fn fill_pool_from_files(
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(
info!("Invoking protoc with {}", args.join(" "));
let mut cmd = new_xplatform_command(&config.protoc_bin_path);
cmd.args(&args);
let out =
cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
let out = Command::new(&config.protoc_bin_path)
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
if !out.status.success() {
return Err(GenericError(format!(

View File

@@ -2,8 +2,6 @@ use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::{debug, info, warn};
use reqwest::{Client, Proxy, redirect};
use std::sync::Arc;
use yaak_models::models::DnsOverride;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
#[derive(Clone)]
@@ -30,14 +28,10 @@ pub struct HttpConnectionOptions {
pub validate_certificates: bool,
pub proxy: HttpConnectionProxySetting,
pub client_certificate: Option<ClientCertificateConfig>,
pub dns_overrides: Vec<DnsOverride>,
}
impl HttpConnectionOptions {
/// Build a reqwest Client and return it along with the DNS resolver.
/// The resolver is returned separately so it can be configured per-request
/// to emit DNS timing events to the appropriate channel.
pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
pub(crate) fn build_client(&self) -> Result<Client> {
let mut client = Client::builder()
.connection_verbose(true)
.redirect(redirect::Policy::none())
@@ -46,19 +40,15 @@ impl HttpConnectionOptions {
.no_brotli()
.no_deflate()
.referer(false)
.tls_info(true)
// Disable connection pooling to ensure DNS resolution happens on each request
// This is needed so we can emit DNS timing events for each request
.pool_max_idle_per_host(0);
.tls_info(true);
// Configure TLS with optional client certificate
let config =
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
// Configure DNS resolver - keep a reference to configure per-request
let resolver = LocalhostResolver::new(self.dns_overrides.clone());
client = client.dns_resolver(resolver.clone());
// Configure DNS resolver
client = client.dns_resolver(LocalhostResolver::new());
// Configure proxy
match self.proxy.clone() {
@@ -79,7 +69,7 @@ impl HttpConnectionOptions {
self.client_certificate.is_some()
);
Ok((client.build()?, resolver))
Ok(client.build()?)
}
}

View File

@@ -1,185 +1,53 @@
use crate::sender::HttpResponseEvent;
use hyper_util::client::legacy::connect::dns::{
GaiResolver as HyperGaiResolver, Name as HyperName,
};
use log::info;
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
use std::collections::HashMap;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{RwLock, mpsc};
use tower_service::Service;
use yaak_models::models::DnsOverride;
/// Stores resolved addresses for a hostname override
#[derive(Clone)]
pub struct ResolvedOverride {
pub ipv4: Vec<Ipv4Addr>,
pub ipv6: Vec<Ipv6Addr>,
}
#[derive(Clone)]
pub struct LocalhostResolver {
fallback: HyperGaiResolver,
event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
overrides: Arc<HashMap<String, ResolvedOverride>>,
}
impl LocalhostResolver {
pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
pub fn new() -> Arc<Self> {
let resolver = HyperGaiResolver::new();
// Pre-parse DNS overrides into a lookup map
let mut overrides = HashMap::new();
for o in dns_overrides {
if !o.enabled {
continue;
}
let hostname = o.hostname.to_lowercase();
let ipv4: Vec<Ipv4Addr> =
o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();
let ipv6: Vec<Ipv6Addr> =
o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();
// Only add if at least one address is valid
if !ipv4.is_empty() || !ipv6.is_empty() {
overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
}
}
Arc::new(Self {
fallback: resolver,
event_tx: Arc::new(RwLock::new(None)),
overrides: Arc::new(overrides),
})
}
/// Set the event sender for the current request.
/// This should be called before each request to direct DNS events
/// to the appropriate channel.
pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
let mut guard = self.event_tx.write().await;
*guard = tx;
Arc::new(Self { fallback: resolver })
}
}
impl Resolve for LocalhostResolver {
fn resolve(&self, name: Name) -> Resolving {
let host = name.as_str().to_lowercase();
let event_tx = self.event_tx.clone();
let overrides = self.overrides.clone();
info!("DNS resolve called for: {}", host);
// Check for DNS override first
if let Some(resolved) = overrides.get(&host) {
log::debug!("DNS override found for: {}", host);
let hostname = host.clone();
let mut addrs: Vec<SocketAddr> = Vec::new();
// Add IPv4 addresses
for ip in &resolved.ipv4 {
addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
}
// Add IPv6 addresses
for ip in &resolved.ipv6 {
addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
}
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for override
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: true,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Check for .localhost suffix
let is_localhost = host.ends_with(".localhost");
if is_localhost {
let hostname = host.clone();
// Port 0 is fine; reqwest replaces it with the URL's explicit
// port or the scheme's default (80/443, etc.).
// port or the scheme’s default (80/443, etc.).
// (See docs note below.)
let addrs: Vec<SocketAddr> = vec![
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
];
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for localhost resolution
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: false,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Fall back to system DNS
let mut fallback = self.fallback.clone();
let name_str = name.as_str().to_string();
let hostname = host.clone();
Box::pin(async move {
let start = Instant::now();
let result = match HyperName::from_str(&name_str) {
Ok(n) => fallback.call(n).await,
Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
};
let duration = start.elapsed().as_millis() as u64;
match result {
Ok(addrs) => {
// Collect addresses for event emission
let addr_vec: Vec<SocketAddr> = addrs.collect();
let addresses: Vec<String> =
addr_vec.iter().map(|a| a.ip().to_string()).collect();
// Emit DNS event
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration,
overridden: false,
})
.await;
}
Ok(Box::new(addr_vec.into_iter()) as Addrs)
}
Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
match HyperName::from_str(&name_str) {
Ok(n) => fallback
.call(n)
.await
.map(|addrs| Box::new(addrs) as Addrs)
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
}
})
}
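
The resolver special-cases `*.localhost` hostnames by returning the loopback addresses directly, with port 0 left for reqwest to fill in from the URL. A tiny standalone sketch of just that mapping, without any reqwest types (illustrative only):

```rust
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};

// Sketch of the special case shown above: any *.localhost host maps straight
// to the loopback addresses; the port stays 0 and is replaced later from the URL.
fn localhost_addrs(host: &str) -> Option<Vec<SocketAddr>> {
    if !host.to_lowercase().ends_with(".localhost") {
        return None; // fall back to normal DNS resolution
    }
    Some(vec![
        SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
        SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
    ])
}

fn main() {
    assert!(localhost_addrs("api.localhost").is_some());
    assert!(localhost_addrs("example.com").is_none());
}
```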

View File

@@ -1,5 +1,4 @@
use crate::client::HttpConnectionOptions;
use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::info;
use reqwest::Client;
@@ -8,15 +7,8 @@ use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
/// A cached HTTP client along with its DNS resolver.
/// The resolver is needed to set the event sender per-request.
pub struct CachedClient {
pub client: Client,
pub resolver: Arc<LocalhostResolver>,
}
pub struct HttpConnectionManager {
connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
ttl: Duration,
}
@@ -28,26 +20,21 @@ impl HttpConnectionManager {
}
}
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
let mut connections = self.connections.write().await;
let id = opt.id.clone();
// Clean old connections
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
if let Some((cached, last_used)) = connections.get_mut(&id) {
if let Some((c, last_used)) = connections.get_mut(&id) {
info!("Re-using HTTP client {id}");
*last_used = Instant::now();
return Ok(CachedClient {
client: cached.client.clone(),
resolver: cached.resolver.clone(),
});
return Ok(c.clone());
}
let (client, resolver) = opt.build_client()?;
let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
connections.insert(id.into(), (cached, Instant::now()));
Ok(CachedClient { client, resolver })
let c = opt.build_client()?;
connections.insert(id.into(), (c.clone(), Instant::now()));
Ok(c)
}
}

View File

@@ -45,12 +45,6 @@ pub enum HttpResponseEvent {
ChunkReceived {
bytes: usize,
},
DnsResolved {
hostname: String,
addresses: Vec<String>,
duration: u64,
overridden: bool,
},
}
impl Display for HttpResponseEvent {
@@ -73,19 +67,6 @@ impl Display for HttpResponseEvent {
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
if *overridden {
write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
} else {
write!(
f,
"* DNS resolved {} to {} ({}ms)",
hostname,
addresses.join(", "),
duration
)
}
}
}
}
}
@@ -112,9 +93,6 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
D::DnsResolved { hostname, addresses, duration, overridden }
}
}
}
}
@@ -376,9 +354,6 @@ impl HttpSender for ReqwestSender {
// Add headers
for header in request.headers {
if header.0.is_empty() {
continue;
}
req_builder = req_builder.header(&header.0, &header.1);
}

View File

@@ -342,8 +342,7 @@ mod tests {
#[tokio::test]
async fn test_transaction_single_redirect() {
let redirect_headers =
vec![("Location".to_string(), "https://example.com/new".to_string())];
let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
let responses = vec![
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
@@ -374,8 +373,7 @@ mod tests {
#[tokio::test]
async fn test_transaction_max_redirects_exceeded() {
let redirect_headers =
vec![("Location".to_string(), "https://example.com/loop".to_string())];
let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
// Create more redirects than allowed
let responses: Vec<MockResponse> = (0..12)
@@ -527,8 +525,7 @@ mod tests {
_request: SendableHttpRequest,
_event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
let headers =
vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
Box::pin(std::io::Cursor::new(vec![]));
@@ -587,10 +584,7 @@ mod tests {
let headers = vec![
("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
(
"set-cookie".to_string(),
"preferences=dark; Path=/; Max-Age=86400".to_string(),
),
("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =

View File

@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -93,6 +91,6 @@ export type WebsocketMessageType = "text" | "binary";
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -206,34 +206,6 @@ export function replaceModelsInStore<
});
}
export function mergeModelsInStore<
M extends AnyModel['model'],
T extends Extract<AnyModel, { model: M }>,
>(model: M, models: T[], filter?: (model: T) => boolean) {
mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
const existingModels = { ...prev[model] } as Record<string, T>;
// Merge in new models first
for (const m of models) {
existingModels[m.id] = m;
}
// Then filter out unwanted models
if (filter) {
for (const [id, m] of Object.entries(existingModels)) {
if (!filter(m)) {
delete existingModels[id];
}
}
}
return {
...prev,
[model]: existingModels,
};
});
}
function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
// Never ignore updates from non-user sources
if (updateSource.type !== 'window') {

View File

@@ -0,0 +1,9 @@
-- Add nullable settings columns to folders (NULL = inherit from parent)
ALTER TABLE folders ADD COLUMN setting_request_timeout INTEGER DEFAULT NULL;
ALTER TABLE folders ADD COLUMN setting_validate_certificates BOOLEAN DEFAULT NULL;
ALTER TABLE folders ADD COLUMN setting_follow_redirects BOOLEAN DEFAULT NULL;
-- Add nullable settings columns to http_requests (NULL = inherit from parent)
ALTER TABLE http_requests ADD COLUMN setting_request_timeout INTEGER DEFAULT NULL;
ALTER TABLE http_requests ADD COLUMN setting_validate_certificates BOOLEAN DEFAULT NULL;
ALTER TABLE http_requests ADD COLUMN setting_follow_redirects BOOLEAN DEFAULT NULL;

View File

@@ -1,2 +0,0 @@
-- Add DNS resolution timing to http_responses
ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;

View File

@@ -1,2 +0,0 @@
-- Add DNS overrides setting to workspaces
ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;

View File

@@ -1,12 +0,0 @@
-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
-- keeping any other custom headers the user may have added.
UPDATE workspaces
SET headers = (
SELECT json_group_array(json(value))
FROM json_each(headers)
WHERE NOT (
(LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
)
)
WHERE json_array_length(headers) > 0;

View File

@@ -1,8 +1,4 @@
use crate::error::Result;
use crate::models::HttpRequestIden::{
Authentication, AuthenticationType, Body, BodyType, CreatedAt, Description, FolderId, Headers,
Method, Name, SortPriority, UpdatedAt, Url, UrlParameters, WorkspaceId,
};
use crate::util::{UpdateSource, generate_prefixed_id};
use chrono::{NaiveDateTime, Utc};
use rusqlite::Row;
@@ -73,20 +69,6 @@ pub struct ClientCertificate {
pub enabled: bool,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct DnsOverride {
pub hostname: String,
#[serde(default)]
pub ipv4: Vec<String>,
#[serde(default)]
pub ipv6: Vec<String>,
#[serde(default = "default_true")]
#[ts(optional, as = "Option<bool>")]
pub enabled: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
@@ -129,6 +111,36 @@ impl Default for EditorKeymap {
}
}
/// Settings that can be inherited at workspace → folder → request level.
/// All fields optional - None means "inherit from parent" (or use default if at root).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct HttpRequestSettingsOverride {
pub setting_validate_certificates: Option<bool>,
pub setting_follow_redirects: Option<bool>,
pub setting_request_timeout: Option<i32>,
}
/// Resolved settings with concrete values (after inheritance + defaults applied)
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedHttpRequestSettings {
pub validate_certificates: bool,
pub follow_redirects: bool,
pub request_timeout: i32,
}
impl ResolvedHttpRequestSettings {
/// Default values when nothing is set in the inheritance chain
pub fn defaults() -> Self {
Self {
validate_certificates: true,
follow_redirects: true,
request_timeout: 0,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, Default, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
@@ -311,14 +323,10 @@ pub struct Workspace {
pub name: String,
pub encryption_key_challenge: Option<String>,
// Settings
#[serde(default = "default_true")]
pub setting_validate_certificates: bool,
#[serde(default = "default_true")]
pub setting_follow_redirects: bool,
pub setting_request_timeout: i32,
#[serde(default)]
pub setting_dns_overrides: Vec<DnsOverride>,
// Inheritable settings (Option = can be null, defaults applied at resolution time)
pub setting_validate_certificates: Option<bool>,
pub setting_follow_redirects: Option<bool>,
pub setting_request_timeout: Option<i32>,
}
impl UpsertModelInfo for Workspace {
@@ -359,7 +367,6 @@ impl UpsertModelInfo for Workspace {
(SettingFollowRedirects, self.setting_follow_redirects.into()),
(SettingRequestTimeout, self.setting_request_timeout.into()),
(SettingValidateCertificates, self.setting_validate_certificates.into()),
(SettingDnsOverrides, serde_json::to_string(&self.setting_dns_overrides)?.into()),
])
}
@@ -376,7 +383,6 @@ impl UpsertModelInfo for Workspace {
WorkspaceIden::SettingFollowRedirects,
WorkspaceIden::SettingRequestTimeout,
WorkspaceIden::SettingValidateCertificates,
WorkspaceIden::SettingDnsOverrides,
]
}
@@ -386,7 +392,6 @@ impl UpsertModelInfo for Workspace {
{
let headers: String = row.get("headers")?;
let authentication: String = row.get("authentication")?;
let setting_dns_overrides: String = row.get("setting_dns_overrides")?;
Ok(Self {
id: row.get("id")?,
model: row.get("model")?,
@@ -401,7 +406,6 @@ impl UpsertModelInfo for Workspace {
setting_follow_redirects: row.get("setting_follow_redirects")?,
setting_request_timeout: row.get("setting_request_timeout")?,
setting_validate_certificates: row.get("setting_validate_certificates")?,
setting_dns_overrides: serde_json::from_str(&setting_dns_overrides).unwrap_or_default(),
})
}
}
@@ -746,6 +750,11 @@ pub struct Folder {
pub headers: Vec<HttpRequestHeader>,
pub name: String,
pub sort_priority: f64,
// Inheritable settings (Option = null means inherit from parent)
pub setting_validate_certificates: Option<bool>,
pub setting_follow_redirects: Option<bool>,
pub setting_request_timeout: Option<i32>,
}
impl UpsertModelInfo for Folder {
@@ -785,6 +794,9 @@ impl UpsertModelInfo for Folder {
(Description, self.description.into()),
(Name, self.name.trim().into()),
(SortPriority, self.sort_priority.into()),
(SettingValidateCertificates, self.setting_validate_certificates.into()),
(SettingFollowRedirects, self.setting_follow_redirects.into()),
(SettingRequestTimeout, self.setting_request_timeout.into()),
])
}
@@ -798,6 +810,9 @@ impl UpsertModelInfo for Folder {
FolderIden::Description,
FolderIden::FolderId,
FolderIden::SortPriority,
FolderIden::SettingValidateCertificates,
FolderIden::SettingFollowRedirects,
FolderIden::SettingRequestTimeout,
]
}
@@ -820,6 +835,9 @@ impl UpsertModelInfo for Folder {
headers: serde_json::from_str(&headers).unwrap_or_default(),
authentication_type: row.get("authentication_type")?,
authentication: serde_json::from_str(&authentication).unwrap_or_default(),
setting_validate_certificates: row.get("setting_validate_certificates")?,
setting_follow_redirects: row.get("setting_follow_redirects")?,
setting_request_timeout: row.get("setting_request_timeout")?,
})
}
}
@@ -877,6 +895,11 @@ pub struct HttpRequest {
pub sort_priority: f64,
pub url: String,
pub url_parameters: Vec<HttpUrlParameter>,
// Inheritable settings (Option = null means inherit from parent)
pub setting_validate_certificates: Option<bool>,
pub setting_follow_redirects: Option<bool>,
pub setting_request_timeout: Option<i32>,
}
impl UpsertModelInfo for HttpRequest {
@@ -904,6 +927,7 @@ impl UpsertModelInfo for HttpRequest {
self,
source: &UpdateSource,
) -> Result<Vec<(impl IntoIden + Eq, impl Into<SimpleExpr>)>> {
use HttpRequestIden::*;
Ok(vec![
(CreatedAt, upsert_date(source, self.created_at)),
(UpdatedAt, upsert_date(source, self.updated_at)),
@@ -920,10 +944,14 @@ impl UpsertModelInfo for HttpRequest {
(AuthenticationType, self.authentication_type.into()),
(Headers, serde_json::to_string(&self.headers)?.into()),
(SortPriority, self.sort_priority.into()),
(SettingValidateCertificates, self.setting_validate_certificates.into()),
(SettingFollowRedirects, self.setting_follow_redirects.into()),
(SettingRequestTimeout, self.setting_request_timeout.into()),
])
}
fn update_columns() -> Vec<impl IntoIden> {
use HttpRequestIden::*;
vec![
UpdatedAt,
WorkspaceId,
@@ -939,6 +967,9 @@ impl UpsertModelInfo for HttpRequest {
Url,
UrlParameters,
SortPriority,
SettingValidateCertificates,
SettingFollowRedirects,
SettingRequestTimeout,
]
}
@@ -965,6 +996,9 @@ impl UpsertModelInfo for HttpRequest {
sort_priority: row.get("sort_priority")?,
url: row.get("url")?,
url_parameters: serde_json::from_str(url_parameters.as_str()).unwrap_or_default(),
setting_validate_certificates: row.get("setting_validate_certificates")?,
setting_follow_redirects: row.get("setting_follow_redirects")?,
setting_request_timeout: row.get("setting_request_timeout")?,
})
}
}
@@ -1353,7 +1387,6 @@ pub struct HttpResponse {
pub content_length_compressed: Option<i32>,
pub elapsed: i32,
pub elapsed_headers: i32,
pub elapsed_dns: i32,
pub error: Option<String>,
pub headers: Vec<HttpResponseHeader>,
pub remote_addr: Option<String>,
@@ -1402,7 +1435,6 @@ impl UpsertModelInfo for HttpResponse {
(ContentLengthCompressed, self.content_length_compressed.into()),
(Elapsed, self.elapsed.into()),
(ElapsedHeaders, self.elapsed_headers.into()),
(ElapsedDns, self.elapsed_dns.into()),
(Error, self.error.into()),
(Headers, serde_json::to_string(&self.headers)?.into()),
(RemoteAddr, self.remote_addr.into()),
@@ -1424,7 +1456,6 @@ impl UpsertModelInfo for HttpResponse {
HttpResponseIden::ContentLengthCompressed,
HttpResponseIden::Elapsed,
HttpResponseIden::ElapsedHeaders,
HttpResponseIden::ElapsedDns,
HttpResponseIden::Error,
HttpResponseIden::Headers,
HttpResponseIden::RemoteAddr,
@@ -1458,7 +1489,6 @@ impl UpsertModelInfo for HttpResponse {
version: r.get("version")?,
elapsed: r.get("elapsed")?,
elapsed_headers: r.get("elapsed_headers")?,
elapsed_dns: r.get("elapsed_dns").unwrap_or_default(),
remote_addr: r.get("remote_addr")?,
status: r.get("status")?,
status_reason: r.get("status_reason")?,
@@ -1515,12 +1545,6 @@ pub enum HttpResponseEventData {
ChunkReceived {
bytes: usize,
},
DnsResolved {
hostname: String,
addresses: Vec<String>,
duration: u64,
overridden: bool,
},
}
impl Default for HttpResponseEventData {

View File

@@ -1,4 +1,3 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
@@ -88,6 +87,6 @@ impl<'a> DbContext<'a> {
metadata.append(&mut grpc_request.metadata.clone());
Ok(dedupe_headers(metadata))
Ok(metadata)
}
}

View File

@@ -1,7 +1,6 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden, ResolvedHttpRequestSettings};
use crate::util::UpdateSource;
use serde_json::Value;
use std::collections::BTreeMap;
@@ -88,7 +87,7 @@ impl<'a> DbContext<'a> {
headers.append(&mut http_request.headers.clone());
Ok(dedupe_headers(headers))
Ok(headers)
}
pub fn list_http_requests_for_folder_recursive(
@@ -104,4 +103,79 @@ impl<'a> DbContext<'a> {
}
Ok(children)
}
/// Resolve settings for an HTTP request by walking the inheritance chain:
/// Workspace → Folder(s) → Request
/// Last non-None value wins, then defaults are applied.
pub fn resolve_settings_for_http_request(
&self,
http_request: &HttpRequest,
) -> Result<ResolvedHttpRequestSettings> {
let workspace = self.get_workspace(&http_request.workspace_id)?;
// Start with None for all settings
let mut validate_certs: Option<bool> = None;
let mut follow_redirects: Option<bool> = None;
let mut timeout: Option<i32> = None;
// Apply workspace settings
if workspace.setting_validate_certificates.is_some() {
validate_certs = workspace.setting_validate_certificates;
}
if workspace.setting_follow_redirects.is_some() {
follow_redirects = workspace.setting_follow_redirects;
}
if workspace.setting_request_timeout.is_some() {
timeout = workspace.setting_request_timeout;
}
// Apply folder chain settings (root first, immediate parent last)
if let Some(folder_id) = &http_request.folder_id {
let folders = self.get_folder_ancestors(folder_id)?;
for folder in folders {
if folder.setting_validate_certificates.is_some() {
validate_certs = folder.setting_validate_certificates;
}
if folder.setting_follow_redirects.is_some() {
follow_redirects = folder.setting_follow_redirects;
}
if folder.setting_request_timeout.is_some() {
timeout = folder.setting_request_timeout;
}
}
}
// Apply request-level settings (highest priority)
if http_request.setting_validate_certificates.is_some() {
validate_certs = http_request.setting_validate_certificates;
}
if http_request.setting_follow_redirects.is_some() {
follow_redirects = http_request.setting_follow_redirects;
}
if http_request.setting_request_timeout.is_some() {
timeout = http_request.setting_request_timeout;
}
// Apply defaults for anything still None
Ok(ResolvedHttpRequestSettings {
validate_certificates: validate_certs.unwrap_or(true),
follow_redirects: follow_redirects.unwrap_or(true),
request_timeout: timeout.unwrap_or(0),
})
}
/// Get folder ancestors in order from root to immediate parent
fn get_folder_ancestors(&self, folder_id: &str) -> Result<Vec<Folder>> {
let mut ancestors = Vec::new();
let mut current_id = Some(folder_id.to_string());
while let Some(id) = current_id {
let folder = self.get_folder(&id)?;
current_id = folder.folder_id.clone();
ancestors.push(folder);
}
ancestors.reverse(); // Root first, immediate parent last
Ok(ancestors)
}
}
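
The resolution above reduces to a simple precedence rule: walk workspace, then folders from root to immediate parent, then the request itself, letting each `Some(..)` overwrite the previous value before falling back to the defaults. A standalone sketch of that rule (values are illustrative, not taken from the diff):

```rust
// Last Some(..) in the chain wins; otherwise use the default.
fn resolve(chain: &[Option<bool>], default: bool) -> bool {
    chain.iter().copied().flatten().last().unwrap_or(default)
}

fn main() {
    // workspace = None, folder = Some(false), request = Some(true) → request wins
    assert!(resolve(&[None, Some(false), Some(true)], true));
    // nothing set anywhere → default (validate_certificates defaults to true)
    assert!(resolve(&[None, None, None], true));
    println!("precedence checks passed");
}
```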

View File

@@ -19,26 +19,6 @@ mod websocket_connections;
mod websocket_events;
mod websocket_requests;
mod workspace_metas;
pub mod workspaces;
mod workspaces;
const MAX_HISTORY_ITEMS: usize = 20;
use crate::models::HttpRequestHeader;
use std::collections::HashMap;
/// Deduplicate headers by name (case-insensitive), keeping the latest (most specific) value.
/// Preserves the order of first occurrence for each header name.
pub(crate) fn dedupe_headers(headers: Vec<HttpRequestHeader>) -> Vec<HttpRequestHeader> {
let mut index_by_name: HashMap<String, usize> = HashMap::new();
let mut deduped: Vec<HttpRequestHeader> = Vec::new();
for header in headers {
let key = header.name.to_lowercase();
if let Some(&idx) = index_by_name.get(&key) {
deduped[idx] = header;
} else {
index_by_name.insert(key, deduped.len());
deduped.push(header);
}
}
deduped
}
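
The `dedupe_headers` helper in this hunk collapses headers by name, case-insensitively, keeping the latest (most specific) value while preserving the position of each name's first occurrence. A self-contained illustration of that behaviour, with `Header` as a simplified stand-in for `HttpRequestHeader`:

```rust
// Standalone illustration of the dedupe rule: later (more specific) values win,
// but each header keeps the position of its first occurrence.
use std::collections::HashMap;

#[derive(Debug, Clone)]
struct Header {
    name: String,
    value: String,
}

fn dedupe(headers: Vec<Header>) -> Vec<Header> {
    let mut index_by_name: HashMap<String, usize> = HashMap::new();
    let mut deduped: Vec<Header> = Vec::new();
    for header in headers {
        let key = header.name.to_lowercase();
        if let Some(&idx) = index_by_name.get(&key) {
            deduped[idx] = header; // overwrite in place, keep the original slot
        } else {
            index_by_name.insert(key, deduped.len());
            deduped.push(header);
        }
    }
    deduped
}

fn main() {
    let h = |name: &str, value: &str| Header { name: name.into(), value: value.into() };
    let out = dedupe(vec![h("User-Agent", "yaak"), h("Accept", "*/*"), h("user-agent", "custom")]);
    assert_eq!(out.len(), 2);
    assert_eq!(out[0].value, "custom"); // the later value replaced the default
    assert_eq!(out[1].name, "Accept"); // order of first occurrence preserved
}
```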

View File

@@ -1,4 +1,3 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
@@ -96,6 +95,6 @@ impl<'a> DbContext<'a> {
headers.append(&mut websocket_request.headers.clone());
Ok(dedupe_headers(headers))
Ok(headers)
}
}

View File

@@ -20,8 +20,8 @@ impl<'a> DbContext<'a> {
workspaces.push(self.upsert_workspace(
&Workspace {
name: "Yaak".to_string(),
setting_follow_redirects: true,
setting_validate_certificates: true,
setting_follow_redirects: Some(true),
setting_validate_certificates: Some(true),
..Default::default()
},
&UpdateSource::Background,
@@ -65,7 +65,28 @@ impl<'a> DbContext<'a> {
}
pub fn upsert_workspace(&self, w: &Workspace, source: &UpdateSource) -> Result<Workspace> {
self.upsert(w, source)
let mut workspace = w.clone();
// Add default headers only for NEW workspaces (empty ID means insert, not update)
// This prevents re-adding headers if a user intentionally removes all headers
if workspace.id.is_empty() && workspace.headers.is_empty() {
workspace.headers = vec![
HttpRequestHeader {
enabled: true,
name: "User-Agent".to_string(),
value: "yaak".to_string(),
id: None,
},
HttpRequestHeader {
enabled: true,
name: "Accept".to_string(),
value: "*/*".to_string(),
id: None,
},
];
}
self.upsert(&workspace, source)
}
pub fn resolve_auth_for_workspace(
@@ -80,28 +101,6 @@ impl<'a> DbContext<'a> {
}
pub fn resolve_headers_for_workspace(&self, workspace: &Workspace) -> Vec<HttpRequestHeader> {
let mut headers = default_headers();
headers.extend(workspace.headers.clone());
headers
workspace.headers.clone()
}
}
/// Global default headers that are always sent with requests unless overridden.
/// These are prepended to the inheritance chain so workspace/folder/request headers
/// can override or disable them.
pub fn default_headers() -> Vec<HttpRequestHeader> {
vec![
HttpRequestHeader {
enabled: true,
name: "User-Agent".to_string(),
value: "yaak".to_string(),
id: None,
},
HttpRequestHeader {
enabled: true,
name: "Accept".to_string(),
value: "*/*".to_string(),
id: None,
},
]
}
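
This hunk swaps between two ways of getting the default `User-Agent` and `Accept` headers onto requests: one side seeds them into a workspace's own header list only when the workspace is first created (an empty ID means insert rather than update), while the other keeps a global `default_headers()` list that is prepended at resolve time so any workspace, folder, or request header can override or disable it. A rough sketch of the prepend-at-resolve variant, with a simplified `Header` type standing in for `HttpRequestHeader`:

```rust
// Sketch of the "prepend defaults, let later layers override" approach.
#[derive(Debug, Clone)]
struct Header {
    name: String,
    value: String,
}

fn default_headers() -> Vec<Header> {
    vec![
        Header { name: "User-Agent".into(), value: "yaak".into() },
        Header { name: "Accept".into(), value: "*/*".into() },
    ]
}

fn resolve_headers(workspace_headers: &[Header]) -> Vec<Header> {
    // Defaults go first so that more specific layers appended later can
    // override them once headers are deduped by name (latest value wins).
    let mut headers = default_headers();
    headers.extend(workspace_headers.iter().cloned());
    headers
}

fn main() {
    let ws = vec![Header { name: "User-Agent".into(), value: "MyClient/1.0".into() }];
    let resolved = resolve_headers(&ws);
    assert_eq!(resolved.len(), 3);
    assert_eq!(resolved.last().unwrap().value, "MyClient/1.0");
}
```

With the name-based dedupe shown earlier, the workspace's `User-Agent` would win over the default.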

View File

File diff suppressed because one or more lines are too long

View File

@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -79,6 +77,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -80,7 +80,10 @@ pub async fn check_plugin_updates(
}
/// Search for plugins in the registry.
pub async fn search_plugins(http_client: &Client, query: &str) -> Result<PluginSearchResponse> {
pub async fn search_plugins(
http_client: &Client,
query: &str,
) -> Result<PluginSearchResponse> {
let mut url = build_url("/search");
{
let mut query_pairs = url.query_pairs_mut();

View File

@@ -157,9 +157,6 @@ pub enum InternalEventPayload {
PromptTextRequest(PromptTextRequest),
PromptTextResponse(PromptTextResponse),
PromptFormRequest(PromptFormRequest),
PromptFormResponse(PromptFormResponse),
WindowInfoRequest(WindowInfoRequest),
WindowInfoResponse(WindowInfoResponse),
@@ -574,28 +571,6 @@ pub struct PromptTextResponse {
pub value: Option<String>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormRequest {
pub id: String,
pub title: String,
#[ts(optional)]
pub description: Option<String>,
pub inputs: Vec<FormInput>,
#[ts(optional)]
pub confirm_text: Option<String>,
#[ts(optional)]
pub cancel_text: Option<String>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormResponse {
pub values: Option<HashMap<String, JsonPrimitive>>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]

View File

@@ -378,8 +378,7 @@ impl PluginManager {
plugins: Vec<PluginHandle>,
timeout_duration: Duration,
) -> Result<Vec<InternalEvent>> {
let event_type = payload.type_name();
let label = format!("wait[{}.{}]", plugins.len(), event_type);
let label = format!("wait[{}.{}]", plugins.len(), payload.type_name());
let (rx_id, mut rx) = self.subscribe(label.as_str()).await;
// 1. Build the events with IDs and everything
@@ -413,21 +412,10 @@ impl PluginManager {
// Timeout to prevent hanging forever if plugin doesn't respond
if timeout(timeout_duration, collect_events).await.is_err() {
let responded_ids: Vec<&String> =
found_events.iter().filter_map(|e| e.reply_id.as_ref()).collect();
let non_responding: Vec<&str> = events_to_send
.iter()
.filter(|e| !responded_ids.contains(&&e.id))
.map(|e| e.plugin_name.as_str())
.collect();
warn!(
"Timeout ({:?}) waiting for {} responses. Got {}/{} responses. \
Non-responding plugins: [{}]",
timeout_duration,
event_type,
"Timeout waiting for plugin responses. Got {}/{} responses",
found_events.len(),
events_to_send.len(),
non_responding.join(", ")
events_to_send.len()
);
}
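
This hunk reworks the log message emitted when plugins fail to respond before the deadline. The underlying pattern (collect replies from a channel, give up after a timeout, then report how many, and which, plugins answered) can be sketched independently as below; `PluginReply` and `wait_for_replies` are illustrative names rather than Yaak's actual API, and the example assumes the `tokio` crate:

```rust
// Illustrative sketch only: collect plugin replies until a deadline,
// then log whatever is still missing.
use std::time::Duration;
use tokio::sync::mpsc;
use tokio::time::timeout;

struct PluginReply {
    plugin_name: String,
}

async fn wait_for_replies(
    mut rx: mpsc::Receiver<PluginReply>,
    expected: usize,
    deadline: Duration,
) -> Vec<PluginReply> {
    let mut replies: Vec<PluginReply> = Vec::new();
    let collect = async {
        while replies.len() < expected {
            match rx.recv().await {
                Some(reply) => replies.push(reply),
                None => break, // all senders dropped; nothing more will arrive
            }
        }
    };
    if timeout(deadline, collect).await.is_err() {
        let responded: Vec<&str> = replies.iter().map(|r| r.plugin_name.as_str()).collect();
        eprintln!(
            "Timeout ({:?}) waiting for plugin responses. Got {}/{}: [{}]",
            deadline,
            responded.len(),
            expected,
            responded.join(", ")
        );
    }
    replies
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::channel(8);
    tokio::spawn(async move {
        // Only one of the two expected plugins ever replies.
        tx.send(PluginReply { plugin_name: "plugin-a".into() }).await.ok();
    });
    let replies = wait_for_replies(rx, 2, Duration::from_millis(200)).await;
    assert_eq!(replies.len(), 1);
}
```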

View File

@@ -196,11 +196,7 @@ pub fn decrypt_secure_template_function(
}
}
new_tokens.push(Token::Raw {
text: template_function_secure_run(
encryption_manager,
args_map,
plugin_context,
)?,
text: template_function_secure_run(encryption_manager, args_map, plugin_context)?,
});
}
t => {
@@ -220,8 +216,7 @@ pub fn encrypt_secure_template_function(
plugin_context: &PluginContext,
template: &str,
) -> Result<String> {
let decrypted =
decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
let decrypted = decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
let tokens = Tokens {
tokens: vec![Token::Tag {
val: Val::Fn {
@@ -236,12 +231,7 @@ pub fn encrypt_secure_template_function(
Ok(transform_args(
tokens,
&PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
plugin_context,
RenderPurpose::Preview,
),
&PluginTemplateCallback::new(plugin_manager, encryption_manager, plugin_context, RenderPurpose::Preview),
)?
.to_string())
}

View File

@@ -4,8 +4,8 @@ use std::net::SocketAddr;
use std::path::Path;
use std::process::Stdio;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::sync::watch::Receiver;
use yaak_common::command::new_xplatform_command;
/// Start the Node.js plugin runtime process.
///
@@ -30,14 +30,13 @@ pub async fn start_nodejs_plugin_runtime(
plugin_runtime_main_str
);
let mut cmd = new_xplatform_command(node_bin_path);
cmd.env("HOST", addr.ip().to_string())
let mut child = Command::new(node_bin_path)
.env("HOST", addr.ip().to_string())
.env("PORT", addr.port().to_string())
.arg(&plugin_runtime_main_str)
.stdout(Stdio::piped())
.stderr(Stdio::piped());
let mut child = cmd.spawn()?;
.stderr(Stdio::piped())
.spawn()?;
info!("Spawned plugin runtime");

View File

@@ -46,11 +46,7 @@ impl TemplateCallback for PluginTemplateCallback {
let fn_name = if fn_name == "Response" { "response" } else { fn_name };
if fn_name == "secure" {
return template_function_secure_run(
&self.encryption_manager,
args,
&self.plugin_context,
);
return template_function_secure_run(&self.encryption_manager, args, &self.plugin_context);
} else if fn_name == "keychain" || fn_name == "keyring" {
return template_function_keychain_run(args);
}
@@ -60,8 +56,7 @@ impl TemplateCallback for PluginTemplateCallback {
primitive_args.insert(key, JsonPrimitive::from(value));
}
let resp = self
.plugin_manager
let resp = self.plugin_manager
.call_template_function(
&self.plugin_context,
fn_name,

View File

@@ -1,7 +1,5 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -22,4 +20,4 @@ export type SyncState = { model: "sync_state", id: string, workspaceId: string,
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };

View File

@@ -296,7 +296,11 @@ pub fn compute_sync_ops(
.collect()
}
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
fn workspace_models(
db: &DbContext,
version: &str,
workspace_id: &str,
) -> Result<Vec<SyncModel>> {
// We want to include private environments here so that we can take them into account during
// the sync process. Otherwise, they would be treated as deleted.
let include_private_environments = true;

View File

@@ -2,7 +2,6 @@ use crate::connect::ws_connect;
use crate::error::Result;
use futures_util::stream::SplitSink;
use futures_util::{SinkExt, StreamExt};
use http::HeaderMap;
use log::{debug, info, warn};
use std::collections::HashMap;
use std::sync::Arc;
@@ -11,6 +10,7 @@ use tokio::net::TcpStream;
use tokio::sync::{Mutex, mpsc};
use tokio_tungstenite::tungstenite::Message;
use tokio_tungstenite::tungstenite::handshake::client::Response;
use http::HeaderMap;
use tokio_tungstenite::tungstenite::http::HeaderValue;
use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
use yaak_tls::ClientCertificateConfig;

package-lock.json (generated, 8 lines changed)
View File

@@ -7811,9 +7811,9 @@
}
},
"node_modules/hono": {
"version": "4.11.4",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.4.tgz",
"integrity": "sha512-U7tt8JsyrxSRKspfhtLET79pU8K+tInj5QZXs1jSugO1Vq5dFj3kmZsRldo29mTBfcjDRVRXrEZ6LS63Cog9ZA==",
"version": "4.11.3",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.3.tgz",
"integrity": "sha512-PmQi306+M/ct/m5s66Hrg+adPnkD5jiO6IjA7WhWw0gSBSo1EcRegwuI1deZ+wd5pzCGynCcn2DprnE4/yEV4w==",
"license": "MIT",
"engines": {
"node": ">=16.9.0"
@@ -15743,7 +15743,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.25.2",
"hono": "^4.11.4",
"hono": "^4.11.3",
"zod": "^3.25.76"
},
"devDependencies": {

View File

@@ -17,7 +17,7 @@ npx @yaakapp/cli generate
```
For more details on creating plugins, check out
the [Quick Start Guide](https://yaak.app/docs/plugin-development/plugins-quick-start)
the [Quick Start Guide](https://feedback.yaak.app/help/articles/6911763-plugins-quick-start)
## Installation

View File

File diff suppressed because one or more lines are too long

View File

@@ -12,8 +12,6 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -40,7 +38,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -49,7 +47,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -79,6 +77,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -11,8 +11,6 @@ import type {
ListHttpRequestsRequest,
ListHttpRequestsResponse,
OpenWindowRequest,
PromptFormRequest,
PromptFormResponse,
PromptTextRequest,
PromptTextResponse,
RenderGrpcRequestRequest,
@@ -25,7 +23,7 @@ import type {
TemplateRenderRequest,
WorkspaceInfo,
} from '../bindings/gen_events.ts';
import type { Folder, HttpRequest } from '../bindings/gen_models.ts';
import type { HttpRequest } from '../bindings/gen_models.ts';
import type { JsonValue } from '../bindings/serde_json/JsonValue';
export type WorkspaceHandle = Pick<WorkspaceInfo, 'id' | 'name'>;
@@ -39,7 +37,6 @@ export interface Context {
};
prompt: {
text(args: PromptTextRequest): Promise<PromptTextResponse['value']>;
form(args: PromptFormRequest): Promise<PromptFormResponse['values']>;
};
store: {
set<T>(key: string, value: T): Promise<void>;
@@ -82,15 +79,6 @@ export interface Context {
};
folder: {
list(args?: ListFoldersRequest): Promise<ListFoldersResponse['folders']>;
getById(args: { id: string }): Promise<Folder | null>;
create(
args: Omit<Partial<Folder>, 'id' | 'model' | 'createdAt' | 'updatedAt'> &
Pick<Folder, 'workspaceId' | 'name'>,
): Promise<Folder>;
update(
args: Omit<Partial<Folder>, 'model' | 'createdAt' | 'updatedAt'> & Pick<Folder, 'id'>,
): Promise<Folder>;
delete(args: { id: string }): Promise<Folder>;
};
httpResponse: {
find(args: FindHttpResponsesRequest): Promise<FindHttpResponsesResponse['httpResponses']>;

View File

@@ -11,7 +11,6 @@ import type {
DeleteKeyValueResponse,
DeleteModelResponse,
FindHttpResponsesResponse,
Folder,
GetCookieValueRequest,
GetCookieValueResponse,
GetHttpRequestByIdResponse,
@@ -29,7 +28,6 @@ import type {
ListHttpRequestsResponse,
ListWorkspacesResponse,
PluginContext,
PromptFormResponse,
PromptTextResponse,
RenderGrpcRequestResponse,
RenderHttpRequestResponse,
@@ -663,13 +661,6 @@ export class PluginInstance {
});
return reply.value;
},
form: async (args) => {
const reply: PromptFormResponse = await this.#sendForReply(context, {
type: 'prompt_form_request',
...args,
});
return reply.values;
},
},
httpResponse: {
find: async (args) => {
@@ -783,44 +774,6 @@ export class PluginInstance {
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
return folders;
},
getById: async (args: { id: string }) => {
const payload = { type: 'list_folders_request' } as const;
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
return folders.find((f) => f.id === args.id) ?? null;
},
create: async (args) => {
const payload = {
type: 'upsert_model_request',
model: {
name: '',
...args,
id: '',
model: 'folder',
},
} as InternalEventPayload;
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
return response.model as Folder;
},
update: async (args) => {
const payload = {
type: 'upsert_model_request',
model: {
model: 'folder',
...args,
},
} as InternalEventPayload;
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
return response.model as Folder;
},
delete: async (args: { id: string }) => {
const payload = {
type: 'delete_model_request',
model: 'folder',
id: args.id,
} as InternalEventPayload;
const response = await this.#sendForReply<DeleteModelResponse>(context, payload);
return response.model as Folder;
},
},
cookies: {
getValue: async (args: GetCookieValueRequest) => {

View File

@@ -18,7 +18,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.25.2",
"hono": "^4.11.4",
"hono": "^4.11.3",
"zod": "^3.25.76"
},
"devDependencies": {

View File

@@ -2,12 +2,6 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import type { McpServerContext } from '../types.js';
import { getWorkspaceContext } from './helpers.js';
import {
authenticationSchema,
authenticationTypeSchema,
headersSchema,
workspaceIdSchema,
} from './schemas.js';
export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
server.registerTool(
@@ -16,7 +10,10 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
title: 'List Folders',
description: 'List all folders in a workspace',
inputSchema: {
workspaceId: workspaceIdSchema,
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
},
},
async ({ workspaceId }) => {
@@ -33,116 +30,4 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
};
},
);
server.registerTool(
'get_folder',
{
title: 'Get Folder',
description: 'Get details of a specific folder by ID',
inputSchema: {
id: z.string().describe('The folder ID'),
workspaceId: workspaceIdSchema,
},
},
async ({ id, workspaceId }) => {
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
const folder = await workspaceCtx.yaak.folder.getById({ id });
return {
content: [
{
type: 'text' as const,
text: JSON.stringify(folder, null, 2),
},
],
};
},
);
server.registerTool(
'create_folder',
{
title: 'Create Folder',
description: 'Create a new folder in a workspace',
inputSchema: {
workspaceId: workspaceIdSchema,
name: z.string().describe('Folder name'),
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
description: z.string().optional().describe('Folder description'),
sortPriority: z.number().optional().describe('Sort priority for ordering'),
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ workspaceId: ogWorkspaceId, ...args }) => {
const workspaceCtx = await getWorkspaceContext(ctx, ogWorkspaceId);
const workspaceId = await workspaceCtx.yaak.window.workspaceId();
if (!workspaceId) {
throw new Error('No workspace is open');
}
const folder = await workspaceCtx.yaak.folder.create({
workspaceId: workspaceId,
...args,
});
return {
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
};
},
);
server.registerTool(
'update_folder',
{
title: 'Update Folder',
description: 'Update an existing folder',
inputSchema: {
id: z.string().describe('Folder ID to update'),
workspaceId: workspaceIdSchema,
name: z.string().optional().describe('Folder name'),
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
description: z.string().optional().describe('Folder description'),
sortPriority: z.number().optional().describe('Sort priority for ordering'),
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ id, workspaceId, ...updates }) => {
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
// Fetch existing folder to merge with updates
const existing = await workspaceCtx.yaak.folder.getById({ id });
if (!existing) {
throw new Error(`Folder with ID ${id} not found`);
}
// Merge existing fields with updates
const folder = await workspaceCtx.yaak.folder.update({
...existing,
...updates,
id,
});
return {
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
};
},
);
server.registerTool(
'delete_folder',
{
title: 'Delete Folder',
description: 'Delete a folder by ID',
inputSchema: {
id: z.string().describe('Folder ID to delete'),
},
},
async ({ id }) => {
const folder = await ctx.yaak.folder.delete({ id });
return {
content: [{ type: 'text' as const, text: `Deleted: ${folder.name} (${folder.id})` }],
};
},
);
}

View File

@@ -2,15 +2,6 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import type { McpServerContext } from '../types.js';
import { getWorkspaceContext } from './helpers.js';
import {
authenticationSchema,
authenticationTypeSchema,
bodySchema,
bodyTypeSchema,
headersSchema,
urlParametersSchema,
workspaceIdSchema,
} from './schemas.js';
export function registerHttpRequestTools(server: McpServer, ctx: McpServerContext) {
server.registerTool(
@@ -19,7 +10,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
title: 'List HTTP Requests',
description: 'List all HTTP requests in a workspace',
inputSchema: {
workspaceId: workspaceIdSchema,
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
},
},
async ({ workspaceId }) => {
@@ -44,7 +38,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
description: 'Get details of a specific HTTP request by ID',
inputSchema: {
id: z.string().describe('The HTTP request ID'),
workspaceId: workspaceIdSchema,
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
},
},
async ({ id, workspaceId }) => {
@@ -70,7 +67,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
inputSchema: {
id: z.string().describe('The HTTP request ID to send'),
environmentId: z.string().optional().describe('Optional environment ID to use'),
workspaceId: workspaceIdSchema,
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
},
},
async ({ id, workspaceId }) => {
@@ -99,7 +99,10 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
title: 'Create HTTP Request',
description: 'Create a new HTTP request',
inputSchema: {
workspaceId: workspaceIdSchema,
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
name: z
.string()
.optional()
@@ -108,12 +111,62 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
method: z.string().optional().describe('HTTP method (defaults to GET)'),
folderId: z.string().optional().describe('Parent folder ID'),
description: z.string().optional().describe('Request description'),
headers: headersSchema.describe('Request headers'),
urlParameters: urlParametersSchema,
bodyType: bodyTypeSchema,
body: bodySchema,
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
headers: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('Request headers'),
urlParameters: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters'),
bodyType: z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
),
body: z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
),
authenticationType: z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
),
authentication: z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
),
},
},
async ({ workspaceId: ogWorkspaceId, ...args }) => {
@@ -141,18 +194,68 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
description: 'Update an existing HTTP request',
inputSchema: {
id: z.string().describe('HTTP request ID to update'),
workspaceId: workspaceIdSchema,
workspaceId: z.string().describe('Workspace ID'),
name: z.string().optional().describe('Request name'),
url: z.string().optional().describe('Request URL'),
method: z.string().optional().describe('HTTP method'),
folderId: z.string().optional().describe('Parent folder ID'),
description: z.string().optional().describe('Request description'),
headers: headersSchema.describe('Request headers'),
urlParameters: urlParametersSchema,
bodyType: bodyTypeSchema,
body: bodySchema,
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
headers: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('Request headers'),
urlParameters: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters'),
bodyType: z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
),
body: z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
),
authenticationType: z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
),
authentication: z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
),
},
},
async ({ id, workspaceId, ...updates }) => {

View File

@@ -1,67 +0,0 @@
import * as z from 'zod';
export const workspaceIdSchema = z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)');
export const headersSchema = z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional();
export const urlParametersSchema = z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters');
export const bodyTypeSchema = z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
);
export const bodySchema = z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
);
export const authenticationTypeSchema = z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent.',
);
export const authenticationSchema = z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
);

View File

@@ -1,161 +0,0 @@
import { open } from '@tauri-apps/plugin-dialog';
import { gitClone } from '@yaakapp-internal/git';
import { useState } from 'react';
import { openWorkspaceFromSyncDir } from '../commands/openWorkspaceFromSyncDir';
import { appInfo } from '../lib/appInfo';
import { showErrorToast } from '../lib/toast';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { VStack } from './core/Stacks';
import { promptCredentials } from './git/credentials';
interface Props {
hide: () => void;
}
// Detect path separator from an existing path (defaults to /)
function getPathSeparator(path: string): string {
return path.includes('\\') ? '\\' : '/';
}
export function CloneGitRepositoryDialog({ hide }: Props) {
const [url, setUrl] = useState<string>('');
const [baseDirectory, setBaseDirectory] = useState<string>(appInfo.defaultProjectDir);
const [directoryOverride, setDirectoryOverride] = useState<string | null>(null);
const [hasSubdirectory, setHasSubdirectory] = useState(false);
const [subdirectory, setSubdirectory] = useState<string>('');
const [isCloning, setIsCloning] = useState(false);
const [error, setError] = useState<string | null>(null);
const repoName = extractRepoName(url);
const sep = getPathSeparator(baseDirectory);
const computedDirectory = repoName ? `${baseDirectory}${sep}${repoName}` : baseDirectory;
const directory = directoryOverride ?? computedDirectory;
const workspaceDirectory =
hasSubdirectory && subdirectory ? `${directory}${sep}${subdirectory}` : directory;
const handleSelectDirectory = async () => {
const dir = await open({
title: 'Select Directory',
directory: true,
multiple: false,
});
if (dir != null) {
setBaseDirectory(dir);
setDirectoryOverride(null);
}
};
const handleClone = async (e: React.FormEvent) => {
e.preventDefault();
if (!url || !directory) return;
setIsCloning(true);
setError(null);
try {
const result = await gitClone(url, directory, promptCredentials);
if (result.type === 'needs_credentials') {
setError(
result.error ?? 'Authentication failed. Please check your credentials and try again.',
);
return;
}
// Open the workspace from the cloned directory (or subdirectory)
await openWorkspaceFromSyncDir.mutateAsync(workspaceDirectory);
hide();
} catch (err) {
setError(String(err));
showErrorToast({
id: 'git-clone-error',
title: 'Clone Failed',
message: String(err),
});
} finally {
setIsCloning(false);
}
};
return (
<VStack as="form" space={3} alignItems="start" className="pb-3" onSubmit={handleClone}>
{error && (
<Banner color="danger" className="w-full">
{error}
</Banner>
)}
<PlainInput
required
label="Repository URL"
placeholder="https://github.com/user/repo.git"
defaultValue={url}
onChange={setUrl}
/>
<PlainInput
label="Directory"
placeholder={appInfo.defaultProjectDir}
defaultValue={directory}
onChange={setDirectoryOverride}
rightSlot={
<IconButton
size="xs"
className="mr-0.5 !h-auto my-0.5"
icon="folder"
title="Browse"
onClick={handleSelectDirectory}
/>
}
/>
<Checkbox
checked={hasSubdirectory}
onChange={setHasSubdirectory}
title="Workspace is in a subdirectory"
help="Enable if the Yaak workspace files are not at the root of the repository"
/>
{hasSubdirectory && (
<PlainInput
label="Subdirectory"
placeholder="path/to/workspace"
defaultValue={subdirectory}
onChange={setSubdirectory}
/>
)}
<Button
type="submit"
color="primary"
className="w-full mt-3"
disabled={!url || !directory || isCloning}
isLoading={isCloning}
>
{isCloning ? 'Cloning...' : 'Clone Repository'}
</Button>
</VStack>
);
}
function extractRepoName(url: string): string {
// Handle various Git URL formats:
// https://github.com/user/repo.git
// git@github.com:user/repo.git
// https://github.com/user/repo
const match = url.match(/\/([^/]+?)(\.git)?$/);
if (match?.[1]) {
return match[1];
}
// Fallback for SSH-style URLs
const sshMatch = url.match(/:([^/]+?)(\.git)?$/);
if (sshMatch?.[1]) {
return sshMatch[1];
}
return '';
}

View File

@@ -1,181 +0,0 @@
import type { DnsOverride, Workspace } from '@yaakapp-internal/models';
import { patchModel } from '@yaakapp-internal/models';
import { useCallback, useId, useMemo } from 'react';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { HStack, VStack } from './core/Stacks';
import { Table, TableBody, TableCell, TableHead, TableHeaderCell, TableRow } from './core/Table';
interface Props {
workspace: Workspace;
}
interface DnsOverrideWithId extends DnsOverride {
_id: string;
}
export function DnsOverridesEditor({ workspace }: Props) {
const reactId = useId();
// Ensure each override has an internal ID for React keys
const overridesWithIds = useMemo<DnsOverrideWithId[]>(() => {
return workspace.settingDnsOverrides.map((override, index) => ({
...override,
_id: `${reactId}-${index}`,
}));
}, [workspace.settingDnsOverrides, reactId]);
const handleChange = useCallback(
(overrides: DnsOverride[]) => {
patchModel(workspace, { settingDnsOverrides: overrides });
},
[workspace],
);
const handleAdd = useCallback(() => {
const newOverride: DnsOverride = {
hostname: '',
ipv4: [''],
ipv6: [],
enabled: true,
};
handleChange([...workspace.settingDnsOverrides, newOverride]);
}, [workspace.settingDnsOverrides, handleChange]);
const handleUpdate = useCallback(
(index: number, update: Partial<DnsOverride>) => {
const updated = workspace.settingDnsOverrides.map((o, i) =>
i === index ? { ...o, ...update } : o,
);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);
const handleDelete = useCallback(
(index: number) => {
const updated = workspace.settingDnsOverrides.filter((_, i) => i !== index);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);
return (
<VStack space={3} className="pb-3">
<div className="text-text-subtle text-sm">
Override DNS resolution for specific hostnames. This works like{' '}
<code className="text-text-subtlest bg-surface-highlight px-1 rounded">/etc/hosts</code>{' '}
but only for requests made from this workspace.
</div>
{overridesWithIds.length > 0 && (
<Table>
<TableHead>
<TableRow>
<TableHeaderCell className="w-8" />
<TableHeaderCell>Hostname</TableHeaderCell>
<TableHeaderCell>IPv4 Address</TableHeaderCell>
<TableHeaderCell>IPv6 Address</TableHeaderCell>
<TableHeaderCell className="w-10" />
</TableRow>
</TableHead>
<TableBody>
{overridesWithIds.map((override, index) => (
<DnsOverrideRow
key={override._id}
override={override}
onUpdate={(update) => handleUpdate(index, update)}
onDelete={() => handleDelete(index)}
/>
))}
</TableBody>
</Table>
)}
<HStack>
<Button size="xs" color="secondary" variant="border" onClick={handleAdd}>
Add DNS Override
</Button>
</HStack>
</VStack>
);
}
interface DnsOverrideRowProps {
override: DnsOverride;
onUpdate: (update: Partial<DnsOverride>) => void;
onDelete: () => void;
}
function DnsOverrideRow({ override, onUpdate, onDelete }: DnsOverrideRowProps) {
const ipv4Value = override.ipv4.join(', ');
const ipv6Value = override.ipv6.join(', ');
return (
<TableRow>
<TableCell>
<Checkbox
hideLabel
title={override.enabled ? 'Disable override' : 'Enable override'}
checked={override.enabled ?? true}
onChange={(enabled) => onUpdate({ enabled })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="Hostname"
placeholder="api.example.com"
defaultValue={override.hostname}
onChange={(hostname) => onUpdate({ hostname })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv4 addresses"
placeholder="127.0.0.1"
defaultValue={ipv4Value}
onChange={(value) =>
onUpdate({
ipv4: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv6 addresses"
placeholder="::1"
defaultValue={ipv6Value}
onChange={(value) =>
onUpdate({
ipv6: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<IconButton
size="xs"
iconSize="sm"
icon="trash"
title="Delete override"
onClick={onDelete}
/>
</TableCell>
</TableRow>
);
}

View File

@@ -1,6 +1,6 @@
import { createWorkspaceModel, foldersAtom, patchModel } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { useAuthTab } from '../hooks/useAuthTab';
import { useEnvironmentsBreakdown } from '../hooks/useEnvironmentsBreakdown';
import { useHeadersTab } from '../hooks/useHeadersTab';
@@ -37,6 +37,7 @@ export type FolderSettingsTab =
export function FolderSettingsDialog({ folderId, tab }: Props) {
const folders = useAtomValue(foldersAtom);
const folder = folders.find((f) => f.id === folderId) ?? null;
const [activeTab, setActiveTab] = useState<string>(tab ?? TAB_GENERAL);
const authTab = useAuthTab(TAB_AUTH, folder);
const headersTab = useHeadersTab(TAB_HEADERS, folder);
const inheritedHeaders = useInheritedHeaders(folder);
@@ -68,7 +69,8 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
return (
<Tabs
defaultValue={tab ?? TAB_GENERAL}
value={activeTab}
onChangeValue={setActiveTab}
label="Folder Settings"
className="pt-2 pb-2 pl-3 pr-1"
layout="horizontal"
@@ -111,7 +113,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
<VStack alignItems="center" space={1.5}>
<p>
Override{' '}
<Link href="https://yaak.app/docs/using-yaak/environments-and-variables">
<Link href="https://feedback.yaak.app/help/articles/3284139-environments-and-variables">
Variables
</Link>{' '}
for requests within this folder.

View File

@@ -10,7 +10,7 @@ import {
stateExtensions,
updateSchema,
} from 'codemirror-json-schema';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { showAlert } from '../lib/alert';
import { showDialog } from '../lib/dialog';
@@ -39,15 +39,15 @@ export function GrpcEditor({
protoFiles,
...extraEditorProps
}: Props) {
const [editorView, setEditorView] = useState<EditorView | null>(null);
const editorViewRef = useRef<EditorView>(null);
const handleInitEditorViewRef = useCallback((h: EditorView | null) => {
setEditorView(h);
editorViewRef.current = h;
}, []);
// Find the schema for the selected service and method and update the editor
useEffect(() => {
if (
editorView == null ||
editorViewRef.current == null ||
services === null ||
request.service === null ||
request.method === null
@@ -91,7 +91,7 @@ export function GrpcEditor({
}
try {
updateSchema(editorView, JSON.parse(schema));
updateSchema(editorViewRef.current, JSON.parse(schema));
} catch (err) {
showAlert({
id: 'grpc-parse-schema-error',
@@ -107,7 +107,7 @@ export function GrpcEditor({
),
});
}
}, [editorView, services, request.method, request.service]);
}, [services, request.method, request.service]);
const extraExtensions = useMemo(
() => [
@@ -118,7 +118,7 @@ export function GrpcEditor({
jsonLanguage.data.of({
autocomplete: jsonCompletion(),
}),
stateExtensions({}),
stateExtensions(/** Init with empty schema **/),
],
[],
);

View File

@@ -7,6 +7,7 @@ import { useContainerSize } from '../hooks/useContainerQuery';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
import { resolvedModelName } from '../lib/resolvedModelName';
import { Button } from './core/Button';
@@ -68,6 +69,11 @@ export function GrpcRequestPane({
const authTab = useAuthTab(TAB_AUTH, activeRequest);
const metadataTab = useHeadersTab(TAB_METADATA, activeRequest, 'Metadata');
const inheritedHeaders = useInheritedHeaders(activeRequest);
const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
namespace: 'no_sync',
key: 'grpcRequestActiveTabs',
fallback: {},
});
const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);
const urlContainerEl = useRef<HTMLDivElement>(null);
@@ -139,6 +145,14 @@ export function GrpcRequestPane({
[activeRequest.description, authTab, metadataTab],
);
const activeTab = activeTabs?.[activeRequest.id];
const setActiveTab = useCallback(
async (tab: string) => {
await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
},
[activeRequest.id, setActiveTabs],
);
const handleMetadataChange = useCallback(
(metadata: HttpRequestHeader[]) => patchModel(activeRequest, { metadata }),
[activeRequest],
@@ -251,11 +265,12 @@ export function GrpcRequestPane({
</HStack>
</div>
<Tabs
value={activeTab}
label="Request"
onChangeValue={setActiveTab}
tabs={tabs}
tabListClassName="mt-1 !mb-1.5"
storageKey="grpc_request_tabs"
activeTabKey={activeRequest.id}
storageKey="grpc_request_tabs_order"
>
<TabContent value="message">
<GrpcEditor

View File

@@ -1,7 +1,9 @@
import type { GrpcEvent, GrpcRequest } from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { useAtomValue, useSetAtom } from 'jotai';
import type { CSSProperties } from 'react';
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useMemo, useRef, useState } from 'react';
import {
activeGrpcConnectionAtom,
activeGrpcConnections,
@@ -9,14 +11,18 @@ import {
useGrpcEvents,
} from '../hooks/usePinnedGrpcConnection';
import { useStateWithDeps } from '../hooks/useStateWithDeps';
import { copyToClipboard } from '../lib/copy';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { Editor } from './core/Editor/LazyEditor';
import { EventDetailHeader, EventViewer } from './core/EventViewer';
import { EventViewerRow } from './core/EventViewerRow';
import { HotkeyList } from './core/HotkeyList';
import { Icon, type IconProps } from './core/Icon';
import { Icon } from './core/Icon';
import { IconButton } from './core/IconButton';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import { LoadingIcon } from './core/LoadingIcon';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
import { HStack, VStack } from './core/Stacks';
import { EmptyStateText } from './EmptyStateText';
import { ErrorBoundary } from './ErrorBoundary';
@@ -36,7 +42,7 @@ interface Props {
}
export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
const [activeEventId, setActiveEventId] = useState<string | null>(null);
const [showLarge, setShowLarge] = useStateWithDeps<boolean>(false, [activeRequest.id]);
const [showingLarge, setShowingLarge] = useState<boolean>(false);
const connections = useAtomValue(activeGrpcConnections);
@@ -45,8 +51,8 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const setPinnedGrpcConnectionId = useSetAtom(pinnedGrpcConnectionIdAtom);
const activeEvent = useMemo(
() => (activeEventIndex != null ? events[activeEventIndex] : null),
[activeEventIndex, events],
() => events.find((m) => m.id === activeEventId) ?? null,
[activeEventId, events],
);
// Set the active message to the first message received if unary
@@ -55,188 +61,223 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
if (events.length === 0 || activeEvent != null || methodType !== 'unary') {
return;
}
const firstServerMessageIndex = events.findIndex((m) => m.eventType === 'server_message');
if (firstServerMessageIndex !== -1) {
setActiveEventIndex(firstServerMessageIndex);
}
setActiveEventId(events.find((m) => m.eventType === 'server_message')?.id ?? null);
}, [events.length]);
if (activeConnection == null) {
return (
<HotkeyList hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']} />
);
}
const header = (
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
);
return (
<div style={style} className="h-full">
<ErrorBoundary name="GRPC Events">
<EventViewer
events={events}
getEventKey={(event) => event.id}
error={activeConnection.error}
header={header}
splitLayoutName="grpc_events"
defaultRatio={0.4}
renderRow={({ event, isActive, onClick }) => (
<GrpcEventRow event={event} isActive={isActive} onClick={onClick} />
)}
renderDetail={({ event }) => (
<GrpcEventDetail
event={event}
showLarge={showLarge}
showingLarge={showingLarge}
setShowLarge={setShowLarge}
setShowingLarge={setShowingLarge}
/>
)}
/>
</ErrorBoundary>
</div>
);
}
function GrpcEventRow({
event,
isActive,
onClick,
}: {
event: GrpcEvent;
isActive: boolean;
onClick: () => void;
}) {
const { eventType, status, content, error } = event;
const display = getEventDisplay(eventType, status);
return (
<EventViewerRow
isActive={isActive}
onClick={onClick}
icon={<Icon color={display.color} title={display.title} icon={display.icon} />}
content={
<span className="text-xs">
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</span>
<SplitLayout
layout="vertical"
style={style}
name="grpc_events"
defaultRatio={0.4}
minHeightPx={20}
firstSlot={() =>
activeConnection == null ? (
<HotkeyList
hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']}
/>
) : (
<div className="w-full grid grid-rows-[auto_minmax(0,1fr)] grid-cols-1 items-center">
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
<ErrorBoundary name="GRPC Events">
<AutoScroller
data={events}
header={
activeConnection.error && (
<Banner color="danger" className="m-3">
{activeConnection.error}
</Banner>
)
}
render={(event) => (
<EventRow
key={event.id}
event={event}
isActive={event.id === activeEventId}
onClick={() => {
if (event.id === activeEventId) setActiveEventId(null);
else setActiveEventId(event.id);
}}
/>
)}
/>
</ErrorBoundary>
</div>
)
}
secondSlot={
activeEvent != null && activeConnection != null
? () => (
<div className="grid grid-rows-[auto_minmax(0,1fr)]">
<div className="pb-3 px-2">
<Separator />
</div>
<div className="h-full pl-2 overflow-y-auto grid grid-rows-[auto_minmax(0,1fr)] ">
{activeEvent.eventType === 'client_message' ||
activeEvent.eventType === 'server_message' ? (
<>
<div className="mb-2 select-text cursor-text grid grid-cols-[minmax(0,1fr)_auto] items-center">
<div className="font-semibold">
Message {activeEvent.eventType === 'client_message' ? 'Sent' : 'Received'}
</div>
<IconButton
title="Copy message"
icon="copy"
size="xs"
onClick={() => copyToClipboard(activeEvent.content)}
/>
</div>
{!showLarge && activeEvent.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={activeEvent.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
</>
) : (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<div>
<div className="select-text cursor-text font-semibold">
{activeEvent.content}
</div>
{activeEvent.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{activeEvent.error}
</div>
)}
</div>
<div className="py-2 h-full">
{Object.keys(activeEvent.metadata).length === 0 ? (
<EmptyStateText>
No{' '}
{activeEvent.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(activeEvent.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
)}
</div>
</div>
)}
</div>
</div>
)
: null
}
timestamp={event.createdAt}
/>
);
}
function GrpcEventDetail({
function EventRow({
onClick,
isActive,
event,
showLarge,
showingLarge,
setShowLarge,
setShowingLarge,
}: {
onClick?: () => void;
isActive?: boolean;
event: GrpcEvent;
showLarge: boolean;
showingLarge: boolean;
setShowLarge: (v: boolean) => void;
setShowingLarge: (v: boolean) => void;
}) {
if (event.eventType === 'client_message' || event.eventType === 'server_message') {
const title = `Message ${event.eventType === 'client_message' ? 'Sent' : 'Received'}`;
const { eventType, status, createdAt, content, error } = event;
const ref = useRef<HTMLDivElement>(null);
return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader title={title} timestamp={event.createdAt} copyText={event.content} />
{!showLarge && event.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={event.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
</div>
);
}
// Error or connection_end - show metadata/trailers
return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader title={event.content} timestamp={event.createdAt} />
{event.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{event.error}
</div>
)}
<div className="py-2 h-full">
{Object.keys(event.metadata).length === 0 ? (
<EmptyStateText>
No {event.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(event.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
<div className="px-1" ref={ref}>
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,3fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
)}
</div>
>
<Icon
color={
eventType === 'server_message'
? 'info'
: eventType === 'client_message'
? 'primary'
: eventType === 'error' || (status != null && status > 0)
? 'danger'
: eventType === 'connection_end'
? 'success'
: undefined
}
title={
eventType === 'server_message'
? 'Server message'
: eventType === 'client_message'
? 'Client message'
: eventType === 'error' || (status != null && status > 0)
? 'Error'
: eventType === 'connection_end'
? 'Connection response'
: undefined
}
icon={
eventType === 'server_message'
? 'arrow_big_down_dash'
: eventType === 'client_message'
? 'arrow_big_up_dash'
: eventType === 'error' || (status != null && status > 0)
? 'alert_triangle'
: eventType === 'connection_end'
? 'check'
: 'info'
}
/>
<div className={classNames('w-full truncate text-xs')}>
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</div>
<div className={classNames('opacity-50 text-xs')}>
{format(`${createdAt}Z`, 'HH:mm:ss.SSS')}
</div>
</button>
</div>
);
}
function getEventDisplay(
eventType: GrpcEvent['eventType'],
status: GrpcEvent['status'],
): { icon: IconProps['icon']; color: IconProps['color']; title: string } {
if (eventType === 'server_message') {
return { icon: 'arrow_big_down_dash', color: 'info', title: 'Server message' };
}
if (eventType === 'client_message') {
return { icon: 'arrow_big_up_dash', color: 'primary', title: 'Client message' };
}
if (eventType === 'error' || (status != null && status > 0)) {
return { icon: 'alert_triangle', color: 'danger', title: 'Error' };
}
if (eventType === 'connection_end') {
return { icon: 'check', color: 'success', title: 'Connection response' };
}
return { icon: 'info', color: undefined, title: 'Event' };
}


@@ -19,7 +19,6 @@ type Props = {
forceUpdateKey: string;
headers: HttpRequestHeader[];
inheritedHeaders?: HttpRequestHeader[];
inheritedHeadersLabel?: string;
stateKey: string;
onChange: (headers: HttpRequestHeader[]) => void;
label?: string;
@@ -29,36 +28,20 @@ export function HeadersEditor({
stateKey,
headers,
inheritedHeaders,
inheritedHeadersLabel = 'Inherited',
onChange,
forceUpdateKey,
}: Props) {
// Get header names defined at current level (case-insensitive)
const currentHeaderNames = new Set(
headers.filter((h) => h.name).map((h) => h.name.toLowerCase()),
);
// Filter inherited headers: must be enabled, have content, and not be overridden by current level
const validInheritedHeaders =
inheritedHeaders?.filter(
(pair) =>
pair.enabled && (pair.name || pair.value) && !currentHeaderNames.has(pair.name.toLowerCase()),
) ?? [];
const hasInheritedHeaders = validInheritedHeaders.length > 0;
inheritedHeaders?.filter((pair) => pair.enabled && (pair.name || pair.value)) ?? [];
return (
<div
className={
hasInheritedHeaders
? '@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5'
: '@container w-full h-full'
}
>
{hasInheritedHeaders && (
<div className="@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5">
{validInheritedHeaders.length > 0 ? (
<DetailsBanner
color="secondary"
className="text-sm"
summary={
<HStack>
{inheritedHeadersLabel} <CountBadge count={validInheritedHeaders.length} />
Inherited <CountBadge count={validInheritedHeaders.length} />
</HStack>
}
>
@@ -80,6 +63,8 @@ export function HeadersEditor({
))}
</div>
</DetailsBanner>
) : (
<span />
)}
<PairOrBulkEditor
forceUpdateKey={forceUpdateKey}


@@ -62,7 +62,7 @@ export function HttpAuthenticationEditor({ model }: Props) {
<p>
Apply auth to all requests in <strong>{resolvedModelName(model)}</strong>
</p>
<Link href="https://yaak.app/docs/using-yaak/request-inheritance">
<Link href="https://feedback.yaak.app/help/articles/2112119-request-inheritance">
Documentation
</Link>
</EmptyStateText>


@@ -4,7 +4,7 @@ import type { GenericCompletionOption } from '@yaakapp-internal/plugins';
import classNames from 'classnames';
import { atom, useAtomValue } from 'jotai';
import type { CSSProperties } from 'react';
import { lazy, Suspense, useCallback, useMemo, useRef, useState } from 'react';
import { lazy, Suspense, useCallback, useMemo, useState } from 'react';
import { activeRequestIdAtom } from '../hooks/useActiveRequestId';
import { allRequestsAtom } from '../hooks/useAllRequests';
import { useAuthTab } from '../hooks/useAuthTab';
@@ -12,6 +12,7 @@ import { useCancelHttpResponse } from '../hooks/useCancelHttpResponse';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useImportCurl } from '../hooks/useImportCurl';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { usePinnedHttpResponse } from '../hooks/usePinnedHttpResponse';
import { useRequestEditor, useRequestEditorEvent } from '../hooks/useRequestEditor';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
@@ -41,8 +42,8 @@ import { Editor } from './core/Editor/LazyEditor';
import { InlineCode } from './core/InlineCode';
import type { Pair } from './core/PairEditor';
import { PlainInput } from './core/PlainInput';
import type { TabItem, TabsRef } from './core/Tabs/Tabs';
import { setActiveTab, TabContent, Tabs } from './core/Tabs/Tabs';
import type { TabItem } from './core/Tabs/Tabs';
import { TabContent, Tabs } from './core/Tabs/Tabs';
import { EmptyStateText } from './EmptyStateText';
import { FormMultipartEditor } from './FormMultipartEditor';
import { FormUrlencodedEditor } from './FormUrlencodedEditor';
@@ -69,7 +70,6 @@ const TAB_PARAMS = 'params';
const TAB_HEADERS = 'headers';
const TAB_AUTH = 'auth';
const TAB_DESCRIPTION = 'description';
const TABS_STORAGE_KEY = 'http_request_tabs';
const nonActiveRequestUrlsAtom = atom((get) => {
const activeRequestId = get(activeRequestIdAtom);
@@ -83,20 +83,19 @@ const memoNotActiveRequestUrlsAtom = deepEqualAtom(nonActiveRequestUrlsAtom);
export function HttpRequestPane({ style, fullHeight, className, activeRequest }: Props) {
const activeRequestId = activeRequest.id;
const tabsRef = useRef<TabsRef>(null);
const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
namespace: 'no_sync',
key: 'httpRequestActiveTabs',
fallback: {},
});
const [forceUpdateHeaderEditorKey, setForceUpdateHeaderEditorKey] = useState<number>(0);
const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);
const [{ urlKey }, { forceUrlRefresh, forceParamsRefresh }] = useRequestEditor();
const [{ urlKey }, { focusParamsTab, forceUrlRefresh, forceParamsRefresh }] = useRequestEditor();
const contentType = getContentTypeFromHeaders(activeRequest.headers);
const authTab = useAuthTab(TAB_AUTH, activeRequest);
const headersTab = useHeadersTab(TAB_HEADERS, activeRequest);
const inheritedHeaders = useInheritedHeaders(activeRequest);
// Listen for event to focus the params tab (e.g., when clicking a :param in the URL)
useRequestEditorEvent('request_pane.focus_tab', () => {
tabsRef.current?.setActiveTab(TAB_PARAMS);
}, []);
const handleContentTypeChange = useCallback(
async (contentType: string | null, patch: Partial<Omit<HttpRequest, 'headers'>> = {}) => {
if (activeRequest == null) {
@@ -261,6 +260,18 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
[activeRequest],
);
const activeTab = activeTabs?.[activeRequestId];
const setActiveTab = useCallback(
async (tab: string) => {
await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
},
[activeRequest.id, setActiveTabs],
);
useRequestEditorEvent('request_pane.focus_tab', async () => {
await setActiveTab(TAB_PARAMS);
});
const autocompleteUrls = useAtomValue(memoNotActiveRequestUrlsAtom);
const autocomplete: GenericCompletionConfig = useMemo(
@@ -287,11 +298,7 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
e.preventDefault(); // Prevent input onChange
await patchModel(activeRequest, patch);
await setActiveTab({
storageKey: TABS_STORAGE_KEY,
activeTabKey: activeRequestId,
value: TAB_PARAMS,
});
focusParamsTab();
// Wait for request to update, then refresh the UI
// TODO: Somehow make this deterministic
@@ -302,7 +309,14 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
}
}
},
[activeRequest, activeRequestId, forceParamsRefresh, forceUrlRefresh, importCurl],
[
activeRequest,
activeRequestId,
focusParamsTab,
forceParamsRefresh,
forceUrlRefresh,
importCurl,
],
);
const handleSend = useCallback(
() => sendRequest(activeRequest.id ?? null),
@@ -340,12 +354,12 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
isLoading={activeResponse != null && activeResponse.state !== 'closed'}
/>
<Tabs
ref={tabsRef}
value={activeTab}
label="Request"
onChangeValue={setActiveTab}
tabs={tabs}
tabListClassName="mt-1 -mb-1.5"
storageKey={TABS_STORAGE_KEY}
activeTabKey={activeRequestId}
tabListClassName="mt-1 mb-1.5"
storageKey="http_request_tabs_order"
>
<TabContent value={TAB_AUTH}>
<HttpAuthenticationEditor model={activeRequest} />


@@ -1,15 +1,15 @@
import type { HttpResponse } from '@yaakapp-internal/models';
import classNames from 'classnames';
import type { ComponentType, CSSProperties } from 'react';
import { lazy, Suspense, useMemo } from 'react';
import { lazy, Suspense, useCallback, useMemo } from 'react';
import { useLocalStorage } from 'react-use';
import { useCancelHttpResponse } from '../hooks/useCancelHttpResponse';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { usePinnedHttpResponse } from '../hooks/usePinnedHttpResponse';
import { useResponseBodyBytes, useResponseBodyText } from '../hooks/useResponseBodyText';
import { useResponseViewMode } from '../hooks/useResponseViewMode';
import { useTimelineViewMode } from '../hooks/useTimelineViewMode';
import { getMimeTypeFromContentType } from '../lib/contentType';
import { getContentTypeFromHeaders, getCookieCounts } from '../lib/model_util';
import { getContentTypeFromHeaders } from '../lib/model_util';
import { ConfirmLargeResponse } from './ConfirmLargeResponse';
import { ConfirmLargeResponseRequest } from './ConfirmLargeResponseRequest';
import { Banner } from './core/Banner';
@@ -55,18 +55,32 @@ const TAB_HEADERS = 'headers';
const TAB_COOKIES = 'cookies';
const TAB_TIMELINE = 'timeline';
export type TimelineViewMode = 'timeline' | 'text';
export function HttpResponsePane({ style, className, activeRequestId }: Props) {
const { activeResponse, setPinnedResponseId, responses } = usePinnedHttpResponse(activeRequestId);
const [viewMode, setViewMode] = useResponseViewMode(activeResponse?.requestId);
const [timelineViewMode, setTimelineViewMode] = useTimelineViewMode();
const [activeTabs, setActiveTabs] = useLocalStorage<Record<string, string>>(
'responsePaneActiveTabs',
{},
);
const contentType = getContentTypeFromHeaders(activeResponse?.headers ?? null);
const mimeType = contentType == null ? null : getMimeTypeFromContentType(contentType).essence;
const responseEvents = useHttpResponseEvents(activeResponse);
const cookieCounts = useMemo(() => getCookieCounts(responseEvents.data), [responseEvents.data]);
const cookieCount = useMemo(() => {
if (!responseEvents.data) return 0;
let count = 0;
for (const event of responseEvents.data) {
const e = event.event;
if (
(e.type === 'header_up' && e.name.toLowerCase() === 'cookie') ||
(e.type === 'header_down' && e.name.toLowerCase() === 'set-cookie')
) {
count++;
}
}
return count;
}, [responseEvents.data]);
const tabs = useMemo<TabItem[]>(
() => [
@@ -78,9 +92,7 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
onChange: setViewMode,
items: [
{ label: 'Response', value: 'pretty' },
...(mimeType?.startsWith('image')
? []
: [{ label: 'Response (Raw)', shortLabel: 'Raw', value: 'raw' }]),
...(mimeType?.startsWith('image') ? [] : [{ label: 'Raw', value: 'raw' }]),
],
},
},
@@ -95,47 +107,40 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
label: 'Headers',
rightSlot: (
<CountBadge
count={activeResponse?.requestHeaders.length ?? 0}
count2={activeResponse?.headers.length ?? 0}
showZero
count={activeResponse?.requestHeaders.length ?? 0}
/>
),
},
{
value: TAB_COOKIES,
label: 'Cookies',
rightSlot:
cookieCounts.sent > 0 || cookieCounts.received > 0 ? (
<CountBadge count={cookieCounts.sent} count2={cookieCounts.received} showZero />
) : null,
rightSlot: cookieCount > 0 ? <CountBadge count={cookieCount} /> : null,
},
{
value: TAB_TIMELINE,
label: 'Timeline',
rightSlot: <CountBadge count={responseEvents.data?.length ?? 0} />,
options: {
value: timelineViewMode,
onChange: (v) => setTimelineViewMode((v as TimelineViewMode) ?? 'timeline'),
items: [
{ label: 'Timeline', value: 'timeline' },
{ label: 'Timeline (Text)', shortLabel: 'Timeline', value: 'text' },
],
},
},
],
[
activeResponse?.headers,
activeResponse?.requestContentLength,
activeResponse?.requestHeaders.length,
cookieCounts.sent,
cookieCounts.received,
cookieCount,
mimeType,
responseEvents.data?.length,
setViewMode,
viewMode,
timelineViewMode,
setTimelineViewMode,
],
);
const activeTab = activeTabs?.[activeRequestId];
const setActiveTab = useCallback(
(tab: string) => {
setActiveTabs((r) => ({ ...r, [activeRequestId]: tab }));
},
[activeRequestId, setActiveTabs],
);
const cancel = useCancelHttpResponse(activeResponse?.id ?? null);
@@ -199,12 +204,14 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
)}
{/* Show tabs if we have any data (headers, body, etc.) even if there's an error */}
<Tabs
key={activeRequestId} // Freshen tabs on request change
value={activeTab}
onChangeValue={setActiveTab}
tabs={tabs}
label="Response"
className="ml-3 mr-3 mb-3 min-h-0 flex-1"
tabListClassName="mt-0.5 -mb-1.5"
storageKey="http_response_tabs"
activeTabKey={activeRequestId}
tabListClassName="mt-0.5"
storageKey="http_response_tabs_order"
>
<TabContent value={TAB_BODY}>
<ErrorBoundary name="Http Response Viewer">
@@ -264,7 +271,7 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
<ResponseCookies response={activeResponse} />
</TabContent>
<TabContent value={TAB_TIMELINE}>
<HttpResponseTimeline response={activeResponse} viewMode={timelineViewMode} />
<HttpResponseTimeline response={activeResponse} />
</TabContent>
</Tabs>
</div>


@@ -3,179 +3,186 @@ import type {
HttpResponseEvent,
HttpResponseEventData,
} from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { type ReactNode, useMemo, useState } from 'react';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { Editor } from './core/Editor/LazyEditor';
import { type EventDetailAction, EventDetailHeader, EventViewer } from './core/EventViewer';
import { EventViewerRow } from './core/EventViewerRow';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { HttpMethodTagRaw } from './core/HttpMethodTag';
import { HttpStatusTagRaw } from './core/HttpStatusTag';
import { Icon, type IconProps } from './core/Icon';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import type { TimelineViewMode } from './HttpResponsePane';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
interface Props {
response: HttpResponse;
viewMode: TimelineViewMode;
}
export function HttpResponseTimeline({ response, viewMode }: Props) {
return <Inner key={response.id} response={response} viewMode={viewMode} />;
export function HttpResponseTimeline({ response }: Props) {
return <Inner key={response.id} response={response} />;
}
function Inner({ response, viewMode }: Props) {
const [showRaw, setShowRaw] = useState(false);
function Inner({ response }: Props) {
const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
const { data: events, error, isLoading } = useHttpResponseEvents(response);
// Generate plain text representation of all events (with prefixes for timeline view)
const plainText = useMemo(() => {
if (!events || events.length === 0) return '';
return events.map((event) => formatEventText(event.event, true)).join('\n');
}, [events]);
const activeEvent = useMemo(
() => (activeEventIndex == null ? null : events?.[activeEventIndex]),
[activeEventIndex, events],
);
// Plain text view - show all events as text in an editor
if (viewMode === 'text') {
if (isLoading) {
return <div className="p-4 text-text-subtlest">Loading events...</div>;
} else if (error) {
return <div className="p-4 text-danger">{String(error)}</div>;
} else if (!events || events.length === 0) {
return <div className="p-4 text-text-subtlest">No events recorded</div>;
} else {
return (
<Editor language="timeline" defaultValue={plainText} readOnly stateKey={null} hideGutter />
);
}
if (isLoading) {
return <div className="p-3 text-text-subtlest italic">Loading events...</div>;
}
if (error) {
return (
<Banner color="danger" className="m-3">
{String(error)}
</Banner>
);
}
if (!events || events.length === 0) {
return <div className="p-3 text-text-subtlest italic">No events recorded</div>;
}
return (
<EventViewer
events={events ?? []}
getEventKey={(event) => event.id}
error={error ? String(error) : null}
isLoading={isLoading}
loadingMessage="Loading events..."
emptyMessage="No events recorded"
splitLayoutName="http_response_events"
<SplitLayout
layout="vertical"
name="http_response_events"
defaultRatio={0.25}
renderRow={({ event, isActive, onClick }) => {
const display = getEventDisplay(event.event);
return (
<EventViewerRow
isActive={isActive}
onClick={onClick}
icon={<Icon color={display.color} icon={display.icon} size="sm" />}
content={display.summary}
timestamp={event.createdAt}
/>
);
}}
renderDetail={({ event, onClose }) => (
<EventDetails event={event} showRaw={showRaw} setShowRaw={setShowRaw} onClose={onClose} />
minHeightPx={10}
firstSlot={() => (
<AutoScroller
data={events}
render={(event, i) => (
<EventRow
key={event.id}
event={event}
isActive={i === activeEventIndex}
onClick={() => {
if (i === activeEventIndex) setActiveEventIndex(null);
else setActiveEventIndex(i);
}}
/>
)}
/>
)}
secondSlot={
activeEvent
? () => (
<div className="grid grid-rows-[auto_minmax(0,1fr)]">
<div className="pb-3 px-2">
<Separator />
</div>
<div className="mx-2 overflow-y-auto">
<EventDetails event={activeEvent} />
</div>
</div>
)
: null
}
/>
);
}
function EventRow({
onClick,
isActive,
event,
}: {
onClick: () => void;
isActive: boolean;
event: HttpResponseEvent;
}) {
const display = getEventDisplay(event.event);
const { icon, color, summary } = display;
return (
<div className="px-1">
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,1fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono text-editor cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
)}
>
<Icon color={color} icon={icon} size="sm" />
<div className="w-full truncate">{summary}</div>
<div className="opacity-50">{format(`${event.createdAt}Z`, 'HH:mm:ss.SSS')}</div>
</button>
</div>
);
}
function formatBytes(bytes: number): string {
if (bytes < 1024) return `${bytes} B`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
}
function EventDetails({
event,
showRaw,
setShowRaw,
onClose,
}: {
event: HttpResponseEvent;
showRaw: boolean;
setShowRaw: (v: boolean) => void;
onClose: () => void;
}) {
function EventDetails({ event }: { event: HttpResponseEvent }) {
const { label } = getEventDisplay(event.event);
const timestamp = format(new Date(`${event.createdAt}Z`), 'HH:mm:ss.SSS');
const e = event.event;
const actions: EventDetailAction[] = [
{
key: 'toggle-raw',
label: showRaw ? 'Formatted' : 'Text',
onClick: () => setShowRaw(!showRaw),
},
];
// Determine the title based on event type
const title = (() => {
switch (e.type) {
case 'header_up':
return 'Header Sent';
case 'header_down':
return 'Header Received';
case 'send_url':
return 'Request';
case 'receive_url':
return 'Response';
case 'redirect':
return 'Redirect';
case 'setting':
return 'Apply Setting';
case 'chunk_sent':
return 'Data Sent';
case 'chunk_received':
return 'Data Received';
case 'dns_resolved':
return e.overridden ? 'DNS Override' : 'DNS Resolution';
default:
return label;
}
})();
// Render content based on view mode and event type
const renderContent = () => {
// Raw view - show plaintext representation (without prefix)
if (showRaw) {
const rawText = formatEventText(event.event, false);
return <Editor language="text" defaultValue={rawText} readOnly stateKey={null} hideGutter />;
}
// Headers - show name and value
if (e.type === 'header_up' || e.type === 'header_down') {
return (
// Headers - show name and value with Editor for JSON
if (e.type === 'header_up' || e.type === 'header_down') {
return (
<div className="flex flex-col gap-2 h-full">
<DetailHeader
title={e.type === 'header_down' ? 'Header Received' : 'Header Sent'}
timestamp={timestamp}
/>
<KeyValueRows>
<KeyValueRow label="Header">{e.name}</KeyValueRow>
<KeyValueRow label="Value">{e.value}</KeyValueRow>
</KeyValueRows>
);
}
</div>
);
}
// Request URL - show method and path separately
if (e.type === 'send_url') {
return (
// Request URL - show method and path separately
if (e.type === 'send_url') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Request" timestamp={timestamp} />
<KeyValueRows>
<KeyValueRow label="Method">
<HttpMethodTagRaw forceColor method={e.method} />
</KeyValueRow>
<KeyValueRow label="Path">{e.path}</KeyValueRow>
</KeyValueRows>
);
}
</div>
);
}
// Response status - show version and status separately
if (e.type === 'receive_url') {
return (
// Response status - show version and status separately
if (e.type === 'receive_url') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Response" timestamp={timestamp} />
<KeyValueRows>
<KeyValueRow label="HTTP Version">{e.version}</KeyValueRow>
<KeyValueRow label="Status">
<HttpStatusTagRaw status={e.status} />
</KeyValueRow>
</KeyValueRows>
);
}
</div>
);
}
// Redirect - show status, URL, and behavior
if (e.type === 'redirect') {
return (
// Redirect - show status, URL, and behavior
if (e.type === 'redirect') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Redirect" timestamp={timestamp} />
<KeyValueRows>
<KeyValueRow label="Status">
<HttpStatusTagRaw status={e.status} />
@@ -185,98 +192,51 @@ function EventDetails({
{e.behavior === 'drop_body' ? 'Drop body, change to GET' : 'Preserve method and body'}
</KeyValueRow>
</KeyValueRows>
);
}
</div>
);
}
// Settings - show as key/value
if (e.type === 'setting') {
return (
// Settings - show as key/value
if (e.type === 'setting') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Apply Setting" timestamp={timestamp} />
<KeyValueRows>
<KeyValueRow label="Setting">{e.name}</KeyValueRow>
<KeyValueRow label="Value">{e.value}</KeyValueRow>
</KeyValueRows>
);
}
</div>
);
}
// Chunks - show formatted bytes
if (e.type === 'chunk_sent' || e.type === 'chunk_received') {
return <div className="font-mono text-editor">{formatBytes(e.bytes)}</div>;
}
// Chunks - show formatted bytes
if (e.type === 'chunk_sent' || e.type === 'chunk_received') {
const direction = e.type === 'chunk_sent' ? 'Sent' : 'Received';
return (
<div className="flex flex-col gap-2">
<DetailHeader title={`Data ${direction}`} timestamp={timestamp} />
<div className="font-mono text-editor">{formatBytes(e.bytes)}</div>
</div>
);
}
// DNS Resolution - show hostname, addresses, and timing
if (e.type === 'dns_resolved') {
return (
<KeyValueRows>
<KeyValueRow label="Hostname">{e.hostname}</KeyValueRow>
<KeyValueRow label="Addresses">{e.addresses.join(', ')}</KeyValueRow>
<KeyValueRow label="Duration">
{e.overridden ? (
<span className="text-text-subtlest">--</span>
) : (
`${String(e.duration)}ms`
)}
</KeyValueRow>
{e.overridden ? <KeyValueRow label="Source">Workspace Override</KeyValueRow> : null}
</KeyValueRows>
);
}
// Default - use summary
const { summary } = getEventDisplay(event.event);
return <div className="font-mono text-editor">{summary}</div>;
};
// Default - use summary
const { summary } = getEventDisplay(event.event);
return (
<div className="flex flex-col gap-2 h-full">
<EventDetailHeader
title={title}
timestamp={event.createdAt}
actions={actions}
onClose={onClose}
/>
{renderContent()}
<div className="flex flex-col gap-1">
<DetailHeader title={label} timestamp={timestamp} />
<div className="font-mono text-editor">{summary}</div>
</div>
);
}
type EventTextParts = { prefix: '>' | '<' | '*'; text: string };
/** Get the prefix and text for an event */
function getEventTextParts(event: HttpResponseEventData): EventTextParts {
switch (event.type) {
case 'send_url':
return { prefix: '>', text: `${event.method} ${event.path}` };
case 'receive_url':
return { prefix: '<', text: `${event.version} ${event.status}` };
case 'header_up':
return { prefix: '>', text: `${event.name}: ${event.value}` };
case 'header_down':
return { prefix: '<', text: `${event.name}: ${event.value}` };
case 'redirect': {
const behavior = event.behavior === 'drop_body' ? 'drop body' : 'preserve';
return { prefix: '*', text: `Redirect ${event.status} -> ${event.url} (${behavior})` };
}
case 'setting':
return { prefix: '*', text: `Setting ${event.name}=${event.value}` };
case 'info':
return { prefix: '*', text: event.message };
case 'chunk_sent':
return { prefix: '*', text: `[${formatBytes(event.bytes)} sent]` };
case 'chunk_received':
return { prefix: '*', text: `[${formatBytes(event.bytes)} received]` };
case 'dns_resolved':
if (event.overridden) {
return { prefix: '*', text: `DNS override ${event.hostname} -> ${event.addresses.join(', ')}` };
}
return { prefix: '*', text: `DNS resolved ${event.hostname} to ${event.addresses.join(', ')} (${event.duration}ms)` };
default:
return { prefix: '*', text: '[unknown event]' };
}
}
/** Format event as plaintext, optionally with curl-style prefix (> outgoing, < incoming, * info) */
function formatEventText(event: HttpResponseEventData, includePrefix: boolean): string {
const { prefix, text } = getEventTextParts(event);
return includePrefix ? `${prefix} ${text}` : text;
function DetailHeader({ title, timestamp }: { title: string; timestamp: string }) {
return (
<div className="flex items-center justify-between gap-2">
<h3 className="font-semibold select-auto cursor-auto">{title}</h3>
<span className="text-text-subtlest font-mono text-editor">{timestamp}</span>
</div>
);
}
type EventDisplay = {
@@ -305,7 +265,7 @@ function getEventDisplay(event: HttpResponseEventData): EventDisplay {
case 'redirect':
return {
icon: 'arrow_big_right_dash',
color: 'success',
color: 'warning',
label: 'Redirect',
summary: `Redirecting ${event.status} ${event.url}${event.behavior === 'drop_body' ? ' (drop body)' : ''}`,
};
@@ -352,15 +312,6 @@ function getEventDisplay(event: HttpResponseEventData): EventDisplay {
label: 'Chunk',
summary: `${formatBytes(event.bytes)} chunk received`,
};
case 'dns_resolved':
return {
icon: 'globe',
color: event.overridden ? 'success' : 'secondary',
label: event.overridden ? 'DNS Override' : 'DNS',
summary: event.overridden
              ? `${event.hostname} → ${event.addresses.join(', ')} (overridden)`
              : `${event.hostname} → ${event.addresses.join(', ')} (${event.duration}ms)`,
};
default:
return {
icon: 'info',


@@ -71,7 +71,7 @@ export const RequestMethodDropdown = memo(function RequestMethodDropdown({
onChange={handleChange}
>
<Button size="xs" className={classNames(className, 'text-text-subtle hover:text')}>
<HttpMethodTag request={request} noAlias />
<HttpMethodTag request={request} />
</Button>
</RadioDropdown>
);


@@ -5,6 +5,7 @@ import { useLicense } from '@yaakapp-internal/license';
import { pluginsAtom, settingsAtom } from '@yaakapp-internal/models';
import classNames from 'classnames';
import { useAtomValue } from 'jotai';
import { useState } from 'react';
import { useKeyPressEvent } from 'react-use';
import { appInfo } from '../../lib/appInfo';
import { capitalize } from '../../lib/capitalize';
@@ -50,6 +51,7 @@ export default function Settings({ hide }: Props) {
const { tab: tabFromQuery } = useSearch({ from: '/workspaces/$workspaceId/settings' });
// Parse tab and subtab (e.g., "plugins:installed")
const [mainTab, subtab] = tabFromQuery?.split(':') ?? [];
const [tab, setTab] = useState<string | undefined>(mainTab || tabFromQuery);
const settings = useAtomValue(settingsAtom);
const plugins = useAtomValue(pluginsAtom);
const licenseCheck = useLicense();
@@ -89,10 +91,11 @@ export default function Settings({ hide }: Props) {
)}
<Tabs
layout="horizontal"
defaultValue={mainTab || tabFromQuery}
value={tab}
addBorders
tabListClassName="min-w-[10rem] bg-surface x-theme-sidebar border-r border-border pl-3"
label="Settings"
onChangeValue={setTab}
tabs={tabs.map(
(value): TabItem => ({
value,
@@ -142,7 +145,7 @@ export default function Settings({ hide }: Props) {
<SettingsHotkeys />
</TabContent>
<TabContent value={TAB_PLUGINS} className="h-full grid grid-rows-1 px-6 !py-4">
<SettingsPlugins defaultSubtab={mainTab === TAB_PLUGINS ? subtab : undefined} />
<SettingsPlugins defaultSubtab={tab === TAB_PLUGINS ? subtab : undefined} />
</TabContent>
<TabContent value={TAB_PROXY} className="overflow-y-auto h-full px-6 !py-4">
<SettingsProxy />


@@ -54,11 +54,13 @@ export function SettingsPlugins({ defaultSubtab }: SettingsPluginsProps) {
const installedPlugins = plugins.filter((p) => !isPluginBundled(p, appInfo.vendoredPluginDir));
const createPlugin = useInstallPlugin();
const refreshPlugins = useRefreshPlugins();
const [tab, setTab] = useState<string | undefined>(defaultSubtab);
return (
<div className="h-full">
<Tabs
defaultValue={defaultSubtab}
value={tab}
label="Plugins"
onChangeValue={setTab}
addBorders
tabs={[
{ label: 'Discover', value: 'search' },
@@ -115,7 +117,7 @@ export function SettingsPlugins({ defaultSubtab }: SettingsPluginsProps) {
icon="help"
title="View documentation"
onClick={() =>
openUrl('https://yaak.app/docs/plugin-development/plugins-quick-start')
openUrl('https://feedback.yaak.app/help/articles/6911763-quick-start')
}
/>
</HStack>


@@ -75,7 +75,7 @@ export function SettingsTheme() {
<Heading>Theme</Heading>
<p className="text-text-subtle">
Make Yaak your own by selecting a theme, or{' '}
<Link href="https://yaak.app/docs/plugin-development/plugins-quick-start">
<Link href="https://feedback.yaak.app/help/articles/6911763-plugins-quick-start">
Create Your Own
</Link>
</p>

Some files were not shown because too many files have changed in this diff.