Compare commits

...

50 Commits

Author SHA1 Message Date
Gregory Schier
c00d2e981f Fix basic auth failing when password field is empty or unset
Handle undefined username/password values by defaulting to empty string,
preventing "undefined" from being encoded in the Authorization header.

Fixes https://feedback.yaak.app/p/strange-basic-auth-behaviour-in-202612

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-28 15:07:03 -08:00
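A minimal sketch of the defaulting described above, assuming a Node-style environment for `Buffer`; `basicAuthHeader` is an illustrative name, not the plugin's actual function:

```ts
// Default missing username/password to empty strings so the literal string
// "undefined" never ends up base64-encoded into the header value.
function basicAuthHeader(username?: string, password?: string): string {
  const user = username ?? '';
  const pass = password ?? '';
  return 'Basic ' + Buffer.from(`${user}:${pass}`).toString('base64');
}

// With the password unset, this now encodes "alice:" rather than "alice:undefined".
console.log(basicAuthHeader('alice')); // -> "Basic YWxpY2U6"
```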
Gregory Schier
9c45254952 Fix template tag theme colors 2026-01-28 13:08:22 -08:00
Gregory Schier
d031ff231a Bump plugin runtime types 2026-01-28 08:43:19 -08:00
Gregory Schier
f056894ddb Show full URL parts in Timeline debug view (#373)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-28 08:41:17 -08:00
dependabot[bot]
1b0315165f Bump hono from 4.11.4 to 4.11.7 (#372) 2026-01-28 08:37:10 -08:00
Gregory Schier
bd7e840a57 Fix x64 macOS build bundling wrong architecture binaries
Set YAAK_TARGET_ARCH before npm run bootstrap so vendor scripts
download the correct x64 binaries instead of arm64 ones.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-26 20:00:59 -08:00
Gregory Schier
8969748c3c Add option to disable encryption when key is forgotten (#371) 2026-01-26 15:40:02 -08:00
Gregory Schier
4e15ac10a6 Add folder CRUD operations to MCP server (#369)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-26 15:08:24 -08:00
Gregory Schier
47a3d44888 Git branch flow improvements (#370) 2026-01-26 14:45:51 -08:00
Gregory Schier
eb10910d20 Update HttpMethodTag.tsx 2026-01-22 06:03:04 -08:00
Gregory Schier
6ba83d424d Fix request method dropdown for GraphQL not showing HTTP method 2026-01-22 06:02:49 -08:00
Gregory Schier
beb47a6b6a Refactor default headers to be injected dynamically (#367) 2026-01-19 07:29:00 -08:00
Gregory Schier
1893b8f8dd Enable source maps for production builds (#366)
Co-authored-by: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-19 05:12:26 -08:00
Gregory Schier
7a5bca7aae Add text version of the response Timeline tab 2026-01-15 08:14:21 -08:00
Gregory Schier
9a75bc2ae7 Update release notes command 2026-01-15 07:22:46 -08:00
dependabot[bot]
65514e3882 Bump hono from 4.11.3 to 4.11.4 (#364) 2026-01-15 07:18:27 -08:00
Gregory Schier
9ddaafb79f Fix tab focusability 2026-01-15 07:17:25 -08:00
Gregory Schier
de47ee19ec Fix authentication actions being called with unrendered args 2026-01-15 07:10:33 -08:00
Gregory Schier
ea730d0184 Fix clicking URL placeholder params not focusing value input
The PairEditor ref callback used strict equality to determine when all
rows were ready, but placeholder params (like :id) regenerate fresh IDs
on every keystroke, causing rowsRef to accumulate entries. Using >=
allows the ref to be set even when there are more registered rows than
current pairs.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-14 11:28:09 -08:00
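A boiled-down sketch of the comparison change described above (the real check lives in PairEditor's ref callback; these names are illustrative):

```ts
// Before: registeredRowIds.size === currentPairCount never held once stale IDs
// from regenerated placeholder params piled up in rowsRef.
// After: >= counts the editor as ready whenever every current pair is covered,
// even if extra (stale) rows are still registered.
function allRowsReady(registeredRowIds: Set<string>, currentPairCount: number): boolean {
  return registeredRowIds.size >= currentPairCount;
}
```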
Gregory Schier
fe706998d4 Fix cursor style on template tags 2026-01-14 10:36:42 -08:00
Gregory Schier
99209e088f Consolidate tab persistence logic into Tabs component
- Move active tab persistence into Tabs component with storageKey + activeTabKey props
- Change value prop to defaultValue so callers don't manage tab state
- Add TabsRef with setActiveTab method for programmatic tab switching
- Restore request_pane.focus_tab listener for :param placeholder clicks
- Update all Tab consumers to use new pattern

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-14 10:32:10 -08:00
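A sketch of the surface implied by the bullets above; the exact prop types and the `useFocusParamsTab` caller are assumptions for illustration:

```ts
import { useRef } from 'react';

// Ref shape for programmatic tab switching, per the commit bullets.
interface TabsRef {
  setActiveTab(tabKey: string): void;
}

// Props surface implied by the commit; exact semantics are not shown in this diff.
interface TabsProps {
  storageKey: string;    // key under which the active tab is persisted
  activeTabKey?: string; // per the commit; exact role assumed
  defaultValue: string;  // initial tab; callers no longer manage tab state
}

// Illustrative caller: the request_pane.focus_tab listener can switch tabs
// imperatively when a :param placeholder is clicked.
function useFocusParamsTab() {
  const tabsRef = useRef<TabsRef>(null);
  const focusParamsTab = () => tabsRef.current?.setActiveTab('params');
  return { tabsRef, focusParamsTab };
}
```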
Gregory Schier
3eb29ff2fe Fix gRPC schema refresh not invalidating cache
Passing skip_cache to services() made it call reflect(), but reflect() had its
own cache check that returned early. Simplified by removing skip_cache and
always invalidating the pool in cmd_grpc_reflect, since that command is
only called when a fresh schema is needed.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-14 08:20:03 -08:00
Gregory Schier
b759003c83 Fix events from old connections showing in new connections
Events from previous WebSocket/gRPC connections and HTTP responses were
persisting in the store and displaying in new connections. Added filter
parameter to mergeModelsInStore that clears old events when switching
connections, plus render-time filtering as a safety net.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-14 07:58:32 -08:00
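A rough sketch of the store-merge behaviour this commit describes, with the added filter parameter clearing events from other connections (the store shape, names, and callback signature are all assumptions):

```ts
interface ConnectionEvent {
  id: string;
  connectionId: string;
}

// Merge freshly fetched events into the store; the optional filter drops
// anything that no longer belongs (e.g. events from a previous connection).
function mergeModelsInStore(
  store: Map<string, ConnectionEvent>,
  fetched: ConnectionEvent[],
  keep?: (event: ConnectionEvent) => boolean,
): void {
  if (keep) {
    for (const [id, event] of store) {
      if (!keep(event)) store.delete(id);
    }
  }
  for (const event of fetched) store.set(event.id, event);
}

// Switching connections: keep only events for the active connection.
// mergeModelsInStore(store, fetched, (e) => e.connectionId === activeConnectionId);
```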
Gregory Schier
6cba38ac89 Strip empty headers before sending 2026-01-14 06:59:05 -08:00
Gregory Schier
ba8f85baaf Update feedback links 2026-01-14 06:45:45 -08:00
Gregory Schier
9970d5fa6f Fix lint issues 2026-01-13 09:32:52 -08:00
Gregory Schier
d550b42ca3 Add count badge to DNS tab and make workspace settings tabs reorderable 2026-01-13 09:24:56 -08:00
Gregory Schier
2e1f0cb53f Adjust tab list margins 2026-01-13 09:24:53 -08:00
Gregory Schier
eead422ada Fix HeadersEditor padding when no inherited headers 2026-01-13 09:24:48 -08:00
Gregory Schier
b5753da3b7 Fix dropdown opening on first click of inactive tab 2026-01-13 09:24:44 -08:00
Gregory Schier
ae2f2459e9 Improve EventViewer UX
- Separate selected item from panel open state (closing panel keeps selection)
- Scroll selected item into view when detail panel opens
- Enter/Space opens detail panel, Escape closes it
- Remove browser focus outline on scroll container
- Add prefix prop to EventDetailHeader for labels
- Make timestamp optional in EventViewerRow
- Add close button to EventDetailHeader
- Fix title truncation with min-w-0
- Consolidate HttpResponseTimeline title generation
- Add ID/event labels to SSE detail header
- Remove fake timestamp from SSE events

Closes https://feedback.yaak.app/p/feedback-on-sse-viewer-ux-in-yaak
2026-01-13 09:05:50 -08:00
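A small sketch of the selection/panel split and key handling from the bullets above; the hook name and state shape are illustrative:

```ts
import { useState, type KeyboardEvent } from 'react';

// Selection and panel visibility are tracked separately, so closing the panel
// keeps the row selected.
function useEventSelection() {
  const [selectedId, setSelectedId] = useState<string | null>(null);
  const [panelOpen, setPanelOpen] = useState(false);

  const onRowKeyDown = (e: KeyboardEvent, id: string) => {
    if (e.key === 'Enter' || e.key === ' ') {
      e.preventDefault();
      setSelectedId(id);
      setPanelOpen(true); // Enter/Space opens the detail panel
    } else if (e.key === 'Escape') {
      setPanelOpen(false); // Escape closes it, selection stays
    }
  };

  return { selectedId, panelOpen, setSelectedId, onRowKeyDown };
}
```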
Gregory Schier
306e6f358a feat: Add DNS timings and resolution overrides (#360)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-13 08:42:22 -08:00
Gregory Schier
822d52a57e Better logging for plugin timeouts 2026-01-13 07:26:32 -08:00
Gregory Schier
e665ce04df Fix plugins commands 2026-01-12 12:58:39 -08:00
Alex Coté
e4828e1b17 Fix README icon (#361) 2026-01-12 08:08:54 -08:00
Gregory Schier
42143249a2 Prevent Windows console window for yaaknode and yaakprotoc
Add a new_xplatform_command() helper in yaak-common that creates a
tokio::process::Command with the CREATE_NO_WINDOW flag set on Windows.

Also converts git commands to async for consistency.
2026-01-11 15:07:56 -08:00
Gregory Schier
72a7e6963d Separate entitlements for main app, yaaknode, and yaakprotoc 2026-01-11 14:05:47 -08:00
Gregory Schier
494e9efb64 Apply entitlements when signing vendored binaries 2026-01-11 14:03:35 -08:00
Gregory Schier
9fe077f598 Sign vendored binaries with hardened runtime on macOS 2026-01-11 10:14:39 -08:00
Gregory Schier
a6eca1cf2e Add Windows binary paths to tauri resources 2026-01-11 09:55:12 -08:00
Gregory Schier
31edd1013f Add missing bootstrap step to release workflow 2026-01-11 09:42:36 -08:00
Gregory Schier
28e9657ea5 Add EventDetailHeader component and fix EventViewer overflow
- Create standardized EventDetailHeader with title, timestamp, actions, and copyText props
- Fix EventViewer firstSlot overflow/scrolling issue
- Update GrpcResponsePane, WebsocketResponsePane, HttpResponseTimeline, and EventStreamViewer to use EventDetailHeader
- Fix Timeline title consistency when toggling Raw/Formatted views
2026-01-11 08:51:36 -08:00
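A sketch of the props surface implied by the bullets above; optionality and exact types are assumptions:

```ts
import type { ReactNode } from 'react';

interface EventDetailHeaderProps {
  title: ReactNode;    // kept consistent when toggling Raw/Formatted Timeline views
  timestamp?: string;  // optional, e.g. SSE events carry no real timestamp
  actions?: ReactNode; // extra controls such as a close button
  copyText?: string;   // payload for a copy action
  prefix?: string;     // label prefix, added in the 2026-01-13 "Improve EventViewer UX" commit
}
```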
Gregory Schier
ff084a224a Consolidate event viewer interfaces (#355) 2026-01-11 07:57:05 -08:00
Gregory Schier
bbcae34575 Fix race condition where streamed events could be lost
Events stream in via model_write listener while also being fetched
from the database. If the DB fetch completed before all events were
persisted, replaceModelsInStore would wipe out events that came in
via model_write.

Added mergeModelsInStore that adds fetched events without removing
existing ones. Applied to HTTP, gRPC, and WebSocket event hooks.
2026-01-11 07:42:04 -08:00
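A side-by-side sketch of the replace-vs-merge behaviour described above (the store is modelled as a Map purely for illustration):

```ts
// replaceModelsInStore cleared the store before inserting the DB results, so
// events that had already streamed in via the model_write listener were lost
// if the fetch finished first. Merging only adds/updates, never removes.
function replaceModelsInStore<T extends { id: string }>(store: Map<string, T>, fetched: T[]) {
  store.clear(); // drops anything persisted after the fetch started
  for (const model of fetched) store.set(model.id, model);
}

function mergeModelsInStore<T extends { id: string }>(store: Map<string, T>, fetched: T[]) {
  for (const model of fetched) store.set(model.id, model); // streamed events survive
}
```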
Gregory Schier
2a5587c128 Show sent/received cookie counts in Cookies tab
- Add getCookieCounts function to parse cookie headers and count
  individual cookies (not just headers)
- Deduplicates by cookie name using Sets
- Display as sent/received format like Headers tab
- Add showZero to CountBadge so 0/3 displays properly
- Add tests for getCookieCounts
2026-01-11 07:20:01 -08:00
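A sketch of the counting approach described in the bullets, deduplicating by cookie name with Sets; the parsing here is deliberately minimal and the signature is an assumption:

```ts
// Count distinct cookies sent (Cookie header: "a=1; b=2") and received
// (one Set-Cookie header per cookie), deduplicated by name.
function getCookieCounts(requestHeaders: Record<string, string>, setCookieHeaders: string[]) {
  const sent = new Set<string>();
  const cookieHeader = requestHeaders['cookie'] ?? '';
  for (const part of cookieHeader.split(';')) {
    const name = part.split('=')[0]?.trim();
    if (name) sent.add(name);
  }

  const received = new Set<string>();
  for (const header of setCookieHeaders) {
    const name = header.split(';')[0]?.split('=')[0]?.trim();
    if (name) received.add(name);
  }

  return { sent: sent.size, received: received.size };
}

// getCookieCounts({ cookie: 'a=1; b=2; a=3' }, ['session=x; Path=/']) -> { sent: 2, received: 1 }
```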
Gregory Schier
c41e173a63 Fix dropdown menu hotkeys not working when menu is closed
The nested menu PR introduced an early return null when !isOpen,
which prevented MenuItemHotKey components from being rendered.
Fixed by extracting hotKeyElements and rendering them even when
the menu is closed.
2026-01-11 07:19:56 -08:00
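A condensed sketch of the fix: hoist the hotkey elements out of the early return so they stay mounted while the menu is closed (MenuItemHotKey is stubbed here; the real component and props differ):

```tsx
// Assumed helper: registers a global hotkey while mounted, renders nothing visible.
declare function MenuItemHotKey(props: { hotKey: string }): null;

// Before: `if (!isOpen) return null;` unmounted the MenuItemHotKey elements, so
// their key bindings were gone whenever the menu was closed. After: the hotkey
// elements are extracted and rendered in both branches.
function Menu({ isOpen, items }: { isOpen: boolean; items: { label: string; hotKey?: string }[] }) {
  const hotKeyElements = items
    .filter((item) => item.hotKey != null)
    .map((item) => <MenuItemHotKey key={item.label} hotKey={item.hotKey!} />);

  if (!isOpen) return <>{hotKeyElements}</>;

  return (
    <div role="menu">
      {items.map((item) => (
        <div key={item.label} role="menuitem">
          {item.label}
        </div>
      ))}
      {hotKeyElements}
    </div>
  );
}
```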
Gregory Schier
2b43407ddf Fix gRPC autocomplete schema not being applied
Two issues fixed:

1. Initialize stateExtensions with empty object {} instead of undefined.
   When called with no argument, the schema state was undefined, causing
   jsonCompletion() to return [] instead of a proper result object, which
   CodeMirror's autocomplete didn't handle correctly.

2. Change editorView from useRef to useState so the effect that calls
   updateSchema() properly re-runs when the editor view is set. With useRef,
   the effect could run before the editor was mounted or with a stale
   reference when the editor was recreated.
2026-01-10 14:57:28 -08:00
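A sketch of both fixes in hook form, assuming a CodeMirror EditorView and an updateSchema helper (both stand-ins for whatever the real component uses):

```ts
import { useEffect, useState } from 'react';
import type { EditorView } from '@codemirror/view';

// Assumed helper that pushes a JSON schema into the editor's autocomplete state.
declare function updateSchema(view: EditorView, schema: object): void;

// Issue 2: with useRef the effect didn't re-run when the editor mounted, so it
// could fire with a null/stale view. useState triggers a re-render (and the
// effect) once the view is actually set.
function useGrpcSchema(schema: object | null) {
  const [editorView, setEditorView] = useState<EditorView | null>(null);

  useEffect(() => {
    if (editorView != null && schema != null) {
      updateSchema(editorView, schema);
    }
  }, [editorView, schema]);

  return setEditorView; // hand this to the editor's mount callback
}

// Issue 1: default the shared state to {} so the completion source gets a real
// object and can return a proper completion result instead of [].
function createStateExtensions(initialSchemaState: Record<string, unknown> = {}) {
  return { schemaState: initialSchemaState };
}
```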
Gregory Schier
4d75b8ef06 Surface gRPC message deserialization errors to UI
Previously, when a gRPC streaming message failed to deserialize (e.g., wrong
type like int instead of string), the error was silently logged and the message
was dropped. Now errors are surfaced to the UI as GrpcEventType::Error events.

Changed the streaming/client_streaming methods to accept an on_message callback
that handles both success (logs ClientMessage) and error (logs Error) cases,
rather than logging the client message prematurely before deserialization.
2026-01-10 14:57:28 -08:00
Gregory Schier
aa79fb05f9 Fix gRPC stream panic: use async stream combinators instead of block_on
The gRPC streaming code was using tokio::runtime::Handle::current().block_on()
inside filter_map closures, which caused a panic ('Cannot start a runtime from
within a runtime') when called from an async context.

Fixed by replacing the pattern with .then(async move { ... }).filter_map(|x| x)
which properly handles async operations in stream pipelines.

This fixes the gRPC Ping/Pong freeze issue and restores request cancellation.
2026-01-10 14:57:28 -08:00
Gregory Schier
fe01796536 feat: add ctx.prompt.form() plugin API for multi-field form dialogs (#359) 2026-01-10 08:55:43 -08:00
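A hedged usage sketch of the ctx.prompt.form() API added in #359; everything below beyond the method name (option names, field schema, return shape) is an assumption, not the documented API:

```ts
type PromptFormCtx = {
  prompt: { form(options: unknown): Promise<Record<string, string> | null> };
};

// Hypothetical plugin code collecting two fields in one dialog.
async function promptForCredentials(ctx: PromptFormCtx) {
  const values = await ctx.prompt.form({
    title: 'Log in',
    inputs: [
      { name: 'username', label: 'Username' },
      { name: 'password', label: 'Password', password: true },
    ],
  });
  // Assumed: resolves to null when the user cancels the dialog.
  return values;
}
```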
148 changed files with 4173 additions and 1919 deletions

View File

@@ -37,3 +37,11 @@ The skill generates markdown-formatted release notes following this structure:
**IMPORTANT**: Always add blank lines around the markdown code fence and output the markdown code block last
**IMPORTANT**: PRs by `@gschier` should not mention the @username
## After Generating Release Notes
After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:
```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
```

View File

@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [ v* ]
tags: [v*]
jobs:
build-artifacts:
@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
# Windows ARM64
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:
@@ -88,6 +88,9 @@ jobs:
& $exe --version
- run: npm ci
- run: npm run bootstrap
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
- run: npm run lint
- name: Run JS Tests
run: npm test
@@ -99,6 +102,29 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}
- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH
# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true
- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
@@ -121,9 +147,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
releaseDraft: true
prerelease: true
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"

Cargo.lock (generated, 4 changes)
View File

@@ -8075,6 +8075,7 @@ name = "yaak-common"
version = "0.1.0"
dependencies = [
"serde_json",
"tokio",
]
[[package]]
@@ -8121,8 +8122,10 @@ dependencies = [
"serde_json",
"serde_yaml",
"thiserror 2.0.17",
"tokio",
"ts-rs",
"url",
"yaak-common",
"yaak-models",
"yaak-sync",
]
@@ -8149,6 +8152,7 @@ dependencies = [
"tonic",
"tonic-reflection",
"uuid",
"yaak-common",
"yaak-tls",
]

View File

@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
</a>
</p>
@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources
- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://feedback.yaak.app/help)
- [Documentation](https://yaak.app/docs)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)

View File

@@ -15,7 +15,7 @@ use yaak_models::util::UpdateSource;
use yaak_plugins::events::{PluginContext, RenderPurpose};
use yaak_plugins::manager::PluginManager;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
use yaak_templates::{RenderOptions, parse_and_render, render_json_value_raw};
#[derive(Parser)]
#[command(name = "yaakcli")]
@@ -149,14 +149,7 @@ async fn render_http_request(
// Apply path placeholders (e.g., /users/:id -> /users/123)
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
Ok(HttpRequest {
url,
url_parameters,
headers,
body,
authentication,
..r.to_owned()
})
Ok(HttpRequest { url, url_parameters, headers, body, authentication, ..r.to_owned() })
}
#[tokio::main]
@@ -169,16 +162,10 @@ async fn main() {
}
// Use the same app_id for both data directory and keyring
let app_id = if cfg!(debug_assertions) {
"app.yaak.desktop.dev"
} else {
"app.yaak.desktop"
};
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
let data_dir = cli.data_dir.unwrap_or_else(|| {
dirs::data_dir()
.expect("Could not determine data directory")
.join(app_id)
dirs::data_dir().expect("Could not determine data directory").join(app_id)
});
let db_path = data_dir.join("db.sqlite");
@@ -191,9 +178,7 @@ async fn main() {
// Initialize encryption manager for secure() template function
// Use the same app_id as the Tauri app for keyring access
let encryption_manager = Arc::new(
EncryptionManager::new(query_manager.clone(), app_id),
);
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
// Initialize plugin manager for template functions
let vendored_plugin_dir = data_dir.join("vendored-plugins");
@@ -203,9 +188,8 @@ async fn main() {
let node_bin_path = PathBuf::from("node");
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
.map(PathBuf::from)
.unwrap_or_else(|_| {
let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
// Development fallback: look relative to crate root
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
@@ -226,14 +210,10 @@ async fn main() {
// Initialize plugins from database
let plugins = db.list_plugins().unwrap_or_default();
if !plugins.is_empty() {
let errors = plugin_manager
.initialize_all_plugins(plugins, &PluginContext::new_empty())
.await;
let errors =
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
for (plugin_dir, error_msg) in errors {
eprintln!(
"Warning: Failed to initialize plugin '{}': {}",
plugin_dir, error_msg
);
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
}
}
@@ -249,9 +229,7 @@ async fn main() {
}
}
Commands::Requests { workspace_id } => {
let requests = db
.list_http_requests(&workspace_id)
.expect("Failed to list requests");
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
if requests.is_empty() {
println!("No requests found in workspace {}", workspace_id);
} else {
@@ -261,9 +239,7 @@ async fn main() {
}
}
Commands::Send { request_id } => {
let request = db
.get_http_request(&request_id)
.expect("Failed to get request");
let request = db.get_http_request(&request_id).expect("Failed to get request");
// Resolve environment chain for variable substitution
let environment_chain = db
@@ -318,18 +294,13 @@ async fn main() {
}))
} else {
// Drain events silently
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
// Wait for event handler to finish
if let Some(handle) = verbose_handle {
@@ -383,18 +354,13 @@ async fn main() {
}
}))
} else {
tokio::spawn(async move {
while event_rx.recv().await.is_some() {}
});
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
None
};
// Send the request
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
let response = sender
.send(sendable, event_tx)
.await
.expect("Failed to send request");
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
if let Some(handle) = verbose_handle {
let _ = handle.await;
@@ -421,12 +387,7 @@ async fn main() {
let (body, _stats) = response.text().await.expect("Failed to read response body");
println!("{}", body);
}
Commands::Create {
workspace_id,
name,
method,
url,
} => {
Commands::Create { workspace_id, name, method, url } => {
let request = HttpRequest {
workspace_id,
name,

View File

@@ -2,14 +2,6 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS execution -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->

View File

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS/V8 JIT compiler -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>

View File

@@ -1,9 +1,11 @@
use crate::error::Result;
use crate::PluginContextExt;
use crate::error::Result;
use std::sync::Arc;
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use yaak_crypto::manager::EncryptionManager;
use yaak_models::models::HttpRequestHeader;
use yaak_models::queries::workspaces::default_headers;
use yaak_plugins::events::GetThemesResponse;
use yaak_plugins::manager::PluginManager;
use yaak_plugins::native_template_functions::{
@@ -54,7 +56,12 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let plugin_context = window.plugin_context();
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
Ok(encrypt_secure_template_function(
plugin_manager,
encryption_manager,
&plugin_context,
template,
)?)
}
#[command]
@@ -92,3 +99,17 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
window.crypto().set_human_key(workspace_id, key)?;
Ok(())
}
#[command]
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
window: WebviewWindow<R>,
workspace_id: &str,
) -> Result<()> {
window.crypto().disable_encryption(workspace_id)?;
Ok(())
}
#[command]
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
default_headers()
}

View File

@@ -6,33 +6,47 @@ use crate::error::Result;
use std::path::{Path, PathBuf};
use tauri::command;
use yaak_git::{
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
git_unstage,
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
git_rm_remote, git_status, git_unstage,
};
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
#[command]
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
Ok(git_checkout_branch(dir, branch, force)?)
Ok(git_checkout_branch(dir, branch, force).await?)
}
#[command]
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_create_branch(dir, branch)?)
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
Ok(git_create_branch(dir, branch, base).await?)
}
#[command]
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_branch(dir, branch)?)
pub async fn cmd_git_delete_branch(
dir: &Path,
branch: &str,
force: Option<bool>,
) -> Result<BranchDeleteResult> {
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
}
#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
Ok(git_merge_branch(dir, branch, force)?)
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_delete_remote_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
Ok(git_merge_branch(dir, branch).await?)
}
#[command]
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
Ok(git_rename_branch(dir, old_name, new_name).await?)
}
#[command]
@@ -50,24 +64,29 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
Ok(git_init(dir)?)
}
#[command]
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
Ok(git_clone(url, dir).await?)
}
#[command]
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
Ok(git_commit(dir, message)?)
Ok(git_commit(dir, message).await?)
}
#[command]
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
Ok(git_fetch_all(dir)?)
Ok(git_fetch_all(dir).await?)
}
#[command]
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
Ok(git_push(dir)?)
Ok(git_push(dir).await?)
}
#[command]
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
Ok(git_pull(dir)?)
Ok(git_pull(dir).await?)
}
#[command]
@@ -88,12 +107,11 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
#[command]
pub async fn cmd_git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
Ok(git_add_credential(dir, remote_url, username, password).await?)
Ok(git_add_credential(remote_url, username, password).await?)
}
#[command]

View File

@@ -1,12 +1,12 @@
use std::collections::BTreeMap;
use crate::error::Result;
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use KeyAndValueRef::{Ascii, Binary};
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_grpc::{KeyAndValueRef, MetadataMap};
use yaak_models::models::GrpcRequest;
use crate::models_ext::QueryManagerExt;
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
use yaak_plugins::manager::PluginManager;

View File

@@ -1,8 +1,8 @@
use crate::models_ext::QueryManagerExt;
use chrono::{NaiveDateTime, Utc};
use log::debug;
use std::sync::OnceLock;
use tauri::{AppHandle, Runtime};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
const NAMESPACE: &str = "analytics";

View File

@@ -1,9 +1,13 @@
use crate::PluginContextExt;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::render_http_request;
use log::{debug, warn};
use std::pin::Pin;
use std::sync::Arc;
use std::sync::atomic::{AtomicI32, Ordering};
use std::time::{Duration, Instant};
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
use tokio::fs::{File, create_dir_all};
@@ -15,22 +19,19 @@ use yaak_http::client::{
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
};
use yaak_http::cookies::CookieStore;
use yaak_http::manager::HttpConnectionManager;
use yaak_http::manager::{CachedClient, HttpConnectionManager};
use yaak_http::sender::ReqwestSender;
use yaak_http::tee_reader::TeeReader;
use yaak_http::transaction::HttpTransaction;
use yaak_http::types::{
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
};
use crate::models_ext::BlobManagerExt;
use yaak_models::blob_manager::BodyChunk;
use yaak_models::models::{
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
HttpResponseState, ProxySetting, ProxySettingAuth,
};
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use crate::PluginContextExt;
use yaak_plugins::events::{
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
};
@@ -173,7 +174,12 @@ async fn send_http_request_inner<R: Runtime>(
let environment_id = environment.map(|e| e.id);
let workspace = window.db().get_workspace(workspace_id)?;
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
let cb = PluginTemplateCallback::new(
plugin_manager.clone(),
encryption_manager.clone(),
&plugin_context,
RenderPurpose::Send,
);
let env_chain =
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
@@ -228,12 +234,13 @@ async fn send_http_request_inner<R: Runtime>(
None => None,
};
let client = connection_manager
let cached_client = connection_manager
.get_client(&HttpConnectionOptions {
id: plugin_context.id.clone(),
validate_certificates: workspace.setting_validate_certificates,
proxy: proxy_setting,
client_certificate,
dns_overrides: workspace.setting_dns_overrides.clone(),
})
.await?;
@@ -250,7 +257,7 @@ async fn send_http_request_inner<R: Runtime>(
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
let result = execute_transaction(
client,
cached_client,
sendable_request,
response_ctx,
cancelled_rx.clone(),
@@ -310,7 +317,7 @@ pub fn resolve_http_request<R: Runtime>(
}
async fn execute_transaction<R: Runtime>(
client: reqwest::Client,
cached_client: CachedClient,
mut sendable_request: SendableHttpRequest,
response_ctx: &mut ResponseContext<R>,
mut cancelled_rx: Receiver<bool>,
@@ -321,7 +328,10 @@ async fn execute_transaction<R: Runtime>(
let workspace_id = response_ctx.response().workspace_id.clone();
let is_persisted = response_ctx.is_persisted();
let sender = ReqwestSender::with_client(client);
// Keep a reference to the resolver for DNS timing events
let resolver = cached_client.resolver.clone();
let sender = ReqwestSender::with_client(cached_client.client);
let transaction = match cookie_store {
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
None => HttpTransaction::new(sender),
@@ -346,21 +356,39 @@ async fn execute_transaction<R: Runtime>(
let (event_tx, mut event_rx) =
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
// Set the event sender on the DNS resolver so it can emit DNS timing events
resolver.set_event_sender(Some(event_tx.clone())).await;
// Shared state to capture DNS timing from the event processing task
let dns_elapsed = Arc::new(AtomicI32::new(0));
// Write events to DB in a task (only for persisted responses)
if is_persisted {
let response_id = response_id.clone();
let app_handle = app_handle.clone();
let update_source = response_ctx.update_source.clone();
let workspace_id = workspace_id.clone();
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
// Capture DNS timing when we see a DNS event
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
}
});
} else {
// For ephemeral responses, just drain the events
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
// For ephemeral responses, just drain the events but still capture DNS timing
let dns_elapsed = dns_elapsed.clone();
tokio::spawn(async move {
while let Some(event) = event_rx.recv().await {
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
}
}
});
};
// Capture request body as it's sent (only for persisted responses)
@@ -528,10 +556,14 @@ async fn execute_transaction<R: Runtime>(
// Final update with closed state and accurate byte count
response_ctx.update(|r| {
r.elapsed = start.elapsed().as_millis() as i32;
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
r.content_length = Some(written_bytes as i32);
r.state = HttpResponseState::Closed;
})?;
// Clear the event sender from the resolver since this request is done
resolver.set_event_sender(None).await;
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
}

View File

@@ -1,17 +1,17 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::info;
use std::collections::BTreeMap;
use std::fs::read_to_string;
use tauri::{Manager, Runtime, WebviewWindow};
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_core::WorkspaceContext;
use yaak_models::models::{
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
};
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
pub(crate) async fn import_data<R: Runtime>(
window: &WebviewWindow<R>,

View File

@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
use crate::import::import_data;
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
use crate::notifications::YaakNotifier;
use crate::render::{render_grpc_request, render_template};
use crate::render::{render_grpc_request, render_json_value, render_template};
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
use crate::uri_scheme::handle_deep_link;
use error::Result as YaakResult;
@@ -101,6 +101,7 @@ struct AppMetaData {
app_data_dir: String,
app_log_dir: String,
vendored_plugin_dir: String,
default_project_dir: String,
feature_updater: bool,
feature_license: bool,
}
@@ -111,6 +112,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
let app_log_dir = app_handle.path().app_log_dir()?;
let vendored_plugin_dir =
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
Ok(AppMetaData {
is_dev: is_dev(),
version: app_handle.package_info().version.to_string(),
@@ -118,6 +120,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
app_data_dir: app_data_dir.to_string_lossy().to_string(),
app_log_dir: app_log_dir.to_string_lossy().to_string(),
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
default_project_dir: default_project_dir.to_string_lossy().to_string(),
feature_license: cfg!(feature = "license"),
feature_updater: cfg!(feature = "updater"),
})
@@ -189,7 +192,6 @@ async fn cmd_grpc_reflect<R: Runtime>(
request_id: &str,
environment_id: Option<&str>,
proto_files: Vec<String>,
skip_cache: Option<bool>,
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
grpc_handle: State<'_, Mutex<GrpcHandle>>,
@@ -224,18 +226,21 @@ async fn cmd_grpc_reflect<R: Runtime>(
let settings = window.db().get_settings();
let client_certificate =
find_client_certificate(req.url.as_str(), &settings.client_certificates);
let proto_files: Vec<PathBuf> =
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();
Ok(grpc_handle
.lock()
.await
// Always invalidate cached pool when this command is called, to force re-reflection
let mut handle = grpc_handle.lock().await;
handle.invalidate_pool(&req.id, &uri, &proto_files);
Ok(handle
.services(
&req.id,
&uri,
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
&proto_files,
&metadata,
workspace.setting_validate_certificates,
client_certificate,
skip_cache.unwrap_or(false),
)
.await
.map_err(|e| GenericError(e.to_string()))?)
@@ -360,10 +365,8 @@ async fn cmd_grpc_go<R: Runtime>(
let cb = {
let cancelled_rx = cancelled_rx.clone();
let app_handle = app_handle.clone();
let environment_chain = environment_chain.clone();
let window = window.clone();
let base_msg = base_msg.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
@@ -385,14 +388,12 @@ async fn cmd_grpc_go<R: Runtime>(
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
Ok(IncomingMsg::Message(msg)) => {
let window = window.clone();
let app_handle = app_handle.clone();
let base_msg = base_msg.clone();
let environment_chain = environment_chain.clone();
let plugin_manager = plugin_manager.clone();
let encryption_manager = encryption_manager.clone();
let msg = block_in_place(|| {
tauri::async_runtime::block_on(async {
render_template(
let result = render_template(
msg.as_str(),
environment_chain,
&PluginTemplateCallback::new(
@@ -406,24 +407,11 @@ async fn cmd_grpc_go<R: Runtime>(
),
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
)
.await
.expect("Failed to render template")
.await;
result.expect("Failed to render template")
})
});
in_msg_tx.try_send(msg.clone()).unwrap();
tauri::async_runtime::spawn(async move {
app_handle
.db()
.upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_msg.clone()
},
&UpdateSource::from_window_label(window.label()),
)
.unwrap();
});
}
Ok(IncomingMsg::Commit) => {
maybe_in_msg_tx.take();
@@ -470,12 +458,48 @@ async fn cmd_grpc_go<R: Runtime>(
)?;
async move {
// Create callback for streaming methods that handles both success and error
let on_message = {
let app_handle = app_handle.clone();
let base_event = base_event.clone();
let window_label = window.label().to_string();
move |result: std::result::Result<String, String>| match result {
Ok(msg) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: msg,
event_type: GrpcEventType::ClientMessage,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
Err(error) => {
let _ = app_handle.db().upsert_grpc_event(
&GrpcEvent {
content: format!("Failed to send message: {}", error),
event_type: GrpcEventType::Error,
..base_event.clone()
},
&UpdateSource::from_window_label(&window_label),
);
}
}
};
let (maybe_stream, maybe_msg) =
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
(true, true) => (
Some(
connection
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
.streaming(
&service,
&method,
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),
None,
@@ -490,6 +514,7 @@ async fn cmd_grpc_go<R: Runtime>(
in_msg_stream,
&metadata,
client_cert,
on_message.clone(),
)
.await,
),
@@ -1035,14 +1060,54 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
#[tauri::command]
async fn cmd_get_http_authentication_config<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
_environment_id: Option<&str>,
environment_id: Option<&str>,
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
};
// Resolve environment chain and render the values for token lookup
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Preview,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
.get_http_authentication_config(
&window.plugin_context(),
auth_name,
rendered_values,
model.id(),
)
.await?)
}
@@ -1089,19 +1154,54 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
#[tauri::command]
async fn cmd_call_http_authentication_action<R: Runtime>(
window: WebviewWindow<R>,
app_handle: AppHandle<R>,
plugin_manager: State<'_, PluginManager>,
encryption_manager: State<'_, EncryptionManager>,
auth_name: &str,
action_index: i32,
values: HashMap<String, JsonPrimitive>,
model: AnyModel,
_environment_id: Option<&str>,
environment_id: Option<&str>,
) -> YaakResult<()> {
// Extract workspace_id and folder_id from the model to resolve the environment chain
let (workspace_id, folder_id) = match &model {
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
AnyModel::Workspace(w) => (w.id.clone(), None),
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
};
// Resolve environment chain and render the values
let environment_chain = app_handle.db().resolve_environments(
&workspace_id,
folder_id.as_deref(),
environment_id,
)?;
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
let cb = PluginTemplateCallback::new(
plugin_manager_arc,
encryption_manager_arc,
&window.plugin_context(),
RenderPurpose::Send,
);
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
let values_json: serde_json::Value = serde_json::to_value(&values)?;
let rendered_json =
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
// Convert back to HashMap<String, JsonPrimitive>
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
Ok(plugin_manager
.call_http_authentication_action(
&window.plugin_context(),
auth_name,
action_index,
values,
rendered_values,
&model.id(),
)
.await?)
@@ -1621,6 +1721,8 @@ pub fn run() {
//
// Migrated commands
crate::commands::cmd_decrypt_template,
crate::commands::cmd_default_headers,
crate::commands::cmd_disable_encryption,
crate::commands::cmd_enable_encryption,
crate::commands::cmd_get_themes,
crate::commands::cmd_reveal_workspace_key,
@@ -1649,10 +1751,13 @@ pub fn run() {
git_ext::cmd_git_checkout,
git_ext::cmd_git_branch,
git_ext::cmd_git_delete_branch,
git_ext::cmd_git_delete_remote_branch,
git_ext::cmd_git_merge_branch,
git_ext::cmd_git_rename_branch,
git_ext::cmd_git_status,
git_ext::cmd_git_log,
git_ext::cmd_git_initialize,
git_ext::cmd_git_clone,
git_ext::cmd_git_commit,
git_ext::cmd_git_fetch_all,
git_ext::cmd_git_push,
@@ -1664,6 +1769,13 @@ pub fn run() {
git_ext::cmd_git_add_remote,
git_ext::cmd_git_rm_remote,
//
// Plugin commands
plugins_ext::cmd_plugins_search,
plugins_ext::cmd_plugins_install,
plugins_ext::cmd_plugins_uninstall,
plugins_ext::cmd_plugins_updates,
plugins_ext::cmd_plugins_update_all,
//
// WebSocket commands
ws_ext::cmd_ws_upsert_request,
ws_ext::cmd_ws_duplicate_request,

View File

@@ -1,5 +1,6 @@
use crate::error::Result;
use crate::history::get_or_upsert_launch_info;
use crate::models_ext::QueryManagerExt;
use chrono::{DateTime, Utc};
use log::{debug, info};
use reqwest::Method;
@@ -8,9 +9,8 @@ use std::time::Instant;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
// Check for updates every hour
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;

View File

@@ -1,5 +1,7 @@
use crate::error::Result;
use crate::http_request::send_http_request_with_context;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::{render_grpc_request, render_http_request, render_json_value};
use crate::window::{CreateWindowConfig, create_window};
use crate::{
@@ -14,11 +16,8 @@ use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak_crypto::manager::EncryptionManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use crate::models_ext::BlobManagerExt;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{
@@ -32,6 +31,7 @@ use yaak_plugins::events::{
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_templates::{RenderErrorBehavior, RenderOptions};
pub(crate) async fn handle_plugin_event<R: Runtime>(
@@ -57,6 +57,10 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = app_handle
.db()
@@ -166,7 +170,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
@@ -187,7 +196,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
@@ -218,7 +232,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))

View File

@@ -17,7 +17,7 @@ use tauri::path::BaseDirectory;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
generate_handler, is_dev,
is_dev,
};
use tokio::sync::Mutex;
use ts_rs::TS;
@@ -132,7 +132,7 @@ impl PluginUpdater {
// ============================================================================
#[command]
pub(crate) async fn cmd_plugins_search<R: Runtime>(
pub async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {
@@ -141,7 +141,7 @@ pub(crate) async fn cmd_plugins_search<R: Runtime>(
}
#[command]
pub(crate) async fn cmd_plugins_install<R: Runtime>(
pub async fn cmd_plugins_install<R: Runtime>(
window: WebviewWindow<R>,
name: &str,
version: Option<String>,
@@ -163,7 +163,7 @@ pub(crate) async fn cmd_plugins_install<R: Runtime>(
}
#[command]
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
pub async fn cmd_plugins_uninstall<R: Runtime>(
plugin_id: &str,
window: WebviewWindow<R>,
) -> Result<Plugin> {
@@ -174,7 +174,7 @@ pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
}
#[command]
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
pub async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let http_client = yaak_api_client(&app_handle)?;
@@ -183,7 +183,7 @@ pub(crate) async fn cmd_plugins_updates<R: Runtime>(
}
#[command]
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
pub async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let http_client = yaak_api_client(window.app_handle())?;
@@ -233,13 +233,6 @@ pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-plugins")
.invoke_handler(generate_handler![
cmd_plugins_search,
cmd_plugins_install,
cmd_plugins_uninstall,
cmd_plugins_updates,
cmd_plugins_update_all
])
.setup(|app_handle, _| {
// Resolve paths for plugin manager
let vendored_plugin_dir = app_handle

View File

@@ -3,6 +3,7 @@ use std::path::PathBuf;
use std::time::{Duration, Instant};
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};
@@ -11,7 +12,6 @@ use tauri_plugin_updater::{Update, UpdaterExt};
use tokio::task::block_in_place;
use tokio::time::sleep;
use ts_rs::TS;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;

View File

@@ -1,18 +1,18 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::import::import_data;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::{info, warn};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;
pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,
@@ -55,7 +55,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
&plugin_context,
name,
version,
).await?;
)
.await?;
app_handle.emit(
"show_toast",
ShowToastRequest {

View File

@@ -1,4 +1,5 @@
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
@@ -8,7 +9,6 @@ use tauri::{
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;
use crate::models_ext::QueryManagerExt;
const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;

View File

@@ -1,9 +1,9 @@
//! WebSocket Tauri command wrappers
//! These wrap the core yaak-ws functionality for Tauri IPC.
use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use http::HeaderMap;
use log::{debug, info, warn};
use std::str::FromStr;
@@ -56,9 +56,10 @@ pub async fn cmd_ws_delete_request<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle
.db()
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
Ok(app_handle.db().delete_websocket_request_by_id(
request_id,
&UpdateSource::from_window_label(window.label()),
)?)
}
#[command]
@@ -67,12 +68,10 @@ pub async fn cmd_ws_delete_connection<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketConnection> {
Ok(app_handle
.db()
.delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
Ok(app_handle.db().delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
}
#[command]
@@ -296,8 +295,10 @@ pub async fn cmd_ws_connect<R: Runtime>(
)
.await?;
for header in plugin_result.set_headers.unwrap_or_default() {
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
{
match (
http::HeaderName::from_str(&header.name),
HeaderValue::from_str(&header.value),
) {
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}

View File

@@ -44,8 +44,8 @@
"vendored/protoc/include",
"vendored/plugins",
"vendored/plugin-runtime",
"vendored/node/yaaknode",
"vendored/protoc/yaakprotoc"
"vendored/node/yaaknode*",
"vendored/protoc/yaakprotoc*"
]
}
}

View File

@@ -8,10 +8,10 @@ use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;
/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.

View File

@@ -6,3 +6,4 @@ publish = false
[dependencies]
serde_json = { workspace = true }
tokio = { workspace = true, features = ["process"] }

View File

@@ -0,0 +1,16 @@
use std::ffi::OsStr;
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
#[allow(unused_mut)]
let mut cmd = tokio::process::Command::new(program);
#[cfg(target_os = "windows")]
{
use std::os::windows::process::CommandExt;
cmd.creation_flags(CREATE_NO_WINDOW);
}
cmd
}

View File

@@ -1,2 +1,3 @@
pub mod command;
pub mod platform;
pub mod serde;

View File

@@ -11,3 +11,7 @@ export function revealWorkspaceKey(workspaceId: string) {
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
return invoke<void>('cmd_set_workspace_key', args);
}
export function disableEncryption(workspaceId: string) {
return invoke<void>('cmd_disable_encryption', { workspaceId });
}

View File

@@ -115,6 +115,35 @@ impl EncryptionManager {
self.set_workspace_key(workspace_id, &wkey)
}
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
info!("Disabling encryption for {workspace_id}");
self.query_manager.with_tx::<(), Error>(|tx| {
let workspace = tx.get_workspace(workspace_id)?;
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
// Clear encryption challenge on workspace
tx.upsert_workspace(
&Workspace { encryption_key_challenge: None, ..workspace },
&UpdateSource::Background,
)?;
// Clear encryption key on workspace meta
tx.upsert_workspace_meta(
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
&UpdateSource::Background,
)?;
Ok(())
})?;
// Remove from cache
let mut cache = self.cached_workspace_keys.lock().unwrap();
cache.remove(workspace_id);
Ok(())
}
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
{
let cache = self.cached_workspace_keys.lock().unwrap();

View File

@@ -12,7 +12,9 @@ serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
serde_yaml = "0.9.34"
thiserror = { workspace = true }
tokio = { workspace = true, features = ["io-util"] }
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
url = "2"
yaak-common = { workspace = true }
yaak-models = { workspace = true }
yaak-sync = { workspace = true }

View File

@@ -1,6 +1,10 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { SyncModel } from "./gen_models";
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
export type GitAuthor = { name: string | null, email: string | null, };
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };

View File

@@ -1,5 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -18,4 +20,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

View File

@@ -3,7 +3,7 @@ import { invoke } from '@tauri-apps/api/core';
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
import { queryClient } from '@yaakapp/app/lib/queryClient';
import { useMemo } from 'react';
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
export * from './bindings/gen_git';
@@ -59,7 +59,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
@@ -90,21 +89,31 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
onSuccess,
}),
branch: createFastMutation<void, string, { branch: string }>({
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
mutationKey: ['git', 'branch', dir],
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
onSuccess,
}),
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
mergeBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'merge', dir],
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
onSuccess,
}),
deleteBranch: createFastMutation<void, string, { branch: string }>({
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
mutationKey: ['git', 'delete-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
onSuccess,
}),
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
mutationKey: ['git', 'delete-remote-branch', dir],
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
onSuccess,
}),
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
mutationKey: ['git', 'rename-branch', dir],
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
onSuccess,
}),
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
mutationKey: ['git', 'checkout', dir],
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
@@ -144,7 +153,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
if (creds == null) throw new Error('Canceled');
await invoke('cmd_git_add_credential', {
dir,
remoteUrl: result.url,
username: creds.username,
password: creds.password,
@@ -166,3 +174,28 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
async function getRemotes(dir: string) {
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
}
/**
* Clone a git repository, prompting for credentials if needed.
*/
export async function gitClone(
url: string,
dir: string,
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
): Promise<CloneResult> {
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
if (result.type !== 'needs_credentials') return result;
// Prompt for credentials
const creds = await promptCredentials({ url: result.url, error: result.error });
if (creds == null) return {type: 'cancelled'};
// Store credentials and retry
await invoke('cmd_git_add_credential', {
remoteUrl: result.url,
username: creds.username,
password: creds.password,
});
return invoke<CloneResult>('cmd_git_clone', { url, dir });
}

View File

@@ -1,38 +1,30 @@
use crate::error::Error::GitNotFound;
use crate::error::Result;
use std::path::Path;
use std::process::{Command, Stdio};
use std::process::Stdio;
use tokio::process::Command;
use yaak_common::command::new_xplatform_command;
use crate::error::Error::GitNotFound;
#[cfg(target_os = "windows")]
use std::os::windows::process::CommandExt;
/// Create a git command that runs in the specified directory
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
let mut cmd = new_binary_command_global().await?;
cmd.arg("-C").arg(dir);
Ok(cmd)
}
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
/// Create a git command without a specific directory (for global operations)
pub(crate) async fn new_binary_command_global() -> Result<Command> {
// 1. Probe that `git` exists and is runnable
let mut probe = Command::new("git");
let mut probe = new_xplatform_command("git");
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
#[cfg(target_os = "windows")]
{
probe.creation_flags(CREATE_NO_WINDOW);
}
let status = probe.status().map_err(|_| GitNotFound)?;
let status = probe.status().await.map_err(|_| GitNotFound)?;
if !status.success() {
return Err(GitNotFound);
}
// 2. Build the reusable git command
let mut cmd = Command::new("git");
cmd.arg("-C").arg(dir);
#[cfg(target_os = "windows")]
{
cmd.creation_flags(CREATE_NO_WINDOW);
}
let cmd = new_xplatform_command("git");
Ok(cmd)
}

View File

@@ -1,99 +1,153 @@
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use crate::merge::do_merge;
use crate::repository::open_repo;
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
use git2::BranchType;
use git2::build::CheckoutBuilder;
use log::info;
use std::path::Path;
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
if branch_name.starts_with("origin/") {
return git_checkout_remote_branch(dir, branch_name, force);
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum BranchDeleteResult {
Success { message: String },
NotFullyMerged,
}
let repo = open_repo(dir)?;
let branch = get_branch_by_name(&repo, branch_name)?;
let branch_ref = branch.into_reference();
let branch_tree = branch_ref.peel_to_tree()?;
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let mut options = CheckoutBuilder::default();
let mut args = vec!["checkout"];
if force {
options.force();
args.push("--force");
}
args.push(branch_name);
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
repo.set_head(branch_ref.name().unwrap())?;
let out = new_binary_command(dir)
.await?
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
}
Ok(branch_name.to_string())
}
pub(crate) fn git_checkout_remote_branch(
dir: &Path,
branch_name: &str,
force: bool,
) -> Result<String> {
let branch_name = branch_name.trim_start_matches("origin/");
let repo = open_repo(dir)?;
let refname = format!("refs/remotes/origin/{}", branch_name);
let remote_ref = repo.find_reference(&refname)?;
let commit = remote_ref.peel_to_commit()?;
let mut new_branch = repo.branch(branch_name, &commit, false)?;
let upstream_name = format!("origin/{}", branch_name);
new_branch.set_upstream(Some(&upstream_name))?;
git_checkout_branch(dir, branch_name, force)
}
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let head = match repo.head() {
Ok(h) => h,
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
let msg = "Cannot create branch when there are no commits";
return Err(GenericError(msg.into()));
}
Err(e) => return Err(e.into()),
};
let head = head.peel_to_commit()?;
repo.branch(name, &head, false)?;
Ok(())
}
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
let repo = open_repo(dir)?;
let mut branch = get_branch_by_name(&repo, name)?;
if branch.is_head() {
info!("Deleting head branch");
let branches = repo.branches(Some(BranchType::Local))?;
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
let other_branch = match other_branch {
None => return Err(GenericError("Cannot delete only branch".into())),
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
};
git_checkout_branch(dir, &other_branch, true)?;
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
let mut cmd = new_binary_command(dir).await?;
cmd.arg("branch").arg(name);
if let Some(base_branch) = base {
cmd.arg(base_branch);
}
branch.delete()?;
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
}
Ok(())
}
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
let repo = open_repo(dir)?;
let local_branch = get_current_branch(&repo)?.unwrap();
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
let mut cmd = new_binary_command(dir).await?;
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
let out =
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
do_merge(&repo, &local_branch, &commit_to_merge)?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
return Ok(BranchDeleteResult::NotFullyMerged);
}
if !out.status.success() {
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
}
Ok(BranchDeleteResult::Success { message: combined })
}
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["merge", name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
// Check for merge conflicts
if combined.to_lowercase().contains("conflict") {
return Err(GenericError(
"Merge conflicts detected. Please resolve them manually.".to_string(),
));
}
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
}
Ok(())
}
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
// Remote branch names come in as "origin/branch-name", so strip the prefix to get the plain branch name
let branch_name = name.trim_start_matches("origin/");
let out = new_binary_command(dir)
.await?
.args(["push", "origin", "--delete", branch_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
}
Ok(())
}
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["branch", "-m", old_name, new_name])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
if !out.status.success() {
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
}
Ok(())
}
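A minimal sketch (not part of the diff) of how a caller might react to the new BranchDeleteResult, retrying with force only after the not-fully-merged case. git_delete_branch, BranchDeleteResult, and the crate's Result are from this diff; confirm_force stands in for whatever confirmation UI the caller has.

use crate::error::Result;
use std::path::Path;

async fn delete_branch_interactive(dir: &Path, name: &str) -> Result<()> {
    match git_delete_branch(dir, name, false).await? {
        BranchDeleteResult::Success { message } => log::info!("{}", message.trim()),
        BranchDeleteResult::NotFullyMerged => {
            // confirm_force is hypothetical; in the app this maps to a user prompt.
            if confirm_force(name) {
                git_delete_branch(dir, name, true).await?;
            }
        }
    }
    Ok(())
}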

View File

@@ -0,0 +1,53 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
Success,
Cancelled,
NeedsCredentials { url: String, error: Option<String> },
}
pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
fs::create_dir_all(parent)
.map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
let mut cmd = new_binary_command(parent).await?;
cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");
let out =
cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);
let combined = format!("{}{}", stdout, stderr);
let combined_lower = combined.to_lowercase();
info!("Cloned status={}: {combined}", out.status);
if !out.status.success() {
// Check for credentials error
if combined_lower.contains("could not read") {
return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
}
if combined_lower.contains("unable to access")
|| combined_lower.contains("authentication failed")
{
return Ok(CloneResult::NeedsCredentials {
url: url.to_string(),
error: Some(combined.to_string()),
});
}
return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
}
Ok(CloneResult::Success)
}
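A brief sketch (not part of the diff) of the two-step clone flow on the Rust side, matching the TypeScript gitClone helper above: try the clone, and on NeedsCredentials store a credential and retry once. git_clone, git_add_credential, CloneResult, and the crate's Result are from this diff; the prompt callback is an assumption.

use crate::error::Result;
use std::path::Path;

async fn clone_with_prompt(
    url: &str,
    dir: &Path,
    // Hypothetical prompt returning (username, password), or None if cancelled.
    prompt: impl Fn(&str, Option<&str>) -> Option<(String, String)>,
) -> Result<CloneResult> {
    match git_clone(url, dir).await? {
        CloneResult::NeedsCredentials { url, error } => {
            let Some((username, password)) = prompt(&url, error.as_deref()) else {
                return Ok(CloneResult::Cancelled);
            };
            // Store the credential via `git credential approve`, then retry once.
            git_add_credential(&url, &username, &password).await?;
            git_clone(&url, dir).await
        }
        other => Ok(other),
    }
}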

View File

@@ -3,8 +3,9 @@ use crate::error::Error::GenericError;
use log::info;
use std::path::Path;
pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;
pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
let out =
new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,24 +1,19 @@
use crate::binary::new_binary_command;
use crate::binary::new_binary_command_global;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::io::Write;
use std::path::Path;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use url::Url;
pub async fn git_add_credential(
dir: &Path,
remote_url: &str,
username: &str,
password: &str,
) -> Result<()> {
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
let url = Url::parse(remote_url)
.map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
let protocol = url.scheme();
let host = url.host_str().unwrap();
let path = Some(url.path());
let mut child = new_binary_command(dir)?
let mut child = new_binary_command_global()
.await?
.args(["credential", "approve"])
.stdin(Stdio::piped())
.stdout(Stdio::null())
@@ -26,19 +21,21 @@ pub async fn git_add_credential(
{
let stdin = child.stdin.as_mut().unwrap();
writeln!(stdin, "protocol={}", protocol)?;
writeln!(stdin, "host={}", host)?;
stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
if let Some(path) = path {
if !path.is_empty() {
writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
stdin
.write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
.await?;
}
}
writeln!(stdin, "username={}", username)?;
writeln!(stdin, "password={}", password)?;
writeln!(stdin)?; // blank line terminator
stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
stdin.write_all(b"\n").await?; // blank line terminator
}
let status = child.wait()?;
let status = child.wait().await?;
if !status.success() {
return Err(GenericError("Failed to approve git credential".to_string()));
}
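For reference (illustrative values, not from the diff), the payload this writes to `git credential approve` for a remote like https://example.com/org/repo.git with username alice would look like:

protocol=https
host=example.com
path=org/repo.git
username=alice
password=s3cret

followed by a trailing blank line to terminate the entry.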

View File

@@ -3,10 +3,12 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;
pub fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)?
pub async fn git_fetch_all(dir: &Path) -> Result<()> {
let out = new_binary_command(dir)
.await?
.args(["fetch", "--all", "--prune", "--tags"])
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);
let stderr = String::from_utf8_lossy(&out.stderr);

View File

@@ -1,13 +1,14 @@
mod add;
mod binary;
mod branch;
mod clone;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
mod merge;
mod pull;
mod push;
mod remotes;
@@ -18,7 +19,11 @@ mod util;
// Re-export all git functions for external use
pub use add::git_add;
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
pub use branch::{
BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
git_delete_remote_branch, git_merge_branch, git_rename_branch,
};
pub use clone::{CloneResult, git_clone};
pub use commit::git_commit;
pub use credential::git_add_credential;
pub use fetch::git_fetch_all;

View File

@@ -1,135 +0,0 @@
use crate::error::Error::MergeConflicts;
use crate::util::bytes_to_string;
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
use log::{debug, info};
pub(crate) fn do_merge(
repo: &Repository,
local_branch: &Branch,
commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
debug!("Merging remote branches");
let analysis = repo.merge_analysis(&[&commit_to_merge])?;
if analysis.0.is_fast_forward() {
let refname = bytes_to_string(local_branch.get().name_bytes())?;
match repo.find_reference(&refname) {
Ok(mut r) => {
merge_fast_forward(repo, &mut r, &commit_to_merge)?;
}
Err(_) => {
// The branch doesn't exist, so set the reference to the commit directly. Usually
// this is because you are pulling into an empty repository.
repo.reference(
&refname,
commit_to_merge.id(),
true,
&format!("Setting {} to {}", refname, commit_to_merge.id()),
)?;
repo.set_head(&refname)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
.allow_conflicts(true)
.conflict_style_merge(true)
.force(),
))?;
}
};
} else if analysis.0.is_normal() {
let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
merge_normal(repo, &head_commit, commit_to_merge)?;
} else {
debug!("Skipping merge. Nothing to do")
}
Ok(())
}
pub(crate) fn merge_fast_forward(
repo: &Repository,
local_reference: &mut Reference,
remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing fast forward");
let name = match local_reference.name() {
Some(s) => s.to_string(),
None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
};
let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
local_reference.set_target(remote_commit.id(), &msg)?;
repo.set_head(&name)?;
repo.checkout_head(Some(
git2::build::CheckoutBuilder::default()
// For some reason, the force is required to make the working directory actually get
// updated. I suspect we should be adding some logic to handle dirty working directory
// states, but this is just an example so maybe not.
.force(),
))?;
Ok(())
}
pub(crate) fn merge_normal(
repo: &Repository,
local: &AnnotatedCommit,
remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
info!("Performing normal merge");
let local_tree = repo.find_commit(local.id())?.tree()?;
let remote_tree = repo.find_commit(remote.id())?.tree()?;
let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;
let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;
if idx.has_conflicts() {
let conflicts = idx.conflicts()?;
for conflict in conflicts {
if let Ok(conflict) = conflict {
print_conflict(&conflict);
}
}
return Err(MergeConflicts);
}
let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
// now create the merge commit
let msg = format!("Merge: {} into {}", remote.id(), local.id());
let sig = repo.signature()?;
let local_commit = repo.find_commit(local.id())?;
let remote_commit = repo.find_commit(remote.id())?;
// Do our merge commit and set current branch head to that commit.
let _merge_commit = repo.commit(
Some("HEAD"),
&sig,
&sig,
&msg,
&result_tree,
&[&local_commit, &remote_commit],
)?;
// Set working tree to match head.
repo.checkout_head(None)?;
Ok(())
}
fn print_conflict(conflict: &git2::IndexConflict) {
let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
let ours = conflict.our.as_ref().map(path_from_index_entry);
let theirs = conflict.their.as_ref().map(path_from_index_entry);
println!("Conflict detected:");
if let Some(path) = ancestor {
println!(" Common ancestor: {:?}", path);
}
if let Some(path) = ours {
println!(" Ours: {:?}", path);
}
if let Some(path) = theirs {
println!(" Theirs: {:?}", path);
}
}
fn path_from_index_entry(entry: &IndexEntry) -> String {
String::from_utf8_lossy(entry.path.as_slice()).into_owned()
}

View File

@@ -17,17 +17,25 @@ pub enum PullResult {
NeedsCredentials { url: String, error: Option<String> },
}
pub fn git_pull(dir: &Path) -> Result<PullResult> {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
let out = new_binary_command(dir)?
let out = new_binary_command(dir)
.await?
.args(["pull", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);

View File

@@ -17,17 +17,25 @@ pub enum PushResult {
NeedsCredentials { url: String, error: Option<String> },
}
pub fn git_push(dir: &Path) -> Result<PushResult> {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
pub async fn git_push(dir: &Path) -> Result<PushResult> {
// Extract all git2 data before any await points (git2 types are not Send)
let (branch_name, remote_name, remote_url) = {
let repo = open_repo(dir)?;
let branch_name = get_current_branch_name(&repo)?;
let remote = get_default_remote_for_push_in_repo(&repo)?;
let remote_name =
remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
let remote_url =
remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
(branch_name, remote_name, remote_url)
};
let out = new_binary_command(dir)?
let out = new_binary_command(dir)
.await?
.args(["push", &remote_name, &branch_name])
.env("GIT_TERMINAL_PROMPT", "0")
.output()
.await
.map_err(|e| GenericError(format!("failed to run git push: {e}")))?;
let stdout = String::from_utf8_lossy(&out.stdout);

View File

@@ -47,10 +47,6 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
Ok(branches)
}
pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
Ok(repo.find_branch(name, BranchType::Local)?)
}
pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
Ok(String::from_utf8(bytes.to_vec())?)
}

View File

@@ -22,5 +22,6 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-common = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"

View File

@@ -115,14 +115,18 @@ impl GrpcConnection {
Ok(client.unary(req, path, codec).await?)
}
pub async fn streaming(
pub async fn streaming<F>(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
) -> Result<Response<Streaming<DynamicMessage>>> {
on_message: F,
) -> Result<Response<Streaming<DynamicMessage>>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -131,31 +135,39 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
}
})
})
.filter_map(|x| x)
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -169,14 +181,18 @@ impl GrpcConnection {
Ok(client.streaming(req, path, codec).await?)
}
pub async fn client_streaming(
pub async fn client_streaming<F>(
&self,
service: &str,
method: &str,
stream: ReceiverStream<String>,
metadata: &BTreeMap<String, String>,
client_cert: Option<ClientCertificateConfig>,
) -> Result<Response<DynamicMessage>> {
on_message: F,
) -> Result<Response<DynamicMessage>>
where
F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
{
let method = &self.method(&service, &method).await?;
let mapped_stream = {
let input_message = method.input();
@@ -185,31 +201,39 @@ impl GrpcConnection {
let md = metadata.clone();
let use_reflection = self.use_reflection.clone();
let client_cert = client_cert.clone();
stream.filter_map(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
tokio::runtime::Handle::current().block_on(async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
stream
.then(move |json| {
let pool = pool.clone();
let uri = uri.clone();
let input_message = input_message.clone();
let md = md.clone();
let use_reflection = use_reflection.clone();
let client_cert = client_cert.clone();
let on_message = on_message.clone();
let json_clone = json.clone();
async move {
if use_reflection {
if let Err(e) =
reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
{
warn!("Failed to resolve Any types: {e}");
}
}
}
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => Some(m),
Err(e) => {
warn!("Failed to deserialize message: {e}");
None
let mut de = Deserializer::from_str(&json);
match DynamicMessage::deserialize(input_message, &mut de) {
Ok(m) => {
on_message(Ok(json_clone));
Some(m)
}
Err(e) => {
warn!("Failed to deserialize message: {e}");
on_message(Err(e.to_string()));
None
}
}
}
})
})
.filter_map(|x| x)
};
let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -316,10 +340,9 @@ impl GrpcHandle {
metadata: &BTreeMap<String, String>,
validate_certificates: bool,
client_cert: Option<ClientCertificateConfig>,
skip_cache: bool,
) -> Result<Vec<ServiceDefinition>> {
// Ensure we have a pool; reflect only if missing
if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
if self.get_pool(id, uri, proto_files).is_none() {
info!("Reflecting gRPC services for {} at {}", id, uri);
self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
.await?;
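A short sketch (not part of the diff) of supplying the new on_message callback when starting a server-streaming call, run inside an async context with an established GrpcConnection (here called connection). The streaming signature and callback type are from this diff; the service/method names and channel wiring are assumptions.

let (msg_tx, msg_rx) = tokio::sync::mpsc::channel::<String>(16);
// msg_tx is handed to whatever pushes request messages as JSON strings.
let outgoing = tokio_stream::wrappers::ReceiverStream::new(msg_rx);
let metadata = std::collections::BTreeMap::new();
let response = connection
    .streaming(
        "greeter.Greeter",
        "SayHello",
        outgoing,
        &metadata,
        None, // no client certificate
        // Called once per outgoing JSON message: Ok(json) after it deserialized
        // into the request type, Err(e) if it was dropped.
        |result| match result {
            Ok(json) => log::info!("sent {json}"),
            Err(err) => log::warn!("bad message: {err}"),
        },
    )
    .await?;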

View File

@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use tokio::fs;
use tokio::process::Command;
use tokio::sync::RwLock;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
use yaak_common::command::new_xplatform_command;
use yaak_tls::ClientCertificateConfig;
pub async fn fill_pool_from_files(
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(
info!("Invoking protoc with {}", args.join(" "));
let out = Command::new(&config.protoc_bin_path)
.args(&args)
.output()
.await
.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
let mut cmd = new_xplatform_command(&config.protoc_bin_path);
cmd.args(&args);
let out =
cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
if !out.status.success() {
return Err(GenericError(format!(

View File

@@ -2,6 +2,8 @@ use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::{debug, info, warn};
use reqwest::{Client, Proxy, redirect};
use std::sync::Arc;
use yaak_models::models::DnsOverride;
use yaak_tls::{ClientCertificateConfig, get_tls_config};
#[derive(Clone)]
@@ -28,10 +30,14 @@ pub struct HttpConnectionOptions {
pub validate_certificates: bool,
pub proxy: HttpConnectionProxySetting,
pub client_certificate: Option<ClientCertificateConfig>,
pub dns_overrides: Vec<DnsOverride>,
}
impl HttpConnectionOptions {
pub(crate) fn build_client(&self) -> Result<Client> {
/// Build a reqwest Client and return it along with the DNS resolver.
/// The resolver is returned separately so it can be configured per-request
/// to emit DNS timing events to the appropriate channel.
pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
let mut client = Client::builder()
.connection_verbose(true)
.redirect(redirect::Policy::none())
@@ -40,15 +46,19 @@ impl HttpConnectionOptions {
.no_brotli()
.no_deflate()
.referer(false)
.tls_info(true);
.tls_info(true)
// Disable connection pooling to ensure DNS resolution happens on each request
// This is needed so we can emit DNS timing events for each request
.pool_max_idle_per_host(0);
// Configure TLS with optional client certificate
let config =
get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
client = client.use_preconfigured_tls(config);
// Configure DNS resolver
client = client.dns_resolver(LocalhostResolver::new());
// Configure DNS resolver - keep a reference to configure per-request
let resolver = LocalhostResolver::new(self.dns_overrides.clone());
client = client.dns_resolver(resolver.clone());
// Configure proxy
match self.proxy.clone() {
@@ -69,7 +79,7 @@ impl HttpConnectionOptions {
self.client_certificate.is_some()
);
Ok(client.build()?)
Ok((client.build()?, resolver))
}
}

View File

@@ -1,53 +1,185 @@
use crate::sender::HttpResponseEvent;
use hyper_util::client::legacy::connect::dns::{
GaiResolver as HyperGaiResolver, Name as HyperName,
};
use log::info;
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
use std::collections::HashMap;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{RwLock, mpsc};
use tower_service::Service;
use yaak_models::models::DnsOverride;
/// Stores resolved addresses for a hostname override
#[derive(Clone)]
pub struct ResolvedOverride {
pub ipv4: Vec<Ipv4Addr>,
pub ipv6: Vec<Ipv6Addr>,
}
#[derive(Clone)]
pub struct LocalhostResolver {
fallback: HyperGaiResolver,
event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
overrides: Arc<HashMap<String, ResolvedOverride>>,
}
impl LocalhostResolver {
pub fn new() -> Arc<Self> {
pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
let resolver = HyperGaiResolver::new();
Arc::new(Self { fallback: resolver })
// Pre-parse DNS overrides into a lookup map
let mut overrides = HashMap::new();
for o in dns_overrides {
if !o.enabled {
continue;
}
let hostname = o.hostname.to_lowercase();
let ipv4: Vec<Ipv4Addr> =
o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();
let ipv6: Vec<Ipv6Addr> =
o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();
// Only add if at least one address is valid
if !ipv4.is_empty() || !ipv6.is_empty() {
overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
}
}
Arc::new(Self {
fallback: resolver,
event_tx: Arc::new(RwLock::new(None)),
overrides: Arc::new(overrides),
})
}
/// Set the event sender for the current request.
/// This should be called before each request to direct DNS events
/// to the appropriate channel.
pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
let mut guard = self.event_tx.write().await;
*guard = tx;
}
}
impl Resolve for LocalhostResolver {
fn resolve(&self, name: Name) -> Resolving {
let host = name.as_str().to_lowercase();
let event_tx = self.event_tx.clone();
let overrides = self.overrides.clone();
info!("DNS resolve called for: {}", host);
// Check for DNS override first
if let Some(resolved) = overrides.get(&host) {
log::debug!("DNS override found for: {}", host);
let hostname = host.clone();
let mut addrs: Vec<SocketAddr> = Vec::new();
// Add IPv4 addresses
for ip in &resolved.ipv4 {
addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
}
// Add IPv6 addresses
for ip in &resolved.ipv6 {
addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
}
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for override
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: true,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Check for .localhost suffix
let is_localhost = host.ends_with(".localhost");
if is_localhost {
let hostname = host.clone();
// Port 0 is fine; reqwest replaces it with the URL's explicit
// port or the schemes default (80/443, etc.).
// (See docs note below.)
// port or the scheme's default (80/443, etc.).
let addrs: Vec<SocketAddr> = vec![
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
];
let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();
return Box::pin(async move {
// Emit DNS event for localhost resolution
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration: 0,
overridden: false,
})
.await;
}
Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
});
}
// Fall back to system DNS
let mut fallback = self.fallback.clone();
let name_str = name.as_str().to_string();
let hostname = host.clone();
Box::pin(async move {
match HyperName::from_str(&name_str) {
Ok(n) => fallback
.call(n)
.await
.map(|addrs| Box::new(addrs) as Addrs)
.map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
let start = Instant::now();
let result = match HyperName::from_str(&name_str) {
Ok(n) => fallback.call(n).await,
Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
};
let duration = start.elapsed().as_millis() as u64;
match result {
Ok(addrs) => {
// Collect addresses for event emission
let addr_vec: Vec<SocketAddr> = addrs.collect();
let addresses: Vec<String> =
addr_vec.iter().map(|a| a.ip().to_string()).collect();
// Emit DNS event
let guard = event_tx.read().await;
if let Some(tx) = guard.as_ref() {
let _ = tx
.send(HttpResponseEvent::DnsResolved {
hostname,
addresses,
duration,
overridden: false,
})
.await;
}
Ok(Box::new(addr_vec.into_iter()) as Addrs)
}
Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
}
})
}

View File

@@ -1,4 +1,5 @@
use crate::client::HttpConnectionOptions;
use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::info;
use reqwest::Client;
@@ -7,8 +8,15 @@ use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
/// A cached HTTP client along with its DNS resolver.
/// The resolver is needed to set the event sender per-request.
pub struct CachedClient {
pub client: Client,
pub resolver: Arc<LocalhostResolver>,
}
pub struct HttpConnectionManager {
connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
ttl: Duration,
}
@@ -20,21 +28,26 @@ impl HttpConnectionManager {
}
}
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
let mut connections = self.connections.write().await;
let id = opt.id.clone();
// Clean old connections
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
if let Some((c, last_used)) = connections.get_mut(&id) {
if let Some((cached, last_used)) = connections.get_mut(&id) {
info!("Re-using HTTP client {id}");
*last_used = Instant::now();
return Ok(c.clone());
return Ok(CachedClient {
client: cached.client.clone(),
resolver: cached.resolver.clone(),
});
}
let c = opt.build_client()?;
connections.insert(id.into(), (c.clone(), Instant::now()));
Ok(c)
let (client, resolver) = opt.build_client()?;
let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
connections.insert(id.into(), (cached, Instant::now()));
Ok(CachedClient { client, resolver })
}
}
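A minimal sketch (not part of the diff) of the per-request flow the comments describe: fetch a CachedClient, point its resolver at this request's event channel, send, then detach. HttpConnectionManager, CachedClient, set_event_sender, HttpResponseEvent, and the crate's Result are from this diff; the URL and channel capacity are illustrative.

use crate::error::Result;

async fn send_with_dns_events(
    manager: &HttpConnectionManager,
    opts: &HttpConnectionOptions,
) -> Result<()> {
    let CachedClient { client, resolver } = manager.get_client(opts).await?;
    // One channel per request so DNS timing events land in this response's timeline.
    let (event_tx, _event_rx) = tokio::sync::mpsc::channel::<HttpResponseEvent>(64);
    resolver.set_event_sender(Some(event_tx)).await;
    let _response = client.get("https://example.com").send().await?;
    // Detach the sender so the cached client doesn't keep routing events here.
    resolver.set_event_sender(None).await;
    Ok(())
}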

View File

@@ -31,7 +31,14 @@ pub enum HttpResponseEvent {
},
SendUrl {
method: String,
scheme: String,
username: String,
password: String,
host: String,
port: u16,
path: String,
query: String,
fragment: String,
},
ReceiveUrl {
version: Version,
@@ -45,6 +52,12 @@ pub enum HttpResponseEvent {
ChunkReceived {
bytes: usize,
},
DnsResolved {
hostname: String,
addresses: Vec<String>,
duration: u64,
overridden: bool,
},
}
impl Display for HttpResponseEvent {
@@ -59,7 +72,16 @@ impl Display for HttpResponseEvent {
};
write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
}
HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
let auth_str = if username.is_empty() && password.is_empty() {
String::new()
} else {
format!("{}:{}@", username, password)
};
let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
}
HttpResponseEvent::ReceiveUrl { version, status } => {
write!(f, "< {} {}", version_to_str(version), status)
}
@@ -67,6 +89,19 @@ impl Display for HttpResponseEvent {
HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
if *overridden {
write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
} else {
write!(
f,
"* DNS resolved {} to {} ({}ms)",
hostname,
addresses.join(", "),
duration
)
}
}
}
}
}
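For example (illustrative request), with the new fields a GET to https://example.com/users?limit=10 renders in the Timeline as "> GET https://example.com:443/users?limit=10", the port filled in from the scheme's default, rather than only "> GET /users" as before.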
@@ -85,7 +120,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
RedirectBehavior::DropBody => "drop_body".to_string(),
},
},
HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
}
HttpResponseEvent::ReceiveUrl { version, status } => {
D::ReceiveUrl { version: format!("{:?}", version), status }
}
@@ -93,6 +130,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
D::DnsResolved { hostname, addresses, duration, overridden }
}
}
}
}
@@ -354,6 +394,9 @@ impl HttpSender for ReqwestSender {
// Add headers
for header in request.headers {
if header.0.is_empty() {
continue;
}
req_builder = req_builder.header(&header.0, &header.1);
}
@@ -390,8 +433,15 @@ impl HttpSender for ReqwestSender {
));
send_event(HttpResponseEvent::SendUrl {
path: sendable_req.url().path().to_string(),
method: sendable_req.method().to_string(),
scheme: sendable_req.url().scheme().to_string(),
username: sendable_req.url().username().to_string(),
password: sendable_req.url().password().unwrap_or_default().to_string(),
host: sendable_req.url().host_str().unwrap_or_default().to_string(),
port: sendable_req.url().port_or_known_default().unwrap_or(0),
path: sendable_req.url().path().to_string(),
query: sendable_req.url().query().unwrap_or_default().to_string(),
fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
});
let mut request_headers = Vec::new();

View File

@@ -342,7 +342,8 @@ mod tests {
#[tokio::test]
async fn test_transaction_single_redirect() {
let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
let redirect_headers =
vec![("Location".to_string(), "https://example.com/new".to_string())];
let responses = vec![
MockResponse { status: 302, headers: redirect_headers, body: vec![] },
@@ -373,7 +374,8 @@ mod tests {
#[tokio::test]
async fn test_transaction_max_redirects_exceeded() {
let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
let redirect_headers =
vec![("Location".to_string(), "https://example.com/loop".to_string())];
// Create more redirects than allowed
let responses: Vec<MockResponse> = (0..12)
@@ -525,7 +527,8 @@ mod tests {
_request: SendableHttpRequest,
_event_tx: mpsc::Sender<HttpResponseEvent>,
) -> Result<HttpResponse> {
let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let headers =
vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =
Box::pin(std::io::Cursor::new(vec![]));
@@ -584,7 +587,10 @@ mod tests {
let headers = vec![
("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
(
"set-cookie".to_string(),
"preferences=dark; Path=/; Max-Age=86400".to_string(),
),
];
let body_stream: Pin<Box<dyn AsyncRead + Send>> =

View File

@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -91,6 +93,6 @@ export type WebsocketMessageType = "text" | "binary";
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -206,6 +206,34 @@ export function replaceModelsInStore<
});
}
export function mergeModelsInStore<
M extends AnyModel['model'],
T extends Extract<AnyModel, { model: M }>,
>(model: M, models: T[], filter?: (model: T) => boolean) {
mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
const existingModels = { ...prev[model] } as Record<string, T>;
// Merge in new models first
for (const m of models) {
existingModels[m.id] = m;
}
// Then filter out unwanted models
if (filter) {
for (const [id, m] of Object.entries(existingModels)) {
if (!filter(m)) {
delete existingModels[id];
}
}
}
return {
...prev,
[model]: existingModels,
};
});
}
function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
// Never ignore updates from non-user sources
if (updateSource.type !== 'window') {

View File

@@ -0,0 +1,2 @@
-- Add DNS resolution timing to http_responses
ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,2 @@
-- Add DNS overrides setting to workspaces
ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;

View File

@@ -0,0 +1,12 @@
-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
-- keeping any other custom headers the user may have added.
UPDATE workspaces
SET headers = (
SELECT json_group_array(json(value))
FROM json_each(headers)
WHERE NOT (
(LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
)
)
WHERE json_array_length(headers) > 0;
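For example (illustrative values), a workspace whose headers column held [{"name":"User-Agent","value":"yaak"},{"name":"X-Api-Key","value":"abc"}] is rewritten to [{"name":"X-Api-Key","value":"abc"}], while a workspace with an empty headers array is left untouched by the WHERE clause.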

View File

@@ -73,6 +73,20 @@ pub struct ClientCertificate {
pub enabled: bool,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct DnsOverride {
pub hostname: String,
#[serde(default)]
pub ipv4: Vec<String>,
#[serde(default)]
pub ipv6: Vec<String>,
#[serde(default = "default_true")]
#[ts(optional, as = "Option<bool>")]
pub enabled: bool,
}
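A small sketch (illustrative values, not from the diff) of constructing one of these overrides in Rust; with the serde defaults, enabled may be omitted from stored JSON and defaults to true.

let override_entry = DnsOverride {
    hostname: "api.internal".to_string(),
    ipv4: vec!["10.0.0.42".to_string()],
    ipv6: vec![],
    enabled: true,
};
// Serialized (camelCase) this becomes roughly:
// {"hostname":"api.internal","ipv4":["10.0.0.42"],"ipv6":[],"enabled":true}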
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
@@ -303,6 +317,8 @@ pub struct Workspace {
#[serde(default = "default_true")]
pub setting_follow_redirects: bool,
pub setting_request_timeout: i32,
#[serde(default)]
pub setting_dns_overrides: Vec<DnsOverride>,
}
impl UpsertModelInfo for Workspace {
@@ -343,6 +359,7 @@ impl UpsertModelInfo for Workspace {
(SettingFollowRedirects, self.setting_follow_redirects.into()),
(SettingRequestTimeout, self.setting_request_timeout.into()),
(SettingValidateCertificates, self.setting_validate_certificates.into()),
(SettingDnsOverrides, serde_json::to_string(&self.setting_dns_overrides)?.into()),
])
}
@@ -359,6 +376,7 @@ impl UpsertModelInfo for Workspace {
WorkspaceIden::SettingFollowRedirects,
WorkspaceIden::SettingRequestTimeout,
WorkspaceIden::SettingValidateCertificates,
WorkspaceIden::SettingDnsOverrides,
]
}
@@ -368,6 +386,7 @@ impl UpsertModelInfo for Workspace {
{
let headers: String = row.get("headers")?;
let authentication: String = row.get("authentication")?;
let setting_dns_overrides: String = row.get("setting_dns_overrides")?;
Ok(Self {
id: row.get("id")?,
model: row.get("model")?,
@@ -382,6 +401,7 @@ impl UpsertModelInfo for Workspace {
setting_follow_redirects: row.get("setting_follow_redirects")?,
setting_request_timeout: row.get("setting_request_timeout")?,
setting_validate_certificates: row.get("setting_validate_certificates")?,
setting_dns_overrides: serde_json::from_str(&setting_dns_overrides).unwrap_or_default(),
})
}
}
@@ -1333,6 +1353,7 @@ pub struct HttpResponse {
pub content_length_compressed: Option<i32>,
pub elapsed: i32,
pub elapsed_headers: i32,
pub elapsed_dns: i32,
pub error: Option<String>,
pub headers: Vec<HttpResponseHeader>,
pub remote_addr: Option<String>,
@@ -1381,6 +1402,7 @@ impl UpsertModelInfo for HttpResponse {
(ContentLengthCompressed, self.content_length_compressed.into()),
(Elapsed, self.elapsed.into()),
(ElapsedHeaders, self.elapsed_headers.into()),
(ElapsedDns, self.elapsed_dns.into()),
(Error, self.error.into()),
(Headers, serde_json::to_string(&self.headers)?.into()),
(RemoteAddr, self.remote_addr.into()),
@@ -1402,6 +1424,7 @@ impl UpsertModelInfo for HttpResponse {
HttpResponseIden::ContentLengthCompressed,
HttpResponseIden::Elapsed,
HttpResponseIden::ElapsedHeaders,
HttpResponseIden::ElapsedDns,
HttpResponseIden::Error,
HttpResponseIden::Headers,
HttpResponseIden::RemoteAddr,
@@ -1435,6 +1458,7 @@ impl UpsertModelInfo for HttpResponse {
version: r.get("version")?,
elapsed: r.get("elapsed")?,
elapsed_headers: r.get("elapsed_headers")?,
elapsed_dns: r.get("elapsed_dns").unwrap_or_default(),
remote_addr: r.get("remote_addr")?,
status: r.get("status")?,
status_reason: r.get("status_reason")?,
@@ -1471,7 +1495,21 @@ pub enum HttpResponseEventData {
},
SendUrl {
method: String,
#[serde(default)]
scheme: String,
#[serde(default)]
username: String,
#[serde(default)]
password: String,
#[serde(default)]
host: String,
#[serde(default)]
port: u16,
path: String,
#[serde(default)]
query: String,
#[serde(default)]
fragment: String,
},
ReceiveUrl {
version: String,
@@ -1491,6 +1529,12 @@ pub enum HttpResponseEventData {
ChunkReceived {
bytes: usize,
},
DnsResolved {
hostname: String,
addresses: Vec<String>,
duration: u64,
overridden: bool,
},
}
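
With the new `DnsResolved` variant (and the richer `SendUrl` fields), the response Timeline can report DNS resolution results and whether a workspace override was applied. A sketch of how a consumer might render the generated TypeScript union (the import path is assumed; field names follow the generated `gen_models.ts` shown later in this diff):

```ts
// Assumes HttpResponseEventData is imported from the generated bindings;
// the exact import path may differ per package.
import type { HttpResponseEventData } from './bindings/gen_models';

function describeEvent(event: HttpResponseEventData): string {
  switch (event.type) {
    case 'dns_resolved':
      // `duration` is generated as bigint; its units aren't specified in this diff.
      return `DNS ${event.hostname} -> ${event.addresses.join(', ')} ` +
        `(duration ${event.duration}${event.overridden ? ', overridden' : ''})`;
    case 'send_url':
      return `> ${event.method} ${event.path}`;
    default:
      return event.type;
  }
}
```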
impl Default for HttpResponseEventData {

View File

@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
@@ -87,6 +88,6 @@ impl<'a> DbContext<'a> {
metadata.append(&mut grpc_request.metadata.clone());
Ok(metadata)
Ok(dedupe_headers(metadata))
}
}

View File

@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
@@ -87,7 +88,7 @@ impl<'a> DbContext<'a> {
headers.append(&mut http_request.headers.clone());
Ok(headers)
Ok(dedupe_headers(headers))
}
pub fn list_http_requests_for_folder_recursive(

View File

@@ -19,6 +19,26 @@ mod websocket_connections;
mod websocket_events;
mod websocket_requests;
mod workspace_metas;
mod workspaces;
pub mod workspaces;
const MAX_HISTORY_ITEMS: usize = 20;
use crate::models::HttpRequestHeader;
use std::collections::HashMap;
/// Deduplicate headers by name (case-insensitive), keeping the latest (most specific) value.
/// Preserves the order of first occurrence for each header name.
pub(crate) fn dedupe_headers(headers: Vec<HttpRequestHeader>) -> Vec<HttpRequestHeader> {
let mut index_by_name: HashMap<String, usize> = HashMap::new();
let mut deduped: Vec<HttpRequestHeader> = Vec::new();
for header in headers {
let key = header.name.to_lowercase();
if let Some(&idx) = index_by_name.get(&key) {
deduped[idx] = header;
} else {
index_by_name.insert(key, deduped.len());
deduped.push(header);
}
}
deduped
}
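
Worth noting: `dedupe_headers` keeps the position of the first occurrence but the value (and casing) of the last one, so a more specific header later in the chain overrides an earlier default without reordering the list. A quick behavioral sketch with illustrative headers:

```ts
// Input order: defaults first, then a later override (example values).
const input = [
  { name: 'User-Agent', value: 'yaak', enabled: true },
  { name: 'Accept', value: '*/*', enabled: true },
  { name: 'user-agent', value: 'my-service/1.0', enabled: true },
];

// dedupe_headers(input) would yield: first position kept, last entry wins.
const expected = [
  { name: 'user-agent', value: 'my-service/1.0', enabled: true },
  { name: 'Accept', value: '*/*', enabled: true },
];
```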

View File

@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
@@ -95,6 +96,6 @@ impl<'a> DbContext<'a> {
headers.append(&mut websocket_request.headers.clone());
Ok(headers)
Ok(dedupe_headers(headers))
}
}

View File

@@ -65,28 +65,7 @@ impl<'a> DbContext<'a> {
}
pub fn upsert_workspace(&self, w: &Workspace, source: &UpdateSource) -> Result<Workspace> {
let mut workspace = w.clone();
// Add default headers only for NEW workspaces (empty ID means insert, not update)
// This prevents re-adding headers if a user intentionally removes all headers
if workspace.id.is_empty() && workspace.headers.is_empty() {
workspace.headers = vec![
HttpRequestHeader {
enabled: true,
name: "User-Agent".to_string(),
value: "yaak".to_string(),
id: None,
},
HttpRequestHeader {
enabled: true,
name: "Accept".to_string(),
value: "*/*".to_string(),
id: None,
},
];
}
self.upsert(&workspace, source)
self.upsert(w, source)
}
pub fn resolve_auth_for_workspace(
@@ -101,6 +80,28 @@ impl<'a> DbContext<'a> {
}
pub fn resolve_headers_for_workspace(&self, workspace: &Workspace) -> Vec<HttpRequestHeader> {
workspace.headers.clone()
let mut headers = default_headers();
headers.extend(workspace.headers.clone());
headers
}
}
/// Global default headers that are always sent with requests unless overridden.
/// These are prepended to the inheritance chain so workspace/folder/request headers
/// can override or disable them.
pub fn default_headers() -> Vec<HttpRequestHeader> {
vec![
HttpRequestHeader {
enabled: true,
name: "User-Agent".to_string(),
value: "yaak".to_string(),
id: None,
},
HttpRequestHeader {
enabled: true,
name: "Accept".to_string(),
value: "*/*".to_string(),
id: None,
},
]
}
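
`resolve_headers_for_workspace` now prepends these global defaults instead of baking them into new workspaces, so the stored workspace only contains user-added headers. The effective list is simply the defaults followed by the workspace headers, with duplicates resolved further down the chain by `dedupe_headers` above. Illustratively:

```ts
// Effective workspace headers = global defaults + workspace.headers (in that order).
const defaults = [
  { name: 'User-Agent', value: 'yaak', enabled: true },
  { name: 'Accept', value: '*/*', enabled: true },
];
const workspaceHeaders = [{ name: 'Accept', value: 'application/json', enabled: true }];
const effective = [...defaults, ...workspaceHeaders];
// After deduplication later in the chain, Accept resolves to 'application/json'.
```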

View File

File diff suppressed because one or more lines are too long

View File

@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -77,6 +79,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -80,10 +80,7 @@ pub async fn check_plugin_updates(
}
/// Search for plugins in the registry.
pub async fn search_plugins(
http_client: &Client,
query: &str,
) -> Result<PluginSearchResponse> {
pub async fn search_plugins(http_client: &Client, query: &str) -> Result<PluginSearchResponse> {
let mut url = build_url("/search");
{
let mut query_pairs = url.query_pairs_mut();

View File

@@ -157,6 +157,9 @@ pub enum InternalEventPayload {
PromptTextRequest(PromptTextRequest),
PromptTextResponse(PromptTextResponse),
PromptFormRequest(PromptFormRequest),
PromptFormResponse(PromptFormResponse),
WindowInfoRequest(WindowInfoRequest),
WindowInfoResponse(WindowInfoResponse),
@@ -571,6 +574,28 @@ pub struct PromptTextResponse {
pub value: Option<String>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormRequest {
pub id: String,
pub title: String,
#[ts(optional)]
pub description: Option<String>,
pub inputs: Vec<FormInput>,
#[ts(optional)]
pub confirm_text: Option<String>,
#[ts(optional)]
pub cancel_text: Option<String>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormResponse {
pub values: Option<HashMap<String, JsonPrimitive>>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]

View File

@@ -378,7 +378,8 @@ impl PluginManager {
plugins: Vec<PluginHandle>,
timeout_duration: Duration,
) -> Result<Vec<InternalEvent>> {
let label = format!("wait[{}.{}]", plugins.len(), payload.type_name());
let event_type = payload.type_name();
let label = format!("wait[{}.{}]", plugins.len(), event_type);
let (rx_id, mut rx) = self.subscribe(label.as_str()).await;
// 1. Build the events with IDs and everything
@@ -412,10 +413,21 @@ impl PluginManager {
// Timeout to prevent hanging forever if plugin doesn't respond
if timeout(timeout_duration, collect_events).await.is_err() {
let responded_ids: Vec<&String> =
found_events.iter().filter_map(|e| e.reply_id.as_ref()).collect();
let non_responding: Vec<&str> = events_to_send
.iter()
.filter(|e| !responded_ids.contains(&&e.id))
.map(|e| e.plugin_name.as_str())
.collect();
warn!(
"Timeout waiting for plugin responses. Got {}/{} responses",
"Timeout ({:?}) waiting for {} responses. Got {}/{} responses. \
Non-responding plugins: [{}]",
timeout_duration,
event_type,
found_events.len(),
events_to_send.len()
events_to_send.len(),
non_responding.join(", ")
);
}

View File

@@ -196,7 +196,11 @@ pub fn decrypt_secure_template_function(
}
}
new_tokens.push(Token::Raw {
text: template_function_secure_run(encryption_manager, args_map, plugin_context)?,
text: template_function_secure_run(
encryption_manager,
args_map,
plugin_context,
)?,
});
}
t => {
@@ -216,7 +220,8 @@ pub fn encrypt_secure_template_function(
plugin_context: &PluginContext,
template: &str,
) -> Result<String> {
let decrypted = decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
let decrypted =
decrypt_secure_template_function(&encryption_manager, plugin_context, template)?;
let tokens = Tokens {
tokens: vec![Token::Tag {
val: Val::Fn {
@@ -231,7 +236,12 @@ pub fn encrypt_secure_template_function(
Ok(transform_args(
tokens,
&PluginTemplateCallback::new(plugin_manager, encryption_manager, plugin_context, RenderPurpose::Preview),
&PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
plugin_context,
RenderPurpose::Preview,
),
)?
.to_string())
}

View File

@@ -4,8 +4,8 @@ use std::net::SocketAddr;
use std::path::Path;
use std::process::Stdio;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio::sync::watch::Receiver;
use yaak_common::command::new_xplatform_command;
/// Start the Node.js plugin runtime process.
///
@@ -30,13 +30,14 @@ pub async fn start_nodejs_plugin_runtime(
plugin_runtime_main_str
);
let mut child = Command::new(node_bin_path)
.env("HOST", addr.ip().to_string())
let mut cmd = new_xplatform_command(node_bin_path);
cmd.env("HOST", addr.ip().to_string())
.env("PORT", addr.port().to_string())
.arg(&plugin_runtime_main_str)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
.stderr(Stdio::piped());
let mut child = cmd.spawn()?;
info!("Spawned plugin runtime");

View File

@@ -46,7 +46,11 @@ impl TemplateCallback for PluginTemplateCallback {
let fn_name = if fn_name == "Response" { "response" } else { fn_name };
if fn_name == "secure" {
return template_function_secure_run(&self.encryption_manager, args, &self.plugin_context);
return template_function_secure_run(
&self.encryption_manager,
args,
&self.plugin_context,
);
} else if fn_name == "keychain" || fn_name == "keyring" {
return template_function_keychain_run(args);
}
@@ -56,7 +60,8 @@ impl TemplateCallback for PluginTemplateCallback {
primitive_args.insert(key, JsonPrimitive::from(value));
}
let resp = self.plugin_manager
let resp = self
.plugin_manager
.call_template_function(
&self.plugin_context,
fn_name,

View File

@@ -1,5 +1,7 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
@@ -20,4 +22,4 @@ export type SyncState = { model: "sync_state", id: string, workspaceId: string,
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

View File

@@ -296,11 +296,7 @@ pub fn compute_sync_ops(
.collect()
}
fn workspace_models(
db: &DbContext,
version: &str,
workspace_id: &str,
) -> Result<Vec<SyncModel>> {
fn workspace_models(db: &DbContext, version: &str, workspace_id: &str) -> Result<Vec<SyncModel>> {
// We want to include private environments here so that we can take them into account during
// the sync process. Otherwise, they would be treated as deleted.
let include_private_environments = true;

View File

@@ -2,6 +2,7 @@ use crate::connect::ws_connect;
use crate::error::Result;
use futures_util::stream::SplitSink;
use futures_util::{SinkExt, StreamExt};
use http::HeaderMap;
use log::{debug, info, warn};
use std::collections::HashMap;
use std::sync::Arc;
@@ -10,7 +11,6 @@ use tokio::net::TcpStream;
use tokio::sync::{Mutex, mpsc};
use tokio_tungstenite::tungstenite::Message;
use tokio_tungstenite::tungstenite::handshake::client::Response;
use http::HeaderMap;
use tokio_tungstenite::tungstenite::http::HeaderValue;
use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
use yaak_tls::ClientCertificateConfig;

package-lock.json generated
View File

@@ -7811,9 +7811,9 @@
}
},
"node_modules/hono": {
"version": "4.11.3",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.3.tgz",
"integrity": "sha512-PmQi306+M/ct/m5s66Hrg+adPnkD5jiO6IjA7WhWw0gSBSo1EcRegwuI1deZ+wd5pzCGynCcn2DprnE4/yEV4w==",
"version": "4.11.7",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz",
"integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==",
"license": "MIT",
"engines": {
"node": ">=16.9.0"
@@ -15721,7 +15721,7 @@
},
"packages/plugin-runtime-types": {
"name": "@yaakapp/api",
"version": "0.7.1",
"version": "0.8.0",
"dependencies": {
"@types/node": "^24.0.13"
},
@@ -15743,7 +15743,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.25.2",
"hono": "^4.11.3",
"hono": "^4.11.7",
"zod": "^3.25.76"
},
"devDependencies": {

View File

@@ -17,7 +17,7 @@ npx @yaakapp/cli generate
```
For more details on creating plugins, check out
the [Quick Start Guide](https://feedback.yaak.app/help/articles/6911763-plugins-quick-start)
the [Quick Start Guide](https://yaak.app/docs/plugin-development/plugins-quick-start)
## Installation

View File

@@ -1,6 +1,6 @@
{
"name": "@yaakapp/api",
"version": "0.7.1",
"version": "0.8.0",
"keywords": [
"api-client",
"insomnia-alternative",

View File

File diff suppressed because one or more lines are too long

View File

@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
export type EncryptedKey = { encryptedKey: string, };
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
* This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
* The `From` impl is in yaak-http to avoid circular dependencies.
*/
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };
export type HttpResponseHeader = { name: string, value: string, };
@@ -77,6 +79,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };

View File

@@ -11,6 +11,8 @@ import type {
ListHttpRequestsRequest,
ListHttpRequestsResponse,
OpenWindowRequest,
PromptFormRequest,
PromptFormResponse,
PromptTextRequest,
PromptTextResponse,
RenderGrpcRequestRequest,
@@ -23,7 +25,7 @@ import type {
TemplateRenderRequest,
WorkspaceInfo,
} from '../bindings/gen_events.ts';
import type { HttpRequest } from '../bindings/gen_models.ts';
import type { Folder, HttpRequest } from '../bindings/gen_models.ts';
import type { JsonValue } from '../bindings/serde_json/JsonValue';
export type WorkspaceHandle = Pick<WorkspaceInfo, 'id' | 'name'>;
@@ -37,6 +39,7 @@ export interface Context {
};
prompt: {
text(args: PromptTextRequest): Promise<PromptTextResponse['value']>;
form(args: PromptFormRequest): Promise<PromptFormResponse['values']>;
};
store: {
set<T>(key: string, value: T): Promise<void>;
@@ -79,6 +82,15 @@ export interface Context {
};
folder: {
list(args?: ListFoldersRequest): Promise<ListFoldersResponse['folders']>;
getById(args: { id: string }): Promise<Folder | null>;
create(
args: Omit<Partial<Folder>, 'id' | 'model' | 'createdAt' | 'updatedAt'> &
Pick<Folder, 'workspaceId' | 'name'>,
): Promise<Folder>;
update(
args: Omit<Partial<Folder>, 'model' | 'createdAt' | 'updatedAt'> & Pick<Folder, 'id'>,
): Promise<Folder>;
delete(args: { id: string }): Promise<Folder>;
};
httpResponse: {
find(args: FindHttpResponsesRequest): Promise<FindHttpResponsesResponse['httpResponses']>;
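
The plugin `Context` gains a `prompt.form` method and full folder CRUD. A minimal usage sketch (the `FormInput` entries are elided because that type's shape isn't part of this diff, and `values` is presumably absent when the user cancels):

```ts
import type { Context } from '@yaakapp/api';

// Hypothetical plugin helper using the new APIs; names and values are illustrative.
export async function createFolderViaPrompt(ctx: Context, workspaceId: string) {
  const values = await ctx.prompt.form({
    id: 'create-folder',
    title: 'Create Folder',
    inputs: [], // FormInput entries elided; the FormInput type isn't shown in this diff
    confirmText: 'Create',
  });
  if (values == null) return null; // user cancelled the form

  const folder = await ctx.folder.create({
    workspaceId,
    name: String(values.name ?? 'Untitled'),
  });
  return ctx.folder.getById({ id: folder.id });
}
```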

View File

@@ -11,6 +11,7 @@ import type {
DeleteKeyValueResponse,
DeleteModelResponse,
FindHttpResponsesResponse,
Folder,
GetCookieValueRequest,
GetCookieValueResponse,
GetHttpRequestByIdResponse,
@@ -28,6 +29,7 @@ import type {
ListHttpRequestsResponse,
ListWorkspacesResponse,
PluginContext,
PromptFormResponse,
PromptTextResponse,
RenderGrpcRequestResponse,
RenderHttpRequestResponse,
@@ -661,6 +663,13 @@ export class PluginInstance {
});
return reply.value;
},
form: async (args) => {
const reply: PromptFormResponse = await this.#sendForReply(context, {
type: 'prompt_form_request',
...args,
});
return reply.values;
},
},
httpResponse: {
find: async (args) => {
@@ -774,6 +783,44 @@ export class PluginInstance {
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
return folders;
},
getById: async (args: { id: string }) => {
const payload = { type: 'list_folders_request' } as const;
const { folders } = await this.#sendForReply<ListFoldersResponse>(context, payload);
return folders.find((f) => f.id === args.id) ?? null;
},
create: async (args) => {
const payload = {
type: 'upsert_model_request',
model: {
name: '',
...args,
id: '',
model: 'folder',
},
} as InternalEventPayload;
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
return response.model as Folder;
},
update: async (args) => {
const payload = {
type: 'upsert_model_request',
model: {
model: 'folder',
...args,
},
} as InternalEventPayload;
const response = await this.#sendForReply<UpsertModelResponse>(context, payload);
return response.model as Folder;
},
delete: async (args: { id: string }) => {
const payload = {
type: 'delete_model_request',
model: 'folder',
id: args.id,
} as InternalEventPayload;
const response = await this.#sendForReply<DeleteModelResponse>(context, payload);
return response.model as Folder;
},
},
cookies: {
getValue: async (args: GetCookieValueRequest) => {

View File

@@ -18,7 +18,7 @@
"@hono/mcp": "^0.2.3",
"@hono/node-server": "^1.19.7",
"@modelcontextprotocol/sdk": "^1.25.2",
"hono": "^4.11.3",
"hono": "^4.11.7",
"zod": "^3.25.76"
},
"devDependencies": {

View File

@@ -2,6 +2,12 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import type { McpServerContext } from '../types.js';
import { getWorkspaceContext } from './helpers.js';
import {
authenticationSchema,
authenticationTypeSchema,
headersSchema,
workspaceIdSchema,
} from './schemas.js';
export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
server.registerTool(
@@ -10,10 +16,7 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
title: 'List Folders',
description: 'List all folders in a workspace',
inputSchema: {
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
workspaceId: workspaceIdSchema,
},
},
async ({ workspaceId }) => {
@@ -30,4 +33,116 @@ export function registerFolderTools(server: McpServer, ctx: McpServerContext) {
};
},
);
server.registerTool(
'get_folder',
{
title: 'Get Folder',
description: 'Get details of a specific folder by ID',
inputSchema: {
id: z.string().describe('The folder ID'),
workspaceId: workspaceIdSchema,
},
},
async ({ id, workspaceId }) => {
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
const folder = await workspaceCtx.yaak.folder.getById({ id });
return {
content: [
{
type: 'text' as const,
text: JSON.stringify(folder, null, 2),
},
],
};
},
);
server.registerTool(
'create_folder',
{
title: 'Create Folder',
description: 'Create a new folder in a workspace',
inputSchema: {
workspaceId: workspaceIdSchema,
name: z.string().describe('Folder name'),
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
description: z.string().optional().describe('Folder description'),
sortPriority: z.number().optional().describe('Sort priority for ordering'),
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ workspaceId: ogWorkspaceId, ...args }) => {
const workspaceCtx = await getWorkspaceContext(ctx, ogWorkspaceId);
const workspaceId = await workspaceCtx.yaak.window.workspaceId();
if (!workspaceId) {
throw new Error('No workspace is open');
}
const folder = await workspaceCtx.yaak.folder.create({
workspaceId: workspaceId,
...args,
});
return {
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
};
},
);
server.registerTool(
'update_folder',
{
title: 'Update Folder',
description: 'Update an existing folder',
inputSchema: {
id: z.string().describe('Folder ID to update'),
workspaceId: workspaceIdSchema,
name: z.string().optional().describe('Folder name'),
folderId: z.string().optional().describe('Parent folder ID (for nested folders)'),
description: z.string().optional().describe('Folder description'),
sortPriority: z.number().optional().describe('Sort priority for ordering'),
headers: headersSchema.describe('Default headers to apply to requests in this folder'),
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ id, workspaceId, ...updates }) => {
const workspaceCtx = await getWorkspaceContext(ctx, workspaceId);
// Fetch existing folder to merge with updates
const existing = await workspaceCtx.yaak.folder.getById({ id });
if (!existing) {
throw new Error(`Folder with ID ${id} not found`);
}
// Merge existing fields with updates
const folder = await workspaceCtx.yaak.folder.update({
...existing,
...updates,
id,
});
return {
content: [{ type: 'text' as const, text: JSON.stringify(folder, null, 2) }],
};
},
);
server.registerTool(
'delete_folder',
{
title: 'Delete Folder',
description: 'Delete a folder by ID',
inputSchema: {
id: z.string().describe('Folder ID to delete'),
},
},
async ({ id }) => {
const folder = await ctx.yaak.folder.delete({ id });
return {
content: [{ type: 'text' as const, text: `Deleted: ${folder.name} (${folder.id})` }],
};
},
);
}
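
For reference, example argument payloads for the new folder tools, matching the input schemas above (IDs and values are made up; how they're sent depends on the MCP client in use):

```ts
// Illustrative MCP tool arguments for the new folder tools.
const createFolderArgs = {
  name: 'Smoke Tests',          // required
  workspaceId: 'wk_123',        // optional when only one workspace is open
  description: 'Requests run before each release',
  headers: [{ name: 'X-Env', value: 'staging', enabled: true }],
};

const updateFolderArgs = {
  id: 'fl_456',                 // required; other fields are merged over the existing folder
  name: 'Smoke Tests (renamed)',
  sortPriority: 10,
};

const deleteFolderArgs = { id: 'fl_456' };
```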

View File

@@ -2,6 +2,15 @@ import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import * as z from 'zod';
import type { McpServerContext } from '../types.js';
import { getWorkspaceContext } from './helpers.js';
import {
authenticationSchema,
authenticationTypeSchema,
bodySchema,
bodyTypeSchema,
headersSchema,
urlParametersSchema,
workspaceIdSchema,
} from './schemas.js';
export function registerHttpRequestTools(server: McpServer, ctx: McpServerContext) {
server.registerTool(
@@ -10,10 +19,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
title: 'List HTTP Requests',
description: 'List all HTTP requests in a workspace',
inputSchema: {
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
workspaceId: workspaceIdSchema,
},
},
async ({ workspaceId }) => {
@@ -38,10 +44,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
description: 'Get details of a specific HTTP request by ID',
inputSchema: {
id: z.string().describe('The HTTP request ID'),
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
workspaceId: workspaceIdSchema,
},
},
async ({ id, workspaceId }) => {
@@ -67,10 +70,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
inputSchema: {
id: z.string().describe('The HTTP request ID to send'),
environmentId: z.string().optional().describe('Optional environment ID to use'),
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
workspaceId: workspaceIdSchema,
},
},
async ({ id, workspaceId }) => {
@@ -99,10 +99,7 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
title: 'Create HTTP Request',
description: 'Create a new HTTP request',
inputSchema: {
workspaceId: z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)'),
workspaceId: workspaceIdSchema,
name: z
.string()
.optional()
@@ -111,62 +108,12 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
method: z.string().optional().describe('HTTP method (defaults to GET)'),
folderId: z.string().optional().describe('Parent folder ID'),
description: z.string().optional().describe('Request description'),
headers: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('Request headers'),
urlParameters: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters'),
bodyType: z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
),
body: z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
),
authenticationType: z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
),
authentication: z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
),
headers: headersSchema.describe('Request headers'),
urlParameters: urlParametersSchema,
bodyType: bodyTypeSchema,
body: bodySchema,
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ workspaceId: ogWorkspaceId, ...args }) => {
@@ -194,68 +141,18 @@ export function registerHttpRequestTools(server: McpServer, ctx: McpServerContex
description: 'Update an existing HTTP request',
inputSchema: {
id: z.string().describe('HTTP request ID to update'),
workspaceId: z.string().describe('Workspace ID'),
workspaceId: workspaceIdSchema,
name: z.string().optional().describe('Request name'),
url: z.string().optional().describe('Request URL'),
method: z.string().optional().describe('HTTP method'),
folderId: z.string().optional().describe('Parent folder ID'),
description: z.string().optional().describe('Request description'),
headers: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('Request headers'),
urlParameters: z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters'),
bodyType: z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
),
body: z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
),
authenticationType: z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent folder/workspace.',
),
authentication: z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
),
headers: headersSchema.describe('Request headers'),
urlParameters: urlParametersSchema,
bodyType: bodyTypeSchema,
body: bodySchema,
authenticationType: authenticationTypeSchema,
authentication: authenticationSchema,
},
},
async ({ id, workspaceId, ...updates }) => {

View File

@@ -0,0 +1,67 @@
import * as z from 'zod';
export const workspaceIdSchema = z
.string()
.optional()
.describe('Workspace ID (required if multiple workspaces are open)');
export const headersSchema = z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional();
export const urlParametersSchema = z
.array(
z.object({
name: z.string(),
value: z.string(),
enabled: z.boolean().default(true),
}),
)
.optional()
.describe('URL query parameters');
export const bodyTypeSchema = z
.string()
.optional()
.describe(
'Body type. Supported values: "binary", "graphql", "application/x-www-form-urlencoded", "multipart/form-data", or any text-based type (e.g., "application/json", "text/plain")',
);
export const bodySchema = z
.record(z.string(), z.any())
.optional()
.describe(
'Body content object. Structure varies by bodyType:\n' +
'- "binary": { filePath: "/path/to/file" }\n' +
'- "graphql": { query: "{ users { name } }", variables: "{\\"id\\": \\"123\\"}" }\n' +
'- "application/x-www-form-urlencoded": { form: [{ name: "key", value: "val", enabled: true }] }\n' +
'- "multipart/form-data": { form: [{ name: "field", value: "text", file: "/path/to/file", enabled: true }] }\n' +
'- text-based (application/json, etc.): { text: "raw body content" }',
);
export const authenticationTypeSchema = z
.string()
.optional()
.describe(
'Authentication type. Common values: "basic", "bearer", "oauth2", "apikey", "jwt", "awsv4", "oauth1", "ntlm", "none". Use null to inherit from parent.',
);
export const authenticationSchema = z
.record(z.string(), z.any())
.optional()
.describe(
'Authentication configuration object. Structure varies by authenticationType:\n' +
'- "basic": { username: "user", password: "pass" }\n' +
'- "bearer": { token: "abc123", prefix: "Bearer" }\n' +
'- "oauth2": { clientId: "...", clientSecret: "...", grantType: "authorization_code", authorizationUrl: "...", accessTokenUrl: "...", scope: "...", ... }\n' +
'- "apikey": { location: "header" | "query", key: "X-API-Key", value: "..." }\n' +
'- "jwt": { algorithm: "HS256", secret: "...", payload: "{ ... }" }\n' +
'- "awsv4": { accessKeyId: "...", secretAccessKey: "...", service: "sts", region: "us-east-1", sessionToken: "..." }\n' +
'- "none": {}',
);
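
One detail worth calling out: `enabled` uses `.default(true)`, so parsed header and URL-parameter entries come back enabled unless explicitly disabled. A quick check (a sketch; the relative import path is assumed):

```ts
import { headersSchema } from './schemas.js';

// Parsing fills in enabled: true when the field is omitted.
const headers = headersSchema.parse([{ name: 'Accept', value: 'application/json' }]);
console.log(headers?.[0]); // -> { name: 'Accept', value: 'application/json', enabled: true }
```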

View File

@@ -11,6 +11,7 @@
"version": "0.1.0",
"scripts": {
"build": "yaakcli build",
"dev": "yaakcli dev"
"dev": "yaakcli dev",
"test": "vitest --run tests"
}
}

View File

@@ -21,7 +21,8 @@ export const plugin: PluginDefinition = {
},
],
async onApply(_ctx, { values }) {
const { username, password } = values;
const username = values.username ?? '';
const password = values.password ?? '';
const value = `Basic ${Buffer.from(`${username}:${password}`).toString('base64')}`;
return { setHeaders: [{ name: 'Authorization', value }] };
},
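
Why the fallback matters: template-literal interpolation stringifies `undefined`, so the old destructuring produced a header that encoded the literal text `undefined:undefined` whenever either field was unset. Defaulting to empty strings yields empty credentials instead (quick illustration):

```ts
// Old behavior when both values were missing:
Buffer.from(`${undefined}:${undefined}`).toString('base64');
// -> "dW5kZWZpbmVkOnVuZGVmaW5lZA==", which decodes to "undefined:undefined"

// New behavior with the ?? '' fallback:
const username = '';
const password = '';
Buffer.from(`${username}:${password}`).toString('base64');
// -> "Og==", which decodes to ":"
```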

View File

@@ -0,0 +1,77 @@
import type { Context } from '@yaakapp/api';
import { describe, expect, test } from 'vitest';
import { plugin } from '../src';
const ctx = {} as Context;
describe('auth-basic', () => {
test('Both username and password', async () => {
expect(
await plugin.authentication?.onApply(ctx, {
values: { username: 'user', password: 'pass' },
headers: [],
url: 'https://yaak.app',
method: 'POST',
contextId: '111',
}),
).toEqual({
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('user:pass').toString('base64')}` }],
});
});
test('Empty password', async () => {
expect(
await plugin.authentication?.onApply(ctx, {
values: { username: 'apikey', password: '' },
headers: [],
url: 'https://yaak.app',
method: 'POST',
contextId: '111',
}),
).toEqual({
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
});
});
test('Missing password (undefined)', async () => {
expect(
await plugin.authentication?.onApply(ctx, {
values: { username: 'apikey' },
headers: [],
url: 'https://yaak.app',
method: 'POST',
contextId: '111',
}),
).toEqual({
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from('apikey:').toString('base64')}` }],
});
});
test('Missing username (undefined)', async () => {
expect(
await plugin.authentication?.onApply(ctx, {
values: { password: 'secret' },
headers: [],
url: 'https://yaak.app',
method: 'POST',
contextId: '111',
}),
).toEqual({
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':secret').toString('base64')}` }],
});
});
test('No values (both undefined)', async () => {
expect(
await plugin.authentication?.onApply(ctx, {
values: {},
headers: [],
url: 'https://yaak.app',
method: 'POST',
contextId: '111',
}),
).toEqual({
setHeaders: [{ name: 'Authorization', value: `Basic ${Buffer.from(':').toString('base64')}` }],
});
});
});

View File

@@ -0,0 +1,161 @@
import { open } from '@tauri-apps/plugin-dialog';
import { gitClone } from '@yaakapp-internal/git';
import { useState } from 'react';
import { openWorkspaceFromSyncDir } from '../commands/openWorkspaceFromSyncDir';
import { appInfo } from '../lib/appInfo';
import { showErrorToast } from '../lib/toast';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { VStack } from './core/Stacks';
import { promptCredentials } from './git/credentials';
interface Props {
hide: () => void;
}
// Detect path separator from an existing path (defaults to /)
function getPathSeparator(path: string): string {
return path.includes('\\') ? '\\' : '/';
}
export function CloneGitRepositoryDialog({ hide }: Props) {
const [url, setUrl] = useState<string>('');
const [baseDirectory, setBaseDirectory] = useState<string>(appInfo.defaultProjectDir);
const [directoryOverride, setDirectoryOverride] = useState<string | null>(null);
const [hasSubdirectory, setHasSubdirectory] = useState(false);
const [subdirectory, setSubdirectory] = useState<string>('');
const [isCloning, setIsCloning] = useState(false);
const [error, setError] = useState<string | null>(null);
const repoName = extractRepoName(url);
const sep = getPathSeparator(baseDirectory);
const computedDirectory = repoName ? `${baseDirectory}${sep}${repoName}` : baseDirectory;
const directory = directoryOverride ?? computedDirectory;
const workspaceDirectory =
hasSubdirectory && subdirectory ? `${directory}${sep}${subdirectory}` : directory;
const handleSelectDirectory = async () => {
const dir = await open({
title: 'Select Directory',
directory: true,
multiple: false,
});
if (dir != null) {
setBaseDirectory(dir);
setDirectoryOverride(null);
}
};
const handleClone = async (e: React.FormEvent) => {
e.preventDefault();
if (!url || !directory) return;
setIsCloning(true);
setError(null);
try {
const result = await gitClone(url, directory, promptCredentials);
if (result.type === 'needs_credentials') {
setError(
result.error ?? 'Authentication failed. Please check your credentials and try again.',
);
return;
}
// Open the workspace from the cloned directory (or subdirectory)
await openWorkspaceFromSyncDir.mutateAsync(workspaceDirectory);
hide();
} catch (err) {
setError(String(err));
showErrorToast({
id: 'git-clone-error',
title: 'Clone Failed',
message: String(err),
});
} finally {
setIsCloning(false);
}
};
return (
<VStack as="form" space={3} alignItems="start" className="pb-3" onSubmit={handleClone}>
{error && (
<Banner color="danger" className="w-full">
{error}
</Banner>
)}
<PlainInput
required
label="Repository URL"
placeholder="https://github.com/user/repo.git"
defaultValue={url}
onChange={setUrl}
/>
<PlainInput
label="Directory"
placeholder={appInfo.defaultProjectDir}
defaultValue={directory}
onChange={setDirectoryOverride}
rightSlot={
<IconButton
size="xs"
className="mr-0.5 !h-auto my-0.5"
icon="folder"
title="Browse"
onClick={handleSelectDirectory}
/>
}
/>
<Checkbox
checked={hasSubdirectory}
onChange={setHasSubdirectory}
title="Workspace is in a subdirectory"
help="Enable if the Yaak workspace files are not at the root of the repository"
/>
{hasSubdirectory && (
<PlainInput
label="Subdirectory"
placeholder="path/to/workspace"
defaultValue={subdirectory}
onChange={setSubdirectory}
/>
)}
<Button
type="submit"
color="primary"
className="w-full mt-3"
disabled={!url || !directory || isCloning}
isLoading={isCloning}
>
{isCloning ? 'Cloning...' : 'Clone Repository'}
</Button>
</VStack>
);
}
function extractRepoName(url: string): string {
// Handle various Git URL formats:
// https://github.com/user/repo.git
// git@github.com:user/repo.git
// https://github.com/user/repo
const match = url.match(/\/([^/]+?)(\.git)?$/);
if (match?.[1]) {
return match[1];
}
// Fallback for SSH-style URLs
const sshMatch = url.match(/:([^/]+?)(\.git)?$/);
if (sshMatch?.[1]) {
return sshMatch[1];
}
return '';
}
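
For the URL formats listed in the comment above, the helper resolves to the repository name via the first pattern, with the colon-based fallback covering SSH remotes that contain no slash. A vitest-style behavior sketch (it assumes `extractRepoName` were exported; in this diff it is module-private, and the import path is hypothetical):

```ts
import { describe, expect, test } from 'vitest';
// Hypothetical import: extractRepoName would need to be exported for this to run.
import { extractRepoName } from '../src/components/CloneGitRepositoryDialog';

describe('extractRepoName', () => {
  test('handles common Git URL formats', () => {
    expect(extractRepoName('https://github.com/user/repo.git')).toBe('repo');
    expect(extractRepoName('git@github.com:user/repo.git')).toBe('repo');
    expect(extractRepoName('https://github.com/user/repo')).toBe('repo');
    expect(extractRepoName('git@host:repo.git')).toBe('repo'); // SSH fallback (no "/")
  });
});
```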

View File

@@ -0,0 +1,181 @@
import type { DnsOverride, Workspace } from '@yaakapp-internal/models';
import { patchModel } from '@yaakapp-internal/models';
import { useCallback, useId, useMemo } from 'react';
import { Button } from './core/Button';
import { Checkbox } from './core/Checkbox';
import { IconButton } from './core/IconButton';
import { PlainInput } from './core/PlainInput';
import { HStack, VStack } from './core/Stacks';
import { Table, TableBody, TableCell, TableHead, TableHeaderCell, TableRow } from './core/Table';
interface Props {
workspace: Workspace;
}
interface DnsOverrideWithId extends DnsOverride {
_id: string;
}
export function DnsOverridesEditor({ workspace }: Props) {
const reactId = useId();
// Ensure each override has an internal ID for React keys
const overridesWithIds = useMemo<DnsOverrideWithId[]>(() => {
return workspace.settingDnsOverrides.map((override, index) => ({
...override,
_id: `${reactId}-${index}`,
}));
}, [workspace.settingDnsOverrides, reactId]);
const handleChange = useCallback(
(overrides: DnsOverride[]) => {
patchModel(workspace, { settingDnsOverrides: overrides });
},
[workspace],
);
const handleAdd = useCallback(() => {
const newOverride: DnsOverride = {
hostname: '',
ipv4: [''],
ipv6: [],
enabled: true,
};
handleChange([...workspace.settingDnsOverrides, newOverride]);
}, [workspace.settingDnsOverrides, handleChange]);
const handleUpdate = useCallback(
(index: number, update: Partial<DnsOverride>) => {
const updated = workspace.settingDnsOverrides.map((o, i) =>
i === index ? { ...o, ...update } : o,
);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);
const handleDelete = useCallback(
(index: number) => {
const updated = workspace.settingDnsOverrides.filter((_, i) => i !== index);
handleChange(updated);
},
[workspace.settingDnsOverrides, handleChange],
);
return (
<VStack space={3} className="pb-3">
<div className="text-text-subtle text-sm">
Override DNS resolution for specific hostnames. This works like{' '}
<code className="text-text-subtlest bg-surface-highlight px-1 rounded">/etc/hosts</code>{' '}
but only for requests made from this workspace.
</div>
{overridesWithIds.length > 0 && (
<Table>
<TableHead>
<TableRow>
<TableHeaderCell className="w-8" />
<TableHeaderCell>Hostname</TableHeaderCell>
<TableHeaderCell>IPv4 Address</TableHeaderCell>
<TableHeaderCell>IPv6 Address</TableHeaderCell>
<TableHeaderCell className="w-10" />
</TableRow>
</TableHead>
<TableBody>
{overridesWithIds.map((override, index) => (
<DnsOverrideRow
key={override._id}
override={override}
onUpdate={(update) => handleUpdate(index, update)}
onDelete={() => handleDelete(index)}
/>
))}
</TableBody>
</Table>
)}
<HStack>
<Button size="xs" color="secondary" variant="border" onClick={handleAdd}>
Add DNS Override
</Button>
</HStack>
</VStack>
);
}
interface DnsOverrideRowProps {
override: DnsOverride;
onUpdate: (update: Partial<DnsOverride>) => void;
onDelete: () => void;
}
function DnsOverrideRow({ override, onUpdate, onDelete }: DnsOverrideRowProps) {
const ipv4Value = override.ipv4.join(', ');
const ipv6Value = override.ipv6.join(', ');
return (
<TableRow>
<TableCell>
<Checkbox
hideLabel
title={override.enabled ? 'Disable override' : 'Enable override'}
checked={override.enabled ?? true}
onChange={(enabled) => onUpdate({ enabled })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="Hostname"
placeholder="api.example.com"
defaultValue={override.hostname}
onChange={(hostname) => onUpdate({ hostname })}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv4 addresses"
placeholder="127.0.0.1"
defaultValue={ipv4Value}
onChange={(value) =>
onUpdate({
ipv4: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<PlainInput
size="sm"
hideLabel
label="IPv6 addresses"
placeholder="::1"
defaultValue={ipv6Value}
onChange={(value) =>
onUpdate({
ipv6: value
.split(',')
.map((s) => s.trim())
.filter(Boolean),
})
}
/>
</TableCell>
<TableCell>
<IconButton
size="xs"
iconSize="sm"
icon="trash"
title="Delete override"
onClick={onDelete}
/>
</TableCell>
</TableRow>
);
}

View File

@@ -1,6 +1,6 @@
import { createWorkspaceModel, foldersAtom, patchModel } from '@yaakapp-internal/models';
import { useAtomValue } from 'jotai';
import { useMemo, useState } from 'react';
import { useMemo } from 'react';
import { useAuthTab } from '../hooks/useAuthTab';
import { useEnvironmentsBreakdown } from '../hooks/useEnvironmentsBreakdown';
import { useHeadersTab } from '../hooks/useHeadersTab';
@@ -37,7 +37,6 @@ export type FolderSettingsTab =
export function FolderSettingsDialog({ folderId, tab }: Props) {
const folders = useAtomValue(foldersAtom);
const folder = folders.find((f) => f.id === folderId) ?? null;
const [activeTab, setActiveTab] = useState<string>(tab ?? TAB_GENERAL);
const authTab = useAuthTab(TAB_AUTH, folder);
const headersTab = useHeadersTab(TAB_HEADERS, folder);
const inheritedHeaders = useInheritedHeaders(folder);
@@ -69,8 +68,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
return (
<Tabs
value={activeTab}
onChangeValue={setActiveTab}
defaultValue={tab ?? TAB_GENERAL}
label="Folder Settings"
className="pt-2 pb-2 pl-3 pr-1"
layout="horizontal"
@@ -113,7 +111,7 @@ export function FolderSettingsDialog({ folderId, tab }: Props) {
<VStack alignItems="center" space={1.5}>
<p>
Override{' '}
<Link href="https://feedback.yaak.app/help/articles/3284139-environments-and-variables">
<Link href="https://yaak.app/docs/using-yaak/environments-and-variables">
Variables
</Link>{' '}
for requests within this folder.

View File

@@ -10,7 +10,7 @@ import {
stateExtensions,
updateSchema,
} from 'codemirror-json-schema';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { showAlert } from '../lib/alert';
import { showDialog } from '../lib/dialog';
@@ -39,15 +39,15 @@ export function GrpcEditor({
protoFiles,
...extraEditorProps
}: Props) {
const editorViewRef = useRef<EditorView>(null);
const [editorView, setEditorView] = useState<EditorView | null>(null);
const handleInitEditorViewRef = useCallback((h: EditorView | null) => {
editorViewRef.current = h;
setEditorView(h);
}, []);
// Find the schema for the selected service and method and update the editor
useEffect(() => {
if (
editorViewRef.current == null ||
editorView == null ||
services === null ||
request.service === null ||
request.method === null
@@ -91,7 +91,7 @@ export function GrpcEditor({
}
try {
updateSchema(editorViewRef.current, JSON.parse(schema));
updateSchema(editorView, JSON.parse(schema));
} catch (err) {
showAlert({
id: 'grpc-parse-schema-error',
@@ -107,7 +107,7 @@ export function GrpcEditor({
),
});
}
}, [services, request.method, request.service]);
}, [editorView, services, request.method, request.service]);
const extraExtensions = useMemo(
() => [
@@ -118,7 +118,7 @@ export function GrpcEditor({
jsonLanguage.data.of({
autocomplete: jsonCompletion(),
}),
stateExtensions(/** Init with empty schema **/),
stateExtensions({}),
],
[],
);

View File

@@ -7,7 +7,6 @@ import { useContainerSize } from '../hooks/useContainerQuery';
import type { ReflectResponseService } from '../hooks/useGrpc';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
import { resolvedModelName } from '../lib/resolvedModelName';
import { Button } from './core/Button';
@@ -69,11 +68,6 @@ export function GrpcRequestPane({
const authTab = useAuthTab(TAB_AUTH, activeRequest);
const metadataTab = useHeadersTab(TAB_METADATA, activeRequest, 'Metadata');
const inheritedHeaders = useInheritedHeaders(activeRequest);
const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
namespace: 'no_sync',
key: 'grpcRequestActiveTabs',
fallback: {},
});
const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);
const urlContainerEl = useRef<HTMLDivElement>(null);
@@ -145,14 +139,6 @@ export function GrpcRequestPane({
[activeRequest.description, authTab, metadataTab],
);
const activeTab = activeTabs?.[activeRequest.id];
const setActiveTab = useCallback(
async (tab: string) => {
await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
},
[activeRequest.id, setActiveTabs],
);
const handleMetadataChange = useCallback(
(metadata: HttpRequestHeader[]) => patchModel(activeRequest, { metadata }),
[activeRequest],
@@ -265,12 +251,11 @@ export function GrpcRequestPane({
</HStack>
</div>
<Tabs
value={activeTab}
label="Request"
onChangeValue={setActiveTab}
tabs={tabs}
tabListClassName="mt-1 !mb-1.5"
storageKey="grpc_request_tabs_order"
storageKey="grpc_request_tabs"
activeTabKey={activeRequest.id}
>
<TabContent value="message">
<GrpcEditor

View File

@@ -1,9 +1,7 @@
import type { GrpcEvent, GrpcRequest } from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { useAtomValue, useSetAtom } from 'jotai';
import type { CSSProperties } from 'react';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useEffect, useMemo, useState } from 'react';
import {
activeGrpcConnectionAtom,
activeGrpcConnections,
@@ -11,18 +9,14 @@ import {
useGrpcEvents,
} from '../hooks/usePinnedGrpcConnection';
import { useStateWithDeps } from '../hooks/useStateWithDeps';
import { copyToClipboard } from '../lib/copy';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { Button } from './core/Button';
import { Editor } from './core/Editor/LazyEditor';
import { EventDetailHeader, EventViewer } from './core/EventViewer';
import { EventViewerRow } from './core/EventViewerRow';
import { HotkeyList } from './core/HotkeyList';
import { Icon } from './core/Icon';
import { IconButton } from './core/IconButton';
import { Icon, type IconProps } from './core/Icon';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import { LoadingIcon } from './core/LoadingIcon';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
import { HStack, VStack } from './core/Stacks';
import { EmptyStateText } from './EmptyStateText';
import { ErrorBoundary } from './ErrorBoundary';
@@ -42,7 +36,7 @@ interface Props {
}
export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const [activeEventId, setActiveEventId] = useState<string | null>(null);
const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
const [showLarge, setShowLarge] = useStateWithDeps<boolean>(false, [activeRequest.id]);
const [showingLarge, setShowingLarge] = useState<boolean>(false);
const connections = useAtomValue(activeGrpcConnections);
@@ -51,8 +45,8 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
const setPinnedGrpcConnectionId = useSetAtom(pinnedGrpcConnectionIdAtom);
const activeEvent = useMemo(
() => events.find((m) => m.id === activeEventId) ?? null,
[activeEventId, events],
() => (activeEventIndex != null ? events[activeEventIndex] : null),
[activeEventIndex, events],
);
// Set the active message to the first message received if unary
@@ -61,223 +55,188 @@ export function GrpcResponsePane({ style, methodType, activeRequest }: Props) {
if (events.length === 0 || activeEvent != null || methodType !== 'unary') {
return;
}
setActiveEventId(events.find((m) => m.eventType === 'server_message')?.id ?? null);
const firstServerMessageIndex = events.findIndex((m) => m.eventType === 'server_message');
if (firstServerMessageIndex !== -1) {
setActiveEventIndex(firstServerMessageIndex);
}
}, [events.length]);
if (activeConnection == null) {
return (
<HotkeyList hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']} />
);
}
const header = (
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
);
return (
<SplitLayout
layout="vertical"
style={style}
name="grpc_events"
defaultRatio={0.4}
minHeightPx={20}
firstSlot={() =>
activeConnection == null ? (
<HotkeyList
hotkeys={['request.send', 'model.create', 'sidebar.focus', 'url_bar.focus']}
/>
) : (
<div className="w-full grid grid-rows-[auto_minmax(0,1fr)] grid-cols-1 items-center">
<HStack className="pl-3 mb-1 font-mono text-sm text-text-subtle overflow-x-auto hide-scrollbars">
<HStack space={2}>
<span className="whitespace-nowrap">{events.length} Messages</span>
{activeConnection.state !== 'closed' && (
<LoadingIcon size="sm" className="text-text-subtlest" />
)}
</HStack>
<div className="ml-auto">
<RecentGrpcConnectionsDropdown
connections={connections}
activeConnection={activeConnection}
onPinnedConnectionId={setPinnedGrpcConnectionId}
/>
</div>
</HStack>
<ErrorBoundary name="GRPC Events">
<AutoScroller
data={events}
header={
activeConnection.error && (
<Banner color="danger" className="m-3">
{activeConnection.error}
</Banner>
)
}
render={(event) => (
<EventRow
key={event.id}
event={event}
isActive={event.id === activeEventId}
onClick={() => {
if (event.id === activeEventId) setActiveEventId(null);
else setActiveEventId(event.id);
}}
/>
)}
/>
</ErrorBoundary>
</div>
)
}
secondSlot={
activeEvent != null && activeConnection != null
? () => (
<div className="grid grid-rows-[auto_minmax(0,1fr)]">
<div className="pb-3 px-2">
<Separator />
</div>
<div className="h-full pl-2 overflow-y-auto grid grid-rows-[auto_minmax(0,1fr)] ">
{activeEvent.eventType === 'client_message' ||
activeEvent.eventType === 'server_message' ? (
<>
<div className="mb-2 select-text cursor-text grid grid-cols-[minmax(0,1fr)_auto] items-center">
<div className="font-semibold">
Message {activeEvent.eventType === 'client_message' ? 'Sent' : 'Received'}
</div>
<IconButton
title="Copy message"
icon="copy"
size="xs"
onClick={() => copyToClipboard(activeEvent.content)}
/>
</div>
{!showLarge && activeEvent.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={activeEvent.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
</>
) : (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<div>
<div className="select-text cursor-text font-semibold">
{activeEvent.content}
</div>
{activeEvent.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{activeEvent.error}
</div>
)}
</div>
<div className="py-2 h-full">
{Object.keys(activeEvent.metadata).length === 0 ? (
<EmptyStateText>
No{' '}
{activeEvent.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(activeEvent.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
)}
</div>
</div>
)}
</div>
</div>
)
: null
<div style={style} className="h-full">
<ErrorBoundary name="GRPC Events">
<EventViewer
events={events}
getEventKey={(event) => event.id}
error={activeConnection.error}
header={header}
splitLayoutName="grpc_events"
defaultRatio={0.4}
renderRow={({ event, isActive, onClick }) => (
<GrpcEventRow event={event} isActive={isActive} onClick={onClick} />
)}
renderDetail={({ event }) => (
<GrpcEventDetail
event={event}
showLarge={showLarge}
showingLarge={showingLarge}
setShowLarge={setShowLarge}
setShowingLarge={setShowingLarge}
/>
)}
/>
</ErrorBoundary>
</div>
);
}
function GrpcEventRow({
event,
isActive,
onClick,
}: {
event: GrpcEvent;
isActive: boolean;
onClick: () => void;
}) {
const { eventType, status, content, error } = event;
const display = getEventDisplay(eventType, status);
return (
<EventViewerRow
isActive={isActive}
onClick={onClick}
icon={<Icon color={display.color} title={display.title} icon={display.icon} />}
content={
<span className="text-xs">
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</span>
}
timestamp={event.createdAt}
/>
);
}
function EventRow({
onClick,
isActive,
function GrpcEventDetail({
event,
showLarge,
showingLarge,
setShowLarge,
setShowingLarge,
}: {
onClick?: () => void;
isActive?: boolean;
event: GrpcEvent;
showLarge: boolean;
showingLarge: boolean;
setShowLarge: (v: boolean) => void;
setShowingLarge: (v: boolean) => void;
}) {
const { eventType, status, createdAt, content, error } = event;
const ref = useRef<HTMLDivElement>(null);
if (event.eventType === 'client_message' || event.eventType === 'server_message') {
const title = `Message ${event.eventType === 'client_message' ? 'Sent' : 'Received'}`;
return (
<div className="px-1" ref={ref}>
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,3fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader title={title} timestamp={event.createdAt} copyText={event.content} />
{!showLarge && event.content.length > 1000 * 1000 ? (
<VStack space={2} className="italic text-text-subtlest">
Message previews larger than 1MB are hidden
<div>
<Button
onClick={() => {
setShowingLarge(true);
setTimeout(() => {
setShowLarge(true);
setShowingLarge(false);
}, 500);
}}
isLoading={showingLarge}
color="secondary"
variant="border"
size="xs"
>
Try Showing
</Button>
</div>
</VStack>
) : (
<Editor
language="json"
defaultValue={event.content ?? ''}
wrapLines={false}
readOnly={true}
stateKey={null}
/>
)}
>
<Icon
color={
eventType === 'server_message'
? 'info'
: eventType === 'client_message'
? 'primary'
: eventType === 'error' || (status != null && status > 0)
? 'danger'
: eventType === 'connection_end'
? 'success'
: undefined
}
title={
eventType === 'server_message'
? 'Server message'
: eventType === 'client_message'
? 'Client message'
: eventType === 'error' || (status != null && status > 0)
? 'Error'
: eventType === 'connection_end'
? 'Connection response'
: undefined
}
icon={
eventType === 'server_message'
? 'arrow_big_down_dash'
: eventType === 'client_message'
? 'arrow_big_up_dash'
: eventType === 'error' || (status != null && status > 0)
? 'alert_triangle'
: eventType === 'connection_end'
? 'check'
: 'info'
}
/>
<div className={classNames('w-full truncate text-xs')}>
{content.slice(0, 1000)}
{error && <span className="text-warning"> ({error})</span>}
</div>
);
}
// Error or connection_end - show metadata/trailers
return (
<div className="h-full grid grid-rows-[auto_minmax(0,1fr)]">
<EventDetailHeader title={event.content} timestamp={event.createdAt} />
{event.error && (
<div className="select-text cursor-text text-sm font-mono py-1 text-warning">
{event.error}
</div>
<div className={classNames('opacity-50 text-xs')}>
{format(`${createdAt}Z`, 'HH:mm:ss.SSS')}
</div>
</button>
)}
<div className="py-2 h-full">
{Object.keys(event.metadata).length === 0 ? (
<EmptyStateText>
No {event.eventType === 'connection_end' ? 'trailers' : 'metadata'}
</EmptyStateText>
) : (
<KeyValueRows>
{Object.entries(event.metadata).map(([key, value]) => (
<KeyValueRow key={key} label={key}>
{value}
</KeyValueRow>
))}
</KeyValueRows>
)}
</div>
</div>
);
}
function getEventDisplay(
eventType: GrpcEvent['eventType'],
status: GrpcEvent['status'],
): { icon: IconProps['icon']; color: IconProps['color']; title: string } {
if (eventType === 'server_message') {
return { icon: 'arrow_big_down_dash', color: 'info', title: 'Server message' };
}
if (eventType === 'client_message') {
return { icon: 'arrow_big_up_dash', color: 'primary', title: 'Client message' };
}
if (eventType === 'error' || (status != null && status > 0)) {
return { icon: 'alert_triangle', color: 'danger', title: 'Error' };
}
if (eventType === 'connection_end') {
return { icon: 'check', color: 'success', title: 'Connection response' };
}
return { icon: 'info', color: undefined, title: 'Event' };
}
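
EventViewer itself is not included in this diff; reconstructing its props from this call site and the HttpResponseTimeline one further down gives roughly the following shape (an inference only, so the real component may differ):

import type { ReactNode } from 'react';

// Inferred, not authoritative: props observed across the two call sites.
export interface EventViewerProps<T> {
  events: T[];
  getEventKey: (event: T) => string;
  error?: string | null;
  isLoading?: boolean;
  loadingMessage?: string;
  emptyMessage?: string;
  header?: ReactNode;
  splitLayoutName: string;
  defaultRatio?: number;
  renderRow: (args: { event: T; isActive: boolean; onClick: () => void }) => ReactNode;
  renderDetail: (args: { event: T; onClose: () => void }) => ReactNode;
}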

View File

@@ -19,6 +19,7 @@ type Props = {
forceUpdateKey: string;
headers: HttpRequestHeader[];
inheritedHeaders?: HttpRequestHeader[];
inheritedHeadersLabel?: string;
stateKey: string;
onChange: (headers: HttpRequestHeader[]) => void;
label?: string;
@@ -28,20 +29,36 @@ export function HeadersEditor({
stateKey,
headers,
inheritedHeaders,
inheritedHeadersLabel = 'Inherited',
onChange,
forceUpdateKey,
}: Props) {
// Get header names defined at current level (case-insensitive)
const currentHeaderNames = new Set(
headers.filter((h) => h.name).map((h) => h.name.toLowerCase()),
);
// Filter inherited headers: must be enabled, have content, and not be overridden by current level
const validInheritedHeaders =
inheritedHeaders?.filter((pair) => pair.enabled && (pair.name || pair.value)) ?? [];
inheritedHeaders?.filter(
(pair) =>
pair.enabled && (pair.name || pair.value) && !currentHeaderNames.has(pair.name.toLowerCase()),
) ?? [];
const hasInheritedHeaders = validInheritedHeaders.length > 0;
return (
<div className="@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5">
{validInheritedHeaders.length > 0 ? (
<div
className={
hasInheritedHeaders
? '@container w-full h-full grid grid-rows-[auto_minmax(0,1fr)] gap-y-1.5'
: '@container w-full h-full'
}
>
{hasInheritedHeaders && (
<DetailsBanner
color="secondary"
className="text-sm"
summary={
<HStack>
Inherited <CountBadge count={validInheritedHeaders.length} />
{inheritedHeadersLabel} <CountBadge count={validInheritedHeaders.length} />
</HStack>
}
>
@@ -63,8 +80,6 @@ export function HeadersEditor({
))}
</div>
</DetailsBanner>
) : (
<span />
)}
<PairOrBulkEditor
forceUpdateKey={forceUpdateKey}
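
The filtering rule added above can be read in isolation: an inherited header is shown only if it is enabled, non-empty, and not overridden (case-insensitively, by name) at the current level. A standalone sketch with simplified types:

type Header = { name: string; value: string; enabled?: boolean };

export function visibleInheritedHeaders(current: Header[], inherited: Header[]): Header[] {
  // Header names defined at the current level, lower-cased for comparison
  const currentNames = new Set(current.filter((h) => h.name).map((h) => h.name.toLowerCase()));
  return inherited.filter(
    (h) => h.enabled && (h.name || h.value) && !currentNames.has(h.name.toLowerCase()),
  );
}

// Example: a folder-level "content-type" hides the inherited "Content-Type".
visibleInheritedHeaders(
  [{ name: 'content-type', value: 'application/json', enabled: true }],
  [{ name: 'Content-Type', value: 'text/plain', enabled: true }],
); // => []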

View File

@@ -62,7 +62,7 @@ export function HttpAuthenticationEditor({ model }: Props) {
<p>
Apply auth to all requests in <strong>{resolvedModelName(model)}</strong>
</p>
<Link href="https://feedback.yaak.app/help/articles/2112119-request-inheritance">
<Link href="https://yaak.app/docs/using-yaak/request-inheritance">
Documentation
</Link>
</EmptyStateText>

View File

@@ -4,7 +4,7 @@ import type { GenericCompletionOption } from '@yaakapp-internal/plugins';
import classNames from 'classnames';
import { atom, useAtomValue } from 'jotai';
import type { CSSProperties } from 'react';
import { lazy, Suspense, useCallback, useMemo, useState } from 'react';
import { lazy, Suspense, useCallback, useMemo, useRef, useState } from 'react';
import { activeRequestIdAtom } from '../hooks/useActiveRequestId';
import { allRequestsAtom } from '../hooks/useAllRequests';
import { useAuthTab } from '../hooks/useAuthTab';
@@ -12,7 +12,6 @@ import { useCancelHttpResponse } from '../hooks/useCancelHttpResponse';
import { useHeadersTab } from '../hooks/useHeadersTab';
import { useImportCurl } from '../hooks/useImportCurl';
import { useInheritedHeaders } from '../hooks/useInheritedHeaders';
import { useKeyValue } from '../hooks/useKeyValue';
import { usePinnedHttpResponse } from '../hooks/usePinnedHttpResponse';
import { useRequestEditor, useRequestEditorEvent } from '../hooks/useRequestEditor';
import { useRequestUpdateKey } from '../hooks/useRequestUpdateKey';
@@ -42,8 +41,8 @@ import { Editor } from './core/Editor/LazyEditor';
import { InlineCode } from './core/InlineCode';
import type { Pair } from './core/PairEditor';
import { PlainInput } from './core/PlainInput';
import type { TabItem } from './core/Tabs/Tabs';
import { TabContent, Tabs } from './core/Tabs/Tabs';
import type { TabItem, TabsRef } from './core/Tabs/Tabs';
import { setActiveTab, TabContent, Tabs } from './core/Tabs/Tabs';
import { EmptyStateText } from './EmptyStateText';
import { FormMultipartEditor } from './FormMultipartEditor';
import { FormUrlencodedEditor } from './FormUrlencodedEditor';
@@ -70,6 +69,7 @@ const TAB_PARAMS = 'params';
const TAB_HEADERS = 'headers';
const TAB_AUTH = 'auth';
const TAB_DESCRIPTION = 'description';
const TABS_STORAGE_KEY = 'http_request_tabs';
const nonActiveRequestUrlsAtom = atom((get) => {
const activeRequestId = get(activeRequestIdAtom);
@@ -83,19 +83,20 @@ const memoNotActiveRequestUrlsAtom = deepEqualAtom(nonActiveRequestUrlsAtom);
export function HttpRequestPane({ style, fullHeight, className, activeRequest }: Props) {
const activeRequestId = activeRequest.id;
const { value: activeTabs, set: setActiveTabs } = useKeyValue<Record<string, string>>({
namespace: 'no_sync',
key: 'httpRequestActiveTabs',
fallback: {},
});
const tabsRef = useRef<TabsRef>(null);
const [forceUpdateHeaderEditorKey, setForceUpdateHeaderEditorKey] = useState<number>(0);
const forceUpdateKey = useRequestUpdateKey(activeRequest.id ?? null);
const [{ urlKey }, { focusParamsTab, forceUrlRefresh, forceParamsRefresh }] = useRequestEditor();
const [{ urlKey }, { forceUrlRefresh, forceParamsRefresh }] = useRequestEditor();
const contentType = getContentTypeFromHeaders(activeRequest.headers);
const authTab = useAuthTab(TAB_AUTH, activeRequest);
const headersTab = useHeadersTab(TAB_HEADERS, activeRequest);
const inheritedHeaders = useInheritedHeaders(activeRequest);
// Listen for event to focus the params tab (e.g., when clicking a :param in the URL)
useRequestEditorEvent('request_pane.focus_tab', () => {
tabsRef.current?.setActiveTab(TAB_PARAMS);
}, []);
const handleContentTypeChange = useCallback(
async (contentType: string | null, patch: Partial<Omit<HttpRequest, 'headers'>> = {}) => {
if (activeRequest == null) {
@@ -260,18 +261,6 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
[activeRequest],
);
const activeTab = activeTabs?.[activeRequestId];
const setActiveTab = useCallback(
async (tab: string) => {
await setActiveTabs((r) => ({ ...r, [activeRequest.id]: tab }));
},
[activeRequest.id, setActiveTabs],
);
useRequestEditorEvent('request_pane.focus_tab', async () => {
await setActiveTab(TAB_PARAMS);
});
const autocompleteUrls = useAtomValue(memoNotActiveRequestUrlsAtom);
const autocomplete: GenericCompletionConfig = useMemo(
@@ -298,7 +287,11 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
e.preventDefault(); // Prevent input onChange
await patchModel(activeRequest, patch);
focusParamsTab();
await setActiveTab({
storageKey: TABS_STORAGE_KEY,
activeTabKey: activeRequestId,
value: TAB_PARAMS,
});
// Wait for request to update, then refresh the UI
// TODO: Somehow make this deterministic
@@ -309,14 +302,7 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
}
}
},
[
activeRequest,
activeRequestId,
focusParamsTab,
forceParamsRefresh,
forceUrlRefresh,
importCurl,
],
[activeRequest, activeRequestId, forceParamsRefresh, forceUrlRefresh, importCurl],
);
const handleSend = useCallback(
() => sendRequest(activeRequest.id ?? null),
@@ -354,12 +340,12 @@ export function HttpRequestPane({ style, fullHeight, className, activeRequest }:
isLoading={activeResponse != null && activeResponse.state !== 'closed'}
/>
<Tabs
value={activeTab}
ref={tabsRef}
label="Request"
onChangeValue={setActiveTab}
tabs={tabs}
tabListClassName="mt-1 mb-1.5"
storageKey="http_request_tabs_order"
tabListClassName="mt-1 -mb-1.5"
storageKey={TABS_STORAGE_KEY}
activeTabKey={activeRequestId}
>
<TabContent value={TAB_AUTH}>
<HttpAuthenticationEditor model={activeRequest} />
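
Two programmatic ways to switch tabs appear in the hunks above: through the TabsRef returned by the component, and through the exported setActiveTab helper addressed by storage key. An illustrative component combining both (everything except the imports is made up for the sketch):

import { useRef } from 'react';
import { setActiveTab, TabContent, Tabs, type TabsRef } from './core/Tabs/Tabs';

const TABS_STORAGE_KEY = 'http_request_tabs';
const TAB_PARAMS = 'params';

export function FocusParamsExample({ requestId }: { requestId: string }) {
  const tabsRef = useRef<TabsRef>(null);
  return (
    <>
      {/* 1) Via the ref, when this component renders the <Tabs> itself */}
      <button type="button" onClick={() => tabsRef.current?.setActiveTab(TAB_PARAMS)}>
        Focus params (ref)
      </button>
      {/* 2) Via the exported helper, addressing the Tabs by its storage key */}
      <button
        type="button"
        onClick={() =>
          setActiveTab({ storageKey: TABS_STORAGE_KEY, activeTabKey: requestId, value: TAB_PARAMS })
        }
      >
        Focus params (helper)
      </button>
      <Tabs
        ref={tabsRef}
        label="Request"
        storageKey={TABS_STORAGE_KEY}
        activeTabKey={requestId}
        tabs={[{ value: TAB_PARAMS, label: 'Params' }]}
      >
        <TabContent value={TAB_PARAMS}>Params editor goes here</TabContent>
      </Tabs>
    </>
  );
}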

View File

@@ -1,15 +1,15 @@
import type { HttpResponse } from '@yaakapp-internal/models';
import classNames from 'classnames';
import type { ComponentType, CSSProperties } from 'react';
import { lazy, Suspense, useCallback, useMemo } from 'react';
import { useLocalStorage } from 'react-use';
import { lazy, Suspense, useMemo } from 'react';
import { useCancelHttpResponse } from '../hooks/useCancelHttpResponse';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { usePinnedHttpResponse } from '../hooks/usePinnedHttpResponse';
import { useResponseBodyBytes, useResponseBodyText } from '../hooks/useResponseBodyText';
import { useResponseViewMode } from '../hooks/useResponseViewMode';
import { useTimelineViewMode } from '../hooks/useTimelineViewMode';
import { getMimeTypeFromContentType } from '../lib/contentType';
import { getContentTypeFromHeaders } from '../lib/model_util';
import { getContentTypeFromHeaders, getCookieCounts } from '../lib/model_util';
import { ConfirmLargeResponse } from './ConfirmLargeResponse';
import { ConfirmLargeResponseRequest } from './ConfirmLargeResponseRequest';
import { Banner } from './core/Banner';
@@ -55,32 +55,18 @@ const TAB_HEADERS = 'headers';
const TAB_COOKIES = 'cookies';
const TAB_TIMELINE = 'timeline';
export type TimelineViewMode = 'timeline' | 'text';
export function HttpResponsePane({ style, className, activeRequestId }: Props) {
const { activeResponse, setPinnedResponseId, responses } = usePinnedHttpResponse(activeRequestId);
const [viewMode, setViewMode] = useResponseViewMode(activeResponse?.requestId);
const [activeTabs, setActiveTabs] = useLocalStorage<Record<string, string>>(
'responsePaneActiveTabs',
{},
);
const [timelineViewMode, setTimelineViewMode] = useTimelineViewMode();
const contentType = getContentTypeFromHeaders(activeResponse?.headers ?? null);
const mimeType = contentType == null ? null : getMimeTypeFromContentType(contentType).essence;
const responseEvents = useHttpResponseEvents(activeResponse);
const cookieCount = useMemo(() => {
if (!responseEvents.data) return 0;
let count = 0;
for (const event of responseEvents.data) {
const e = event.event;
if (
(e.type === 'header_up' && e.name.toLowerCase() === 'cookie') ||
(e.type === 'header_down' && e.name.toLowerCase() === 'set-cookie')
) {
count++;
}
}
return count;
}, [responseEvents.data]);
const cookieCounts = useMemo(() => getCookieCounts(responseEvents.data), [responseEvents.data]);
const tabs = useMemo<TabItem[]>(
() => [
@@ -92,7 +78,9 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
onChange: setViewMode,
items: [
{ label: 'Response', value: 'pretty' },
...(mimeType?.startsWith('image') ? [] : [{ label: 'Raw', value: 'raw' }]),
...(mimeType?.startsWith('image')
? []
: [{ label: 'Response (Raw)', shortLabel: 'Raw', value: 'raw' }]),
],
},
},
@@ -107,40 +95,47 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
label: 'Headers',
rightSlot: (
<CountBadge
count2={activeResponse?.headers.length ?? 0}
count={activeResponse?.requestHeaders.length ?? 0}
count2={activeResponse?.headers.length ?? 0}
showZero
/>
),
},
{
value: TAB_COOKIES,
label: 'Cookies',
rightSlot: cookieCount > 0 ? <CountBadge count={cookieCount} /> : null,
rightSlot:
cookieCounts.sent > 0 || cookieCounts.received > 0 ? (
<CountBadge count={cookieCounts.sent} count2={cookieCounts.received} showZero />
) : null,
},
{
value: TAB_TIMELINE,
label: 'Timeline',
rightSlot: <CountBadge count={responseEvents.data?.length ?? 0} />,
options: {
value: timelineViewMode,
onChange: (v) => setTimelineViewMode((v as TimelineViewMode) ?? 'timeline'),
items: [
{ label: 'Timeline', value: 'timeline' },
{ label: 'Timeline (Text)', shortLabel: 'Timeline', value: 'text' },
],
},
},
],
[
activeResponse?.headers,
activeResponse?.requestContentLength,
activeResponse?.requestHeaders.length,
cookieCount,
cookieCounts.sent,
cookieCounts.received,
mimeType,
responseEvents.data?.length,
setViewMode,
viewMode,
timelineViewMode,
setTimelineViewMode,
],
);
const activeTab = activeTabs?.[activeRequestId];
const setActiveTab = useCallback(
(tab: string) => {
setActiveTabs((r) => ({ ...r, [activeRequestId]: tab }));
},
[activeRequestId, setActiveTabs],
);
const cancel = useCancelHttpResponse(activeResponse?.id ?? null);
@@ -204,14 +199,12 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
)}
{/* Show tabs if we have any data (headers, body, etc.) even if there's an error */}
<Tabs
key={activeRequestId} // Freshen tabs on request change
value={activeTab}
onChangeValue={setActiveTab}
tabs={tabs}
label="Response"
className="ml-3 mr-3 mb-3 min-h-0 flex-1"
tabListClassName="mt-0.5"
storageKey="http_response_tabs_order"
tabListClassName="mt-0.5 -mb-1.5"
storageKey="http_response_tabs"
activeTabKey={activeRequestId}
>
<TabContent value={TAB_BODY}>
<ErrorBoundary name="Http Response Viewer">
@@ -271,7 +264,7 @@ export function HttpResponsePane({ style, className, activeRequestId }: Props) {
<ResponseCookies response={activeResponse} />
</TabContent>
<TabContent value={TAB_TIMELINE}>
<HttpResponseTimeline response={activeResponse} />
<HttpResponseTimeline response={activeResponse} viewMode={timelineViewMode} />
</TabContent>
</Tabs>
</div>
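
getCookieCounts now comes from lib/model_util, whose body is not part of this diff. Judging from the inline counting it replaces and the sent/received badge it feeds, a plausible sketch (simplified types, not the actual implementation) is:

type TimelineEvent = { event: { type: string; name?: string } };

// Sketch only: count "cookie" headers sent up and "set-cookie" headers received.
export function getCookieCounts(events: TimelineEvent[] | undefined): { sent: number; received: number } {
  let sent = 0;
  let received = 0;
  for (const { event } of events ?? []) {
    if (event.type === 'header_up' && event.name?.toLowerCase() === 'cookie') sent++;
    else if (event.type === 'header_down' && event.name?.toLowerCase() === 'set-cookie') received++;
  }
  return { sent, received };
}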

View File

@@ -3,186 +3,192 @@ import type {
HttpResponseEvent,
HttpResponseEventData,
} from '@yaakapp-internal/models';
import classNames from 'classnames';
import { format } from 'date-fns';
import { type ReactNode, useMemo, useState } from 'react';
import { useHttpResponseEvents } from '../hooks/useHttpResponseEvents';
import { AutoScroller } from './core/AutoScroller';
import { Banner } from './core/Banner';
import { Editor } from './core/Editor/LazyEditor';
import { type EventDetailAction, EventDetailHeader, EventViewer } from './core/EventViewer';
import { EventViewerRow } from './core/EventViewerRow';
import { HttpMethodTagRaw } from './core/HttpMethodTag';
import { HttpStatusTagRaw } from './core/HttpStatusTag';
import { Icon, type IconProps } from './core/Icon';
import { KeyValueRow, KeyValueRows } from './core/KeyValueRow';
import { Separator } from './core/Separator';
import { SplitLayout } from './core/SplitLayout';
import type { TimelineViewMode } from './HttpResponsePane';
interface Props {
response: HttpResponse;
viewMode: TimelineViewMode;
}
export function HttpResponseTimeline({ response }: Props) {
return <Inner key={response.id} response={response} />;
export function HttpResponseTimeline({ response, viewMode }: Props) {
return <Inner key={response.id} response={response} viewMode={viewMode} />;
}
function Inner({ response }: Props) {
const [activeEventIndex, setActiveEventIndex] = useState<number | null>(null);
function Inner({ response, viewMode }: Props) {
const [showRaw, setShowRaw] = useState(false);
const { data: events, error, isLoading } = useHttpResponseEvents(response);
const activeEvent = useMemo(
() => (activeEventIndex == null ? null : events?.[activeEventIndex]),
[activeEventIndex, events],
);
// Generate plain text representation of all events (with prefixes for timeline view)
const plainText = useMemo(() => {
if (!events || events.length === 0) return '';
return events.map((event) => formatEventText(event.event, true)).join('\n');
}, [events]);
if (isLoading) {
return <div className="p-3 text-text-subtlest italic">Loading events...</div>;
}
if (error) {
return (
<Banner color="danger" className="m-3">
{String(error)}
</Banner>
);
}
if (!events || events.length === 0) {
return <div className="p-3 text-text-subtlest italic">No events recorded</div>;
// Plain text view - show all events as text in an editor
if (viewMode === 'text') {
if (isLoading) {
return <div className="p-4 text-text-subtlest">Loading events...</div>;
} else if (error) {
return <div className="p-4 text-danger">{String(error)}</div>;
} else if (!events || events.length === 0) {
return <div className="p-4 text-text-subtlest">No events recorded</div>;
} else {
return (
<Editor language="timeline" defaultValue={plainText} readOnly stateKey={null} hideGutter />
);
}
}
return (
<SplitLayout
layout="vertical"
name="http_response_events"
<EventViewer
events={events ?? []}
getEventKey={(event) => event.id}
error={error ? String(error) : null}
isLoading={isLoading}
loadingMessage="Loading events..."
emptyMessage="No events recorded"
splitLayoutName="http_response_events"
defaultRatio={0.25}
minHeightPx={10}
firstSlot={() => (
<AutoScroller
data={events}
render={(event, i) => (
<EventRow
key={event.id}
event={event}
isActive={i === activeEventIndex}
onClick={() => {
if (i === activeEventIndex) setActiveEventIndex(null);
else setActiveEventIndex(i);
}}
/>
)}
/>
renderRow={({ event, isActive, onClick }) => {
const display = getEventDisplay(event.event);
return (
<EventViewerRow
isActive={isActive}
onClick={onClick}
icon={<Icon color={display.color} icon={display.icon} size="sm" />}
content={display.summary}
timestamp={event.createdAt}
/>
);
}}
renderDetail={({ event, onClose }) => (
<EventDetails event={event} showRaw={showRaw} setShowRaw={setShowRaw} onClose={onClose} />
)}
secondSlot={
activeEvent
? () => (
<div className="grid grid-rows-[auto_minmax(0,1fr)]">
<div className="pb-3 px-2">
<Separator />
</div>
<div className="mx-2 overflow-y-auto">
<EventDetails event={activeEvent} />
</div>
</div>
)
: null
}
/>
);
}
function EventRow({
onClick,
isActive,
event,
}: {
onClick: () => void;
isActive: boolean;
event: HttpResponseEvent;
}) {
const display = getEventDisplay(event.event);
const { icon, color, summary } = display;
return (
<div className="px-1">
<button
type="button"
onClick={onClick}
className={classNames(
'w-full grid grid-cols-[auto_minmax(0,1fr)_auto] gap-2 items-center text-left',
'px-1.5 h-xs font-mono text-editor cursor-default group focus:outline-none focus:text-text rounded',
isActive && '!bg-surface-active !text-text',
'text-text-subtle hover:text',
)}
>
<Icon color={color} icon={icon} size="sm" />
<div className="w-full truncate">{summary}</div>
<div className="opacity-50">{format(`${event.createdAt}Z`, 'HH:mm:ss.SSS')}</div>
</button>
</div>
);
}
function formatBytes(bytes: number): string {
if (bytes < 1024) return `${bytes} B`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
}
function EventDetails({ event }: { event: HttpResponseEvent }) {
function EventDetails({
event,
showRaw,
setShowRaw,
onClose,
}: {
event: HttpResponseEvent;
showRaw: boolean;
setShowRaw: (v: boolean) => void;
onClose: () => void;
}) {
const { label } = getEventDisplay(event.event);
const timestamp = format(new Date(`${event.createdAt}Z`), 'HH:mm:ss.SSS');
const e = event.event;
// Headers - show name and value with Editor for JSON
if (e.type === 'header_up' || e.type === 'header_down') {
return (
<div className="flex flex-col gap-2 h-full">
<DetailHeader
title={e.type === 'header_down' ? 'Header Received' : 'Header Sent'}
timestamp={timestamp}
/>
const actions: EventDetailAction[] = [
{
key: 'toggle-raw',
label: showRaw ? 'Formatted' : 'Text',
onClick: () => setShowRaw(!showRaw),
},
];
// Determine the title based on event type
const title = (() => {
switch (e.type) {
case 'header_up':
return 'Header Sent';
case 'header_down':
return 'Header Received';
case 'send_url':
return 'Request';
case 'receive_url':
return 'Response';
case 'redirect':
return 'Redirect';
case 'setting':
return 'Apply Setting';
case 'chunk_sent':
return 'Data Sent';
case 'chunk_received':
return 'Data Received';
case 'dns_resolved':
return e.overridden ? 'DNS Override' : 'DNS Resolution';
default:
return label;
}
})();
// Render content based on view mode and event type
const renderContent = () => {
// Raw view - show plaintext representation (without prefix)
if (showRaw) {
const rawText = formatEventText(event.event, false);
return <Editor language="text" defaultValue={rawText} readOnly stateKey={null} hideGutter />;
}
// Headers - show name and value
if (e.type === 'header_up' || e.type === 'header_down') {
return (
<KeyValueRows>
<KeyValueRow label="Header">{e.name}</KeyValueRow>
<KeyValueRow label="Value">{e.value}</KeyValueRow>
</KeyValueRows>
</div>
);
}
);
}
// Request URL - show method and path separately
if (e.type === 'send_url') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Request" timestamp={timestamp} />
// Request URL - show all URL parts separately
if (e.type === 'send_url') {
const auth = e.username || e.password ? `${e.username}:${e.password}@` : '';
const isDefaultPort =
(e.scheme === 'http' && e.port === 80) || (e.scheme === 'https' && e.port === 443);
const portStr = isDefaultPort ? '' : `:${e.port}`;
const query = e.query ? `?${e.query}` : '';
const fragment = e.fragment ? `#${e.fragment}` : '';
const fullUrl = `${e.scheme}://${auth}${e.host}${portStr}${e.path}${query}${fragment}`;
return (
<KeyValueRows>
<KeyValueRow label="Method">
<HttpMethodTagRaw forceColor method={e.method} />
</KeyValueRow>
<KeyValueRow label="URL">{fullUrl}</KeyValueRow>
<KeyValueRow label="Method">{e.method}</KeyValueRow>
<KeyValueRow label="Scheme">{e.scheme}</KeyValueRow>
{e.username ? <KeyValueRow label="Username">{e.username}</KeyValueRow> : null}
{e.password ? <KeyValueRow label="Password">{e.password}</KeyValueRow> : null}
<KeyValueRow label="Host">{e.host}</KeyValueRow>
{!isDefaultPort ? <KeyValueRow label="Port">{e.port}</KeyValueRow> : null}
<KeyValueRow label="Path">{e.path}</KeyValueRow>
{e.query ? <KeyValueRow label="Query">{e.query}</KeyValueRow> : null}
{e.fragment ? <KeyValueRow label="Fragment">{e.fragment}</KeyValueRow> : null}
</KeyValueRows>
</div>
);
}
);
}
// Response status - show version and status separately
if (e.type === 'receive_url') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Response" timestamp={timestamp} />
// Response status - show version and status separately
if (e.type === 'receive_url') {
return (
<KeyValueRows>
<KeyValueRow label="HTTP Version">{e.version}</KeyValueRow>
<KeyValueRow label="Status">
<HttpStatusTagRaw status={e.status} />
</KeyValueRow>
</KeyValueRows>
</div>
);
}
);
}
// Redirect - show status, URL, and behavior
if (e.type === 'redirect') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Redirect" timestamp={timestamp} />
// Redirect - show status, URL, and behavior
if (e.type === 'redirect') {
return (
<KeyValueRows>
<KeyValueRow label="Status">
<HttpStatusTagRaw status={e.status} />
@@ -192,51 +198,107 @@ function EventDetails({ event }: { event: HttpResponseEvent }) {
{e.behavior === 'drop_body' ? 'Drop body, change to GET' : 'Preserve method and body'}
</KeyValueRow>
</KeyValueRows>
</div>
);
}
);
}
// Settings - show as key/value
if (e.type === 'setting') {
return (
<div className="flex flex-col gap-2">
<DetailHeader title="Apply Setting" timestamp={timestamp} />
// Settings - show as key/value
if (e.type === 'setting') {
return (
<KeyValueRows>
<KeyValueRow label="Setting">{e.name}</KeyValueRow>
<KeyValueRow label="Value">{e.value}</KeyValueRow>
</KeyValueRows>
</div>
);
}
);
}
// Chunks - show formatted bytes
if (e.type === 'chunk_sent' || e.type === 'chunk_received') {
const direction = e.type === 'chunk_sent' ? 'Sent' : 'Received';
return (
<div className="flex flex-col gap-2">
<DetailHeader title={`Data ${direction}`} timestamp={timestamp} />
<div className="font-mono text-editor">{formatBytes(e.bytes)}</div>
</div>
);
}
// Chunks - show formatted bytes
if (e.type === 'chunk_sent' || e.type === 'chunk_received') {
return <div className="font-mono text-editor">{formatBytes(e.bytes)}</div>;
}
// Default - use summary
const { summary } = getEventDisplay(event.event);
// DNS Resolution - show hostname, addresses, and timing
if (e.type === 'dns_resolved') {
return (
<KeyValueRows>
<KeyValueRow label="Hostname">{e.hostname}</KeyValueRow>
<KeyValueRow label="Addresses">{e.addresses.join(', ')}</KeyValueRow>
<KeyValueRow label="Duration">
{e.overridden ? (
<span className="text-text-subtlest">--</span>
) : (
`${String(e.duration)}ms`
)}
</KeyValueRow>
{e.overridden ? <KeyValueRow label="Source">Workspace Override</KeyValueRow> : null}
</KeyValueRows>
);
}
// Default - use summary
const { summary } = getEventDisplay(event.event);
return <div className="font-mono text-editor">{summary}</div>;
};
return (
<div className="flex flex-col gap-1">
<DetailHeader title={label} timestamp={timestamp} />
<div className="font-mono text-editor">{summary}</div>
<div className="flex flex-col gap-2 h-full">
<EventDetailHeader
title={title}
timestamp={event.createdAt}
actions={actions}
onClose={onClose}
/>
{renderContent()}
</div>
);
}
function DetailHeader({ title, timestamp }: { title: string; timestamp: string }) {
return (
<div className="flex items-center justify-between gap-2">
<h3 className="font-semibold select-auto cursor-auto">{title}</h3>
<span className="text-text-subtlest font-mono text-editor">{timestamp}</span>
</div>
);
type EventTextParts = { prefix: '>' | '<' | '*'; text: string };
/** Get the prefix and text for an event */
function getEventTextParts(event: HttpResponseEventData): EventTextParts {
switch (event.type) {
case 'send_url':
return {
prefix: '>',
text: `${event.method} ${event.path}${event.query ? `?${event.query}` : ''}${event.fragment ? `#${event.fragment}` : ''}`,
};
case 'receive_url':
return { prefix: '<', text: `${event.version} ${event.status}` };
case 'header_up':
return { prefix: '>', text: `${event.name}: ${event.value}` };
case 'header_down':
return { prefix: '<', text: `${event.name}: ${event.value}` };
case 'redirect': {
const behavior = event.behavior === 'drop_body' ? 'drop body' : 'preserve';
return { prefix: '*', text: `Redirect ${event.status} -> ${event.url} (${behavior})` };
}
case 'setting':
return { prefix: '*', text: `Setting ${event.name}=${event.value}` };
case 'info':
return { prefix: '*', text: event.message };
case 'chunk_sent':
return { prefix: '*', text: `[${formatBytes(event.bytes)} sent]` };
case 'chunk_received':
return { prefix: '*', text: `[${formatBytes(event.bytes)} received]` };
case 'dns_resolved':
if (event.overridden) {
return {
prefix: '*',
text: `DNS override ${event.hostname} -> ${event.addresses.join(', ')}`,
};
}
return {
prefix: '*',
text: `DNS resolved ${event.hostname} to ${event.addresses.join(', ')} (${event.duration}ms)`,
};
default:
return { prefix: '*', text: '[unknown event]' };
}
}
/** Format event as plaintext, optionally with curl-style prefix (> outgoing, < incoming, * info) */
function formatEventText(event: HttpResponseEventData, includePrefix: boolean): string {
const { prefix, text } = getEventTextParts(event);
return includePrefix ? `${prefix} ${text}` : text;
}
type EventDisplay = {
@@ -265,7 +327,7 @@ function getEventDisplay(event: HttpResponseEventData): EventDisplay {
case 'redirect':
return {
icon: 'arrow_big_right_dash',
color: 'warning',
color: 'success',
label: 'Redirect',
summary: `Redirecting ${event.status} ${event.url}${event.behavior === 'drop_body' ? ' (drop body)' : ''}`,
};
@@ -274,7 +336,7 @@ function getEventDisplay(event: HttpResponseEventData): EventDisplay {
icon: 'arrow_big_up_dash',
color: 'primary',
label: 'Request',
summary: `${event.method} ${event.path}`,
summary: `${event.method} ${event.path}${event.query ? `?${event.query}` : ''}${event.fragment ? `#${event.fragment}` : ''}`,
};
case 'receive_url':
return {
@@ -312,6 +374,15 @@ function getEventDisplay(event: HttpResponseEventData): EventDisplay {
label: 'Chunk',
summary: `${formatBytes(event.bytes)} chunk received`,
};
case 'dns_resolved':
return {
icon: 'globe',
color: event.overridden ? 'success' : 'secondary',
label: event.overridden ? 'DNS Override' : 'DNS',
summary: event.overridden
          ? `${event.hostname} -> ${event.addresses.join(', ')} (overridden)`
          : `${event.hostname} -> ${event.addresses.join(', ')} (${event.duration}ms)`,
};
default:
return {
icon: 'info',
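
With getEventTextParts in place, the text view of the Timeline tab reads like curl's verbose output: '>' for outgoing lines, '<' for incoming ones, '*' for informational events. An illustrative (made-up) exchange would render roughly as:

> GET /v1/items?limit=10
> accept: application/json
< HTTP/2 200
< set-cookie: session=abc123
* [1.2 KB received]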

View File

@@ -71,7 +71,7 @@ export const RequestMethodDropdown = memo(function RequestMethodDropdown({
onChange={handleChange}
>
<Button size="xs" className={classNames(className, 'text-text-subtle hover:text')}>
<HttpMethodTag request={request} />
<HttpMethodTag request={request} noAlias />
</Button>
</RadioDropdown>
);

Some files were not shown because too many files have changed in this diff.