mirror of https://github.com/mountain-loop/yaak.git (synced 2026-02-02 02:32:07 -05:00)

Compare commits: omnara/rep… ... actions-sy… (58 commits)
Commits in this range (SHA1):
986143c4ae, 50b0e23d53, c4ce458f79, f02ae35634, c2f068970b, eec2d6bc38, efa22e470e, c00d2e981f, 9c45254952, d031ff231a,
f056894ddb, 1b0315165f, bd7e840a57, 8969748c3c, 4e15ac10a6, 47a3d44888, eb10910d20, 6ba83d424d, beb47a6b6a, 1893b8f8dd,
7a5bca7aae, 9a75bc2ae7, 65514e3882, 9ddaafb79f, de47ee19ec, ea730d0184, fe706998d4, 99209e088f, 3eb29ff2fe, b759003c83,
6cba38ac89, ba8f85baaf, 9970d5fa6f, d550b42ca3, 2e1f0cb53f, eead422ada, b5753da3b7, ae2f2459e9, 306e6f358a, 822d52a57e,
e665ce04df, e4828e1b17, 42143249a2, 72a7e6963d, 494e9efb64, 9fe077f598, a6eca1cf2e, 31edd1013f, 28e9657ea5, ff084a224a,
bbcae34575, 2a5587c128, c41e173a63, 2b43407ddf, 4d75b8ef06, aa79fb05f9, fe01796536, 6654d6c346
@@ -37,3 +37,11 @@ The skill generates markdown-formatted release notes following this structure:

**IMPORTANT**: Always add blank lines around the markdown code fence and output the markdown code block last

**IMPORTANT**: PRs by `@gschier` should not mention the @username

## After Generating Release Notes

After outputting the release notes, ask the user if they would like to create a draft GitHub release with these notes. If they confirm, create the release using:

```bash
gh release create <tag> --draft --prerelease --title "<tag>" --notes '<release notes>'
```
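
For example, a concrete invocation might look like the sketch below. The tag `v9.9.9` and the notes file path are hypothetical placeholders, not values from this diff; `--notes-file` (a standard `gh` flag) is used instead of `--notes` to avoid shell-quoting issues with multi-line notes.

```bash
# Illustrative only: tag and file path are placeholders, not from this change set.
# Write the generated notes to a file first, then create the draft pre-release.
cat > /tmp/release-notes.md <<'EOF'
## What's New
- Example change line
EOF

gh release create v9.9.9 --draft --prerelease --title "v9.9.9" --notes-file /tmp/release-notes.md
```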
.github/workflows/release.yml (vendored, 96 changed lines)
@@ -1,7 +1,7 @@
name: Generate Artifacts
on:
push:
tags: [ v* ]
tags: [v*]

jobs:
build-artifacts:
@@ -13,37 +13,37 @@ jobs:
fail-fast: false
matrix:
include:
- platform: 'macos-latest' # for Arm-based Macs (M1 and above).
args: '--target aarch64-apple-darwin'
yaak_arch: 'arm64'
os: 'macos'
targets: 'aarch64-apple-darwin'
- platform: 'macos-latest' # for Intel-based Macs.
args: '--target x86_64-apple-darwin'
yaak_arch: 'x64'
os: 'macos'
targets: 'x86_64-apple-darwin'
- platform: 'ubuntu-22.04'
args: ''
yaak_arch: 'x64'
os: 'ubuntu'
targets: ''
- platform: 'ubuntu-22.04-arm'
args: ''
yaak_arch: 'arm64'
os: 'ubuntu'
targets: ''
- platform: 'windows-latest'
args: ''
yaak_arch: 'x64'
os: 'windows'
targets: ''
- platform: "macos-latest" # for Arm-based Macs (M1 and above).
args: "--target aarch64-apple-darwin"
yaak_arch: "arm64"
os: "macos"
targets: "aarch64-apple-darwin"
- platform: "macos-latest" # for Intel-based Macs.
args: "--target x86_64-apple-darwin"
yaak_arch: "x64"
os: "macos"
targets: "x86_64-apple-darwin"
- platform: "ubuntu-22.04"
args: ""
yaak_arch: "x64"
os: "ubuntu"
targets: ""
- platform: "ubuntu-22.04-arm"
args: ""
yaak_arch: "arm64"
os: "ubuntu"
targets: ""
- platform: "windows-latest"
args: ""
yaak_arch: "x64"
os: "windows"
targets: ""
# Windows ARM64
- platform: 'windows-latest'
args: '--target aarch64-pc-windows-msvc'
yaak_arch: 'arm64'
os: 'windows'
targets: 'aarch64-pc-windows-msvc'
- platform: "windows-latest"
args: "--target aarch64-pc-windows-msvc"
yaak_arch: "arm64"
os: "windows"
targets: "aarch64-pc-windows-msvc"
runs-on: ${{ matrix.platform }}
timeout-minutes: 40
steps:
@@ -88,6 +88,9 @@ jobs:
& $exe --version

- run: npm ci
- run: npm run bootstrap
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
- run: npm run lint
- name: Run JS Tests
run: npm test
@@ -99,6 +102,29 @@ jobs:
env:
YAAK_VERSION: ${{ github.ref_name }}

- name: Sign vendored binaries (macOS only)
if: matrix.os == 'macos'
env:
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
run: |
# Create keychain
KEYCHAIN_PATH=$RUNNER_TEMP/app-signing.keychain-db
security create-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH
security set-keychain-settings -lut 21600 $KEYCHAIN_PATH
security unlock-keychain -p "$KEYCHAIN_PASSWORD" $KEYCHAIN_PATH

# Import certificate
echo "$APPLE_CERTIFICATE" | base64 --decode > certificate.p12
security import certificate.p12 -P "$APPLE_CERTIFICATE_PASSWORD" -A -t cert -f pkcs12 -k $KEYCHAIN_PATH
security list-keychain -d user -s $KEYCHAIN_PATH

# Sign vendored binaries with hardened runtime and their specific entitlements
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaakprotoc.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/protoc/yaakprotoc || true
codesign --force --options runtime --entitlements crates-tauri/yaak-app/macos/entitlements.yaaknode.plist --sign "$APPLE_SIGNING_IDENTITY" crates-tauri/yaak-app/vendored/node/yaaknode || true

- uses: tauri-apps/tauri-action@v0
env:
YAAK_TARGET_ARCH: ${{ matrix.yaak_arch }}
@@ -121,9 +147,9 @@ jobs:
AZURE_CLIENT_SECRET: ${{ matrix.os == 'windows' && secrets.AZURE_CLIENT_SECRET }}
AZURE_TENANT_ID: ${{ matrix.os == 'windows' && secrets.AZURE_TENANT_ID }}
with:
tagName: 'v__VERSION__'
releaseName: 'Release __VERSION__'
releaseBody: '[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)'
tagName: "v__VERSION__"
releaseName: "Release __VERSION__"
releaseBody: "[Changelog __VERSION__](https://yaak.app/blog/__VERSION__)"
releaseDraft: true
prerelease: true
args: '${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json'
args: "${{ matrix.args }} --config ./crates-tauri/yaak-app/tauri.release.conf.json"
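
The new signing step above runs `codesign` with the hardened runtime and per-binary entitlements. A minimal local sanity check, not part of the workflow and shown only as an assumed-correct sketch, could confirm that a vendored binary actually ended up signed with the runtime flag:

```bash
# Hypothetical local verification of the vendored Node binary after signing.
# Paths mirror the workflow; adjust them if your checkout layout differs.
codesign --verify --verbose crates-tauri/yaak-app/vendored/node/yaaknode
# CodeDirectory flags should include "runtime" when the hardened runtime is enabled.
codesign --display --verbose=2 crates-tauri/yaak-app/vendored/node/yaaknode 2>&1 | grep -i runtime
```
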
Cargo.lock (generated, 33 changed lines)
@@ -7994,6 +7994,33 @@ dependencies = [
"rustix 1.0.7",
]

[[package]]
name = "yaak-actions"
version = "0.1.0"
dependencies = [
"serde",
"serde_json",
"thiserror 2.0.17",
"tokio",
"ts-rs",
]

[[package]]
name = "yaak-actions-builtin"
version = "0.1.0"
dependencies = [
"log",
"serde",
"serde_json",
"tokio",
"yaak-actions",
"yaak-crypto",
"yaak-http",
"yaak-models",
"yaak-plugins",
"yaak-templates",
]

[[package]]
name = "yaak-app"
version = "0.0.0"
@@ -8063,6 +8090,8 @@ dependencies = [
"log",
"serde_json",
"tokio",
"yaak-actions",
"yaak-actions-builtin",
"yaak-crypto",
"yaak-http",
"yaak-models",
@@ -8075,6 +8104,7 @@ name = "yaak-common"
version = "0.1.0"
dependencies = [
"serde_json",
"tokio",
]

[[package]]
@@ -8121,8 +8151,10 @@ dependencies = [
"serde_json",
"serde_yaml",
"thiserror 2.0.17",
"tokio",
"ts-rs",
"url",
"yaak-common",
"yaak-models",
"yaak-sync",
]
@@ -8149,6 +8181,7 @@ dependencies = [
"tonic",
"tonic-reflection",
"uuid",
"yaak-common",
"yaak-tls",
]

@@ -2,6 +2,8 @@
resolver = "2"
members = [
# Shared crates (no Tauri dependency)
"crates/yaak-actions",
"crates/yaak-actions-builtin",
"crates/yaak-core",
"crates/yaak-common",
"crates/yaak-crypto",
@@ -45,6 +47,8 @@ tokio = "1.48.0"
ts-rs = "11.1.0"

# Internal crates - shared
yaak-actions = { path = "crates/yaak-actions" }
yaak-actions-builtin = { path = "crates/yaak-actions-builtin" }
yaak-core = { path = "crates/yaak-core" }
yaak-common = { path = "crates/yaak-common" }
yaak-crypto = { path = "crates/yaak-crypto" }

@@ -1,6 +1,6 @@
<p align="center">
<a href="https://github.com/JamesIves/github-sponsors-readme-action">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/src-tauri/icons/icon.png">
<img width="200px" src="https://github.com/mountain-loop/yaak/raw/main/crates-tauri/yaak-app/icons/icon.png">
</a>
</p>

@@ -64,7 +64,7 @@ visit [`DEVELOPMENT.md`](DEVELOPMENT.md) for tips on setting up your environment
## Useful Resources

- [Feedback and Bug Reports](https://feedback.yaak.app)
- [Documentation](https://feedback.yaak.app/help)
- [Documentation](https://yaak.app/docs)
- [Yaak vs Postman](https://yaak.app/alternatives/postman)
- [Yaak vs Bruno](https://yaak.app/alternatives/bruno)
- [Yaak vs Insomnia](https://yaak.app/alternatives/insomnia)

@@ -15,6 +15,8 @@ env_logger = "0.11"
log = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
yaak-actions = { workspace = true }
yaak-actions-builtin = { workspace = true }
yaak-crypto = { workspace = true }
yaak-http = { workspace = true }
yaak-models = { workspace = true }

@@ -1,21 +1,13 @@
|
||||
use clap::{Parser, Subcommand};
|
||||
use log::info;
|
||||
use serde_json::Value;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_models::models::HttpRequest;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::events::PluginContext;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "yaakcli")]
|
||||
@@ -72,93 +64,6 @@ enum Commands {
|
||||
},
|
||||
}
|
||||
|
||||
/// Render an HTTP request with template variables and plugin functions
|
||||
async fn render_http_request(
|
||||
r: &HttpRequest,
|
||||
environment_chain: Vec<yaak_models::models::Environment>,
|
||||
cb: &PluginTemplateCallback,
|
||||
opt: &RenderOptions,
|
||||
) -> yaak_templates::error::Result<HttpRequest> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for p in r.url_parameters.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for p in r.headers.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt).await?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt).await?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (k, v) in r.body.clone() {
|
||||
body.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
match r.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(tmpl)) => {
|
||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
info!(
|
||||
"Rendering authentication.disabled as a template: {disabled} from \"{tmpl}\""
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (k, v) in r.authentication.clone() {
|
||||
if k == "disabled" {
|
||||
auth.insert(k, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(k, render_json_value_raw(v, vars, cb, opt).await?);
|
||||
}
|
||||
}
|
||||
}
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt).await?;
|
||||
|
||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest {
|
||||
url,
|
||||
url_parameters,
|
||||
headers,
|
||||
body,
|
||||
authentication,
|
||||
..r.to_owned()
|
||||
})
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
let cli = Cli::parse();
|
||||
@@ -169,16 +74,10 @@ async fn main() {
|
||||
}
|
||||
|
||||
// Use the same app_id for both data directory and keyring
|
||||
let app_id = if cfg!(debug_assertions) {
|
||||
"app.yaak.desktop.dev"
|
||||
} else {
|
||||
"app.yaak.desktop"
|
||||
};
|
||||
let app_id = if cfg!(debug_assertions) { "app.yaak.desktop.dev" } else { "app.yaak.desktop" };
|
||||
|
||||
let data_dir = cli.data_dir.unwrap_or_else(|| {
|
||||
dirs::data_dir()
|
||||
.expect("Could not determine data directory")
|
||||
.join(app_id)
|
||||
dirs::data_dir().expect("Could not determine data directory").join(app_id)
|
||||
});
|
||||
|
||||
let db_path = data_dir.join("db.sqlite");
|
||||
@@ -189,12 +88,6 @@ async fn main() {
|
||||
|
||||
let db = query_manager.connect();
|
||||
|
||||
// Initialize encryption manager for secure() template function
|
||||
// Use the same app_id as the Tauri app for keyring access
|
||||
let encryption_manager = Arc::new(
|
||||
EncryptionManager::new(query_manager.clone(), app_id),
|
||||
);
|
||||
|
||||
// Initialize plugin manager for template functions
|
||||
let vendored_plugin_dir = data_dir.join("vendored-plugins");
|
||||
let installed_plugin_dir = data_dir.join("installed-plugins");
|
||||
@@ -203,9 +96,8 @@ async fn main() {
|
||||
let node_bin_path = PathBuf::from("node");
|
||||
|
||||
// Find the plugin runtime - check YAAK_PLUGIN_RUNTIME env var, then fallback to development path
|
||||
let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| {
|
||||
let plugin_runtime_main =
|
||||
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
|
||||
// Development fallback: look relative to crate root
|
||||
PathBuf::from(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
|
||||
@@ -214,9 +106,9 @@ async fn main() {
|
||||
// Create plugin manager (plugins may not be available in CLI context)
|
||||
let plugin_manager = Arc::new(
|
||||
PluginManager::new(
|
||||
vendored_plugin_dir,
|
||||
installed_plugin_dir,
|
||||
node_bin_path,
|
||||
vendored_plugin_dir.clone(),
|
||||
installed_plugin_dir.clone(),
|
||||
node_bin_path.clone(),
|
||||
plugin_runtime_main,
|
||||
false,
|
||||
)
|
||||
@@ -226,14 +118,10 @@ async fn main() {
|
||||
// Initialize plugins from database
|
||||
let plugins = db.list_plugins().unwrap_or_default();
|
||||
if !plugins.is_empty() {
|
||||
let errors = plugin_manager
|
||||
.initialize_all_plugins(plugins, &PluginContext::new_empty())
|
||||
.await;
|
||||
let errors =
|
||||
plugin_manager.initialize_all_plugins(plugins, &PluginContext::new_empty()).await;
|
||||
for (plugin_dir, error_msg) in errors {
|
||||
eprintln!(
|
||||
"Warning: Failed to initialize plugin '{}': {}",
|
||||
plugin_dir, error_msg
|
||||
);
|
||||
eprintln!("Warning: Failed to initialize plugin '{}': {}", plugin_dir, error_msg);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -249,9 +137,7 @@ async fn main() {
|
||||
}
|
||||
}
|
||||
Commands::Requests { workspace_id } => {
|
||||
let requests = db
|
||||
.list_http_requests(&workspace_id)
|
||||
.expect("Failed to list requests");
|
||||
let requests = db.list_http_requests(&workspace_id).expect("Failed to list requests");
|
||||
if requests.is_empty() {
|
||||
println!("No requests found in workspace {}", workspace_id);
|
||||
} else {
|
||||
@@ -261,101 +147,67 @@ async fn main() {
|
||||
}
|
||||
}
|
||||
Commands::Send { request_id } => {
|
||||
let request = db
|
||||
.get_http_request(&request_id)
|
||||
.expect("Failed to get request");
|
||||
use yaak_actions::{
|
||||
ActionExecutor, ActionId, ActionParams, ActionResult, ActionTarget, CurrentContext,
|
||||
};
|
||||
use yaak_actions_builtin::{BuiltinActionDependencies, register_http_actions};
|
||||
|
||||
// Resolve environment chain for variable substitution
|
||||
let environment_chain = db
|
||||
.resolve_environments(
|
||||
&request.workspace_id,
|
||||
request.folder_id.as_deref(),
|
||||
cli.environment.as_deref(),
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
// Create template callback with plugin support
|
||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
||||
let template_callback = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Render templates in the request
|
||||
let rendered_request = render_http_request(
|
||||
&request,
|
||||
environment_chain,
|
||||
&template_callback,
|
||||
&RenderOptions::throw(),
|
||||
// Create dependencies
|
||||
let deps = BuiltinActionDependencies::new_standalone(
|
||||
&db_path,
|
||||
&blob_path,
|
||||
&app_id,
|
||||
vendored_plugin_dir.clone(),
|
||||
installed_plugin_dir.clone(),
|
||||
node_bin_path.clone(),
|
||||
)
|
||||
.await
|
||||
.expect("Failed to render request templates");
|
||||
.expect("Failed to initialize dependencies");
|
||||
|
||||
if cli.verbose {
|
||||
println!("> {} {}", rendered_request.method, rendered_request.url);
|
||||
}
|
||||
// Create executor and register actions
|
||||
let executor = ActionExecutor::new();
|
||||
executor.register_builtin_groups().await.expect("Failed to register groups");
|
||||
register_http_actions(&executor, &deps).await.expect("Failed to register HTTP actions");
|
||||
|
||||
// Convert to sendable request
|
||||
let sendable = SendableHttpRequest::from_http_request(
|
||||
&rendered_request,
|
||||
SendableHttpRequestOptions::default(),
|
||||
)
|
||||
.await
|
||||
.expect("Failed to build request");
|
||||
|
||||
// Create event channel for progress
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to print events if verbose
|
||||
let verbose = cli.verbose;
|
||||
let verbose_handle = if verbose {
|
||||
Some(tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
println!("{}", event);
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
// Drain events silently
|
||||
tokio::spawn(async move {
|
||||
while event_rx.recv().await.is_some() {}
|
||||
});
|
||||
None
|
||||
// Prepare context
|
||||
let context = CurrentContext {
|
||||
target: Some(ActionTarget::HttpRequest { id: request_id.clone() }),
|
||||
environment_id: cli.environment.clone(),
|
||||
workspace_id: None,
|
||||
has_window: false,
|
||||
can_prompt: false,
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.expect("Failed to send request");
|
||||
// Prepare params
|
||||
let params = ActionParams {
|
||||
data: serde_json::json!({
|
||||
"render": true,
|
||||
"follow_redirects": false,
|
||||
"timeout_ms": 30000,
|
||||
}),
|
||||
};
|
||||
|
||||
// Wait for event handler to finish
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
}
|
||||
// Invoke action
|
||||
let action_id = ActionId::builtin("http", "send-request");
|
||||
let result = executor.invoke(&action_id, context, params).await.expect("Action failed");
|
||||
|
||||
// Print response
|
||||
if verbose {
|
||||
println!();
|
||||
}
|
||||
println!(
|
||||
"HTTP {} {}",
|
||||
response.status,
|
||||
response.status_reason.as_deref().unwrap_or("")
|
||||
);
|
||||
|
||||
if verbose {
|
||||
for (name, value) in &response.headers {
|
||||
println!("{}: {}", name, value);
|
||||
// Handle result
|
||||
match result {
|
||||
ActionResult::Success { data, message } => {
|
||||
if let Some(msg) = message {
|
||||
println!("{}", msg);
|
||||
}
|
||||
if let Some(data) = data {
|
||||
println!("{}", serde_json::to_string_pretty(&data).unwrap());
|
||||
}
|
||||
}
|
||||
ActionResult::RequiresInput { .. } => {
|
||||
eprintln!("Action requires input (not supported in CLI)");
|
||||
}
|
||||
ActionResult::Cancelled => {
|
||||
eprintln!("Action cancelled");
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
// Print body
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Get { url } => {
|
||||
if cli.verbose {
|
||||
@@ -383,18 +235,13 @@ async fn main() {
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
tokio::spawn(async move {
|
||||
while event_rx.recv().await.is_some() {}
|
||||
});
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
None
|
||||
};
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new().expect("Failed to create HTTP client");
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.expect("Failed to send request");
|
||||
let response = sender.send(sendable, event_tx).await.expect("Failed to send request");
|
||||
|
||||
if let Some(handle) = verbose_handle {
|
||||
let _ = handle.await;
|
||||
@@ -421,12 +268,7 @@ async fn main() {
|
||||
let (body, _stats) = response.text().await.expect("Failed to read response body");
|
||||
println!("{}", body);
|
||||
}
|
||||
Commands::Create {
|
||||
workspace_id,
|
||||
name,
|
||||
method,
|
||||
url,
|
||||
} => {
|
||||
Commands::Create { workspace_id, name, method, url } => {
|
||||
let request = HttpRequest {
|
||||
workspace_id,
|
||||
name,
|
||||
|
||||
@@ -2,14 +2,6 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS execution -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>

<!-- Allow loading 1Password's dylib (signed with different Team ID) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>

<!-- Re-enable for sandboxing. Currently disabled because auto-updater doesn't work with sandboxing.-->
<!-- <key>com.apple.security.app-sandbox</key> <true/>-->
<!-- <key>com.apple.security.files.user-selected.read-write</key> <true/>-->

crates-tauri/yaak-app/macos/entitlements.yaaknode.plist (new file, 13 lines)
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<!-- Enable for NodeJS/V8 JIT compiler -->
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>

<!-- Allow loading plugins signed with different Team IDs (e.g., 1Password) -->
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
</dict>
</plist>
@@ -1,9 +1,11 @@
|
||||
use crate::error::Result;
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, Runtime, State, WebviewWindow, command};
|
||||
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_models::models::HttpRequestHeader;
|
||||
use yaak_models::queries::workspaces::default_headers;
|
||||
use yaak_plugins::events::GetThemesResponse;
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::native_template_functions::{
|
||||
@@ -54,7 +56,12 @@ pub(crate) async fn cmd_secure_template<R: Runtime>(
|
||||
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
|
||||
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
|
||||
let plugin_context = window.plugin_context();
|
||||
Ok(encrypt_secure_template_function(plugin_manager, encryption_manager, &plugin_context, template)?)
|
||||
Ok(encrypt_secure_template_function(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
template,
|
||||
)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -92,3 +99,17 @@ pub(crate) async fn cmd_set_workspace_key<R: Runtime>(
|
||||
window.crypto().set_human_key(workspace_id, key)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) async fn cmd_disable_encryption<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
workspace_id: &str,
|
||||
) -> Result<()> {
|
||||
window.crypto().disable_encryption(workspace_id)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub(crate) fn cmd_default_headers() -> Vec<HttpRequestHeader> {
|
||||
default_headers()
|
||||
}
|
||||
|
||||
@@ -6,33 +6,47 @@ use crate::error::Result;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::command;
|
||||
use yaak_git::{
|
||||
GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult,
|
||||
git_add, git_add_credential, git_add_remote, git_checkout_branch, git_commit,
|
||||
git_create_branch, git_delete_branch, git_fetch_all, git_init, git_log,
|
||||
git_merge_branch, git_pull, git_push, git_remotes, git_rm_remote, git_status,
|
||||
git_unstage,
|
||||
BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult,
|
||||
PushResult, git_add, git_add_credential, git_add_remote, git_checkout_branch, git_clone,
|
||||
git_commit, git_create_branch, git_delete_branch, git_delete_remote_branch, git_fetch_all,
|
||||
git_init, git_log, git_merge_branch, git_pull, git_push, git_remotes, git_rename_branch,
|
||||
git_rm_remote, git_status, git_unstage,
|
||||
};
|
||||
|
||||
// NOTE: All of these commands are async to prevent blocking work from locking up the UI
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_checkout(dir: &Path, branch: &str, force: bool) -> Result<String> {
|
||||
Ok(git_checkout_branch(dir, branch, force)?)
|
||||
Ok(git_checkout_branch(dir, branch, force).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch)?)
|
||||
pub async fn cmd_git_branch(dir: &Path, branch: &str, base: Option<&str>) -> Result<()> {
|
||||
Ok(git_create_branch(dir, branch, base).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_delete_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_branch(dir, branch)?)
|
||||
pub async fn cmd_git_delete_branch(
|
||||
dir: &Path,
|
||||
branch: &str,
|
||||
force: Option<bool>,
|
||||
) -> Result<BranchDeleteResult> {
|
||||
Ok(git_delete_branch(dir, branch, force.unwrap_or(false)).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str, force: bool) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch, force)?)
|
||||
pub async fn cmd_git_delete_remote_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_delete_remote_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_merge_branch(dir: &Path, branch: &str) -> Result<()> {
|
||||
Ok(git_merge_branch(dir, branch).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
Ok(git_rename_branch(dir, old_name, new_name).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -50,24 +64,29 @@ pub async fn cmd_git_initialize(dir: &Path) -> Result<()> {
|
||||
Ok(git_init(dir)?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
|
||||
Ok(git_clone(url, dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_commit(dir: &Path, message: &str) -> Result<()> {
|
||||
Ok(git_commit(dir, message)?)
|
||||
Ok(git_commit(dir, message).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_fetch_all(dir: &Path) -> Result<()> {
|
||||
Ok(git_fetch_all(dir)?)
|
||||
Ok(git_fetch_all(dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_push(dir: &Path) -> Result<PushResult> {
|
||||
Ok(git_push(dir)?)
|
||||
Ok(git_push(dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_pull(dir: &Path) -> Result<PullResult> {
|
||||
Ok(git_pull(dir)?)
|
||||
Ok(git_pull(dir).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
@@ -88,12 +107,11 @@ pub async fn cmd_git_unstage(dir: &Path, rela_paths: Vec<PathBuf>) -> Result<()>
|
||||
|
||||
#[command]
|
||||
pub async fn cmd_git_add_credential(
|
||||
dir: &Path,
|
||||
remote_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<()> {
|
||||
Ok(git_add_credential(dir, remote_url, username, password).await?)
|
||||
Ok(git_add_credential(remote_url, username, password).await?)
|
||||
}
|
||||
|
||||
#[command]
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use KeyAndValueRef::{Ascii, Binary};
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use yaak_grpc::{KeyAndValueRef, MetadataMap};
|
||||
use yaak_models::models::GrpcRequest;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_plugins::events::{CallHttpAuthenticationRequest, HttpHeader};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
use log::debug;
|
||||
use std::sync::OnceLock;
|
||||
use tauri::{AppHandle, Runtime};
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
|
||||
const NAMESPACE: &str = "analytics";
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::render::render_http_request;
|
||||
use log::{debug, warn};
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::{AtomicI32, Ordering};
|
||||
use std::time::{Duration, Instant};
|
||||
use tauri::{AppHandle, Manager, Runtime, WebviewWindow};
|
||||
use tokio::fs::{File, create_dir_all};
|
||||
@@ -15,22 +19,19 @@ use yaak_http::client::{
|
||||
HttpConnectionOptions, HttpConnectionProxySetting, HttpConnectionProxySettingAuth,
|
||||
};
|
||||
use yaak_http::cookies::CookieStore;
|
||||
use yaak_http::manager::HttpConnectionManager;
|
||||
use yaak_http::manager::{CachedClient, HttpConnectionManager};
|
||||
use yaak_http::sender::ReqwestSender;
|
||||
use yaak_http::tee_reader::TeeReader;
|
||||
use yaak_http::transaction::HttpTransaction;
|
||||
use yaak_http::types::{
|
||||
SendableBody, SendableHttpRequest, SendableHttpRequestOptions, append_query_params,
|
||||
};
|
||||
use crate::models_ext::BlobManagerExt;
|
||||
use yaak_models::blob_manager::BodyChunk;
|
||||
use yaak_models::models::{
|
||||
CookieJar, Environment, HttpRequest, HttpResponse, HttpResponseEvent, HttpResponseHeader,
|
||||
HttpResponseState, ProxySetting, ProxySettingAuth,
|
||||
};
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use crate::PluginContextExt;
|
||||
use yaak_plugins::events::{
|
||||
CallHttpAuthenticationRequest, HttpHeader, PluginContext, RenderPurpose,
|
||||
};
|
||||
@@ -173,7 +174,12 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
let environment_id = environment.map(|e| e.id);
|
||||
let workspace = window.db().get_workspace(workspace_id)?;
|
||||
let (resolved, auth_context_id) = resolve_http_request(window, unrendered_request)?;
|
||||
let cb = PluginTemplateCallback::new(plugin_manager.clone(), encryption_manager.clone(), &plugin_context, RenderPurpose::Send);
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager.clone(),
|
||||
encryption_manager.clone(),
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
let env_chain =
|
||||
window.db().resolve_environments(&workspace.id, folder_id, environment_id.as_deref())?;
|
||||
let request = render_http_request(&resolved, env_chain, &cb, &RenderOptions::throw()).await?;
|
||||
@@ -228,12 +234,13 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
None => None,
|
||||
};
|
||||
|
||||
let client = connection_manager
|
||||
let cached_client = connection_manager
|
||||
.get_client(&HttpConnectionOptions {
|
||||
id: plugin_context.id.clone(),
|
||||
validate_certificates: workspace.setting_validate_certificates,
|
||||
proxy: proxy_setting,
|
||||
client_certificate,
|
||||
dns_overrides: workspace.setting_dns_overrides.clone(),
|
||||
})
|
||||
.await?;
|
||||
|
||||
@@ -250,7 +257,7 @@ async fn send_http_request_inner<R: Runtime>(
|
||||
|
||||
let cookie_store = maybe_cookie_store.as_ref().map(|(cs, _)| cs.clone());
|
||||
let result = execute_transaction(
|
||||
client,
|
||||
cached_client,
|
||||
sendable_request,
|
||||
response_ctx,
|
||||
cancelled_rx.clone(),
|
||||
@@ -310,7 +317,7 @@ pub fn resolve_http_request<R: Runtime>(
|
||||
}
|
||||
|
||||
async fn execute_transaction<R: Runtime>(
|
||||
client: reqwest::Client,
|
||||
cached_client: CachedClient,
|
||||
mut sendable_request: SendableHttpRequest,
|
||||
response_ctx: &mut ResponseContext<R>,
|
||||
mut cancelled_rx: Receiver<bool>,
|
||||
@@ -321,7 +328,10 @@ async fn execute_transaction<R: Runtime>(
|
||||
let workspace_id = response_ctx.response().workspace_id.clone();
|
||||
let is_persisted = response_ctx.is_persisted();
|
||||
|
||||
let sender = ReqwestSender::with_client(client);
|
||||
// Keep a reference to the resolver for DNS timing events
|
||||
let resolver = cached_client.resolver.clone();
|
||||
|
||||
let sender = ReqwestSender::with_client(cached_client.client);
|
||||
let transaction = match cookie_store {
|
||||
Some(cs) => HttpTransaction::with_cookie_store(sender, cs),
|
||||
None => HttpTransaction::new(sender),
|
||||
@@ -346,21 +356,39 @@ async fn execute_transaction<R: Runtime>(
|
||||
let (event_tx, mut event_rx) =
|
||||
tokio::sync::mpsc::channel::<yaak_http::sender::HttpResponseEvent>(100);
|
||||
|
||||
// Set the event sender on the DNS resolver so it can emit DNS timing events
|
||||
resolver.set_event_sender(Some(event_tx.clone())).await;
|
||||
|
||||
// Shared state to capture DNS timing from the event processing task
|
||||
let dns_elapsed = Arc::new(AtomicI32::new(0));
|
||||
|
||||
// Write events to DB in a task (only for persisted responses)
|
||||
if is_persisted {
|
||||
let response_id = response_id.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let update_source = response_ctx.update_source.clone();
|
||||
let workspace_id = workspace_id.clone();
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
// Capture DNS timing when we see a DNS event
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
let db_event = HttpResponseEvent::new(&response_id, &workspace_id, event.into());
|
||||
let _ = app_handle.db().upsert_http_response_event(&db_event, &update_source);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// For ephemeral responses, just drain the events
|
||||
tokio::spawn(async move { while event_rx.recv().await.is_some() {} });
|
||||
// For ephemeral responses, just drain the events but still capture DNS timing
|
||||
let dns_elapsed = dns_elapsed.clone();
|
||||
tokio::spawn(async move {
|
||||
while let Some(event) = event_rx.recv().await {
|
||||
if let yaak_http::sender::HttpResponseEvent::DnsResolved { duration, .. } = &event {
|
||||
dns_elapsed.store(*duration as i32, Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Capture request body as it's sent (only for persisted responses)
|
||||
@@ -528,10 +556,14 @@ async fn execute_transaction<R: Runtime>(
|
||||
// Final update with closed state and accurate byte count
|
||||
response_ctx.update(|r| {
|
||||
r.elapsed = start.elapsed().as_millis() as i32;
|
||||
r.elapsed_dns = dns_elapsed.load(Ordering::SeqCst);
|
||||
r.content_length = Some(written_bytes as i32);
|
||||
r.state = HttpResponseState::Closed;
|
||||
})?;
|
||||
|
||||
// Clear the event sender from the resolver since this request is done
|
||||
resolver.set_event_sender(None).await;
|
||||
|
||||
Ok((response_ctx.response().clone(), maybe_blob_write_handle))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
use crate::PluginContextExt;
|
||||
use crate::error::Result;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use crate::PluginContextExt;
|
||||
use log::info;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs::read_to_string;
|
||||
use tauri::{Manager, Runtime, WebviewWindow};
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
use yaak_core::WorkspaceContext;
|
||||
use yaak_models::models::{
|
||||
Environment, Folder, GrpcRequest, HttpRequest, WebsocketRequest, Workspace,
|
||||
};
|
||||
use yaak_models::util::{BatchUpsertResult, UpdateSource, maybe_gen_id, maybe_gen_id_opt};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_tauri_utils::window::WorkspaceWindowTrait;
|
||||
|
||||
pub(crate) async fn import_data<R: Runtime>(
|
||||
window: &WebviewWindow<R>,
|
||||
|
||||
@@ -7,7 +7,7 @@ use crate::http_request::{resolve_http_request, send_http_request};
|
||||
use crate::import::import_data;
|
||||
use crate::models_ext::{BlobManagerExt, QueryManagerExt};
|
||||
use crate::notifications::YaakNotifier;
|
||||
use crate::render::{render_grpc_request, render_template};
|
||||
use crate::render::{render_grpc_request, render_json_value, render_template};
|
||||
use crate::updates::{UpdateMode, UpdateTrigger, YaakUpdater};
|
||||
use crate::uri_scheme::handle_deep_link;
|
||||
use error::Result as YaakResult;
|
||||
@@ -101,6 +101,7 @@ struct AppMetaData {
|
||||
app_data_dir: String,
|
||||
app_log_dir: String,
|
||||
vendored_plugin_dir: String,
|
||||
default_project_dir: String,
|
||||
feature_updater: bool,
|
||||
feature_license: bool,
|
||||
}
|
||||
@@ -111,6 +112,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
let app_log_dir = app_handle.path().app_log_dir()?;
|
||||
let vendored_plugin_dir =
|
||||
app_handle.path().resolve("vendored/plugins", BaseDirectory::Resource)?;
|
||||
let default_project_dir = app_handle.path().home_dir()?.join("YaakProjects");
|
||||
Ok(AppMetaData {
|
||||
is_dev: is_dev(),
|
||||
version: app_handle.package_info().version.to_string(),
|
||||
@@ -118,6 +120,7 @@ async fn cmd_metadata(app_handle: AppHandle) -> YaakResult<AppMetaData> {
|
||||
app_data_dir: app_data_dir.to_string_lossy().to_string(),
|
||||
app_log_dir: app_log_dir.to_string_lossy().to_string(),
|
||||
vendored_plugin_dir: vendored_plugin_dir.to_string_lossy().to_string(),
|
||||
default_project_dir: default_project_dir.to_string_lossy().to_string(),
|
||||
feature_license: cfg!(feature = "license"),
|
||||
feature_updater: cfg!(feature = "updater"),
|
||||
})
|
||||
@@ -189,7 +192,6 @@ async fn cmd_grpc_reflect<R: Runtime>(
|
||||
request_id: &str,
|
||||
environment_id: Option<&str>,
|
||||
proto_files: Vec<String>,
|
||||
skip_cache: Option<bool>,
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
grpc_handle: State<'_, Mutex<GrpcHandle>>,
|
||||
@@ -224,18 +226,21 @@ async fn cmd_grpc_reflect<R: Runtime>(
|
||||
let settings = window.db().get_settings();
|
||||
let client_certificate =
|
||||
find_client_certificate(req.url.as_str(), &settings.client_certificates);
|
||||
let proto_files: Vec<PathBuf> =
|
||||
proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect();
|
||||
|
||||
Ok(grpc_handle
|
||||
.lock()
|
||||
.await
|
||||
// Always invalidate cached pool when this command is called, to force re-reflection
|
||||
let mut handle = grpc_handle.lock().await;
|
||||
handle.invalidate_pool(&req.id, &uri, &proto_files);
|
||||
|
||||
Ok(handle
|
||||
.services(
|
||||
&req.id,
|
||||
&uri,
|
||||
&proto_files.iter().map(|p| PathBuf::from_str(p).unwrap()).collect(),
|
||||
&proto_files,
|
||||
&metadata,
|
||||
workspace.setting_validate_certificates,
|
||||
client_certificate,
|
||||
skip_cache.unwrap_or(false),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| GenericError(e.to_string()))?)
|
||||
@@ -360,10 +365,8 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
|
||||
let cb = {
|
||||
let cancelled_rx = cancelled_rx.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let environment_chain = environment_chain.clone();
|
||||
let window = window.clone();
|
||||
let base_msg = base_msg.clone();
|
||||
let plugin_manager = plugin_manager.clone();
|
||||
let encryption_manager = encryption_manager.clone();
|
||||
|
||||
@@ -385,14 +388,12 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
match serde_json::from_str::<IncomingMsg>(ev.payload()) {
|
||||
Ok(IncomingMsg::Message(msg)) => {
|
||||
let window = window.clone();
|
||||
let app_handle = app_handle.clone();
|
||||
let base_msg = base_msg.clone();
|
||||
let environment_chain = environment_chain.clone();
|
||||
let plugin_manager = plugin_manager.clone();
|
||||
let encryption_manager = encryption_manager.clone();
|
||||
let msg = block_in_place(|| {
|
||||
tauri::async_runtime::block_on(async {
|
||||
render_template(
|
||||
let result = render_template(
|
||||
msg.as_str(),
|
||||
environment_chain,
|
||||
&PluginTemplateCallback::new(
|
||||
@@ -406,24 +407,11 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
),
|
||||
&RenderOptions { error_behavior: RenderErrorBehavior::Throw },
|
||||
)
|
||||
.await
|
||||
.expect("Failed to render template")
|
||||
.await;
|
||||
result.expect("Failed to render template")
|
||||
})
|
||||
});
|
||||
in_msg_tx.try_send(msg.clone()).unwrap();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
app_handle
|
||||
.db()
|
||||
.upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: msg,
|
||||
event_type: GrpcEventType::ClientMessage,
|
||||
..base_msg.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(window.label()),
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
}
|
||||
Ok(IncomingMsg::Commit) => {
|
||||
maybe_in_msg_tx.take();
|
||||
@@ -470,12 +458,48 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
)?;
|
||||
|
||||
async move {
|
||||
// Create callback for streaming methods that handles both success and error
|
||||
let on_message = {
|
||||
let app_handle = app_handle.clone();
|
||||
let base_event = base_event.clone();
|
||||
let window_label = window.label().to_string();
|
||||
move |result: std::result::Result<String, String>| match result {
|
||||
Ok(msg) => {
|
||||
let _ = app_handle.db().upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: msg,
|
||||
event_type: GrpcEventType::ClientMessage,
|
||||
..base_event.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(&window_label),
|
||||
);
|
||||
}
|
||||
Err(error) => {
|
||||
let _ = app_handle.db().upsert_grpc_event(
|
||||
&GrpcEvent {
|
||||
content: format!("Failed to send message: {}", error),
|
||||
event_type: GrpcEventType::Error,
|
||||
..base_event.clone()
|
||||
},
|
||||
&UpdateSource::from_window_label(&window_label),
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (maybe_stream, maybe_msg) =
|
||||
match (method_desc.is_client_streaming(), method_desc.is_server_streaming()) {
|
||||
(true, true) => (
|
||||
Some(
|
||||
connection
|
||||
.streaming(&service, &method, in_msg_stream, &metadata, client_cert)
|
||||
.streaming(
|
||||
&service,
|
||||
&method,
|
||||
in_msg_stream,
|
||||
&metadata,
|
||||
client_cert,
|
||||
on_message.clone(),
|
||||
)
|
||||
.await,
|
||||
),
|
||||
None,
|
||||
@@ -490,6 +514,7 @@ async fn cmd_grpc_go<R: Runtime>(
|
||||
in_msg_stream,
|
||||
&metadata,
|
||||
client_cert,
|
||||
on_message.clone(),
|
||||
)
|
||||
.await,
|
||||
),
|
||||
@@ -1035,14 +1060,54 @@ async fn cmd_get_http_authentication_summaries<R: Runtime>(
|
||||
#[tauri::command]
|
||||
async fn cmd_get_http_authentication_config<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
plugin_manager: State<'_, PluginManager>,
|
||||
encryption_manager: State<'_, EncryptionManager>,
|
||||
auth_name: &str,
|
||||
values: HashMap<String, JsonPrimitive>,
|
||||
model: AnyModel,
|
||||
_environment_id: Option<&str>,
|
||||
environment_id: Option<&str>,
|
||||
) -> YaakResult<GetHttpAuthenticationConfigResponse> {
|
||||
// Extract workspace_id and folder_id from the model to resolve the environment chain
|
||||
let (workspace_id, folder_id) = match &model {
|
||||
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
|
||||
AnyModel::Workspace(w) => (w.id.clone(), None),
|
||||
_ => return Err(GenericError("Unsupported model type for authentication config".into())),
|
||||
};
|
||||
|
||||
// Resolve environment chain and render the values for token lookup
|
||||
let environment_chain = app_handle.db().resolve_environments(
|
||||
&workspace_id,
|
||||
folder_id.as_deref(),
|
||||
environment_id,
|
||||
)?;
|
||||
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
|
||||
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager_arc,
|
||||
encryption_manager_arc,
|
||||
&window.plugin_context(),
|
||||
RenderPurpose::Preview,
|
||||
);
|
||||
|
||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||
let rendered_json =
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
||||
|
||||
// Convert back to HashMap<String, JsonPrimitive>
|
||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||
|
||||
Ok(plugin_manager
|
||||
.get_http_authentication_config(&window.plugin_context(), auth_name, values, model.id())
|
||||
.get_http_authentication_config(
|
||||
&window.plugin_context(),
|
||||
auth_name,
|
||||
rendered_values,
|
||||
model.id(),
|
||||
)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -1089,19 +1154,54 @@ async fn cmd_call_grpc_request_action<R: Runtime>(
|
||||
#[tauri::command]
|
||||
async fn cmd_call_http_authentication_action<R: Runtime>(
|
||||
window: WebviewWindow<R>,
|
||||
app_handle: AppHandle<R>,
|
||||
plugin_manager: State<'_, PluginManager>,
|
||||
encryption_manager: State<'_, EncryptionManager>,
|
||||
auth_name: &str,
|
||||
action_index: i32,
|
||||
values: HashMap<String, JsonPrimitive>,
|
||||
model: AnyModel,
|
||||
_environment_id: Option<&str>,
|
||||
environment_id: Option<&str>,
|
||||
) -> YaakResult<()> {
|
||||
// Extract workspace_id and folder_id from the model to resolve the environment chain
|
||||
let (workspace_id, folder_id) = match &model {
|
||||
AnyModel::HttpRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::GrpcRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::WebsocketRequest(r) => (r.workspace_id.clone(), r.folder_id.clone()),
|
||||
AnyModel::Folder(f) => (f.workspace_id.clone(), f.folder_id.clone()),
|
||||
AnyModel::Workspace(w) => (w.id.clone(), None),
|
||||
_ => return Err(GenericError("Unsupported model type for authentication action".into())),
|
||||
};
|
||||
|
||||
// Resolve environment chain and render the values
|
||||
let environment_chain = app_handle.db().resolve_environments(
|
||||
&workspace_id,
|
||||
folder_id.as_deref(),
|
||||
environment_id,
|
||||
)?;
|
||||
let plugin_manager_arc = Arc::new((*plugin_manager).clone());
|
||||
let encryption_manager_arc = Arc::new((*encryption_manager).clone());
|
||||
let cb = PluginTemplateCallback::new(
|
||||
plugin_manager_arc,
|
||||
encryption_manager_arc,
|
||||
&window.plugin_context(),
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Convert HashMap<String, JsonPrimitive> to serde_json::Value for rendering
|
||||
let values_json: serde_json::Value = serde_json::to_value(&values)?;
|
||||
let rendered_json =
|
||||
render_json_value(values_json, environment_chain, &cb, &RenderOptions::throw()).await?;
|
||||
|
||||
// Convert back to HashMap<String, JsonPrimitive>
|
||||
let rendered_values: HashMap<String, JsonPrimitive> = serde_json::from_value(rendered_json)?;
|
||||
|
||||
Ok(plugin_manager
|
||||
.call_http_authentication_action(
|
||||
&window.plugin_context(),
|
||||
auth_name,
|
||||
action_index,
|
||||
values,
|
||||
rendered_values,
|
||||
&model.id(),
|
||||
)
|
||||
.await?)
|
||||
@@ -1621,6 +1721,8 @@ pub fn run() {
|
||||
//
|
||||
// Migrated commands
|
||||
crate::commands::cmd_decrypt_template,
|
||||
crate::commands::cmd_default_headers,
|
||||
crate::commands::cmd_disable_encryption,
|
||||
crate::commands::cmd_enable_encryption,
|
||||
crate::commands::cmd_get_themes,
|
||||
crate::commands::cmd_reveal_workspace_key,
|
||||
@@ -1649,10 +1751,13 @@ pub fn run() {
|
||||
git_ext::cmd_git_checkout,
|
||||
git_ext::cmd_git_branch,
|
||||
git_ext::cmd_git_delete_branch,
|
||||
git_ext::cmd_git_delete_remote_branch,
|
||||
git_ext::cmd_git_merge_branch,
|
||||
git_ext::cmd_git_rename_branch,
|
||||
git_ext::cmd_git_status,
|
||||
git_ext::cmd_git_log,
|
||||
git_ext::cmd_git_initialize,
|
||||
git_ext::cmd_git_clone,
|
||||
git_ext::cmd_git_commit,
|
||||
git_ext::cmd_git_fetch_all,
|
||||
git_ext::cmd_git_push,
|
||||
@@ -1664,6 +1769,13 @@ pub fn run() {
|
||||
git_ext::cmd_git_add_remote,
|
||||
git_ext::cmd_git_rm_remote,
|
||||
//
|
||||
// Plugin commands
|
||||
plugins_ext::cmd_plugins_search,
|
||||
plugins_ext::cmd_plugins_install,
|
||||
plugins_ext::cmd_plugins_uninstall,
|
||||
plugins_ext::cmd_plugins_updates,
|
||||
plugins_ext::cmd_plugins_update_all,
|
||||
//
|
||||
// WebSocket commands
|
||||
ws_ext::cmd_ws_upsert_request,
|
||||
ws_ext::cmd_ws_duplicate_request,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::error::Result;
|
||||
use crate::history::get_or_upsert_launch_info;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use chrono::{DateTime, Utc};
|
||||
use log::{debug, info};
|
||||
use reqwest::Method;
|
||||
@@ -8,9 +9,8 @@ use std::time::Instant;
|
||||
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow};
|
||||
use ts_rs::TS;
|
||||
use yaak_common::platform::get_os_str;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
use crate::models_ext::QueryManagerExt;
|
||||
use yaak_models::util::UpdateSource;
|
||||
use yaak_tauri_utils::api_client::yaak_api_client;
|
||||
|
||||
// Check for updates every hour
|
||||
const MAX_UPDATE_CHECK_SECONDS: u64 = 60 * 60;
|
||||
|
||||
@@ -1,5 +1,7 @@
use crate::error::Result;
use crate::http_request::send_http_request_with_context;
use crate::models_ext::BlobManagerExt;
use crate::models_ext::QueryManagerExt;
use crate::render::{render_grpc_request, render_http_request, render_json_value};
use crate::window::{CreateWindowConfig, create_window};
use crate::{
@@ -14,11 +16,8 @@ use tauri::{AppHandle, Emitter, Manager, Runtime};
use tauri_plugin_clipboard_manager::ClipboardExt;
use tauri_plugin_opener::OpenerExt;
use yaak_crypto::manager::EncryptionManager;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use crate::models_ext::BlobManagerExt;
use yaak_models::models::{AnyModel, HttpResponse, Plugin};
use yaak_models::queries::any_request::AnyRequest;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::UpdateSource;
use yaak_plugins::error::Error::PluginErr;
use yaak_plugins::events::{
@@ -32,6 +31,7 @@ use yaak_plugins::events::{
use yaak_plugins::manager::PluginManager;
use yaak_plugins::plugin_handle::PluginHandle;
use yaak_plugins::template_callback::PluginTemplateCallback;
use yaak_tauri_utils::window::WorkspaceWindowTrait;
use yaak_templates::{RenderErrorBehavior, RenderOptions};

pub(crate) async fn handle_plugin_event<R: Runtime>(
@@ -57,6 +57,10 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::PromptFormRequest(_) => {
let window = get_window_from_plugin_context(app_handle, &plugin_context)?;
Ok(call_frontend(&window, event).await)
}
InternalEventPayload::FindHttpResponsesRequest(req) => {
let http_responses = app_handle
.db()
@@ -166,7 +170,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let grpc_request =
render_grpc_request(&req.grpc_request, environment_chain, &cb, &opt).await?;
@@ -187,7 +196,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = &RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let http_request =
render_http_request(&req.http_request, environment_chain, &cb, &opt).await?;
@@ -218,7 +232,12 @@ pub(crate) async fn handle_plugin_event<R: Runtime>(
)?;
let plugin_manager = Arc::new((*app_handle.state::<PluginManager>()).clone());
let encryption_manager = Arc::new((*app_handle.state::<EncryptionManager>()).clone());
let cb = PluginTemplateCallback::new(plugin_manager, encryption_manager, &plugin_context, req.purpose);
let cb = PluginTemplateCallback::new(
plugin_manager,
encryption_manager,
&plugin_context,
req.purpose,
);
let opt = RenderOptions { error_behavior: RenderErrorBehavior::Throw };
let data = render_json_value(req.data, environment_chain, &cb, &opt).await?;
Ok(Some(InternalEventPayload::TemplateRenderResponse(TemplateRenderResponse { data })))

@@ -17,7 +17,7 @@ use tauri::path::BaseDirectory;
use tauri::plugin::{Builder, TauriPlugin};
use tauri::{
AppHandle, Emitter, Manager, RunEvent, Runtime, State, WebviewWindow, WindowEvent, command,
generate_handler, is_dev,
is_dev,
};
use tokio::sync::Mutex;
use ts_rs::TS;
@@ -132,7 +132,7 @@ impl PluginUpdater {
// ============================================================================

#[command]
pub(crate) async fn cmd_plugins_search<R: Runtime>(
pub async fn cmd_plugins_search<R: Runtime>(
app_handle: AppHandle<R>,
query: &str,
) -> Result<PluginSearchResponse> {
@@ -141,7 +141,7 @@ pub(crate) async fn cmd_plugins_search<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_install<R: Runtime>(
pub async fn cmd_plugins_install<R: Runtime>(
window: WebviewWindow<R>,
name: &str,
version: Option<String>,
@@ -163,7 +163,7 @@ pub(crate) async fn cmd_plugins_install<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
pub async fn cmd_plugins_uninstall<R: Runtime>(
plugin_id: &str,
window: WebviewWindow<R>,
) -> Result<Plugin> {
@@ -174,7 +174,7 @@ pub(crate) async fn cmd_plugins_uninstall<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_updates<R: Runtime>(
pub async fn cmd_plugins_updates<R: Runtime>(
app_handle: AppHandle<R>,
) -> Result<PluginUpdatesResponse> {
let http_client = yaak_api_client(&app_handle)?;
@@ -183,7 +183,7 @@ pub(crate) async fn cmd_plugins_updates<R: Runtime>(
}

#[command]
pub(crate) async fn cmd_plugins_update_all<R: Runtime>(
pub async fn cmd_plugins_update_all<R: Runtime>(
window: WebviewWindow<R>,
) -> Result<Vec<PluginNameVersion>> {
let http_client = yaak_api_client(window.app_handle())?;
@@ -233,13 +233,6 @@ pub(crate) async fn cmd_plugins_update_all<R: Runtime>(

pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("yaak-plugins")
.invoke_handler(generate_handler![
cmd_plugins_search,
cmd_plugins_install,
cmd_plugins_uninstall,
cmd_plugins_updates,
cmd_plugins_update_all
])
.setup(|app_handle, _| {
// Resolve paths for plugin manager
let vendored_plugin_dir = app_handle

@@ -3,6 +3,7 @@ use std::path::PathBuf;
use std::time::{Duration, Instant};

use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use tauri::{Emitter, Listener, Manager, Runtime, WebviewWindow};
@@ -11,7 +12,6 @@ use tauri_plugin_updater::{Update, UpdaterExt};
use tokio::task::block_in_place;
use tokio::time::sleep;
use ts_rs::TS;
use crate::models_ext::QueryManagerExt;
use yaak_models::util::generate_id;
use yaak_plugins::manager::PluginManager;

@@ -1,18 +1,18 @@
use crate::PluginContextExt;
use crate::error::Result;
use crate::import::import_data;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use log::{info, warn};
use std::collections::HashMap;
use std::fs;
use std::sync::Arc;
use tauri::{AppHandle, Emitter, Manager, Runtime, Url};
use tauri_plugin_dialog::{DialogExt, MessageDialogButtons, MessageDialogKind};
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::util::generate_id;
use yaak_plugins::events::{Color, ShowToastRequest};
use yaak_plugins::install::download_and_install;
use yaak_plugins::manager::PluginManager;
use yaak_tauri_utils::api_client::yaak_api_client;

pub(crate) async fn handle_deep_link<R: Runtime>(
app_handle: &AppHandle<R>,
@@ -55,7 +55,8 @@ pub(crate) async fn handle_deep_link<R: Runtime>(
&plugin_context,
name,
version,
).await?;
)
.await?;
app_handle.emit(
"show_toast",
ShowToastRequest {

@@ -1,4 +1,5 @@
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::window_menu::app_menu;
use log::{info, warn};
use rand::random;
@@ -8,7 +9,6 @@ use tauri::{
};
use tauri_plugin_opener::OpenerExt;
use tokio::sync::mpsc;
use crate::models_ext::QueryManagerExt;

const DEFAULT_WINDOW_WIDTH: f64 = 1100.0;
const DEFAULT_WINDOW_HEIGHT: f64 = 600.0;

@@ -1,9 +1,9 @@
//! WebSocket Tauri command wrappers
//! These wrap the core yaak-ws functionality for Tauri IPC.

use crate::PluginContextExt;
use crate::error::Result;
use crate::models_ext::QueryManagerExt;
use crate::PluginContextExt;
use http::HeaderMap;
use log::{debug, info, warn};
use std::str::FromStr;
@@ -56,9 +56,10 @@ pub async fn cmd_ws_delete_request<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketRequest> {
Ok(app_handle
.db()
.delete_websocket_request_by_id(request_id, &UpdateSource::from_window_label(window.label()))?)
Ok(app_handle.db().delete_websocket_request_by_id(
request_id,
&UpdateSource::from_window_label(window.label()),
)?)
}

#[command]
@@ -67,12 +68,10 @@ pub async fn cmd_ws_delete_connection<R: Runtime>(
app_handle: AppHandle<R>,
window: WebviewWindow<R>,
) -> Result<WebsocketConnection> {
Ok(app_handle
.db()
.delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
Ok(app_handle.db().delete_websocket_connection_by_id(
connection_id,
&UpdateSource::from_window_label(window.label()),
)?)
}

#[command]
@@ -296,8 +295,10 @@ pub async fn cmd_ws_connect<R: Runtime>(
)
.await?;
for header in plugin_result.set_headers.unwrap_or_default() {
match (http::HeaderName::from_str(&header.name), HeaderValue::from_str(&header.value))
{
match (
http::HeaderName::from_str(&header.name),
HeaderValue::from_str(&header.value),
) {
(Ok(name), Ok(value)) => {
headers.insert(name, value);
}

@@ -44,8 +44,8 @@
"vendored/protoc/include",
"vendored/plugins",
"vendored/plugin-runtime",
"vendored/node/yaaknode",
"vendored/protoc/yaakprotoc"
"vendored/node/yaaknode*",
"vendored/protoc/yaakprotoc*"
]
}
}

@@ -8,10 +8,10 @@ use std::time::Duration;
use tauri::{AppHandle, Emitter, Manager, Runtime, WebviewWindow, is_dev};
use ts_rs::TS;
use yaak_common::platform::get_os_str;
use yaak_tauri_utils::api_client::yaak_api_client;
use yaak_models::db_context::DbContext;
use yaak_models::query_manager::QueryManager;
use yaak_models::util::UpdateSource;
use yaak_tauri_utils::api_client::yaak_api_client;

/// Extension trait for accessing the QueryManager from Tauri Manager types.
/// This is needed temporarily until all crates are refactored to not use Tauri.

crates/yaak-actions-builtin/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "yaak-actions-builtin"
version = "0.1.0"
edition = "2024"
authors = ["Gregory Schier"]
publish = false

[dependencies]
yaak-actions = { workspace = true }
yaak-http = { workspace = true }
yaak-models = { workspace = true }
yaak-templates = { workspace = true }
yaak-plugins = { workspace = true }
yaak-crypto = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true, features = ["sync", "rt-multi-thread"] }
log = { workspace = true }

crates/yaak-actions-builtin/src/dependencies.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
//! Dependency injection for built-in actions.

use std::path::{Path, PathBuf};
use std::sync::Arc;
use yaak_crypto::manager::EncryptionManager;
use yaak_models::query_manager::QueryManager;
use yaak_plugins::events::PluginContext;
use yaak_plugins::manager::PluginManager;

/// Dependencies needed by built-in action implementations.
///
/// This struct bundles all the dependencies that action handlers need,
/// providing a clean way to initialize them in different contexts
/// (CLI, Tauri app, MCP server, etc.).
pub struct BuiltinActionDependencies {
    pub query_manager: Arc<QueryManager>,
    pub plugin_manager: Arc<PluginManager>,
    pub encryption_manager: Arc<EncryptionManager>,
}

impl BuiltinActionDependencies {
    /// Create dependencies for standalone usage (CLI, MCP server, etc.)
    ///
    /// This initializes all the necessary managers following the same pattern
    /// as the yaak-cli implementation.
    pub async fn new_standalone(
        db_path: &Path,
        blob_path: &Path,
        app_id: &str,
        plugin_vendored_dir: PathBuf,
        plugin_installed_dir: PathBuf,
        node_path: PathBuf,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        // Initialize database
        let (query_manager, _, _) = yaak_models::init_standalone(db_path, blob_path)?;

        // Initialize encryption manager (takes QueryManager by value)
        let encryption_manager = Arc::new(EncryptionManager::new(
            query_manager.clone(),
            app_id.to_string(),
        ));

        let query_manager = Arc::new(query_manager);

        // Find plugin runtime
        let plugin_runtime_main = std::env::var("YAAK_PLUGIN_RUNTIME")
            .map(PathBuf::from)
            .unwrap_or_else(|_| {
                // Development fallback
                PathBuf::from(env!("CARGO_MANIFEST_DIR"))
                    .join("../../crates-tauri/yaak-app/vendored/plugin-runtime/index.cjs")
            });

        // Initialize plugin manager
        let plugin_manager = Arc::new(
            PluginManager::new(
                plugin_vendored_dir,
                plugin_installed_dir,
                node_path,
                plugin_runtime_main,
                false, // not sandboxed in CLI
            )
            .await,
        );

        // Initialize plugins from database
        let db = query_manager.connect();
        let plugins = db.list_plugins().unwrap_or_default();
        if !plugins.is_empty() {
            let errors = plugin_manager
                .initialize_all_plugins(plugins, &PluginContext::new_empty())
                .await;
            for (plugin_dir, error_msg) in errors {
                log::warn!(
                    "Failed to initialize plugin '{}': {}",
                    plugin_dir,
                    error_msg
                );
            }
        }

        Ok(Self {
            query_manager,
            plugin_manager,
            encryption_manager,
        })
    }
}

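A minimal sketch of how `BuiltinActionDependencies::new_standalone` might be called from a CLI-style entry point; all paths and the app ID below are placeholders, not values from this changeset.

```rust
use std::path::{Path, PathBuf};

use yaak_actions_builtin::BuiltinActionDependencies;

// Placeholder paths and app ID; adjust to the host environment.
async fn init_dependencies() -> Result<BuiltinActionDependencies, Box<dyn std::error::Error>> {
    BuiltinActionDependencies::new_standalone(
        Path::new("/tmp/yaak/db.sqlite"),     // database file
        Path::new("/tmp/yaak/blobs"),         // blob storage directory
        "app.yaak.example",                   // app ID used by the encryption manager
        PathBuf::from("./vendored/plugins"),  // vendored plugin directory
        PathBuf::from("./plugins"),           // installed plugin directory
        PathBuf::from("/usr/local/bin/node"), // Node binary for the plugin runtime
    )
    .await
}
```
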
crates/yaak-actions-builtin/src/http/mod.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
//! HTTP action implementations.

pub mod send;

use crate::BuiltinActionDependencies;
use yaak_actions::{ActionError, ActionExecutor, ActionSource};

/// Register all HTTP-related actions with the executor.
pub async fn register_http_actions(
    executor: &ActionExecutor,
    deps: &BuiltinActionDependencies,
) -> Result<(), ActionError> {
    let handler = send::HttpSendActionHandler {
        query_manager: deps.query_manager.clone(),
        plugin_manager: deps.plugin_manager.clone(),
        encryption_manager: deps.encryption_manager.clone(),
    };

    executor
        .register(send::metadata(), ActionSource::Builtin, handler)
        .await?;

    Ok(())
}

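Putting the pieces together, here is a hedged sketch of wiring an `ActionExecutor`, registering the built-in HTTP actions, and invoking the send action. The request ID and parameter values are placeholders, and the `ActionParams` construction assumes a single public JSON `data` field as suggested by the generated bindings.

```rust
use serde_json::json;
use yaak_actions::{ActionExecutor, ActionId, ActionParams, ActionTarget, CurrentContext};
use yaak_actions_builtin::{BuiltinActionDependencies, register_http_actions};

async fn send_request(deps: &BuiltinActionDependencies) -> Result<(), Box<dyn std::error::Error>> {
    // Create the executor and register the built-in HTTP actions with it.
    let executor = ActionExecutor::new();
    register_http_actions(&executor, deps).await?;

    // Target an existing HTTP request by ID (placeholder value).
    let context = CurrentContext {
        target: Some(ActionTarget::HttpRequest { id: "rq_123".to_string() }),
        ..Default::default()
    };
    let params = ActionParams { data: json!({ "follow_redirects": true }) };

    // Invoke the "Send HTTP Request" action registered above.
    let _result = executor
        .invoke(&ActionId::builtin("http", "send-request"), context, params)
        .await?;
    Ok(())
}
```
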
crates/yaak-actions-builtin/src/http/send.rs (new file, 293 lines)
@@ -0,0 +1,293 @@
|
||||
//! HTTP send action implementation.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::sync::Arc;
|
||||
use serde_json::{json, Value};
|
||||
use tokio::sync::mpsc;
|
||||
use yaak_actions::{
|
||||
ActionError, ActionGroupId, ActionHandler, ActionId, ActionMetadata,
|
||||
ActionParams, ActionResult, ActionScope, CurrentContext,
|
||||
RequiredContext,
|
||||
};
|
||||
use yaak_crypto::manager::EncryptionManager;
|
||||
use yaak_http::path_placeholders::apply_path_placeholders;
|
||||
use yaak_http::sender::{HttpSender, ReqwestSender};
|
||||
use yaak_http::types::{SendableHttpRequest, SendableHttpRequestOptions};
|
||||
use yaak_models::models::{HttpRequest, HttpRequestHeader, HttpUrlParameter};
|
||||
use yaak_models::query_manager::QueryManager;
|
||||
use yaak_models::render::make_vars_hashmap;
|
||||
use yaak_plugins::events::{PluginContext, RenderPurpose};
|
||||
use yaak_plugins::manager::PluginManager;
|
||||
use yaak_plugins::template_callback::PluginTemplateCallback;
|
||||
use yaak_templates::{parse_and_render, render_json_value_raw, RenderOptions};
|
||||
|
||||
/// Handler for HTTP send action.
|
||||
pub struct HttpSendActionHandler {
|
||||
pub query_manager: Arc<QueryManager>,
|
||||
pub plugin_manager: Arc<PluginManager>,
|
||||
pub encryption_manager: Arc<EncryptionManager>,
|
||||
}
|
||||
|
||||
/// Metadata for the HTTP send action.
|
||||
pub fn metadata() -> ActionMetadata {
|
||||
ActionMetadata {
|
||||
id: ActionId::builtin("http", "send-request"),
|
||||
label: "Send HTTP Request".to_string(),
|
||||
description: Some("Execute an HTTP request and return the response".to_string()),
|
||||
icon: Some("play".to_string()),
|
||||
scope: ActionScope::HttpRequest,
|
||||
keyboard_shortcut: None,
|
||||
requires_selection: true,
|
||||
enabled_condition: None,
|
||||
group_id: Some(ActionGroupId::builtin("send")),
|
||||
order: 10,
|
||||
required_context: RequiredContext::requires_target(),
|
||||
}
|
||||
}
|
||||
|
||||
impl ActionHandler for HttpSendActionHandler {
|
||||
fn handle(
|
||||
&self,
|
||||
context: CurrentContext,
|
||||
params: ActionParams,
|
||||
) -> std::pin::Pin<
|
||||
Box<dyn std::future::Future<Output = Result<ActionResult, ActionError>> + Send + 'static>,
|
||||
> {
|
||||
let query_manager = self.query_manager.clone();
|
||||
let plugin_manager = self.plugin_manager.clone();
|
||||
let encryption_manager = self.encryption_manager.clone();
|
||||
|
||||
Box::pin(async move {
|
||||
// Extract request_id from context
|
||||
let request_id = context
|
||||
.target
|
||||
.as_ref()
|
||||
.ok_or_else(|| {
|
||||
ActionError::ContextMissing {
|
||||
missing_fields: vec!["target".to_string()],
|
||||
}
|
||||
})?
|
||||
.id()
|
||||
.ok_or_else(|| {
|
||||
ActionError::ContextMissing {
|
||||
missing_fields: vec!["target.id".to_string()],
|
||||
}
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
// Fetch request and environment from database (synchronous)
|
||||
let (request, environment_chain) = {
|
||||
let db = query_manager.connect();
|
||||
|
||||
// Fetch HTTP request from database
|
||||
let request = db.get_http_request(&request_id).map_err(|e| {
|
||||
ActionError::Internal(format!("Failed to fetch request {}: {}", request_id, e))
|
||||
})?;
|
||||
|
||||
// Resolve environment chain for variable substitution
|
||||
let environment_chain = if let Some(env_id) = &context.environment_id {
|
||||
db.resolve_environments(
|
||||
&request.workspace_id,
|
||||
request.folder_id.as_deref(),
|
||||
Some(env_id),
|
||||
)
|
||||
.unwrap_or_default()
|
||||
} else {
|
||||
db.resolve_environments(
|
||||
&request.workspace_id,
|
||||
request.folder_id.as_deref(),
|
||||
None,
|
||||
)
|
||||
.unwrap_or_default()
|
||||
};
|
||||
|
||||
(request, environment_chain)
|
||||
}; // db is dropped here
|
||||
|
||||
// Create template callback with plugin support
|
||||
let plugin_context = PluginContext::new(None, Some(request.workspace_id.clone()));
|
||||
let template_callback = PluginTemplateCallback::new(
|
||||
plugin_manager,
|
||||
encryption_manager,
|
||||
&plugin_context,
|
||||
RenderPurpose::Send,
|
||||
);
|
||||
|
||||
// Render templates in the request
|
||||
let rendered_request = render_http_request(
|
||||
&request,
|
||||
environment_chain,
|
||||
&template_callback,
|
||||
&RenderOptions::throw(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| ActionError::Internal(format!("Failed to render request: {}", e)))?;
|
||||
|
||||
// Build sendable request
|
||||
let options = SendableHttpRequestOptions {
|
||||
timeout: params
|
||||
.data
|
||||
.get("timeout_ms")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|ms| std::time::Duration::from_millis(ms)),
|
||||
follow_redirects: params
|
||||
.data
|
||||
.get("follow_redirects")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false),
|
||||
};
|
||||
|
||||
let sendable = SendableHttpRequest::from_http_request(&rendered_request, options)
|
||||
.await
|
||||
.map_err(|e| ActionError::Internal(format!("Failed to build request: {}", e)))?;
|
||||
|
||||
// Create event channel
|
||||
let (event_tx, mut event_rx) = mpsc::channel(100);
|
||||
|
||||
// Spawn task to drain events
|
||||
let _event_handle = tokio::spawn(async move {
|
||||
while event_rx.recv().await.is_some() {
|
||||
// For now, just drain events
|
||||
// In the future, we could log them or emit them to UI
|
||||
}
|
||||
});
|
||||
|
||||
// Send the request
|
||||
let sender = ReqwestSender::new()
|
||||
.map_err(|e| ActionError::Internal(format!("Failed to create HTTP client: {}", e)))?;
|
||||
let response = sender
|
||||
.send(sendable, event_tx)
|
||||
.await
|
||||
.map_err(|e| ActionError::Internal(format!("Failed to send request: {}", e)))?;
|
||||
|
||||
// Consume response body
|
||||
let status = response.status;
|
||||
let status_reason = response.status_reason.clone();
|
||||
let headers = response.headers.clone();
|
||||
let url = response.url.clone();
|
||||
|
||||
let (body_text, stats) = response
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| ActionError::Internal(format!("Failed to read response body: {}", e)))?;
|
||||
|
||||
// Return success result with response data
|
||||
Ok(ActionResult::Success {
|
||||
data: Some(json!({
|
||||
"status": status,
|
||||
"statusReason": status_reason,
|
||||
"headers": headers,
|
||||
"body": body_text,
|
||||
"contentLength": stats.size_decompressed,
|
||||
"url": url,
|
||||
})),
|
||||
message: Some(format!("HTTP {}", status)),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to render templates in an HTTP request.
|
||||
/// Copied from yaak-cli implementation.
|
||||
async fn render_http_request(
|
||||
r: &HttpRequest,
|
||||
environment_chain: Vec<yaak_models::models::Environment>,
|
||||
cb: &PluginTemplateCallback,
|
||||
opt: &RenderOptions,
|
||||
) -> Result<HttpRequest, String> {
|
||||
let vars = &make_vars_hashmap(environment_chain);
|
||||
|
||||
let mut url_parameters = Vec::new();
|
||||
for p in r.url_parameters.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
url_parameters.push(HttpUrlParameter {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut headers = Vec::new();
|
||||
for p in r.headers.clone() {
|
||||
if !p.enabled {
|
||||
continue;
|
||||
}
|
||||
headers.push(HttpRequestHeader {
|
||||
enabled: p.enabled,
|
||||
name: parse_and_render(p.name.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
value: parse_and_render(p.value.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
id: p.id,
|
||||
})
|
||||
}
|
||||
|
||||
let mut body = BTreeMap::new();
|
||||
for (k, v) in r.body.clone() {
|
||||
body.insert(
|
||||
k,
|
||||
render_json_value_raw(v, vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
);
|
||||
}
|
||||
|
||||
let authentication = {
|
||||
let mut disabled = false;
|
||||
let mut auth = BTreeMap::new();
|
||||
match r.authentication.get("disabled") {
|
||||
Some(Value::Bool(true)) => {
|
||||
disabled = true;
|
||||
}
|
||||
Some(Value::String(tmpl)) => {
|
||||
disabled = parse_and_render(tmpl.as_str(), vars, cb, opt)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.is_empty();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if disabled {
|
||||
auth.insert("disabled".to_string(), Value::Bool(true));
|
||||
} else {
|
||||
for (k, v) in r.authentication.clone() {
|
||||
if k == "disabled" {
|
||||
auth.insert(k, Value::Bool(false));
|
||||
} else {
|
||||
auth.insert(
|
||||
k,
|
||||
render_json_value_raw(v, vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
auth
|
||||
};
|
||||
|
||||
let url = parse_and_render(r.url.clone().as_str(), vars, cb, opt)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
// Apply path placeholders (e.g., /users/:id -> /users/123)
|
||||
let (url, url_parameters) = apply_path_placeholders(&url, &url_parameters);
|
||||
|
||||
Ok(HttpRequest {
|
||||
url,
|
||||
url_parameters,
|
||||
headers,
|
||||
body,
|
||||
authentication,
|
||||
..r.to_owned()
|
||||
})
|
||||
}
|
||||
crates/yaak-actions-builtin/src/lib.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
//! Built-in action implementations for Yaak.
//!
//! This crate provides concrete implementations of built-in actions using
//! the yaak-actions framework. It depends on domain-specific crates like
//! yaak-http, yaak-models, yaak-plugins, etc.

pub mod dependencies;
pub mod http;

pub use dependencies::BuiltinActionDependencies;
pub use http::register_http_actions;

crates/yaak-actions/Cargo.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
[package]
name = "yaak-actions"
version = "0.1.0"
edition = "2021"
description = "Centralized action system for Yaak"

[dependencies]
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = ["sync"] }
ts-rs = { workspace = true }

[dev-dependencies]
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }

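For reference, a custom handler can be plugged into the same executor. This is a minimal sketch assuming the `ActionHandler` signature used by `HttpSendActionHandler` above; the no-op action itself is hypothetical.

```rust
use yaak_actions::{ActionError, ActionHandler, ActionParams, ActionResult, CurrentContext};

struct NoopActionHandler;

impl ActionHandler for NoopActionHandler {
    fn handle(
        &self,
        _context: CurrentContext,
        _params: ActionParams,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<ActionResult, ActionError>> + Send + 'static>,
    > {
        // Resolve immediately with a success result and a short message.
        Box::pin(async move {
            Ok(ActionResult::Success { data: None, message: Some("Nothing to do".to_string()) })
        })
    }
}
```
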
14
crates/yaak-actions/bindings/ActionAvailability.ts
generated
Normal file
14
crates/yaak-actions/bindings/ActionAvailability.ts
generated
Normal file
@@ -0,0 +1,14 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Availability status for an action.
|
||||
*/
|
||||
export type ActionAvailability = { "status": "available" } | { "status": "available-with-prompt",
|
||||
/**
|
||||
* Fields that will require prompting.
|
||||
*/
|
||||
prompt_fields: Array<string>, } | { "status": "unavailable",
|
||||
/**
|
||||
* Fields that are missing.
|
||||
*/
|
||||
missing_fields: Array<string>, } | { "status": "not-found" };
|
||||
13
crates/yaak-actions/bindings/ActionError.ts
generated
Normal file
13
crates/yaak-actions/bindings/ActionError.ts
generated
Normal file
@@ -0,0 +1,13 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ActionGroupId } from "./ActionGroupId";
|
||||
import type { ActionId } from "./ActionId";
|
||||
import type { ActionScope } from "./ActionScope";
|
||||
|
||||
/**
|
||||
* Errors that can occur during action operations.
|
||||
*/
|
||||
export type ActionError = { "type": "not-found" } & ActionId | { "type": "disabled", action_id: ActionId, reason: string, } | { "type": "invalid-scope", expected: ActionScope, actual: ActionScope, } | { "type": "timeout" } & ActionId | { "type": "plugin-error" } & string | { "type": "validation-error" } & string | { "type": "permission-denied" } & string | { "type": "cancelled" } | { "type": "internal" } & string | { "type": "context-missing",
|
||||
/**
|
||||
* The context fields that are missing.
|
||||
*/
|
||||
missing_fields: Array<string>, } | { "type": "group-not-found" } & ActionGroupId | { "type": "group-already-exists" } & ActionGroupId;
|
||||
10
crates/yaak-actions/bindings/ActionGroupId.ts
generated
Normal file
10
crates/yaak-actions/bindings/ActionGroupId.ts
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Unique identifier for an action group.
|
||||
*
|
||||
* Format: `namespace:group-name`
|
||||
* - Built-in: `yaak:export`
|
||||
* - Plugin: `plugin.my-plugin:utilities`
|
||||
*/
|
||||
export type ActionGroupId = string;
|
||||
32
crates/yaak-actions/bindings/ActionGroupMetadata.ts
generated
Normal file
32
crates/yaak-actions/bindings/ActionGroupMetadata.ts
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ActionGroupId } from "./ActionGroupId";
|
||||
import type { ActionScope } from "./ActionScope";
|
||||
|
||||
/**
|
||||
* Metadata about an action group.
|
||||
*/
|
||||
export type ActionGroupMetadata = {
|
||||
/**
|
||||
* Unique identifier for this group.
|
||||
*/
|
||||
id: ActionGroupId,
|
||||
/**
|
||||
* Display name for the group.
|
||||
*/
|
||||
name: string,
|
||||
/**
|
||||
* Optional description of the group's purpose.
|
||||
*/
|
||||
description: string | null,
|
||||
/**
|
||||
* Icon to display for the group.
|
||||
*/
|
||||
icon: string | null,
|
||||
/**
|
||||
* Sort order for displaying groups (lower = earlier).
|
||||
*/
|
||||
order: number,
|
||||
/**
|
||||
* Optional scope restriction (if set, group only appears in this scope).
|
||||
*/
|
||||
scope: ActionScope | null, };
|
||||
18
crates/yaak-actions/bindings/ActionGroupSource.ts
generated
Normal file
18
crates/yaak-actions/bindings/ActionGroupSource.ts
generated
Normal file
@@ -0,0 +1,18 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Where an action group was registered from.
|
||||
*/
|
||||
export type ActionGroupSource = { "type": "builtin" } | { "type": "plugin",
|
||||
/**
|
||||
* Plugin reference ID.
|
||||
*/
|
||||
ref_id: string,
|
||||
/**
|
||||
* Plugin name.
|
||||
*/
|
||||
name: string, } | { "type": "dynamic",
|
||||
/**
|
||||
* Source identifier.
|
||||
*/
|
||||
source_id: string, };
|
||||
16
crates/yaak-actions/bindings/ActionGroupWithActions.ts
generated
Normal file
16
crates/yaak-actions/bindings/ActionGroupWithActions.ts
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ActionGroupMetadata } from "./ActionGroupMetadata";
|
||||
import type { ActionMetadata } from "./ActionMetadata";
|
||||
|
||||
/**
|
||||
* A group with its actions for UI rendering.
|
||||
*/
|
||||
export type ActionGroupWithActions = {
|
||||
/**
|
||||
* Group metadata.
|
||||
*/
|
||||
group: ActionGroupMetadata,
|
||||
/**
|
||||
* Actions in this group.
|
||||
*/
|
||||
actions: Array<ActionMetadata>, };
|
||||
10
crates/yaak-actions/bindings/ActionId.ts
generated
Normal file
10
crates/yaak-actions/bindings/ActionId.ts
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Unique identifier for an action.
|
||||
*
|
||||
* Format: `namespace:category:name`
|
||||
* - Built-in: `yaak:http-request:send`
|
||||
* - Plugin: `plugin.copy-curl:http-request:copy`
|
||||
*/
|
||||
export type ActionId = string;
|
||||
54
crates/yaak-actions/bindings/ActionMetadata.ts
generated
Normal file
54
crates/yaak-actions/bindings/ActionMetadata.ts
generated
Normal file
@@ -0,0 +1,54 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ActionGroupId } from "./ActionGroupId";
|
||||
import type { ActionId } from "./ActionId";
|
||||
import type { ActionScope } from "./ActionScope";
|
||||
import type { RequiredContext } from "./RequiredContext";
|
||||
|
||||
/**
|
||||
* Metadata about an action for discovery.
|
||||
*/
|
||||
export type ActionMetadata = {
|
||||
/**
|
||||
* Unique identifier for this action.
|
||||
*/
|
||||
id: ActionId,
|
||||
/**
|
||||
* Display label for the action.
|
||||
*/
|
||||
label: string,
|
||||
/**
|
||||
* Optional description of what the action does.
|
||||
*/
|
||||
description: string | null,
|
||||
/**
|
||||
* Icon name to display.
|
||||
*/
|
||||
icon: string | null,
|
||||
/**
|
||||
* The scope this action applies to.
|
||||
*/
|
||||
scope: ActionScope,
|
||||
/**
|
||||
* Keyboard shortcut (e.g., "Cmd+Enter").
|
||||
*/
|
||||
keyboardShortcut: string | null,
|
||||
/**
|
||||
* Whether the action requires a selection/target.
|
||||
*/
|
||||
requiresSelection: boolean,
|
||||
/**
|
||||
* Optional condition expression for when action is enabled.
|
||||
*/
|
||||
enabledCondition: string | null,
|
||||
/**
|
||||
* Optional group this action belongs to.
|
||||
*/
|
||||
groupId: ActionGroupId | null,
|
||||
/**
|
||||
* Sort order within a group (lower = earlier).
|
||||
*/
|
||||
order: number,
|
||||
/**
|
||||
* Context requirements for this action.
|
||||
*/
|
||||
requiredContext: RequiredContext, };
|
||||
10
crates/yaak-actions/bindings/ActionParams.ts
generated
Normal file
10
crates/yaak-actions/bindings/ActionParams.ts
generated
Normal file
@@ -0,0 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Parameters passed to action handlers.
|
||||
*/
|
||||
export type ActionParams = {
|
||||
/**
|
||||
* Arbitrary JSON parameters.
|
||||
*/
|
||||
data: unknown, };
|
||||
23
crates/yaak-actions/bindings/ActionResult.ts
generated
Normal file
23
crates/yaak-actions/bindings/ActionResult.ts
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { InputPrompt } from "./InputPrompt";
|
||||
|
||||
/**
|
||||
* Result of action execution.
|
||||
*/
|
||||
export type ActionResult = { "type": "success",
|
||||
/**
|
||||
* Optional data to return.
|
||||
*/
|
||||
data: unknown,
|
||||
/**
|
||||
* Optional message to display.
|
||||
*/
|
||||
message: string | null, } | { "type": "requires-input",
|
||||
/**
|
||||
* Prompt to show user.
|
||||
*/
|
||||
prompt: InputPrompt,
|
||||
/**
|
||||
* Continuation token.
|
||||
*/
|
||||
continuation_id: string, } | { "type": "cancelled" };
|
||||
6
crates/yaak-actions/bindings/ActionScope.ts
generated
Normal file
6
crates/yaak-actions/bindings/ActionScope.ts
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* The scope in which an action can be invoked.
|
||||
*/
|
||||
export type ActionScope = "global" | "http-request" | "websocket-request" | "grpc-request" | "workspace" | "folder" | "environment" | "cookie-jar";
|
||||
18
crates/yaak-actions/bindings/ActionSource.ts
generated
Normal file
18
crates/yaak-actions/bindings/ActionSource.ts
generated
Normal file
@@ -0,0 +1,18 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* Where an action was registered from.
|
||||
*/
|
||||
export type ActionSource = { "type": "builtin" } | { "type": "plugin",
|
||||
/**
|
||||
* Plugin reference ID.
|
||||
*/
|
||||
ref_id: string,
|
||||
/**
|
||||
* Plugin name.
|
||||
*/
|
||||
name: string, } | { "type": "dynamic",
|
||||
/**
|
||||
* Source identifier.
|
||||
*/
|
||||
source_id: string, };
|
||||
6
crates/yaak-actions/bindings/ActionTarget.ts
generated
Normal file
6
crates/yaak-actions/bindings/ActionTarget.ts
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* The target entity for an action.
|
||||
*/
|
||||
export type ActionTarget = { "type": "none" } | { "type": "http-request", id: string, } | { "type": "websocket-request", id: string, } | { "type": "grpc-request", id: string, } | { "type": "workspace", id: string, } | { "type": "folder", id: string, } | { "type": "environment", id: string, } | { "type": "multiple", targets: Array<ActionTarget>, };
|
||||
6
crates/yaak-actions/bindings/ContextRequirement.ts
generated
Normal file
6
crates/yaak-actions/bindings/ContextRequirement.ts
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* How strictly a context field is required.
|
||||
*/
|
||||
export type ContextRequirement = "not-required" | "optional" | "required" | "required-with-prompt";
|
||||
27
crates/yaak-actions/bindings/CurrentContext.ts
generated
Normal file
27
crates/yaak-actions/bindings/CurrentContext.ts
generated
Normal file
@@ -0,0 +1,27 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ActionTarget } from "./ActionTarget";
|
||||
|
||||
/**
|
||||
* Current context state from the application.
|
||||
*/
|
||||
export type CurrentContext = {
|
||||
/**
|
||||
* Current workspace ID (if any).
|
||||
*/
|
||||
workspaceId: string | null,
|
||||
/**
|
||||
* Current environment ID (if any).
|
||||
*/
|
||||
environmentId: string | null,
|
||||
/**
|
||||
* Currently selected target (if any).
|
||||
*/
|
||||
target: ActionTarget | null,
|
||||
/**
|
||||
* Whether a window context is available.
|
||||
*/
|
||||
hasWindow: boolean,
|
||||
/**
|
||||
* Whether the context provider can prompt for missing fields.
|
||||
*/
|
||||
canPrompt: boolean, };
|
||||
7
crates/yaak-actions/bindings/InputPrompt.ts
generated
Normal file
7
crates/yaak-actions/bindings/InputPrompt.ts
generated
Normal file
@@ -0,0 +1,7 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { SelectOption } from "./SelectOption";
|
||||
|
||||
/**
|
||||
* A prompt for user input.
|
||||
*/
|
||||
export type InputPrompt = { "type": "text", label: string, placeholder: string | null, default_value: string | null, } | { "type": "select", label: string, options: Array<SelectOption>, } | { "type": "confirm", label: string, };
|
||||
23
crates/yaak-actions/bindings/RequiredContext.ts
generated
Normal file
23
crates/yaak-actions/bindings/RequiredContext.ts
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { ContextRequirement } from "./ContextRequirement";
|
||||
|
||||
/**
|
||||
* Specifies what context fields an action requires.
|
||||
*/
|
||||
export type RequiredContext = {
|
||||
/**
|
||||
* Action requires a workspace to be active.
|
||||
*/
|
||||
workspace: ContextRequirement,
|
||||
/**
|
||||
* Action requires an environment to be selected.
|
||||
*/
|
||||
environment: ContextRequirement,
|
||||
/**
|
||||
* Action requires a specific target entity (request, folder, etc.).
|
||||
*/
|
||||
target: ContextRequirement,
|
||||
/**
|
||||
* Action requires a window context (for UI operations).
|
||||
*/
|
||||
window: ContextRequirement, };
|
||||
6
crates/yaak-actions/bindings/SelectOption.ts
generated
Normal file
6
crates/yaak-actions/bindings/SelectOption.ts
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
/**
|
||||
* An option in a select prompt.
|
||||
*/
|
||||
export type SelectOption = { label: string, value: string, };
|
||||
crates/yaak-actions/src/context.rs (new file, 331 lines)
@@ -0,0 +1,331 @@
|
||||
//! Action context types and context-aware filtering.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::ActionScope;
|
||||
|
||||
/// Specifies what context fields an action requires.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RequiredContext {
|
||||
/// Action requires a workspace to be active.
|
||||
#[serde(default)]
|
||||
pub workspace: ContextRequirement,
|
||||
|
||||
/// Action requires an environment to be selected.
|
||||
#[serde(default)]
|
||||
pub environment: ContextRequirement,
|
||||
|
||||
/// Action requires a specific target entity (request, folder, etc.).
|
||||
#[serde(default)]
|
||||
pub target: ContextRequirement,
|
||||
|
||||
/// Action requires a window context (for UI operations).
|
||||
#[serde(default)]
|
||||
pub window: ContextRequirement,
|
||||
}
|
||||
|
||||
impl RequiredContext {
|
||||
/// Action requires a target entity.
|
||||
pub fn requires_target() -> Self {
|
||||
Self {
|
||||
target: ContextRequirement::Required,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Action requires workspace and target.
|
||||
pub fn requires_workspace_and_target() -> Self {
|
||||
Self {
|
||||
workspace: ContextRequirement::Required,
|
||||
target: ContextRequirement::Required,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Action works globally, no specific context needed.
|
||||
pub fn global() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Action requires target with prompt if missing.
|
||||
pub fn requires_target_with_prompt() -> Self {
|
||||
Self {
|
||||
target: ContextRequirement::RequiredWithPrompt,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Action requires environment with prompt if missing.
|
||||
pub fn requires_environment_with_prompt() -> Self {
|
||||
Self {
|
||||
environment: ContextRequirement::RequiredWithPrompt,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// How strictly a context field is required.
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum ContextRequirement {
|
||||
/// Field is not needed.
|
||||
#[default]
|
||||
NotRequired,
|
||||
|
||||
/// Field is optional but will be used if available.
|
||||
Optional,
|
||||
|
||||
/// Field must be present; action will fail without it.
|
||||
Required,
|
||||
|
||||
/// Field must be present; prompt user to select if missing.
|
||||
RequiredWithPrompt,
|
||||
}
|
||||
|
||||
/// Current context state from the application.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CurrentContext {
|
||||
/// Current workspace ID (if any).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub workspace_id: Option<String>,
|
||||
|
||||
/// Current environment ID (if any).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub environment_id: Option<String>,
|
||||
|
||||
/// Currently selected target (if any).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub target: Option<ActionTarget>,
|
||||
|
||||
/// Whether a window context is available.
|
||||
#[serde(default)]
|
||||
pub has_window: bool,
|
||||
|
||||
/// Whether the context provider can prompt for missing fields.
|
||||
#[serde(default)]
|
||||
pub can_prompt: bool,
|
||||
}
|
||||
|
||||
/// The target entity for an action.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum ActionTarget {
|
||||
/// No target.
|
||||
None,
|
||||
/// HTTP request target.
|
||||
HttpRequest { id: String },
|
||||
/// WebSocket request target.
|
||||
WebsocketRequest { id: String },
|
||||
/// gRPC request target.
|
||||
GrpcRequest { id: String },
|
||||
/// Workspace target.
|
||||
Workspace { id: String },
|
||||
/// Folder target.
|
||||
Folder { id: String },
|
||||
/// Environment target.
|
||||
Environment { id: String },
|
||||
/// Multiple targets.
|
||||
Multiple { targets: Vec<ActionTarget> },
|
||||
}
|
||||
|
||||
impl ActionTarget {
|
||||
/// Get the scope this target corresponds to.
|
||||
pub fn scope(&self) -> Option<ActionScope> {
|
||||
match self {
|
||||
Self::None => None,
|
||||
Self::HttpRequest { .. } => Some(ActionScope::HttpRequest),
|
||||
Self::WebsocketRequest { .. } => Some(ActionScope::WebsocketRequest),
|
||||
Self::GrpcRequest { .. } => Some(ActionScope::GrpcRequest),
|
||||
Self::Workspace { .. } => Some(ActionScope::Workspace),
|
||||
Self::Folder { .. } => Some(ActionScope::Folder),
|
||||
Self::Environment { .. } => Some(ActionScope::Environment),
|
||||
Self::Multiple { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the ID of the target (if single target).
|
||||
pub fn id(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::HttpRequest { id }
|
||||
| Self::WebsocketRequest { id }
|
||||
| Self::GrpcRequest { id }
|
||||
| Self::Workspace { id }
|
||||
| Self::Folder { id }
|
||||
| Self::Environment { id } => Some(id),
|
||||
Self::None | Self::Multiple { .. } => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Availability status for an action.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "status", rename_all = "kebab-case")]
|
||||
pub enum ActionAvailability {
|
||||
/// Action is ready to execute.
|
||||
Available,
|
||||
|
||||
/// Action can execute but will prompt for missing context.
|
||||
AvailableWithPrompt {
|
||||
/// Fields that will require prompting.
|
||||
prompt_fields: Vec<String>,
|
||||
},
|
||||
|
||||
/// Action cannot execute due to missing context.
|
||||
Unavailable {
|
||||
/// Fields that are missing.
|
||||
missing_fields: Vec<String>,
|
||||
},
|
||||
|
||||
/// Action not found in registry.
|
||||
NotFound,
|
||||
}
|
||||
|
||||
impl ActionAvailability {
|
||||
/// Check if the action is available (possibly with prompts).
|
||||
pub fn is_available(&self) -> bool {
|
||||
matches!(self, Self::Available | Self::AvailableWithPrompt { .. })
|
||||
}
|
||||
|
||||
/// Check if the action is immediately available without prompts.
|
||||
pub fn is_immediately_available(&self) -> bool {
|
||||
matches!(self, Self::Available)
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if required context is satisfied by current context.
|
||||
pub fn check_context_availability(
|
||||
required: &RequiredContext,
|
||||
current: &CurrentContext,
|
||||
) -> ActionAvailability {
|
||||
let mut missing_fields = Vec::new();
|
||||
let mut prompt_fields = Vec::new();
|
||||
|
||||
// Check workspace
|
||||
check_field(
|
||||
"workspace",
|
||||
current.workspace_id.is_some(),
|
||||
&required.workspace,
|
||||
current.can_prompt,
|
||||
&mut missing_fields,
|
||||
&mut prompt_fields,
|
||||
);
|
||||
|
||||
// Check environment
|
||||
check_field(
|
||||
"environment",
|
||||
current.environment_id.is_some(),
|
||||
&required.environment,
|
||||
current.can_prompt,
|
||||
&mut missing_fields,
|
||||
&mut prompt_fields,
|
||||
);
|
||||
|
||||
// Check target
|
||||
check_field(
|
||||
"target",
|
||||
current.target.is_some(),
|
||||
&required.target,
|
||||
current.can_prompt,
|
||||
&mut missing_fields,
|
||||
&mut prompt_fields,
|
||||
);
|
||||
|
||||
// Check window
|
||||
check_field(
|
||||
"window",
|
||||
current.has_window,
|
||||
&required.window,
|
||||
false, // Can't prompt for window
|
||||
&mut missing_fields,
|
||||
&mut prompt_fields,
|
||||
);
|
||||
|
||||
if !missing_fields.is_empty() {
|
||||
ActionAvailability::Unavailable { missing_fields }
|
||||
} else if !prompt_fields.is_empty() {
|
||||
ActionAvailability::AvailableWithPrompt { prompt_fields }
|
||||
} else {
|
||||
ActionAvailability::Available
|
||||
}
|
||||
}
|
||||
|
||||
fn check_field(
|
||||
name: &str,
|
||||
has_value: bool,
|
||||
requirement: &ContextRequirement,
|
||||
can_prompt: bool,
|
||||
missing: &mut Vec<String>,
|
||||
promptable: &mut Vec<String>,
|
||||
) {
|
||||
match requirement {
|
||||
ContextRequirement::NotRequired | ContextRequirement::Optional => {}
|
||||
ContextRequirement::Required => {
|
||||
if !has_value {
|
||||
missing.push(name.to_string());
|
||||
}
|
||||
}
|
||||
ContextRequirement::RequiredWithPrompt => {
|
||||
if !has_value {
|
||||
if can_prompt {
|
||||
promptable.push(name.to_string());
|
||||
} else {
|
||||
missing.push(name.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_context_available() {
|
||||
let required = RequiredContext::requires_target();
|
||||
let current = CurrentContext {
|
||||
target: Some(ActionTarget::HttpRequest {
|
||||
id: "123".to_string(),
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let availability = check_context_availability(&required, ¤t);
|
||||
assert!(matches!(availability, ActionAvailability::Available));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_context_missing() {
|
||||
let required = RequiredContext::requires_target();
|
||||
let current = CurrentContext::default();
|
||||
|
||||
let availability = check_context_availability(&required, ¤t);
|
||||
assert!(matches!(
|
||||
availability,
|
||||
ActionAvailability::Unavailable { missing_fields } if missing_fields == vec!["target"]
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_context_promptable() {
|
||||
let required = RequiredContext::requires_target_with_prompt();
|
||||
let current = CurrentContext {
|
||||
can_prompt: true,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let availability = check_context_availability(&required, ¤t);
|
||||
assert!(matches!(
|
||||
availability,
|
||||
ActionAvailability::AvailableWithPrompt { prompt_fields } if prompt_fields == vec!["target"]
|
||||
));
|
||||
}
|
||||
}
|
||||
crates/yaak-actions/src/error.rs (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
//! Error types for the action system.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::{ActionGroupId, ActionId};
|
||||
|
||||
/// Errors that can occur during action operations.
|
||||
#[derive(Debug, Error, Clone, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum ActionError {
|
||||
/// Action not found in registry.
|
||||
#[error("Action not found: {0}")]
|
||||
NotFound(ActionId),
|
||||
|
||||
/// Action is disabled in current context.
|
||||
#[error("Action is disabled: {action_id} - {reason}")]
|
||||
Disabled { action_id: ActionId, reason: String },
|
||||
|
||||
/// Invalid scope for the action.
|
||||
#[error("Invalid scope: expected {expected:?}, got {actual:?}")]
|
||||
InvalidScope {
|
||||
expected: crate::ActionScope,
|
||||
actual: crate::ActionScope,
|
||||
},
|
||||
|
||||
/// Action execution timed out.
|
||||
#[error("Action timed out: {0}")]
|
||||
Timeout(ActionId),
|
||||
|
||||
/// Error from plugin execution.
|
||||
#[error("Plugin error: {0}")]
|
||||
PluginError(String),
|
||||
|
||||
/// Validation error in action parameters.
|
||||
#[error("Validation error: {0}")]
|
||||
ValidationError(String),
|
||||
|
||||
/// Permission denied for action.
|
||||
#[error("Permission denied: {0}")]
|
||||
PermissionDenied(String),
|
||||
|
||||
/// Action was cancelled by user.
|
||||
#[error("Action cancelled by user")]
|
||||
Cancelled,
|
||||
|
||||
/// Internal error.
|
||||
#[error("Internal error: {0}")]
|
||||
Internal(String),
|
||||
|
||||
/// Required context is missing.
|
||||
#[error("Required context missing: {missing_fields:?}")]
|
||||
ContextMissing {
|
||||
/// The context fields that are missing.
|
||||
missing_fields: Vec<String>,
|
||||
},
|
||||
|
||||
/// Action group not found.
|
||||
#[error("Group not found: {0}")]
|
||||
GroupNotFound(ActionGroupId),
|
||||
|
||||
/// Action group already exists.
|
||||
#[error("Group already exists: {0}")]
|
||||
GroupAlreadyExists(ActionGroupId),
|
||||
}
|
||||
|
||||
impl ActionError {
|
||||
/// Get a user-friendly error message.
|
||||
pub fn user_message(&self) -> String {
|
||||
match self {
|
||||
Self::NotFound(id) => format!("Action '{}' is not available", id),
|
||||
Self::Disabled { reason, .. } => reason.clone(),
|
||||
Self::InvalidScope { expected, actual } => {
|
||||
format!("Action requires {:?} scope, but got {:?}", expected, actual)
|
||||
}
|
||||
Self::Timeout(_) => "The operation took too long and was cancelled".into(),
|
||||
Self::PluginError(msg) => format!("Plugin error: {}", msg),
|
||||
Self::ValidationError(msg) => format!("Invalid input: {}", msg),
|
||||
Self::PermissionDenied(resource) => format!("Permission denied for {}", resource),
|
||||
Self::Cancelled => "Operation was cancelled".into(),
|
||||
Self::Internal(_) => "An unexpected error occurred".into(),
|
||||
Self::ContextMissing { missing_fields } => {
|
||||
format!("Missing required context: {}", missing_fields.join(", "))
|
||||
}
|
||||
Self::GroupNotFound(id) => format!("Action group '{}' not found", id),
|
||||
Self::GroupAlreadyExists(id) => format!("Action group '{}' already exists", id),
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether this error should be reported to telemetry.
|
||||
pub fn is_reportable(&self) -> bool {
|
||||
matches!(self, Self::Internal(_) | Self::PluginError(_))
|
||||
}
|
||||
|
||||
/// Whether this error can potentially be resolved by user interaction.
|
||||
pub fn is_promptable(&self) -> bool {
|
||||
matches!(self, Self::ContextMissing { .. })
|
||||
}
|
||||
|
||||
/// Whether this is a user-initiated cancellation.
|
||||
pub fn is_cancelled(&self) -> bool {
|
||||
matches!(self, Self::Cancelled)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_error_messages() {
|
||||
let err = ActionError::ContextMissing {
|
||||
missing_fields: vec!["workspace".into()],
|
||||
};
|
||||
assert_eq!(err.user_message(), "Missing required context: workspace");
|
||||
assert!(err.is_promptable());
|
||||
assert!(!err.is_cancelled());
|
||||
|
||||
let cancelled = ActionError::Cancelled;
|
||||
assert!(cancelled.is_cancelled());
|
||||
assert!(!cancelled.is_promptable());
|
||||
|
||||
let not_found = ActionError::NotFound(ActionId::builtin("test", "action"));
|
||||
assert_eq!(
|
||||
not_found.user_message(),
|
||||
"Action 'yaak:test:action' is not available"
|
||||
);
|
||||
}
|
||||
}
|
||||
crates/yaak-actions/src/executor.rs (new file, 606 lines)
@@ -0,0 +1,606 @@
|
||||
//! Action executor - central hub for action registration and invocation.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::{
|
||||
check_context_availability, ActionAvailability, ActionError, ActionGroupId,
|
||||
ActionGroupMetadata, ActionGroupSource, ActionGroupWithActions, ActionHandler, ActionId,
|
||||
ActionMetadata, ActionParams, ActionResult, ActionScope, ActionSource, CurrentContext,
|
||||
RegisteredActionGroup,
|
||||
};
|
||||
|
||||
/// Options for listing actions.
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct ListActionsOptions {
|
||||
/// Filter by scope.
|
||||
pub scope: Option<ActionScope>,
|
||||
/// Filter by group.
|
||||
pub group_id: Option<ActionGroupId>,
|
||||
/// Search term for label/description.
|
||||
pub search: Option<String>,
|
||||
}
|
||||
|
||||
/// A registered action with its handler.
|
||||
struct RegisteredAction {
|
||||
/// Action metadata.
|
||||
metadata: ActionMetadata,
|
||||
/// Where the action was registered from.
|
||||
source: ActionSource,
|
||||
/// The handler for this action.
|
||||
handler: Arc<dyn ActionHandler>,
|
||||
}
|
||||
|
||||
/// Central hub for action registration and invocation.
|
||||
///
|
||||
/// The executor owns all action metadata and handlers, ensuring every
|
||||
/// registered action has a handler by construction.
|
||||
pub struct ActionExecutor {
|
||||
/// All registered actions indexed by ID.
|
||||
actions: RwLock<HashMap<ActionId, RegisteredAction>>,
|
||||
|
||||
/// Actions indexed by scope for efficient filtering.
|
||||
scope_index: RwLock<HashMap<ActionScope, Vec<ActionId>>>,
|
||||
|
||||
/// All registered groups indexed by ID.
|
||||
groups: RwLock<HashMap<ActionGroupId, RegisteredActionGroup>>,
|
||||
}
|
||||
|
||||
impl Default for ActionExecutor {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActionExecutor {
|
||||
/// Create a new empty executor.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
actions: RwLock::new(HashMap::new()),
|
||||
scope_index: RwLock::new(HashMap::new()),
|
||||
groups: RwLock::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Action Registration
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Register an action with its handler.
|
||||
///
|
||||
/// Every action must have a handler - this is enforced by the API.
|
||||
pub async fn register<H: ActionHandler + 'static>(
|
||||
&self,
|
||||
metadata: ActionMetadata,
|
||||
source: ActionSource,
|
||||
handler: H,
|
||||
) -> Result<ActionId, ActionError> {
|
||||
let id = metadata.id.clone();
|
||||
let scope = metadata.scope.clone();
|
||||
|
||||
let action = RegisteredAction {
|
||||
metadata,
|
||||
source,
|
||||
handler: Arc::new(handler),
|
||||
};
|
||||
|
||||
// Insert action
|
||||
{
|
||||
let mut actions = self.actions.write().await;
|
||||
actions.insert(id.clone(), action);
|
||||
}
|
||||
|
||||
// Update scope index
|
||||
{
|
||||
let mut index = self.scope_index.write().await;
|
||||
index.entry(scope).or_default().push(id.clone());
|
||||
}
|
||||
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
/// Unregister an action.
|
||||
pub async fn unregister(&self, id: &ActionId) -> Result<(), ActionError> {
|
||||
let mut actions = self.actions.write().await;
|
||||
|
||||
let action = actions
|
||||
.remove(id)
|
||||
.ok_or_else(|| ActionError::NotFound(id.clone()))?;
|
||||
|
||||
// Update scope index
|
||||
{
|
||||
let mut index = self.scope_index.write().await;
|
||||
if let Some(ids) = index.get_mut(&action.metadata.scope) {
|
||||
ids.retain(|i| i != id);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove from group if assigned
|
||||
if let Some(group_id) = &action.metadata.group_id {
|
||||
let mut groups = self.groups.write().await;
|
||||
if let Some(group) = groups.get_mut(group_id) {
|
||||
group.action_ids.retain(|i| i != id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Unregister all actions from a specific source.
|
||||
pub async fn unregister_source(&self, source_id: &str) -> Vec<ActionId> {
|
||||
let actions_to_remove: Vec<ActionId> = {
|
||||
let actions = self.actions.read().await;
|
||||
actions
|
||||
.iter()
|
||||
.filter(|(_, a)| match &a.source {
|
||||
ActionSource::Plugin { ref_id, .. } => ref_id == source_id,
|
||||
ActionSource::Dynamic {
|
||||
source_id: sid, ..
|
||||
} => sid == source_id,
|
||||
ActionSource::Builtin => false,
|
||||
})
|
||||
.map(|(id, _)| id.clone())
|
||||
.collect()
|
||||
};
|
||||
|
||||
for id in &actions_to_remove {
|
||||
let _ = self.unregister(id).await;
|
||||
}
|
||||
|
||||
actions_to_remove
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Action Invocation
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Invoke an action with the given context and parameters.
|
||||
///
|
||||
/// This will:
|
||||
/// 1. Look up the action metadata
|
||||
/// 2. Check context availability
|
||||
/// 3. Execute the handler
|
||||
pub async fn invoke(
|
||||
&self,
|
||||
action_id: &ActionId,
|
||||
context: CurrentContext,
|
||||
params: ActionParams,
|
||||
) -> Result<ActionResult, ActionError> {
|
||||
// Get action and handler
|
||||
let (metadata, handler) = {
|
||||
let actions = self.actions.read().await;
|
||||
let action = actions
|
||||
.get(action_id)
|
||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
||||
(action.metadata.clone(), action.handler.clone())
|
||||
};
|
||||
|
||||
// Check context availability
|
||||
let availability = check_context_availability(&metadata.required_context, &context);
|
||||
|
||||
match availability {
|
||||
ActionAvailability::Available | ActionAvailability::AvailableWithPrompt { .. } => {
|
||||
// Context is satisfied, proceed with execution
|
||||
}
|
||||
ActionAvailability::Unavailable { missing_fields } => {
|
||||
return Err(ActionError::ContextMissing { missing_fields });
|
||||
}
|
||||
ActionAvailability::NotFound => {
|
||||
return Err(ActionError::NotFound(action_id.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
// Execute handler
|
||||
handler.handle(context, params).await
|
||||
}
|
||||
|
||||
/// Invoke an action, skipping context validation.
|
||||
///
|
||||
/// Use this when you've already validated the context externally.
|
||||
pub async fn invoke_unchecked(
|
||||
&self,
|
||||
action_id: &ActionId,
|
||||
context: CurrentContext,
|
||||
params: ActionParams,
|
||||
) -> Result<ActionResult, ActionError> {
|
||||
// Get handler
|
||||
let handler = {
|
||||
let actions = self.actions.read().await;
|
||||
let action = actions
|
||||
.get(action_id)
|
||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
||||
action.handler.clone()
|
||||
};
|
||||
|
||||
// Execute handler
|
||||
handler.handle(context, params).await
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Action Queries
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Get action metadata by ID.
|
||||
pub async fn get(&self, id: &ActionId) -> Option<ActionMetadata> {
|
||||
let actions = self.actions.read().await;
|
||||
actions.get(id).map(|a| a.metadata.clone())
|
||||
}
|
||||
|
||||
/// List all actions, optionally filtered.
|
||||
pub async fn list(&self, options: ListActionsOptions) -> Vec<ActionMetadata> {
|
||||
let actions = self.actions.read().await;
|
||||
|
||||
let mut result: Vec<_> = actions
|
||||
.values()
|
||||
.filter(|a| {
|
||||
// Scope filter
|
||||
if let Some(scope) = &options.scope {
|
||||
if &a.metadata.scope != scope {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Group filter
|
||||
if let Some(group_id) = &options.group_id {
|
||||
if a.metadata.group_id.as_ref() != Some(group_id) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Search filter
|
||||
if let Some(search) = &options.search {
|
||||
let search = search.to_lowercase();
|
||||
let matches_label = a.metadata.label.to_lowercase().contains(&search);
|
||||
let matches_desc = a
|
||||
.metadata
|
||||
.description
|
||||
.as_ref()
|
||||
.map(|d| d.to_lowercase().contains(&search))
|
||||
.unwrap_or(false);
|
||||
if !matches_label && !matches_desc {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
.map(|a| a.metadata.clone())
|
||||
.collect();
|
||||
|
||||
// Sort by order then label
|
||||
result.sort_by(|a, b| a.order.cmp(&b.order).then_with(|| a.label.cmp(&b.label)));
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// List actions available in the given context.
|
||||
pub async fn list_available(
|
||||
&self,
|
||||
context: &CurrentContext,
|
||||
options: ListActionsOptions,
|
||||
) -> Vec<(ActionMetadata, ActionAvailability)> {
|
||||
let all_actions = self.list(options).await;
|
||||
|
||||
all_actions
|
||||
.into_iter()
|
||||
.map(|action| {
|
||||
let availability =
|
||||
check_context_availability(&action.required_context, context);
|
||||
(action, availability)
|
||||
})
|
||||
.filter(|(_, availability)| availability.is_available())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get availability status for a specific action.
|
||||
pub async fn get_availability(
|
||||
&self,
|
||||
id: &ActionId,
|
||||
context: &CurrentContext,
|
||||
) -> ActionAvailability {
|
||||
let actions = self.actions.read().await;
|
||||
|
||||
match actions.get(id) {
|
||||
Some(action) => {
|
||||
check_context_availability(&action.metadata.required_context, context)
|
||||
}
|
||||
None => ActionAvailability::NotFound,
|
||||
}
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Group Registration
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Register an action group.
|
||||
pub async fn register_group(
|
||||
&self,
|
||||
metadata: ActionGroupMetadata,
|
||||
source: ActionGroupSource,
|
||||
) -> Result<ActionGroupId, ActionError> {
|
||||
let id = metadata.id.clone();
|
||||
|
||||
let mut groups = self.groups.write().await;
|
||||
if groups.contains_key(&id) {
|
||||
return Err(ActionError::GroupAlreadyExists(id));
|
||||
}
|
||||
|
||||
groups.insert(
|
||||
id.clone(),
|
||||
RegisteredActionGroup {
|
||||
metadata,
|
||||
action_ids: Vec::new(),
|
||||
source,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
/// Unregister a group (does not unregister its actions).
|
||||
pub async fn unregister_group(&self, id: &ActionGroupId) -> Result<(), ActionError> {
|
||||
let mut groups = self.groups.write().await;
|
||||
groups
|
||||
.remove(id)
|
||||
.ok_or_else(|| ActionError::GroupNotFound(id.clone()))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Add an action to a group.
|
||||
pub async fn add_to_group(
|
||||
&self,
|
||||
action_id: &ActionId,
|
||||
group_id: &ActionGroupId,
|
||||
) -> Result<(), ActionError> {
|
||||
// Update action's group_id
|
||||
{
|
||||
let mut actions = self.actions.write().await;
|
||||
let action = actions
|
||||
.get_mut(action_id)
|
||||
.ok_or_else(|| ActionError::NotFound(action_id.clone()))?;
|
||||
action.metadata.group_id = Some(group_id.clone());
|
||||
}
|
||||
|
||||
// Add to group's action list
|
||||
{
|
||||
let mut groups = self.groups.write().await;
|
||||
let group = groups
|
||||
.get_mut(group_id)
|
||||
.ok_or_else(|| ActionError::GroupNotFound(group_id.clone()))?;
|
||||
|
||||
if !group.action_ids.contains(action_id) {
|
||||
group.action_ids.push(action_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Group Queries
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Get a group by ID.
|
||||
pub async fn get_group(&self, id: &ActionGroupId) -> Option<ActionGroupMetadata> {
|
||||
let groups = self.groups.read().await;
|
||||
groups.get(id).map(|g| g.metadata.clone())
|
||||
}
|
||||
|
||||
/// List all groups, optionally filtered by scope.
|
||||
pub async fn list_groups(&self, scope: Option<ActionScope>) -> Vec<ActionGroupMetadata> {
|
||||
let groups = self.groups.read().await;
|
||||
|
||||
let mut result: Vec<_> = groups
|
||||
.values()
|
||||
.filter(|g| {
|
||||
scope.as_ref().map_or(true, |s| {
|
||||
g.metadata.scope.as_ref().map_or(true, |gs| gs == s)
|
||||
})
|
||||
})
|
||||
.map(|g| g.metadata.clone())
|
||||
.collect();
|
||||
|
||||
result.sort_by_key(|g| g.order);
|
||||
result
|
||||
}
|
||||
|
||||
/// List all actions in a specific group.
|
||||
pub async fn list_by_group(&self, group_id: &ActionGroupId) -> Vec<ActionMetadata> {
|
||||
let groups = self.groups.read().await;
|
||||
let actions = self.actions.read().await;
|
||||
|
||||
groups
|
||||
.get(group_id)
|
||||
.map(|group| {
|
||||
let mut result: Vec<_> = group
|
||||
.action_ids
|
||||
.iter()
|
||||
.filter_map(|id| actions.get(id).map(|a| a.metadata.clone()))
|
||||
.collect();
|
||||
result.sort_by_key(|a| a.order);
|
||||
result
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Get actions organized by their groups.
|
||||
pub async fn list_grouped(&self, scope: Option<ActionScope>) -> Vec<ActionGroupWithActions> {
|
||||
let group_list = self.list_groups(scope).await;
|
||||
let mut result = Vec::new();
|
||||
|
||||
for group in group_list {
|
||||
let actions = self.list_by_group(&group.id).await;
|
||||
result.push(ActionGroupWithActions { group, actions });
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
// Built-in Registration
|
||||
// ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
/// Register all built-in groups.
|
||||
pub async fn register_builtin_groups(&self) -> Result<(), ActionError> {
|
||||
for group in crate::groups::builtin::all() {
|
||||
self.register_group(group, ActionGroupSource::Builtin).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{handler_fn, RequiredContext};
|
||||
|
||||
async fn create_test_executor() -> ActionExecutor {
|
||||
let executor = ActionExecutor::new();
|
||||
executor
|
||||
.register(
|
||||
ActionMetadata {
|
||||
id: ActionId::builtin("test", "echo"),
|
||||
label: "Echo".to_string(),
|
||||
description: None,
|
||||
icon: None,
|
||||
scope: ActionScope::Global,
|
||||
keyboard_shortcut: None,
|
||||
requires_selection: false,
|
||||
enabled_condition: None,
|
||||
group_id: None,
|
||||
order: 0,
|
||||
required_context: RequiredContext::default(),
|
||||
},
|
||||
ActionSource::Builtin,
|
||||
handler_fn(|_ctx, params| async move {
|
||||
let msg: String = params.get("message").unwrap_or_default();
|
||||
Ok(ActionResult::with_message(msg))
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
executor
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_register_and_invoke() {
|
||||
let executor = create_test_executor().await;
|
||||
let action_id = ActionId::builtin("test", "echo");
|
||||
|
||||
let params = ActionParams::from_json(serde_json::json!({
|
||||
"message": "Hello, World!"
|
||||
}));
|
||||
|
||||
let result = executor
|
||||
.invoke(&action_id, CurrentContext::default(), params)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
match result {
|
||||
ActionResult::Success { message, .. } => {
|
||||
assert_eq!(message, Some("Hello, World!".to_string()));
|
||||
}
|
||||
_ => panic!("Expected Success result"),
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_invoke_not_found() {
|
||||
let executor = ActionExecutor::new();
|
||||
let action_id = ActionId::builtin("test", "unknown");
|
||||
|
||||
let result = executor
|
||||
.invoke(&action_id, CurrentContext::default(), ActionParams::empty())
|
||||
.await;
|
||||
|
||||
assert!(matches!(result, Err(ActionError::NotFound(_))));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_list_by_scope() {
|
||||
let executor = ActionExecutor::new();
|
||||
|
||||
executor
|
||||
.register(
|
||||
ActionMetadata {
|
||||
id: ActionId::builtin("global", "one"),
|
||||
label: "Global One".to_string(),
|
||||
description: None,
|
||||
icon: None,
|
||||
scope: ActionScope::Global,
|
||||
keyboard_shortcut: None,
|
||||
requires_selection: false,
|
||||
enabled_condition: None,
|
||||
group_id: None,
|
||||
order: 0,
|
||||
required_context: RequiredContext::default(),
|
||||
},
|
||||
ActionSource::Builtin,
|
||||
handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) }),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor
|
||||
.register(
|
||||
ActionMetadata {
|
||||
id: ActionId::builtin("http", "one"),
|
||||
label: "HTTP One".to_string(),
|
||||
description: None,
|
||||
icon: None,
|
||||
scope: ActionScope::HttpRequest,
|
||||
keyboard_shortcut: None,
|
||||
requires_selection: false,
|
||||
enabled_condition: None,
|
||||
group_id: None,
|
||||
order: 0,
|
||||
required_context: RequiredContext::default(),
|
||||
},
|
||||
ActionSource::Builtin,
|
||||
handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) }),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let global_actions = executor
|
||||
.list(ListActionsOptions {
|
||||
scope: Some(ActionScope::Global),
|
||||
..Default::default()
|
||||
})
|
||||
.await;
|
||||
assert_eq!(global_actions.len(), 1);
|
||||
|
||||
let http_actions = executor
|
||||
.list(ListActionsOptions {
|
||||
scope: Some(ActionScope::HttpRequest),
|
||||
..Default::default()
|
||||
})
|
||||
.await;
|
||||
assert_eq!(http_actions.len(), 1);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_groups() {
|
||||
let executor = ActionExecutor::new();
|
||||
executor.register_builtin_groups().await.unwrap();
|
||||
|
||||
let groups = executor.list_groups(None).await;
|
||||
assert!(!groups.is_empty());
|
||||
|
||||
let export_group = executor.get_group(&ActionGroupId::builtin("export")).await;
|
||||
assert!(export_group.is_some());
|
||||
assert_eq!(export_group.unwrap().name, "Export");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_unregister() {
|
||||
let executor = create_test_executor().await;
|
||||
let action_id = ActionId::builtin("test", "echo");
|
||||
|
||||
assert!(executor.get(&action_id).await.is_some());
|
||||
|
||||
executor.unregister(&action_id).await.unwrap();
|
||||
assert!(executor.get(&action_id).await.is_none());
|
||||
}
|
||||
}
|
||||
208
crates/yaak-actions/src/groups.rs
Normal file
208
crates/yaak-actions/src/groups.rs
Normal file
@@ -0,0 +1,208 @@
|
||||
//! Action group types and management.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::{ActionId, ActionMetadata, ActionScope};
|
||||
|
||||
/// Unique identifier for an action group.
|
||||
///
|
||||
/// Format: `namespace:group-name`
|
||||
/// - Built-in: `yaak:export`
|
||||
/// - Plugin: `plugin.my-plugin:utilities`
|
||||
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct ActionGroupId(pub String);
|
||||
|
||||
impl ActionGroupId {
|
||||
/// Create a namespaced group ID.
|
||||
pub fn new(namespace: &str, name: &str) -> Self {
|
||||
Self(format!("{}:{}", namespace, name))
|
||||
}
|
||||
|
||||
/// Create ID for built-in groups.
|
||||
pub fn builtin(name: &str) -> Self {
|
||||
Self::new("yaak", name)
|
||||
}
|
||||
|
||||
/// Create ID for plugin groups.
|
||||
pub fn plugin(plugin_ref_id: &str, name: &str) -> Self {
|
||||
Self::new(&format!("plugin.{}", plugin_ref_id), name)
|
||||
}
|
||||
|
||||
/// Get the raw string value.
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ActionGroupId {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Metadata about an action group.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ActionGroupMetadata {
|
||||
/// Unique identifier for this group.
|
||||
pub id: ActionGroupId,
|
||||
|
||||
/// Display name for the group.
|
||||
pub name: String,
|
||||
|
||||
/// Optional description of the group's purpose.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Icon to display for the group.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub icon: Option<String>,
|
||||
|
||||
/// Sort order for displaying groups (lower = earlier).
|
||||
#[serde(default)]
|
||||
pub order: i32,
|
||||
|
||||
/// Optional scope restriction (if set, group only appears in this scope).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub scope: Option<ActionScope>,
|
||||
}
|
||||
|
||||
/// Where an action group was registered from.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum ActionGroupSource {
|
||||
/// Built into Yaak core.
|
||||
Builtin,
|
||||
/// Registered by a plugin.
|
||||
Plugin {
|
||||
/// Plugin reference ID.
|
||||
ref_id: String,
|
||||
/// Plugin name.
|
||||
name: String,
|
||||
},
|
||||
/// Registered at runtime.
|
||||
Dynamic {
|
||||
/// Source identifier.
|
||||
source_id: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// A registered action group with its actions.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RegisteredActionGroup {
|
||||
/// Group metadata.
|
||||
pub metadata: ActionGroupMetadata,
|
||||
|
||||
/// IDs of actions in this group (ordered by action's order field).
|
||||
pub action_ids: Vec<ActionId>,
|
||||
|
||||
/// Where the group was registered from.
|
||||
pub source: ActionGroupSource,
|
||||
}
|
||||
|
||||
/// A group with its actions for UI rendering.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ActionGroupWithActions {
|
||||
/// Group metadata.
|
||||
pub group: ActionGroupMetadata,
|
||||
|
||||
/// Actions in this group.
|
||||
pub actions: Vec<ActionMetadata>,
|
||||
}
|
||||
|
||||
/// Built-in action group definitions.
|
||||
pub mod builtin {
|
||||
use super::*;
|
||||
|
||||
/// Export group - export and copy actions.
|
||||
pub fn export() -> ActionGroupMetadata {
|
||||
ActionGroupMetadata {
|
||||
id: ActionGroupId::builtin("export"),
|
||||
name: "Export".into(),
|
||||
description: Some("Export and copy actions".into()),
|
||||
icon: Some("download".into()),
|
||||
order: 100,
|
||||
scope: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Code generation group.
|
||||
pub fn code_generation() -> ActionGroupMetadata {
|
||||
ActionGroupMetadata {
|
||||
id: ActionGroupId::builtin("code-generation"),
|
||||
name: "Code Generation".into(),
|
||||
description: Some("Generate code snippets from requests".into()),
|
||||
icon: Some("code".into()),
|
||||
order: 200,
|
||||
scope: Some(ActionScope::HttpRequest),
|
||||
}
|
||||
}
|
||||
|
||||
/// Send group - request sending actions.
|
||||
pub fn send() -> ActionGroupMetadata {
|
||||
ActionGroupMetadata {
|
||||
id: ActionGroupId::builtin("send"),
|
||||
name: "Send".into(),
|
||||
description: Some("Actions for sending requests".into()),
|
||||
icon: Some("play".into()),
|
||||
order: 50,
|
||||
scope: Some(ActionScope::HttpRequest),
|
||||
}
|
||||
}
|
||||
|
||||
/// Import group.
|
||||
pub fn import() -> ActionGroupMetadata {
|
||||
ActionGroupMetadata {
|
||||
id: ActionGroupId::builtin("import"),
|
||||
name: "Import".into(),
|
||||
description: Some("Import data from files".into()),
|
||||
icon: Some("upload".into()),
|
||||
order: 150,
|
||||
scope: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Workspace management group.
|
||||
pub fn workspace() -> ActionGroupMetadata {
|
||||
ActionGroupMetadata {
|
||||
id: ActionGroupId::builtin("workspace"),
|
||||
name: "Workspace".into(),
|
||||
description: Some("Workspace management actions".into()),
|
||||
icon: Some("folder".into()),
|
||||
order: 300,
|
||||
scope: Some(ActionScope::Workspace),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get all built-in group definitions.
|
||||
pub fn all() -> Vec<ActionGroupMetadata> {
|
||||
vec![send(), export(), import(), code_generation(), workspace()]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_group_id_creation() {
|
||||
let id = ActionGroupId::builtin("export");
|
||||
assert_eq!(id.as_str(), "yaak:export");
|
||||
|
||||
let plugin_id = ActionGroupId::plugin("my-plugin", "utilities");
|
||||
assert_eq!(plugin_id.as_str(), "plugin.my-plugin:utilities");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_builtin_groups() {
|
||||
let groups = builtin::all();
|
||||
assert!(!groups.is_empty());
|
||||
assert!(groups.iter().any(|g| g.id == ActionGroupId::builtin("export")));
|
||||
}
|
||||
}
|
||||
103
crates/yaak-actions/src/handler.rs
Normal file
103
crates/yaak-actions/src/handler.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
//! Action handler types and execution.
|
||||
|
||||
use std::future::Future;
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{ActionError, ActionParams, ActionResult, CurrentContext};
|
||||
|
||||
/// A boxed future for async action handlers.
|
||||
pub type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;
|
||||
|
||||
/// Function signature for action handlers.
|
||||
pub type ActionHandlerFn = Arc<
|
||||
dyn Fn(CurrentContext, ActionParams) -> BoxFuture<'static, Result<ActionResult, ActionError>>
|
||||
+ Send
|
||||
+ Sync,
|
||||
>;
|
||||
|
||||
/// Trait for types that can handle action invocations.
|
||||
pub trait ActionHandler: Send + Sync {
|
||||
/// Execute the action with the given context and parameters.
|
||||
fn handle(
|
||||
&self,
|
||||
context: CurrentContext,
|
||||
params: ActionParams,
|
||||
) -> BoxFuture<'static, Result<ActionResult, ActionError>>;
|
||||
}
|
||||
|
||||
/// Wrapper to create an ActionHandler from a function.
|
||||
pub struct FnHandler<F>(pub F);
|
||||
|
||||
impl<F, Fut> ActionHandler for FnHandler<F>
|
||||
where
|
||||
F: Fn(CurrentContext, ActionParams) -> Fut + Send + Sync,
|
||||
Fut: Future<Output = Result<ActionResult, ActionError>> + Send + 'static,
|
||||
{
|
||||
fn handle(
|
||||
&self,
|
||||
context: CurrentContext,
|
||||
params: ActionParams,
|
||||
) -> BoxFuture<'static, Result<ActionResult, ActionError>> {
|
||||
Box::pin((self.0)(context, params))
|
||||
}
|
||||
}
|
||||
|
||||
/// Create an action handler from an async function.
|
||||
///
|
||||
/// # Example
|
||||
/// ```ignore
|
||||
/// let handler = handler_fn(|ctx, params| async move {
|
||||
/// Ok(ActionResult::ok())
|
||||
/// });
|
||||
/// ```
|
||||
pub fn handler_fn<F, Fut>(f: F) -> FnHandler<F>
|
||||
where
|
||||
F: Fn(CurrentContext, ActionParams) -> Fut + Send + Sync,
|
||||
Fut: Future<Output = Result<ActionResult, ActionError>> + Send + 'static,
|
||||
{
|
||||
FnHandler(f)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handler_fn() {
|
||||
let handler = handler_fn(|_ctx, _params| async move { Ok(ActionResult::ok()) });
|
||||
|
||||
let result = handler
|
||||
.handle(CurrentContext::default(), ActionParams::empty())
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_handler_with_params() {
|
||||
let handler = handler_fn(|_ctx, params| async move {
|
||||
let name: Option<String> = params.get("name");
|
||||
Ok(ActionResult::with_message(format!(
|
||||
"Hello, {}!",
|
||||
name.unwrap_or_else(|| "World".to_string())
|
||||
)))
|
||||
});
|
||||
|
||||
let params = ActionParams::from_json(serde_json::json!({
|
||||
"name": "Yaak"
|
||||
}));
|
||||
|
||||
let result = handler
|
||||
.handle(CurrentContext::default(), params)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
match result {
|
||||
ActionResult::Success { message, .. } => {
|
||||
assert_eq!(message, Some("Hello, Yaak!".to_string()));
|
||||
}
|
||||
_ => panic!("Expected Success result"),
|
||||
}
|
||||
}
|
||||
}
|
||||
18
crates/yaak-actions/src/lib.rs
Normal file
18
crates/yaak-actions/src/lib.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
//! Centralized action system for Yaak.
|
||||
//!
|
||||
//! This crate provides a unified hub for registering and invoking actions
|
||||
//! across all entry points: plugins, Tauri desktop app, CLI, deep links, and MCP server.
|
||||
|
||||
mod context;
|
||||
mod error;
|
||||
mod executor;
|
||||
mod groups;
|
||||
mod handler;
|
||||
mod types;
|
||||
|
||||
pub use context::*;
|
||||
pub use error::*;
|
||||
pub use executor::*;
|
||||
pub use groups::*;
|
||||
pub use handler::*;
|
||||
pub use types::*;
|
||||
273
crates/yaak-actions/src/types.rs
Normal file
273
crates/yaak-actions/src/types.rs
Normal file
@@ -0,0 +1,273 @@
|
||||
//! Core types for the action system.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::{ActionGroupId, RequiredContext};
|
||||
|
||||
/// Unique identifier for an action.
|
||||
///
|
||||
/// Format: `namespace:category:name`
|
||||
/// - Built-in: `yaak:http-request:send`
|
||||
/// - Plugin: `plugin.copy-curl:http-request:copy`
|
||||
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct ActionId(pub String);
|
||||
|
||||
impl ActionId {
|
||||
/// Create a namespaced action ID.
|
||||
pub fn new(namespace: &str, category: &str, name: &str) -> Self {
|
||||
Self(format!("{}:{}:{}", namespace, category, name))
|
||||
}
|
||||
|
||||
/// Create ID for built-in actions.
|
||||
pub fn builtin(category: &str, name: &str) -> Self {
|
||||
Self::new("yaak", category, name)
|
||||
}
|
||||
|
||||
/// Create ID for plugin actions.
|
||||
pub fn plugin(plugin_ref_id: &str, category: &str, name: &str) -> Self {
|
||||
Self::new(&format!("plugin.{}", plugin_ref_id), category, name)
|
||||
}
|
||||
|
||||
/// Get the raw string value.
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ActionId {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// The scope in which an action can be invoked.
|
||||
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum ActionScope {
|
||||
/// Global actions available everywhere.
|
||||
Global,
|
||||
/// Actions on HTTP requests.
|
||||
HttpRequest,
|
||||
/// Actions on WebSocket requests.
|
||||
WebsocketRequest,
|
||||
/// Actions on gRPC requests.
|
||||
GrpcRequest,
|
||||
/// Actions on workspaces.
|
||||
Workspace,
|
||||
/// Actions on folders.
|
||||
Folder,
|
||||
/// Actions on environments.
|
||||
Environment,
|
||||
/// Actions on cookie jars.
|
||||
CookieJar,
|
||||
}
|
||||
|
||||
/// Metadata about an action for discovery.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ActionMetadata {
|
||||
/// Unique identifier for this action.
|
||||
pub id: ActionId,
|
||||
|
||||
/// Display label for the action.
|
||||
pub label: String,
|
||||
|
||||
/// Optional description of what the action does.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
|
||||
/// Icon name to display.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub icon: Option<String>,
|
||||
|
||||
/// The scope this action applies to.
|
||||
pub scope: ActionScope,
|
||||
|
||||
/// Keyboard shortcut (e.g., "Cmd+Enter").
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub keyboard_shortcut: Option<String>,
|
||||
|
||||
/// Whether the action requires a selection/target.
|
||||
#[serde(default)]
|
||||
pub requires_selection: bool,
|
||||
|
||||
/// Optional condition expression for when action is enabled.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub enabled_condition: Option<String>,
|
||||
|
||||
/// Optional group this action belongs to.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub group_id: Option<ActionGroupId>,
|
||||
|
||||
/// Sort order within a group (lower = earlier).
|
||||
#[serde(default)]
|
||||
pub order: i32,
|
||||
|
||||
/// Context requirements for this action.
|
||||
#[serde(default)]
|
||||
pub required_context: RequiredContext,
|
||||
}
|
||||
|
||||
/// Where an action was registered from.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum ActionSource {
|
||||
/// Built into Yaak core.
|
||||
Builtin,
|
||||
/// Registered by a plugin.
|
||||
Plugin {
|
||||
/// Plugin reference ID.
|
||||
ref_id: String,
|
||||
/// Plugin name.
|
||||
name: String,
|
||||
},
|
||||
/// Registered at runtime (e.g., by MCP tools).
|
||||
Dynamic {
|
||||
/// Source identifier.
|
||||
source_id: String,
|
||||
},
|
||||
}
|
||||
|
||||
/// Parameters passed to action handlers.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct ActionParams {
|
||||
/// Arbitrary JSON parameters.
|
||||
#[serde(default)]
|
||||
#[ts(type = "unknown")]
|
||||
pub data: serde_json::Value,
|
||||
}
|
||||
|
||||
impl ActionParams {
|
||||
/// Create empty params.
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
data: serde_json::Value::Null,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create params from a JSON value.
|
||||
pub fn from_json(data: serde_json::Value) -> Self {
|
||||
Self { data }
|
||||
}
|
||||
|
||||
/// Get a typed value from the params.
|
||||
pub fn get<T: serde::de::DeserializeOwned>(&self, key: &str) -> Option<T> {
|
||||
self.data
|
||||
.get(key)
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
}
|
||||
}
|
||||
|
||||
/// Result of action execution.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum ActionResult {
|
||||
/// Action completed successfully.
|
||||
Success {
|
||||
/// Optional data to return.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[ts(type = "unknown")]
|
||||
data: Option<serde_json::Value>,
|
||||
/// Optional message to display.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
message: Option<String>,
|
||||
},
|
||||
|
||||
/// Action requires user input to continue.
|
||||
RequiresInput {
|
||||
/// Prompt to show user.
|
||||
prompt: InputPrompt,
|
||||
/// Continuation token.
|
||||
continuation_id: String,
|
||||
},
|
||||
|
||||
/// Action was cancelled by the user.
|
||||
Cancelled,
|
||||
}
|
||||
|
||||
impl ActionResult {
|
||||
/// Create a success result with no data.
|
||||
pub fn ok() -> Self {
|
||||
Self::Success {
|
||||
data: None,
|
||||
message: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a success result with a message.
|
||||
pub fn with_message(message: impl Into<String>) -> Self {
|
||||
Self::Success {
|
||||
data: None,
|
||||
message: Some(message.into()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a success result with data.
|
||||
pub fn with_data(data: serde_json::Value) -> Self {
|
||||
Self::Success {
|
||||
data: Some(data),
|
||||
message: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A prompt for user input.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
#[serde(tag = "type", rename_all = "kebab-case")]
|
||||
pub enum InputPrompt {
|
||||
/// Text input prompt.
|
||||
Text {
|
||||
label: String,
|
||||
placeholder: Option<String>,
|
||||
default_value: Option<String>,
|
||||
},
|
||||
/// Selection prompt.
|
||||
Select {
|
||||
label: String,
|
||||
options: Vec<SelectOption>,
|
||||
},
|
||||
/// Confirmation prompt.
|
||||
Confirm { label: String },
|
||||
}
|
||||
|
||||
/// An option in a select prompt.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, TS)]
|
||||
#[ts(export)]
|
||||
pub struct SelectOption {
|
||||
pub label: String,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_action_id_creation() {
|
||||
let id = ActionId::builtin("http-request", "send");
|
||||
assert_eq!(id.as_str(), "yaak:http-request:send");
|
||||
|
||||
let plugin_id = ActionId::plugin("copy-curl", "http-request", "copy");
|
||||
assert_eq!(plugin_id.as_str(), "plugin.copy-curl:http-request:copy");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_action_params() {
|
||||
let params = ActionParams::from_json(serde_json::json!({
|
||||
"name": "test",
|
||||
"count": 42
|
||||
}));
|
||||
|
||||
assert_eq!(params.get::<String>("name"), Some("test".to_string()));
|
||||
assert_eq!(params.get::<i32>("count"), Some(42));
|
||||
assert_eq!(params.get::<String>("missing"), None);
|
||||
}
|
||||
}
|
||||
@@ -6,3 +6,4 @@ publish = false
|
||||
|
||||
[dependencies]
|
||||
serde_json = { workspace = true }
|
||||
tokio = { workspace = true, features = ["process"] }
|
||||
|
||||
16
crates/yaak-common/src/command.rs
Normal file
16
crates/yaak-common/src/command.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use std::ffi::OsStr;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
|
||||
|
||||
/// Creates a new `tokio::process::Command` that won't spawn a console window on Windows.
|
||||
pub fn new_xplatform_command<S: AsRef<OsStr>>(program: S) -> tokio::process::Command {
|
||||
#[allow(unused_mut)]
|
||||
let mut cmd = tokio::process::Command::new(program);
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
cmd.creation_flags(CREATE_NO_WINDOW);
|
||||
}
|
||||
cmd
|
||||
}
|
||||
@@ -1,2 +1,3 @@
|
||||
pub mod command;
|
||||
pub mod platform;
|
||||
pub mod serde;
|
||||
|
||||
@@ -11,3 +11,7 @@ export function revealWorkspaceKey(workspaceId: string) {
|
||||
export function setWorkspaceKey(args: { workspaceId: string; key: string }) {
|
||||
return invoke<void>('cmd_set_workspace_key', args);
|
||||
}
|
||||
|
||||
export function disableEncryption(workspaceId: string) {
|
||||
return invoke<void>('cmd_disable_encryption', { workspaceId });
|
||||
}
|
||||
|
||||
@@ -115,6 +115,35 @@ impl EncryptionManager {
|
||||
self.set_workspace_key(workspace_id, &wkey)
|
||||
}
|
||||
|
||||
pub fn disable_encryption(&self, workspace_id: &str) -> Result<()> {
|
||||
info!("Disabling encryption for {workspace_id}");
|
||||
|
||||
self.query_manager.with_tx::<(), Error>(|tx| {
|
||||
let workspace = tx.get_workspace(workspace_id)?;
|
||||
let workspace_meta = tx.get_or_create_workspace_meta(workspace_id)?;
|
||||
|
||||
// Clear encryption challenge on workspace
|
||||
tx.upsert_workspace(
|
||||
&Workspace { encryption_key_challenge: None, ..workspace },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
// Clear encryption key on workspace meta
|
||||
tx.upsert_workspace_meta(
|
||||
&WorkspaceMeta { encryption_key: None, ..workspace_meta },
|
||||
&UpdateSource::Background,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Remove from cache
|
||||
let mut cache = self.cached_workspace_keys.lock().unwrap();
|
||||
cache.remove(workspace_id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_workspace_key(&self, workspace_id: &str) -> Result<WorkspaceKey> {
|
||||
{
|
||||
let cache = self.cached_workspace_keys.lock().unwrap();
|
||||
|
||||
@@ -12,7 +12,9 @@ serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
serde_yaml = "0.9.34"
|
||||
thiserror = { workspace = true }
|
||||
tokio = { workspace = true, features = ["io-util"] }
|
||||
ts-rs = { workspace = true, features = ["chrono-impl", "serde-json-impl"] }
|
||||
url = "2"
|
||||
yaak-common = { workspace = true }
|
||||
yaak-models = { workspace = true }
|
||||
yaak-sync = { workspace = true }
|
||||
|
||||
4
crates/yaak-git/bindings/gen_git.ts
generated
4
crates/yaak-git/bindings/gen_git.ts
generated
@@ -1,6 +1,10 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
import type { SyncModel } from "./gen_models";
|
||||
|
||||
export type BranchDeleteResult = { "type": "success", message: string, } | { "type": "not_fully_merged" };
|
||||
|
||||
export type CloneResult = { "type": "success" } | { "type": "cancelled" } | { "type": "needs_credentials", url: string, error: string | null, };
|
||||
|
||||
export type GitAuthor = { name: string | null, email: string | null, };
|
||||
|
||||
export type GitCommit = { author: GitAuthor, when: string, message: string | null, };
|
||||
|
||||
4
crates/yaak-git/bindings/gen_models.ts
generated
4
crates/yaak-git/bindings/gen_models.ts
generated
@@ -1,5 +1,7 @@
|
||||
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type Environment = { model: "environment", id: string, workspaceId: string, createdAt: string, updatedAt: string, name: string, public: boolean, parentModel: string, parentId: string | null, variables: Array<EnvironmentVariable>, color: string | null, sortPriority: number, };
|
||||
|
||||
export type EnvironmentVariable = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
@@ -18,4 +20,4 @@ export type SyncModel = { "type": "workspace" } & Workspace | { "type": "environ
|
||||
|
||||
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };
|
||||
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
|
||||
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };
|
||||
|
||||
@@ -3,9 +3,10 @@ import { invoke } from '@tauri-apps/api/core';
|
||||
import { createFastMutation } from '@yaakapp/app/hooks/useFastMutation';
|
||||
import { queryClient } from '@yaakapp/app/lib/queryClient';
|
||||
import { useMemo } from 'react';
|
||||
import { GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
import { BranchDeleteResult, CloneResult, GitCommit, GitRemote, GitStatusSummary, PullResult, PushResult } from './bindings/gen_git';
|
||||
|
||||
export * from './bindings/gen_git';
|
||||
export * from './bindings/gen_models';
|
||||
|
||||
export interface GitCredentials {
|
||||
username: string;
|
||||
@@ -59,7 +60,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
@@ -90,21 +90,31 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
mutationFn: (args) => invoke('cmd_git_rm_remote', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
branch: createFastMutation<void, string, { branch: string }>({
|
||||
createBranch: createFastMutation<void, string, { branch: string; base?: string }>({
|
||||
mutationKey: ['git', 'branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
mergeBranch: createFastMutation<void, string, { branch: string; force: boolean }>({
|
||||
mergeBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'merge', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_merge_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
deleteBranch: createFastMutation<BranchDeleteResult, string, { branch: string, force?: boolean }>({
|
||||
mutationKey: ['git', 'delete-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
deleteRemoteBranch: createFastMutation<void, string, { branch: string }>({
|
||||
mutationKey: ['git', 'delete-remote-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_delete_remote_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
renameBranch: createFastMutation<void, string, { oldName: string, newName: string }>({
|
||||
mutationKey: ['git', 'rename-branch', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_rename_branch', { dir, ...args }),
|
||||
onSuccess,
|
||||
}),
|
||||
checkout: createFastMutation<string, string, { branch: string; force: boolean }>({
|
||||
mutationKey: ['git', 'checkout', dir],
|
||||
mutationFn: (args) => invoke('cmd_git_checkout', { dir, ...args }),
|
||||
@@ -144,7 +154,6 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
if (creds == null) throw new Error('Canceled');
|
||||
|
||||
await invoke('cmd_git_add_credential', {
|
||||
dir,
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
@@ -166,3 +175,28 @@ export const gitMutations = (dir: string, callbacks: GitCallbacks) => {
|
||||
async function getRemotes(dir: string) {
|
||||
return invoke<GitRemote[]>('cmd_git_remotes', { dir });
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone a git repository, prompting for credentials if needed.
|
||||
*/
|
||||
export async function gitClone(
|
||||
url: string,
|
||||
dir: string,
|
||||
promptCredentials: (args: { url: string; error: string | null }) => Promise<GitCredentials | null>,
|
||||
): Promise<CloneResult> {
|
||||
const result = await invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
if (result.type !== 'needs_credentials') return result;
|
||||
|
||||
// Prompt for credentials
|
||||
const creds = await promptCredentials({ url: result.url, error: result.error });
|
||||
if (creds == null) return {type: 'cancelled'};
|
||||
|
||||
// Store credentials and retry
|
||||
await invoke('cmd_git_add_credential', {
|
||||
remoteUrl: result.url,
|
||||
username: creds.username,
|
||||
password: creds.password,
|
||||
});
|
||||
|
||||
return invoke<CloneResult>('cmd_git_clone', { url, dir });
|
||||
}
|
||||
|
||||
@@ -1,38 +1,30 @@
|
||||
use crate::error::Error::GitNotFound;
|
||||
use crate::error::Result;
|
||||
use std::path::Path;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::process::Stdio;
|
||||
use tokio::process::Command;
|
||||
use yaak_common::command::new_xplatform_command;
|
||||
|
||||
use crate::error::Error::GitNotFound;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
/// Create a git command that runs in the specified directory
|
||||
pub(crate) async fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
let mut cmd = new_binary_command_global().await?;
|
||||
cmd.arg("-C").arg(dir);
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x0800_0000;
|
||||
|
||||
pub(crate) fn new_binary_command(dir: &Path) -> Result<Command> {
|
||||
/// Create a git command without a specific directory (for global operations)
|
||||
pub(crate) async fn new_binary_command_global() -> Result<Command> {
|
||||
// 1. Probe that `git` exists and is runnable
|
||||
let mut probe = Command::new("git");
|
||||
let mut probe = new_xplatform_command("git");
|
||||
probe.arg("--version").stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
probe.creation_flags(CREATE_NO_WINDOW);
|
||||
}
|
||||
|
||||
let status = probe.status().map_err(|_| GitNotFound)?;
|
||||
let status = probe.status().await.map_err(|_| GitNotFound)?;
|
||||
|
||||
if !status.success() {
|
||||
return Err(GitNotFound);
|
||||
}
|
||||
|
||||
// 2. Build the reusable git command
|
||||
let mut cmd = Command::new("git");
|
||||
cmd.arg("-C").arg(dir);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
cmd.creation_flags(CREATE_NO_WINDOW);
|
||||
}
|
||||
|
||||
let cmd = new_xplatform_command("git");
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
@@ -1,99 +1,153 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ts_rs::TS;
|
||||
|
||||
use crate::binary::new_binary_command;
|
||||
use crate::error::Error::GenericError;
|
||||
use crate::error::Result;
|
||||
use crate::merge::do_merge;
|
||||
use crate::repository::open_repo;
|
||||
use crate::util::{bytes_to_string, get_branch_by_name, get_current_branch};
|
||||
use git2::BranchType;
|
||||
use git2::build::CheckoutBuilder;
|
||||
use log::info;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
if branch_name.starts_with("origin/") {
|
||||
return git_checkout_remote_branch(dir, branch_name, force);
|
||||
}
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
|
||||
#[serde(rename_all = "snake_case", tag = "type")]
|
||||
#[ts(export, export_to = "gen_git.ts")]
|
||||
pub enum BranchDeleteResult {
|
||||
Success { message: String },
|
||||
NotFullyMerged,
|
||||
}
|
||||
|
||||
let repo = open_repo(dir)?;
|
||||
let branch = get_branch_by_name(&repo, branch_name)?;
|
||||
let branch_ref = branch.into_reference();
|
||||
let branch_tree = branch_ref.peel_to_tree()?;
|
||||
pub async fn git_checkout_branch(dir: &Path, branch_name: &str, force: bool) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
|
||||
let mut options = CheckoutBuilder::default();
|
||||
let mut args = vec!["checkout"];
|
||||
if force {
|
||||
options.force();
|
||||
args.push("--force");
|
||||
}
|
||||
args.push(branch_name);
|
||||
|
||||
repo.checkout_tree(branch_tree.as_object(), Some(&mut options))?;
|
||||
repo.set_head(branch_ref.name().unwrap())?;
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(&args)
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git checkout: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to checkout: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(branch_name.to_string())
|
||||
}
|
||||
|
||||
pub(crate) fn git_checkout_remote_branch(
|
||||
dir: &Path,
|
||||
branch_name: &str,
|
||||
force: bool,
|
||||
) -> Result<String> {
|
||||
let branch_name = branch_name.trim_start_matches("origin/");
|
||||
let repo = open_repo(dir)?;
|
||||
|
||||
let refname = format!("refs/remotes/origin/{}", branch_name);
|
||||
let remote_ref = repo.find_reference(&refname)?;
|
||||
let commit = remote_ref.peel_to_commit()?;
|
||||
|
||||
let mut new_branch = repo.branch(branch_name, &commit, false)?;
|
||||
let upstream_name = format!("origin/{}", branch_name);
|
||||
new_branch.set_upstream(Some(&upstream_name))?;
|
||||
|
||||
git_checkout_branch(dir, branch_name, force)
|
||||
}
|
||||
|
||||
pub fn git_create_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let head = match repo.head() {
|
||||
Ok(h) => h,
|
||||
Err(e) if e.code() == git2::ErrorCode::UnbornBranch => {
|
||||
let msg = "Cannot create branch when there are no commits";
|
||||
return Err(GenericError(msg.into()));
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
let head = head.peel_to_commit()?;
|
||||
|
||||
repo.branch(name, &head, false)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_delete_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let mut branch = get_branch_by_name(&repo, name)?;
|
||||
|
||||
if branch.is_head() {
|
||||
info!("Deleting head branch");
|
||||
let branches = repo.branches(Some(BranchType::Local))?;
|
||||
let other_branch = branches.into_iter().filter_map(|b| b.ok()).find(|b| !b.0.is_head());
|
||||
let other_branch = match other_branch {
|
||||
None => return Err(GenericError("Cannot delete only branch".into())),
|
||||
Some(b) => bytes_to_string(b.0.name_bytes()?)?,
|
||||
};
|
||||
|
||||
git_checkout_branch(dir, &other_branch, true)?;
|
||||
pub async fn git_create_branch(dir: &Path, name: &str, base: Option<&str>) -> Result<()> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
cmd.arg("branch").arg(name);
|
||||
if let Some(base_branch) = base {
|
||||
cmd.arg(base_branch);
|
||||
}
|
||||
|
||||
branch.delete()?;
|
||||
let out =
|
||||
cmd.output().await.map_err(|e| GenericError(format!("failed to run git branch: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to create branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn git_merge_branch(dir: &Path, name: &str, _force: bool) -> Result<()> {
|
||||
let repo = open_repo(dir)?;
|
||||
let local_branch = get_current_branch(&repo)?.unwrap();
|
||||
pub async fn git_delete_branch(dir: &Path, name: &str, force: bool) -> Result<BranchDeleteResult> {
|
||||
let mut cmd = new_binary_command(dir).await?;
|
||||
|
||||
let commit_to_merge = get_branch_by_name(&repo, name)?.into_reference();
|
||||
let commit_to_merge = repo.reference_to_annotated_commit(&commit_to_merge)?;
|
||||
let out =
|
||||
if force { cmd.args(["branch", "-D", name]) } else { cmd.args(["branch", "-d", name]) }
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -d: {e}")))?;
|
||||
|
||||
do_merge(&repo, &local_branch, &commit_to_merge)?;
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() && stderr.to_lowercase().contains("not fully merged") {
|
||||
return Ok(BranchDeleteResult::NotFullyMerged);
|
||||
}
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(BranchDeleteResult::Success { message: combined })
|
||||
}
|
||||
|
||||
pub async fn git_merge_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["merge", name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git merge: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
// Check for merge conflicts
|
||||
if combined.to_lowercase().contains("conflict") {
|
||||
return Err(GenericError(
|
||||
"Merge conflicts detected. Please resolve them manually.".to_string(),
|
||||
));
|
||||
}
|
||||
return Err(GenericError(format!("Failed to merge: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_delete_remote_branch(dir: &Path, name: &str) -> Result<()> {
|
||||
// Remote branch names come in as "origin/branch-name", extract the branch name
|
||||
let branch_name = name.trim_start_matches("origin/");
|
||||
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["push", "origin", "--delete", branch_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git push --delete: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to delete remote branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn git_rename_branch(dir: &Path, old_name: &str, new_name: &str) -> Result<()> {
|
||||
let out = new_binary_command(dir)
|
||||
.await?
|
||||
.args(["branch", "-m", old_name, new_name])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| GenericError(format!("failed to run git branch -m: {e}")))?;
|
||||
|
||||
let stdout = String::from_utf8_lossy(&out.stdout);
|
||||
let stderr = String::from_utf8_lossy(&out.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
if !out.status.success() {
|
||||
return Err(GenericError(format!("Failed to rename branch: {}", combined.trim())));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
53
crates/yaak-git/src/clone.rs
Normal file
53
crates/yaak-git/src/clone.rs
Normal file
@@ -0,0 +1,53 @@
use crate::binary::new_binary_command;
use crate::error::Error::GenericError;
use crate::error::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
use ts_rs::TS;

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case", tag = "type")]
#[ts(export, export_to = "gen_git.ts")]
pub enum CloneResult {
    Success,
    Cancelled,
    NeedsCredentials { url: String, error: Option<String> },
}

pub async fn git_clone(url: &str, dir: &Path) -> Result<CloneResult> {
    let parent = dir.parent().ok_or_else(|| GenericError("Invalid clone directory".to_string()))?;
    fs::create_dir_all(parent)
        .map_err(|e| GenericError(format!("Failed to create directory: {e}")))?;
    let mut cmd = new_binary_command(parent).await?;
    cmd.args(["clone", url]).arg(dir).env("GIT_TERMINAL_PROMPT", "0");

    let out =
        cmd.output().await.map_err(|e| GenericError(format!("failed to run git clone: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
    let combined = format!("{}{}", stdout, stderr);
    let combined_lower = combined.to_lowercase();

    info!("Cloned status={}: {combined}", out.status);

    if !out.status.success() {
        // Check for credentials error
        if combined_lower.contains("could not read") {
            return Ok(CloneResult::NeedsCredentials { url: url.to_string(), error: None });
        }
        if combined_lower.contains("unable to access")
            || combined_lower.contains("authentication failed")
        {
            return Ok(CloneResult::NeedsCredentials {
                url: url.to_string(),
                error: Some(combined.to_string()),
            });
        }
        return Err(GenericError(format!("Failed to clone: {}", combined.trim())));
    }

    Ok(CloneResult::Success)
}
@@ -3,8 +3,9 @@ use crate::error::Error::GenericError;
use log::info;
use std::path::Path;

pub fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
    let out = new_binary_command(dir)?.args(["commit", "--message", message]).output()?;
pub async fn git_commit(dir: &Path, message: &str) -> crate::error::Result<()> {
    let out =
        new_binary_command(dir).await?.args(["commit", "--message", message]).output().await?;

    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
@@ -1,24 +1,19 @@
use crate::binary::new_binary_command;
use crate::binary::new_binary_command_global;
use crate::error::Error::GenericError;
use crate::error::Result;
use std::io::Write;
use std::path::Path;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use url::Url;

pub async fn git_add_credential(
    dir: &Path,
    remote_url: &str,
    username: &str,
    password: &str,
) -> Result<()> {
pub async fn git_add_credential(remote_url: &str, username: &str, password: &str) -> Result<()> {
    let url = Url::parse(remote_url)
        .map_err(|e| GenericError(format!("Failed to parse remote url {remote_url}: {e:?}")))?;
    let protocol = url.scheme();
    let host = url.host_str().unwrap();
    let path = Some(url.path());

    let mut child = new_binary_command(dir)?
    let mut child = new_binary_command_global()
        .await?
        .args(["credential", "approve"])
        .stdin(Stdio::piped())
        .stdout(Stdio::null())
@@ -26,19 +21,21 @@ pub async fn git_add_credential(

    {
        let stdin = child.stdin.as_mut().unwrap();
        writeln!(stdin, "protocol={}", protocol)?;
        writeln!(stdin, "host={}", host)?;
        stdin.write_all(format!("protocol={}\n", protocol).as_bytes()).await?;
        stdin.write_all(format!("host={}\n", host).as_bytes()).await?;
        if let Some(path) = path {
            if !path.is_empty() {
                writeln!(stdin, "path={}", path.trim_start_matches('/'))?;
                stdin
                    .write_all(format!("path={}\n", path.trim_start_matches('/')).as_bytes())
                    .await?;
            }
        }
        writeln!(stdin, "username={}", username)?;
        writeln!(stdin, "password={}", password)?;
        writeln!(stdin)?; // blank line terminator
        stdin.write_all(format!("username={}\n", username).as_bytes()).await?;
        stdin.write_all(format!("password={}\n", password).as_bytes()).await?;
        stdin.write_all(b"\n").await?; // blank line terminator
    }

    let status = child.wait()?;
    let status = child.wait().await?;
    if !status.success() {
        return Err(GenericError("Failed to approve git credential".to_string()));
    }
@@ -3,10 +3,12 @@ use crate::error::Error::GenericError;
use crate::error::Result;
use std::path::Path;

pub fn git_fetch_all(dir: &Path) -> Result<()> {
    let out = new_binary_command(dir)?
pub async fn git_fetch_all(dir: &Path) -> Result<()> {
    let out = new_binary_command(dir)
        .await?
        .args(["fetch", "--all", "--prune", "--tags"])
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;
    let stdout = String::from_utf8_lossy(&out.stdout);
    let stderr = String::from_utf8_lossy(&out.stderr);
@@ -1,13 +1,14 @@
mod add;
mod binary;
mod branch;
mod clone;
mod commit;
mod credential;
pub mod error;
mod fetch;
mod init;
mod log;
mod merge;

mod pull;
mod push;
mod remotes;
@@ -18,7 +19,11 @@ mod util;

// Re-export all git functions for external use
pub use add::git_add;
pub use branch::{git_checkout_branch, git_create_branch, git_delete_branch, git_merge_branch};
pub use branch::{
    BranchDeleteResult, git_checkout_branch, git_create_branch, git_delete_branch,
    git_delete_remote_branch, git_merge_branch, git_rename_branch,
};
pub use clone::{CloneResult, git_clone};
pub use commit::git_commit;
pub use credential::git_add_credential;
pub use fetch::git_fetch_all;
@@ -1,135 +0,0 @@
use crate::error::Error::MergeConflicts;
use crate::util::bytes_to_string;
use git2::{AnnotatedCommit, Branch, IndexEntry, Reference, Repository};
use log::{debug, info};

pub(crate) fn do_merge(
    repo: &Repository,
    local_branch: &Branch,
    commit_to_merge: &AnnotatedCommit,
) -> crate::error::Result<()> {
    debug!("Merging remote branches");
    let analysis = repo.merge_analysis(&[&commit_to_merge])?;

    if analysis.0.is_fast_forward() {
        let refname = bytes_to_string(local_branch.get().name_bytes())?;
        match repo.find_reference(&refname) {
            Ok(mut r) => {
                merge_fast_forward(repo, &mut r, &commit_to_merge)?;
            }
            Err(_) => {
                // The branch doesn't exist, so set the reference to the commit directly. Usually
                // this is because you are pulling into an empty repository.
                repo.reference(
                    &refname,
                    commit_to_merge.id(),
                    true,
                    &format!("Setting {} to {}", refname, commit_to_merge.id()),
                )?;
                repo.set_head(&refname)?;
                repo.checkout_head(Some(
                    git2::build::CheckoutBuilder::default()
                        .allow_conflicts(true)
                        .conflict_style_merge(true)
                        .force(),
                ))?;
            }
        };
    } else if analysis.0.is_normal() {
        let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
        merge_normal(repo, &head_commit, commit_to_merge)?;
    } else {
        debug!("Skipping merge. Nothing to do")
    }

    Ok(())
}

pub(crate) fn merge_fast_forward(
    repo: &Repository,
    local_reference: &mut Reference,
    remote_commit: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing fast forward");
    let name = match local_reference.name() {
        Some(s) => s.to_string(),
        None => String::from_utf8_lossy(local_reference.name_bytes()).to_string(),
    };
    let msg = format!("Fast-Forward: Setting {} to id: {}", name, remote_commit.id());
    local_reference.set_target(remote_commit.id(), &msg)?;
    repo.set_head(&name)?;
    repo.checkout_head(Some(
        git2::build::CheckoutBuilder::default()
            // For some reason, the force is required to make the working directory actually get
            // updated I suspect we should be adding some logic to handle dirty working directory
            // states, but this is just an example so maybe not.
            .force(),
    ))?;
    Ok(())
}

pub(crate) fn merge_normal(
    repo: &Repository,
    local: &AnnotatedCommit,
    remote: &AnnotatedCommit,
) -> crate::error::Result<()> {
    info!("Performing normal merge");
    let local_tree = repo.find_commit(local.id())?.tree()?;
    let remote_tree = repo.find_commit(remote.id())?.tree()?;
    let ancestor = repo.find_commit(repo.merge_base(local.id(), remote.id())?)?.tree()?;

    let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;

    if idx.has_conflicts() {
        let conflicts = idx.conflicts()?;
        for conflict in conflicts {
            if let Ok(conflict) = conflict {
                print_conflict(&conflict);
            }
        }
        return Err(MergeConflicts);
    }

    let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
    // now create the merge commit
    let msg = format!("Merge: {} into {}", remote.id(), local.id());
    let sig = repo.signature()?;
    let local_commit = repo.find_commit(local.id())?;
    let remote_commit = repo.find_commit(remote.id())?;

    // Do our merge commit and set current branch head to that commit.
    let _merge_commit = repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        &msg,
        &result_tree,
        &[&local_commit, &remote_commit],
    )?;

    // Set working tree to match head.
    repo.checkout_head(None)?;

    Ok(())
}

fn print_conflict(conflict: &git2::IndexConflict) {
    let ancestor = conflict.ancestor.as_ref().map(path_from_index_entry);
    let ours = conflict.our.as_ref().map(path_from_index_entry);
    let theirs = conflict.their.as_ref().map(path_from_index_entry);

    println!("Conflict detected:");
    if let Some(path) = ancestor {
        println!("  Common ancestor: {:?}", path);
    }
    if let Some(path) = ours {
        println!("  Ours: {:?}", path);
    }
    if let Some(path) = theirs {
        println!("  Theirs: {:?}", path);
    }
}

fn path_from_index_entry(entry: &IndexEntry) -> String {
    String::from_utf8_lossy(entry.path.as_slice()).into_owned()
}
@@ -17,17 +17,25 @@ pub enum PullResult {
    NeedsCredentials { url: String, error: Option<String> },
}

pub fn git_pull(dir: &Path) -> Result<PullResult> {
    let repo = open_repo(dir)?;
    let branch_name = get_current_branch_name(&repo)?;
    let remote = get_default_remote_in_repo(&repo)?;
    let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
    let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
pub async fn git_pull(dir: &Path) -> Result<PullResult> {
    // Extract all git2 data before any await points (git2 types are not Send)
    let (branch_name, remote_name, remote_url) = {
        let repo = open_repo(dir)?;
        let branch_name = get_current_branch_name(&repo)?;
        let remote = get_default_remote_in_repo(&repo)?;
        let remote_name =
            remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
        let remote_url =
            remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
        (branch_name, remote_name, remote_url)
    };

    let out = new_binary_command(dir)?
    let out = new_binary_command(dir)
        .await?
        .args(["pull", &remote_name, &branch_name])
        .env("GIT_TERMINAL_PROMPT", "0")
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git pull: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
@@ -17,17 +17,25 @@ pub enum PushResult {
    NeedsCredentials { url: String, error: Option<String> },
}

pub fn git_push(dir: &Path) -> Result<PushResult> {
    let repo = open_repo(dir)?;
    let branch_name = get_current_branch_name(&repo)?;
    let remote = get_default_remote_for_push_in_repo(&repo)?;
    let remote_name = remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?;
    let remote_url = remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?;
pub async fn git_push(dir: &Path) -> Result<PushResult> {
    // Extract all git2 data before any await points (git2 types are not Send)
    let (branch_name, remote_name, remote_url) = {
        let repo = open_repo(dir)?;
        let branch_name = get_current_branch_name(&repo)?;
        let remote = get_default_remote_for_push_in_repo(&repo)?;
        let remote_name =
            remote.name().ok_or(GenericError("Failed to get remote name".to_string()))?.to_string();
        let remote_url =
            remote.url().ok_or(GenericError("Failed to get remote url".to_string()))?.to_string();
        (branch_name, remote_name, remote_url)
    };

    let out = new_binary_command(dir)?
    let out = new_binary_command(dir)
        .await?
        .args(["push", &remote_name, &branch_name])
        .env("GIT_TERMINAL_PROMPT", "0")
        .output()
        .await
        .map_err(|e| GenericError(format!("failed to run git push: {e}")))?;

    let stdout = String::from_utf8_lossy(&out.stdout);
@@ -47,10 +47,6 @@ pub(crate) fn remote_branch_names(repo: &Repository) -> Result<Vec<String>> {
    Ok(branches)
}

pub(crate) fn get_branch_by_name<'s>(repo: &'s Repository, name: &str) -> Result<Branch<'s>> {
    Ok(repo.find_branch(name, BranchType::Local)?)
}

pub(crate) fn bytes_to_string(bytes: &[u8]) -> Result<String> {
    Ok(String::from_utf8(bytes.to_vec())?)
}
@@ -22,5 +22,6 @@ tokio-stream = "0.1.14"
tonic = { version = "0.12.3", default-features = false, features = ["transport"] }
tonic-reflection = "0.12.3"
uuid = { version = "1.7.0", features = ["v4"] }
yaak-common = { workspace = true }
yaak-tls = { workspace = true }
thiserror = "2.0.17"
@@ -115,14 +115,18 @@ impl GrpcConnection {
        Ok(client.unary(req, path, codec).await?)
    }

    pub async fn streaming(
    pub async fn streaming<F>(
        &self,
        service: &str,
        method: &str,
        stream: ReceiverStream<String>,
        metadata: &BTreeMap<String, String>,
        client_cert: Option<ClientCertificateConfig>,
    ) -> Result<Response<Streaming<DynamicMessage>>> {
        on_message: F,
    ) -> Result<Response<Streaming<DynamicMessage>>>
    where
        F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
    {
        let method = &self.method(&service, &method).await?;
        let mapped_stream = {
            let input_message = method.input();
@@ -131,31 +135,39 @@ impl GrpcConnection {
            let md = metadata.clone();
            let use_reflection = self.use_reflection.clone();
            let client_cert = client_cert.clone();
            stream.filter_map(move |json| {
                let pool = pool.clone();
                let uri = uri.clone();
                let input_message = input_message.clone();
                let md = md.clone();
                let use_reflection = use_reflection.clone();
                let client_cert = client_cert.clone();
                tokio::runtime::Handle::current().block_on(async move {
                    if use_reflection {
                        if let Err(e) =
                            reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
                        {
                            warn!("Failed to resolve Any types: {e}");
            stream
                .then(move |json| {
                    let pool = pool.clone();
                    let uri = uri.clone();
                    let input_message = input_message.clone();
                    let md = md.clone();
                    let use_reflection = use_reflection.clone();
                    let client_cert = client_cert.clone();
                    let on_message = on_message.clone();
                    let json_clone = json.clone();
                    async move {
                        if use_reflection {
                            if let Err(e) =
                                reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
                            {
                                warn!("Failed to resolve Any types: {e}");
                            }
                        }
                    }
                    let mut de = Deserializer::from_str(&json);
                    match DynamicMessage::deserialize(input_message, &mut de) {
                        Ok(m) => Some(m),
                        Err(e) => {
                            warn!("Failed to deserialize message: {e}");
                            None
                        let mut de = Deserializer::from_str(&json);
                        match DynamicMessage::deserialize(input_message, &mut de) {
                            Ok(m) => {
                                on_message(Ok(json_clone));
                                Some(m)
                            }
                            Err(e) => {
                                warn!("Failed to deserialize message: {e}");
                                on_message(Err(e.to_string()));
                                None
                            }
                        }
                    }
                })
            })
                .filter_map(|x| x)
        };

        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -169,14 +181,18 @@ impl GrpcConnection {
        Ok(client.streaming(req, path, codec).await?)
    }

    pub async fn client_streaming(
    pub async fn client_streaming<F>(
        &self,
        service: &str,
        method: &str,
        stream: ReceiverStream<String>,
        metadata: &BTreeMap<String, String>,
        client_cert: Option<ClientCertificateConfig>,
    ) -> Result<Response<DynamicMessage>> {
        on_message: F,
    ) -> Result<Response<DynamicMessage>>
    where
        F: Fn(std::result::Result<String, String>) + Send + Sync + Clone + 'static,
    {
        let method = &self.method(&service, &method).await?;
        let mapped_stream = {
            let input_message = method.input();
@@ -185,31 +201,39 @@ impl GrpcConnection {
            let md = metadata.clone();
            let use_reflection = self.use_reflection.clone();
            let client_cert = client_cert.clone();
            stream.filter_map(move |json| {
                let pool = pool.clone();
                let uri = uri.clone();
                let input_message = input_message.clone();
                let md = md.clone();
                let use_reflection = use_reflection.clone();
                let client_cert = client_cert.clone();
                tokio::runtime::Handle::current().block_on(async move {
                    if use_reflection {
                        if let Err(e) =
                            reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
                        {
                            warn!("Failed to resolve Any types: {e}");
            stream
                .then(move |json| {
                    let pool = pool.clone();
                    let uri = uri.clone();
                    let input_message = input_message.clone();
                    let md = md.clone();
                    let use_reflection = use_reflection.clone();
                    let client_cert = client_cert.clone();
                    let on_message = on_message.clone();
                    let json_clone = json.clone();
                    async move {
                        if use_reflection {
                            if let Err(e) =
                                reflect_types_for_message(pool, &uri, &json, &md, client_cert).await
                            {
                                warn!("Failed to resolve Any types: {e}");
                            }
                        }
                    }
                    let mut de = Deserializer::from_str(&json);
                    match DynamicMessage::deserialize(input_message, &mut de) {
                        Ok(m) => Some(m),
                        Err(e) => {
                            warn!("Failed to deserialize message: {e}");
                            None
                        let mut de = Deserializer::from_str(&json);
                        match DynamicMessage::deserialize(input_message, &mut de) {
                            Ok(m) => {
                                on_message(Ok(json_clone));
                                Some(m)
                            }
                            Err(e) => {
                                warn!("Failed to deserialize message: {e}");
                                on_message(Err(e.to_string()));
                                None
                            }
                        }
                    }
                })
            })
                .filter_map(|x| x)
        };

        let mut client = tonic::client::Grpc::with_origin(self.conn.clone(), self.uri.clone());
@@ -316,10 +340,9 @@ impl GrpcHandle {
        metadata: &BTreeMap<String, String>,
        validate_certificates: bool,
        client_cert: Option<ClientCertificateConfig>,
        skip_cache: bool,
    ) -> Result<Vec<ServiceDefinition>> {
        // Ensure we have a pool; reflect only if missing
        if skip_cache || self.get_pool(id, uri, proto_files).is_none() {
        if self.get_pool(id, uri, proto_files).is_none() {
            info!("Reflecting gRPC services for {} at {}", id, uri);
            self.reflect(id, uri, proto_files, metadata, validate_certificates, client_cert)
                .await?;
@@ -16,12 +16,12 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use tokio::fs;
use tokio::process::Command;
use tokio::sync::RwLock;
use tonic::codegen::http::uri::PathAndQuery;
use tonic::transport::Uri;
use tonic_reflection::pb::v1::server_reflection_request::MessageRequest;
use tonic_reflection::pb::v1::server_reflection_response::MessageResponse;
use yaak_common::command::new_xplatform_command;
use yaak_tls::ClientCertificateConfig;

pub async fn fill_pool_from_files(
@@ -91,11 +91,11 @@ pub async fn fill_pool_from_files(

    info!("Invoking protoc with {}", args.join(" "));

    let out = Command::new(&config.protoc_bin_path)
        .args(&args)
        .output()
        .await
        .map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;
    let mut cmd = new_xplatform_command(&config.protoc_bin_path);
    cmd.args(&args);

    let out =
        cmd.output().await.map_err(|e| GenericError(format!("Failed to run protoc: {}", e)))?;

    if !out.status.success() {
        return Err(GenericError(format!(
@@ -2,6 +2,8 @@ use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::{debug, info, warn};
use reqwest::{Client, Proxy, redirect};
use std::sync::Arc;
use yaak_models::models::DnsOverride;
use yaak_tls::{ClientCertificateConfig, get_tls_config};

#[derive(Clone)]
@@ -28,10 +30,14 @@ pub struct HttpConnectionOptions {
    pub validate_certificates: bool,
    pub proxy: HttpConnectionProxySetting,
    pub client_certificate: Option<ClientCertificateConfig>,
    pub dns_overrides: Vec<DnsOverride>,
}

impl HttpConnectionOptions {
    pub(crate) fn build_client(&self) -> Result<Client> {
    /// Build a reqwest Client and return it along with the DNS resolver.
    /// The resolver is returned separately so it can be configured per-request
    /// to emit DNS timing events to the appropriate channel.
    pub(crate) fn build_client(&self) -> Result<(Client, Arc<LocalhostResolver>)> {
        let mut client = Client::builder()
            .connection_verbose(true)
            .redirect(redirect::Policy::none())
@@ -40,15 +46,19 @@ impl HttpConnectionOptions {
            .no_brotli()
            .no_deflate()
            .referer(false)
            .tls_info(true);
            .tls_info(true)
            // Disable connection pooling to ensure DNS resolution happens on each request
            // This is needed so we can emit DNS timing events for each request
            .pool_max_idle_per_host(0);

        // Configure TLS with optional client certificate
        let config =
            get_tls_config(self.validate_certificates, true, self.client_certificate.clone())?;
        client = client.use_preconfigured_tls(config);

        // Configure DNS resolver
        client = client.dns_resolver(LocalhostResolver::new());
        // Configure DNS resolver - keep a reference to configure per-request
        let resolver = LocalhostResolver::new(self.dns_overrides.clone());
        client = client.dns_resolver(resolver.clone());

        // Configure proxy
        match self.proxy.clone() {
@@ -69,7 +79,7 @@ impl HttpConnectionOptions {
            self.client_certificate.is_some()
        );

        Ok(client.build()?)
        Ok((client.build()?, resolver))
    }
}
@@ -1,53 +1,185 @@
use crate::sender::HttpResponseEvent;
use hyper_util::client::legacy::connect::dns::{
    GaiResolver as HyperGaiResolver, Name as HyperName,
};
use log::info;
use reqwest::dns::{Addrs, Name, Resolve, Resolving};
use std::collections::HashMap;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{RwLock, mpsc};
use tower_service::Service;
use yaak_models::models::DnsOverride;

/// Stores resolved addresses for a hostname override
#[derive(Clone)]
pub struct ResolvedOverride {
    pub ipv4: Vec<Ipv4Addr>,
    pub ipv6: Vec<Ipv6Addr>,
}

#[derive(Clone)]
pub struct LocalhostResolver {
    fallback: HyperGaiResolver,
    event_tx: Arc<RwLock<Option<mpsc::Sender<HttpResponseEvent>>>>,
    overrides: Arc<HashMap<String, ResolvedOverride>>,
}

impl LocalhostResolver {
    pub fn new() -> Arc<Self> {
    pub fn new(dns_overrides: Vec<DnsOverride>) -> Arc<Self> {
        let resolver = HyperGaiResolver::new();
        Arc::new(Self { fallback: resolver })

        // Pre-parse DNS overrides into a lookup map
        let mut overrides = HashMap::new();
        for o in dns_overrides {
            if !o.enabled {
                continue;
            }
            let hostname = o.hostname.to_lowercase();

            let ipv4: Vec<Ipv4Addr> =
                o.ipv4.iter().filter_map(|s| s.parse::<Ipv4Addr>().ok()).collect();

            let ipv6: Vec<Ipv6Addr> =
                o.ipv6.iter().filter_map(|s| s.parse::<Ipv6Addr>().ok()).collect();

            // Only add if at least one address is valid
            if !ipv4.is_empty() || !ipv6.is_empty() {
                overrides.insert(hostname, ResolvedOverride { ipv4, ipv6 });
            }
        }

        Arc::new(Self {
            fallback: resolver,
            event_tx: Arc::new(RwLock::new(None)),
            overrides: Arc::new(overrides),
        })
    }

    /// Set the event sender for the current request.
    /// This should be called before each request to direct DNS events
    /// to the appropriate channel.
    pub async fn set_event_sender(&self, tx: Option<mpsc::Sender<HttpResponseEvent>>) {
        let mut guard = self.event_tx.write().await;
        *guard = tx;
    }
}

impl Resolve for LocalhostResolver {
    fn resolve(&self, name: Name) -> Resolving {
        let host = name.as_str().to_lowercase();
        let event_tx = self.event_tx.clone();
        let overrides = self.overrides.clone();

        info!("DNS resolve called for: {}", host);

        // Check for DNS override first
        if let Some(resolved) = overrides.get(&host) {
            log::debug!("DNS override found for: {}", host);
            let hostname = host.clone();
            let mut addrs: Vec<SocketAddr> = Vec::new();

            // Add IPv4 addresses
            for ip in &resolved.ipv4 {
                addrs.push(SocketAddr::new(IpAddr::V4(*ip), 0));
            }

            // Add IPv6 addresses
            for ip in &resolved.ipv6 {
                addrs.push(SocketAddr::new(IpAddr::V6(*ip), 0));
            }

            let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();

            return Box::pin(async move {
                // Emit DNS event for override
                let guard = event_tx.read().await;
                if let Some(tx) = guard.as_ref() {
                    let _ = tx
                        .send(HttpResponseEvent::DnsResolved {
                            hostname,
                            addresses,
                            duration: 0,
                            overridden: true,
                        })
                        .await;
                }

                Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
            });
        }

        // Check for .localhost suffix
        let is_localhost = host.ends_with(".localhost");
        if is_localhost {
            let hostname = host.clone();
            // Port 0 is fine; reqwest replaces it with the URL's explicit
            // port or the scheme’s default (80/443, etc.).
            // (See docs note below.)
            // port or the scheme's default (80/443, etc.).
            let addrs: Vec<SocketAddr> = vec![
                SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
                SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
            ];

            let addresses: Vec<String> = addrs.iter().map(|a| a.ip().to_string()).collect();

            return Box::pin(async move {
                // Emit DNS event for localhost resolution
                let guard = event_tx.read().await;
                if let Some(tx) = guard.as_ref() {
                    let _ = tx
                        .send(HttpResponseEvent::DnsResolved {
                            hostname,
                            addresses,
                            duration: 0,
                            overridden: false,
                        })
                        .await;
                }

                Ok::<Addrs, Box<dyn std::error::Error + Send + Sync>>(Box::new(addrs.into_iter()))
            });
        }

        // Fall back to system DNS
        let mut fallback = self.fallback.clone();
        let name_str = name.as_str().to_string();
        let hostname = host.clone();

        Box::pin(async move {
            match HyperName::from_str(&name_str) {
                Ok(n) => fallback
                    .call(n)
                    .await
                    .map(|addrs| Box::new(addrs) as Addrs)
                    .map_err(|err| Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
                Err(e) => Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
            let start = Instant::now();

            let result = match HyperName::from_str(&name_str) {
                Ok(n) => fallback.call(n).await,
                Err(e) => return Err(Box::new(e) as Box<dyn std::error::Error + Send + Sync>),
            };

            let duration = start.elapsed().as_millis() as u64;

            match result {
                Ok(addrs) => {
                    // Collect addresses for event emission
                    let addr_vec: Vec<SocketAddr> = addrs.collect();
                    let addresses: Vec<String> =
                        addr_vec.iter().map(|a| a.ip().to_string()).collect();

                    // Emit DNS event
                    let guard = event_tx.read().await;
                    if let Some(tx) = guard.as_ref() {
                        let _ = tx
                            .send(HttpResponseEvent::DnsResolved {
                                hostname,
                                addresses,
                                duration,
                                overridden: false,
                            })
                            .await;
                    }

                    Ok(Box::new(addr_vec.into_iter()) as Addrs)
                }
                Err(err) => Err(Box::new(err) as Box<dyn std::error::Error + Send + Sync>),
            }
        })
    }
@@ -1,4 +1,5 @@
use crate::client::HttpConnectionOptions;
use crate::dns::LocalhostResolver;
use crate::error::Result;
use log::info;
use reqwest::Client;
@@ -7,8 +8,15 @@ use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

/// A cached HTTP client along with its DNS resolver.
/// The resolver is needed to set the event sender per-request.
pub struct CachedClient {
    pub client: Client,
    pub resolver: Arc<LocalhostResolver>,
}

pub struct HttpConnectionManager {
    connections: Arc<RwLock<BTreeMap<String, (Client, Instant)>>>,
    connections: Arc<RwLock<BTreeMap<String, (CachedClient, Instant)>>>,
    ttl: Duration,
}

@@ -20,21 +28,26 @@ impl HttpConnectionManager {
        }
    }

    pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<Client> {
    pub async fn get_client(&self, opt: &HttpConnectionOptions) -> Result<CachedClient> {
        let mut connections = self.connections.write().await;
        let id = opt.id.clone();

        // Clean old connections
        connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);

        if let Some((c, last_used)) = connections.get_mut(&id) {
        if let Some((cached, last_used)) = connections.get_mut(&id) {
            info!("Re-using HTTP client {id}");
            *last_used = Instant::now();
            return Ok(c.clone());
            return Ok(CachedClient {
                client: cached.client.clone(),
                resolver: cached.resolver.clone(),
            });
        }

        let c = opt.build_client()?;
        connections.insert(id.into(), (c.clone(), Instant::now()));
        Ok(c)
        let (client, resolver) = opt.build_client()?;
        let cached = CachedClient { client: client.clone(), resolver: resolver.clone() };
        connections.insert(id.into(), (cached, Instant::now()));

        Ok(CachedClient { client, resolver })
    }
}
@@ -31,7 +31,14 @@ pub enum HttpResponseEvent {
    },
    SendUrl {
        method: String,
        scheme: String,
        username: String,
        password: String,
        host: String,
        port: u16,
        path: String,
        query: String,
        fragment: String,
    },
    ReceiveUrl {
        version: Version,
@@ -45,6 +52,12 @@ pub enum HttpResponseEvent {
    ChunkReceived {
        bytes: usize,
    },
    DnsResolved {
        hostname: String,
        addresses: Vec<String>,
        duration: u64,
        overridden: bool,
    },
}

impl Display for HttpResponseEvent {
@@ -59,7 +72,16 @@ impl Display for HttpResponseEvent {
                };
                write!(f, "* Redirect {} -> {} ({})", status, url, behavior_str)
            }
            HttpResponseEvent::SendUrl { method, path } => write!(f, "> {} {}", method, path),
            HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
                let auth_str = if username.is_empty() && password.is_empty() {
                    String::new()
                } else {
                    format!("{}:{}@", username, password)
                };
                let query_str = if query.is_empty() { String::new() } else { format!("?{}", query) };
                let fragment_str = if fragment.is_empty() { String::new() } else { format!("#{}", fragment) };
                write!(f, "> {} {}://{}{}:{}{}{}{}", method, scheme, auth_str, host, port, path, query_str, fragment_str)
            }
            HttpResponseEvent::ReceiveUrl { version, status } => {
                write!(f, "< {} {}", version_to_str(version), status)
            }
@@ -67,6 +89,19 @@ impl Display for HttpResponseEvent {
            HttpResponseEvent::HeaderDown(name, value) => write!(f, "< {}: {}", name, value),
            HttpResponseEvent::ChunkSent { bytes } => write!(f, "> [{} bytes sent]", bytes),
            HttpResponseEvent::ChunkReceived { bytes } => write!(f, "< [{} bytes received]", bytes),
            HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
                if *overridden {
                    write!(f, "* DNS override {} -> {}", hostname, addresses.join(", "))
                } else {
                    write!(
                        f,
                        "* DNS resolved {} to {} ({}ms)",
                        hostname,
                        addresses.join(", "),
                        duration
                    )
                }
            }
        }
    }
}
@@ -85,7 +120,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
                    RedirectBehavior::DropBody => "drop_body".to_string(),
                },
            },
            HttpResponseEvent::SendUrl { method, path } => D::SendUrl { method, path },
            HttpResponseEvent::SendUrl { method, scheme, username, password, host, port, path, query, fragment } => {
                D::SendUrl { method, scheme, username, password, host, port, path, query, fragment }
            }
            HttpResponseEvent::ReceiveUrl { version, status } => {
                D::ReceiveUrl { version: format!("{:?}", version), status }
            }
@@ -93,6 +130,9 @@ impl From<HttpResponseEvent> for yaak_models::models::HttpResponseEventData {
            HttpResponseEvent::HeaderDown(name, value) => D::HeaderDown { name, value },
            HttpResponseEvent::ChunkSent { bytes } => D::ChunkSent { bytes },
            HttpResponseEvent::ChunkReceived { bytes } => D::ChunkReceived { bytes },
            HttpResponseEvent::DnsResolved { hostname, addresses, duration, overridden } => {
                D::DnsResolved { hostname, addresses, duration, overridden }
            }
        }
    }
}
@@ -354,6 +394,9 @@ impl HttpSender for ReqwestSender {

        // Add headers
        for header in request.headers {
            if header.0.is_empty() {
                continue;
            }
            req_builder = req_builder.header(&header.0, &header.1);
        }

@@ -390,8 +433,15 @@ impl HttpSender for ReqwestSender {
        ));

        send_event(HttpResponseEvent::SendUrl {
            path: sendable_req.url().path().to_string(),
            method: sendable_req.method().to_string(),
            scheme: sendable_req.url().scheme().to_string(),
            username: sendable_req.url().username().to_string(),
            password: sendable_req.url().password().unwrap_or_default().to_string(),
            host: sendable_req.url().host_str().unwrap_or_default().to_string(),
            port: sendable_req.url().port_or_known_default().unwrap_or(0),
            path: sendable_req.url().path().to_string(),
            query: sendable_req.url().query().unwrap_or_default().to_string(),
            fragment: sendable_req.url().fragment().unwrap_or_default().to_string(),
        });

        let mut request_headers = Vec::new();

@@ -342,7 +342,8 @@ mod tests {

    #[tokio::test]
    async fn test_transaction_single_redirect() {
        let redirect_headers = vec![("Location".to_string(), "https://example.com/new".to_string())];
        let redirect_headers =
            vec![("Location".to_string(), "https://example.com/new".to_string())];

        let responses = vec![
            MockResponse { status: 302, headers: redirect_headers, body: vec![] },
@@ -373,7 +374,8 @@ mod tests {

    #[tokio::test]
    async fn test_transaction_max_redirects_exceeded() {
        let redirect_headers = vec![("Location".to_string(), "https://example.com/loop".to_string())];
        let redirect_headers =
            vec![("Location".to_string(), "https://example.com/loop".to_string())];

        // Create more redirects than allowed
        let responses: Vec<MockResponse> = (0..12)
@@ -525,7 +527,8 @@ mod tests {
            _request: SendableHttpRequest,
            _event_tx: mpsc::Sender<HttpResponseEvent>,
        ) -> Result<HttpResponse> {
            let headers = vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];
            let headers =
                vec![("set-cookie".to_string(), "session=xyz789; Path=/".to_string())];

            let body_stream: Pin<Box<dyn AsyncRead + Send>> =
                Box::pin(std::io::Cursor::new(vec![]));
@@ -584,7 +587,10 @@ mod tests {
            let headers = vec![
                ("set-cookie".to_string(), "session=abc123; Path=/".to_string()),
                ("set-cookie".to_string(), "user_id=42; Path=/".to_string()),
                ("set-cookie".to_string(), "preferences=dark; Path=/; Max-Age=86400".to_string()),
                (
                    "set-cookie".to_string(),
                    "preferences=dark; Path=/; Max-Age=86400".to_string(),
                ),
            ];

            let body_stream: Pin<Box<dyn AsyncRead + Send>> =
8  crates/yaak-models/bindings/gen_models.ts  generated
@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";

export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };

export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };

export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";

export type EncryptedKey = { encryptedKey: string, };
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string

export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };

export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };

export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };

@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
 * This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
 * The `From` impl is in yaak-http to avoid circular dependencies.
 */
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, scheme: string, username: string, password: string, host: string, port: number, path: string, query: string, fragment: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };

export type HttpResponseHeader = { name: string, value: string, };

@@ -91,6 +93,6 @@ export type WebsocketMessageType = "text" | "binary";

export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
@@ -206,6 +206,34 @@ export function replaceModelsInStore<
  });
}

export function mergeModelsInStore<
  M extends AnyModel['model'],
  T extends Extract<AnyModel, { model: M }>,
>(model: M, models: T[], filter?: (model: T) => boolean) {
  mustStore().set(modelStoreDataAtom, (prev: ModelStoreData) => {
    const existingModels = { ...prev[model] } as Record<string, T>;

    // Merge in new models first
    for (const m of models) {
      existingModels[m.id] = m;
    }

    // Then filter out unwanted models
    if (filter) {
      for (const [id, m] of Object.entries(existingModels)) {
        if (!filter(m)) {
          delete existingModels[id];
        }
      }
    }

    return {
      ...prev,
      [model]: existingModels,
    };
  });
}

function shouldIgnoreModel({ model, updateSource }: ModelPayload) {
  // Never ignore updates from non-user sources
  if (updateSource.type !== 'window') {
@@ -0,0 +1,2 @@
-- Add DNS resolution timing to http_responses
ALTER TABLE http_responses ADD COLUMN elapsed_dns INTEGER DEFAULT 0 NOT NULL;
@@ -0,0 +1,2 @@
-- Add DNS overrides setting to workspaces
ALTER TABLE workspaces ADD COLUMN setting_dns_overrides TEXT DEFAULT '[]' NOT NULL;
@@ -0,0 +1,12 @@
-- Filter out headers that match the hardcoded defaults (User-Agent: yaak, Accept: */*),
-- keeping any other custom headers the user may have added.
UPDATE workspaces
SET headers = (
    SELECT json_group_array(json(value))
    FROM json_each(headers)
    WHERE NOT (
        (LOWER(json_extract(value, '$.name')) = 'user-agent' AND json_extract(value, '$.value') = 'yaak')
        OR (LOWER(json_extract(value, '$.name')) = 'accept' AND json_extract(value, '$.value') = '*/*')
    )
)
WHERE json_array_length(headers) > 0;
@@ -73,6 +73,20 @@ pub struct ClientCertificate {
    pub enabled: bool,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export, export_to = "gen_models.ts")]
pub struct DnsOverride {
    pub hostname: String,
    #[serde(default)]
    pub ipv4: Vec<String>,
    #[serde(default)]
    pub ipv6: Vec<String>,
    #[serde(default = "default_true")]
    #[ts(optional, as = "Option<bool>")]
    pub enabled: bool,
}
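As a hedged illustration (not part of this diff): the struct above serializes with camelCase field names, empty-by-default address lists, and `enabled` defaulting to true. A minimal standalone sketch of that round-trip, using a stand-in copy of the struct and a placeholder hostname/address:

```rust
use serde::Deserialize;

// Stand-in mirroring the DnsOverride definition above; field names, serde
// attributes, and defaults are copied from the diff, values are placeholders.
#[derive(Debug, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
struct DnsOverride {
    hostname: String,
    #[serde(default)]
    ipv4: Vec<String>,
    #[serde(default)]
    ipv6: Vec<String>,
    #[serde(default = "default_true")]
    enabled: bool,
}

fn default_true() -> bool {
    true
}

fn main() {
    // Omitting `ipv6` and `enabled` exercises the serde defaults.
    let json = r#"{ "hostname": "api.internal", "ipv4": ["10.0.0.5"] }"#;
    let o: DnsOverride = serde_json::from_str(json).unwrap();
    assert!(o.enabled);
    assert!(o.ipv6.is_empty());
    println!("{o:?}");
}
```

This is the shape that the `setting_dns_overrides` workspace column added later in this diff stores as a JSON string.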
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[serde(rename_all = "snake_case")]
#[ts(export, export_to = "gen_models.ts")]
@@ -303,6 +317,8 @@ pub struct Workspace {
    #[serde(default = "default_true")]
    pub setting_follow_redirects: bool,
    pub setting_request_timeout: i32,
    #[serde(default)]
    pub setting_dns_overrides: Vec<DnsOverride>,
}

impl UpsertModelInfo for Workspace {
@@ -343,6 +359,7 @@ impl UpsertModelInfo for Workspace {
            (SettingFollowRedirects, self.setting_follow_redirects.into()),
            (SettingRequestTimeout, self.setting_request_timeout.into()),
            (SettingValidateCertificates, self.setting_validate_certificates.into()),
            (SettingDnsOverrides, serde_json::to_string(&self.setting_dns_overrides)?.into()),
        ])
    }

@@ -359,6 +376,7 @@ impl UpsertModelInfo for Workspace {
            WorkspaceIden::SettingFollowRedirects,
            WorkspaceIden::SettingRequestTimeout,
            WorkspaceIden::SettingValidateCertificates,
            WorkspaceIden::SettingDnsOverrides,
        ]
    }

@@ -368,6 +386,7 @@ impl UpsertModelInfo for Workspace {
    {
        let headers: String = row.get("headers")?;
        let authentication: String = row.get("authentication")?;
        let setting_dns_overrides: String = row.get("setting_dns_overrides")?;
        Ok(Self {
            id: row.get("id")?,
            model: row.get("model")?,
@@ -382,6 +401,7 @@ impl UpsertModelInfo for Workspace {
            setting_follow_redirects: row.get("setting_follow_redirects")?,
            setting_request_timeout: row.get("setting_request_timeout")?,
            setting_validate_certificates: row.get("setting_validate_certificates")?,
            setting_dns_overrides: serde_json::from_str(&setting_dns_overrides).unwrap_or_default(),
        })
    }
}
@@ -1333,6 +1353,7 @@ pub struct HttpResponse {
    pub content_length_compressed: Option<i32>,
    pub elapsed: i32,
    pub elapsed_headers: i32,
    pub elapsed_dns: i32,
    pub error: Option<String>,
    pub headers: Vec<HttpResponseHeader>,
    pub remote_addr: Option<String>,
@@ -1381,6 +1402,7 @@ impl UpsertModelInfo for HttpResponse {
            (ContentLengthCompressed, self.content_length_compressed.into()),
            (Elapsed, self.elapsed.into()),
            (ElapsedHeaders, self.elapsed_headers.into()),
            (ElapsedDns, self.elapsed_dns.into()),
            (Error, self.error.into()),
            (Headers, serde_json::to_string(&self.headers)?.into()),
            (RemoteAddr, self.remote_addr.into()),
@@ -1402,6 +1424,7 @@ impl UpsertModelInfo for HttpResponse {
            HttpResponseIden::ContentLengthCompressed,
            HttpResponseIden::Elapsed,
            HttpResponseIden::ElapsedHeaders,
            HttpResponseIden::ElapsedDns,
            HttpResponseIden::Error,
            HttpResponseIden::Headers,
            HttpResponseIden::RemoteAddr,
@@ -1435,6 +1458,7 @@ impl UpsertModelInfo for HttpResponse {
            version: r.get("version")?,
            elapsed: r.get("elapsed")?,
            elapsed_headers: r.get("elapsed_headers")?,
            elapsed_dns: r.get("elapsed_dns").unwrap_or_default(),
            remote_addr: r.get("remote_addr")?,
            status: r.get("status")?,
            status_reason: r.get("status_reason")?,
@@ -1471,7 +1495,21 @@ pub enum HttpResponseEventData {
    },
    SendUrl {
        method: String,
        #[serde(default)]
        scheme: String,
        #[serde(default)]
        username: String,
        #[serde(default)]
        password: String,
        #[serde(default)]
        host: String,
        #[serde(default)]
        port: u16,
        path: String,
        #[serde(default)]
        query: String,
        #[serde(default)]
        fragment: String,
    },
    ReceiveUrl {
        version: String,
@@ -1491,6 +1529,12 @@ pub enum HttpResponseEventData {
    ChunkReceived {
        bytes: usize,
    },
    DnsResolved {
        hostname: String,
        addresses: Vec<String>,
        duration: u64,
        overridden: bool,
    },
}

impl Default for HttpResponseEventData {
@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{GrpcRequest, GrpcRequestIden, HttpRequestHeader};
@@ -87,6 +88,6 @@ impl<'a> DbContext<'a> {

        metadata.append(&mut grpc_request.metadata.clone());

        Ok(metadata)
        Ok(dedupe_headers(metadata))
    }
}
@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{Folder, FolderIden, HttpRequest, HttpRequestHeader, HttpRequestIden};
@@ -87,7 +88,7 @@ impl<'a> DbContext<'a> {

        headers.append(&mut http_request.headers.clone());

        Ok(headers)
        Ok(dedupe_headers(headers))
    }

    pub fn list_http_requests_for_folder_recursive(
@@ -19,6 +19,26 @@ mod websocket_connections;
mod websocket_events;
mod websocket_requests;
mod workspace_metas;
mod workspaces;
pub mod workspaces;

const MAX_HISTORY_ITEMS: usize = 20;

use crate::models::HttpRequestHeader;
use std::collections::HashMap;

/// Deduplicate headers by name (case-insensitive), keeping the latest (most specific) value.
/// Preserves the order of first occurrence for each header name.
pub(crate) fn dedupe_headers(headers: Vec<HttpRequestHeader>) -> Vec<HttpRequestHeader> {
    let mut index_by_name: HashMap<String, usize> = HashMap::new();
    let mut deduped: Vec<HttpRequestHeader> = Vec::new();
    for header in headers {
        let key = header.name.to_lowercase();
        if let Some(&idx) = index_by_name.get(&key) {
            deduped[idx] = header;
        } else {
            index_by_name.insert(key, deduped.len());
            deduped.push(header);
        }
    }
    deduped
}
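To make the dedupe rule concrete, here is a minimal standalone sketch (editorial illustration, not part of this diff) that applies the same rule — case-insensitive name match, latest value wins, position of the first occurrence kept — to simple `(name, value)` pairs instead of the `HttpRequestHeader` model:

```rust
use std::collections::HashMap;

// Simplified stand-in for dedupe_headers above, operating on (name, value) pairs.
fn dedupe(headers: Vec<(String, String)>) -> Vec<(String, String)> {
    let mut index_by_name: HashMap<String, usize> = HashMap::new();
    let mut deduped: Vec<(String, String)> = Vec::new();
    for (name, value) in headers {
        let key = name.to_lowercase();
        if let Some(&idx) = index_by_name.get(&key) {
            // Same name (ignoring case) seen before: overwrite in place.
            deduped[idx] = (name, value);
        } else {
            index_by_name.insert(key, deduped.len());
            deduped.push((name, value));
        }
    }
    deduped
}

fn main() {
    let headers = vec![
        ("User-Agent".to_string(), "yaak".to_string()),
        ("Accept".to_string(), "*/*".to_string()),
        ("accept".to_string(), "application/json".to_string()),
    ];
    // The request-level "accept" overrides the default "Accept: */*" while keeping
    // its original position: [("User-Agent", "yaak"), ("accept", "application/json")]
    println!("{:?}", dedupe(headers));
}
```

This is how a more specific header can override the default `User-Agent: yaak` / `Accept: */*` pair introduced by `default_headers()` further down in this diff.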
@@ -1,3 +1,4 @@
use super::dedupe_headers;
use crate::db_context::DbContext;
use crate::error::Result;
use crate::models::{HttpRequestHeader, WebsocketRequest, WebsocketRequestIden};
@@ -95,6 +96,6 @@ impl<'a> DbContext<'a> {

        headers.append(&mut websocket_request.headers.clone());

        Ok(headers)
        Ok(dedupe_headers(headers))
    }
}
@@ -80,6 +80,28 @@ impl<'a> DbContext<'a> {
    }

    pub fn resolve_headers_for_workspace(&self, workspace: &Workspace) -> Vec<HttpRequestHeader> {
        workspace.headers.clone()
        let mut headers = default_headers();
        headers.extend(workspace.headers.clone());
        headers
    }
}

/// Global default headers that are always sent with requests unless overridden.
/// These are prepended to the inheritance chain so workspace/folder/request headers
/// can override or disable them.
pub fn default_headers() -> Vec<HttpRequestHeader> {
    vec![
        HttpRequestHeader {
            enabled: true,
            name: "User-Agent".to_string(),
            value: "yaak".to_string(),
            id: None,
        },
        HttpRequestHeader {
            enabled: true,
            name: "Accept".to_string(),
            value: "*/*".to_string(),
            id: None,
        },
    ]
}
6
crates/yaak-plugins/bindings/gen_events.ts
generated
6
crates/yaak-plugins/bindings/gen_events.ts
generated
File diff suppressed because one or more lines are too long
8
crates/yaak-plugins/bindings/gen_models.ts
generated
8
crates/yaak-plugins/bindings/gen_models.ts
generated
@@ -12,6 +12,8 @@ export type CookieExpires = { "AtUtc": string } | "SessionEnd";
|
||||
|
||||
export type CookieJar = { model: "cookie_jar", id: string, createdAt: string, updatedAt: string, workspaceId: string, cookies: Array<Cookie>, name: string, };
|
||||
|
||||
export type DnsOverride = { hostname: string, ipv4: Array<string>, ipv6: Array<string>, enabled?: boolean, };
|
||||
|
||||
export type EditorKeymap = "default" | "vim" | "vscode" | "emacs";
|
||||
|
||||
export type EncryptedKey = { encryptedKey: string, };
|
||||
@@ -38,7 +40,7 @@ export type HttpRequest = { model: "http_request", id: string, createdAt: string
|
||||
|
||||
export type HttpRequestHeader = { enabled?: boolean, name: string, value: string, id?: string, };
|
||||
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
export type HttpResponse = { model: "http_response", id: string, createdAt: string, updatedAt: string, workspaceId: string, requestId: string, bodyPath: string | null, contentLength: number | null, contentLengthCompressed: number | null, elapsed: number, elapsedHeaders: number, elapsedDns: number, error: string | null, headers: Array<HttpResponseHeader>, remoteAddr: string | null, requestContentLength: number | null, requestHeaders: Array<HttpResponseHeader>, status: number, statusReason: string | null, state: HttpResponseState, url: string, version: string | null, };
|
||||
|
||||
export type HttpResponseEvent = { model: "http_response_event", id: string, createdAt: string, updatedAt: string, workspaceId: string, responseId: string, event: HttpResponseEventData, };
|
||||
|
||||
@@ -47,7 +49,7 @@ export type HttpResponseEvent = { model: "http_response_event", id: string, crea
 * This mirrors `yaak_http::sender::HttpResponseEvent` but with serde support.
 * The `From` impl is in yaak-http to avoid circular dependencies.
 */
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, };
export type HttpResponseEventData = { "type": "setting", name: string, value: string, } | { "type": "info", message: string, } | { "type": "redirect", url: string, status: number, behavior: string, } | { "type": "send_url", method: string, path: string, } | { "type": "receive_url", version: string, status: string, } | { "type": "header_up", name: string, value: string, } | { "type": "header_down", name: string, value: string, } | { "type": "chunk_sent", bytes: number, } | { "type": "chunk_received", bytes: number, } | { "type": "dns_resolved", hostname: string, addresses: Array<string>, duration: bigint, overridden: boolean, };

export type HttpResponseHeader = { name: string, value: string, };
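The `HttpResponseEventData` union gains a `dns_resolved` variant carrying the hostname, the resolved addresses, a `bigint` duration, and whether an override was applied. A hedged sketch of narrowing on it (the duration unit is an assumption):

```typescript
// Import path is an assumption.
import type { HttpResponseEventData } from "./bindings/gen_models";

// Narrow the discriminated union on the new "dns_resolved" variant.
function describeEvent(event: HttpResponseEventData): string {
  switch (event.type) {
    case "dns_resolved":
      return (
        `resolved ${event.hostname} -> ${event.addresses.join(", ")}` +
        (event.overridden ? " (override)" : "") +
        ` in ${event.duration}ms` // the unit is an assumption
      );
    case "redirect":
      return `redirect ${event.status} -> ${event.url}`;
    default:
      return event.type;
  }
}
```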
@@ -77,6 +79,6 @@ export type WebsocketEventType = "binary" | "close" | "frame" | "open" | "ping"
export type WebsocketRequest = { model: "websocket_request", id: string, createdAt: string, updatedAt: string, workspaceId: string, folderId: string | null, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, message: string, name: string, sortPriority: number, url: string, urlParameters: Array<HttpUrlParameter>, };

export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, };
export type Workspace = { model: "workspace", id: string, createdAt: string, updatedAt: string, authentication: Record<string, any>, authenticationType: string | null, description: string, headers: Array<HttpRequestHeader>, name: string, encryptionKeyChallenge: string | null, settingValidateCertificates: boolean, settingFollowRedirects: boolean, settingRequestTimeout: number, settingDnsOverrides: Array<DnsOverride>, };

export type WorkspaceMeta = { model: "workspace_meta", id: string, workspaceId: string, createdAt: string, updatedAt: string, encryptionKey: EncryptedKey | null, settingSyncDir: string | null, };
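`Workspace` now carries `settingDnsOverrides`, so overrides are configured per workspace. A sketch of resolving the override for a hostname; exact-match semantics and the meaning of a missing `enabled` flag are assumptions:

```typescript
// Import path is an assumption.
import type { DnsOverride, Workspace } from "./bindings/gen_models";

// Find the active override for a hostname. Treats a missing `enabled` flag as
// "on" and matches hostnames exactly; both behaviors are assumptions.
function findDnsOverride(workspace: Workspace, hostname: string): DnsOverride | undefined {
  return workspace.settingDnsOverrides.find(
    (o) => o.hostname === hostname && o.enabled !== false,
  );
}
```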
@@ -80,10 +80,7 @@ pub async fn check_plugin_updates(
}

/// Search for plugins in the registry.
pub async fn search_plugins(
    http_client: &Client,
    query: &str,
) -> Result<PluginSearchResponse> {
pub async fn search_plugins(http_client: &Client, query: &str) -> Result<PluginSearchResponse> {
    let mut url = build_url("/search");
    {
        let mut query_pairs = url.query_pairs_mut();
@@ -157,6 +157,9 @@ pub enum InternalEventPayload {
    PromptTextRequest(PromptTextRequest),
    PromptTextResponse(PromptTextResponse),

    PromptFormRequest(PromptFormRequest),
    PromptFormResponse(PromptFormResponse),

    WindowInfoRequest(WindowInfoRequest),
    WindowInfoResponse(WindowInfoResponse),
@@ -571,6 +574,28 @@ pub struct PromptTextResponse {
    pub value: Option<String>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormRequest {
    pub id: String,
    pub title: String,
    #[ts(optional)]
    pub description: Option<String>,
    pub inputs: Vec<FormInput>,
    #[ts(optional)]
    pub confirm_text: Option<String>,
    #[ts(optional)]
    pub cancel_text: Option<String>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
pub struct PromptFormResponse {
    pub values: Option<HashMap<String, JsonPrimitive>>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, TS)]
#[serde(default, rename_all = "camelCase")]
#[ts(export, export_to = "gen_events.ts")]
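These structs export to `gen_events.ts`, so plugins would see camelCase fields with the `Option` fields made optional or nullable. A sketch of the generated shapes in use; the exact ts-rs output, the `FormInput` and `JsonPrimitive` shapes, and the cancel behavior are all assumptions not shown in this diff:

```typescript
// Import path is an assumption; the field names assume ts-rs honors the
// serde camelCase rename, which is its usual behavior.
import type { PromptFormRequest, PromptFormResponse } from "./bindings/gen_events";

// Illustrative values only; FormInput is not shown in this diff, so `inputs`
// is left empty here.
const request: PromptFormRequest = {
  id: "prompt-form-1",
  title: "Log in",
  description: "Enter your credentials",
  inputs: [],
  confirmText: "Log In",
  cancelText: "Cancel",
};

// Read one submitted value; `values` is assumed to be null/undefined when the
// user cancels, mirroring Option<HashMap<..>> on the Rust side.
function readValue(response: PromptFormResponse, name: string): string | undefined {
  const value = response.values?.[name];
  return typeof value === "string" ? value : undefined;
}
```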
Some files were not shown because too many files have changed in this diff.