Compare commits

..

2 Commits

Author SHA1 Message Date
Gregory Schier
0264e59553 Improve CLI streaming output, logging flags, and schema/help ergonomics 2026-02-23 08:01:30 -08:00
Gregory Schier
53d86f5568 Add workspace/environment schemas and shared agent hints 2026-02-23 07:25:33 -08:00
23 changed files with 234 additions and 399 deletions

View File

@@ -22,7 +22,7 @@
<!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https:&#x2F;&#x2F;github.com&#x2F;MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a>&nbsp;&nbsp;<a href="https://github.com/dharsanb"><img src="https:&#x2F;&#x2F;github.com&#x2F;dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a>&nbsp;&nbsp;<a href="https://github.com/railwayapp"><img src="https:&#x2F;&#x2F;github.com&#x2F;railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a>&nbsp;&nbsp;<a href="https://github.com/caseyamcl"><img src="https:&#x2F;&#x2F;github.com&#x2F;caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a>&nbsp;&nbsp;<a href="https://github.com/bytebase"><img src="https:&#x2F;&#x2F;github.com&#x2F;bytebase.png" width="80px" alt="User avatar: bytebase" /></a>&nbsp;&nbsp;<a href="https://github.com/"><img src="https:&#x2F;&#x2F;raw.githubusercontent.com&#x2F;JamesIves&#x2F;github-sponsors-readme-action&#x2F;dev&#x2F;.github&#x2F;assets&#x2F;placeholder.png" width="80px" alt="User avatar: " /></a>&nbsp;&nbsp;<!-- sponsors-premium --> <!-- sponsors-premium --><a href="https://github.com/MVST-Solutions"><img src="https:&#x2F;&#x2F;github.com&#x2F;MVST-Solutions.png" width="80px" alt="User avatar: MVST-Solutions" /></a>&nbsp;&nbsp;<a href="https://github.com/dharsanb"><img src="https:&#x2F;&#x2F;github.com&#x2F;dharsanb.png" width="80px" alt="User avatar: dharsanb" /></a>&nbsp;&nbsp;<a href="https://github.com/railwayapp"><img src="https:&#x2F;&#x2F;github.com&#x2F;railwayapp.png" width="80px" alt="User avatar: railwayapp" /></a>&nbsp;&nbsp;<a href="https://github.com/caseyamcl"><img src="https:&#x2F;&#x2F;github.com&#x2F;caseyamcl.png" width="80px" alt="User avatar: caseyamcl" /></a>&nbsp;&nbsp;<a href="https://github.com/bytebase"><img src="https:&#x2F;&#x2F;github.com&#x2F;bytebase.png" width="80px" alt="User avatar: bytebase" /></a>&nbsp;&nbsp;<a href="https://github.com/"><img 
src="https:&#x2F;&#x2F;raw.githubusercontent.com&#x2F;JamesIves&#x2F;github-sponsors-readme-action&#x2F;dev&#x2F;.github&#x2F;assets&#x2F;placeholder.png" width="80px" alt="User avatar: " /></a>&nbsp;&nbsp;<!-- sponsors-premium -->
</p> </p>
<p align="center"> <p align="center">
<!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<a href="https://github.com/GRAYAH"><img src="https:&#x2F;&#x2F;github.com&#x2F;GRAYAH.png" width="50px" alt="User 
avatar: GRAYAH" /></a>&nbsp;&nbsp;<a href="https://github.com/flashblaze"><img src="https:&#x2F;&#x2F;github.com&#x2F;flashblaze.png" width="50px" alt="User avatar: flashblaze" /></a>&nbsp;&nbsp;<a href="https://github.com/Frostist"><img src="https:&#x2F;&#x2F;github.com&#x2F;Frostist.png" width="50px" alt="User avatar: Frostist" /></a>&nbsp;&nbsp;<!-- sponsors-base --> <!-- sponsors-base --><a href="https://github.com/seanwash"><img src="https:&#x2F;&#x2F;github.com&#x2F;seanwash.png" width="50px" alt="User avatar: seanwash" /></a>&nbsp;&nbsp;<a href="https://github.com/jerath"><img src="https:&#x2F;&#x2F;github.com&#x2F;jerath.png" width="50px" alt="User avatar: jerath" /></a>&nbsp;&nbsp;<a href="https://github.com/itsa-sh"><img src="https:&#x2F;&#x2F;github.com&#x2F;itsa-sh.png" width="50px" alt="User avatar: itsa-sh" /></a>&nbsp;&nbsp;<a href="https://github.com/dmmulroy"><img src="https:&#x2F;&#x2F;github.com&#x2F;dmmulroy.png" width="50px" alt="User avatar: dmmulroy" /></a>&nbsp;&nbsp;<a href="https://github.com/timcole"><img src="https:&#x2F;&#x2F;github.com&#x2F;timcole.png" width="50px" alt="User avatar: timcole" /></a>&nbsp;&nbsp;<a href="https://github.com/VLZH"><img src="https:&#x2F;&#x2F;github.com&#x2F;VLZH.png" width="50px" alt="User avatar: VLZH" /></a>&nbsp;&nbsp;<a href="https://github.com/terasaka2k"><img src="https:&#x2F;&#x2F;github.com&#x2F;terasaka2k.png" width="50px" alt="User avatar: terasaka2k" /></a>&nbsp;&nbsp;<a href="https://github.com/andriyor"><img src="https:&#x2F;&#x2F;github.com&#x2F;andriyor.png" width="50px" alt="User avatar: andriyor" /></a>&nbsp;&nbsp;<a href="https://github.com/majudhu"><img src="https:&#x2F;&#x2F;github.com&#x2F;majudhu.png" width="50px" alt="User avatar: majudhu" /></a>&nbsp;&nbsp;<a href="https://github.com/axelrindle"><img src="https:&#x2F;&#x2F;github.com&#x2F;axelrindle.png" width="50px" alt="User avatar: axelrindle" /></a>&nbsp;&nbsp;<a href="https://github.com/jirizverina"><img 
src="https:&#x2F;&#x2F;github.com&#x2F;jirizverina.png" width="50px" alt="User avatar: jirizverina" /></a>&nbsp;&nbsp;<a href="https://github.com/chip-well"><img src="https:&#x2F;&#x2F;github.com&#x2F;chip-well.png" width="50px" alt="User avatar: chip-well" /></a>&nbsp;&nbsp;<a href="https://github.com/GRAYAH"><img src="https:&#x2F;&#x2F;github.com&#x2F;GRAYAH.png" width="50px" alt="User avatar: GRAYAH" /></a>&nbsp;&nbsp;<a href="https://github.com/flashblaze"><img src="https:&#x2F;&#x2F;github.com&#x2F;flashblaze.png" width="50px" alt="User avatar: flashblaze" /></a>&nbsp;&nbsp;<!-- sponsors-base -->
</p> </p>
![Yaak API Client](https://yaak.app/static/screenshot.png) ![Yaak API Client](https://yaak.app/static/screenshot.png)

View File

@@ -1,66 +1,93 @@
# Yaak CLI # yaak-cli
The `yaak` CLI is for publishing plugins and creating/updating/sending requests. Command-line interface for Yaak.
## Installation ## Command Overview
```sh Current top-level commands:
npm install @yaakapp/cli
```
## Agentic Workflows
The `yaak` CLI is primarily meant to be used by AI agents, and has the following features:
- `schema` subcommands to get the JSON Schema for any model (eg. `yaak request schema http`)
- `--json '{...}'` input format to create and update data
- `--verbose` mode for extracting debug info while sending requests
- The ability to send entire workspaces and folders (Supports `--parallel` and `--fail-fast`)
### Example Prompts
Use the `yaak` CLI with agents like Claude or Codex to do useful things for you.
Here are some example prompts:
```text ```text
Scan my API routes and create a workspace (using yaak cli) with yaakcli send <request_id>
all the requests needed for me to do manual testing? yaakcli agent-help
yaakcli workspace list
yaakcli workspace schema [--pretty]
yaakcli workspace show <workspace_id>
yaakcli workspace create --name <name>
yaakcli workspace create --json '{"name":"My Workspace"}'
yaakcli workspace create '{"name":"My Workspace"}'
yaakcli workspace update --json '{"id":"wk_abc","description":"Updated"}'
yaakcli workspace delete <workspace_id> [--yes]
yaakcli request list <workspace_id>
yaakcli request show <request_id>
yaakcli request send <request_id>
yaakcli request create <workspace_id> --name <name> --url <url> [--method GET]
yaakcli request create --json '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request create '{"workspaceId":"wk_abc","name":"Users","url":"https://api.example.com/users"}'
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request delete <request_id> [--yes]
yaakcli folder list <workspace_id>
yaakcli folder show <folder_id>
yaakcli folder create <workspace_id> --name <name>
yaakcli folder create --json '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder create '{"workspaceId":"wk_abc","name":"Auth"}'
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli folder delete <folder_id> [--yes]
yaakcli environment list <workspace_id>
yaakcli environment schema [--pretty]
yaakcli environment show <environment_id>
yaakcli environment create <workspace_id> --name <name>
yaakcli environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment create '{"workspaceId":"wk_abc","name":"Production"}'
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
yaakcli environment delete <environment_id> [--yes]
``` ```
```text Global options:
Send all the GraphQL requests in my workspace
- `--data-dir <path>`: use a custom data directory
- `-e, --environment <id>`: environment to use during request rendering/sending
- `-v, --verbose`: verbose send output (events and streamed response body)
- `--log [level]`: enable CLI logging; optional level is `error|warn|info|debug|trace`
Notes:
- `send` is currently a shortcut for sending an HTTP request by ID.
- `delete` commands prompt for confirmation unless `--yes` is provided.
- In non-interactive mode, `delete` commands require `--yes`.
- `create` and `update` commands support `--json` and positional JSON shorthand.
- For `create` commands, use one input mode at a time. Example: do not combine `<workspace_id>` with `--json`.
- Template tags use `${[ ... ]}` syntax (for example `${[API_BASE_URL]}`), not `{{ ... }}`.
- `update` uses JSON Merge Patch semantics (RFC 7386) for partial updates.
## Examples
```bash
yaakcli workspace list
yaakcli workspace create --name "My Workspace"
yaakcli workspace show wk_abc
yaakcli workspace update --json '{"id":"wk_abc","description":"Team workspace"}'
yaakcli request list wk_abc
yaakcli request show rq_abc
yaakcli request create wk_abc --name "Users" --url "https://api.example.com/users"
yaakcli request update --json '{"id":"rq_abc","name":"Users v2"}'
yaakcli request send rq_abc -e ev_abc
yaakcli request delete rq_abc --yes
yaakcli folder create wk_abc --name "Auth"
yaakcli folder update --json '{"id":"fl_abc","name":"Auth v2"}'
yaakcli environment create wk_abc --name "Production"
yaakcli environment update --json '{"id":"ev_abc","color":"#00ff00"}'
``` ```
## Description ## Roadmap
Here's the current output of `yaak --help` Planned command expansion (request schema and polymorphic send) is tracked in `PLAN.md`.
```text When command behavior changes, update this README and verify with:
Yaak CLI - API client from the command line
Usage: yaak [OPTIONS] <COMMAND> ```bash
cargo run -q -p yaak-cli -- --help
Commands: cargo run -q -p yaak-cli -- request --help
auth Authentication commands cargo run -q -p yaak-cli -- workspace --help
plugin Plugin development and publishing commands cargo run -q -p yaak-cli -- folder --help
send Send a request, folder, or workspace by ID cargo run -q -p yaak-cli -- environment --help
workspace Workspace commands
request Request commands
folder Folder commands
environment Environment commands
Options:
--data-dir <DATA_DIR> Use a custom data directory
-e, --environment <ENVIRONMENT> Environment ID to use for variable substitution
-v, --verbose Enable verbose send output (events and streamed response body)
--log [<LEVEL>] Enable CLI logging; optionally set level (error|warn|info|debug|trace) [possible values: error, warn, info, debug, trace]
-h, --help Print help
-V, --version Print version
Agent Hints:
- Template variable syntax is ${[ my_var ]}, not {{ ... }}
- Template function syntax is ${[ namespace.my_func(a='aaa',b='bbb') ]}
- View JSONSchema for models before creating or updating (eg. `yaak request schema http`)
- Deletion requires confirmation (--yes for non-interactive environments)
``` ```

View File

@@ -68,8 +68,12 @@ pub struct SendArgs {
/// Request, folder, or workspace ID /// Request, folder, or workspace ID
pub id: String, pub id: String,
/// Execute requests sequentially (default)
#[arg(long, conflicts_with = "parallel")]
pub sequential: bool,
/// Execute requests in parallel /// Execute requests in parallel
#[arg(long)] #[arg(long, conflicts_with = "sequential")]
pub parallel: bool, pub parallel: bool,
/// Stop on first request failure when sending folders/workspaces /// Stop on first request failure when sending folders/workspaces
@@ -338,8 +342,8 @@ pub enum EnvironmentCommands {
1) yaak environment create <workspace_id> --name <name> 1) yaak environment create <workspace_id> --name <name>
2) yaak environment create --json '{"workspaceId":"wk_abc","name":"Production"}' 2) yaak environment create --json '{"workspaceId":"wk_abc","name":"Production"}'
3) yaak environment create '{"workspaceId":"wk_abc","name":"Production"}' 3) yaak environment create '{"workspaceId":"wk_abc","name":"Production"}'
4) yaak environment create <workspace_id> --json '{"name":"Production"}'
"#)] Do not combine <workspace_id> with --json."#)]
Create { Create {
/// Workspace ID for flag-based mode, or positional JSON payload shorthand /// Workspace ID for flag-based mode, or positional JSON payload shorthand
#[arg(value_name = "WORKSPACE_ID_OR_JSON")] #[arg(value_name = "WORKSPACE_ID_OR_JSON")]

View File

@@ -2,8 +2,8 @@ use crate::cli::{EnvironmentArgs, EnvironmentCommands};
use crate::context::CliContext; use crate::context::CliContext;
use crate::utils::confirm::confirm_delete; use crate::utils::confirm::confirm_delete;
use crate::utils::json::{ use crate::utils::json::{
apply_merge_patch, is_json_shorthand, merge_workspace_id_arg, parse_optional_json, apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
parse_required_json, require_id, validate_create_id, validate_create_id,
}; };
use crate::utils::schema::append_agent_hints; use crate::utils::schema::append_agent_hints;
use schemars::schema_for; use schemars::schema_for;
@@ -34,13 +34,18 @@ pub fn run(ctx: &CliContext, args: EnvironmentArgs) -> i32 {
} }
fn schema(pretty: bool) -> CommandResult { fn schema(pretty: bool) -> CommandResult {
let mut schema = serde_json::to_value(schema_for!(Environment)) let mut schema =
.map_err(|e| format!("Failed to serialize environment schema: {e}"))?; serde_json::to_value(schema_for!(Environment)).map_err(|e| format!(
"Failed to serialize environment schema: {e}"
))?;
append_agent_hints(&mut schema); append_agent_hints(&mut schema);
let output = let output = if pretty {
if pretty { serde_json::to_string_pretty(&schema) } else { serde_json::to_string(&schema) } serde_json::to_string_pretty(&schema)
.map_err(|e| format!("Failed to format environment schema JSON: {e}"))?; } else {
serde_json::to_string(&schema)
}
.map_err(|e| format!("Failed to format environment schema JSON: {e}"))?;
println!("{output}"); println!("{output}");
Ok(()) Ok(())
} }
@@ -78,11 +83,17 @@ fn create(
name: Option<String>, name: Option<String>,
json: Option<String>, json: Option<String>,
) -> CommandResult { ) -> CommandResult {
let json_shorthand = if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
workspace_id.as_deref().filter(|v| is_json_shorthand(v)).map(str::to_owned); return Err(
let workspace_id_arg = workspace_id.filter(|v| !is_json_shorthand(v)); "environment create cannot combine workspace_id with --json payload".to_string()
);
}
let payload = parse_optional_json(json, json_shorthand, "environment create")?; let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"environment create",
)?;
if let Some(payload) = payload { if let Some(payload) = payload {
if name.is_some() { if name.is_some() {
@@ -92,11 +103,10 @@ fn create(
validate_create_id(&payload, "environment")?; validate_create_id(&payload, "environment")?;
let mut environment: Environment = serde_json::from_value(payload) let mut environment: Environment = serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse environment create JSON: {e}"))?; .map_err(|e| format!("Failed to parse environment create JSON: {e}"))?;
merge_workspace_id_arg(
workspace_id_arg.as_deref(), if environment.workspace_id.is_empty() {
&mut environment.workspace_id, return Err("environment create JSON requires non-empty \"workspaceId\"".to_string());
"environment create", }
)?;
if environment.parent_model.is_empty() { if environment.parent_model.is_empty() {
environment.parent_model = "environment".to_string(); environment.parent_model = "environment".to_string();
@@ -111,7 +121,7 @@ fn create(
return Ok(()); return Ok(());
} }
let workspace_id = workspace_id_arg.ok_or_else(|| { let workspace_id = workspace_id.ok_or_else(|| {
"environment create requires workspace_id unless JSON payload is provided".to_string() "environment create requires workspace_id unless JSON payload is provided".to_string()
})?; })?;
let name = name.ok_or_else(|| { let name = name.ok_or_else(|| {

View File

@@ -2,8 +2,8 @@ use crate::cli::{FolderArgs, FolderCommands};
use crate::context::CliContext; use crate::context::CliContext;
use crate::utils::confirm::confirm_delete; use crate::utils::confirm::confirm_delete;
use crate::utils::json::{ use crate::utils::json::{
apply_merge_patch, is_json_shorthand, merge_workspace_id_arg, parse_optional_json, apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
parse_required_json, require_id, validate_create_id, validate_create_id,
}; };
use yaak_models::models::Folder; use yaak_models::models::Folder;
use yaak_models::util::UpdateSource; use yaak_models::util::UpdateSource;
@@ -58,11 +58,15 @@ fn create(
name: Option<String>, name: Option<String>,
json: Option<String>, json: Option<String>,
) -> CommandResult { ) -> CommandResult {
let json_shorthand = if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
workspace_id.as_deref().filter(|v| is_json_shorthand(v)).map(str::to_owned); return Err("folder create cannot combine workspace_id with --json payload".to_string());
let workspace_id_arg = workspace_id.filter(|v| !is_json_shorthand(v)); }
let payload = parse_optional_json(json, json_shorthand, "folder create")?; let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"folder create",
)?;
if let Some(payload) = payload { if let Some(payload) = payload {
if name.is_some() { if name.is_some() {
@@ -70,13 +74,12 @@ fn create(
} }
validate_create_id(&payload, "folder")?; validate_create_id(&payload, "folder")?;
let mut folder: Folder = serde_json::from_value(payload) let folder: Folder = serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse folder create JSON: {e}"))?; .map_err(|e| format!("Failed to parse folder create JSON: {e}"))?;
merge_workspace_id_arg(
workspace_id_arg.as_deref(), if folder.workspace_id.is_empty() {
&mut folder.workspace_id, return Err("folder create JSON requires non-empty \"workspaceId\"".to_string());
"folder create", }
)?;
let created = ctx let created = ctx
.db() .db()
@@ -87,7 +90,7 @@ fn create(
return Ok(()); return Ok(());
} }
let workspace_id = workspace_id_arg.ok_or_else(|| { let workspace_id = workspace_id.ok_or_else(|| {
"folder create requires workspace_id unless JSON payload is provided".to_string() "folder create requires workspace_id unless JSON payload is provided".to_string()
})?; })?;
let name = name.ok_or_else(|| { let name = name.ok_or_else(|| {

View File

@@ -2,8 +2,8 @@ use crate::cli::{RequestArgs, RequestCommands, RequestSchemaType};
use crate::context::CliContext; use crate::context::CliContext;
use crate::utils::confirm::confirm_delete; use crate::utils::confirm::confirm_delete;
use crate::utils::json::{ use crate::utils::json::{
apply_merge_patch, is_json_shorthand, merge_workspace_id_arg, parse_optional_json, apply_merge_patch, is_json_shorthand, parse_optional_json, parse_required_json, require_id,
parse_required_json, require_id, validate_create_id, validate_create_id,
}; };
use crate::utils::schema::append_agent_hints; use crate::utils::schema::append_agent_hints;
use schemars::schema_for; use schemars::schema_for;
@@ -11,8 +11,8 @@ use serde_json::{Map, Value, json};
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Write; use std::io::Write;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
use yaak_http::sender::HttpResponseEvent as SenderHttpResponseEvent; use yaak_http::sender::HttpResponseEvent as SenderHttpResponseEvent;
use yaak::send::{SendHttpRequestByIdWithPluginsParams, send_http_request_by_id_with_plugins};
use yaak_models::models::{GrpcRequest, HttpRequest, WebsocketRequest}; use yaak_models::models::{GrpcRequest, HttpRequest, WebsocketRequest};
use yaak_models::queries::any_request::AnyRequest; use yaak_models::queries::any_request::AnyRequest;
use yaak_models::util::UpdateSource; use yaak_models::util::UpdateSource;
@@ -336,11 +336,15 @@ fn create(
url: Option<String>, url: Option<String>,
json: Option<String>, json: Option<String>,
) -> CommandResult { ) -> CommandResult {
let json_shorthand = if json.is_some() && workspace_id.as_deref().is_some_and(|v| !is_json_shorthand(v)) {
workspace_id.as_deref().filter(|v| is_json_shorthand(v)).map(str::to_owned); return Err("request create cannot combine workspace_id with --json payload".to_string());
let workspace_id_arg = workspace_id.filter(|v| !is_json_shorthand(v)); }
let payload = parse_optional_json(json, json_shorthand, "request create")?; let payload = parse_optional_json(
json,
workspace_id.clone().filter(|v| is_json_shorthand(v)),
"request create",
)?;
if let Some(payload) = payload { if let Some(payload) = payload {
if name.is_some() || method.is_some() || url.is_some() { if name.is_some() || method.is_some() || url.is_some() {
@@ -348,13 +352,12 @@ fn create(
} }
validate_create_id(&payload, "request")?; validate_create_id(&payload, "request")?;
let mut request: HttpRequest = serde_json::from_value(payload) let request: HttpRequest = serde_json::from_value(payload)
.map_err(|e| format!("Failed to parse request create JSON: {e}"))?; .map_err(|e| format!("Failed to parse request create JSON: {e}"))?;
merge_workspace_id_arg(
workspace_id_arg.as_deref(), if request.workspace_id.is_empty() {
&mut request.workspace_id, return Err("request create JSON requires non-empty \"workspaceId\"".to_string());
"request create", }
)?;
let created = ctx let created = ctx
.db() .db()
@@ -365,7 +368,7 @@ fn create(
return Ok(()); return Ok(());
} }
let workspace_id = workspace_id_arg.ok_or_else(|| { let workspace_id = workspace_id.ok_or_else(|| {
"request create requires workspace_id unless JSON payload is provided".to_string() "request create requires workspace_id unless JSON payload is provided".to_string()
})?; })?;
let name = name.unwrap_or_default(); let name = name.unwrap_or_default();

View File

@@ -38,16 +38,12 @@ impl CliContext {
let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id)); let encryption_manager = Arc::new(EncryptionManager::new(query_manager.clone(), app_id));
let plugin_manager = if with_plugins { let plugin_manager = if with_plugins {
let embedded_vendored_plugin_dir = data_dir.join("vendored-plugins"); let vendored_plugin_dir = data_dir.join("vendored-plugins");
let bundled_plugin_dir =
resolve_bundled_plugin_dir_for_cli(&embedded_vendored_plugin_dir);
let installed_plugin_dir = data_dir.join("installed-plugins"); let installed_plugin_dir = data_dir.join("installed-plugins");
let node_bin_path = PathBuf::from("node"); let node_bin_path = PathBuf::from("node");
if bundled_plugin_dir == embedded_vendored_plugin_dir { prepare_embedded_vendored_plugins(&vendored_plugin_dir)
prepare_embedded_vendored_plugins(&embedded_vendored_plugin_dir) .expect("Failed to prepare bundled plugins");
.expect("Failed to prepare bundled plugins");
}
let plugin_runtime_main = let plugin_runtime_main =
std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| { std::env::var("YAAK_PLUGIN_RUNTIME").map(PathBuf::from).unwrap_or_else(|_| {
@@ -56,13 +52,13 @@ impl CliContext {
}); });
match PluginManager::new( match PluginManager::new(
bundled_plugin_dir, vendored_plugin_dir,
embedded_vendored_plugin_dir,
installed_plugin_dir, installed_plugin_dir,
node_bin_path, node_bin_path,
plugin_runtime_main, plugin_runtime_main,
&query_manager, &query_manager,
&PluginContext::new_empty(), &PluginContext::new_empty(),
false,
) )
.await .await
{ {
@@ -135,20 +131,3 @@ fn prepare_embedded_vendored_plugins(vendored_plugin_dir: &Path) -> std::io::Res
EMBEDDED_VENDORED_PLUGINS.extract(vendored_plugin_dir)?; EMBEDDED_VENDORED_PLUGINS.extract(vendored_plugin_dir)?;
Ok(()) Ok(())
} }
fn resolve_bundled_plugin_dir_for_cli(embedded_vendored_plugin_dir: &Path) -> PathBuf {
if !cfg!(debug_assertions) {
return embedded_vendored_plugin_dir.to_path_buf();
}
let plugins_dir = match std::env::current_dir() {
Ok(cwd) => cwd.join("plugins"),
Err(_) => return embedded_vendored_plugin_dir.to_path_buf(),
};
if !plugins_dir.is_dir() {
return embedded_vendored_plugin_dir.to_path_buf();
}
plugins_dir.canonicalize().unwrap_or(plugins_dir)
}

View File

@@ -63,30 +63,6 @@ pub fn validate_create_id(payload: &Value, context: &str) -> JsonResult<()> {
} }
} }
pub fn merge_workspace_id_arg(
workspace_id_from_arg: Option<&str>,
payload_workspace_id: &mut String,
context: &str,
) -> JsonResult<()> {
if let Some(workspace_id_arg) = workspace_id_from_arg {
if payload_workspace_id.is_empty() {
*payload_workspace_id = workspace_id_arg.to_string();
} else if payload_workspace_id != workspace_id_arg {
return Err(format!(
"{context} got conflicting workspace_id values between positional arg and JSON payload"
));
}
}
if payload_workspace_id.is_empty() {
return Err(format!(
"{context} requires non-empty \"workspaceId\" in JSON payload or positional workspace_id"
));
}
Ok(())
}
pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> JsonResult<T> pub fn apply_merge_patch<T>(existing: &T, patch: &Value, id: &str, context: &str) -> JsonResult<T>
where where
T: Serialize + DeserializeOwned, T: Serialize + DeserializeOwned,

View File

@@ -79,54 +79,6 @@ fn json_create_and_update_merge_patch_round_trip() {
.stdout(contains("\"color\": \"#00ff00\"")); .stdout(contains("\"color\": \"#00ff00\""));
} }
#[test]
fn create_merges_positional_workspace_id_into_json_payload() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"environment",
"create",
"wk_test",
"--json",
r#"{"name":"Merged Environment"}"#,
])
.assert()
.success();
let environment_id = parse_created_id(&create_assert.get_output().stdout, "environment create");
cli_cmd(data_dir)
.args(["environment", "show", &environment_id])
.assert()
.success()
.stdout(contains("\"workspaceId\": \"wk_test\""))
.stdout(contains("\"name\": \"Merged Environment\""));
}
#[test]
fn create_rejects_conflicting_workspace_ids_between_arg_and_json() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
seed_workspace(data_dir, "wk_other");
cli_cmd(data_dir)
.args([
"environment",
"create",
"wk_test",
"--json",
r#"{"workspaceId":"wk_other","name":"Mismatch"}"#,
])
.assert()
.failure()
.stderr(contains(
"environment create got conflicting workspace_id values between positional arg and JSON payload",
));
}
#[test] #[test]
fn environment_schema_outputs_json_schema() { fn environment_schema_outputs_json_schema() {
let temp_dir = TempDir::new().expect("Failed to create temp dir"); let temp_dir = TempDir::new().expect("Failed to create temp dir");
@@ -139,8 +91,6 @@ fn environment_schema_outputs_json_schema() {
.stdout(contains("\"type\":\"object\"")) .stdout(contains("\"type\":\"object\""))
.stdout(contains("\"x-yaak-agent-hints\"")) .stdout(contains("\"x-yaak-agent-hints\""))
.stdout(contains("\"templateVariableSyntax\":\"${[ my_var ]}\"")) .stdout(contains("\"templateVariableSyntax\":\"${[ my_var ]}\""))
.stdout(contains( .stdout(contains("\"templateFunctionSyntax\":\"${[ namespace.my_func(a='aaa',b='bbb') ]}\""))
"\"templateFunctionSyntax\":\"${[ namespace.my_func(a='aaa',b='bbb') ]}\"",
))
.stdout(contains("\"workspaceId\"")); .stdout(contains("\"workspaceId\""));
} }

View File

@@ -72,51 +72,3 @@ fn json_create_and_update_merge_patch_round_trip() {
.stdout(contains("\"name\": \"Json Folder\"")) .stdout(contains("\"name\": \"Json Folder\""))
.stdout(contains("\"description\": \"Folder Description\"")); .stdout(contains("\"description\": \"Folder Description\""));
} }
#[test]
fn create_merges_positional_workspace_id_into_json_payload() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"folder",
"create",
"wk_test",
"--json",
r#"{"name":"Merged Folder"}"#,
])
.assert()
.success();
let folder_id = parse_created_id(&create_assert.get_output().stdout, "folder create");
cli_cmd(data_dir)
.args(["folder", "show", &folder_id])
.assert()
.success()
.stdout(contains("\"workspaceId\": \"wk_test\""))
.stdout(contains("\"name\": \"Merged Folder\""));
}
#[test]
fn create_rejects_conflicting_workspace_ids_between_arg_and_json() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
seed_workspace(data_dir, "wk_other");
cli_cmd(data_dir)
.args([
"folder",
"create",
"wk_test",
"--json",
r#"{"workspaceId":"wk_other","name":"Mismatch"}"#,
])
.assert()
.failure()
.stderr(contains(
"folder create got conflicting workspace_id values between positional arg and JSON payload",
));
}

View File

@@ -130,54 +130,6 @@ fn create_allows_workspace_only_with_empty_defaults() {
assert_eq!(request.url, ""); assert_eq!(request.url, "");
} }
#[test]
fn create_merges_positional_workspace_id_into_json_payload() {
let temp_dir = TempDir::new().expect("Failed to create temp dir");
let data_dir = temp_dir.path();
seed_workspace(data_dir, "wk_test");
let create_assert = cli_cmd(data_dir)
.args([
"request",
"create",
"wk_test",
"--json",
r#"{"name":"Merged Request","url":"https://example.com"}"#,
])
.assert()
.success();
let request_id = parse_created_id(&create_assert.get_output().stdout, "request create");
cli_cmd(data_dir)
.args(["request", "show", &request_id])
.assert()
.success()
.stdout(contains("\"workspaceId\": \"wk_test\""))
.stdout(contains("\"name\": \"Merged Request\""));
}
#[test]
fn create_rejects_conflicting_workspace_ids_between_arg_and_json() {
    // A `workspaceId` in the JSON payload that disagrees with the positional
    // argument must fail loudly instead of silently preferring one of them.
    let temp_dir = TempDir::new().expect("Failed to create temp dir");
    let data_dir = temp_dir.path();
    seed_workspace(data_dir, "wk_test");
    seed_workspace(data_dir, "wk_other");

    let conflicting_json = r#"{"workspaceId":"wk_other","name":"Mismatch"}"#;
    cli_cmd(data_dir)
        .args(["request", "create", "wk_test", "--json", conflicting_json])
        .assert()
        .failure()
        .stderr(contains(
            "request create got conflicting workspace_id values between positional arg and JSON payload",
        ));
}
#[test] #[test]
fn request_send_persists_response_body_and_events() { fn request_send_persists_response_body_and_events() {
let temp_dir = TempDir::new().expect("Failed to create temp dir"); let temp_dir = TempDir::new().expect("Failed to create temp dir");
@@ -239,9 +191,7 @@ fn request_schema_http_outputs_json_schema() {
.stdout(contains("\"type\":\"object\"")) .stdout(contains("\"type\":\"object\""))
.stdout(contains("\"x-yaak-agent-hints\"")) .stdout(contains("\"x-yaak-agent-hints\""))
.stdout(contains("\"templateVariableSyntax\":\"${[ my_var ]}\"")) .stdout(contains("\"templateVariableSyntax\":\"${[ my_var ]}\""))
.stdout(contains( .stdout(contains("\"templateFunctionSyntax\":\"${[ namespace.my_func(a='aaa',b='bbb') ]}\""))
"\"templateFunctionSyntax\":\"${[ namespace.my_func(a='aaa',b='bbb') ]}\"",
))
.stdout(contains("\"authentication\":")) .stdout(contains("\"authentication\":"))
.stdout(contains("/foo/:id/comments/:commentId")) .stdout(contains("/foo/:id/comments/:commentId"))
.stdout(contains("put concrete values in `urlParameters`")); .stdout(contains("put concrete values in `urlParameters`"));

View File

@@ -362,7 +362,7 @@ async fn handle_host_plugin_request<R: Runtime>(
workspace_id: http_request.workspace_id.clone(), workspace_id: http_request.workspace_id.clone(),
..Default::default() ..Default::default()
}, },
&UpdateSource::from_window_label(window.label()), &UpdateSource::Plugin,
&blobs, &blobs,
)? )?
}; };

View File

@@ -10,7 +10,6 @@ use crate::error::Result;
use crate::models_ext::QueryManagerExt; use crate::models_ext::QueryManagerExt;
use log::{error, info, warn}; use log::{error, info, warn};
use serde::Serialize; use serde::Serialize;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
@@ -244,11 +243,6 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
.path() .path()
.resolve("vendored/plugins", BaseDirectory::Resource) .resolve("vendored/plugins", BaseDirectory::Resource)
.expect("failed to resolve plugin directory resource"); .expect("failed to resolve plugin directory resource");
let bundled_plugin_dir = if is_dev() {
resolve_workspace_plugins_dir().unwrap_or_else(|| vendored_plugin_dir.clone())
} else {
vendored_plugin_dir.clone()
};
let installed_plugin_dir = app_handle let installed_plugin_dir = app_handle
.path() .path()
@@ -272,6 +266,7 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
.expect("failed to resolve plugin runtime") .expect("failed to resolve plugin runtime")
.join("index.cjs"); .join("index.cjs");
let dev_mode = is_dev();
let query_manager = let query_manager =
app_handle.state::<yaak_models::query_manager::QueryManager>().inner().clone(); app_handle.state::<yaak_models::query_manager::QueryManager>().inner().clone();
@@ -279,13 +274,13 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
let app_handle_clone = app_handle.clone(); let app_handle_clone = app_handle.clone();
tauri::async_runtime::block_on(async move { tauri::async_runtime::block_on(async move {
let manager = PluginManager::new( let manager = PluginManager::new(
bundled_plugin_dir,
vendored_plugin_dir, vendored_plugin_dir,
installed_plugin_dir, installed_plugin_dir,
node_bin_path, node_bin_path,
plugin_runtime_main, plugin_runtime_main,
&query_manager, &query_manager,
&PluginContext::new_empty(), &PluginContext::new_empty(),
dev_mode,
) )
.await .await
.expect("Failed to initialize plugins"); .expect("Failed to initialize plugins");
@@ -327,11 +322,3 @@ pub fn init<R: Runtime>() -> TauriPlugin<R> {
}) })
.build() .build()
} }
/// Locate the repository's `plugins/` directory relative to this crate's
/// manifest. Returns `None` when the path cannot be canonicalized (e.g.
/// when the binary runs outside the source tree).
fn resolve_workspace_plugins_dir() -> Option<PathBuf> {
    let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    let candidate = manifest_dir.join("../..").join("plugins");
    candidate.canonicalize().ok()
}

View File

@@ -36,6 +36,7 @@ impl HttpConnectionManager {
connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl); connections.retain(|_, (_, last_used)| last_used.elapsed() <= self.ttl);
if let Some((cached, last_used)) = connections.get_mut(&id) { if let Some((cached, last_used)) = connections.get_mut(&id) {
info!("Re-using HTTP client {id}");
*last_used = Instant::now(); *last_used = Instant::now();
return Ok(CachedClient { return Ok(CachedClient {
client: cached.client.clone(), client: cached.client.clone(),

View File

@@ -24,6 +24,7 @@ use crate::plugin_handle::PluginHandle;
use crate::server_ws::PluginRuntimeServerWebsocket; use crate::server_ws::PluginRuntimeServerWebsocket;
use log::{error, info, warn}; use log::{error, info, warn};
use std::collections::HashMap; use std::collections::HashMap;
use std::env;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
@@ -45,9 +46,9 @@ pub struct PluginManager {
kill_tx: tokio::sync::watch::Sender<bool>, kill_tx: tokio::sync::watch::Sender<bool>,
killed_rx: Arc<Mutex<Option<oneshot::Receiver<()>>>>, killed_rx: Arc<Mutex<Option<oneshot::Receiver<()>>>>,
ws_service: Arc<PluginRuntimeServerWebsocket>, ws_service: Arc<PluginRuntimeServerWebsocket>,
bundled_plugin_dir: PathBuf,
vendored_plugin_dir: PathBuf, vendored_plugin_dir: PathBuf,
pub(crate) installed_plugin_dir: PathBuf, pub(crate) installed_plugin_dir: PathBuf,
dev_mode: bool,
} }
/// Callback for plugin initialization events (e.g., toast notifications) /// Callback for plugin initialization events (e.g., toast notifications)
@@ -57,21 +58,21 @@ impl PluginManager {
/// Create a new PluginManager with the given paths. /// Create a new PluginManager with the given paths.
/// ///
/// # Arguments /// # Arguments
/// * `bundled_plugin_dir` - Directory to scan for bundled plugins
/// * `vendored_plugin_dir` - Path to vendored plugins directory /// * `vendored_plugin_dir` - Path to vendored plugins directory
/// * `installed_plugin_dir` - Path to installed plugins directory /// * `installed_plugin_dir` - Path to installed plugins directory
/// * `node_bin_path` - Path to the yaaknode binary /// * `node_bin_path` - Path to the yaaknode binary
/// * `plugin_runtime_main` - Path to the plugin runtime index.cjs /// * `plugin_runtime_main` - Path to the plugin runtime index.cjs
/// * `query_manager` - Query manager for bundled plugin registration and loading /// * `query_manager` - Query manager for bundled plugin registration and loading
/// * `plugin_context` - Context to use while initializing plugins /// * `plugin_context` - Context to use while initializing plugins
/// * `dev_mode` - Whether the app is in dev mode (affects plugin loading)
pub async fn new( pub async fn new(
bundled_plugin_dir: PathBuf,
vendored_plugin_dir: PathBuf, vendored_plugin_dir: PathBuf,
installed_plugin_dir: PathBuf, installed_plugin_dir: PathBuf,
node_bin_path: PathBuf, node_bin_path: PathBuf,
plugin_runtime_main: PathBuf, plugin_runtime_main: PathBuf,
query_manager: &QueryManager, query_manager: &QueryManager,
plugin_context: &PluginContext, plugin_context: &PluginContext,
dev_mode: bool,
) -> Result<PluginManager> { ) -> Result<PluginManager> {
let (events_tx, mut events_rx) = mpsc::channel(2048); let (events_tx, mut events_rx) = mpsc::channel(2048);
let (kill_server_tx, kill_server_rx) = tokio::sync::watch::channel(false); let (kill_server_tx, kill_server_rx) = tokio::sync::watch::channel(false);
@@ -88,9 +89,9 @@ impl PluginManager {
ws_service: Arc::new(ws_service.clone()), ws_service: Arc::new(ws_service.clone()),
kill_tx: kill_server_tx, kill_tx: kill_server_tx,
killed_rx: Arc::new(Mutex::new(Some(killed_rx))), killed_rx: Arc::new(Mutex::new(Some(killed_rx))),
bundled_plugin_dir,
vendored_plugin_dir, vendored_plugin_dir,
installed_plugin_dir, installed_plugin_dir,
dev_mode,
}; };
// Forward events to subscribers // Forward events to subscribers
@@ -191,11 +192,25 @@ impl PluginManager {
Ok(plugin_manager) Ok(plugin_manager)
} }
/// Get the bundled plugins directory. In dev mode this resolves the
/// repository's `plugins/` directory relative to the current working
/// directory so plugins can be edited in place; in release mode — or
/// whenever the dev path cannot be resolved — it returns the vendored
/// plugin directory instead.
pub fn get_plugins_dir(&self) -> PathBuf {
    if self.dev_mode {
        // Use plugins directly for easy development.
        // Tauri runs from crates-tauri/yaak-app/, so go up two levels to
        // reach the project root. `canonicalize` errors when the directory
        // does not exist (e.g. a dev build launched from another cwd), so
        // fall back to the vendored plugins instead of panicking — the
        // original `canonicalize().unwrap()` inside `map` would panic
        // before the `unwrap_or_else` fallback could apply.
        env::current_dir()
            .ok()
            .and_then(|cwd| cwd.join("../../plugins").canonicalize().ok())
            .unwrap_or_else(|| self.vendored_plugin_dir.clone())
    } else {
        self.vendored_plugin_dir.clone()
    }
}
/// Read plugin directories from disk and return their paths. /// Read plugin directories from disk and return their paths.
/// This is useful for discovering bundled plugins. /// This is useful for discovering bundled plugins.
pub async fn list_bundled_plugin_dirs(&self) -> Result<Vec<String>> { pub async fn list_bundled_plugin_dirs(&self) -> Result<Vec<String>> {
info!("Loading bundled plugins from {:?}", self.bundled_plugin_dir); let plugins_dir = self.get_plugins_dir();
read_plugins_dir(&self.bundled_plugin_dir).await info!("Loading bundled plugins from {plugins_dir:?}");
read_plugins_dir(&plugins_dir).await
} }
pub async fn uninstall(&self, plugin_context: &PluginContext, dir: &str) -> Result<()> { pub async fn uninstall(&self, plugin_context: &PluginContext, dir: &str) -> Result<()> {

View File

@@ -273,5 +273,6 @@ pub fn find_client_certificate(
}); });
} }
debug!("No matching client certificate found for {}", url_string);
None None
} }

7
npm/README.md Normal file
View File

@@ -0,0 +1,7 @@
# Yaak CLI NPM Packages
The Rust `yaak` CLI binary is published to NPM with a meta package (`@yaakapp/cli`) and
platform-specific optional dependency packages. The package exposes both `yaak` and `yaakcli`
commands for compatibility.
This follows the same strategy previously used in the standalone `yaak-cli` repo.

View File

@@ -1,5 +1,5 @@
const fs = require("node:fs"); const fs = require("node:fs");
const path = require("node:path"); const path = require("node:path");
const cliReadme = path.join(__dirname, "..", "..", "crates-cli", "yaak-cli", "README.md"); const readme = path.join(__dirname, "..", "..", "README.md");
fs.copyFileSync(cliReadme, path.join(__dirname, "README.md")); fs.copyFileSync(readme, path.join(__dirname, "README.md"));

56
package-lock.json generated
View File

@@ -73,7 +73,7 @@
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^2.3.13", "@biomejs/biome": "^2.3.13",
"@tauri-apps/cli": "^2.9.6", "@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.4.0", "@yaakapp/cli": "^0.4.0-beta.2",
"dotenv-cli": "^11.0.0", "dotenv-cli": "^11.0.0",
"husky": "^9.1.7", "husky": "^9.1.7",
"nodejs-file-downloader": "^4.13.0", "nodejs-file-downloader": "^4.13.0",
@@ -4326,9 +4326,9 @@
"link": true "link": true
}, },
"node_modules/@yaakapp/cli": { "node_modules/@yaakapp/cli": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli/-/cli-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli/-/cli-0.4.0-beta.2.tgz",
"integrity": "sha512-8xnu2oFWlgV+xeIAHMuEgsqX6Sxq4UYrSH2WbafwDLbSep6fxpO74tiBH7xp4wakt/7Bcy9a2Q5R9nkAc1ZUdA==", "integrity": "sha512-UXPxTS9oWVCIr4rShC7HjcAX+gSmw/BQ5F1Xp3Rub3vY/G7+513JJsc1HhLGVZqFfOVRSMEKRxtF9/9okSyiHg==",
"dev": true, "dev": true,
"hasInstallScript": true, "hasInstallScript": true,
"bin": { "bin": {
@@ -4336,18 +4336,18 @@
"yaakcli": "bin/cli.js" "yaakcli": "bin/cli.js"
}, },
"optionalDependencies": { "optionalDependencies": {
"@yaakapp/cli-darwin-arm64": "0.4.0", "@yaakapp/cli-darwin-arm64": "0.4.0-beta.2",
"@yaakapp/cli-darwin-x64": "0.4.0", "@yaakapp/cli-darwin-x64": "0.4.0-beta.2",
"@yaakapp/cli-linux-arm64": "0.4.0", "@yaakapp/cli-linux-arm64": "0.4.0-beta.2",
"@yaakapp/cli-linux-x64": "0.4.0", "@yaakapp/cli-linux-x64": "0.4.0-beta.2",
"@yaakapp/cli-win32-arm64": "0.4.0", "@yaakapp/cli-win32-arm64": "0.4.0-beta.2",
"@yaakapp/cli-win32-x64": "0.4.0" "@yaakapp/cli-win32-x64": "0.4.0-beta.2"
} }
}, },
"node_modules/@yaakapp/cli-darwin-arm64": { "node_modules/@yaakapp/cli-darwin-arm64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-arm64/-/cli-darwin-arm64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-arm64/-/cli-darwin-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-bl8+VQNPMabXNGQCa7u6w0JGe3CmzYZPsGE8Q+5wGSxa3trGf1bmq/fMW5JXrMi1P7Laepnyad0TGGP/2C8uwQ==", "integrity": "sha512-mqkyH5tIPRLs9JumP9ZmzjB5gIwmOL1yCDoJ1qVU8DIJ7mwlcQaPGYTK98pVdBcKOjofVakBTcpol9P8rBv4qw==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
@@ -4358,9 +4358,9 @@
] ]
}, },
"node_modules/@yaakapp/cli-darwin-x64": { "node_modules/@yaakapp/cli-darwin-x64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-x64/-/cli-darwin-x64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-darwin-x64/-/cli-darwin-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-R+ETXNBWvmA3W88ZoTk/JtG/PZaUb85y3SwBgMbwcgdhBVwNS/g+DbCspcTFI5zs8Txsf5VuiFU+dW9M9olZ6A==", "integrity": "sha512-QI/H2yUF8CkJq+cnRthoUWWTEJPH4QPA78FYcGjFRhvBaj1m2G/GlCA5NkTXm/fvIjNkQEODSihXrhU+zoSSCw==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
@@ -4371,9 +4371,9 @@
] ]
}, },
"node_modules/@yaakapp/cli-linux-arm64": { "node_modules/@yaakapp/cli-linux-arm64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-arm64/-/cli-linux-arm64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-arm64/-/cli-linux-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-Pf7VyQf4r85FsI0qYnnst7URQF8/RxSZZj79cXLai0FnN3fDiypX4CmHx765bJxgfQZlBvqVmvPAaMW/TeiJEQ==", "integrity": "sha512-nvAp97LkgRpqVHyMwDdpkzlKOWG2kJXezCLRZaRWaEpbnNuviSF+0yzCuFGZRHEEspj7B0TiM+sKGkpvjNlweA==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
@@ -4384,9 +4384,9 @@
] ]
}, },
"node_modules/@yaakapp/cli-linux-x64": { "node_modules/@yaakapp/cli-linux-x64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-x64/-/cli-linux-x64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-linux-x64/-/cli-linux-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-bYWWfHAIW81A+ydJChjH1Qo3+aihz9gFLh7/9MOa6CJgnC6H3V5cnapmh50Hddt9l5ic02aA1FB8ORQOXxb01A==", "integrity": "sha512-9/qAMNrtE9glxih3XWGfFssIJpQ4mHNUTuWYKroc0aZZUrunnCw3tX1tQtFDxy0QRIZcGlBeBRtgxuuBd2fYbg==",
"cpu": [ "cpu": [
"x64" "x64"
], ],
@@ -4397,9 +4397,9 @@
] ]
}, },
"node_modules/@yaakapp/cli-win32-arm64": { "node_modules/@yaakapp/cli-win32-arm64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-arm64/-/cli-win32-arm64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-arm64/-/cli-win32-arm64-0.4.0-beta.2.tgz",
"integrity": "sha512-8X12xkyidyYZ5vtarZGFSYR6HJbUMFUsNxYPNQccnYJIY+soNkjJHOWDjaRvBzCbR8MLT9N04Y5PE/Jv20gXpA==", "integrity": "sha512-eM1zL+hl0y3NBLxWO90y9VyaFsAf0HAsECBWvhKhvEdd6KG4K1XzpXrC30cHQBGePIrCa/az8eSuvTde0Z2C/g==",
"cpu": [ "cpu": [
"arm64" "arm64"
], ],
@@ -4410,9 +4410,9 @@
] ]
}, },
"node_modules/@yaakapp/cli-win32-x64": { "node_modules/@yaakapp/cli-win32-x64": {
"version": "0.4.0", "version": "0.4.0-beta.2",
"resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-x64/-/cli-win32-x64-0.4.0.tgz", "resolved": "https://registry.npmjs.org/@yaakapp/cli-win32-x64/-/cli-win32-x64-0.4.0-beta.2.tgz",
"integrity": "sha512-wansfrCCycFcFclowQQxfsNLIAyATyqnnbITED5gUfUrBf8NFHrG0sWVCWlXUhHU7YvpmqL7CsdtlMkIGiZCPQ==", "integrity": "sha512-ySdiK0h216EqURkM5KZoqbPTgbIX4eNK/IgrKwSazxRb369HOZYQ8X68as+VRxEL4NCMmWlQNdbBDuf+apg/mg==",
"cpu": [ "cpu": [
"x64" "x64"
], ],

View File

@@ -97,7 +97,7 @@
"devDependencies": { "devDependencies": {
"@biomejs/biome": "^2.3.13", "@biomejs/biome": "^2.3.13",
"@tauri-apps/cli": "^2.9.6", "@tauri-apps/cli": "^2.9.6",
"@yaakapp/cli": "^0.4.0", "@yaakapp/cli": "^0.4.0-beta.2",
"dotenv-cli": "^11.0.0", "dotenv-cli": "^11.0.0",
"husky": "^9.1.7", "husky": "^9.1.7",
"nodejs-file-downloader": "^4.13.0", "nodejs-file-downloader": "^4.13.0",

View File

@@ -1,7 +1,7 @@
{ {
"name": "@yaak/action-send-folder", "name": "@yaak/action-send-folder",
"displayName": "Send All", "displayName": "Send All",
"description": "Send all HTTP requests in a folder sequentially in tree order", "description": "Send all HTTP requests in a folder sequentially",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/mountain-loop/yaak.git", "url": "https://github.com/mountain-loop/yaak.git",

View File

@@ -14,44 +14,22 @@ export const plugin: PluginDefinition = {
ctx.httpRequest.list(), ctx.httpRequest.list(),
]); ]);
// Build the send order to match tree ordering: // Build a set of all folder IDs that are descendants of the target folder
// sort siblings by sortPriority then updatedAt, and traverse folders depth-first. const folderIds = new Set<string>([targetFolder.id]);
const compareByOrder = ( const addDescendants = (parentId: string) => {
a: Pick<typeof allFolders[number], 'sortPriority' | 'updatedAt'>, for (const folder of allFolders) {
b: Pick<typeof allFolders[number], 'sortPriority' | 'updatedAt'>, if (folder.folderId === parentId && !folderIds.has(folder.id)) {
) => { folderIds.add(folder.id);
if (a.sortPriority === b.sortPriority) { addDescendants(folder.id);
return a.updatedAt > b.updatedAt ? 1 : -1;
}
return a.sortPriority - b.sortPriority;
};
const childrenByFolderId = new Map<string, Array<typeof allFolders[number] | typeof allRequests[number]>>();
for (const folder of allFolders) {
if (folder.folderId == null) continue;
const children = childrenByFolderId.get(folder.folderId) ?? [];
children.push(folder);
childrenByFolderId.set(folder.folderId, children);
}
for (const request of allRequests) {
if (request.folderId == null) continue;
const children = childrenByFolderId.get(request.folderId) ?? [];
children.push(request);
childrenByFolderId.set(request.folderId, children);
}
const requestsToSend: typeof allRequests = [];
const collectRequests = (folderId: string) => {
const children = (childrenByFolderId.get(folderId) ?? []).slice().sort(compareByOrder);
for (const child of children) {
if (child.model === 'folder') {
collectRequests(child.id);
} else if (child.model === 'http_request') {
requestsToSend.push(child);
} }
} }
}; };
collectRequests(targetFolder.id); addDescendants(targetFolder.id);
// Filter HTTP requests to those in the target folder or its descendants
const requestsToSend = allRequests.filter(
(req) => req.folderId != null && folderIds.has(req.folderId),
);
if (requestsToSend.length === 0) { if (requestsToSend.length === 0) {
await ctx.toast.show({ await ctx.toast.show({
@@ -62,7 +40,7 @@ export const plugin: PluginDefinition = {
return; return;
} }
// Send requests sequentially in the calculated folder order. // Send each request sequentially
let successCount = 0; let successCount = 0;
let errorCount = 0; let errorCount = 0;

View File

@@ -72,10 +72,6 @@ export const plugin: PluginDefinition = {
name: 'header', name: 'header',
label: 'Header Name', label: 'Header Name',
async dynamic(ctx, args) { async dynamic(ctx, args) {
// Dynamic form config also runs during send-time rendering.
// Keep this preview-only to avoid side-effect request sends.
if (args.purpose !== 'preview') return null;
const response = await getResponse(ctx, { const response = await getResponse(ctx, {
requestId: String(args.values.request || ''), requestId: String(args.values.request || ''),
purpose: args.purpose, purpose: args.purpose,
@@ -150,10 +146,6 @@ export const plugin: PluginDefinition = {
label: 'JSONPath or XPath', label: 'JSONPath or XPath',
placeholder: '$.books[0].id or /books[0]/id', placeholder: '$.books[0].id or /books[0]/id',
dynamic: async (ctx, args) => { dynamic: async (ctx, args) => {
// Dynamic form config also runs during send-time rendering.
// Keep this preview-only to avoid side-effect request sends.
if (args.purpose !== 'preview') return null;
const resp = await getResponse(ctx, { const resp = await getResponse(ctx, {
requestId: String(args.values.request || ''), requestId: String(args.values.request || ''),
purpose: 'preview', purpose: 'preview',