working on workflows

This commit is contained in:
2026-03-04 22:02:34 -06:00
parent b54aa3ec26
commit 7438f92502
63 changed files with 10231 additions and 731 deletions

View File

@@ -1,5 +1,5 @@
use anyhow::{Context, Result};
use reqwest::{multipart, Client as HttpClient, Method, RequestBuilder, Response, StatusCode};
use reqwest::{multipart, Client as HttpClient, Method, RequestBuilder, StatusCode};
use serde::{de::DeserializeOwned, Serialize};
use std::path::PathBuf;
use std::time::Duration;
@@ -83,13 +83,14 @@ impl ApiClient {
self.auth_token = None;
}
/// Refresh the authentication token using the refresh token
/// Refresh the authentication token using the refresh token.
///
/// Returns Ok(true) if refresh succeeded, Ok(false) if no refresh token available
/// Returns `Ok(true)` if refresh succeeded, `Ok(false)` if no refresh token
/// is available or the server rejected it.
async fn refresh_auth_token(&mut self) -> Result<bool> {
let refresh_token = match &self.refresh_token {
Some(token) => token.clone(),
None => return Ok(false), // No refresh token available
None => return Ok(false),
};
#[derive(Serialize)]
@@ -103,7 +104,6 @@ impl ApiClient {
refresh_token: String,
}
// Build refresh request without auth token
let url = format!("{}/auth/refresh", self.base_url);
let req = self
.client
@@ -113,7 +113,7 @@ impl ApiClient {
let response = req.send().await.context("Failed to refresh token")?;
if !response.status().is_success() {
// Refresh failed - clear tokens
// Refresh failed — clear tokens so we don't keep retrying
self.auth_token = None;
self.refresh_token = None;
return Ok(false);
@@ -128,7 +128,7 @@ impl ApiClient {
self.auth_token = Some(api_response.data.access_token.clone());
self.refresh_token = Some(api_response.data.refresh_token.clone());
// Persist to config file if we have the path
// Persist to config file
if self.config_path.is_some() {
if let Ok(mut config) = CliConfig::load() {
let _ = config.set_auth(
@@ -141,45 +141,96 @@ impl ApiClient {
Ok(true)
}
/// Build a request with common headers
fn build_request(&self, method: Method, path: &str) -> RequestBuilder {
// Auth endpoints are at /auth, not /api/v1/auth
let url = if path.starts_with("/auth") {
// ── Request building helpers ────────────────────────────────────────
/// Build a full URL from a path.
fn url_for(&self, path: &str) -> String {
if path.starts_with("/auth") {
format!("{}{}", self.base_url, path)
} else {
format!("{}/api/v1{}", self.base_url, path)
};
let mut req = self.client.request(method, &url);
}
}
/// Build a `RequestBuilder` with auth header applied.
fn build_request(&self, method: Method, path: &str) -> RequestBuilder {
let url = self.url_for(path);
let mut req = self.client.request(method, &url);
if let Some(token) = &self.auth_token {
req = req.bearer_auth(token);
}
req
}
/// Execute a request and handle the response with automatic token refresh
async fn execute<T: DeserializeOwned>(&mut self, req: RequestBuilder) -> Result<T> {
// ── Core execute-with-retry machinery ──────────────────────────────
/// Send a request that carries a JSON body. On a 401 response the token
/// is refreshed and the request is rebuilt & retried exactly once.
async fn execute_json<T, B>(
&mut self,
method: Method,
path: &str,
body: Option<&B>,
) -> Result<T>
where
T: DeserializeOwned,
B: Serialize,
{
// First attempt
let req = self.attach_body(self.build_request(method.clone(), path), body);
let response = req.send().await.context("Failed to send request to API")?;
// If 401 and we have a refresh token, try to refresh once
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
// Try to refresh the token
if self.refresh_auth_token().await? {
// Rebuild and retry the original request with new token
// Note: This is a simplified retry - the original request body is already consumed
// For a production implementation, we'd need to clone the request or store the body
return Err(anyhow::anyhow!(
"Token expired and was refreshed. Please retry your command."
));
// Retry with new token
let req = self.attach_body(self.build_request(method, path), body);
let response = req
.send()
.await
.context("Failed to send request to API (retry)")?;
return self.handle_response(response).await;
}
}
self.handle_response(response).await
}
/// Handle API response and extract data
async fn handle_response<T: DeserializeOwned>(&self, response: Response) -> Result<T> {
/// Send a request that carries a JSON body and expects no response body.
async fn execute_json_no_response<B: Serialize>(
&mut self,
method: Method,
path: &str,
body: Option<&B>,
) -> Result<()> {
let req = self.attach_body(self.build_request(method.clone(), path), body);
let response = req.send().await.context("Failed to send request to API")?;
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
if self.refresh_auth_token().await? {
let req = self.attach_body(self.build_request(method, path), body);
let response = req
.send()
.await
.context("Failed to send request to API (retry)")?;
return self.handle_empty_response(response).await;
}
}
self.handle_empty_response(response).await
}
/// Optionally attach a JSON body to a request builder.
fn attach_body<B: Serialize>(&self, req: RequestBuilder, body: Option<&B>) -> RequestBuilder {
match body {
Some(b) => req.json(b),
None => req,
}
}
// ── Response handling ──────────────────────────────────────────────
/// Parse a successful API response or return a descriptive error.
async fn handle_response<T: DeserializeOwned>(&self, response: reqwest::Response) -> Result<T> {
let status = response.status();
if status.is_success() {
@@ -194,7 +245,6 @@ impl ApiClient {
.await
.unwrap_or_else(|_| "Unknown error".to_string());
// Try to parse as API error
if let Ok(api_error) = serde_json::from_str::<ApiError>(&error_text) {
anyhow::bail!("API error ({}): {}", status, api_error.error);
} else {
@@ -203,10 +253,30 @@ impl ApiClient {
}
}
/// Handle a response where we only care about success/failure, not a body.
async fn handle_empty_response(&self, response: reqwest::Response) -> Result<()> {
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
if let Ok(api_error) = serde_json::from_str::<ApiError>(&error_text) {
anyhow::bail!("API error ({}): {}", status, api_error.error);
} else {
anyhow::bail!("API error ({}): {}", status, error_text);
}
}
}
// ── Public convenience methods ─────────────────────────────────────
/// GET request
pub async fn get<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::GET, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::GET, path, None).await
}
/// GET request with query parameters (query string must be in path)
@@ -215,8 +285,7 @@ impl ApiClient {
/// Example: `client.get_with_query("/actions?enabled=true&pack=core").await`
#[allow(dead_code)]
pub async fn get_with_query<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::GET, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::GET, path, None).await
}
/// POST request with JSON body
@@ -225,8 +294,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::POST, path).json(body);
self.execute(req).await
self.execute_json(Method::POST, path, Some(body)).await
}
/// PUT request with JSON body
@@ -237,8 +305,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::PUT, path).json(body);
self.execute(req).await
self.execute_json(Method::PUT, path, Some(body)).await
}
/// PATCH request with JSON body
@@ -247,8 +314,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::PATCH, path).json(body);
self.execute(req).await
self.execute_json(Method::PATCH, path, Some(body)).await
}
/// DELETE request with response parsing
@@ -259,8 +325,7 @@ impl ApiClient {
/// delete operations return metadata (e.g., cascade deletion summaries).
#[allow(dead_code)]
pub async fn delete<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::DELETE, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::DELETE, path, None).await
}
/// POST request without expecting response body
@@ -270,36 +335,14 @@ impl ApiClient {
/// Kept for API completeness even though not currently used.
#[allow(dead_code)]
pub async fn post_no_response<B: Serialize>(&mut self, path: &str, body: &B) -> Result<()> {
let req = self.build_request(Method::POST, path).json(body);
let response = req.send().await.context("Failed to send request to API")?;
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
anyhow::bail!("API error ({}): {}", status, error_text);
}
self.execute_json_no_response(Method::POST, path, Some(body))
.await
}
/// DELETE request without expecting response body
pub async fn delete_no_response(&mut self, path: &str) -> Result<()> {
let req = self.build_request(Method::DELETE, path);
let response = req.send().await.context("Failed to send request to API")?;
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
anyhow::bail!("API error ({}): {}", status, error_text);
}
self.execute_json_no_response::<()>(Method::DELETE, path, None)
.await
}
/// POST a multipart/form-data request with a file field and optional text fields.
@@ -318,33 +361,47 @@ impl ApiClient {
mime_type: &str,
extra_fields: Vec<(&str, String)>,
) -> Result<T> {
let url = format!("{}/api/v1{}", self.base_url, path);
// Closure-like helper to build the multipart request from scratch.
// We need this because reqwest::multipart::Form is not Clone, so we
// must rebuild it for the retry attempt.
let build_multipart_request =
|client: &ApiClient, bytes: &[u8]| -> Result<reqwest::RequestBuilder> {
let url = format!("{}/api/v1{}", client.base_url, path);
let file_part = multipart::Part::bytes(file_bytes)
.file_name(file_name.to_string())
.mime_str(mime_type)
.context("Invalid MIME type")?;
let file_part = multipart::Part::bytes(bytes.to_vec())
.file_name(file_name.to_string())
.mime_str(mime_type)
.context("Invalid MIME type")?;
let mut form = multipart::Form::new().part(file_field_name.to_string(), file_part);
let mut form = multipart::Form::new().part(file_field_name.to_string(), file_part);
for (key, value) in extra_fields {
form = form.text(key.to_string(), value);
}
for (key, value) in &extra_fields {
form = form.text(key.to_string(), value.clone());
}
let mut req = self.client.post(&url).multipart(form);
let mut req = client.client.post(&url).multipart(form);
if let Some(token) = &client.auth_token {
req = req.bearer_auth(token);
}
Ok(req)
};
if let Some(token) = &self.auth_token {
req = req.bearer_auth(token);
}
// First attempt
let req = build_multipart_request(self, &file_bytes)?;
let response = req
.send()
.await
.context("Failed to send multipart request to API")?;
let response = req.send().await.context("Failed to send multipart request to API")?;
// Handle 401 + refresh (same pattern as execute())
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
if self.refresh_auth_token().await? {
return Err(anyhow::anyhow!(
"Token expired and was refreshed. Please retry your command."
));
// Retry with new token
let req = build_multipart_request(self, &file_bytes)?;
let response = req
.send()
.await
.context("Failed to send multipart request to API (retry)")?;
return self.handle_response(response).await;
}
}
@@ -374,4 +431,22 @@ mod tests {
client.clear_auth_token();
assert!(client.auth_token.is_none());
}
// Non-auth paths must be routed under the versioned API prefix.
#[test]
fn test_url_for_api_path() {
    let client = ApiClient::new("http://localhost:8080".to_string(), None);
    assert_eq!(
        client.url_for("/actions"),
        "http://localhost:8080/api/v1/actions"
    );
}
// Paths starting with /auth bypass the /api/v1 prefix entirely.
#[test]
fn test_url_for_auth_path() {
    let client = ApiClient::new("http://localhost:8080".to_string(), None);
    assert_eq!(
        client.url_for("/auth/login"),
        "http://localhost:8080/auth/login"
    );
}
}

View File

@@ -52,7 +52,7 @@ pub enum ActionCommands {
action_ref: String,
/// Skip confirmation prompt
#[arg(short, long)]
#[arg(long)]
yes: bool,
},
/// Execute an action

View File

@@ -7,3 +7,4 @@ pub mod pack_index;
pub mod rule;
pub mod sensor;
pub mod trigger;
pub mod workflow;

View File

@@ -11,6 +11,37 @@ use crate::output::{self, OutputFormat};
#[derive(Subcommand)]
pub enum PackCommands {
/// Create an empty pack
///
/// Creates a new pack with no actions, triggers, rules, or sensors.
/// Use --interactive (-i) to be prompted for each field, or provide
/// fields via flags. Only --ref is required in non-interactive mode
/// (--label defaults to a title-cased ref, version defaults to 0.1.0).
Create {
/// Unique reference identifier (e.g., "my_pack", "slack")
#[arg(long, short = 'r')]
r#ref: Option<String>,
/// Human-readable label (defaults to title-cased ref)
#[arg(long, short)]
label: Option<String>,
/// Pack description
#[arg(long, short)]
description: Option<String>,
/// Pack version (semver format recommended)
#[arg(long = "pack-version", default_value = "0.1.0")]
pack_version: String,
/// Tags for categorization (comma-separated)
#[arg(long, value_delimiter = ',')]
tags: Vec<String>,
/// Interactive mode — prompt for each field
#[arg(long, short)]
interactive: bool,
},
/// List all installed packs
List {
/// Filter by pack name
@@ -75,7 +106,7 @@ pub enum PackCommands {
pack_ref: String,
/// Skip confirmation prompt
#[arg(short = 'y', long)]
#[arg(long)]
yes: bool,
},
/// Register a pack from a local directory (path must be accessible by the API server)
@@ -282,6 +313,17 @@ struct UploadPackResponse {
tests_skipped: bool,
}
/// JSON body for `POST /packs` when creating an empty pack.
///
/// `description` is omitted from the payload when `None`; `tags` is always
/// serialized (as an empty array when no tags were provided).
#[derive(Debug, Serialize)]
struct CreatePackBody {
    /// Unique pack reference identifier (e.g. "my_pack").
    r#ref: String,
    /// Human-readable label.
    label: String,
    /// Optional description; key is skipped entirely when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    /// Pack version string (semver recommended).
    version: String,
    // NOTE: the previous `#[serde(default)]` on this field was removed —
    // `default` only affects deserialization and this struct only derives
    // Serialize, so the attribute was inert and misleading.
    /// Tags for categorization.
    tags: Vec<String>,
}
pub async fn handle_pack_command(
profile: &Option<String>,
command: PackCommands,
@@ -289,6 +331,27 @@ pub async fn handle_pack_command(
output_format: OutputFormat,
) -> Result<()> {
match command {
PackCommands::Create {
r#ref,
label,
description,
pack_version,
tags,
interactive,
} => {
handle_create(
profile,
r#ref,
label,
description,
pack_version,
tags,
interactive,
api_url,
output_format,
)
.await
}
PackCommands::List { name } => handle_list(profile, name, api_url, output_format).await,
PackCommands::Show { pack_ref } => {
handle_show(profile, pack_ref, api_url, output_format).await
@@ -401,6 +464,169 @@ pub async fn handle_pack_command(
}
}
/// Derive a human-readable label from a pack ref.
///
/// The ref is split on `_`, `-`, or `.`; each non-empty piece is
/// title-cased (first character uppercased, remainder untouched) and the
/// pieces are joined with single spaces, e.g. `"my_cool-pack.v2"` →
/// `"My Cool Pack V2"`.
fn label_from_ref(r: &str) -> String {
    let mut label = String::new();
    for word in r.split(&['_', '-', '.'][..]).filter(|w| !w.is_empty()) {
        if !label.is_empty() {
            label.push(' ');
        }
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            // `to_uppercase` may yield more than one char (e.g. 'ß' → "SS"),
            // so extend with the iterator rather than pushing a single char.
            label.extend(first.to_uppercase());
            label.push_str(chars.as_str());
        }
    }
    label
}
/// Create a new, empty pack via `POST /packs`.
///
/// In interactive mode every field is prompted for (flag values, when
/// supplied, become the prompt defaults) and a summary/confirmation step
/// runs before anything is sent. In non-interactive mode `--ref` is
/// required; the label defaults to a title-cased ref and the version to
/// the clap default ("0.1.0").
async fn handle_create(
    profile: &Option<String>,
    ref_flag: Option<String>,
    label_flag: Option<String>,
    description_flag: Option<String>,
    version_flag: String,
    tags_flag: Vec<String>,
    interactive: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    // ── Collect field values ────────────────────────────────────────
    let (pack_ref, label, description, version, tags) = if interactive {
        // Interactive prompts. A ref supplied via --ref is taken as-is and
        // not re-prompted.
        let pack_ref: String = match ref_flag {
            Some(r) => r,
            None => dialoguer::Input::new()
                .with_prompt("Pack ref (unique identifier, e.g. \"my_pack\")")
                .interact_text()?,
        };
        let default_label = label_flag
            .clone()
            .unwrap_or_else(|| label_from_ref(&pack_ref));
        let label: String = dialoguer::Input::new()
            .with_prompt("Label")
            .default(default_label)
            .interact_text()?;
        let default_desc = description_flag.clone().unwrap_or_default();
        let description: String = dialoguer::Input::new()
            .with_prompt("Description (optional, Enter to skip)")
            .default(default_desc)
            .allow_empty(true)
            .interact_text()?;
        // An empty answer means "no description", not Some("").
        let description = if description.is_empty() {
            None
        } else {
            Some(description)
        };
        let version: String = dialoguer::Input::new()
            .with_prompt("Version")
            .default(version_flag)
            .interact_text()?;
        let default_tags = if tags_flag.is_empty() {
            String::new()
        } else {
            tags_flag.join(", ")
        };
        let tags_input: String = dialoguer::Input::new()
            .with_prompt("Tags (comma-separated, optional)")
            .default(default_tags)
            .allow_empty(true)
            .interact_text()?;
        // Re-split the comma-separated answer, dropping empty fragments
        // (trailing commas, double commas, whitespace-only entries).
        let tags: Vec<String> = tags_input
            .split(',')
            .map(|s| s.trim().to_string())
            .filter(|s| !s.is_empty())
            .collect();
        // Show summary and confirm before making any network call.
        println!();
        output::print_section("New Pack Summary");
        output::print_key_value_table(vec![
            ("Ref", pack_ref.clone()),
            ("Label", label.clone()),
            (
                "Description",
                description
                    .clone()
                    .unwrap_or_else(|| "(none)".to_string()),
            ),
            ("Version", version.clone()),
            (
                "Tags",
                if tags.is_empty() {
                    "(none)".to_string()
                } else {
                    tags.join(", ")
                },
            ),
        ]);
        println!();
        let confirm = dialoguer::Confirm::new()
            .with_prompt("Create this pack?")
            .default(true)
            .interact()?;
        if !confirm {
            output::print_info("Pack creation cancelled");
            return Ok(());
        }
        (pack_ref, label, description, version, tags)
    } else {
        // Non-interactive: ref is required; everything else has a default.
        let pack_ref = ref_flag.ok_or_else(|| {
            anyhow::anyhow!(
                "Pack ref is required. Provide --ref <value> or use --interactive mode."
            )
        })?;
        let label = label_flag.unwrap_or_else(|| label_from_ref(&pack_ref));
        let description = description_flag;
        let version = version_flag;
        let tags = tags_flag;
        (pack_ref, label, description, version, tags)
    };
    // ── Send request ────────────────────────────────────────────────
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let body = CreatePackBody {
        r#ref: pack_ref,
        label,
        description,
        version,
        tags,
    };
    let pack: Pack = client.post("/packs", &body).await?;
    // ── Output ──────────────────────────────────────────────────────
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&pack, output_format)?;
        }
        OutputFormat::Table => {
            output::print_success(&format!(
                "Pack '{}' created successfully (id: {})",
                pack.pack_ref, pack.id
            ));
        }
    }
    Ok(())
}
async fn handle_list(
profile: &Option<String>,
name: Option<String>,
@@ -1630,3 +1856,48 @@ async fn handle_update(
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // Tests for `label_from_ref`: one case per supported separator plus the
    // edge cases — single word, already-uppercase input, empty string, and
    // consecutive separators (empty fragments are dropped, not doubled).
    #[test]
    fn test_label_from_ref_underscores() {
        assert_eq!(label_from_ref("my_cool_pack"), "My Cool Pack");
    }

    #[test]
    fn test_label_from_ref_hyphens() {
        assert_eq!(label_from_ref("my-cool-pack"), "My Cool Pack");
    }

    #[test]
    fn test_label_from_ref_dots() {
        assert_eq!(label_from_ref("my.cool.pack"), "My Cool Pack");
    }

    #[test]
    fn test_label_from_ref_mixed_separators() {
        assert_eq!(label_from_ref("my_cool-pack.v2"), "My Cool Pack V2");
    }

    #[test]
    fn test_label_from_ref_single_word() {
        assert_eq!(label_from_ref("slack"), "Slack");
    }

    #[test]
    fn test_label_from_ref_already_capitalized() {
        assert_eq!(label_from_ref("AWS"), "AWS");
    }

    #[test]
    fn test_label_from_ref_empty() {
        assert_eq!(label_from_ref(""), "");
    }

    #[test]
    fn test_label_from_ref_consecutive_separators() {
        assert_eq!(label_from_ref("my__pack"), "My Pack");
    }
}

View File

@@ -42,7 +42,7 @@ pub enum TriggerCommands {
trigger_ref: String,
/// Skip confirmation prompt
#[arg(short, long)]
#[arg(long)]
yes: bool,
},
}

View File

@@ -0,0 +1,699 @@
use anyhow::{Context, Result};
use clap::Subcommand;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use crate::client::ApiClient;
use crate::config::CliConfig;
use crate::output::{self, OutputFormat};
/// Subcommands of the `workflow` CLI command. The `///` comments on each
/// variant and field double as clap help text.
#[derive(Subcommand)]
pub enum WorkflowCommands {
    /// Upload a workflow action from local YAML files to an existing pack.
    ///
    /// Reads the action YAML file, finds the referenced workflow YAML file
    /// via its `workflow_file` field, and uploads both to the API. The pack
    /// is determined from the action ref (e.g. `mypack.deploy` → pack `mypack`).
    Upload {
        /// Path to the action YAML file (e.g. actions/deploy.yaml).
        /// Must contain a `workflow_file` field pointing to the workflow YAML.
        action_file: String,
        /// Force update if the workflow already exists
        #[arg(short, long)]
        force: bool,
    },
    /// List workflows
    // Note: when --pack is given the list is served by the pack-scoped
    // route and the --tags/--search filters are not applied (see handle_list).
    List {
        /// Filter by pack reference
        #[arg(long)]
        pack: Option<String>,
        /// Filter by tag (comma-separated)
        #[arg(long)]
        tags: Option<String>,
        /// Search term (matches label/description)
        #[arg(long)]
        search: Option<String>,
    },
    /// Show details of a specific workflow
    Show {
        /// Workflow reference (e.g. core.install_packs)
        workflow_ref: String,
    },
    /// Delete a workflow
    Delete {
        /// Workflow reference (e.g. core.install_packs)
        workflow_ref: String,
        /// Skip confirmation prompt
        #[arg(long)]
        yes: bool,
    },
}
// ── Local YAML models (for parsing action YAML files) ──────────────────
/// Minimal representation of an action YAML file, capturing only the fields
/// we need to build a `SaveWorkflowFileRequest`.
///
/// Unknown keys in the YAML are ignored (serde's default behavior), so this
/// tolerates action files carrying fields we don't model here.
#[derive(Debug, Deserialize)]
struct ActionYaml {
    /// Full action ref, e.g. `python_example.timeline_demo`
    #[serde(rename = "ref")]
    action_ref: String,
    /// Human-readable label (empty string when absent from the YAML)
    #[serde(default)]
    label: String,
    /// Description
    #[serde(default)]
    description: Option<String>,
    /// Relative path to the workflow YAML from the `actions/` directory.
    /// `None` means this is a regular action, not a workflow action.
    workflow_file: Option<String>,
    /// Parameter schema (flat format)
    #[serde(default)]
    parameters: Option<serde_json::Value>,
    /// Output schema (flat format)
    #[serde(default)]
    output: Option<serde_json::Value>,
    /// Tags
    #[serde(default)]
    tags: Option<Vec<String>>,
    /// Whether the action is enabled
    #[serde(default)]
    enabled: Option<bool>,
}
// ── API DTOs ────────────────────────────────────────────────────────────
/// Mirrors the API's `SaveWorkflowFileRequest`.
///
/// Sent as JSON on both the create (`POST /packs/{pack}/workflow-files`)
/// and update (`PUT /workflows/{ref}/file`) routes; all `Option` fields
/// are omitted from the payload when `None`.
#[derive(Debug, Serialize)]
struct SaveWorkflowFileRequest {
    /// Workflow name — the part of the ref after the pack prefix.
    name: String,
    /// Human-readable label (falls back to `name` when the action has none).
    label: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    /// Workflow version (taken from the definition, defaulting to "1.0.0").
    version: String,
    /// Reference of the pack that owns the workflow.
    pack_ref: String,
    /// Full workflow definition blob (graph plus action-level fields).
    definition: serde_json::Value,
    #[serde(skip_serializing_if = "Option::is_none")]
    param_schema: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    out_schema: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tags: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    enabled: Option<bool>,
}
/// Full workflow record as returned by the API (used by the upload and
/// show commands), including the definition and schemas.
#[derive(Debug, Serialize, Deserialize)]
struct WorkflowResponse {
    id: i64,
    /// Full workflow reference, e.g. `mypack.deploy` (JSON key: `ref`).
    #[serde(rename = "ref")]
    workflow_ref: String,
    /// Numeric id of the owning pack.
    pack: i64,
    pack_ref: String,
    label: String,
    description: Option<String>,
    version: String,
    param_schema: Option<serde_json::Value>,
    out_schema: Option<serde_json::Value>,
    /// Full workflow definition blob (tasks, transitions, etc.).
    definition: serde_json::Value,
    tags: Vec<String>,
    enabled: bool,
    /// Timestamps as strings; rendered via `output::format_timestamp`.
    created: String,
    updated: String,
}
/// Compact workflow record used by the list endpoints — same identity and
/// metadata fields as `WorkflowResponse` but without the definition or the
/// parameter/output schemas.
#[derive(Debug, Serialize, Deserialize)]
struct WorkflowSummary {
    id: i64,
    /// Full workflow reference (JSON key: `ref`).
    #[serde(rename = "ref")]
    workflow_ref: String,
    pack_ref: String,
    label: String,
    description: Option<String>,
    version: String,
    tags: Vec<String>,
    enabled: bool,
    created: String,
    updated: String,
}
// ── Command dispatch ────────────────────────────────────────────────────
/// Entry point for `workflow` subcommands.
///
/// Destructures the parsed `WorkflowCommands` variant and forwards its
/// fields to the matching handler along with the shared CLI settings
/// (config profile, API URL override, output format).
pub async fn handle_workflow_command(
    profile: &Option<String>,
    command: WorkflowCommands,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    match command {
        WorkflowCommands::Upload { action_file, force } => {
            handle_upload(profile, action_file, force, api_url, output_format).await
        }
        WorkflowCommands::List { pack, tags, search } => {
            handle_list(profile, pack, tags, search, api_url, output_format).await
        }
        WorkflowCommands::Show { workflow_ref } => {
            handle_show(profile, workflow_ref, api_url, output_format).await
        }
        WorkflowCommands::Delete { workflow_ref, yes } => {
            handle_delete(profile, workflow_ref, yes, api_url, output_format).await
        }
    }
}
// ── Upload ──────────────────────────────────────────────────────────────
/// Upload a workflow action (action YAML + referenced workflow YAML) to an
/// existing pack.
///
/// Steps: read/parse the action YAML, derive `pack_ref`/`workflow_name`
/// from its ref, resolve and parse the workflow file it points at, merge
/// the action-level fields into the definition blob, then `POST` it to the
/// pack's workflow-files route — falling back to a `PUT` update when the
/// server reports a conflict and `--force` was given.
async fn handle_upload(
    profile: &Option<String>,
    action_file: String,
    force: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let action_path = Path::new(&action_file);
    // ── 1. Validate & read the action YAML ──────────────────────────────
    if !action_path.exists() {
        anyhow::bail!("Action YAML file not found: {}", action_file);
    }
    if !action_path.is_file() {
        anyhow::bail!("Path is not a file: {}", action_file);
    }
    let action_yaml_content =
        std::fs::read_to_string(action_path).context("Failed to read action YAML file")?;
    let action: ActionYaml = serde_yaml_ng::from_str(&action_yaml_content)
        .context("Failed to parse action YAML file")?;
    // ── 2. Extract pack_ref and workflow name from the action ref ────────
    let (pack_ref, workflow_name) = split_action_ref(&action.action_ref)?;
    // ── 3. Resolve the workflow_file path ───────────────────────────────
    // Only workflow actions carry `workflow_file`; a missing field means the
    // user pointed us at a regular action, which this command can't upload.
    let workflow_file_rel = action.workflow_file.as_deref().ok_or_else(|| {
        anyhow::anyhow!(
            "Action YAML does not contain a 'workflow_file' field. \
            This command requires a workflow action — regular actions should be \
            uploaded as part of a pack."
        )
    })?;
    // workflow_file is relative to the actions/ directory. The action YAML is
    // typically at `<pack>/actions/<name>.yaml`, so the workflow file is
    // resolved relative to the action YAML's parent directory.
    let workflow_path = resolve_workflow_path(action_path, workflow_file_rel)?;
    if !workflow_path.exists() {
        anyhow::bail!(
            "Workflow file not found: {}\n \
            (resolved from workflow_file: '{}' relative to '{}')",
            workflow_path.display(),
            workflow_file_rel,
            action_path
                .parent()
                .unwrap_or(Path::new("."))
                .display()
        );
    }
    // ── 4. Read and parse the workflow YAML ─────────────────────────────
    let workflow_yaml_content =
        std::fs::read_to_string(&workflow_path).context("Failed to read workflow YAML file")?;
    let workflow_definition: serde_json::Value =
        serde_yaml_ng::from_str(&workflow_yaml_content).context(format!(
            "Failed to parse workflow YAML file: {}",
            workflow_path.display()
        ))?;
    // Extract version from the workflow definition, defaulting to "1.0.0"
    let version = workflow_definition
        .get("version")
        .and_then(|v| v.as_str())
        .unwrap_or("1.0.0")
        .to_string();
    // ── 5. Build the API request ────────────────────────────────────────
    //
    // Merge the action-level fields from the workflow definition back into the
    // definition payload (the API's SaveWorkflowFileRequest.definition carries
    // the full blob; write_workflow_yaml on the server side strips the action-
    // level fields before writing the graph-only file).
    let mut definition_map: serde_json::Map<String, serde_json::Value> =
        if let Some(obj) = workflow_definition.as_object() {
            obj.clone()
        } else {
            // Non-object definitions (unlikely but possible YAML) start empty.
            serde_json::Map::new()
        };
    // Ensure action-level fields are present in the definition (the API and
    // web UI store the combined form in the database; the server splits them
    // into two files on disk). `entry(..).or_insert_with` means values already
    // present in the workflow YAML win over the action YAML's copies.
    if let Some(params) = &action.parameters {
        definition_map
            .entry("parameters".to_string())
            .or_insert_with(|| params.clone());
    }
    if let Some(out) = &action.output {
        definition_map
            .entry("output".to_string())
            .or_insert_with(|| out.clone());
    }
    let request = SaveWorkflowFileRequest {
        name: workflow_name.clone(),
        // Fall back to the workflow name when the action YAML has no label.
        label: if action.label.is_empty() {
            workflow_name.clone()
        } else {
            action.label.clone()
        },
        description: action.description.clone(),
        version,
        pack_ref: pack_ref.clone(),
        definition: serde_json::Value::Object(definition_map),
        param_schema: action.parameters.clone(),
        out_schema: action.output.clone(),
        tags: action.tags.clone(),
        enabled: action.enabled,
    };
    // ── 6. Print progress ───────────────────────────────────────────────
    // Progress chatter only in table mode, so JSON/YAML output stays clean.
    if output_format == OutputFormat::Table {
        output::print_info(&format!(
            "Uploading workflow action '{}.{}' to pack '{}'",
            pack_ref, workflow_name, pack_ref,
        ));
        output::print_info(&format!(" Action YAML: {}", action_path.display()));
        output::print_info(&format!(" Workflow YAML: {}", workflow_path.display()));
    }
    // ── 7. Send to API ──────────────────────────────────────────────────
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let workflow_ref = format!("{}.{}", pack_ref, workflow_name);
    // Try create first; if 409 Conflict and --force, fall back to update.
    let create_path = format!("/packs/{}/workflow-files", pack_ref);
    let result: Result<WorkflowResponse> = client.post(&create_path, &request).await;
    let response: WorkflowResponse = match result {
        Ok(resp) => resp,
        Err(err) => {
            // NOTE(review): conflict detection is string-based ("409" /
            // "conflict" in the error text) because the client surfaces
            // errors as anyhow messages — confirm against the client's
            // error format if that ever changes.
            let err_str = err.to_string();
            if err_str.contains("409") || err_str.to_lowercase().contains("conflict") {
                if !force {
                    anyhow::bail!(
                        "Workflow '{}' already exists. Use --force to update it.",
                        workflow_ref
                    );
                }
                if output_format == OutputFormat::Table {
                    output::print_info("Workflow already exists, updating...");
                }
                let update_path = format!("/workflows/{}/file", workflow_ref);
                client.put(&update_path, &request).await.context(
                    "Failed to update existing workflow. \
                    Check that the pack exists and the workflow ref is correct.",
                )?
            } else {
                return Err(err).context("Failed to upload workflow");
            }
        }
    };
    // ── 8. Print result ─────────────────────────────────────────────────
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&response, output_format)?;
        }
        OutputFormat::Table => {
            println!();
            output::print_success(&format!(
                "Workflow '{}' uploaded successfully",
                response.workflow_ref
            ));
            output::print_key_value_table(vec![
                ("ID", response.id.to_string()),
                ("Reference", response.workflow_ref.clone()),
                ("Pack", response.pack_ref.clone()),
                ("Label", response.label.clone()),
                ("Version", response.version.clone()),
                (
                    "Tags",
                    if response.tags.is_empty() {
                        "none".to_string()
                    } else {
                        response.tags.join(", ")
                    },
                ),
                ("Enabled", output::format_bool(response.enabled)),
            ]);
        }
    }
    Ok(())
}
// ── List ────────────────────────────────────────────────────────────────
/// List workflows, optionally filtered.
///
/// When `--pack` is given the list comes from the pack-scoped route
/// (`GET /packs/{pack}/workflows`); in that mode the `tags`/`search`
/// filters are not applied — they are only folded into the query string
/// of the global `GET /workflows` route.
async fn handle_list(
    profile: &Option<String>,
    pack: Option<String>,
    tags: Option<String>,
    search: Option<String>,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    // Build the request path: pack-scoped route, or /workflows with an
    // optional URL-encoded query string.
    let path = if let Some(ref pack_ref) = pack {
        format!("/packs/{}/workflows", pack_ref)
    } else {
        let mut query_parts: Vec<String> = Vec::new();
        if let Some(ref t) = tags {
            query_parts.push(format!("tags={}", urlencoding::encode(t)));
        }
        if let Some(ref s) = search {
            query_parts.push(format!("search={}", urlencoding::encode(s)));
        }
        if query_parts.is_empty() {
            "/workflows".to_string()
        } else {
            format!("/workflows?{}", query_parts.join("&"))
        }
    };
    let workflows: Vec<WorkflowSummary> = client.get(&path).await?;
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&workflows, output_format)?;
        }
        OutputFormat::Table => {
            if workflows.is_empty() {
                output::print_info("No workflows found");
            } else {
                let mut table = output::create_table();
                output::add_header(
                    &mut table,
                    vec!["ID", "Reference", "Pack", "Label", "Version", "Enabled", "Tags"],
                );
                for wf in &workflows {
                    table.add_row(vec![
                        wf.id.to_string(),
                        wf.workflow_ref.clone(),
                        wf.pack_ref.clone(),
                        // Long labels/tags are truncated to keep rows readable.
                        output::truncate(&wf.label, 30),
                        wf.version.clone(),
                        output::format_bool(wf.enabled),
                        if wf.tags.is_empty() {
                            "-".to_string()
                        } else {
                            output::truncate(&wf.tags.join(", "), 25)
                        },
                    ]);
                }
                println!("{}", table);
                output::print_info(&format!("{} workflow(s) found", workflows.len()));
            }
        }
    }
    Ok(())
}
// ── Show ────────────────────────────────────────────────────────────────
/// Fetch and display a single workflow identified by `workflow_ref`.
///
/// JSON/YAML output prints the raw API response; table output renders a
/// key/value summary followed by the parameter schema, output schema, and
/// a per-task overview derived from the stored `definition` JSON.
async fn handle_show(
    profile: &Option<String>,
    workflow_ref: String,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let path = format!("/workflows/{}", workflow_ref);
    let workflow: WorkflowResponse = client.get(&path).await?;
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&workflow, output_format)?;
        }
        OutputFormat::Table => {
            output::print_section(&format!("Workflow: {}", workflow.workflow_ref));
            output::print_key_value_table(vec![
                ("ID", workflow.id.to_string()),
                ("Reference", workflow.workflow_ref.clone()),
                ("Pack", workflow.pack_ref.clone()),
                ("Pack ID", workflow.pack.to_string()),
                ("Label", workflow.label.clone()),
                (
                    "Description",
                    workflow
                        .description
                        .clone()
                        .unwrap_or_else(|| "-".to_string()),
                ),
                ("Version", workflow.version.clone()),
                ("Enabled", output::format_bool(workflow.enabled)),
                (
                    "Tags",
                    if workflow.tags.is_empty() {
                        "none".to_string()
                    } else {
                        workflow.tags.join(", ")
                    },
                ),
                ("Created", output::format_timestamp(&workflow.created)),
                ("Updated", output::format_timestamp(&workflow.updated)),
            ]);
            // Show parameter schema if present (null or empty objects are skipped)
            if let Some(ref params) = workflow.param_schema {
                if !params.is_null() && params.as_object().is_some_and(|o| !o.is_empty()) {
                    output::print_section("Parameters");
                    let yaml = serde_yaml_ng::to_string(params)?;
                    println!("{}", yaml);
                }
            }
            // Show output schema if present (null or empty objects are skipped)
            if let Some(ref out) = workflow.out_schema {
                if !out.is_null() && out.as_object().is_some_and(|o| !o.is_empty()) {
                    output::print_section("Output Schema");
                    let yaml = serde_yaml_ng::to_string(out)?;
                    println!("{}", yaml);
                }
            }
            // Show task summary from definition
            if let Some(tasks) = workflow.definition.get("tasks") {
                if let Some(arr) = tasks.as_array() {
                    output::print_section("Tasks");
                    let mut table = output::create_table();
                    output::add_header(&mut table, vec!["#", "Name", "Action", "Transitions"]);
                    for (i, task) in arr.iter().enumerate() {
                        let name = task
                            .get("name")
                            .and_then(|v| v.as_str())
                            .unwrap_or("?");
                        let action = task
                            .get("action")
                            .and_then(|v| v.as_str())
                            .unwrap_or("-");
                        // A missing/empty `next` array means the task is terminal.
                        let transition_count = task
                            .get("next")
                            .and_then(|v| v.as_array())
                            .map(|a| {
                                // Count total target tasks across all transitions
                                a.iter()
                                    .filter_map(|t| {
                                        t.get("do").and_then(|d| d.as_array()).map(|d| d.len())
                                    })
                                    .sum::<usize>()
                            })
                            .unwrap_or(0);
                        let transitions_str = if transition_count == 0 {
                            "terminal".to_string()
                        } else {
                            format!("{} target(s)", transition_count)
                        };
                        table.add_row(vec![
                            (i + 1).to_string(),
                            name.to_string(),
                            output::truncate(action, 35),
                            transitions_str,
                        ]);
                    }
                    println!("{}", table);
                }
            }
        }
    }
    Ok(())
}
// ── Delete ──────────────────────────────────────────────────────────────
/// Delete a workflow by ref, prompting for confirmation unless `--yes`
/// was passed or the output mode is non-interactive (JSON/YAML).
async fn handle_delete(
    profile: &Option<String>,
    workflow_ref: String,
    yes: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);

    // Only prompt in table mode; JSON/YAML output implies scripting.
    if !yes && output_format == OutputFormat::Table {
        let proceed = dialoguer::Confirm::new()
            .with_prompt(format!(
                "Are you sure you want to delete workflow '{}'?",
                workflow_ref
            ))
            .default(false)
            .interact()?;
        if !proceed {
            output::print_info("Delete cancelled");
            return Ok(());
        }
    }

    client
        .delete_no_response(&format!("/workflows/{}", workflow_ref))
        .await?;

    if output_format == OutputFormat::Table {
        output::print_success(&format!("Workflow '{}' deleted successfully", workflow_ref));
    } else {
        let msg = serde_json::json!({"message": format!("Workflow '{}' deleted", workflow_ref)});
        output::print_output(&msg, output_format)?;
    }
    Ok(())
}
// ── Helpers ─────────────────────────────────────────────────────────────
/// Split an action ref like `pack_name.action_name` into `(pack_ref, name)`.
///
/// Multi-segment pack refs split at the LAST dot: `org.pack.action` yields
/// `("org.pack", "action")`. Errors when there is no dot or either side of
/// the split is empty.
fn split_action_ref(action_ref: &str) -> Result<(String, String)> {
    match action_ref.rsplit_once('.') {
        None => anyhow::bail!(
            "Invalid action ref '{}': expected format 'pack_ref.name' (at least one dot)",
            action_ref
        ),
        Some((pack_ref, name)) if pack_ref.is_empty() || name.is_empty() => anyhow::bail!(
            "Invalid action ref '{}': both pack_ref and name must be non-empty",
            action_ref
        ),
        Some((pack_ref, name)) => Ok((pack_ref.to_string(), name.to_string())),
    }
}
/// Resolve the workflow YAML path from the action YAML's location and the
/// `workflow_file` value.
///
/// `workflow_file` is interpreted relative to the action YAML's parent
/// directory (typically `<pack>/actions/`), so a value like
/// `workflows/deploy.workflow.yaml` resolves next to the action file.
///
/// The returned path is NOT canonicalized and the file is not required to
/// exist yet — callers check existence before reading. (The previous
/// comment claimed canonicalization that was never performed.)
fn resolve_workflow_path(action_yaml_path: &Path, workflow_file: &str) -> Result<PathBuf> {
    // A bare filename has no parent; fall back to the current directory.
    let action_dir = action_yaml_path.parent().unwrap_or_else(|| Path::new("."));
    Ok(action_dir.join(workflow_file))
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_split_action_ref_simple() {
        // Single-segment pack ref: split at the only dot.
        assert_eq!(
            split_action_ref("core.echo").unwrap(),
            ("core".to_string(), "echo".to_string())
        );
    }

    #[test]
    fn test_split_action_ref_multi_segment_pack() {
        // Multi-segment pack ref: only the last segment is the name.
        assert_eq!(
            split_action_ref("org.infra.deploy").unwrap(),
            ("org.infra".to_string(), "deploy".to_string())
        );
    }

    #[test]
    fn test_split_action_ref_no_dot() {
        assert!(split_action_ref("nodot").is_err());
    }

    #[test]
    fn test_split_action_ref_empty_parts() {
        assert!(split_action_ref(".name").is_err());
        assert!(split_action_ref("pack.").is_err());
    }

    #[test]
    fn test_resolve_workflow_path() {
        let resolved = resolve_workflow_path(
            Path::new("/packs/mypack/actions/deploy.yaml"),
            "workflows/deploy.workflow.yaml",
        )
        .unwrap();
        assert_eq!(
            resolved,
            PathBuf::from("/packs/mypack/actions/workflows/deploy.workflow.yaml")
        );
    }

    #[test]
    fn test_resolve_workflow_path_relative() {
        let resolved = resolve_workflow_path(
            Path::new("actions/deploy.yaml"),
            "workflows/deploy.workflow.yaml",
        )
        .unwrap();
        assert_eq!(
            resolved,
            PathBuf::from("actions/workflows/deploy.workflow.yaml")
        );
    }
}

View File

@@ -16,6 +16,7 @@ use commands::{
rule::RuleCommands,
sensor::SensorCommands,
trigger::TriggerCommands,
workflow::WorkflowCommands,
};
#[derive(Parser)]
@@ -78,6 +79,11 @@ enum Commands {
#[command(subcommand)]
command: ExecutionCommands,
},
/// Workflow management
Workflow {
#[command(subcommand)]
command: WorkflowCommands,
},
/// Trigger management
Trigger {
#[command(subcommand)]
@@ -172,6 +178,15 @@ async fn main() {
)
.await
}
Commands::Workflow { command } => {
commands::workflow::handle_workflow_command(
&cli.profile,
command,
&cli.api_url,
output_format,
)
.await
}
Commands::Trigger { command } => {
commands::trigger::handle_trigger_command(
&cli.profile,

View File

@@ -438,3 +438,38 @@ pub async fn mock_not_found(server: &MockServer, path_pattern: &str) {
.mount(server)
.await;
}
/// Mock a successful pack create response (POST /api/v1/packs).
///
/// Matches any request body and replies 201 with a fully-populated pack
/// object wrapped in the API's `data` envelope.
#[allow(dead_code)]
pub async fn mock_pack_create(server: &MockServer) {
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .respond_with(ResponseTemplate::new(201).set_body_json(json!({
            "data": {
                "id": 42,
                "ref": "my_pack",
                "label": "My Pack",
                "description": "A test pack",
                "version": "0.1.0",
                "author": null,
                "enabled": true,
                "tags": ["test"],
                "created": "2024-01-01T00:00:00Z",
                "updated": "2024-01-01T00:00:00Z"
            }
        })))
        .mount(server)
        .await;
}
/// Mock a 409 conflict response for pack create (POST /api/v1/packs),
/// simulating a duplicate pack ref.
#[allow(dead_code)]
pub async fn mock_pack_create_conflict(server: &MockServer) {
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .respond_with(ResponseTemplate::new(409).set_body_json(json!({
            "error": "Pack with ref 'my_pack' already exists"
        })))
        .mount(server)
        .await;
}

View File

@@ -4,6 +4,11 @@
use assert_cmd::Command;
use predicates::prelude::*;
use serde_json::json;
use wiremock::{
matchers::{body_json, method, path},
Mock, ResponseTemplate,
};
mod common;
use common::*;
@@ -222,6 +227,231 @@ async fn test_pack_get_json_output() {
.stdout(predicate::str::contains(r#""ref": "core""#));
}
// ── pack create tests ──────────────────────────────────────────────────
#[tokio::test]
async fn test_pack_create_non_interactive() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_pack_create(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create")
.arg("--ref")
.arg("my_pack")
.arg("--label")
.arg("My Pack")
.arg("--description")
.arg("A test pack")
.arg("--pack-version")
.arg("0.1.0")
.arg("--tags")
.arg("test");
cmd.assert()
.success()
.stdout(predicate::str::contains("my_pack"))
.stdout(predicate::str::contains("created successfully"));
}
#[tokio::test]
async fn test_pack_create_json_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_pack_create(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--json")
.arg("pack")
.arg("create")
.arg("--ref")
.arg("my_pack");
cmd.assert()
.success()
.stdout(predicate::str::contains(r#""ref": "my_pack""#))
.stdout(predicate::str::contains(r#""id": 42"#));
}
#[tokio::test]
async fn test_pack_create_conflict() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_pack_create_conflict(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create")
.arg("--ref")
.arg("my_pack");
cmd.assert()
.failure()
.stderr(predicate::str::contains("already exists"));
}
#[tokio::test]
async fn test_pack_create_missing_ref() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create");
cmd.assert()
.failure()
.stderr(predicate::str::contains("Pack ref is required"));
}
#[tokio::test]
async fn test_pack_create_default_label_from_ref() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
// Use a custom mock that validates the request body contains the derived label
Mock::given(method("POST"))
.and(path("/api/v1/packs"))
.and(body_json(json!({
"ref": "my_cool_pack",
"label": "My Cool Pack",
"version": "0.1.0",
"tags": []
})))
.respond_with(ResponseTemplate::new(201).set_body_json(json!({
"data": {
"id": 99,
"ref": "my_cool_pack",
"label": "My Cool Pack",
"version": "0.1.0",
"enabled": true,
"created": "2024-01-01T00:00:00Z",
"updated": "2024-01-01T00:00:00Z"
}
})))
.mount(&fixture.mock_server)
.await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create")
.arg("--ref")
.arg("my_cool_pack");
cmd.assert()
.success()
.stdout(predicate::str::contains("my_cool_pack"))
.stdout(predicate::str::contains("created successfully"));
}
#[tokio::test]
async fn test_pack_create_default_version() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
// Verify the default version "0.1.0" is sent when --pack-version is not specified
Mock::given(method("POST"))
.and(path("/api/v1/packs"))
.and(body_json(json!({
"ref": "versioned_pack",
"label": "Versioned Pack",
"version": "0.1.0",
"tags": []
})))
.respond_with(ResponseTemplate::new(201).set_body_json(json!({
"data": {
"id": 7,
"ref": "versioned_pack",
"label": "Versioned Pack",
"version": "0.1.0",
"enabled": true,
"created": "2024-01-01T00:00:00Z",
"updated": "2024-01-01T00:00:00Z"
}
})))
.mount(&fixture.mock_server)
.await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create")
.arg("--ref")
.arg("versioned_pack");
cmd.assert().success();
}
#[tokio::test]
async fn test_pack_create_with_tags() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
Mock::given(method("POST"))
.and(path("/api/v1/packs"))
.and(body_json(json!({
"ref": "tagged",
"label": "Tagged",
"version": "0.1.0",
"tags": ["networking", "monitoring"]
})))
.respond_with(ResponseTemplate::new(201).set_body_json(json!({
"data": {
"id": 10,
"ref": "tagged",
"label": "Tagged",
"version": "0.1.0",
"tags": ["networking", "monitoring"],
"enabled": true,
"created": "2024-01-01T00:00:00Z",
"updated": "2024-01-01T00:00:00Z"
}
})))
.mount(&fixture.mock_server)
.await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("pack")
.arg("create")
.arg("--ref")
.arg("tagged")
.arg("--tags")
.arg("networking,monitoring");
cmd.assert().success();
}
#[tokio::test]
async fn test_pack_list_empty_result() {
let fixture = TestFixture::new().await;

View File

@@ -0,0 +1,777 @@
//! Integration tests for CLI workflow commands
#![allow(deprecated)]
use assert_cmd::Command;
use predicates::prelude::*;
use serde_json::json;
use std::fs;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
mod common;
use common::*;
// ── Mock helpers ────────────────────────────────────────────────────────
/// Mock `GET /api/v1/workflows` returning two workflow summaries
/// (`core.install_packs` and `mypack.deploy`).
async fn mock_workflow_list(server: &MockServer) {
    Mock::given(method("GET"))
        .and(path("/api/v1/workflows"))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "data": [
                {
                    "id": 1,
                    "ref": "core.install_packs",
                    "pack_ref": "core",
                    "label": "Install Packs",
                    "description": "Install one or more packs",
                    "version": "1.0.0",
                    "tags": ["core", "packs"],
                    "enabled": true,
                    "created": "2024-01-01T00:00:00Z",
                    "updated": "2024-01-01T00:00:00Z"
                },
                {
                    "id": 2,
                    "ref": "mypack.deploy",
                    "pack_ref": "mypack",
                    "label": "Deploy App",
                    "description": "Deploy an application",
                    "version": "2.0.0",
                    "tags": ["deploy"],
                    "enabled": true,
                    "created": "2024-01-02T00:00:00Z",
                    "updated": "2024-01-02T00:00:00Z"
                }
            ]
        })))
        .mount(server)
        .await;
}
/// Mock `GET /api/v1/packs/{pack_ref}/workflows` returning a single
/// workflow (`{pack_ref}.example_workflow`).
async fn mock_workflow_list_by_pack(server: &MockServer, pack_ref: &str) {
    let p = format!("/api/v1/packs/{}/workflows", pack_ref);
    Mock::given(method("GET"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "data": [
                {
                    "id": 1,
                    "ref": format!("{}.example_workflow", pack_ref),
                    "pack_ref": pack_ref,
                    "label": "Example Workflow",
                    "description": "An example workflow",
                    "version": "1.0.0",
                    "tags": [],
                    "enabled": true,
                    "created": "2024-01-01T00:00:00Z",
                    "updated": "2024-01-01T00:00:00Z"
                }
            ]
        })))
        .mount(server)
        .await;
}
/// Mock `GET /api/v1/workflows/{workflow_ref}` returning a full workflow
/// record including param/out schemas and a two-task definition
/// (step1 -> step2), which the `show` command renders as a task table.
async fn mock_workflow_get(server: &MockServer, workflow_ref: &str) {
    let p = format!("/api/v1/workflows/{}", workflow_ref);
    Mock::given(method("GET"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "data": {
                "id": 1,
                "ref": workflow_ref,
                "pack": 1,
                "pack_ref": "mypack",
                "label": "My Workflow",
                "description": "A test workflow",
                "version": "1.0.0",
                "param_schema": {
                    "url": {"type": "string", "required": true},
                    "timeout": {"type": "integer", "default": 30}
                },
                "out_schema": {
                    "status": {"type": "string"}
                },
                "definition": {
                    "version": "1.0.0",
                    "vars": {"result": null},
                    "tasks": [
                        {
                            "name": "step1",
                            "action": "core.echo",
                            "input": {"message": "hello"},
                            "next": [
                                {"when": "{{ succeeded() }}", "do": ["step2"]}
                            ]
                        },
                        {
                            "name": "step2",
                            "action": "core.echo",
                            "input": {"message": "done"}
                        }
                    ]
                },
                "tags": ["test", "demo"],
                "enabled": true,
                "created": "2024-01-01T00:00:00Z",
                "updated": "2024-01-01T00:00:00Z"
            }
        })))
        .mount(server)
        .await;
}
/// Mock `DELETE /api/v1/workflows/{workflow_ref}` replying 204 No Content.
async fn mock_workflow_delete(server: &MockServer, workflow_ref: &str) {
    let p = format!("/api/v1/workflows/{}", workflow_ref);
    Mock::given(method("DELETE"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(204))
        .mount(server)
        .await;
}
/// Mock `POST /api/v1/packs/{pack_ref}/workflow-files` replying 201 with
/// the created workflow (`{pack_ref}.deploy`) — the happy-path upload.
async fn mock_workflow_save(server: &MockServer, pack_ref: &str) {
    let p = format!("/api/v1/packs/{}/workflow-files", pack_ref);
    Mock::given(method("POST"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(201).set_body_json(json!({
            "data": {
                "id": 10,
                "ref": format!("{}.deploy", pack_ref),
                "pack": 1,
                "pack_ref": pack_ref,
                "label": "Deploy App",
                "description": "Deploy the application",
                "version": "1.0.0",
                "param_schema": null,
                "out_schema": null,
                "definition": {"version": "1.0.0", "tasks": []},
                "tags": ["deploy"],
                "enabled": true,
                "created": "2024-01-10T00:00:00Z",
                "updated": "2024-01-10T00:00:00Z"
            }
        })))
        .mount(server)
        .await;
}
/// Mock a 409 conflict for workflow upload
/// (`POST /api/v1/packs/{pack_ref}/workflow-files`), simulating an
/// already-existing workflow ref so `--force` paths can be exercised.
async fn mock_workflow_save_conflict(server: &MockServer, pack_ref: &str) {
    let p = format!("/api/v1/packs/{}/workflow-files", pack_ref);
    Mock::given(method("POST"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(409).set_body_json(json!({
            "error": "Workflow with ref 'mypack.deploy' already exists"
        })))
        .mount(server)
        .await;
}
/// Mock `PUT /api/v1/workflows/{workflow_ref}/file` replying 200 with the
/// updated workflow — used by the forced-overwrite upload path.
async fn mock_workflow_update(server: &MockServer, workflow_ref: &str) {
    let p = format!("/api/v1/workflows/{}/file", workflow_ref);
    Mock::given(method("PUT"))
        .and(path(p.as_str()))
        .respond_with(ResponseTemplate::new(200).set_body_json(json!({
            "data": {
                "id": 10,
                "ref": workflow_ref,
                "pack": 1,
                "pack_ref": "mypack",
                "label": "Deploy App",
                "description": "Deploy the application",
                "version": "1.0.0",
                "param_schema": null,
                "out_schema": null,
                "definition": {"version": "1.0.0", "tasks": []},
                "tags": ["deploy"],
                "enabled": true,
                "created": "2024-01-10T00:00:00Z",
                "updated": "2024-01-10T12:00:00Z"
            }
        })))
        .mount(server)
        .await;
}
// ── Helper to write action + workflow YAML to temp dirs ─────────────────
/// On-disk fixture: a temp directory holding `actions/<name>.yaml` plus the
/// workflow YAML that its `workflow_file` field points at.
struct WorkflowFixture {
    // Keeps the temp dir alive for the fixture's lifetime (removed on drop).
    _dir: tempfile::TempDir,
    // Absolute path to the generated action YAML, handed to `workflow upload`.
    action_yaml_path: String,
}
impl WorkflowFixture {
    /// Build the fixture: writes `actions/<name>.yaml` for `action_ref` with
    /// its `workflow_file` field set to `workflow_file`, and writes a
    /// three-task workflow YAML under `actions/workflows/`.
    fn new(action_ref: &str, workflow_file: &str) -> Self {
        let dir = tempfile::TempDir::new().expect("Failed to create temp dir");
        let actions_dir = dir.path().join("actions");
        let workflows_dir = actions_dir.join("workflows");
        fs::create_dir_all(&workflows_dir).unwrap();
        // Write the action YAML
        let action_yaml = format!(
            r#"ref: {}
label: "Deploy App"
description: "Deploy the application"
enabled: true
workflow_file: {}
parameters:
  environment:
    type: string
    required: true
    description: "Target environment"
  version:
    type: string
    default: "latest"
output:
  status:
    type: string
tags:
  - deploy
"#,
            action_ref, workflow_file,
        );
        // The action file is named after the last ref segment, e.g.
        // `mypack.deploy` -> `actions/deploy.yaml`.
        let action_name = action_ref.rsplit('.').next().unwrap();
        let action_path = actions_dir.join(format!("{}.yaml", action_name));
        fs::write(&action_path, &action_yaml).unwrap();
        // Write the workflow YAML
        let workflow_yaml = r#"version: "1.0.0"
vars:
  deploy_result: null
tasks:
  - name: prepare
    action: core.echo
    input:
      message: "Preparing deployment"
    next:
      - when: "{{ succeeded() }}"
        do:
          - deploy
  - name: deploy
    action: core.echo
    input:
      message: "Deploying"
    next:
      - when: "{{ succeeded() }}"
        do:
          - verify
  - name: verify
    action: core.echo
    input:
      message: "Verifying"
output_map:
  status: "{{ 'success' if workflow.deploy_result else 'unknown' }}"
"#;
        let workflow_path = workflows_dir.join(format!("{}.workflow.yaml", action_name));
        fs::write(&workflow_path, workflow_yaml).unwrap();
        Self {
            action_yaml_path: action_path.to_string_lossy().to_string(),
            _dir: dir,
        }
    }
}
// ── List tests ──────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_list_authenticated() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_list(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("list");
cmd.assert()
.success()
.stdout(predicate::str::contains("core.install_packs"))
.stdout(predicate::str::contains("mypack.deploy"))
.stdout(predicate::str::contains("2 workflow(s) found"));
}
#[tokio::test]
async fn test_workflow_list_by_pack() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_list_by_pack(&fixture.mock_server, "core").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("list")
.arg("--pack")
.arg("core");
cmd.assert()
.success()
.stdout(predicate::str::contains("core.example_workflow"))
.stdout(predicate::str::contains("1 workflow(s) found"));
}
#[tokio::test]
async fn test_workflow_list_json_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_list(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--json")
.arg("workflow")
.arg("list");
cmd.assert()
.success()
.stdout(predicate::str::contains("\"core.install_packs\""))
.stdout(predicate::str::contains("\"mypack.deploy\""));
}
#[tokio::test]
async fn test_workflow_list_yaml_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_list(&fixture.mock_server).await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--yaml")
.arg("workflow")
.arg("list");
cmd.assert()
.success()
.stdout(predicate::str::contains("core.install_packs"))
.stdout(predicate::str::contains("mypack.deploy"));
}
#[tokio::test]
async fn test_workflow_list_empty() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
Mock::given(method("GET"))
.and(path("/api/v1/workflows"))
.respond_with(ResponseTemplate::new(200).set_body_json(json!({
"data": []
})))
.mount(&fixture.mock_server)
.await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("list");
cmd.assert()
.success()
.stdout(predicate::str::contains("No workflows found"));
}
#[tokio::test]
async fn test_workflow_list_unauthenticated() {
let fixture = TestFixture::new().await;
fixture.write_default_config();
mock_unauthorized(&fixture.mock_server, "/api/v1/workflows").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("list");
cmd.assert().failure();
}
// ── Show tests ──────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_show() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_get(&fixture.mock_server, "mypack.my_workflow").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("show")
.arg("mypack.my_workflow");
cmd.assert()
.success()
.stdout(predicate::str::contains("mypack.my_workflow"))
.stdout(predicate::str::contains("My Workflow"))
.stdout(predicate::str::contains("1.0.0"))
.stdout(predicate::str::contains("test, demo"))
// Tasks table should show task names
.stdout(predicate::str::contains("step1"))
.stdout(predicate::str::contains("step2"))
.stdout(predicate::str::contains("core.echo"));
}
#[tokio::test]
async fn test_workflow_show_json_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_get(&fixture.mock_server, "mypack.my_workflow").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--json")
.arg("workflow")
.arg("show")
.arg("mypack.my_workflow");
cmd.assert()
.success()
.stdout(predicate::str::contains("\"mypack.my_workflow\""))
.stdout(predicate::str::contains("\"My Workflow\""))
.stdout(predicate::str::contains("\"definition\""));
}
#[tokio::test]
async fn test_workflow_show_not_found() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_not_found(&fixture.mock_server, "/api/v1/workflows/nonexistent.wf").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("show")
.arg("nonexistent.wf");
cmd.assert().failure();
}
// ── Delete tests ────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_delete_with_yes_flag() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_delete(&fixture.mock_server, "mypack.my_workflow").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("delete")
.arg("mypack.my_workflow")
.arg("--yes");
cmd.assert()
.success()
.stdout(predicate::str::contains("deleted successfully"));
}
#[tokio::test]
async fn test_workflow_delete_json_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
mock_workflow_delete(&fixture.mock_server, "mypack.my_workflow").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--json")
.arg("workflow")
.arg("delete")
.arg("mypack.my_workflow")
.arg("--yes");
cmd.assert()
.success()
.stdout(predicate::str::contains("\"message\""))
.stdout(predicate::str::contains("deleted"));
}
// ── Upload tests ────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_upload_success() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let wf_fixture =
WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
mock_workflow_save(&fixture.mock_server, "mypack").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(&wf_fixture.action_yaml_path);
cmd.assert()
.success()
.stdout(predicate::str::contains("uploaded successfully"))
.stdout(predicate::str::contains("mypack.deploy"));
}
#[tokio::test]
async fn test_workflow_upload_json_output() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let wf_fixture =
WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
mock_workflow_save(&fixture.mock_server, "mypack").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("--json")
.arg("workflow")
.arg("upload")
.arg(&wf_fixture.action_yaml_path);
cmd.assert()
.success()
.stdout(predicate::str::contains("\"mypack.deploy\""))
.stdout(predicate::str::contains("\"Deploy App\""));
}
#[tokio::test]
async fn test_workflow_upload_conflict_without_force() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let wf_fixture =
WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
mock_workflow_save_conflict(&fixture.mock_server, "mypack").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(&wf_fixture.action_yaml_path);
cmd.assert()
.failure()
.stderr(predicate::str::contains("already exists"))
.stderr(predicate::str::contains("--force"));
}
#[tokio::test]
async fn test_workflow_upload_conflict_with_force() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let wf_fixture =
WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
mock_workflow_save_conflict(&fixture.mock_server, "mypack").await;
mock_workflow_update(&fixture.mock_server, "mypack.deploy").await;
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(&wf_fixture.action_yaml_path)
.arg("--force");
cmd.assert()
.success()
.stdout(predicate::str::contains("uploaded successfully"));
}
#[tokio::test]
async fn test_workflow_upload_missing_action_file() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg("/nonexistent/path/action.yaml");
cmd.assert()
.failure()
.stderr(predicate::str::contains("not found"));
}
#[tokio::test]
async fn test_workflow_upload_missing_workflow_file() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
// Create a temp dir with only the action YAML, no workflow file
let dir = tempfile::TempDir::new().unwrap();
let actions_dir = dir.path().join("actions");
fs::create_dir_all(&actions_dir).unwrap();
let action_yaml = r#"ref: mypack.deploy
label: "Deploy App"
workflow_file: workflows/deploy.workflow.yaml
"#;
let action_path = actions_dir.join("deploy.yaml");
fs::write(&action_path, action_yaml).unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(action_path.to_string_lossy().as_ref());
cmd.assert()
.failure()
.stderr(predicate::str::contains("Workflow file not found"));
}
#[tokio::test]
async fn test_workflow_upload_action_without_workflow_file_field() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
// Create a temp dir with a regular (non-workflow) action YAML
let dir = tempfile::TempDir::new().unwrap();
let actions_dir = dir.path().join("actions");
fs::create_dir_all(&actions_dir).unwrap();
let action_yaml = r#"ref: mypack.echo
label: "Echo"
description: "A regular action, not a workflow"
runner_type: shell
entry_point: echo.sh
"#;
let action_path = actions_dir.join("echo.yaml");
fs::write(&action_path, action_yaml).unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(action_path.to_string_lossy().as_ref());
cmd.assert()
.failure()
.stderr(predicate::str::contains("workflow_file"));
}
#[tokio::test]
async fn test_workflow_upload_invalid_action_yaml() {
let fixture = TestFixture::new().await;
fixture.write_authenticated_config("valid_token", "refresh_token");
let dir = tempfile::TempDir::new().unwrap();
let bad_yaml_path = dir.path().join("bad.yaml");
fs::write(&bad_yaml_path, "this is not valid yaml: [[[").unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("--api-url")
.arg(fixture.server_url())
.arg("workflow")
.arg("upload")
.arg(bad_yaml_path.to_string_lossy().as_ref());
cmd.assert()
.failure()
.stderr(predicate::str::contains("Failed to parse action YAML"));
}
// ── Help text tests ─────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_help() {
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.arg("workflow").arg("--help");
cmd.assert()
.success()
.stdout(predicate::str::contains("upload"))
.stdout(predicate::str::contains("list"))
.stdout(predicate::str::contains("show"))
.stdout(predicate::str::contains("delete"));
}
#[tokio::test]
async fn test_workflow_upload_help() {
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.arg("workflow").arg("upload").arg("--help");
cmd.assert()
.success()
.stdout(predicate::str::contains("action"))
.stdout(predicate::str::contains("workflow_file"))
.stdout(predicate::str::contains("--force"));
}