working on workflows

This commit is contained in:
2026-03-04 22:02:34 -06:00
parent b54aa3ec26
commit 7438f92502
63 changed files with 10231 additions and 731 deletions

View File

@@ -317,6 +317,62 @@ pub struct CreateFileVersionRequest {
pub created_by: Option<String>,
}
/// Request DTO for the upsert-and-allocate endpoint.
///
/// Looks up an artifact by ref (creating it if it doesn't exist), then
/// allocates a new file-backed version and returns the `file_path` where
/// the caller should write the file on the shared artifact volume.
///
/// This replaces the multi-step create → 409-handling → allocate dance
/// with a single API call.
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct AllocateFileVersionByRefRequest {
// -- Artifact metadata (used only when creating a new artifact) ----------
// When the artifact already exists, the handler ignores these fields with
// one exception: `execution`, which refreshes the execution link.
/// Owner scope type (default: action)
#[schema(example = "action")]
pub scope: Option<OwnerType>,
/// Owner identifier (ref string of the owning entity)
#[schema(example = "python_example.artifact_demo")]
pub owner: Option<String>,
/// Artifact type (must be a file-backed type; default: file_text)
#[schema(example = "file_text")]
pub r#type: Option<ArtifactType>,
/// Visibility level. If omitted, uses type-aware default.
// NOTE(review): the allocate-by-ref handler currently defaults this to
// `Private`, not a type-aware default — confirm which is intended.
pub visibility: Option<ArtifactVisibility>,
/// Retention policy type (default: versions)
pub retention_policy: Option<RetentionPolicyType>,
/// Retention limit (default: 10)
pub retention_limit: Option<i32>,
/// Human-readable name
#[schema(example = "Demo Log")]
pub name: Option<String>,
/// Optional description
pub description: Option<String>,
/// Execution ID to link this artifact to
#[schema(example = 42)]
pub execution: Option<i64>,
// -- Version metadata ----------------------------------------------------
// These fields apply to the newly allocated version regardless of whether
// the artifact was created or reused.
/// MIME content type for this version (e.g. "text/plain")
#[schema(example = "text/plain")]
pub content_type: Option<String>,
/// Free-form metadata about this version
#[schema(value_type = Option<Object>)]
pub meta: Option<JsonValue>,
/// Who created this version (e.g. action ref, identity, "system")
pub created_by: Option<String>,
}
/// Response DTO for an artifact version (without binary content)
#[derive(Debug, Clone, Serialize, ToSchema)]
pub struct ArtifactVersionResponse {

View File

@@ -9,17 +9,24 @@
//! - Listing artifacts by execution
//! - Version history and retrieval
//! - Upsert-and-upload: create-or-reuse an artifact by ref and upload a version in one call
//! - Upsert-and-allocate: create-or-reuse an artifact by ref and allocate a file-backed version path in one call
//! - SSE streaming for file-backed artifacts (live tail while execution is running)
use axum::{
body::Body,
extract::{Multipart, Path, Query, State},
http::{header, StatusCode},
response::IntoResponse,
response::{
sse::{Event, KeepAlive, Sse},
IntoResponse,
},
routing::{get, post},
Json, Router,
};
use futures::stream::Stream;
use std::sync::Arc;
use tracing::warn;
use tokio::io::{AsyncReadExt, AsyncSeekExt};
use tracing::{debug, warn};
use attune_common::models::enums::{
ArtifactType, ArtifactVisibility, OwnerType, RetentionPolicyType,
@@ -36,10 +43,10 @@ use crate::{
auth::middleware::RequireAuth,
dto::{
artifact::{
AppendProgressRequest, ArtifactQueryParams, ArtifactResponse, ArtifactSummary,
ArtifactVersionResponse, ArtifactVersionSummary, CreateArtifactRequest,
CreateFileVersionRequest, CreateVersionJsonRequest, SetDataRequest,
UpdateArtifactRequest,
AllocateFileVersionByRefRequest, AppendProgressRequest, ArtifactQueryParams,
ArtifactResponse, ArtifactSummary, ArtifactVersionResponse, ArtifactVersionSummary,
CreateArtifactRequest, CreateFileVersionRequest, CreateVersionJsonRequest,
SetDataRequest, UpdateArtifactRequest,
},
common::{PaginatedResponse, PaginationParams},
ApiResponse, SuccessResponse,
@@ -659,6 +666,7 @@ pub async fn create_version_file(
// Update the version row with the computed file_path
sqlx::query("UPDATE artifact_version SET file_path = $1 WHERE id = $2")
.bind(&file_path)
.bind(version.id)
.execute(&state.db)
.await
.map_err(|e| {
@@ -1250,6 +1258,165 @@ pub async fn upload_version_by_ref(
))
}
/// Upsert an artifact by ref and allocate a file-backed version in one call.
///
/// If the artifact doesn't exist, it is created using the supplied metadata.
/// If it already exists, the execution link is updated (if provided).
/// Then a new file-backed version is allocated and the `file_path` is returned.
///
/// The caller writes the file to `$ATTUNE_ARTIFACTS_DIR/{file_path}` on the
/// shared volume — no HTTP upload needed.
#[utoipa::path(
post,
path = "/api/v1/artifacts/ref/{ref}/versions/file",
tag = "artifacts",
params(
("ref" = String, Path, description = "Artifact reference (e.g. 'mypack.build_log')")
),
request_body = AllocateFileVersionByRefRequest,
responses(
(status = 201, description = "File version allocated", body = inline(ApiResponse<ArtifactVersionResponse>)),
(status = 400, description = "Invalid request (non-file-backed artifact type)"),
),
security(("bearer_auth" = []))
)]
pub async fn allocate_file_version_by_ref(
RequireAuth(_user): RequireAuth,
State(state): State<Arc<AppState>>,
Path(artifact_ref): Path<String>,
Json(request): Json<AllocateFileVersionByRefRequest>,
) -> ApiResult<impl IntoResponse> {
// Upsert: find existing artifact or create a new one
// NOTE(review): find-then-create is not atomic; two concurrent requests for
// the same new ref can race — confirm a unique constraint on artifact.ref
// turns the loser into a clean error rather than a duplicate row.
let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
Some(existing) => {
// Update execution link if a new execution ID was provided
if request.execution.is_some() && request.execution != existing.execution {
// Sparse update: every other field stays None so only the
// execution link is touched.
let update_input = UpdateArtifactInput {
r#ref: None,
scope: None,
owner: None,
r#type: None,
visibility: None,
retention_policy: None,
retention_limit: None,
name: None,
description: None,
content_type: None,
size_bytes: None,
// Outer Some marks "set this field"; presumably the inner
// Option is the stored value — TODO confirm the
// UpdateArtifactInput double-Option semantics.
execution: request.execution.map(Some),
data: None,
};
ArtifactRepository::update(&state.db, existing.id, update_input).await?
} else {
existing
}
}
None => {
// Parse artifact type (default to FileText)
let a_type = request.r#type.unwrap_or(ArtifactType::FileText);
// Validate it's a file-backed type
if !is_file_backed_type(a_type) {
return Err(ApiError::BadRequest(format!(
"Artifact type {:?} is not file-backed. \
Use POST /artifacts/ref/{{ref}}/versions/upload for DB-stored artifacts.",
a_type,
)));
}
let a_scope = request.scope.unwrap_or(OwnerType::Action);
let a_visibility = request.visibility.unwrap_or(ArtifactVisibility::Private);
let a_retention_policy = request
.retention_policy
.unwrap_or(RetentionPolicyType::Versions);
let a_retention_limit = request.retention_limit.unwrap_or(10);
let create_input = CreateArtifactInput {
r#ref: artifact_ref.clone(),
scope: a_scope,
owner: request.owner.unwrap_or_default(),
r#type: a_type,
visibility: a_visibility,
retention_policy: a_retention_policy,
retention_limit: a_retention_limit,
name: request.name,
description: request.description,
content_type: request.content_type.clone(),
execution: request.execution,
data: None,
};
ArtifactRepository::create(&state.db, create_input).await?
}
};
// Validate the existing artifact is file-backed
// (the creation arm validated the requested type, but a pre-existing
// artifact may have been created as a DB-backed type).
if !is_file_backed_type(artifact.r#type) {
return Err(ApiError::BadRequest(format!(
"Artifact '{}' is type {:?}, which does not support file-backed versions.",
artifact.r#ref, artifact.r#type,
)));
}
let content_type = request
.content_type
.unwrap_or_else(|| default_content_type_for_artifact(artifact.r#type));
// Create version row (file_path computed after we know the version number)
let input = CreateArtifactVersionInput {
artifact: artifact.id,
content_type: Some(content_type.clone()),
content: None,
content_json: None,
file_path: None,
meta: request.meta,
created_by: request.created_by,
};
let version = ArtifactVersionRepository::create(&state.db, input).await?;
// Compute the file path from the artifact ref and version number
let file_path = compute_file_path(&artifact.r#ref, version.version, &content_type);
// Create the parent directory on disk
let artifacts_dir = &state.config.artifacts_dir;
let full_path = std::path::Path::new(artifacts_dir).join(&file_path);
if let Some(parent) = full_path.parent() {
tokio::fs::create_dir_all(parent).await.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to create artifact directory '{}': {}",
parent.display(),
e,
))
})?;
}
// Update the version row with the computed file_path
sqlx::query("UPDATE artifact_version SET file_path = $1 WHERE id = $2")
.bind(&file_path)
.bind(version.id)
.execute(&state.db)
.await
.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to set file_path on version {}: {}",
version.id, e,
))
})?;
// Return the version with file_path populated
// (the row was created with file_path = NULL, so the repository-built
// response would otherwise not carry it).
let mut response = ArtifactVersionResponse::from(version);
response.file_path = Some(file_path);
Ok((
StatusCode::CREATED,
Json(ApiResponse::with_message(
response,
"File version allocated — write content to $ATTUNE_ARTIFACTS_DIR/<file_path>",
)),
))
}
// ============================================================================
// Helpers
// ============================================================================
@@ -1459,8 +1626,434 @@ fn cleanup_empty_parents(dir: &std::path::Path, stop_at: &str) {
}
}
}
// ============================================================================
// SSE file streaming
// ============================================================================
/// Query parameters for the artifact stream endpoint.
#[derive(serde::Deserialize)]
pub struct StreamArtifactParams {
/// JWT access token (SSE/EventSource cannot set Authorization header).
// Validated in `stream_artifact`; a missing or invalid token yields 401.
pub token: Option<String>,
}
/// Internal state machine for the `stream_artifact` SSE generator.
///
/// We use `futures::stream::unfold` instead of `async_stream::stream!` to avoid
/// adding an external dependency.
enum TailState {
/// Waiting for the file to appear on disk.
WaitingForFile {
/// Absolute path on the artifacts volume.
full_path: std::path::PathBuf,
/// Relative path, used only in user-facing messages.
file_path: String,
/// Linked execution (if any) — used to decide when to stop tailing.
execution_id: Option<i64>,
db: sqlx::PgPool,
/// When waiting began; compared against `STREAM_MAX_WAIT` for timeout.
started: tokio::time::Instant,
},
/// File exists — send initial content.
SendInitial {
full_path: std::path::PathBuf,
file_path: String,
execution_id: Option<i64>,
db: sqlx::PgPool,
},
/// Tailing the file for new bytes.
Tailing {
full_path: std::path::PathBuf,
file_path: String,
execution_id: Option<i64>,
db: sqlx::PgPool,
/// Byte offset already delivered to the client.
offset: u64,
/// Consecutive polls that produced no new bytes.
idle_count: u32,
},
/// Emit the final `done` SSE event and close.
SendDone,
/// Stream has ended — return `None` to close.
Finished,
}
/// How long to wait for the file to appear on disk.
const STREAM_MAX_WAIT: std::time::Duration = std::time::Duration::from_secs(30);
/// How often to poll for new bytes / file existence.
const STREAM_POLL_INTERVAL: std::time::Duration = std::time::Duration::from_millis(500);
/// After this many consecutive empty polls we check whether the execution
/// is done and, if so, terminate the stream.
// 6 polls × 500 ms poll interval = 3 seconds of silence before the DB check.
const STREAM_IDLE_CHECKS_BEFORE_DONE: u32 = 6; // 3 seconds of no new data
/// Check whether the given execution has reached a terminal status.
///
/// Returns `false` when there is no linked execution or the status lookup
/// fails (the caller keeps tailing), and `true` when the execution row has
/// been deleted (nothing left to wait for).
async fn is_execution_terminal(db: &sqlx::PgPool, execution_id: Option<i64>) -> bool {
    let exec_id = match execution_id {
        Some(id) => id,
        None => return false,
    };
    let lookup =
        sqlx::query_scalar::<_, String>("SELECT status::text FROM execution WHERE id = $1")
            .bind(exec_id)
            .fetch_optional(db)
            .await;
    match lookup {
        Ok(Some(status)) => {
            // Any of these states means the execution will produce no more output.
            ["succeeded", "failed", "timeout", "canceled", "abandoned"]
                .contains(&status.as_str())
        }
        // Execution deleted — treat as done.
        Ok(None) => true,
        // Transient DB error — err on the side of continuing the tail.
        Err(_) => false,
    }
}
/// Do one final read from `offset` to EOF and return the new bytes (if any).
///
/// Best-effort: any I/O failure simply yields `None`, since this runs just
/// before the stream closes anyway.
async fn final_read_bytes(full_path: &std::path::Path, offset: u64) -> Option<String> {
    let mut file = tokio::fs::File::open(full_path).await.ok()?;
    let len = file.metadata().await.ok()?.len();
    if len <= offset {
        return None;
    }
    file.seek(std::io::SeekFrom::Start(offset)).await.ok()?;
    let mut buf = Vec::with_capacity((len - offset) as usize);
    file.read_to_end(&mut buf).await.ok()?;
    if buf.is_empty() {
        None
    } else {
        Some(String::from_utf8_lossy(&buf).into_owned())
    }
}
/// Stream the latest file-backed artifact version as Server-Sent Events.
///
/// The endpoint:
/// 1. Waits (up to ~30 s) for the file to appear on disk if it has been
///    allocated but not yet written by the worker.
/// 2. Once the file exists it sends the current content as an initial `content`
///    event, then tails the file every 500 ms, sending `append` events with new
///    bytes.
/// 3. When no new bytes have appeared for several consecutive checks **and** the
///    linked execution (if any) has reached a terminal status, it sends a `done`
///    event and the stream ends. For artifacts with no linked execution the
///    stream closes after an extended idle period (4× the normal idle window).
/// 4. If the client disconnects the stream is cleaned up automatically.
///
/// **Event types** (SSE `event:` field):
/// - `content` full file content up to the current offset (sent once)
/// - `append` incremental bytes appended since the last event
/// - `waiting` file does not exist yet; sent periodically while waiting
/// - `done` no more data expected; stream will close
/// - `error` something went wrong; `data` contains a human-readable message
#[utoipa::path(
    get,
    path = "/api/v1/artifacts/{id}/stream",
    tag = "artifacts",
    params(
        ("id" = i64, Path, description = "Artifact ID"),
        ("token" = String, Query, description = "JWT access token for authentication"),
    ),
    responses(
        (status = 200, description = "SSE stream of file content", content_type = "text/event-stream"),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Artifact not found or not file-backed"),
    ),
)]
pub async fn stream_artifact(
    State(state): State<Arc<AppState>>,
    Path(id): Path<i64>,
    Query(params): Query<StreamArtifactParams>,
) -> Result<Sse<impl Stream<Item = Result<Event, std::convert::Infallible>>>, ApiError> {
    // --- auth (EventSource can't send headers, so token comes via query) ----
    use crate::auth::jwt::validate_token;
    let token = params.token.as_ref().ok_or(ApiError::Unauthorized(
        "Missing authentication token".to_string(),
    ))?;
    validate_token(token, &state.jwt_config)
        .map_err(|_| ApiError::Unauthorized("Invalid authentication token".to_string()))?;

    // --- resolve artifact + latest version ---------------------------------
    let artifact = ArtifactRepository::find_by_id(&state.db, id)
        .await?
        .ok_or_else(|| ApiError::NotFound(format!("Artifact with ID {} not found", id)))?;
    if !is_file_backed_type(artifact.r#type) {
        return Err(ApiError::BadRequest(format!(
            "Artifact '{}' is type {:?} which is not file-backed. \
            Use the download endpoint instead.",
            artifact.r#ref, artifact.r#type,
        )));
    }
    let ver = ArtifactVersionRepository::find_latest(&state.db, id)
        .await?
        .ok_or_else(|| ApiError::NotFound(format!("No versions found for artifact {}", id)))?;
    let file_path = ver.file_path.ok_or_else(|| {
        ApiError::NotFound(format!(
            "Latest version of artifact '{}' has no file_path allocated",
            artifact.r#ref,
        ))
    })?;

    let artifacts_dir = state.config.artifacts_dir.clone();
    let full_path = std::path::PathBuf::from(&artifacts_dir).join(&file_path);
    let execution_id = artifact.execution;
    let db = state.db.clone();

    // --- build the SSE stream via unfold -----------------------------------
    let initial_state = TailState::WaitingForFile {
        full_path,
        file_path,
        execution_id,
        db,
        started: tokio::time::Instant::now(),
    };
    let stream = futures::stream::unfold(initial_state, |state| async move {
        match state {
            TailState::Finished => None,
            // ---- Drain state for clean shutdown ----
            TailState::SendDone => Some((
                Ok(Event::default()
                    .event("done")
                    .data("Execution complete — stream closed")),
                TailState::Finished,
            )),
            // ---- Phase 1: wait for the file to appear ----
            TailState::WaitingForFile {
                full_path,
                file_path,
                execution_id,
                db,
                started,
            } => {
                // Async existence check so we never block the runtime thread;
                // an I/O error is treated the same as "not there yet".
                let exists = tokio::fs::try_exists(&full_path).await.unwrap_or(false);
                if exists {
                    let next = TailState::SendInitial {
                        full_path,
                        file_path,
                        execution_id,
                        db,
                    };
                    Some((
                        Ok(Event::default()
                            .event("waiting")
                            .data("File found — loading content")),
                        next,
                    ))
                } else if started.elapsed() > STREAM_MAX_WAIT {
                    Some((
                        Ok(Event::default().event("error").data(format!(
                            "Timed out waiting for file to appear at '{}'",
                            file_path,
                        ))),
                        TailState::Finished,
                    ))
                } else {
                    tokio::time::sleep(STREAM_POLL_INTERVAL).await;
                    Some((
                        Ok(Event::default()
                            .event("waiting")
                            .data("File not yet available — waiting for worker to create it")),
                        TailState::WaitingForFile {
                            full_path,
                            file_path,
                            execution_id,
                            db,
                            started,
                        },
                    ))
                }
            }
            // ---- Phase 2: read and send current file content ----
            TailState::SendInitial {
                full_path,
                file_path,
                execution_id,
                db,
            } => match tokio::fs::File::open(&full_path).await {
                Ok(mut file) => {
                    let mut buf = Vec::new();
                    match file.read_to_end(&mut buf).await {
                        Ok(_) => {
                            let offset = buf.len() as u64;
                            debug!(
                                "artifact stream: sent initial {} bytes for '{}'",
                                offset, file_path,
                            );
                            Some((
                                Ok(Event::default()
                                    .event("content")
                                    .data(String::from_utf8_lossy(&buf).into_owned())),
                                TailState::Tailing {
                                    full_path,
                                    file_path,
                                    execution_id,
                                    db,
                                    offset,
                                    idle_count: 0,
                                },
                            ))
                        }
                        Err(e) => Some((
                            Ok(Event::default()
                                .event("error")
                                .data(format!("Failed to read file: {}", e))),
                            TailState::Finished,
                        )),
                    }
                }
                Err(e) => Some((
                    Ok(Event::default()
                        .event("error")
                        .data(format!("Failed to open file: {}", e))),
                    TailState::Finished,
                )),
            },
            // ---- Phase 3: tail the file for new bytes ----
            TailState::Tailing {
                full_path,
                file_path,
                execution_id,
                db,
                mut offset,
                mut idle_count,
            } => {
                tokio::time::sleep(STREAM_POLL_INTERVAL).await;
                // Re-open the file each iteration so we pick up content that
                // was written by a different process (the worker).
                let mut file = match tokio::fs::File::open(&full_path).await {
                    Ok(f) => f,
                    Err(e) => {
                        return Some((
                            Ok(Event::default()
                                .event("error")
                                .data(format!("File disappeared: {}", e))),
                            TailState::Finished,
                        ));
                    }
                };
                let meta = match file.metadata().await {
                    Ok(m) => m,
                    Err(_) => {
                        // Transient metadata error — keep going.
                        return Some((
                            Ok(Event::default().comment("metadata-retry")),
                            TailState::Tailing {
                                full_path,
                                file_path,
                                execution_id,
                                db,
                                offset,
                                idle_count,
                            },
                        ));
                    }
                };
                let file_len = meta.len();
                if file_len > offset {
                    // New data available — seek and read.
                    if let Err(e) = file.seek(std::io::SeekFrom::Start(offset)).await {
                        return Some((
                            Ok(Event::default()
                                .event("error")
                                .data(format!("Seek error: {}", e))),
                            TailState::Finished,
                        ));
                    }
                    let mut new_buf = Vec::with_capacity((file_len - offset) as usize);
                    match file.read_to_end(&mut new_buf).await {
                        Ok(n) => {
                            offset += n as u64;
                            idle_count = 0;
                            Some((
                                Ok(Event::default()
                                    .event("append")
                                    .data(String::from_utf8_lossy(&new_buf).into_owned())),
                                TailState::Tailing {
                                    full_path,
                                    file_path,
                                    execution_id,
                                    db,
                                    offset,
                                    idle_count,
                                },
                            ))
                        }
                        Err(e) => Some((
                            Ok(Event::default()
                                .event("error")
                                .data(format!("Read error: {}", e))),
                            TailState::Finished,
                        )),
                    }
                } else if file_len < offset {
                    // File truncated — resend from scratch.
                    drop(file);
                    Some((
                        Ok(Event::default()
                            .event("waiting")
                            .data("File was truncated — resending content")),
                        TailState::SendInitial {
                            full_path,
                            file_path,
                            execution_id,
                            db,
                        },
                    ))
                } else {
                    // No change this poll.
                    idle_count += 1;
                    if idle_count >= STREAM_IDLE_CHECKS_BEFORE_DONE {
                        let done = is_execution_terminal(&db, execution_id).await
                            || (execution_id.is_none()
                                && idle_count >= STREAM_IDLE_CHECKS_BEFORE_DONE * 4);
                        if done {
                            // One final read to catch trailing bytes written
                            // between the last poll and the terminal check.
                            return if let Some(trailing) =
                                final_read_bytes(&full_path, offset).await
                            {
                                Some((
                                    Ok(Event::default().event("append").data(trailing)),
                                    TailState::SendDone,
                                ))
                            } else {
                                Some((
                                    Ok(Event::default()
                                        .event("done")
                                        .data("Execution complete — stream closed")),
                                    TailState::Finished,
                                ))
                            };
                        }
                        // Reset so we don't hit the DB on every poll — but only
                        // when there *is* an execution to query. When no
                        // execution is linked, `is_execution_terminal` is always
                        // false and the `* 4` idle fallback above is the only
                        // exit; resetting here would make it unreachable and the
                        // stream would never close (the previous behavior).
                        if execution_id.is_some() {
                            idle_count = 0;
                        }
                    }
                    Some((
                        Ok(Event::default().comment("no-change")),
                        TailState::Tailing {
                            full_path,
                            file_path,
                            execution_id,
                            db,
                            offset,
                            idle_count,
                        },
                    ))
                }
            }
        }
    });
    Ok(Sse::new(stream).keep_alive(
        KeepAlive::new()
            .interval(std::time::Duration::from_secs(15))
            .text("keepalive"),
    ))
}
/// Derive a simple file extension from a MIME content type
fn extension_from_content_type(ct: &str) -> &str {
match ct {
"text/plain" => "txt",
@@ -1503,6 +2096,10 @@ pub fn routes() -> Router<Arc<AppState>> {
"/artifacts/ref/{ref}/versions/upload",
post(upload_version_by_ref),
)
.route(
"/artifacts/ref/{ref}/versions/file",
post(allocate_file_version_by_ref),
)
// Progress / data
.route("/artifacts/{id}/progress", post(append_progress))
.route(
@@ -1511,6 +2108,8 @@ pub fn routes() -> Router<Arc<AppState>> {
)
// Download (latest)
.route("/artifacts/{id}/download", get(download_latest))
// SSE streaming for file-backed artifacts
.route("/artifacts/{id}/stream", get(stream_artifact))
// Version management
.route(
"/artifacts/{id}/versions",

View File

@@ -523,12 +523,11 @@ async fn write_workflow_yaml(
pack_ref: &str,
request: &SaveWorkflowFileRequest,
) -> Result<(), ApiError> {
let workflows_dir = packs_base_dir
.join(pack_ref)
.join("actions")
.join("workflows");
let pack_dir = packs_base_dir.join(pack_ref);
let actions_dir = pack_dir.join("actions");
let workflows_dir = actions_dir.join("workflows");
// Ensure the directory exists
// Ensure both directories exist
tokio::fs::create_dir_all(&workflows_dir)
.await
.map_err(|e| {
@@ -539,34 +538,164 @@ async fn write_workflow_yaml(
))
})?;
let filename = format!("{}.workflow.yaml", request.name);
let filepath = workflows_dir.join(&filename);
// ── 1. Write the workflow file (graph-only: version, vars, tasks, output_map) ──
let workflow_filename = format!("{}.workflow.yaml", request.name);
let workflow_filepath = workflows_dir.join(&workflow_filename);
// Serialize definition to YAML
let yaml_content = serde_yaml_ng::to_string(&request.definition).map_err(|e| {
// Strip action-level fields from the definition — the workflow file should
// contain only the execution graph. The action YAML is authoritative for
// ref, label, description, parameters, output, and tags.
let graph_only = strip_action_level_fields(&request.definition);
let workflow_yaml = serde_yaml_ng::to_string(&graph_only).map_err(|e| {
ApiError::BadRequest(format!("Failed to serialize workflow to YAML: {}", e))
})?;
// Write file
tokio::fs::write(&filepath, yaml_content)
let workflow_yaml_with_header = format!(
"# Workflow execution graph for {}.{}\n\
# Action-level metadata (ref, label, parameters, output, tags) is defined\n\
# in the companion action YAML: actions/{}.yaml\n\n{}",
pack_ref, request.name, request.name, workflow_yaml
);
tokio::fs::write(&workflow_filepath, &workflow_yaml_with_header)
.await
.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to write workflow file '{}': {}",
filepath.display(),
workflow_filepath.display(),
e
))
})?;
tracing::info!(
"Wrote workflow file: {} ({} bytes)",
filepath.display(),
filepath.metadata().map(|m| m.len()).unwrap_or(0)
workflow_filepath.display(),
workflow_yaml_with_header.len()
);
// ── 2. Write the companion action YAML ──
let action_filename = format!("{}.yaml", request.name);
let action_filepath = actions_dir.join(&action_filename);
let action_yaml = build_action_yaml(pack_ref, request);
tokio::fs::write(&action_filepath, &action_yaml)
.await
.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to write action YAML '{}': {}",
action_filepath.display(),
e
))
})?;
tracing::info!(
"Wrote action YAML: {} ({} bytes)",
action_filepath.display(),
action_yaml.len()
);
Ok(())
}
/// Strip action-level fields from a workflow definition JSON, keeping only
/// the execution graph: `version`, `vars`, `tasks`, `output_map`.
///
/// Fields removed: `ref`, `label`, `description`, `parameters`, `output`, `tags`.
fn strip_action_level_fields(definition: &serde_json::Value) -> serde_json::Value {
    // The only keys that belong in the workflow (graph) file.
    const GRAPH_KEYS: [&str; 4] = ["version", "vars", "tasks", "output_map"];
    match definition.as_object() {
        Some(obj) => {
            let graph: serde_json::Map<String, serde_json::Value> = GRAPH_KEYS
                .iter()
                .filter_map(|key| obj.get(*key).map(|val| ((*key).to_string(), val.clone())))
                .collect();
            serde_json::Value::Object(graph)
        }
        // Shouldn't happen, but pass non-object values through untouched.
        None => definition.clone(),
    }
}
/// Build the companion action YAML content for a workflow action.
///
/// This file defines the action-level metadata (ref, label, parameters, etc.)
/// and references the workflow file via `workflow_file`.
fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> String {
    /// Escape a value for embedding in a double-quoted YAML scalar.
    ///
    /// Backslashes must be escaped *before* quotes — escaping quotes alone
    /// produces invalid YAML for any label/description containing `\`.
    fn yaml_quote_escape(s: &str) -> String {
        s.replace('\\', "\\\\").replace('"', "\\\"")
    }

    let mut lines = Vec::new();

    // Header comment pointing at the companion workflow graph file.
    lines.push(format!(
        "# Action definition for workflow {}.{}",
        pack_ref, request.name
    ));
    lines.push("# The workflow graph (tasks, transitions, variables) is in:".to_string());
    lines.push(format!(
        "# actions/workflows/{}.workflow.yaml",
        request.name
    ));
    lines.push(String::new());

    // Action-level metadata.
    lines.push(format!("ref: {}.{}", pack_ref, request.name));
    lines.push(format!("label: \"{}\"", yaml_quote_escape(&request.label)));
    if let Some(ref desc) = request.description {
        if !desc.is_empty() {
            lines.push(format!("description: \"{}\"", yaml_quote_escape(desc)));
        }
    }
    lines.push("enabled: true".to_string());
    lines.push(format!(
        "workflow_file: workflows/{}.workflow.yaml",
        request.name
    ));

    // Parameters schema — emitted only when a non-empty object is supplied.
    if let Some(ref params) = request.param_schema {
        if let Some(obj) = params.as_object() {
            if !obj.is_empty() {
                lines.push(String::new());
                let params_yaml = serde_yaml_ng::to_string(params).unwrap_or_default();
                lines.push("parameters:".to_string());
                // Indent the serialized YAML under `parameters:`.
                for line in params_yaml.lines() {
                    lines.push(format!(" {}", line));
                }
            }
        }
    }

    // Output schema — same shape as parameters.
    if let Some(ref output) = request.out_schema {
        if let Some(obj) = output.as_object() {
            if !obj.is_empty() {
                lines.push(String::new());
                let output_yaml = serde_yaml_ng::to_string(output).unwrap_or_default();
                lines.push("output:".to_string());
                for line in output_yaml.lines() {
                    lines.push(format!(" {}", line));
                }
            }
        }
    }

    // Tags list.
    if let Some(ref tags) = request.tags {
        if !tags.is_empty() {
            lines.push(String::new());
            lines.push("tags:".to_string());
            for tag in tags {
                lines.push(format!(" - {}", tag));
            }
        }
    }

    lines.push(String::new()); // trailing newline
    lines.join("\n")
}
/// Create a companion action record for a workflow definition.
///
/// This ensures the workflow appears in action lists and the action palette in the

View File

@@ -1,5 +1,5 @@
use anyhow::{Context, Result};
use reqwest::{multipart, Client as HttpClient, Method, RequestBuilder, Response, StatusCode};
use reqwest::{multipart, Client as HttpClient, Method, RequestBuilder, StatusCode};
use serde::{de::DeserializeOwned, Serialize};
use std::path::PathBuf;
use std::time::Duration;
@@ -83,13 +83,14 @@ impl ApiClient {
self.auth_token = None;
}
/// Refresh the authentication token using the refresh token
/// Refresh the authentication token using the refresh token.
///
/// Returns Ok(true) if refresh succeeded, Ok(false) if no refresh token available
/// Returns `Ok(true)` if refresh succeeded, `Ok(false)` if no refresh token
/// is available or the server rejected it.
async fn refresh_auth_token(&mut self) -> Result<bool> {
let refresh_token = match &self.refresh_token {
Some(token) => token.clone(),
None => return Ok(false), // No refresh token available
None => return Ok(false),
};
#[derive(Serialize)]
@@ -103,7 +104,6 @@ impl ApiClient {
refresh_token: String,
}
// Build refresh request without auth token
let url = format!("{}/auth/refresh", self.base_url);
let req = self
.client
@@ -113,7 +113,7 @@ impl ApiClient {
let response = req.send().await.context("Failed to refresh token")?;
if !response.status().is_success() {
// Refresh failed - clear tokens
// Refresh failed clear tokens so we don't keep retrying
self.auth_token = None;
self.refresh_token = None;
return Ok(false);
@@ -128,7 +128,7 @@ impl ApiClient {
self.auth_token = Some(api_response.data.access_token.clone());
self.refresh_token = Some(api_response.data.refresh_token.clone());
// Persist to config file if we have the path
// Persist to config file
if self.config_path.is_some() {
if let Ok(mut config) = CliConfig::load() {
let _ = config.set_auth(
@@ -141,45 +141,96 @@ impl ApiClient {
Ok(true)
}
/// Build a request with common headers
fn build_request(&self, method: Method, path: &str) -> RequestBuilder {
// Auth endpoints are at /auth, not /auth
let url = if path.starts_with("/auth") {
// ── Request building helpers ────────────────────────────────────────
/// Build a full URL from a path.
fn url_for(&self, path: &str) -> String {
if path.starts_with("/auth") {
format!("{}{}", self.base_url, path)
} else {
format!("{}/api/v1{}", self.base_url, path)
};
let mut req = self.client.request(method, &url);
}
}
/// Build a `RequestBuilder` with auth header applied.
fn build_request(&self, method: Method, path: &str) -> RequestBuilder {
let url = self.url_for(path);
let mut req = self.client.request(method, &url);
if let Some(token) = &self.auth_token {
req = req.bearer_auth(token);
}
req
}
/// Execute a request and handle the response with automatic token refresh
async fn execute<T: DeserializeOwned>(&mut self, req: RequestBuilder) -> Result<T> {
// ── Core execute-with-retry machinery ──────────────────────────────
/// Send a request that carries a JSON body. On a 401 response the token
/// is refreshed and the request is rebuilt & retried exactly once.
async fn execute_json<T, B>(
&mut self,
method: Method,
path: &str,
body: Option<&B>,
) -> Result<T>
where
T: DeserializeOwned,
B: Serialize,
{
// First attempt
let req = self.attach_body(self.build_request(method.clone(), path), body);
let response = req.send().await.context("Failed to send request to API")?;
// If 401 and we have a refresh token, try to refresh once
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
// Try to refresh the token
if self.refresh_auth_token().await? {
// Rebuild and retry the original request with new token
// Note: This is a simplified retry - the original request body is already consumed
// For a production implementation, we'd need to clone the request or store the body
return Err(anyhow::anyhow!(
"Token expired and was refreshed. Please retry your command."
));
// Retry with new token
let req = self.attach_body(self.build_request(method, path), body);
let response = req
.send()
.await
.context("Failed to send request to API (retry)")?;
return self.handle_response(response).await;
}
}
self.handle_response(response).await
}
/// Handle API response and extract data
async fn handle_response<T: DeserializeOwned>(&self, response: Response) -> Result<T> {
/// Send a request that carries a JSON body and expects no response body.
async fn execute_json_no_response<B: Serialize>(
&mut self,
method: Method,
path: &str,
body: Option<&B>,
) -> Result<()> {
let req = self.attach_body(self.build_request(method.clone(), path), body);
let response = req.send().await.context("Failed to send request to API")?;
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
if self.refresh_auth_token().await? {
let req = self.attach_body(self.build_request(method, path), body);
let response = req
.send()
.await
.context("Failed to send request to API (retry)")?;
return self.handle_empty_response(response).await;
}
}
self.handle_empty_response(response).await
}
/// Optionally attach a JSON body to a request builder.
fn attach_body<B: Serialize>(&self, req: RequestBuilder, body: Option<&B>) -> RequestBuilder {
match body {
Some(b) => req.json(b),
None => req,
}
}
// ── Response handling ──────────────────────────────────────────────
/// Parse a successful API response or return a descriptive error.
async fn handle_response<T: DeserializeOwned>(&self, response: reqwest::Response) -> Result<T> {
let status = response.status();
if status.is_success() {
@@ -194,7 +245,6 @@ impl ApiClient {
.await
.unwrap_or_else(|_| "Unknown error".to_string());
// Try to parse as API error
if let Ok(api_error) = serde_json::from_str::<ApiError>(&error_text) {
anyhow::bail!("API error ({}): {}", status, api_error.error);
} else {
@@ -203,10 +253,30 @@ impl ApiClient {
}
}
/// Handle a response where we only care about success/failure, not a body.
async fn handle_empty_response(&self, response: reqwest::Response) -> Result<()> {
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
if let Ok(api_error) = serde_json::from_str::<ApiError>(&error_text) {
anyhow::bail!("API error ({}): {}", status, api_error.error);
} else {
anyhow::bail!("API error ({}): {}", status, error_text);
}
}
}
// ── Public convenience methods ─────────────────────────────────────
/// GET request
pub async fn get<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::GET, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::GET, path, None).await
}
/// GET request with query parameters (query string must be in path)
@@ -215,8 +285,7 @@ impl ApiClient {
/// Example: `client.get_with_query("/actions?enabled=true&pack=core").await`
#[allow(dead_code)]
pub async fn get_with_query<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::GET, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::GET, path, None).await
}
/// POST request with JSON body
@@ -225,8 +294,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::POST, path).json(body);
self.execute(req).await
self.execute_json(Method::POST, path, Some(body)).await
}
/// PUT request with JSON body
@@ -237,8 +305,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::PUT, path).json(body);
self.execute(req).await
self.execute_json(Method::PUT, path, Some(body)).await
}
/// PATCH request with JSON body
@@ -247,8 +314,7 @@ impl ApiClient {
path: &str,
body: &B,
) -> Result<T> {
let req = self.build_request(Method::PATCH, path).json(body);
self.execute(req).await
self.execute_json(Method::PATCH, path, Some(body)).await
}
/// DELETE request with response parsing
@@ -259,8 +325,7 @@ impl ApiClient {
/// delete operations return metadata (e.g., cascade deletion summaries).
#[allow(dead_code)]
pub async fn delete<T: DeserializeOwned>(&mut self, path: &str) -> Result<T> {
let req = self.build_request(Method::DELETE, path);
self.execute(req).await
self.execute_json::<T, ()>(Method::DELETE, path, None).await
}
/// POST request without expecting response body
@@ -270,36 +335,14 @@ impl ApiClient {
/// Kept for API completeness even though not currently used.
#[allow(dead_code)]
pub async fn post_no_response<B: Serialize>(&mut self, path: &str, body: &B) -> Result<()> {
let req = self.build_request(Method::POST, path).json(body);
let response = req.send().await.context("Failed to send request to API")?;
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
anyhow::bail!("API error ({}): {}", status, error_text);
}
self.execute_json_no_response(Method::POST, path, Some(body))
.await
}
/// DELETE request without expecting response body
pub async fn delete_no_response(&mut self, path: &str) -> Result<()> {
let req = self.build_request(Method::DELETE, path);
let response = req.send().await.context("Failed to send request to API")?;
let status = response.status();
if status.is_success() {
Ok(())
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
anyhow::bail!("API error ({}): {}", status, error_text);
}
self.execute_json_no_response::<()>(Method::DELETE, path, None)
.await
}
/// POST a multipart/form-data request with a file field and optional text fields.
@@ -318,33 +361,47 @@ impl ApiClient {
mime_type: &str,
extra_fields: Vec<(&str, String)>,
) -> Result<T> {
let url = format!("{}/api/v1{}", self.base_url, path);
// Closure-like helper to build the multipart request from scratch.
// We need this because reqwest::multipart::Form is not Clone, so we
// must rebuild it for the retry attempt.
let build_multipart_request =
|client: &ApiClient, bytes: &[u8]| -> Result<reqwest::RequestBuilder> {
let url = format!("{}/api/v1{}", client.base_url, path);
let file_part = multipart::Part::bytes(file_bytes)
.file_name(file_name.to_string())
.mime_str(mime_type)
.context("Invalid MIME type")?;
let file_part = multipart::Part::bytes(bytes.to_vec())
.file_name(file_name.to_string())
.mime_str(mime_type)
.context("Invalid MIME type")?;
let mut form = multipart::Form::new().part(file_field_name.to_string(), file_part);
let mut form = multipart::Form::new().part(file_field_name.to_string(), file_part);
for (key, value) in extra_fields {
form = form.text(key.to_string(), value);
}
for (key, value) in &extra_fields {
form = form.text(key.to_string(), value.clone());
}
let mut req = self.client.post(&url).multipart(form);
let mut req = client.client.post(&url).multipart(form);
if let Some(token) = &client.auth_token {
req = req.bearer_auth(token);
}
Ok(req)
};
if let Some(token) = &self.auth_token {
req = req.bearer_auth(token);
}
// First attempt
let req = build_multipart_request(self, &file_bytes)?;
let response = req
.send()
.await
.context("Failed to send multipart request to API")?;
let response = req.send().await.context("Failed to send multipart request to API")?;
// Handle 401 + refresh (same pattern as execute())
if response.status() == StatusCode::UNAUTHORIZED && self.refresh_token.is_some() {
if self.refresh_auth_token().await? {
return Err(anyhow::anyhow!(
"Token expired and was refreshed. Please retry your command."
));
// Retry with new token
let req = build_multipart_request(self, &file_bytes)?;
let response = req
.send()
.await
.context("Failed to send multipart request to API (retry)")?;
return self.handle_response(response).await;
}
}
@@ -374,4 +431,22 @@ mod tests {
client.clear_auth_token();
assert!(client.auth_token.is_none());
}
// Regular API paths get the `/api/v1` prefix prepended to the base URL.
#[test]
fn test_url_for_api_path() {
    let client = ApiClient::new("http://localhost:8080".to_string(), None);
    assert_eq!(
        client.url_for("/actions"),
        "http://localhost:8080/api/v1/actions"
    );
}
// Auth endpoints are served outside the `/api/v1` prefix.
#[test]
fn test_url_for_auth_path() {
    let client = ApiClient::new("http://localhost:8080".to_string(), None);
    assert_eq!(
        client.url_for("/auth/login"),
        "http://localhost:8080/auth/login"
    );
}
}

View File

@@ -52,7 +52,7 @@ pub enum ActionCommands {
action_ref: String,
/// Skip confirmation prompt
#[arg(short, long)]
#[arg(long)]
yes: bool,
},
/// Execute an action

View File

@@ -7,3 +7,4 @@ pub mod pack_index;
pub mod rule;
pub mod sensor;
pub mod trigger;
pub mod workflow;

View File

@@ -11,6 +11,37 @@ use crate::output::{self, OutputFormat};
#[derive(Subcommand)]
pub enum PackCommands {
/// Create an empty pack
///
/// Creates a new pack with no actions, triggers, rules, or sensors.
/// Use --interactive (-i) to be prompted for each field, or provide
/// fields via flags. Only --ref is required in non-interactive mode
/// (--label defaults to a title-cased ref, version defaults to 0.1.0).
Create {
/// Unique reference identifier (e.g., "my_pack", "slack")
#[arg(long, short = 'r')]
r#ref: Option<String>,
/// Human-readable label (defaults to title-cased ref)
#[arg(long, short)]
label: Option<String>,
/// Pack description
#[arg(long, short)]
description: Option<String>,
/// Pack version (semver format recommended)
#[arg(long = "pack-version", default_value = "0.1.0")]
pack_version: String,
/// Tags for categorization (comma-separated)
#[arg(long, value_delimiter = ',')]
tags: Vec<String>,
/// Interactive mode — prompt for each field
#[arg(long, short)]
interactive: bool,
},
/// List all installed packs
List {
/// Filter by pack name
@@ -75,7 +106,7 @@ pub enum PackCommands {
pack_ref: String,
/// Skip confirmation prompt
#[arg(short = 'y', long)]
#[arg(long)]
yes: bool,
},
/// Register a pack from a local directory (path must be accessible by the API server)
@@ -282,6 +313,17 @@ struct UploadPackResponse {
tests_skipped: bool,
}
/// JSON body for `POST /packs` (pack creation).
#[derive(Debug, Serialize)]
struct CreatePackBody {
    /// Unique pack reference; serialized under the JSON key "ref".
    r#ref: String,
    /// Human-readable label.
    label: String,
    /// Optional description; omitted from the payload when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    /// Pack version string.
    version: String,
    /// Tags for categorization. The previous `#[serde(default)]` was removed:
    /// that attribute only affects deserialization, and this struct derives
    /// `Serialize` only, so it was dead and misleading.
    tags: Vec<String>,
}
pub async fn handle_pack_command(
profile: &Option<String>,
command: PackCommands,
@@ -289,6 +331,27 @@ pub async fn handle_pack_command(
output_format: OutputFormat,
) -> Result<()> {
match command {
PackCommands::Create {
r#ref,
label,
description,
pack_version,
tags,
interactive,
} => {
handle_create(
profile,
r#ref,
label,
description,
pack_version,
tags,
interactive,
api_url,
output_format,
)
.await
}
PackCommands::List { name } => handle_list(profile, name, api_url, output_format).await,
PackCommands::Show { pack_ref } => {
handle_show(profile, pack_ref, api_url, output_format).await
@@ -401,6 +464,169 @@ pub async fn handle_pack_command(
}
}
/// Build a display label from a pack ref by title-casing its words.
///
/// Words are the non-empty runs between `_`, `-`, and `.` separators; each
/// word's first character is uppercased and the remainder left unchanged.
fn label_from_ref(r: &str) -> String {
    let mut words: Vec<String> = Vec::new();
    for word in r.split(|c: char| matches!(c, '_' | '-' | '.')) {
        // Consecutive separators yield empty segments — drop them.
        if word.is_empty() {
            continue;
        }
        let mut chars = word.chars();
        if let Some(first) = chars.next() {
            // `to_uppercase` may expand to multiple chars (e.g. ß → SS),
            // so collect it into the string rather than assuming one char.
            let mut cap: String = first.to_uppercase().collect();
            cap.push_str(chars.as_str());
            words.push(cap);
        }
    }
    words.join(" ")
}
/// Handle `pack create`: collect pack fields (interactively or from flags),
/// then POST them to `/packs`.
///
/// In interactive mode every field is prompted with the flag value (or a
/// derived default) pre-filled, and a summary + confirmation step precedes
/// the request. In non-interactive mode `--ref` is mandatory and the label
/// defaults to a title-cased ref.
async fn handle_create(
    profile: &Option<String>,
    ref_flag: Option<String>,
    label_flag: Option<String>,
    description_flag: Option<String>,
    version_flag: String,
    tags_flag: Vec<String>,
    interactive: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    // ── Collect field values ────────────────────────────────────────
    let (pack_ref, label, description, version, tags) = if interactive {
        // Interactive prompts. A --ref passed alongside -i skips that prompt.
        let pack_ref: String = match ref_flag {
            Some(r) => r,
            None => dialoguer::Input::new()
                .with_prompt("Pack ref (unique identifier, e.g. \"my_pack\")")
                .interact_text()?,
        };
        // Pre-fill the label prompt with --label, else a title-cased ref.
        let default_label = label_flag
            .clone()
            .unwrap_or_else(|| label_from_ref(&pack_ref));
        let label: String = dialoguer::Input::new()
            .with_prompt("Label")
            .default(default_label)
            .interact_text()?;
        let default_desc = description_flag.clone().unwrap_or_default();
        let description: String = dialoguer::Input::new()
            .with_prompt("Description (optional, Enter to skip)")
            .default(default_desc)
            .allow_empty(true)
            .interact_text()?;
        // Empty input means "no description" rather than an empty string.
        let description = if description.is_empty() {
            None
        } else {
            Some(description)
        };
        let version: String = dialoguer::Input::new()
            .with_prompt("Version")
            .default(version_flag)
            .interact_text()?;
        let default_tags = if tags_flag.is_empty() {
            String::new()
        } else {
            tags_flag.join(", ")
        };
        let tags_input: String = dialoguer::Input::new()
            .with_prompt("Tags (comma-separated, optional)")
            .default(default_tags)
            .allow_empty(true)
            .interact_text()?;
        // Re-split the comma-separated answer, trimming and dropping blanks.
        let tags: Vec<String> = tags_input
            .split(',')
            .map(|s| s.trim().to_string())
            .filter(|s| !s.is_empty())
            .collect();
        // Show summary and confirm
        println!();
        output::print_section("New Pack Summary");
        output::print_key_value_table(vec![
            ("Ref", pack_ref.clone()),
            ("Label", label.clone()),
            (
                "Description",
                description
                    .clone()
                    .unwrap_or_else(|| "(none)".to_string()),
            ),
            ("Version", version.clone()),
            (
                "Tags",
                if tags.is_empty() {
                    "(none)".to_string()
                } else {
                    tags.join(", ")
                },
            ),
        ]);
        println!();
        let confirm = dialoguer::Confirm::new()
            .with_prompt("Create this pack?")
            .default(true)
            .interact()?;
        if !confirm {
            // User backed out — nothing was sent to the API.
            output::print_info("Pack creation cancelled");
            return Ok(());
        }
        (pack_ref, label, description, version, tags)
    } else {
        // Non-interactive: ref is required
        let pack_ref = ref_flag.ok_or_else(|| {
            anyhow::anyhow!(
                "Pack ref is required. Provide --ref <value> or use --interactive mode."
            )
        })?;
        let label = label_flag.unwrap_or_else(|| label_from_ref(&pack_ref));
        let description = description_flag;
        let version = version_flag;
        let tags = tags_flag;
        (pack_ref, label, description, version, tags)
    };
    // ── Send request ────────────────────────────────────────────────
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let body = CreatePackBody {
        r#ref: pack_ref,
        label,
        description,
        version,
        tags,
    };
    let pack: Pack = client.post("/packs", &body).await?;
    // ── Output ──────────────────────────────────────────────────────
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&pack, output_format)?;
        }
        OutputFormat::Table => {
            output::print_success(&format!(
                "Pack '{}' created successfully (id: {})",
                pack.pack_ref, pack.id
            ));
        }
    }
    Ok(())
}
async fn handle_list(
profile: &Option<String>,
name: Option<String>,
@@ -1630,3 +1856,48 @@ async fn handle_update(
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // `label_from_ref` title-cases the words between `_`, `-`, and `.`.
    #[test]
    fn test_label_from_ref_underscores() {
        assert_eq!(label_from_ref("my_cool_pack"), "My Cool Pack");
    }
    #[test]
    fn test_label_from_ref_hyphens() {
        assert_eq!(label_from_ref("my-cool-pack"), "My Cool Pack");
    }
    #[test]
    fn test_label_from_ref_dots() {
        assert_eq!(label_from_ref("my.cool.pack"), "My Cool Pack");
    }
    #[test]
    fn test_label_from_ref_mixed_separators() {
        assert_eq!(label_from_ref("my_cool-pack.v2"), "My Cool Pack V2");
    }
    #[test]
    fn test_label_from_ref_single_word() {
        assert_eq!(label_from_ref("slack"), "Slack");
    }
    // Only the first character is uppercased; the rest is left as-is.
    #[test]
    fn test_label_from_ref_already_capitalized() {
        assert_eq!(label_from_ref("AWS"), "AWS");
    }
    #[test]
    fn test_label_from_ref_empty() {
        assert_eq!(label_from_ref(""), "");
    }
    // Consecutive separators must not produce empty words.
    #[test]
    fn test_label_from_ref_consecutive_separators() {
        assert_eq!(label_from_ref("my__pack"), "My Pack");
    }
}

View File

@@ -42,7 +42,7 @@ pub enum TriggerCommands {
trigger_ref: String,
/// Skip confirmation prompt
#[arg(short, long)]
#[arg(long)]
yes: bool,
},
}

View File

@@ -0,0 +1,699 @@
use anyhow::{Context, Result};
use clap::Subcommand;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use crate::client::ApiClient;
use crate::config::CliConfig;
use crate::output::{self, OutputFormat};
// NOTE: the `///` comments on the variants and fields below double as the
// CLI help text rendered by clap — treat them as user-facing output.
#[derive(Subcommand)]
pub enum WorkflowCommands {
    /// Upload a workflow action from local YAML files to an existing pack.
    ///
    /// Reads the action YAML file, finds the referenced workflow YAML file
    /// via its `workflow_file` field, and uploads both to the API. The pack
    /// is determined from the action ref (e.g. `mypack.deploy` → pack `mypack`).
    Upload {
        /// Path to the action YAML file (e.g. actions/deploy.yaml).
        /// Must contain a `workflow_file` field pointing to the workflow YAML.
        action_file: String,
        /// Force update if the workflow already exists
        #[arg(short, long)]
        force: bool,
    },
    /// List workflows
    List {
        /// Filter by pack reference
        #[arg(long)]
        pack: Option<String>,
        /// Filter by tag (comma-separated)
        #[arg(long)]
        tags: Option<String>,
        /// Search term (matches label/description)
        #[arg(long)]
        search: Option<String>,
    },
    /// Show details of a specific workflow
    Show {
        /// Workflow reference (e.g. core.install_packs)
        workflow_ref: String,
    },
    /// Delete a workflow
    Delete {
        /// Workflow reference (e.g. core.install_packs)
        workflow_ref: String,
        /// Skip confirmation prompt
        // `--yes` is long-only here (no `-y` short form).
        #[arg(long)]
        yes: bool,
    },
}
// ── Local YAML models (for parsing action YAML files) ──────────────────
/// Minimal representation of an action YAML file, capturing only the fields
/// we need to build a `SaveWorkflowFileRequest`.
#[derive(Debug, Deserialize)]
struct ActionYaml {
    /// Full action ref, e.g. `python_example.timeline_demo`
    // `ref` is a Rust keyword, so the YAML key is renamed locally.
    #[serde(rename = "ref")]
    action_ref: String,
    /// Human-readable label (empty string when absent from the YAML)
    #[serde(default)]
    label: String,
    /// Description
    // NOTE(review): `#[serde(default)]` on `Option` fields below is
    // redundant — serde already maps missing keys to `None`. Harmless.
    #[serde(default)]
    description: Option<String>,
    /// Relative path to the workflow YAML from the `actions/` directory
    // No `default` here: absence is detected and reported by handle_upload.
    workflow_file: Option<String>,
    /// Parameter schema (flat format)
    #[serde(default)]
    parameters: Option<serde_json::Value>,
    /// Output schema (flat format)
    #[serde(default)]
    output: Option<serde_json::Value>,
    /// Tags
    #[serde(default)]
    tags: Option<Vec<String>>,
    /// Whether the action is enabled
    #[serde(default)]
    enabled: Option<bool>,
}
// ── API DTOs ────────────────────────────────────────────────────────────
/// Mirrors the API's `SaveWorkflowFileRequest`.
#[derive(Debug, Serialize)]
struct SaveWorkflowFileRequest {
    // Workflow name — the last segment of the action ref.
    name: String,
    // Display label; handle_upload falls back to `name` when the action
    // YAML has no label.
    label: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    // Version taken from the workflow definition (defaults to "1.0.0").
    version: String,
    // Ref of the pack that owns this workflow.
    pack_ref: String,
    // Full workflow definition blob (graph plus action-level fields).
    definition: serde_json::Value,
    // The remaining optional fields are dropped from the JSON when `None`
    // so the API can apply its own defaults.
    #[serde(skip_serializing_if = "Option::is_none")]
    param_schema: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    out_schema: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tags: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    enabled: Option<bool>,
}
/// Full workflow record returned by the API (used by show/create/update).
#[derive(Debug, Serialize, Deserialize)]
struct WorkflowResponse {
    id: i64,
    // The API field is `ref`, a Rust keyword — renamed locally.
    #[serde(rename = "ref")]
    workflow_ref: String,
    // Numeric ID of the owning pack (the ref is in `pack_ref`).
    pack: i64,
    pack_ref: String,
    label: String,
    description: Option<String>,
    version: String,
    param_schema: Option<serde_json::Value>,
    out_schema: Option<serde_json::Value>,
    definition: serde_json::Value,
    tags: Vec<String>,
    enabled: bool,
    // Timestamps kept as strings; rendered via output::format_timestamp.
    created: String,
    updated: String,
}
/// Slimmer workflow record used by list endpoints — same fields as
/// `WorkflowResponse` minus the definition, schemas, and pack ID.
#[derive(Debug, Serialize, Deserialize)]
struct WorkflowSummary {
    id: i64,
    #[serde(rename = "ref")]
    workflow_ref: String,
    pack_ref: String,
    label: String,
    description: Option<String>,
    version: String,
    tags: Vec<String>,
    enabled: bool,
    created: String,
    updated: String,
}
// ── Command dispatch ────────────────────────────────────────────────────
/// Dispatch a `workflow` subcommand to its handler, threading through the
/// profile, API URL override, and output format shared by all subcommands.
pub async fn handle_workflow_command(
    profile: &Option<String>,
    command: WorkflowCommands,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    match command {
        WorkflowCommands::Upload { action_file, force } => {
            handle_upload(profile, action_file, force, api_url, output_format).await
        }
        WorkflowCommands::List { pack, tags, search } => {
            handle_list(profile, pack, tags, search, api_url, output_format).await
        }
        WorkflowCommands::Show { workflow_ref } => {
            handle_show(profile, workflow_ref, api_url, output_format).await
        }
        WorkflowCommands::Delete { workflow_ref, yes } => {
            handle_delete(profile, workflow_ref, yes, api_url, output_format).await
        }
    }
}
// ── Upload ──────────────────────────────────────────────────────────────
/// Handle `workflow upload`: read the action YAML, locate its workflow YAML
/// via the `workflow_file` field, and create the workflow through the API —
/// falling back to an update when it already exists and `--force` was given.
async fn handle_upload(
    profile: &Option<String>,
    action_file: String,
    force: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let action_path = Path::new(&action_file);
    // ── 1. Validate & read the action YAML ──────────────────────────────
    if !action_path.exists() {
        anyhow::bail!("Action YAML file not found: {}", action_file);
    }
    if !action_path.is_file() {
        anyhow::bail!("Path is not a file: {}", action_file);
    }
    let action_yaml_content =
        std::fs::read_to_string(action_path).context("Failed to read action YAML file")?;
    let action: ActionYaml = serde_yaml_ng::from_str(&action_yaml_content)
        .context("Failed to parse action YAML file")?;
    // ── 2. Extract pack_ref and workflow name from the action ref ────────
    let (pack_ref, workflow_name) = split_action_ref(&action.action_ref)?;
    // ── 3. Resolve the workflow_file path ───────────────────────────────
    // Only workflow actions carry `workflow_file`; its absence means this
    // file is a regular action and the command does not apply.
    let workflow_file_rel = action.workflow_file.as_deref().ok_or_else(|| {
        anyhow::anyhow!(
            "Action YAML does not contain a 'workflow_file' field. \
            This command requires a workflow action — regular actions should be \
            uploaded as part of a pack."
        )
    })?;
    // workflow_file is relative to the actions/ directory. The action YAML is
    // typically at `<pack>/actions/<name>.yaml`, so the workflow file is
    // resolved relative to the action YAML's parent directory.
    let workflow_path = resolve_workflow_path(action_path, workflow_file_rel)?;
    if !workflow_path.exists() {
        anyhow::bail!(
            "Workflow file not found: {}\n \
            (resolved from workflow_file: '{}' relative to '{}')",
            workflow_path.display(),
            workflow_file_rel,
            action_path
                .parent()
                .unwrap_or(Path::new("."))
                .display()
        );
    }
    // ── 4. Read and parse the workflow YAML ─────────────────────────────
    let workflow_yaml_content =
        std::fs::read_to_string(&workflow_path).context("Failed to read workflow YAML file")?;
    let workflow_definition: serde_json::Value =
        serde_yaml_ng::from_str(&workflow_yaml_content).context(format!(
            "Failed to parse workflow YAML file: {}",
            workflow_path.display()
        ))?;
    // Extract version from the workflow definition, defaulting to "1.0.0"
    let version = workflow_definition
        .get("version")
        .and_then(|v| v.as_str())
        .unwrap_or("1.0.0")
        .to_string();
    // ── 5. Build the API request ────────────────────────────────────────
    //
    // Merge the action-level fields from the workflow definition back into the
    // definition payload (the API's SaveWorkflowFileRequest.definition carries
    // the full blob; write_workflow_yaml on the server side strips the action-
    // level fields before writing the graph-only file).
    let mut definition_map: serde_json::Map<String, serde_json::Value> =
        if let Some(obj) = workflow_definition.as_object() {
            obj.clone()
        } else {
            serde_json::Map::new()
        };
    // Ensure action-level fields are present in the definition (the API and
    // web UI store the combined form in the database; the server splits them
    // into two files on disk). `entry(...).or_insert_with` keeps any value
    // already present in the workflow YAML — the action YAML only fills gaps.
    if let Some(params) = &action.parameters {
        definition_map
            .entry("parameters".to_string())
            .or_insert_with(|| params.clone());
    }
    if let Some(out) = &action.output {
        definition_map
            .entry("output".to_string())
            .or_insert_with(|| out.clone());
    }
    let request = SaveWorkflowFileRequest {
        name: workflow_name.clone(),
        // Fall back to the workflow name when the action has no label.
        label: if action.label.is_empty() {
            workflow_name.clone()
        } else {
            action.label.clone()
        },
        description: action.description.clone(),
        version,
        pack_ref: pack_ref.clone(),
        definition: serde_json::Value::Object(definition_map),
        param_schema: action.parameters.clone(),
        out_schema: action.output.clone(),
        tags: action.tags.clone(),
        enabled: action.enabled,
    };
    // ── 6. Print progress ───────────────────────────────────────────────
    if output_format == OutputFormat::Table {
        output::print_info(&format!(
            "Uploading workflow action '{}.{}' to pack '{}'",
            pack_ref, workflow_name, pack_ref,
        ));
        output::print_info(&format!(" Action YAML: {}", action_path.display()));
        output::print_info(&format!(" Workflow YAML: {}", workflow_path.display()));
    }
    // ── 7. Send to API ──────────────────────────────────────────────────
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let workflow_ref = format!("{}.{}", pack_ref, workflow_name);
    // Try create first; if 409 Conflict and --force, fall back to update.
    let create_path = format!("/packs/{}/workflow-files", pack_ref);
    let result: Result<WorkflowResponse> = client.post(&create_path, &request).await;
    let response: WorkflowResponse = match result {
        Ok(resp) => resp,
        Err(err) => {
            // The client surfaces HTTP failures as formatted strings, so the
            // conflict case is detected by inspecting the error text.
            let err_str = err.to_string();
            if err_str.contains("409") || err_str.to_lowercase().contains("conflict") {
                if !force {
                    anyhow::bail!(
                        "Workflow '{}' already exists. Use --force to update it.",
                        workflow_ref
                    );
                }
                if output_format == OutputFormat::Table {
                    output::print_info("Workflow already exists, updating...");
                }
                let update_path = format!("/workflows/{}/file", workflow_ref);
                client.put(&update_path, &request).await.context(
                    "Failed to update existing workflow. \
                    Check that the pack exists and the workflow ref is correct.",
                )?
            } else {
                return Err(err).context("Failed to upload workflow");
            }
        }
    };
    // ── 8. Print result ─────────────────────────────────────────────────
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&response, output_format)?;
        }
        OutputFormat::Table => {
            println!();
            output::print_success(&format!(
                "Workflow '{}' uploaded successfully",
                response.workflow_ref
            ));
            output::print_key_value_table(vec![
                ("ID", response.id.to_string()),
                ("Reference", response.workflow_ref.clone()),
                ("Pack", response.pack_ref.clone()),
                ("Label", response.label.clone()),
                ("Version", response.version.clone()),
                (
                    "Tags",
                    if response.tags.is_empty() {
                        "none".to_string()
                    } else {
                        response.tags.join(", ")
                    },
                ),
                ("Enabled", output::format_bool(response.enabled)),
            ]);
        }
    }
    Ok(())
}
// ── List ────────────────────────────────────────────────────────────────
/// Handle `workflow list`: fetch workflow summaries and render them.
///
/// When `--pack` is given the pack-scoped endpoint is used and the other
/// filters are ignored; otherwise `tags`/`search` become query parameters
/// on the global listing endpoint.
async fn handle_list(
    profile: &Option<String>,
    pack: Option<String>,
    tags: Option<String>,
    search: Option<String>,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let path = if let Some(ref pack_ref) = pack {
        format!("/packs/{}/workflows", pack_ref)
    } else {
        // Build the query string; values are URL-encoded.
        let mut query_parts: Vec<String> = Vec::new();
        if let Some(ref t) = tags {
            query_parts.push(format!("tags={}", urlencoding::encode(t)));
        }
        if let Some(ref s) = search {
            query_parts.push(format!("search={}", urlencoding::encode(s)));
        }
        if query_parts.is_empty() {
            "/workflows".to_string()
        } else {
            format!("/workflows?{}", query_parts.join("&"))
        }
    };
    let workflows: Vec<WorkflowSummary> = client.get(&path).await?;
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&workflows, output_format)?;
        }
        OutputFormat::Table => {
            if workflows.is_empty() {
                output::print_info("No workflows found");
            } else {
                let mut table = output::create_table();
                output::add_header(
                    &mut table,
                    vec!["ID", "Reference", "Pack", "Label", "Version", "Enabled", "Tags"],
                );
                for wf in &workflows {
                    table.add_row(vec![
                        wf.id.to_string(),
                        wf.workflow_ref.clone(),
                        wf.pack_ref.clone(),
                        // Long labels/tags are truncated to keep rows narrow.
                        output::truncate(&wf.label, 30),
                        wf.version.clone(),
                        output::format_bool(wf.enabled),
                        if wf.tags.is_empty() {
                            "-".to_string()
                        } else {
                            output::truncate(&wf.tags.join(", "), 25)
                        },
                    ]);
                }
                println!("{}", table);
                output::print_info(&format!("{} workflow(s) found", workflows.len()));
            }
        }
    }
    Ok(())
}
// ── Show ────────────────────────────────────────────────────────────────
/// Handle `workflow show`: fetch one workflow and render its details.
///
/// Table output adds optional sections for the parameter schema, output
/// schema, and a per-task summary derived from the definition's `tasks`
/// array.
async fn handle_show(
    profile: &Option<String>,
    workflow_ref: String,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    let path = format!("/workflows/{}", workflow_ref);
    let workflow: WorkflowResponse = client.get(&path).await?;
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            output::print_output(&workflow, output_format)?;
        }
        OutputFormat::Table => {
            output::print_section(&format!("Workflow: {}", workflow.workflow_ref));
            output::print_key_value_table(vec![
                ("ID", workflow.id.to_string()),
                ("Reference", workflow.workflow_ref.clone()),
                ("Pack", workflow.pack_ref.clone()),
                ("Pack ID", workflow.pack.to_string()),
                ("Label", workflow.label.clone()),
                (
                    "Description",
                    workflow
                        .description
                        .clone()
                        .unwrap_or_else(|| "-".to_string()),
                ),
                ("Version", workflow.version.clone()),
                ("Enabled", output::format_bool(workflow.enabled)),
                (
                    "Tags",
                    if workflow.tags.is_empty() {
                        "none".to_string()
                    } else {
                        workflow.tags.join(", ")
                    },
                ),
                ("Created", output::format_timestamp(&workflow.created)),
                ("Updated", output::format_timestamp(&workflow.updated)),
            ]);
            // Show parameter schema if present (null/empty objects are skipped)
            if let Some(ref params) = workflow.param_schema {
                if !params.is_null() && params.as_object().is_some_and(|o| !o.is_empty()) {
                    output::print_section("Parameters");
                    let yaml = serde_yaml_ng::to_string(params)?;
                    println!("{}", yaml);
                }
            }
            // Show output schema if present
            if let Some(ref out) = workflow.out_schema {
                if !out.is_null() && out.as_object().is_some_and(|o| !o.is_empty()) {
                    output::print_section("Output Schema");
                    let yaml = serde_yaml_ng::to_string(out)?;
                    println!("{}", yaml);
                }
            }
            // Show task summary from definition
            if let Some(tasks) = workflow.definition.get("tasks") {
                if let Some(arr) = tasks.as_array() {
                    output::print_section("Tasks");
                    let mut table = output::create_table();
                    output::add_header(&mut table, vec!["#", "Name", "Action", "Transitions"]);
                    for (i, task) in arr.iter().enumerate() {
                        let name = task
                            .get("name")
                            .and_then(|v| v.as_str())
                            .unwrap_or("?");
                        let action = task
                            .get("action")
                            .and_then(|v| v.as_str())
                            .unwrap_or("-");
                        // A task's `next` entries each list target tasks under
                        // `do`; sum their lengths for a total transition count.
                        let transition_count = task
                            .get("next")
                            .and_then(|v| v.as_array())
                            .map(|a| {
                                // Count total target tasks across all transitions
                                a.iter()
                                    .filter_map(|t| {
                                        t.get("do").and_then(|d| d.as_array()).map(|d| d.len())
                                    })
                                    .sum::<usize>()
                            })
                            .unwrap_or(0);
                        let transitions_str = if transition_count == 0 {
                            "terminal".to_string()
                        } else {
                            format!("{} target(s)", transition_count)
                        };
                        table.add_row(vec![
                            (i + 1).to_string(),
                            name.to_string(),
                            output::truncate(action, 35),
                            transitions_str,
                        ]);
                    }
                    println!("{}", table);
                }
            }
        }
    }
    Ok(())
}
// ── Delete ──────────────────────────────────────────────────────────────
/// Handle `workflow delete`: confirm (unless `--yes` or non-table output),
/// then DELETE the workflow and report the result.
async fn handle_delete(
    profile: &Option<String>,
    workflow_ref: String,
    yes: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);
    // Only prompt in interactive table mode; JSON/YAML output implies
    // scripting, where a prompt would hang the pipeline.
    if !yes && output_format == OutputFormat::Table {
        let confirm = dialoguer::Confirm::new()
            .with_prompt(format!(
                "Are you sure you want to delete workflow '{}'?",
                workflow_ref
            ))
            .default(false)
            .interact()?;
        if !confirm {
            output::print_info("Delete cancelled");
            return Ok(());
        }
    }
    let path = format!("/workflows/{}", workflow_ref);
    client.delete_no_response(&path).await?;
    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            let msg = serde_json::json!({"message": format!("Workflow '{}' deleted", workflow_ref)});
            output::print_output(&msg, output_format)?;
        }
        OutputFormat::Table => {
            output::print_success(&format!("Workflow '{}' deleted successfully", workflow_ref));
        }
    }
    Ok(())
}
// ── Helpers ─────────────────────────────────────────────────────────────
/// Split an action ref like `pack_name.action_name` into `(pack_ref, name)`.
///
/// Supports multi-segment pack refs: `org.pack.action` → `("org.pack", "action")`.
/// The last dot-separated segment is the workflow/action name; everything before
/// it is the pack ref.
fn split_action_ref(action_ref: &str) -> Result<(String, String)> {
    // `rsplit_once` splits on the LAST dot, which is exactly the
    // "multi-segment pack ref" rule documented above (idiomatic replacement
    // for the previous manual `rfind` + slicing).
    let (pack_ref, name) = action_ref.rsplit_once('.').ok_or_else(|| {
        anyhow::anyhow!(
            "Invalid action ref '{}': expected format 'pack_ref.name' (at least one dot)",
            action_ref
        )
    })?;
    // Reject refs like ".name" or "pack." where one side is empty.
    if pack_ref.is_empty() || name.is_empty() {
        anyhow::bail!(
            "Invalid action ref '{}': both pack_ref and name must be non-empty",
            action_ref
        );
    }
    Ok((pack_ref.to_string(), name.to_string()))
}
/// Resolve the workflow YAML path from the action YAML's location and the
/// `workflow_file` value.
///
/// `workflow_file` is relative to the `actions/` directory. Since the action
/// YAML is typically at `<pack>/actions/<name>.yaml`, the workflow path is
/// resolved relative to the action YAML's parent directory.
fn resolve_workflow_path(action_yaml_path: &Path, workflow_file: &str) -> Result<PathBuf> {
    // The action YAML's directory anchors the relative workflow path; a
    // bare file name has no parent, so fall back to the current directory.
    let base = match action_yaml_path.parent() {
        Some(dir) => dir,
        None => Path::new("."),
    };
    // Existence is deliberately NOT checked here — the caller reports a
    // richer error message when the resolved file is missing.
    Ok(base.join(workflow_file))
}
#[cfg(test)]
mod tests {
    use super::*;
    // `split_action_ref`: the LAST dot separates pack ref from name.
    #[test]
    fn test_split_action_ref_simple() {
        let (pack, name) = split_action_ref("core.echo").unwrap();
        assert_eq!(pack, "core");
        assert_eq!(name, "echo");
    }
    #[test]
    fn test_split_action_ref_multi_segment_pack() {
        let (pack, name) = split_action_ref("org.infra.deploy").unwrap();
        assert_eq!(pack, "org.infra");
        assert_eq!(name, "deploy");
    }
    // Refs without a dot, or with an empty side, are rejected.
    #[test]
    fn test_split_action_ref_no_dot() {
        assert!(split_action_ref("nodot").is_err());
    }
    #[test]
    fn test_split_action_ref_empty_parts() {
        assert!(split_action_ref(".name").is_err());
        assert!(split_action_ref("pack.").is_err());
    }
    // `resolve_workflow_path` joins relative to the action file's directory.
    #[test]
    fn test_resolve_workflow_path() {
        let action_path = Path::new("/packs/mypack/actions/deploy.yaml");
        let resolved =
            resolve_workflow_path(action_path, "workflows/deploy.workflow.yaml").unwrap();
        assert_eq!(
            resolved,
            PathBuf::from("/packs/mypack/actions/workflows/deploy.workflow.yaml")
        );
    }
    #[test]
    fn test_resolve_workflow_path_relative() {
        let action_path = Path::new("actions/deploy.yaml");
        let resolved =
            resolve_workflow_path(action_path, "workflows/deploy.workflow.yaml").unwrap();
        assert_eq!(
            resolved,
            PathBuf::from("actions/workflows/deploy.workflow.yaml")
        );
    }
}

View File

@@ -16,6 +16,7 @@ use commands::{
rule::RuleCommands,
sensor::SensorCommands,
trigger::TriggerCommands,
workflow::WorkflowCommands,
};
#[derive(Parser)]
@@ -78,6 +79,11 @@ enum Commands {
#[command(subcommand)]
command: ExecutionCommands,
},
/// Workflow management
Workflow {
#[command(subcommand)]
command: WorkflowCommands,
},
/// Trigger management
Trigger {
#[command(subcommand)]
@@ -172,6 +178,15 @@ async fn main() {
)
.await
}
Commands::Workflow { command } => {
commands::workflow::handle_workflow_command(
&cli.profile,
command,
&cli.api_url,
output_format,
)
.await
}
Commands::Trigger { command } => {
commands::trigger::handle_trigger_command(
&cli.profile,

View File

@@ -438,3 +438,38 @@ pub async fn mock_not_found(server: &MockServer, path_pattern: &str) {
.mount(server)
.await;
}
/// Mock a successful pack create response (POST /api/v1/packs)
#[allow(dead_code)]
pub async fn mock_pack_create(server: &MockServer) {
    // 201 Created with the canonical pack payload under the `data` envelope.
    let body = json!({
        "data": {
            "id": 42,
            "ref": "my_pack",
            "label": "My Pack",
            "description": "A test pack",
            "version": "0.1.0",
            "author": null,
            "enabled": true,
            "tags": ["test"],
            "created": "2024-01-01T00:00:00Z",
            "updated": "2024-01-01T00:00:00Z"
        }
    });
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .respond_with(ResponseTemplate::new(201).set_body_json(body))
        .mount(server)
        .await;
}
/// Mock a 409 conflict response for pack create
#[allow(dead_code)]
pub async fn mock_pack_create_conflict(server: &MockServer) {
    // Simulates the server rejecting a duplicate pack ref.
    let body = json!({
        "error": "Pack with ref 'my_pack' already exists"
    });
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .respond_with(ResponseTemplate::new(409).set_body_json(body))
        .mount(server)
        .await;
}

View File

@@ -4,6 +4,11 @@
use assert_cmd::Command;
use predicates::prelude::*;
use serde_json::json;
use wiremock::{
matchers::{body_json, method, path},
Mock, ResponseTemplate,
};
mod common;
use common::*;
@@ -222,6 +227,231 @@ async fn test_pack_get_json_output() {
.stdout(predicate::str::contains(r#""ref": "core""#));
}
// ── pack create tests ──────────────────────────────────────────────────
#[tokio::test]
async fn test_pack_create_non_interactive() {
    // Every field supplied via flags — no interactive prompting expected.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_pack_create(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args([
            "pack",
            "create",
            "--ref",
            "my_pack",
            "--label",
            "My Pack",
            "--description",
            "A test pack",
            "--pack-version",
            "0.1.0",
            "--tags",
            "test",
        ]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("my_pack"))
        .stdout(predicate::str::contains("created successfully"));
}
#[tokio::test]
async fn test_pack_create_json_output() {
    // --json prints the raw response fields instead of the human summary.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_pack_create(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--json", "pack", "create", "--ref", "my_pack"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains(r#""ref": "my_pack""#))
        .stdout(predicate::str::contains(r#""id": 42"#));
}
#[tokio::test]
async fn test_pack_create_conflict() {
    // A 409 from the server surfaces as a failure with the server's message.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_pack_create_conflict(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["pack", "create", "--ref", "my_pack"]);

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("already exists"));
}
#[tokio::test]
async fn test_pack_create_missing_ref() {
    // Without --ref the command must fail before hitting the API.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["pack", "create"]);

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("Pack ref is required"));
}
#[tokio::test]
async fn test_pack_create_default_label_from_ref() {
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    // Custom mock: body_json asserts the label was title-cased from the ref.
    let expected_request = json!({
        "ref": "my_cool_pack",
        "label": "My Cool Pack",
        "version": "0.1.0",
        "tags": []
    });
    let response = json!({
        "data": {
            "id": 99,
            "ref": "my_cool_pack",
            "label": "My Cool Pack",
            "version": "0.1.0",
            "enabled": true,
            "created": "2024-01-01T00:00:00Z",
            "updated": "2024-01-01T00:00:00Z"
        }
    });
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .and(body_json(expected_request))
        .respond_with(ResponseTemplate::new(201).set_body_json(response))
        .mount(&fixture.mock_server)
        .await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["pack", "create", "--ref", "my_cool_pack"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("my_cool_pack"))
        .stdout(predicate::str::contains("created successfully"));
}
#[tokio::test]
async fn test_pack_create_default_version() {
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    // body_json asserts the default "0.1.0" is sent when --pack-version is omitted.
    let expected_request = json!({
        "ref": "versioned_pack",
        "label": "Versioned Pack",
        "version": "0.1.0",
        "tags": []
    });
    let response = json!({
        "data": {
            "id": 7,
            "ref": "versioned_pack",
            "label": "Versioned Pack",
            "version": "0.1.0",
            "enabled": true,
            "created": "2024-01-01T00:00:00Z",
            "updated": "2024-01-01T00:00:00Z"
        }
    });
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .and(body_json(expected_request))
        .respond_with(ResponseTemplate::new(201).set_body_json(response))
        .mount(&fixture.mock_server)
        .await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["pack", "create", "--ref", "versioned_pack"]);

    cmd.assert().success();
}
#[tokio::test]
async fn test_pack_create_with_tags() {
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    // The comma-separated --tags value must be split into a JSON array.
    let expected_request = json!({
        "ref": "tagged",
        "label": "Tagged",
        "version": "0.1.0",
        "tags": ["networking", "monitoring"]
    });
    let response = json!({
        "data": {
            "id": 10,
            "ref": "tagged",
            "label": "Tagged",
            "version": "0.1.0",
            "tags": ["networking", "monitoring"],
            "enabled": true,
            "created": "2024-01-01T00:00:00Z",
            "updated": "2024-01-01T00:00:00Z"
        }
    });
    Mock::given(method("POST"))
        .and(path("/api/v1/packs"))
        .and(body_json(expected_request))
        .respond_with(ResponseTemplate::new(201).set_body_json(response))
        .mount(&fixture.mock_server)
        .await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["pack", "create", "--ref", "tagged", "--tags", "networking,monitoring"]);

    cmd.assert().success();
}
#[tokio::test]
async fn test_pack_list_empty_result() {
let fixture = TestFixture::new().await;

View File

@@ -0,0 +1,777 @@
//! Integration tests for CLI workflow commands
#![allow(deprecated)]
use assert_cmd::Command;
use predicates::prelude::*;
use serde_json::json;
use std::fs;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
mod common;
use common::*;
// ── Mock helpers ────────────────────────────────────────────────────────
/// GET /api/v1/workflows → two workflows across two packs.
async fn mock_workflow_list(server: &MockServer) {
    let body = json!({
        "data": [
            {
                "id": 1,
                "ref": "core.install_packs",
                "pack_ref": "core",
                "label": "Install Packs",
                "description": "Install one or more packs",
                "version": "1.0.0",
                "tags": ["core", "packs"],
                "enabled": true,
                "created": "2024-01-01T00:00:00Z",
                "updated": "2024-01-01T00:00:00Z"
            },
            {
                "id": 2,
                "ref": "mypack.deploy",
                "pack_ref": "mypack",
                "label": "Deploy App",
                "description": "Deploy an application",
                "version": "2.0.0",
                "tags": ["deploy"],
                "enabled": true,
                "created": "2024-01-02T00:00:00Z",
                "updated": "2024-01-02T00:00:00Z"
            }
        ]
    });
    Mock::given(method("GET"))
        .and(path("/api/v1/workflows"))
        .respond_with(ResponseTemplate::new(200).set_body_json(body))
        .mount(server)
        .await;
}
/// GET /api/v1/packs/{pack_ref}/workflows → one example workflow in that pack.
async fn mock_workflow_list_by_pack(server: &MockServer, pack_ref: &str) {
    let body = json!({
        "data": [
            {
                "id": 1,
                "ref": format!("{}.example_workflow", pack_ref),
                "pack_ref": pack_ref,
                "label": "Example Workflow",
                "description": "An example workflow",
                "version": "1.0.0",
                "tags": [],
                "enabled": true,
                "created": "2024-01-01T00:00:00Z",
                "updated": "2024-01-01T00:00:00Z"
            }
        ]
    });
    Mock::given(method("GET"))
        .and(path(format!("/api/v1/packs/{}/workflows", pack_ref)))
        .respond_with(ResponseTemplate::new(200).set_body_json(body))
        .mount(server)
        .await;
}
/// GET /api/v1/workflows/{ref} → a full workflow record including schemas
/// and a two-task definition (step1 → step2 on success).
async fn mock_workflow_get(server: &MockServer, workflow_ref: &str) {
    let body = json!({
        "data": {
            "id": 1,
            "ref": workflow_ref,
            "pack": 1,
            "pack_ref": "mypack",
            "label": "My Workflow",
            "description": "A test workflow",
            "version": "1.0.0",
            "param_schema": {
                "url": {"type": "string", "required": true},
                "timeout": {"type": "integer", "default": 30}
            },
            "out_schema": {
                "status": {"type": "string"}
            },
            "definition": {
                "version": "1.0.0",
                "vars": {"result": null},
                "tasks": [
                    {
                        "name": "step1",
                        "action": "core.echo",
                        "input": {"message": "hello"},
                        "next": [
                            {"when": "{{ succeeded() }}", "do": ["step2"]}
                        ]
                    },
                    {
                        "name": "step2",
                        "action": "core.echo",
                        "input": {"message": "done"}
                    }
                ]
            },
            "tags": ["test", "demo"],
            "enabled": true,
            "created": "2024-01-01T00:00:00Z",
            "updated": "2024-01-01T00:00:00Z"
        }
    });
    Mock::given(method("GET"))
        .and(path(format!("/api/v1/workflows/{}", workflow_ref)))
        .respond_with(ResponseTemplate::new(200).set_body_json(body))
        .mount(server)
        .await;
}
/// DELETE /api/v1/workflows/{ref} → 204 No Content.
async fn mock_workflow_delete(server: &MockServer, workflow_ref: &str) {
    Mock::given(method("DELETE"))
        .and(path(format!("/api/v1/workflows/{}", workflow_ref)))
        .respond_with(ResponseTemplate::new(204))
        .mount(server)
        .await;
}
/// POST /api/v1/packs/{pack_ref}/workflow-files → 201 with the created
/// workflow record.
async fn mock_workflow_save(server: &MockServer, pack_ref: &str) {
    let body = json!({
        "data": {
            "id": 10,
            "ref": format!("{}.deploy", pack_ref),
            "pack": 1,
            "pack_ref": pack_ref,
            "label": "Deploy App",
            "description": "Deploy the application",
            "version": "1.0.0",
            "param_schema": null,
            "out_schema": null,
            "definition": {"version": "1.0.0", "tasks": []},
            "tags": ["deploy"],
            "enabled": true,
            "created": "2024-01-10T00:00:00Z",
            "updated": "2024-01-10T00:00:00Z"
        }
    });
    Mock::given(method("POST"))
        .and(path(format!("/api/v1/packs/{}/workflow-files", pack_ref)))
        .respond_with(ResponseTemplate::new(201).set_body_json(body))
        .mount(server)
        .await;
}
/// POST /api/v1/packs/{pack_ref}/workflow-files → 409 duplicate-ref error.
async fn mock_workflow_save_conflict(server: &MockServer, pack_ref: &str) {
    let body = json!({
        "error": "Workflow with ref 'mypack.deploy' already exists"
    });
    Mock::given(method("POST"))
        .and(path(format!("/api/v1/packs/{}/workflow-files", pack_ref)))
        .respond_with(ResponseTemplate::new(409).set_body_json(body))
        .mount(server)
        .await;
}
/// PUT /api/v1/workflows/{ref}/file → 200 with the updated record (note the
/// bumped `updated` timestamp).
async fn mock_workflow_update(server: &MockServer, workflow_ref: &str) {
    let body = json!({
        "data": {
            "id": 10,
            "ref": workflow_ref,
            "pack": 1,
            "pack_ref": "mypack",
            "label": "Deploy App",
            "description": "Deploy the application",
            "version": "1.0.0",
            "param_schema": null,
            "out_schema": null,
            "definition": {"version": "1.0.0", "tasks": []},
            "tags": ["deploy"],
            "enabled": true,
            "created": "2024-01-10T00:00:00Z",
            "updated": "2024-01-10T12:00:00Z"
        }
    });
    Mock::given(method("PUT"))
        .and(path(format!("/api/v1/workflows/{}/file", workflow_ref)))
        .respond_with(ResponseTemplate::new(200).set_body_json(body))
        .mount(server)
        .await;
}
// ── Helper to write action + workflow YAML to temp dirs ─────────────────
/// On-disk fixture: a temp directory laid out like a pack (`actions/` plus
/// `actions/workflows/`) containing one workflow-backed action YAML.
struct WorkflowFixture {
    // Keeps the temp directory alive for the fixture's lifetime; contents are
    // deleted when the fixture is dropped.
    _dir: tempfile::TempDir,
    // Absolute path to the generated action YAML, handed to `workflow upload`.
    action_yaml_path: String,
}
impl WorkflowFixture {
    /// Create a temp-dir pack layout containing one workflow-backed action.
    ///
    /// Writes `actions/<name>.yaml` (action metadata that references
    /// `workflow_file`) and the workflow definition at
    /// `actions/<workflow_file>`, where `<name>` is the last dot-segment of
    /// `action_ref`.
    ///
    /// NOTE(review): the indentation inside both YAML literals below was
    /// reconstructed from YAML nesting semantics (the source view stripped
    /// leading whitespace) — confirm against the original fixture, in
    /// particular whether `output_map` is workflow-level or belongs to the
    /// `verify` task.
    fn new(action_ref: &str, workflow_file: &str) -> Self {
        let dir = tempfile::TempDir::new().expect("Failed to create temp dir");
        let actions_dir = dir.path().join("actions");
        let workflows_dir = actions_dir.join("workflows");
        fs::create_dir_all(&workflows_dir).unwrap();
        // Write the action YAML
        let action_yaml = format!(
            r#"ref: {}
label: "Deploy App"
description: "Deploy the application"
enabled: true
workflow_file: {}
parameters:
  environment:
    type: string
    required: true
    description: "Target environment"
  version:
    type: string
    default: "latest"
output:
  status:
    type: string
tags:
  - deploy
"#,
            action_ref, workflow_file,
        );
        // The action file name is the part of the ref after the last dot.
        let action_name = action_ref.rsplit('.').next().unwrap();
        let action_path = actions_dir.join(format!("{}.yaml", action_name));
        fs::write(&action_path, &action_yaml).unwrap();
        // Write the workflow YAML
        let workflow_yaml = r#"version: "1.0.0"
vars:
  deploy_result: null
tasks:
  - name: prepare
    action: core.echo
    input:
      message: "Preparing deployment"
    next:
      - when: "{{ succeeded() }}"
        do:
          - deploy
  - name: deploy
    action: core.echo
    input:
      message: "Deploying"
    next:
      - when: "{{ succeeded() }}"
        do:
          - verify
  - name: verify
    action: core.echo
    input:
      message: "Verifying"
output_map:
  status: "{{ 'success' if workflow.deploy_result else 'unknown' }}"
"#;
        let workflow_path = workflows_dir.join(format!("{}.workflow.yaml", action_name));
        fs::write(&workflow_path, workflow_yaml).unwrap();
        Self {
            action_yaml_path: action_path.to_string_lossy().to_string(),
            _dir: dir,
        }
    }
}
// ── List tests ──────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_list_authenticated() {
    // Authenticated list shows both workflows plus the count footer.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_list(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "list"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("core.install_packs"))
        .stdout(predicate::str::contains("mypack.deploy"))
        .stdout(predicate::str::contains("2 workflow(s) found"));
}
#[tokio::test]
async fn test_workflow_list_by_pack() {
    // --pack scopes the listing to the pack-specific endpoint.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_list_by_pack(&fixture.mock_server, "core").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "list", "--pack", "core"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("core.example_workflow"))
        .stdout(predicate::str::contains("1 workflow(s) found"));
}
#[tokio::test]
async fn test_workflow_list_json_output() {
    // --json emits the refs as quoted JSON strings.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_list(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--json", "workflow", "list"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("\"core.install_packs\""))
        .stdout(predicate::str::contains("\"mypack.deploy\""));
}
#[tokio::test]
async fn test_workflow_list_yaml_output() {
    // --yaml output still includes both workflow refs.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_list(&fixture.mock_server).await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--yaml", "workflow", "list"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("core.install_packs"))
        .stdout(predicate::str::contains("mypack.deploy"));
}
#[tokio::test]
async fn test_workflow_list_empty() {
    // An empty data array produces the friendly "No workflows found" message.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let body = json!({ "data": [] });
    Mock::given(method("GET"))
        .and(path("/api/v1/workflows"))
        .respond_with(ResponseTemplate::new(200).set_body_json(body))
        .mount(&fixture.mock_server)
        .await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "list"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("No workflows found"));
}
#[tokio::test]
async fn test_workflow_list_unauthenticated() {
    // Default (unauthenticated) config + 401 from the API → non-zero exit.
    let fixture = TestFixture::new().await;
    fixture.write_default_config();
    mock_unauthorized(&fixture.mock_server, "/api/v1/workflows").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "list"]);

    cmd.assert().failure();
}
// ── Show tests ──────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_show() {
    // Show renders metadata plus a tasks table with names and actions.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_get(&fixture.mock_server, "mypack.my_workflow").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "show", "mypack.my_workflow"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("mypack.my_workflow"))
        .stdout(predicate::str::contains("My Workflow"))
        .stdout(predicate::str::contains("1.0.0"))
        .stdout(predicate::str::contains("test, demo"))
        // Tasks table should show task names
        .stdout(predicate::str::contains("step1"))
        .stdout(predicate::str::contains("step2"))
        .stdout(predicate::str::contains("core.echo"));
}
#[tokio::test]
async fn test_workflow_show_json_output() {
    // --json includes the full record, definition and all.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_get(&fixture.mock_server, "mypack.my_workflow").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--json", "workflow", "show", "mypack.my_workflow"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("\"mypack.my_workflow\""))
        .stdout(predicate::str::contains("\"My Workflow\""))
        .stdout(predicate::str::contains("\"definition\""));
}
#[tokio::test]
async fn test_workflow_show_not_found() {
    // A 404 from the API surfaces as a failing exit code.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_not_found(&fixture.mock_server, "/api/v1/workflows/nonexistent.wf").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "show", "nonexistent.wf"]);

    cmd.assert().failure();
}
// ── Delete tests ────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_delete_with_yes_flag() {
    // --yes skips the confirmation prompt and deletes immediately.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_delete(&fixture.mock_server, "mypack.my_workflow").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "delete", "mypack.my_workflow", "--yes"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("deleted successfully"));
}
#[tokio::test]
async fn test_workflow_delete_json_output() {
    // --json wraps the confirmation in a message object.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    mock_workflow_delete(&fixture.mock_server, "mypack.my_workflow").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--json", "workflow", "delete", "mypack.my_workflow", "--yes"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("\"message\""))
        .stdout(predicate::str::contains("deleted"));
}
// ── Upload tests ────────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_upload_success() {
    // Happy path: action + workflow YAML on disk, server accepts the save.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    let wf_fixture = WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
    mock_workflow_save(&fixture.mock_server, "mypack").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(&wf_fixture.action_yaml_path);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("uploaded successfully"))
        .stdout(predicate::str::contains("mypack.deploy"));
}
#[tokio::test]
async fn test_workflow_upload_json_output() {
    // --json echoes the created record's ref and label.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    let wf_fixture = WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
    mock_workflow_save(&fixture.mock_server, "mypack").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["--json", "workflow", "upload"])
        .arg(&wf_fixture.action_yaml_path);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("\"mypack.deploy\""))
        .stdout(predicate::str::contains("\"Deploy App\""));
}
#[tokio::test]
async fn test_workflow_upload_conflict_without_force() {
    // A 409 without --force fails and hints at the --force flag.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    let wf_fixture = WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
    mock_workflow_save_conflict(&fixture.mock_server, "mypack").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(&wf_fixture.action_yaml_path);

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("already exists"))
        .stderr(predicate::str::contains("--force"));
}
#[tokio::test]
async fn test_workflow_upload_conflict_with_force() {
    // With --force, a 409 on create falls back to the PUT update endpoint.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");
    let wf_fixture = WorkflowFixture::new("mypack.deploy", "workflows/deploy.workflow.yaml");
    mock_workflow_save_conflict(&fixture.mock_server, "mypack").await;
    mock_workflow_update(&fixture.mock_server, "mypack.deploy").await;

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(&wf_fixture.action_yaml_path)
        .arg("--force");

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("uploaded successfully"));
}
#[tokio::test]
async fn test_workflow_upload_missing_action_file() {
    // A nonexistent action YAML path fails with a "not found" error.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload", "/nonexistent/path/action.yaml"]);

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("not found"));
}
#[tokio::test]
async fn test_workflow_upload_missing_workflow_file() {
    // The action YAML references a workflow file that was never written.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let dir = tempfile::TempDir::new().unwrap();
    let actions_dir = dir.path().join("actions");
    fs::create_dir_all(&actions_dir).unwrap();
    let action_path = actions_dir.join("deploy.yaml");
    fs::write(
        &action_path,
        r#"ref: mypack.deploy
label: "Deploy App"
workflow_file: workflows/deploy.workflow.yaml
"#,
    )
    .unwrap();

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(action_path.to_string_lossy().as_ref());

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("Workflow file not found"));
}
#[tokio::test]
async fn test_workflow_upload_action_without_workflow_file_field() {
    // A regular (runner-based) action YAML lacks `workflow_file` → rejected.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let dir = tempfile::TempDir::new().unwrap();
    let actions_dir = dir.path().join("actions");
    fs::create_dir_all(&actions_dir).unwrap();
    let action_path = actions_dir.join("echo.yaml");
    fs::write(
        &action_path,
        r#"ref: mypack.echo
label: "Echo"
description: "A regular action, not a workflow"
runner_type: shell
entry_point: echo.sh
"#,
    )
    .unwrap();

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(action_path.to_string_lossy().as_ref());

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("workflow_file"));
}
#[tokio::test]
async fn test_workflow_upload_invalid_action_yaml() {
    // Syntactically broken YAML fails during parsing, before any API call.
    let fixture = TestFixture::new().await;
    fixture.write_authenticated_config("valid_token", "refresh_token");

    let dir = tempfile::TempDir::new().unwrap();
    let bad_yaml_path = dir.path().join("bad.yaml");
    fs::write(&bad_yaml_path, "this is not valid yaml: [[[").unwrap();

    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.env("HOME", fixture.config_dir_path())
        .env("XDG_CONFIG_HOME", fixture.config_dir_path())
        .arg("--api-url")
        .arg(fixture.server_url())
        .args(["workflow", "upload"])
        .arg(bad_yaml_path.to_string_lossy().as_ref());

    cmd.assert()
        .failure()
        .stderr(predicate::str::contains("Failed to parse action YAML"));
}
// ── Help text tests ─────────────────────────────────────────────────────
#[tokio::test]
async fn test_workflow_help() {
    // Top-level help lists every workflow subcommand.
    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.args(["workflow", "--help"]);

    let assert = cmd.assert().success();
    for subcommand in ["upload", "list", "show", "delete"] {
        assert = assert.stdout(predicate::str::contains(subcommand));
    }
}
#[tokio::test]
async fn test_workflow_upload_help() {
    // Upload help mentions the action argument, workflow_file, and --force.
    let mut cmd = Command::cargo_bin("attune").unwrap();
    cmd.args(["workflow", "upload", "--help"]);

    cmd.assert()
        .success()
        .stdout(predicate::str::contains("action"))
        .stdout(predicate::str::contains("workflow_file"))
        .stdout(predicate::str::contains("--force"));
}

View File

@@ -1052,6 +1052,14 @@ pub mod execution {
/// Task name within the workflow
pub task_name: String,
/// Name of the predecessor task whose completion triggered this task's
/// dispatch. `None` for entry-point tasks (dispatched at workflow
/// start). Used by the timeline UI to draw only the transitions that
/// actually fired rather than every possible transition from the
/// workflow definition.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub triggered_by: Option<String>,
/// Index for with-items iteration (0-based)
pub task_index: Option<i32>,

View File

@@ -7,19 +7,33 @@
//! Components are loaded in dependency order:
//! 1. Runtimes (no dependencies)
//! 2. Triggers (no dependencies)
//! 3. Actions (depend on runtime)
//! 3. Actions (depend on runtime; workflow actions also create workflow_definition records)
//! 4. Sensors (depend on triggers and runtime)
//!
//! All loaders use **upsert** semantics: if an entity with the same ref already
//! exists it is updated in place (preserving its database ID); otherwise a new
//! row is created. After loading, entities that belong to the pack but whose
//! refs are no longer present in the YAML files are deleted.
//!
//! ## Workflow Actions
//!
//! An action YAML may include a `workflow_file` field pointing to a workflow
//! definition file relative to the `actions/` directory (e.g.,
//! `workflow_file: workflows/deploy.workflow.yaml`). When present the loader:
//!
//! 1. Reads and parses the referenced workflow YAML file.
//! 2. Creates or updates a `workflow_definition` record in the database.
//! 3. Creates the action record with `workflow_def` linked to the definition.
//!
//! This allows the action YAML to control action-level metadata (ref, label,
//! parameters, policies) independently of the workflow graph. Multiple actions
//! can reference the same workflow file with different configurations.
use std::collections::HashMap;
use std::path::Path;
use sqlx::PgPool;
use tracing::{info, warn};
use tracing::{debug, info, warn};
use crate::error::{Error, Result};
use crate::models::Id;
@@ -32,8 +46,12 @@ use crate::repositories::trigger::{
CreateSensorInput, CreateTriggerInput, SensorRepository, TriggerRepository, UpdateSensorInput,
UpdateTriggerInput,
};
use crate::repositories::workflow::{
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
};
use crate::repositories::{Create, Delete, FindById, FindByRef, Update};
use crate::version_matching::extract_version_components;
use crate::workflow::parser::parse_workflow_yaml;
/// Result of loading pack components into the database.
#[derive(Debug, Default)]
@@ -588,6 +606,13 @@ impl<'a> PackComponentLoader<'a> {
/// Load action definitions from `pack_dir/actions/*.yaml`.
///
/// Returns the list of loaded action refs for cleanup.
///
/// When an action YAML contains a `workflow_file` field, the loader reads
/// the referenced workflow definition, creates/updates a
/// `workflow_definition` record, and links the action to it via the
/// `action.workflow_def` FK. This enables the action YAML to control
/// action-level metadata independently of the workflow graph, and allows
/// multiple actions to share the same workflow file.
async fn load_actions(
&self,
pack_dir: &Path,
@@ -636,19 +661,64 @@ impl<'a> PackComponentLoader<'a> {
.unwrap_or("")
.to_string();
let entrypoint = data
.get("entry_point")
// ── Workflow file handling ──────────────────────────────────
// If the action declares `workflow_file`, load the referenced
// workflow definition and link the action to it.
let workflow_file_field = data
.get("workflow_file")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
.map(|s| s.to_string());
// Resolve runtime ID from runner_type
let runner_type = data
.get("runner_type")
.and_then(|v| v.as_str())
.unwrap_or("shell");
let workflow_def_id: Option<Id> = if let Some(ref wf_path) = workflow_file_field {
match self
.load_workflow_for_action(
&actions_dir,
wf_path,
&action_ref,
&label,
&description,
&data,
)
.await
{
Ok(id) => Some(id),
Err(e) => {
let msg = format!(
"Failed to load workflow file '{}' for action '{}': {}",
wf_path, action_ref, e
);
warn!("{}", msg);
result.warnings.push(msg);
// Continue creating the action without workflow link
None
}
}
} else {
None
};
let runtime_id = self.resolve_runtime_id(runner_type).await?;
// For workflow actions the entrypoint is the workflow file path;
// for regular actions it comes from entry_point in the YAML.
let entrypoint = if let Some(ref wf_path) = workflow_file_field {
wf_path.clone()
} else {
data.get("entry_point")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string()
};
// Resolve runtime ID from runner_type (workflow actions have no
// runner_type and get runtime = None).
let runtime_id = if workflow_file_field.is_some() {
None
} else {
let runner_type = data
.get("runner_type")
.and_then(|v| v.as_str())
.unwrap_or("shell");
self.resolve_runtime_id(runner_type).await?
};
let param_schema = data
.get("parameters")
@@ -701,6 +771,19 @@ impl<'a> PackComponentLoader<'a> {
Ok(_) => {
info!("Updated action '{}' (ID: {})", action_ref, existing.id);
result.actions_updated += 1;
// Re-link workflow definition if present
if let Some(wf_id) = workflow_def_id {
if let Err(e) =
ActionRepository::link_workflow_def(self.pool, existing.id, wf_id)
.await
{
warn!(
"Failed to link workflow def {} to action '{}': {}",
wf_id, action_ref, e
);
}
}
}
Err(e) => {
let msg = format!("Failed to update action '{}': {}", action_ref, e);
@@ -745,8 +828,25 @@ impl<'a> PackComponentLoader<'a> {
match create_result {
Ok(id) => {
info!("Created action '{}' (ID: {})", action_ref, id);
loaded_refs.push(action_ref);
loaded_refs.push(action_ref.clone());
result.actions_loaded += 1;
// Link workflow definition if present
if let Some(wf_id) = workflow_def_id {
if let Err(e) =
ActionRepository::link_workflow_def(self.pool, id, wf_id).await
{
warn!(
"Failed to link workflow def {} to new action '{}': {}",
wf_id, action_ref, e
);
} else {
info!(
"Linked action '{}' (ID: {}) to workflow definition (ID: {})",
action_ref, id, wf_id
);
}
}
}
Err(e) => {
// Check for unique constraint violation (already exists race condition)
@@ -771,6 +871,146 @@ impl<'a> PackComponentLoader<'a> {
Ok(loaded_refs)
}
/// Load a workflow definition file referenced by an action's `workflow_file`
/// field and create/update the corresponding `workflow_definition` record.
///
/// The path in `workflow_file_path` is resolved relative to `actions_dir`.
/// The action YAML is authoritative for action-level metadata: `ref`,
/// `label`, `description`, and `tags` from the action fill any gaps the
/// workflow file leaves, and the action's `parameters`/`output` schemas
/// take precedence over the workflow file's own.
///
/// # Arguments
///
/// * `actions_dir` - directory containing the action YAML files; the
///   workflow file path is joined onto this.
/// * `workflow_file_path` - relative path from the action's `workflow_file` field.
/// * `action_ref` / `action_label` / `action_description` - metadata from
///   the action YAML, used as fallbacks when the workflow file omits them.
/// * `action_data` - the full parsed action YAML (for `tags`, `parameters`, `output`).
///
/// Returns the database ID of the created or updated workflow definition.
///
/// # Errors
///
/// Returns a validation error when the file is missing or the workflow
/// cannot be serialized, an I/O error when the file cannot be read, and
/// propagates any parser or repository errors.
async fn load_workflow_for_action(
    &self,
    actions_dir: &Path,
    workflow_file_path: &str,
    action_ref: &str,
    action_label: &str,
    action_description: &str,
    action_data: &serde_yaml_ng::Value,
) -> Result<Id> {
    let full_path = actions_dir.join(workflow_file_path);
    if !full_path.exists() {
        return Err(Error::validation(format!(
            "Workflow file '{}' not found at '{}'",
            workflow_file_path,
            full_path.display()
        )));
    }

    let content = std::fs::read_to_string(&full_path).map_err(|e| {
        Error::io(format!(
            "Failed to read workflow file '{}': {}",
            full_path.display(),
            e
        ))
    })?;

    let mut workflow_yaml = parse_workflow_yaml(&content)?;

    // The action YAML is authoritative for action-level metadata.
    // Fill in ref/label/description/tags from the action when the
    // workflow file omits them (action-linked workflow files should
    // contain only the execution graph).
    if workflow_yaml.r#ref.is_empty() {
        workflow_yaml.r#ref = action_ref.to_string();
    }
    if workflow_yaml.label.is_empty() {
        workflow_yaml.label = action_label.to_string();
    }
    // NOTE(review): when the action description is empty this still
    // stores Some("") rather than None — confirm that downstream
    // consumers treat the two the same.
    if workflow_yaml.description.is_none() {
        workflow_yaml.description = Some(action_description.to_string());
    }
    if workflow_yaml.tags.is_empty() {
        if let Some(tags_val) = action_data.get("tags") {
            if let Some(tags_seq) = tags_val.as_sequence() {
                // Non-string entries in the tags sequence are silently dropped.
                workflow_yaml.tags = tags_seq
                    .iter()
                    .filter_map(|v| v.as_str().map(|s| s.to_string()))
                    .collect();
            }
        }
    }

    let workflow_ref = workflow_yaml.r#ref.clone();

    // The action YAML is authoritative for param_schema / out_schema.
    // Fall back to the workflow file's own schemas only if the action
    // YAML doesn't define them.
    let param_schema = action_data
        .get("parameters")
        .and_then(|v| serde_json::to_value(v).ok())
        .or_else(|| workflow_yaml.parameters.clone());
    let out_schema = action_data
        .get("output")
        .and_then(|v| serde_json::to_value(v).ok())
        .or_else(|| workflow_yaml.output.clone());

    // Serialize the (possibly back-filled) workflow for storage in the
    // definition JSON column.
    let definition_json = serde_json::to_value(&workflow_yaml)
        .map_err(|e| Error::validation(format!("Failed to serialize workflow: {}", e)))?;

    // Derive label/description for the DB record from the action YAML,
    // since it is authoritative. The workflow file values were already
    // used as fallback above when populating workflow_yaml.
    let label = workflow_yaml.label.clone();
    let description = workflow_yaml.description.clone();
    let tags = workflow_yaml.tags.clone();

    // Check if this workflow definition already exists
    if let Some(existing) =
        WorkflowDefinitionRepository::find_by_ref(self.pool, &workflow_ref).await?
    {
        debug!(
            "Updating existing workflow definition '{}' (ID: {})",
            workflow_ref, existing.id
        );

        // NOTE(review): the update path does not touch pack/pack_ref, so
        // if the same ref were ever loaded from a different pack the
        // original pack keeps ownership — confirm that is intended.
        let update_input = UpdateWorkflowDefinitionInput {
            label: Some(label),
            description,
            version: Some(workflow_yaml.version.clone()),
            param_schema,
            out_schema,
            definition: Some(definition_json),
            tags: Some(tags),
            enabled: Some(true),
        };

        WorkflowDefinitionRepository::update(self.pool, existing.id, update_input).await?;
        info!(
            "Updated workflow definition '{}' (ID: {}) for action '{}'",
            workflow_ref, existing.id, action_ref
        );
        Ok(existing.id)
    } else {
        debug!(
            "Creating new workflow definition '{}' for action '{}'",
            workflow_ref, action_ref
        );

        let create_input = CreateWorkflowDefinitionInput {
            r#ref: workflow_ref.clone(),
            pack: self.pack_id,
            pack_ref: self.pack_ref.clone(),
            label,
            description,
            version: workflow_yaml.version.clone(),
            param_schema,
            out_schema,
            definition: definition_json,
            tags,
            enabled: true,
        };

        let created = WorkflowDefinitionRepository::create(self.pool, create_input).await?;
        info!(
            "Created workflow definition '{}' (ID: {}) for action '{}'",
            workflow_ref, created.id, action_ref
        );
        Ok(created.id)
    }
}
/// Load sensor definitions from `pack_dir/sensors/*.yaml`.
///
/// Returns the list of loaded sensor refs for cleanup.

View File

@@ -115,12 +115,17 @@ pub fn validate_workflow_expressions(
match directive {
PublishDirective::Simple(map) => {
for (pk, pv) in map {
validate_template(
pv,
&format!("{task_loc} next[{ti}].publish.{pk}"),
&known_names,
&mut warnings,
);
// Only validate string values as templates;
// non-string literals (booleans, numbers, etc.)
// pass through unchanged and have no expressions.
if let Some(s) = pv.as_str() {
validate_template(
s,
&format!("{task_loc} next[{ti}].publish.{pk}"),
&known_names,
&mut warnings,
);
}
}
}
PublishDirective::Key(_) => { /* nothing to validate */ }
@@ -132,12 +137,16 @@ pub fn validate_workflow_expressions(
for directive in &task.publish {
if let PublishDirective::Simple(map) = directive {
for (pk, pv) in map {
validate_template(
pv,
&format!("{task_loc} publish.{pk}"),
&known_names,
&mut warnings,
);
// Only validate string values as templates;
// non-string literals pass through unchanged.
if let Some(s) = pv.as_str() {
validate_template(
s,
&format!("{task_loc} publish.{pk}"),
&known_names,
&mut warnings,
);
}
}
}
}
@@ -567,7 +576,7 @@ mod tests {
fn test_transition_publish_validated() {
let mut task = action_task("step1");
let mut publish_map = HashMap::new();
publish_map.insert("out".to_string(), "{{ unknown_thing }}".to_string());
publish_map.insert("out".to_string(), serde_json::Value::String("{{ unknown_thing }}".to_string()));
task.next = vec![super::super::parser::TaskTransition {
when: Some("{{ succeeded() }}".to_string()),
publish: vec![PublishDirective::Simple(publish_map)],

View File

@@ -109,32 +109,49 @@ impl WorkflowLoader {
}
/// Load all workflows from a specific pack
///
/// Scans two directories in order:
/// 1. `{pack_dir}/workflows/` — legacy/standalone workflow files
/// 2. `{pack_dir}/actions/workflows/` — visual-builder and action-linked workflow files
///
/// If the same workflow ref appears in both directories, the version from
/// `actions/workflows/` wins (it is scanned second and overwrites the map entry).
pub async fn load_pack_workflows(
&self,
pack_name: &str,
pack_dir: &Path,
) -> Result<HashMap<String, LoadedWorkflow>> {
let workflows_dir = pack_dir.join("workflows");
if !workflows_dir.exists() {
debug!("No workflows directory in pack '{}'", pack_name);
return Ok(HashMap::new());
}
let workflow_files = self.scan_workflow_files(&workflows_dir, pack_name).await?;
let mut workflows = HashMap::new();
for file in workflow_files {
match self.load_workflow_file(&file).await {
Ok(loaded) => {
workflows.insert(loaded.file.ref_name.clone(), loaded);
}
Err(e) => {
warn!("Failed to load workflow '{}': {}", file.path.display(), e);
// Scan both workflow directories
let scan_dirs: Vec<std::path::PathBuf> = vec![
pack_dir.join("workflows"),
pack_dir.join("actions").join("workflows"),
];
for workflows_dir in &scan_dirs {
if !workflows_dir.exists() {
continue;
}
let workflow_files = self.scan_workflow_files(workflows_dir, pack_name).await?;
for file in workflow_files {
match self.load_workflow_file(&file).await {
Ok(loaded) => {
workflows.insert(loaded.file.ref_name.clone(), loaded);
}
Err(e) => {
warn!("Failed to load workflow '{}': {}", file.path.display(), e);
}
}
}
}
if workflows.is_empty() {
debug!("No workflows found in pack '{}'", pack_name);
}
Ok(workflows)
}
@@ -185,6 +202,10 @@ impl WorkflowLoader {
}
/// Reload a specific workflow by reference
///
/// Searches for the workflow file in both `workflows/` and
/// `actions/workflows/` directories, trying `.yaml`, `.yml`, and
/// `.workflow.yaml` extensions.
pub async fn reload_workflow(&self, ref_name: &str) -> Result<LoadedWorkflow> {
let parts: Vec<&str> = ref_name.split('.').collect();
if parts.len() != 2 {
@@ -198,36 +219,35 @@ impl WorkflowLoader {
let workflow_name = parts[1];
let pack_dir = self.config.packs_base_dir.join(pack_name);
let workflow_path = pack_dir
.join("workflows")
.join(format!("{}.yaml", workflow_name));
if !workflow_path.exists() {
// Try .yml extension
let workflow_path_yml = pack_dir
.join("workflows")
.join(format!("{}.yml", workflow_name));
if workflow_path_yml.exists() {
let file = WorkflowFile {
path: workflow_path_yml,
pack: pack_name.to_string(),
name: workflow_name.to_string(),
ref_name: ref_name.to_string(),
};
return self.load_workflow_file(&file).await;
// Candidate directories and filename patterns to search
let dirs = [
pack_dir.join("actions").join("workflows"),
pack_dir.join("workflows"),
];
let extensions = [
format!("{}.workflow.yaml", workflow_name),
format!("{}.yaml", workflow_name),
format!("{}.workflow.yml", workflow_name),
format!("{}.yml", workflow_name),
];
for dir in &dirs {
for filename in &extensions {
let candidate = dir.join(filename);
if candidate.exists() {
let file = WorkflowFile {
path: candidate,
pack: pack_name.to_string(),
name: workflow_name.to_string(),
ref_name: ref_name.to_string(),
};
return self.load_workflow_file(&file).await;
}
}
return Err(Error::not_found("workflow", "ref", ref_name));
}
let file = WorkflowFile {
path: workflow_path,
pack: pack_name.to_string(),
name: workflow_name.to_string(),
ref_name: ref_name.to_string(),
};
self.load_workflow_file(&file).await
Err(Error::not_found("workflow", "ref", ref_name))
}
/// Scan pack directories
@@ -259,6 +279,11 @@ impl WorkflowLoader {
}
/// Scan workflow files in a directory
///
/// Handles both `{name}.yaml` and `{name}.workflow.yaml` naming
/// conventions. For files with a `.workflow.yaml` suffix (produced by
/// the visual workflow builder), the `.workflow` portion is stripped
/// when deriving the workflow name and ref.
async fn scan_workflow_files(
&self,
workflows_dir: &Path,
@@ -278,7 +303,14 @@ impl WorkflowLoader {
if path.is_file() {
if let Some(ext) = path.extension() {
if ext == "yaml" || ext == "yml" {
if let Some(name) = path.file_stem().and_then(|n| n.to_str()) {
if let Some(raw_stem) = path.file_stem().and_then(|n| n.to_str()) {
// Strip `.workflow` suffix if present:
// "deploy.workflow.yaml" -> stem "deploy.workflow" -> name "deploy"
// "deploy.yaml" -> stem "deploy" -> name "deploy"
let name = raw_stem
.strip_suffix(".workflow")
.unwrap_or(raw_stem);
let ref_name = format!("{}.{}", pack_name, name);
workflow_files.push(WorkflowFile {
path: path.clone(),
@@ -475,4 +507,161 @@ tasks:
.to_string()
.contains("exceeds maximum size"));
}
/// Verify that `scan_workflow_files` strips the `.workflow` suffix from
/// filenames like `deploy.workflow.yaml`, yielding name `deploy` and
/// ref `pack.deploy` instead of `pack.deploy.workflow`.
#[tokio::test]
async fn test_scan_workflow_files_strips_workflow_suffix() {
    let tmp = TempDir::new().unwrap();
    let base_dir = tmp.path().to_path_buf();
    let scan_dir = base_dir.join("my_pack").join("actions").join("workflows");
    fs::create_dir_all(&scan_dir).await.unwrap();

    let yaml_body = r#"
ref: my_pack.deploy
label: Deploy
version: "1.0.0"
tasks:
  - name: step1
    action: core.noop
"#;
    fs::write(scan_dir.join("deploy.workflow.yaml"), yaml_body)
        .await
        .unwrap();

    let loader = WorkflowLoader::new(LoaderConfig {
        packs_base_dir: base_dir,
        skip_validation: true,
        max_file_size: 1024 * 1024,
    });

    let discovered = loader
        .scan_workflow_files(&scan_dir, "my_pack")
        .await
        .unwrap();

    // The `.workflow` portion of the stem must not leak into name or ref.
    assert_eq!(discovered.len(), 1);
    assert_eq!(discovered[0].name, "deploy");
    assert_eq!(discovered[0].ref_name, "my_pack.deploy");
}
/// Verify that `load_pack_workflows` discovers workflow files in both
/// `workflows/` (legacy) and `actions/workflows/` (visual builder)
/// directories, and that `actions/workflows/` wins on ref collision.
#[tokio::test]
async fn test_load_pack_workflows_scans_both_directories() {
    let tmp = TempDir::new().unwrap();
    let base_dir = tmp.path().to_path_buf();
    let pack_root = base_dir.join("dual_pack");

    // Legacy directory: workflows/
    let old_dir = pack_root.join("workflows");
    fs::create_dir_all(&old_dir).await.unwrap();

    // "alpha" exists in BOTH directories — the builder copy must win.
    let alpha_legacy = r#"
ref: dual_pack.alpha
label: Alpha (legacy)
version: "1.0.0"
tasks:
  - name: t1
    action: core.noop
"#;
    fs::write(old_dir.join("alpha.yaml"), alpha_legacy)
        .await
        .unwrap();

    // "beta" exists only in the legacy directory.
    let beta_only = r#"
ref: dual_pack.beta
label: Beta
version: "1.0.0"
tasks:
  - name: t1
    action: core.noop
"#;
    fs::write(old_dir.join("beta.yaml"), beta_only).await.unwrap();

    // Visual builder directory: actions/workflows/
    let new_dir = pack_root.join("actions").join("workflows");
    fs::create_dir_all(&new_dir).await.unwrap();

    let alpha_builder = r#"
ref: dual_pack.alpha
label: Alpha (builder)
version: "2.0.0"
tasks:
  - name: t1
    action: core.noop
"#;
    fs::write(new_dir.join("alpha.workflow.yaml"), alpha_builder)
        .await
        .unwrap();

    let loader = WorkflowLoader::new(LoaderConfig {
        packs_base_dir: base_dir,
        skip_validation: true,
        max_file_size: 1024 * 1024,
    });

    let loaded = loader
        .load_pack_workflows("dual_pack", &pack_root)
        .await
        .unwrap();

    // Both refs discovered across the two directories.
    assert_eq!(loaded.len(), 2);
    assert!(loaded.contains_key("dual_pack.alpha"));
    assert!(loaded.contains_key("dual_pack.beta"));

    // Alpha must be the actions/workflows/ copy (scanned second, overwrites).
    let alpha = &loaded["dual_pack.alpha"];
    assert_eq!(alpha.workflow.label, "Alpha (builder)");
    assert_eq!(alpha.workflow.version, "2.0.0");

    // Beta comes from the only place it exists: the legacy directory.
    let beta = &loaded["dual_pack.beta"];
    assert_eq!(beta.workflow.label, "Beta");
}
/// Verify that `reload_workflow` finds files in `actions/workflows/`
/// with the `.workflow.yaml` extension.
#[tokio::test]
async fn test_reload_workflow_finds_actions_workflows_dir() {
    let tmp = TempDir::new().unwrap();
    let base_dir = tmp.path().to_path_buf();
    let builder_dir = base_dir.join("rp").join("actions").join("workflows");
    fs::create_dir_all(&builder_dir).await.unwrap();

    let workflow_src = r#"
ref: rp.deploy
label: Deploy
version: "1.0.0"
tasks:
  - name: step1
    action: core.noop
"#;
    fs::write(builder_dir.join("deploy.workflow.yaml"), workflow_src)
        .await
        .unwrap();

    let loader = WorkflowLoader::new(LoaderConfig {
        packs_base_dir: base_dir,
        skip_validation: true,
        max_file_size: 1024 * 1024,
    });

    // Reload by ref must locate the `.workflow.yaml` file under
    // actions/workflows/ and derive the correct name/ref.
    let reloaded = loader.reload_workflow("rp.deploy").await.unwrap();
    assert_eq!(reloaded.workflow.r#ref, "rp.deploy");
    assert_eq!(reloaded.file.name, "deploy");
    assert_eq!(reloaded.file.ref_name, "rp.deploy");
}
}

View File

@@ -78,14 +78,26 @@ impl From<ParseError> for crate::error::Error {
}
/// Complete workflow definition parsed from YAML
///
/// When loaded via an action's `workflow_file` field, the `ref` and `label`
/// fields are optional — the action YAML is authoritative for those values.
/// For standalone workflow files (in `workflows/`), they should be present.
#[derive(Debug, Clone, Serialize, Deserialize, Validate)]
pub struct WorkflowDefinition {
/// Unique reference (e.g., "my_pack.deploy_app")
#[validate(length(min = 1, max = 255))]
/// Unique reference (e.g., "my_pack.deploy_app").
///
/// Optional for action-linked workflow files (supplied by the action YAML).
/// Required for standalone workflow files.
#[serde(default)]
#[validate(length(max = 255))]
pub r#ref: String,
/// Human-readable label
#[validate(length(min = 1, max = 255))]
/// Human-readable label.
///
/// Optional for action-linked workflow files (supplied by the action YAML).
/// Required for standalone workflow files.
#[serde(default)]
#[validate(length(max = 255))]
pub label: String,
/// Optional description
@@ -412,11 +424,19 @@ pub enum TaskType {
}
/// Variable publishing directive
///
/// Publish directives map variable names to values. Values may be template
/// expressions (strings containing `{{ }}`), literal strings, or any other
/// JSON-compatible type (booleans, numbers, arrays, objects). Non-string
/// literals are preserved through the rendering pipeline so that, for example,
/// `validation_passed: true` publishes the boolean `true`, not the string
/// `"true"`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PublishDirective {
/// Simple key-value pair
Simple(HashMap<String, String>),
/// Key-value pair where the value can be any JSON-compatible type
/// (string template, boolean, number, array, object, null).
Simple(HashMap<String, serde_json::Value>),
/// Just a key (publishes entire result under that key)
Key(String),
}
@@ -1315,4 +1335,175 @@ tasks:
assert!(workflow.tasks[0].next[0].chart_meta.is_none());
assert!(workflow.tasks[0].next[1].chart_meta.is_none());
}
// -----------------------------------------------------------------------
// Action-linked workflow file (no ref/label)
// -----------------------------------------------------------------------

#[test]
fn test_parse_action_linked_workflow_without_ref_and_label() {
    // Action-linked workflow files (in actions/workflows/) omit ref and
    // label — those are supplied by the companion action YAML. The
    // parser must accept such files and default the fields to empty
    // strings.
    let yaml = r#"
version: 1.0.0
vars:
  counter: 0
tasks:
  - name: step1
    action: core.echo
    input:
      message: "hello"
    next:
      - when: "{{ succeeded() }}"
        do:
          - step2
  - name: step2
    action: core.echo
    input:
      message: "world"
output_map:
  result: "{{ task.step2.result }}"
"#;

    let parsed = parse_workflow_yaml(yaml);
    assert!(parsed.is_ok(), "Parse failed: {:?}", parsed.err());
    let wf = parsed.unwrap();

    // ref and label default to empty strings
    assert_eq!(wf.r#ref, "");
    assert_eq!(wf.label, "");

    // Graph fields are parsed normally
    assert_eq!(wf.version, "1.0.0");
    assert_eq!(wf.tasks.len(), 2);
    assert_eq!(wf.tasks[0].name, "step1");
    assert!(wf.vars.contains_key("counter"));
    assert!(wf.output_map.is_some());

    // No parameters or output schema (those come from the action YAML)
    assert!(wf.parameters.is_none());
    assert!(wf.output.is_none());
    assert!(wf.tags.is_empty());
}
#[test]
fn test_parse_standalone_workflow_still_works_with_ref_and_label() {
    // Standalone workflow files (in workflows/) still carry ref and label.
    // Verify they continue to parse correctly.
    let yaml = r#"
ref: mypack.deploy
label: Deploy Workflow
description: Deploys the application
version: 2.0.0
parameters:
  target:
    type: string
    required: true
tags:
  - deploy
  - production
tasks:
  - name: deploy
    action: core.run
    input:
      target: "{{ parameters.target }}"
"#;

    let parsed = parse_workflow_yaml(yaml);
    assert!(parsed.is_ok(), "Parse failed: {:?}", parsed.err());
    let wf = parsed.unwrap();

    // All declared metadata must survive the round trip.
    assert_eq!(wf.r#ref, "mypack.deploy");
    assert_eq!(wf.label, "Deploy Workflow");
    assert_eq!(wf.description.as_deref(), Some("Deploys the application"));
    assert_eq!(wf.version, "2.0.0");
    assert!(wf.parameters.is_some());
    assert_eq!(wf.tags, vec!["deploy", "production"]);
}
#[test]
fn test_typed_publish_values_in_transitions() {
    // Regression test: publish directive values that are booleans, numbers,
    // or null must parse successfully (not just strings). Previously
    // `PublishDirective::Simple(HashMap<String, String>)` rejected them.
    let yaml = r#"
ref: test.typed_publish
label: Typed Publish
version: 1.0.0
tasks:
  - name: validate
    action: core.echo
    next:
      - when: "{{ succeeded() }}"
        publish:
          - validation_passed: true
          - count: 42
          - ratio: 3.14
          - label: "hello"
          - template_val: "{{ result().data }}"
          - nothing: null
        do:
          - finalize
      - when: "{{ failed() }}"
        publish:
          - validation_passed: false
        do:
          - handle_error
  - name: finalize
    action: core.echo
  - name: handle_error
    action: core.echo
"#;

    let result = parse_workflow_yaml(yaml);
    assert!(result.is_ok(), "Parse failed: {:?}", result.err());
    let workflow = result.unwrap();

    let task = &workflow.tasks[0];
    assert_eq!(task.name, "validate");
    assert_eq!(task.next.len(), 2);

    // Success transition: 6 publish directives with mixed types
    let success_transition = &task.next[0];
    assert_eq!(success_transition.publish.len(), 6);

    // Collect every published key-value pair so we can assert that each
    // expected key is actually PRESENT with the right typed value. (A
    // key-by-key `if let` chain would silently skip a missing key.)
    let mut published = std::collections::HashMap::new();
    for directive in &success_transition.publish {
        if let PublishDirective::Simple(map) = directive {
            for (key, val) in map {
                published.insert(key.clone(), val.clone());
            }
        }
    }

    assert_eq!(
        published.get("validation_passed"),
        Some(&serde_json::Value::Bool(true)),
        "boolean true"
    );
    assert_eq!(published.get("count"), Some(&serde_json::json!(42)), "integer");
    assert_eq!(published.get("ratio"), Some(&serde_json::json!(3.14)), "float");
    assert_eq!(published.get("label"), Some(&serde_json::json!("hello")), "string");
    assert_eq!(
        published.get("template_val"),
        Some(&serde_json::json!("{{ result().data }}")),
        "template"
    );
    assert_eq!(published.get("nothing"), Some(&serde_json::Value::Null), "null");

    // Failure transition: boolean false
    let failure_transition = &task.next[1];
    assert_eq!(failure_transition.publish.len(), 1);
    if let PublishDirective::Simple(map) = &failure_transition.publish[0] {
        assert_eq!(
            map.get("validation_passed"),
            Some(&serde_json::Value::Bool(false))
        );
    } else {
        panic!("Expected Simple publish directive");
    }
}
}

View File

@@ -4,6 +4,11 @@
//! Workflows are stored in the `workflow_definition` table with their full YAML definition
//! as JSON. A companion action record is also created so that workflows appear in
//! action lists and the workflow builder's action palette.
//!
//! Standalone workflow files (in `workflows/`) carry their own `ref` and `label`.
//! Action-linked workflow files (in `actions/workflows/`, referenced via
//! `workflow_file`) may omit those fields — the registrar falls back to
//! `WorkflowFile.ref_name` / `WorkflowFile.name` derived from the filename.
use crate::error::{Error, Result};
use crate::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
@@ -61,6 +66,32 @@ impl WorkflowRegistrar {
Self { pool, options }
}
/// Resolve the effective ref for a workflow.
///
/// The ref declared in the workflow YAML wins; when it is empty
/// (action-linked workflow files omit it), the filename-derived
/// `WorkflowFile.ref_name` is used instead.
fn effective_ref(loaded: &LoadedWorkflow) -> String {
    match loaded.workflow.r#ref.as_str() {
        "" => loaded.file.ref_name.clone(),
        declared => declared.to_string(),
    }
}
/// Resolve the effective label for a workflow.
///
/// The label declared in the workflow YAML wins; when it is empty,
/// the human-readable filename stem (`WorkflowFile.name`) is used
/// instead.
fn effective_label(loaded: &LoadedWorkflow) -> String {
    match loaded.workflow.label.as_str() {
        "" => loaded.file.name.clone(),
        declared => declared.to_string(),
    }
}
/// Register a single workflow
pub async fn register_workflow(&self, loaded: &LoadedWorkflow) -> Result<RegistrationResult> {
debug!("Registering workflow: {}", loaded.file.ref_name);
@@ -91,6 +122,12 @@ impl WorkflowRegistrar {
warnings.push(err.clone());
}
// Resolve effective ref/label — prefer workflow YAML values, fall
// back to filename-derived values for action-linked workflow files
// that omit action-level metadata.
let effective_ref = Self::effective_ref(loaded);
let effective_label = Self::effective_label(loaded);
let (workflow_def_id, created) = if let Some(existing) = existing_workflow {
if !self.options.update_existing {
return Err(Error::already_exists(
@@ -102,7 +139,13 @@ impl WorkflowRegistrar {
info!("Updating existing workflow: {}", loaded.file.ref_name);
let workflow_def_id = self
.update_workflow(&existing.id, &loaded.workflow, &pack.r#ref)
.update_workflow(
&existing.id,
&loaded.workflow,
&pack.r#ref,
&effective_ref,
&effective_label,
)
.await?;
// Update or create the companion action record
@@ -112,6 +155,8 @@ impl WorkflowRegistrar {
pack.id,
&pack.r#ref,
&loaded.file.name,
&effective_ref,
&effective_label,
)
.await?;
@@ -119,7 +164,14 @@ impl WorkflowRegistrar {
} else {
info!("Creating new workflow: {}", loaded.file.ref_name);
let workflow_def_id = self
.create_workflow(&loaded.workflow, &loaded.file.pack, pack.id, &pack.r#ref)
.create_workflow(
&loaded.workflow,
&loaded.file.pack,
pack.id,
&pack.r#ref,
&effective_ref,
&effective_label,
)
.await?;
// Create a companion action record so the workflow appears in action lists
@@ -129,6 +181,8 @@ impl WorkflowRegistrar {
pack.id,
&pack.r#ref,
&loaded.file.name,
&effective_ref,
&effective_label,
)
.await?;
@@ -195,6 +249,9 @@ impl WorkflowRegistrar {
/// This ensures the workflow appears in action lists and the action palette
/// in the workflow builder. The action is linked to the workflow definition
/// via the `workflow_def` FK.
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn create_companion_action(
&self,
workflow_def_id: i64,
@@ -202,14 +259,16 @@ impl WorkflowRegistrar {
pack_id: i64,
pack_ref: &str,
workflow_name: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<()> {
let entrypoint = format!("workflows/{}.workflow.yaml", workflow_name);
let action_input = CreateActionInput {
r#ref: workflow.r#ref.clone(),
r#ref: effective_ref.to_string(),
pack: pack_id,
pack_ref: pack_ref.to_string(),
label: workflow.label.clone(),
label: effective_label.to_string(),
description: workflow.description.clone().unwrap_or_default(),
entrypoint,
runtime: None,
@@ -226,7 +285,7 @@ impl WorkflowRegistrar {
info!(
"Created companion action '{}' (ID: {}) for workflow definition (ID: {})",
workflow.r#ref, action.id, workflow_def_id
effective_ref, action.id, workflow_def_id
);
Ok(())
@@ -236,6 +295,9 @@ impl WorkflowRegistrar {
///
/// If the action already exists, update it. If it doesn't exist (e.g., for
/// workflows registered before the companion-action fix), create it.
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn ensure_companion_action(
&self,
workflow_def_id: i64,
@@ -243,6 +305,8 @@ impl WorkflowRegistrar {
pack_id: i64,
pack_ref: &str,
workflow_name: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<()> {
let existing_action =
ActionRepository::find_by_workflow_def(&self.pool, workflow_def_id).await?;
@@ -250,7 +314,7 @@ impl WorkflowRegistrar {
if let Some(action) = existing_action {
// Update the existing companion action to stay in sync
let update_input = UpdateActionInput {
label: Some(workflow.label.clone()),
label: Some(effective_label.to_string()),
description: workflow.description.clone(),
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
runtime: None,
@@ -276,6 +340,8 @@ impl WorkflowRegistrar {
pack_id,
pack_ref,
workflow_name,
effective_ref,
effective_label,
)
.await?;
}
@@ -284,27 +350,32 @@ impl WorkflowRegistrar {
}
/// Create a new workflow definition
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn create_workflow(
&self,
workflow: &WorkflowYaml,
_pack_name: &str,
pack_id: i64,
pack_ref: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<i64> {
// Convert the parsed workflow back to JSON for storage
let definition = serde_json::to_value(workflow)
.map_err(|e| Error::validation(format!("Failed to serialize workflow: {}", e)))?;
let input = CreateWorkflowDefinitionInput {
r#ref: workflow.r#ref.clone(),
r#ref: effective_ref.to_string(),
pack: pack_id,
pack_ref: pack_ref.to_string(),
label: workflow.label.clone(),
label: effective_label.to_string(),
description: workflow.description.clone(),
version: workflow.version.clone(),
param_schema: workflow.parameters.clone(),
out_schema: workflow.output.clone(),
definition: definition,
definition,
tags: workflow.tags.clone(),
enabled: true,
};
@@ -315,18 +386,23 @@ impl WorkflowRegistrar {
}
/// Update an existing workflow definition
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn update_workflow(
&self,
workflow_id: &i64,
workflow: &WorkflowYaml,
_pack_ref: &str,
_effective_ref: &str,
effective_label: &str,
) -> Result<i64> {
// Convert the parsed workflow back to JSON for storage
let definition = serde_json::to_value(workflow)
.map_err(|e| Error::validation(format!("Failed to serialize workflow: {}", e)))?;
let input = UpdateWorkflowDefinitionInput {
label: Some(workflow.label.clone()),
label: Some(effective_label.to_string()),
description: workflow.description.clone(),
version: Some(workflow.version.clone()),
param_schema: workflow.parameters.clone(),

View File

@@ -42,27 +42,12 @@ use crate::workflow::graph::TaskGraph;
/// Extract workflow parameters from an execution's `config` field.
///
/// The config may be stored in two formats:
/// 1. Wrapped: `{"parameters": {"n": 5, ...}}` — used by child task executions
/// 2. Flat: `{"n": 5, ...}` — used by the API for manual executions
///
/// This helper checks for a `"parameters"` key first, and if absent treats
/// the entire config object as the parameters (matching the worker's logic
/// in `ActionExecutor::prepare_execution_context`).
/// All executions store config in flat format: `{"n": 5, ...}`.
/// The config object itself IS the parameters map.
fn extract_workflow_params(config: &Option<JsonValue>) -> JsonValue {
match config {
Some(c) => {
// Prefer the wrapped format if present
if let Some(params) = c.get("parameters") {
params.clone()
} else if c.is_object() {
// Flat format — the config itself is the parameters
c.clone()
} else {
serde_json::json!({})
}
}
None => serde_json::json!({}),
Some(c) if c.is_object() => c.clone(),
_ => serde_json::json!({}),
}
}
@@ -100,10 +85,7 @@ fn apply_param_defaults(params: JsonValue, param_schema: &Option<JsonValue>) ->
};
if needs_default {
if let Some(default_val) = prop.get("default") {
debug!(
"Applying default for parameter '{}': {}",
key, default_val
);
debug!("Applying default for parameter '{}': {}", key, default_val);
obj.insert(key.clone(), default_val.clone());
}
}
@@ -234,8 +216,25 @@ impl ExecutionScheduler {
worker.id, execution_id
);
// Apply parameter defaults from the action's param_schema.
// This mirrors what `process_workflow_execution` does for workflows
// so that non-workflow executions also get missing parameters filled
// in from the action's declared defaults.
let execution_config = {
let raw_config = execution.config.clone();
let params = extract_workflow_params(&raw_config);
let params_with_defaults = apply_param_defaults(params, &action.param_schema);
// Config is already flat — just use the defaults-applied version
if params_with_defaults.is_object()
&& !params_with_defaults.as_object().unwrap().is_empty()
{
Some(params_with_defaults)
} else {
raw_config
}
};
// Update execution status to scheduled
let execution_config = execution.config.clone();
let mut execution_for_update = execution;
execution_for_update.status = ExecutionStatus::Scheduled;
ExecutionRepository::update(pool, execution_for_update.id, execution_for_update.into())
@@ -391,6 +390,7 @@ impl ExecutionScheduler {
&workflow_execution.id,
task_node,
&wf_ctx,
None, // entry-point task — no predecessor
)
.await?;
} else {
@@ -407,6 +407,10 @@ impl ExecutionScheduler {
/// Create a child execution for a single workflow task and dispatch it to
/// a worker. The child execution references the parent workflow execution
/// via `workflow_task` metadata.
///
/// `triggered_by` is the name of the predecessor task whose completion
/// caused this task to be scheduled. Pass `None` for entry-point tasks
/// dispatched at workflow start.
async fn dispatch_workflow_task(
pool: &PgPool,
publisher: &Publisher,
@@ -415,6 +419,7 @@ impl ExecutionScheduler {
workflow_execution_id: &i64,
task_node: &crate::workflow::graph::TaskNode,
wf_ctx: &WorkflowContext,
triggered_by: Option<&str>,
) -> Result<()> {
let action_ref: String = match &task_node.action {
Some(a) => a.clone(),
@@ -461,6 +466,7 @@ impl ExecutionScheduler {
&action_ref,
with_items_expr,
wf_ctx,
triggered_by,
)
.await;
}
@@ -484,12 +490,12 @@ impl ExecutionScheduler {
task_node.input.clone()
};
// Build task config from the (rendered) input
// Build task config from the (rendered) input.
// Store as flat parameters (consistent with manual and rule-triggered
// executions) — no {"parameters": ...} wrapper.
let task_config: Option<JsonValue> =
if rendered_input.is_object() && !rendered_input.as_object().unwrap().is_empty() {
Some(serde_json::json!({
"parameters": rendered_input
}))
Some(rendered_input.clone())
} else if let Some(parent_config) = &parent_execution.config {
Some(parent_config.clone())
} else {
@@ -500,6 +506,7 @@ impl ExecutionScheduler {
let workflow_task = WorkflowTaskMetadata {
workflow_execution: *workflow_execution_id,
task_name: task_node.name.clone(),
triggered_by: triggered_by.map(String::from),
task_index: None,
task_batch: None,
retry_count: 0,
@@ -587,6 +594,7 @@ impl ExecutionScheduler {
action_ref: &str,
with_items_expr: &str,
wf_ctx: &WorkflowContext,
triggered_by: Option<&str>,
) -> Result<()> {
// Resolve the with_items expression to a JSON array
let items_value = wf_ctx
@@ -647,9 +655,11 @@ impl ExecutionScheduler {
task_node.input.clone()
};
// Store as flat parameters (consistent with manual and rule-triggered
// executions) — no {"parameters": ...} wrapper.
let task_config: Option<JsonValue> =
if rendered_input.is_object() && !rendered_input.as_object().unwrap().is_empty() {
Some(serde_json::json!({ "parameters": rendered_input }))
Some(rendered_input.clone())
} else if let Some(parent_config) = &parent_execution.config {
Some(parent_config.clone())
} else {
@@ -659,6 +669,7 @@ impl ExecutionScheduler {
let workflow_task = WorkflowTaskMetadata {
workflow_execution: *workflow_execution_id,
task_name: task_node.name.clone(),
triggered_by: triggered_by.map(String::from),
task_index: Some(index as i32),
task_batch: None,
retry_count: 0,
@@ -961,8 +972,7 @@ impl ExecutionScheduler {
.and_then(|n| n.concurrency)
.unwrap_or(1);
let free_slots =
concurrency_limit.saturating_sub(in_flight_count.0 as usize);
let free_slots = concurrency_limit.saturating_sub(in_flight_count.0 as usize);
if free_slots > 0 {
if let Err(e) = Self::publish_pending_with_items_children(
@@ -1009,6 +1019,39 @@ impl ExecutionScheduler {
return Ok(());
}
// ---------------------------------------------------------
// Race-condition guard: when multiple with_items children
// complete nearly simultaneously, the worker updates their
// DB status to Completed *before* the completion MQ message
// is processed. This means several advance_workflow calls
// (processed sequentially by the completion listener) can
// each see "0 siblings remaining" and fall through to
// transition evaluation, dispatching successor tasks
// multiple times.
//
// To prevent this we re-check the *persisted*
// completed/failed task lists that were loaded from the
// workflow_execution record at the top of this function.
// If `task_name` is already present, a previous
// advance_workflow invocation already handled the final
// completion of this with_items task and dispatched its
// successors — we can safely return.
// ---------------------------------------------------------
if workflow_execution
.completed_tasks
.contains(&task_name.to_string())
|| workflow_execution
.failed_tasks
.contains(&task_name.to_string())
{
debug!(
"with_items task '{}' already in persisted completed/failed list — \
another advance_workflow call already handled final completion, skipping",
task_name,
);
return Ok(());
}
// All items done — check if any failed
let any_failed: Vec<(i64,)> = sqlx::query_as(
"SELECT id \
@@ -1129,10 +1172,10 @@ impl ExecutionScheduler {
if should_fire {
// Process publish directives from this transition
if !transition.publish.is_empty() {
let publish_map: HashMap<String, String> = transition
let publish_map: HashMap<String, JsonValue> = transition
.publish
.iter()
.map(|p| (p.name.clone(), p.expression.clone()))
.map(|p| (p.name.clone(), p.value.clone()))
.collect();
if let Err(e) = wf_ctx.publish_from_result(
&serde_json::json!({}),
@@ -1161,6 +1204,41 @@ impl ExecutionScheduler {
continue;
}
// Guard against dispatching a task that has already
// been dispatched (exists as a child execution in
// this workflow). This catches edge cases where
// the persisted completed/failed lists haven't been
// updated yet but a child execution was already
// created by a prior advance_workflow call.
//
// This is critical for with_items predecessors:
// workers update DB status to Completed before the
// completion MQ message is processed, so multiple
// with_items items can each see "0 siblings
// remaining" and attempt to dispatch the same
// successor. The query covers both regular tasks
// (task_index IS NULL) and with_items tasks
// (task_index IS NOT NULL) so that neither case
// can be double-dispatched.
let already_dispatched: (i64,) = sqlx::query_as(
"SELECT COUNT(*) \
FROM execution \
WHERE workflow_task->>'workflow_execution' = $1::text \
AND workflow_task->>'task_name' = $2",
)
.bind(workflow_execution_id.to_string())
.bind(next_task_name.as_str())
.fetch_one(pool)
.await?;
if already_dispatched.0 > 0 {
debug!(
"Skipping task '{}' — already dispatched ({} existing execution(s))",
next_task_name, already_dispatched.0
);
continue;
}
// Check join barrier: if the task has a `join` count,
// only schedule it when enough predecessors are done.
if let Some(next_node) = graph.get_task(next_task_name) {
@@ -1210,6 +1288,7 @@ impl ExecutionScheduler {
&workflow_execution_id,
task_node,
&wf_ctx,
Some(task_name), // predecessor that triggered this task
)
.await
{
@@ -1716,19 +1795,8 @@ mod tests {
assert_eq!(free, 0);
}
#[test]
fn test_extract_workflow_params_wrapped_format() {
// Child task executions store config as {"parameters": {...}}
let config = Some(serde_json::json!({
"parameters": {"n": 5, "name": "test"}
}));
let params = extract_workflow_params(&config);
assert_eq!(params, serde_json::json!({"n": 5, "name": "test"}));
}
#[test]
fn test_extract_workflow_params_flat_format() {
// API manual executions store config as flat {"n": 5, ...}
let config = Some(serde_json::json!({"n": 5, "name": "test"}));
let params = extract_workflow_params(&config);
assert_eq!(params, serde_json::json!({"n": 5, "name": "test"}));
@@ -1742,7 +1810,6 @@ mod tests {
#[test]
fn test_extract_workflow_params_non_object() {
// Edge case: config is a non-object JSON value
let config = Some(serde_json::json!("not an object"));
let params = extract_workflow_params(&config);
assert_eq!(params, serde_json::json!({}));
@@ -1756,14 +1823,17 @@ mod tests {
}
#[test]
fn test_extract_workflow_params_wrapped_takes_precedence() {
// If config has a "parameters" key, that value is used even if
// the config object also has other top-level keys
fn test_extract_workflow_params_with_parameters_key() {
// A "parameters" key is just a regular parameter — not unwrapped
let config = Some(serde_json::json!({
"parameters": {"n": 5},
"context": {"rule": "test"}
}));
let params = extract_workflow_params(&config);
assert_eq!(params, serde_json::json!({"n": 5}));
// Returns the whole object as-is — "parameters" is treated as a normal key
assert_eq!(
params,
serde_json::json!({"parameters": {"n": 5}, "context": {"rule": "test"}})
);
}
}

View File

@@ -412,24 +412,26 @@ impl WorkflowContext {
/// Publish variables from a task result.
///
/// Each publish directive is a `(name, expression)` pair where the
/// expression is a template string like `"{{ result().data.items }}"`.
/// The expression is rendered with `render_json`-style type preservation
/// so that non-string values (arrays, numbers, booleans) keep their type.
/// Each publish directive is a `(name, value)` pair where the value is
/// any JSON-compatible type. String values are treated as template
/// expressions (e.g. `"{{ result().data.items }}"`) and rendered with
/// type preservation. Non-string values (booleans, numbers, arrays,
/// objects, null) pass through `render_json` unchanged, preserving
/// their original type.
pub fn publish_from_result(
&mut self,
result: &JsonValue,
publish_vars: &[String],
publish_map: Option<&HashMap<String, String>>,
publish_map: Option<&HashMap<String, JsonValue>>,
) -> ContextResult<()> {
// If publish map is provided, use it
if let Some(map) = publish_map {
for (var_name, template) in map {
// Use type-preserving rendering: if the entire template is a
// single expression like `{{ result().data.items }}`, preserve
// the underlying JsonValue type (e.g. an array stays an array).
let json_value = JsonValue::String(template.clone());
let value = self.render_json(&json_value)?;
for (var_name, json_value) in map {
// render_json handles all types: strings are template-rendered
// (with type preservation for pure `{{ }}` expressions), while
// booleans, numbers, arrays, objects, and null pass through
// unchanged.
let value = self.render_json(json_value)?;
self.set_var(var_name, value);
}
} else {
@@ -1095,7 +1097,7 @@ mod tests {
let mut publish_map = HashMap::new();
publish_map.insert(
"number_list".to_string(),
"{{ result().data.items }}".to_string(),
JsonValue::String("{{ result().data.items }}".to_string()),
);
ctx.publish_from_result(&json!({}), &[], Some(&publish_map))
@@ -1117,6 +1119,52 @@ mod tests {
assert_eq!(ctx.get_var("my_var").unwrap(), result);
}
#[test]
fn test_publish_typed_values() {
// Non-string publish values (booleans, numbers, null) should pass
// through render_json unchanged and be stored with their original type.
let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
ctx.set_last_task_outcome(json!({"status": "ok"}), TaskOutcome::Succeeded);
let mut publish_map = HashMap::new();
publish_map.insert("flag".to_string(), JsonValue::Bool(true));
publish_map.insert("count".to_string(), json!(42));
publish_map.insert("ratio".to_string(), json!(3.14));
publish_map.insert("nothing".to_string(), JsonValue::Null);
publish_map.insert(
"template".to_string(),
JsonValue::String("{{ result().status }}".to_string()),
);
publish_map.insert(
"plain_str".to_string(),
JsonValue::String("hello".to_string()),
);
ctx.publish_from_result(&json!({}), &[], Some(&publish_map))
.unwrap();
// Boolean preserved as boolean (not string "true")
assert_eq!(ctx.get_var("flag").unwrap(), json!(true));
assert!(ctx.get_var("flag").unwrap().is_boolean());
// Integer preserved
assert_eq!(ctx.get_var("count").unwrap(), json!(42));
assert!(ctx.get_var("count").unwrap().is_number());
// Float preserved
assert_eq!(ctx.get_var("ratio").unwrap(), json!(3.14));
// Null preserved
assert_eq!(ctx.get_var("nothing").unwrap(), json!(null));
assert!(ctx.get_var("nothing").unwrap().is_null());
// Template expression rendered with type preservation
assert_eq!(ctx.get_var("template").unwrap(), json!("ok"));
// Plain string stays as string
assert_eq!(ctx.get_var("plain_str").unwrap(), json!("hello"));
}
#[test]
fn test_published_var_accessible_via_workflow_namespace() {
let mut ctx = WorkflowContext::new(json!({}), HashMap::new());

View File

@@ -11,6 +11,7 @@
//! - `do` — next tasks to invoke when the condition is met
use attune_common::workflow::{Task, TaskType, WorkflowDefinition};
use serde_json::Value as JsonValue;
use std::collections::{HashMap, HashSet};
/// Result type for graph operations
@@ -101,11 +102,23 @@ pub struct GraphTransition {
pub do_tasks: Vec<String>,
}
/// A single publish variable (key = expression)
/// A single publish variable (key = value).
///
/// The `value` field holds either a template expression (as a `JsonValue::String`
/// containing `{{ }}`), a literal string, or any other JSON-compatible type
/// (boolean, number, array, object, null). The workflow context's `render_json`
/// method handles all of these: strings are template-rendered (with type
/// preservation for pure expressions), while non-string values pass through
/// unchanged.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PublishVar {
pub name: String,
pub expression: String,
/// The publish value — may be a template string, literal boolean, number,
/// array, object, or null. Renamed from `expression` (which only supported
/// strings); the serde alias ensures existing serialized task graphs that
/// use the old field name still deserialize correctly.
#[serde(alias = "expression")]
pub value: JsonValue,
}
/// Retry configuration
@@ -463,14 +476,14 @@ fn extract_publish_vars(publish: &[attune_common::workflow::PublishDirective]) -
for (key, value) in map {
vars.push(PublishVar {
name: key.clone(),
expression: value.clone(),
value: value.clone(),
});
}
}
PublishDirective::Key(key) => {
vars.push(PublishVar {
name: key.clone(),
expression: "{{ result() }}".to_string(),
value: JsonValue::String("{{ result() }}".to_string()),
});
}
}
@@ -678,7 +691,7 @@ tasks:
assert_eq!(transitions.len(), 1);
assert_eq!(transitions[0].publish.len(), 1);
assert_eq!(transitions[0].publish[0].name, "msg");
assert_eq!(transitions[0].publish[0].expression, "task1 done");
assert_eq!(transitions[0].publish[0].value, JsonValue::String("task1 done".to_string()));
}
#[test]
@@ -932,4 +945,82 @@ tasks:
assert!(next.contains(&"failure_task".to_string()));
assert!(next.contains(&"always_task".to_string()));
}
#[test]
fn test_typed_publish_values() {
// Verify that non-string publish values (booleans, numbers, null)
// are preserved through parsing and graph construction.
let yaml = r#"
ref: test.typed_publish
label: Typed Publish Test
version: 1.0.0
tasks:
- name: task1
action: core.echo
next:
- when: "{{ succeeded() }}"
publish:
- validation_passed: true
- count: 42
- ratio: 3.14
- label: "hello"
- template_val: "{{ result().data }}"
- nothing: null
do:
- task2
- when: "{{ failed() }}"
publish:
- validation_passed: false
do:
- task2
- name: task2
action: core.echo
"#;
let workflow = workflow::parse_workflow_yaml(yaml).unwrap();
let graph = TaskGraph::from_workflow(&workflow).unwrap();
let task1 = graph.get_task("task1").unwrap();
assert_eq!(task1.transitions.len(), 2);
// Success transition should have 6 publish vars
let success_publish = &task1.transitions[0].publish;
assert_eq!(success_publish.len(), 6);
// Build a lookup map for easier assertions
let publish_map: HashMap<&str, &JsonValue> = success_publish
.iter()
.map(|p| (p.name.as_str(), &p.value))
.collect();
// Boolean true is preserved as a JSON boolean
assert_eq!(publish_map["validation_passed"], &JsonValue::Bool(true));
// Integer is preserved as a JSON number
assert_eq!(publish_map["count"], &serde_json::json!(42));
// Float is preserved as a JSON number
assert_eq!(publish_map["ratio"], &serde_json::json!(3.14));
// Plain string stays as a string
assert_eq!(
publish_map["label"],
&JsonValue::String("hello".to_string())
);
// Template expression stays as a string (rendered later by context)
assert_eq!(
publish_map["template_val"],
&JsonValue::String("{{ result().data }}".to_string())
);
// Null is preserved
assert_eq!(publish_map["nothing"], &JsonValue::Null);
// Failure transition should have boolean false
let failure_publish = &task1.transitions[1].publish;
assert_eq!(failure_publish.len(), 1);
assert_eq!(failure_publish[0].name, "validation_passed");
assert_eq!(failure_publish[0].value, JsonValue::Bool(false));
}
}

View File

@@ -162,11 +162,16 @@ pub enum TaskType {
}
/// Variable publishing directive
///
/// Values may be template expressions (strings containing `{{ }}`), literal
/// strings, or any other JSON-compatible type (booleans, numbers, arrays,
/// objects). Non-string literals are preserved through the rendering pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PublishDirective {
/// Simple key-value pair
Simple(HashMap<String, String>),
/// Key-value pair where the value can be any JSON-compatible type
/// (string template, boolean, number, array, object, null).
Simple(HashMap<String, serde_json::Value>),
/// Just a key (publishes entire result under that key)
Key(String),
}

View File

@@ -4,6 +4,11 @@
//! Workflows are stored in the `workflow_definition` table with their full YAML definition
//! as JSON. A companion action record is also created so that workflows appear in
//! action lists and the workflow builder's action palette.
//!
//! Standalone workflow files (in `workflows/`) carry their own `ref` and `label`.
//! Action-linked workflow files (in `actions/workflows/`, referenced via
//! `workflow_file`) may omit those fields — the registrar falls back to
//! `WorkflowFile.ref_name` / `WorkflowFile.name` derived from the filename.
use attune_common::error::{Error, Result};
use attune_common::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
@@ -63,6 +68,32 @@ impl WorkflowRegistrar {
Self { pool, options }
}
/// Resolve the effective ref for a workflow.
///
/// Prefers the value declared in the YAML; falls back to the
/// `WorkflowFile.ref_name` derived from the filename when the YAML
/// omits it (action-linked workflow files).
fn effective_ref(loaded: &LoadedWorkflow) -> String {
if loaded.workflow.r#ref.is_empty() {
loaded.file.ref_name.clone()
} else {
loaded.workflow.r#ref.clone()
}
}
/// Resolve the effective label for a workflow.
///
/// Prefers the value declared in the YAML; falls back to the
/// `WorkflowFile.name` (human-readable filename stem) when the YAML
/// omits it.
fn effective_label(loaded: &LoadedWorkflow) -> String {
if loaded.workflow.label.is_empty() {
loaded.file.name.clone()
} else {
loaded.workflow.label.clone()
}
}
/// Register a single workflow
pub async fn register_workflow(&self, loaded: &LoadedWorkflow) -> Result<RegistrationResult> {
debug!("Registering workflow: {}", loaded.file.ref_name);
@@ -93,6 +124,12 @@ impl WorkflowRegistrar {
warnings.push(err.clone());
}
// Resolve effective ref/label — prefer workflow YAML values, fall
// back to filename-derived values for action-linked workflow files
// that omit action-level metadata.
let effective_ref = Self::effective_ref(loaded);
let effective_label = Self::effective_label(loaded);
let (workflow_def_id, created) = if let Some(existing) = existing_workflow {
if !self.options.update_existing {
return Err(Error::already_exists(
@@ -104,7 +141,13 @@ impl WorkflowRegistrar {
info!("Updating existing workflow: {}", loaded.file.ref_name);
let workflow_def_id = self
.update_workflow(&existing.id, &loaded.workflow, &pack.r#ref)
.update_workflow(
&existing.id,
&loaded.workflow,
&pack.r#ref,
&effective_ref,
&effective_label,
)
.await?;
// Update or create the companion action record
@@ -114,6 +157,8 @@ impl WorkflowRegistrar {
pack.id,
&pack.r#ref,
&loaded.file.name,
&effective_ref,
&effective_label,
)
.await?;
@@ -121,7 +166,14 @@ impl WorkflowRegistrar {
} else {
info!("Creating new workflow: {}", loaded.file.ref_name);
let workflow_def_id = self
.create_workflow(&loaded.workflow, &loaded.file.pack, pack.id, &pack.r#ref)
.create_workflow(
&loaded.workflow,
&loaded.file.pack,
pack.id,
&pack.r#ref,
&effective_ref,
&effective_label,
)
.await?;
// Create a companion action record so the workflow appears in action lists
@@ -131,6 +183,8 @@ impl WorkflowRegistrar {
pack.id,
&pack.r#ref,
&loaded.file.name,
&effective_ref,
&effective_label,
)
.await?;
@@ -197,6 +251,9 @@ impl WorkflowRegistrar {
/// This ensures the workflow appears in action lists and the action palette
/// in the workflow builder. The action is linked to the workflow definition
/// via the `workflow_def` FK.
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn create_companion_action(
&self,
workflow_def_id: i64,
@@ -204,14 +261,16 @@ impl WorkflowRegistrar {
pack_id: i64,
pack_ref: &str,
workflow_name: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<()> {
let entrypoint = format!("workflows/{}.workflow.yaml", workflow_name);
let action_input = CreateActionInput {
r#ref: workflow.r#ref.clone(),
r#ref: effective_ref.to_string(),
pack: pack_id,
pack_ref: pack_ref.to_string(),
label: workflow.label.clone(),
label: effective_label.to_string(),
description: workflow.description.clone().unwrap_or_default(),
entrypoint,
runtime: None,
@@ -228,7 +287,7 @@ impl WorkflowRegistrar {
info!(
"Created companion action '{}' (ID: {}) for workflow definition (ID: {})",
workflow.r#ref, action.id, workflow_def_id
effective_ref, action.id, workflow_def_id
);
Ok(())
@@ -238,6 +297,9 @@ impl WorkflowRegistrar {
///
/// If the action already exists, update it. If it doesn't exist (e.g., for
/// workflows registered before the companion-action fix), create it.
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn ensure_companion_action(
&self,
workflow_def_id: i64,
@@ -245,6 +307,8 @@ impl WorkflowRegistrar {
pack_id: i64,
pack_ref: &str,
workflow_name: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<()> {
let existing_action =
ActionRepository::find_by_workflow_def(&self.pool, workflow_def_id).await?;
@@ -252,7 +316,7 @@ impl WorkflowRegistrar {
if let Some(action) = existing_action {
// Update the existing companion action to stay in sync
let update_input = UpdateActionInput {
label: Some(workflow.label.clone()),
label: Some(effective_label.to_string()),
description: workflow.description.clone(),
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
runtime: None,
@@ -278,6 +342,8 @@ impl WorkflowRegistrar {
pack_id,
pack_ref,
workflow_name,
effective_ref,
effective_label,
)
.await?;
}
@@ -286,27 +352,32 @@ impl WorkflowRegistrar {
}
/// Create a new workflow definition
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn create_workflow(
&self,
workflow: &WorkflowYaml,
_pack_name: &str,
pack_id: i64,
pack_ref: &str,
effective_ref: &str,
effective_label: &str,
) -> Result<i64> {
// Convert the parsed workflow back to JSON for storage
let definition = serde_json::to_value(workflow)
.map_err(|e| Error::validation(format!("Failed to serialize workflow: {}", e)))?;
let input = CreateWorkflowDefinitionInput {
r#ref: workflow.r#ref.clone(),
r#ref: effective_ref.to_string(),
pack: pack_id,
pack_ref: pack_ref.to_string(),
label: workflow.label.clone(),
label: effective_label.to_string(),
description: workflow.description.clone(),
version: workflow.version.clone(),
param_schema: workflow.parameters.clone(),
out_schema: workflow.output.clone(),
definition: definition,
definition,
tags: workflow.tags.clone(),
enabled: true,
};
@@ -317,18 +388,23 @@ impl WorkflowRegistrar {
}
/// Update an existing workflow definition
///
/// `effective_ref` and `effective_label` are the resolved values (which may
/// have been derived from the filename when the workflow YAML omits them).
async fn update_workflow(
&self,
workflow_id: &i64,
workflow: &WorkflowYaml,
_pack_ref: &str,
_effective_ref: &str,
effective_label: &str,
) -> Result<i64> {
// Convert the parsed workflow back to JSON for storage
let definition = serde_json::to_value(workflow)
.map_err(|e| Error::validation(format!("Failed to serialize workflow: {}", e)))?;
let input = UpdateWorkflowDefinitionInput {
label: Some(workflow.label.clone()),
label: Some(effective_label.to_string()),
description: workflow.description.clone(),
version: Some(workflow.version.clone()),
param_schema: workflow.parameters.clone(),

View File

@@ -257,41 +257,27 @@ impl ActionExecutor {
execution.id
);
// Extract parameters from execution config
// Extract parameters from execution config.
// Config is always stored in flat format: the config object itself
// is the parameters map (e.g. {"url": "...", "method": "GET"}).
let mut parameters = HashMap::new();
if let Some(config) = &execution.config {
info!("Execution config present: {:?}", config);
debug!("Execution config present: {:?}", config);
// Try to get parameters from config.parameters first
if let Some(params) = config.get("parameters") {
info!("Found config.parameters key");
if let JsonValue::Object(map) = params {
for (key, value) in map {
parameters.insert(key.clone(), value.clone());
}
}
} else if let JsonValue::Object(map) = config {
info!("No config.parameters key, treating entire config as parameters");
// If no parameters key, treat entire config as parameters
// (this handles rule action_params being placed at root level)
if let JsonValue::Object(map) = config {
for (key, value) in map {
// Skip special keys that aren't action parameters
if key != "context" && key != "env" {
info!("Adding parameter: {} = {:?}", key, value);
parameters.insert(key.clone(), value.clone());
} else {
info!("Skipping special key: {}", key);
}
debug!("Adding parameter: {} = {:?}", key, value);
parameters.insert(key.clone(), value.clone());
}
} else {
info!("Config is not an Object, cannot extract parameters");
}
} else {
info!("No execution config present");
debug!("No execution config present");
}
info!(
debug!(
"Extracted {} parameters: {:?}",
parameters.len(),
parameters

View File

@@ -56,19 +56,14 @@ pub async fn execute_streaming(
let mut error = None;
// Write parameters first if using stdin delivery.
// Skip empty/trivial content ("{}","","[]") to avoid polluting stdin
// before secrets — scripts that read secrets via readline() expect
// the secrets JSON as the first line.
let has_real_params = parameters_stdin
.map(|s| !matches!(s.trim(), "" | "{}" | "[]"))
.unwrap_or(false);
// When the caller provides parameters_stdin (i.e. the action uses
// stdin delivery), always write the content — even if it's "{}" —
// because the script expects to read valid JSON from stdin.
if let Some(params_data) = parameters_stdin {
if has_real_params {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
}