workflows: remove unused `enabled` flag end-to-end; halt workflow advancement on any cancellation state (workflow, parent, or child)
Some checks failed
CI / Rustfmt (push) Failing after 25s
CI / Clippy (push) Failing after 2m3s
CI / Cargo Audit & Deny (push) Successful in 33s
CI / Web Blocking Checks (push) Failing after 26s
CI / Security Blocking Checks (push) Successful in 8s
CI / Security Advisory Checks (push) Has been cancelled
CI / Web Advisory Checks (push) Has been cancelled
CI / Tests (push) Has been cancelled

This commit is contained in:
2026-03-11 11:21:28 -05:00
parent a7ed135af2
commit b5d6bb2243
25 changed files with 366 additions and 322 deletions

View File

@@ -49,9 +49,6 @@ pub struct SaveWorkflowFileRequest {
#[schema(example = json!(["deployment", "automation"]))]
pub tags: Option<Vec<String>>,
/// Whether the workflow is enabled
#[schema(example = true)]
pub enabled: Option<bool>,
}
/// Request DTO for creating a new workflow
@@ -97,9 +94,6 @@ pub struct CreateWorkflowRequest {
#[schema(example = json!(["incident", "slack", "approval"]))]
pub tags: Option<Vec<String>>,
/// Whether the workflow is enabled
#[schema(example = true)]
pub enabled: Option<bool>,
}
/// Request DTO for updating a workflow
@@ -135,9 +129,6 @@ pub struct UpdateWorkflowRequest {
#[schema(example = json!(["incident", "slack", "approval", "automation"]))]
pub tags: Option<Vec<String>>,
/// Whether the workflow is enabled
#[schema(example = true)]
pub enabled: Option<bool>,
}
/// Response DTO for workflow information
@@ -187,10 +178,6 @@ pub struct WorkflowResponse {
#[schema(example = json!(["incident", "slack", "approval"]))]
pub tags: Vec<String>,
/// Whether the workflow is enabled
#[schema(example = true)]
pub enabled: bool,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
@@ -231,10 +218,6 @@ pub struct WorkflowSummary {
#[schema(example = json!(["incident", "slack", "approval"]))]
pub tags: Vec<String>,
/// Whether the workflow is enabled
#[schema(example = true)]
pub enabled: bool,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
@@ -259,7 +242,6 @@ impl From<attune_common::models::workflow::WorkflowDefinition> for WorkflowRespo
out_schema: workflow.out_schema,
definition: workflow.definition,
tags: workflow.tags,
enabled: workflow.enabled,
created: workflow.created,
updated: workflow.updated,
}
@@ -277,7 +259,6 @@ impl From<attune_common::models::workflow::WorkflowDefinition> for WorkflowSumma
description: workflow.description,
version: workflow.version,
tags: workflow.tags,
enabled: workflow.enabled,
created: workflow.created,
updated: workflow.updated,
}
@@ -291,10 +272,6 @@ pub struct WorkflowSearchParams {
#[param(example = "incident,approval")]
pub tags: Option<String>,
/// Filter by enabled status
#[param(example = true)]
pub enabled: Option<bool>,
/// Search term for label/description (case-insensitive)
#[param(example = "incident")]
pub search: Option<String>,
@@ -320,7 +297,6 @@ mod tests {
out_schema: None,
definition: serde_json::json!({"tasks": []}),
tags: None,
enabled: None,
};
assert!(req.validate().is_err());
@@ -338,7 +314,6 @@ mod tests {
out_schema: None,
definition: serde_json::json!({"tasks": []}),
tags: Some(vec!["test".to_string()]),
enabled: Some(true),
};
assert!(req.validate().is_ok());
@@ -354,7 +329,6 @@ mod tests {
out_schema: None,
definition: None,
tags: None,
enabled: None,
};
// Should be valid even with all None values
@@ -365,7 +339,6 @@ mod tests {
fn test_workflow_search_params() {
let params = WorkflowSearchParams {
tags: Some("incident,approval".to_string()),
enabled: Some(true),
search: Some("response".to_string()),
pack_ref: Some("core".to_string()),
};

View File

@@ -66,7 +66,6 @@ pub async fn list_workflows(
let filters = WorkflowSearchFilters {
pack: None,
pack_ref: search_params.pack_ref.clone(),
enabled: search_params.enabled,
tags,
search: search_params.search.clone(),
limit: pagination.limit(),
@@ -113,7 +112,6 @@ pub async fn list_workflows_by_pack(
let filters = WorkflowSearchFilters {
pack: None,
pack_ref: Some(pack_ref),
enabled: None,
tags: None,
search: None,
limit: pagination.limit(),
@@ -208,7 +206,6 @@ pub async fn create_workflow(
out_schema: request.out_schema.clone(),
definition: request.definition,
tags: request.tags.clone().unwrap_or_default(),
enabled: request.enabled.unwrap_or(true),
};
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
@@ -275,7 +272,6 @@ pub async fn update_workflow(
out_schema: request.out_schema.clone(),
definition: request.definition,
tags: request.tags,
enabled: request.enabled,
};
let workflow =
@@ -408,7 +404,6 @@ pub async fn save_workflow_file(
out_schema: request.out_schema.clone(),
definition: definition_json,
tags: request.tags.clone().unwrap_or_default(),
enabled: request.enabled.unwrap_or(true),
};
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
@@ -489,7 +484,6 @@ pub async fn update_workflow_file(
out_schema: request.out_schema.clone(),
definition: Some(definition_json),
tags: request.tags,
enabled: request.enabled,
};
let workflow =
@@ -647,7 +641,6 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
lines.push(format!("description: \"{}\"", desc.replace('"', "\\\"")));
}
}
lines.push("enabled: true".to_string());
lines.push(format!(
"workflow_file: workflows/{}.workflow.yaml",
request.name

View File

@@ -551,7 +551,6 @@ pub async fn create_test_workflow(
]
}),
tags: vec!["test".to_string()],
enabled: true,
};
Ok(WorkflowDefinitionRepository::create(pool, input).await?)

View File

@@ -22,7 +22,6 @@ ref: {}.example_workflow
label: Example Workflow
description: A test workflow for integration testing
version: "1.0.0"
enabled: true
parameters:
message:
type: string
@@ -46,7 +45,6 @@ ref: {}.another_workflow
label: Another Workflow
description: Second test workflow
version: "1.0.0"
enabled: false
tasks:
- name: task1
action: core.noop

View File

@@ -46,8 +46,7 @@ async fn test_create_workflow_success() {
}
]
},
"tags": ["test", "automation"],
"enabled": true
"tags": ["test", "automation"]
}),
ctx.token(),
)
@@ -60,7 +59,6 @@ async fn test_create_workflow_success() {
assert_eq!(body["data"]["ref"], "test-pack.test_workflow");
assert_eq!(body["data"]["label"], "Test Workflow");
assert_eq!(body["data"]["version"], "1.0.0");
assert_eq!(body["data"]["enabled"], true);
assert!(body["data"]["tags"].as_array().unwrap().len() == 2);
}
@@ -85,7 +83,6 @@ async fn test_create_workflow_duplicate_ref() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec![],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -152,7 +149,6 @@ async fn test_get_workflow_by_ref() {
out_schema: None,
definition: json!({"tasks": [{"name": "task1"}]}),
tags: vec!["test".to_string()],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -206,7 +202,6 @@ async fn test_list_workflows() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec!["test".to_string()],
enabled: i % 2 == 1, // Odd ones enabled
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -256,7 +251,6 @@ async fn test_list_workflows_by_pack() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec![],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -275,7 +269,6 @@ async fn test_list_workflows_by_pack() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec![],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -308,14 +301,14 @@ async fn test_list_workflows_with_filters() {
let pack_name = unique_pack_name();
let pack = create_test_pack(&ctx.pool, &pack_name).await.unwrap();
// Create workflows with different tags and enabled status
// Create workflows with different tags
let workflows = vec![
("workflow1", vec!["incident", "approval"], true),
("workflow2", vec!["incident"], false),
("workflow3", vec!["automation"], true),
("workflow1", vec!["incident", "approval"]),
("workflow2", vec!["incident"]),
("workflow3", vec!["automation"]),
];
for (ref_name, tags, enabled) in workflows {
for (ref_name, tags) in workflows {
let input = CreateWorkflowDefinitionInput {
r#ref: format!("test-pack.{}", ref_name),
pack: pack.id,
@@ -327,24 +320,12 @@ async fn test_list_workflows_with_filters() {
out_schema: None,
definition: json!({"tasks": []}),
tags: tags.iter().map(|s| s.to_string()).collect(),
enabled,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
.unwrap();
}
// Filter by enabled (and pack_ref for isolation)
let response = ctx
.get(
&format!("/api/v1/workflows?enabled=true&pack_ref={}", pack_name),
ctx.token(),
)
.await
.unwrap();
let body: Value = response.json().await.unwrap();
assert_eq!(body["data"].as_array().unwrap().len(), 2);
// Filter by tag (and pack_ref for isolation)
let response = ctx
.get(
@@ -387,7 +368,6 @@ async fn test_update_workflow() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec!["test".to_string()],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await
@@ -400,8 +380,7 @@ async fn test_update_workflow() {
json!({
"label": "Updated Label",
"description": "Updated description",
"version": "1.1.0",
"enabled": false
"version": "1.1.0"
}),
ctx.token(),
)
@@ -414,7 +393,6 @@ async fn test_update_workflow() {
assert_eq!(body["data"]["label"], "Updated Label");
assert_eq!(body["data"]["description"], "Updated description");
assert_eq!(body["data"]["version"], "1.1.0");
assert_eq!(body["data"]["enabled"], false);
}
#[tokio::test]
@@ -455,7 +433,6 @@ async fn test_delete_workflow() {
out_schema: None,
definition: json!({"tasks": []}),
tags: vec![],
enabled: true,
};
WorkflowDefinitionRepository::create(&ctx.pool, input)
.await

View File

@@ -86,9 +86,6 @@ struct ActionYaml {
#[serde(default)]
tags: Option<Vec<String>>,
/// Whether the action is enabled
#[serde(default)]
enabled: Option<bool>,
}
// ── API DTOs ────────────────────────────────────────────────────────────
@@ -109,8 +106,6 @@ struct SaveWorkflowFileRequest {
out_schema: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
tags: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
enabled: Option<bool>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -127,7 +122,6 @@ struct WorkflowResponse {
out_schema: Option<serde_json::Value>,
definition: serde_json::Value,
tags: Vec<String>,
enabled: bool,
created: String,
updated: String,
}
@@ -142,7 +136,6 @@ struct WorkflowSummary {
description: Option<String>,
version: String,
tags: Vec<String>,
enabled: bool,
created: String,
updated: String,
}
@@ -281,7 +274,6 @@ async fn handle_upload(
param_schema: action.parameters.clone(),
out_schema: action.output.clone(),
tags: action.tags.clone(),
enabled: action.enabled,
};
// ── 6. Print progress ───────────────────────────────────────────────
@@ -357,7 +349,6 @@ async fn handle_upload(
response.tags.join(", ")
},
),
("Enabled", output::format_bool(response.enabled)),
]);
}
}
@@ -414,7 +405,6 @@ async fn handle_list(
"Pack",
"Label",
"Version",
"Enabled",
"Tags",
],
);
@@ -426,7 +416,6 @@ async fn handle_list(
wf.pack_ref.clone(),
output::truncate(&wf.label, 30),
wf.version.clone(),
output::format_bool(wf.enabled),
if wf.tags.is_empty() {
"-".to_string()
} else {
@@ -478,7 +467,6 @@ async fn handle_show(
.unwrap_or_else(|| "-".to_string()),
),
("Version", workflow.version.clone()),
("Enabled", output::format_bool(workflow.enabled)),
(
"Tags",
if workflow.tags.is_empty() {

View File

@@ -1385,7 +1385,6 @@ pub mod workflow {
pub out_schema: Option<JsonSchema>,
pub definition: JsonDict,
pub tags: Vec<String>,
pub enabled: bool,
pub created: DateTime<Utc>,
pub updated: DateTime<Utc>,
}

View File

@@ -1131,7 +1131,6 @@ impl<'a> PackComponentLoader<'a> {
out_schema,
definition: Some(definition_json),
tags: Some(tags),
enabled: Some(true),
};
WorkflowDefinitionRepository::update(self.pool, existing.id, update_input).await?;
@@ -1159,7 +1158,6 @@ impl<'a> PackComponentLoader<'a> {
out_schema,
definition: definition_json,
tags,
enabled: true,
};
let created = WorkflowDefinitionRepository::create(self.pool, create_input).await?;

View File

@@ -20,8 +20,6 @@ pub struct WorkflowSearchFilters {
pub pack: Option<Id>,
/// Filter by pack reference
pub pack_ref: Option<String>,
/// Filter by enabled status
pub enabled: Option<bool>,
/// Filter by tags (OR across tags — matches if any tag is present)
pub tags: Option<Vec<String>>,
/// Text search across label and description (case-insensitive substring)
@@ -62,7 +60,6 @@ pub struct CreateWorkflowDefinitionInput {
pub out_schema: Option<JsonSchema>,
pub definition: JsonDict,
pub tags: Vec<String>,
pub enabled: bool,
}
#[derive(Debug, Clone, Default)]
@@ -74,7 +71,6 @@ pub struct UpdateWorkflowDefinitionInput {
pub out_schema: Option<JsonSchema>,
pub definition: Option<JsonDict>,
pub tags: Option<Vec<String>>,
pub enabled: Option<bool>,
}
#[async_trait::async_trait]
@@ -84,7 +80,7 @@ impl FindById for WorkflowDefinitionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
WHERE id = $1"
)
@@ -102,7 +98,7 @@ impl FindByRef for WorkflowDefinitionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
WHERE ref = $1"
)
@@ -120,7 +116,7 @@ impl List for WorkflowDefinitionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
ORDER BY created DESC
LIMIT 1000"
@@ -141,9 +137,9 @@ impl Create for WorkflowDefinitionRepository {
{
sqlx::query_as::<_, WorkflowDefinition>(
"INSERT INTO workflow_definition
(ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated"
(ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated"
)
.bind(&input.r#ref)
.bind(input.pack)
@@ -155,7 +151,6 @@ impl Create for WorkflowDefinitionRepository {
.bind(&input.out_schema)
.bind(&input.definition)
.bind(&input.tags)
.bind(input.enabled)
.fetch_one(executor)
.await
.map_err(Into::into)
@@ -219,20 +214,12 @@ impl Update for WorkflowDefinitionRepository {
query.push("tags = ").push_bind(tags);
has_updates = true;
}
if let Some(enabled) = input.enabled {
if has_updates {
query.push(", ");
}
query.push("enabled = ").push_bind(enabled);
has_updates = true;
}
if !has_updates {
return Self::get_by_id(executor, id).await;
}
query.push(", updated = NOW() WHERE id = ").push_bind(id);
query.push(" RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated");
query.push(" RETURNING id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated");
query
.build_query_as::<WorkflowDefinition>()
@@ -269,7 +256,7 @@ impl WorkflowDefinitionRepository {
where
E: Executor<'e, Database = Postgres> + Copy + 'e,
{
let select_cols = "id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated";
let select_cols = "id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated";
let mut qb: QueryBuilder<'_, Postgres> =
QueryBuilder::new(format!("SELECT {select_cols} FROM workflow_definition"));
@@ -301,9 +288,6 @@ impl WorkflowDefinitionRepository {
if let Some(ref pack_ref) = filters.pack_ref {
push_condition!("pack_ref = ", pack_ref.clone());
}
if let Some(enabled) = filters.enabled {
push_condition!("enabled = ", enabled);
}
if let Some(ref tags) = filters.tags {
if !tags.is_empty() {
// Use PostgreSQL array overlap operator: tags && ARRAY[...]
@@ -359,7 +343,7 @@ impl WorkflowDefinitionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
WHERE pack = $1
ORDER BY label"
@@ -379,7 +363,7 @@ impl WorkflowDefinitionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
WHERE pack_ref = $1
ORDER BY label"
@@ -403,29 +387,13 @@ impl WorkflowDefinitionRepository {
Ok(result.0)
}
/// Find all enabled workflows
pub async fn find_enabled<'e, E>(executor: E) -> Result<Vec<WorkflowDefinition>>
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
FROM workflow_definition
WHERE enabled = true
ORDER BY label"
)
.fetch_all(executor)
.await
.map_err(Into::into)
}
/// Find workflows by tag
pub async fn find_by_tag<'e, E>(executor: E, tag: &str) -> Result<Vec<WorkflowDefinition>>
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, WorkflowDefinition>(
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, enabled, created, updated
"SELECT id, ref, pack, pack_ref, label, description, version, param_schema, out_schema, definition, tags, created, updated
FROM workflow_definition
WHERE $1 = ANY(tags)
ORDER BY label"

View File

@@ -379,7 +379,6 @@ impl WorkflowRegistrar {
out_schema: workflow.output.clone(),
definition,
tags: workflow.tags.clone(),
enabled: true,
};
let created = WorkflowDefinitionRepository::create(&self.pool, input).await?;
@@ -411,7 +410,6 @@ impl WorkflowRegistrar {
out_schema: workflow.output.clone(),
definition: Some(definition),
tags: Some(workflow.tags.clone()),
enabled: Some(true),
};
let updated = WorkflowDefinitionRepository::update(&self.pool, *workflow_id, input).await?;

View File

@@ -286,21 +286,6 @@ impl ExecutionScheduler {
)
})?;
if !workflow_def.enabled {
warn!(
"Workflow '{}' is disabled, failing execution {}",
workflow_def.r#ref, execution.id
);
let mut fail = execution.clone();
fail.status = ExecutionStatus::Failed;
fail.result = Some(serde_json::json!({
"error": format!("Workflow '{}' is disabled", workflow_def.r#ref),
"succeeded": false,
}));
ExecutionRepository::update(pool, fail.id, fail.into()).await?;
return Ok(());
}
// Parse workflow definition JSON into the strongly-typed struct
let definition: WorkflowDefinition =
serde_json::from_value(workflow_def.definition.clone()).map_err(|e| {
@@ -900,35 +885,61 @@ impl ExecutionScheduler {
return Ok(());
}
// Cancelled workflow: don't dispatch new tasks, but check whether all
// running children have now finished. When none remain, finalize the
// parent execution as Cancelled so it doesn't stay stuck in "Canceling".
if workflow_execution.status == ExecutionStatus::Cancelled {
let running = Self::count_running_workflow_children(
pool,
workflow_execution_id,
&workflow_execution.completed_tasks,
&workflow_execution.failed_tasks,
)
.await?;
if running == 0 {
info!(
"Cancelled workflow_execution {} has no more running children, \
finalizing parent execution {} as Cancelled",
workflow_execution_id, workflow_execution.execution
);
Self::finalize_cancelled_workflow(
pool,
let parent_execution = ExecutionRepository::find_by_id(pool, workflow_execution.execution)
.await?
.ok_or_else(|| {
anyhow::anyhow!(
"Parent execution {} not found for workflow_execution {}",
workflow_execution.execution,
workflow_execution_id
)
})?;
// Cancellation must be a hard stop for workflow orchestration. Once
// either the workflow record, the parent execution, or the completed
// child itself is in a cancellation state, do not evaluate transitions,
// release more with_items siblings, or dispatch any successor tasks.
if Self::should_halt_workflow_advancement(
workflow_execution.status,
parent_execution.status,
execution.status,
) {
if workflow_execution.status == ExecutionStatus::Cancelled {
let running = Self::count_running_workflow_children(
pool,
workflow_execution_id,
&workflow_execution.completed_tasks,
&workflow_execution.failed_tasks,
)
.await?;
if running == 0 {
info!(
"Cancelled workflow_execution {} has no more running children, \
finalizing parent execution {} as Cancelled",
workflow_execution_id, workflow_execution.execution
);
Self::finalize_cancelled_workflow(
pool,
workflow_execution.execution,
workflow_execution_id,
)
.await?;
} else {
debug!(
"Workflow_execution {} is cancelling/cancelled with {} running children, \
skipping advancement",
workflow_execution_id, running
);
}
} else {
debug!(
"Cancelled workflow_execution {} still has {} running children, \
waiting for them to finish",
workflow_execution_id, running
"Workflow_execution {} advancement halted due to cancellation state \
(workflow: {:?}, parent: {:?}, child: {:?})",
workflow_execution_id,
workflow_execution.status,
parent_execution.status,
execution.status
);
}
@@ -1116,17 +1127,6 @@ impl ExecutionScheduler {
}
}
// Load the parent execution for context
let parent_execution = ExecutionRepository::find_by_id(pool, workflow_execution.execution)
.await?
.ok_or_else(|| {
anyhow::anyhow!(
"Parent execution {} not found for workflow_execution {}",
workflow_execution.execution,
workflow_execution_id
)
})?;
// -----------------------------------------------------------------
// Rebuild the WorkflowContext from persisted state + completed task
// results so that successor task inputs can be rendered.
@@ -1414,6 +1414,23 @@ impl ExecutionScheduler {
Ok(count)
}
/// Decide whether workflow orchestration must stop advancing.
///
/// Advancement is halted when *any* of the three statuses — the workflow
/// execution record, the parent execution, or the completed child task —
/// is in a cancellation state (`Canceling` or `Cancelled`). Cancellation
/// is treated as a hard stop: no transitions are evaluated and no
/// successor tasks are dispatched once any participant is cancelling.
fn should_halt_workflow_advancement(
    workflow_status: ExecutionStatus,
    parent_status: ExecutionStatus,
    child_status: ExecutionStatus,
) -> bool {
    // A status counts as "cancellation" if it is either mid-cancel or done.
    let is_cancellation = |status: ExecutionStatus| {
        matches!(
            status,
            ExecutionStatus::Canceling | ExecutionStatus::Cancelled
        )
    };
    [workflow_status, parent_status, child_status]
        .into_iter()
        .any(is_cancellation)
}
/// Finalize a cancelled workflow by updating the parent `execution` record
/// to `Cancelled`. The `workflow_execution` record is already `Cancelled`
/// (set by `cancel_workflow_children`); this only touches the parent.
@@ -1918,4 +1935,28 @@ mod tests {
assert_eq!(update.status, Some(ExecutionStatus::Scheduled));
assert_eq!(update.worker, Some(99));
}
#[test]
fn test_workflow_advancement_halts_for_any_cancellation_state() {
    // A cancellation state (Canceling/Cancelled) in ANY of the three
    // positions — workflow, parent, or child — must halt advancement.
    let halting_cases = [
        (
            ExecutionStatus::Running,
            ExecutionStatus::Canceling,
            ExecutionStatus::Completed,
        ),
        (
            ExecutionStatus::Cancelled,
            ExecutionStatus::Running,
            ExecutionStatus::Failed,
        ),
        (
            ExecutionStatus::Running,
            ExecutionStatus::Running,
            ExecutionStatus::Cancelled,
        ),
    ];
    for (workflow, parent, child) in halting_cases {
        assert!(ExecutionScheduler::should_halt_workflow_advancement(
            workflow, parent, child
        ));
    }

    // No cancellation state anywhere: advancement proceeds.
    assert!(!ExecutionScheduler::should_halt_workflow_advancement(
        ExecutionStatus::Running,
        ExecutionStatus::Running,
        ExecutionStatus::Failed
    ));
}
}

View File

@@ -379,7 +379,6 @@ impl WorkflowRegistrar {
out_schema: workflow.output.clone(),
definition,
tags: workflow.tags.clone(),
enabled: true,
};
let created = WorkflowDefinitionRepository::create(&self.pool, input).await?;
@@ -411,7 +410,6 @@ impl WorkflowRegistrar {
out_schema: workflow.output.clone(),
definition: Some(definition),
tags: Some(workflow.tags.clone()),
enabled: Some(true),
};
let updated = WorkflowDefinitionRepository::update(&self.pool, *workflow_id, input).await?;