[WIP] Workflows

This commit is contained in:
2026-02-27 16:34:17 -06:00
parent 570c52e623
commit daeff10f18
96 changed files with 5889 additions and 2098 deletions

View File

@@ -137,6 +137,11 @@ pub struct ActionResponse {
#[schema(value_type = Object, nullable = true)]
pub out_schema: Option<JsonValue>,
/// Workflow definition ID (non-null if this action is a workflow)
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(example = 42, nullable = true)]
pub workflow_def: Option<i64>,
/// Whether this is an ad-hoc action (not from pack installation)
#[schema(example = false)]
pub is_adhoc: bool,
@@ -186,6 +191,11 @@ pub struct ActionSummary {
#[schema(example = ">=3.12", nullable = true)]
pub runtime_version_constraint: Option<String>,
/// Workflow definition ID (non-null if this action is a workflow)
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(example = 42, nullable = true)]
pub workflow_def: Option<i64>,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
@@ -210,6 +220,7 @@ impl From<attune_common::models::action::Action> for ActionResponse {
runtime_version_constraint: action.runtime_version_constraint,
param_schema: action.param_schema,
out_schema: action.out_schema,
workflow_def: action.workflow_def,
is_adhoc: action.is_adhoc,
created: action.created,
updated: action.updated,
@@ -229,6 +240,7 @@ impl From<attune_common::models::action::Action> for ActionSummary {
entrypoint: action.entrypoint,
runtime: action.runtime,
runtime_version_constraint: action.runtime_version_constraint,
workflow_def: action.workflow_def,
created: action.created,
updated: action.updated,
}

View File

@@ -53,10 +53,6 @@ pub struct EventResponse {
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
/// Last update timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub updated: DateTime<Utc>,
}
impl From<Event> for EventResponse {
@@ -72,7 +68,6 @@ impl From<Event> for EventResponse {
rule: event.rule,
rule_ref: event.rule_ref,
created: event.created,
updated: event.updated,
}
}
}
@@ -230,9 +225,9 @@ pub struct EnforcementResponse {
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
/// Last update timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub updated: DateTime<Utc>,
/// Timestamp when the enforcement was resolved (status changed from created to processed/disabled)
#[schema(example = "2024-01-13T10:30:01Z", nullable = true)]
pub resolved_at: Option<DateTime<Utc>>,
}
impl From<Enforcement> for EnforcementResponse {
@@ -249,7 +244,7 @@ impl From<Enforcement> for EnforcementResponse {
condition: enforcement.condition,
conditions: enforcement.conditions,
created: enforcement.created,
updated: enforcement.updated,
resolved_at: enforcement.resolved_at,
}
}
}

View File

@@ -6,6 +6,7 @@ use serde_json::Value as JsonValue;
use utoipa::{IntoParams, ToSchema};
use attune_common::models::enums::ExecutionStatus;
use attune_common::models::execution::WorkflowTaskMetadata;
/// Request DTO for creating a manual execution
#[derive(Debug, Clone, Deserialize, ToSchema)]
@@ -62,6 +63,11 @@ pub struct ExecutionResponse {
#[schema(value_type = Object, example = json!({"message_id": "1234567890.123456"}))]
pub result: Option<JsonValue>,
/// Workflow task metadata (only populated for workflow task executions)
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(value_type = Option<Object>, nullable = true)]
pub workflow_task: Option<WorkflowTaskMetadata>,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
@@ -102,6 +108,11 @@ pub struct ExecutionSummary {
#[schema(example = "core.timer")]
pub trigger_ref: Option<String>,
/// Workflow task metadata (only populated for workflow task executions)
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(value_type = Option<Object>, nullable = true)]
pub workflow_task: Option<WorkflowTaskMetadata>,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
pub created: DateTime<Utc>,
@@ -150,6 +161,12 @@ pub struct ExecutionQueryParams {
#[param(example = 1)]
pub parent: Option<i64>,
/// If true, only return top-level executions (those without a parent).
/// Useful for the "By Workflow" view where child tasks are loaded separately.
#[serde(default)]
#[param(example = false)]
pub top_level_only: Option<bool>,
/// Page number (for pagination)
#[serde(default = "default_page")]
#[param(example = 1, minimum = 1)]
@@ -190,6 +207,7 @@ impl From<attune_common::models::execution::Execution> for ExecutionResponse {
result: execution
.result
.map(|r| serde_json::to_value(r).unwrap_or(JsonValue::Null)),
workflow_task: execution.workflow_task,
created: execution.created,
updated: execution.updated,
}
@@ -207,6 +225,7 @@ impl From<attune_common::models::execution::Execution> for ExecutionSummary {
enforcement: execution.enforcement,
rule_ref: None, // Populated separately via enforcement lookup
trigger_ref: None, // Populated separately via enforcement lookup
workflow_task: execution.workflow_task,
created: execution.created,
updated: execution.updated,
}
@@ -256,6 +275,7 @@ mod tests {
action_ref: None,
enforcement: None,
parent: None,
top_level_only: None,
pack_name: None,
rule_ref: None,
trigger_ref: None,
@@ -274,6 +294,7 @@ mod tests {
action_ref: None,
enforcement: None,
parent: None,
top_level_only: None,
pack_name: None,
rule_ref: None,
trigger_ref: None,

View File

@@ -126,7 +126,7 @@ impl HistoryQueryParams {
/// Path parameter for the entity type segment.
#[derive(Debug, Clone, Deserialize, IntoParams)]
pub struct HistoryEntityTypePath {
/// Entity type: `execution`, `worker`, `enforcement`, or `event`
/// Entity type: `execution` or `worker`
pub entity_type: String,
}

View File

@@ -168,6 +168,10 @@ pub async fn list_executions(
filtered_executions.retain(|e| e.parent == Some(parent_id));
}
if query.top_level_only == Some(true) {
filtered_executions.retain(|e| e.parent.is_none());
}
if let Some(executor_id) = query.executor {
filtered_executions.retain(|e| e.executor == Some(executor_id));
}

View File

@@ -27,14 +27,14 @@ use crate::{
/// List history records for a given entity type.
///
/// Supported entity types: `execution`, `worker`, `enforcement`, `event`.
/// Supported entity types: `execution`, `worker`.
/// Returns a paginated list of change records ordered by time descending.
#[utoipa::path(
get,
path = "/api/v1/history/{entity_type}",
tag = "history",
params(
("entity_type" = String, Path, description = "Entity type: execution, worker, enforcement, or event"),
("entity_type" = String, Path, description = "Entity type: execution or worker"),
HistoryQueryParams,
),
responses(
@@ -127,56 +127,6 @@ pub async fn get_worker_history(
get_entity_history_by_id(&state, HistoryEntityType::Worker, id, query).await
}
/// Fetch the change history for a single enforcement.
///
/// Looks up every recorded change for the enforcement identified by `id`,
/// ordered by time descending (delegated to the shared helper).
#[utoipa::path(
    get,
    path = "/api/v1/enforcements/{id}/history",
    tag = "history",
    params(
        ("id" = i64, Path, description = "Enforcement ID"),
        HistoryQueryParams,
    ),
    responses(
        (status = 200, description = "History records for the enforcement", body = PaginatedResponse<HistoryRecordResponse>),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_enforcement_history(
    State(state): State<Arc<AppState>>,
    RequireAuth(_user): RequireAuth,
    Path(enforcement_id): Path<i64>,
    Query(params): Query<HistoryQueryParams>,
) -> ApiResult<impl IntoResponse> {
    // All per-entity history endpoints funnel through the same helper;
    // only the entity-type discriminant differs.
    get_entity_history_by_id(&state, HistoryEntityType::Enforcement, enforcement_id, params).await
}
/// Fetch the change history for a single event.
///
/// Looks up every recorded change for the event identified by `id`,
/// ordered by time descending (delegated to the shared helper).
#[utoipa::path(
    get,
    path = "/api/v1/events/{id}/history",
    tag = "history",
    params(
        ("id" = i64, Path, description = "Event ID"),
        HistoryQueryParams,
    ),
    responses(
        (status = 200, description = "History records for the event", body = PaginatedResponse<HistoryRecordResponse>),
    ),
    security(("bearer_auth" = []))
)]
pub async fn get_event_history(
    State(state): State<Arc<AppState>>,
    RequireAuth(_user): RequireAuth,
    Path(event_id): Path<i64>,
    Query(params): Query<HistoryQueryParams>,
) -> ApiResult<impl IntoResponse> {
    // Same shared lookup as the other entity-specific history handlers;
    // only the entity-type discriminant differs.
    get_entity_history_by_id(&state, HistoryEntityType::Event, event_id, params).await
}
// ---------------------------------------------------------------------------
// Shared helpers
// ---------------------------------------------------------------------------
@@ -231,8 +181,6 @@ async fn get_entity_history_by_id(
/// - `GET /history/:entity_type` — generic history query
/// - `GET /executions/:id/history` — execution-specific history
/// - `GET /workers/:id/history` — worker-specific history (note: currently no /workers base route exists)
/// - `GET /enforcements/:id/history` — enforcement-specific history
/// - `GET /events/:id/history` — event-specific history
pub fn routes() -> Router<Arc<AppState>> {
Router::new()
// Generic history endpoint
@@ -240,6 +188,4 @@ pub fn routes() -> Router<Arc<AppState>> {
// Entity-specific convenience endpoints
.route("/executions/{id}/history", get(get_execution_history))
.route("/workers/{id}/history", get(get_worker_history))
.route("/enforcements/{id}/history", get(get_enforcement_history))
.route("/events/{id}/history", get(get_event_history))
}

View File

@@ -601,8 +601,8 @@ async fn write_workflow_yaml(
/// Create a companion action record for a workflow definition.
///
/// This ensures the workflow appears in action lists and the action palette in the
/// workflow builder. The action is created with `is_workflow = true` and linked to
/// the workflow definition via the `workflow_def` FK.
/// workflow builder. The action is linked to the workflow definition via the
/// `workflow_def` FK.
async fn create_companion_action(
db: &sqlx::PgPool,
workflow_ref: &str,
@@ -643,7 +643,7 @@ async fn create_companion_action(
))
})?;
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
// Link the action to the workflow definition (sets workflow_def FK)
ActionRepository::link_workflow_def(db, action.id, workflow_def_id)
.await
.map_err(|e| {

View File

@@ -368,7 +368,6 @@ mod tests {
runtime_version_constraint: None,
param_schema: schema,
out_schema: None,
is_workflow: false,
workflow_def: None,
is_adhoc: false,
parameter_delivery: attune_common::models::ParameterDelivery::default(),

View File

@@ -120,23 +120,21 @@ async fn test_sse_stream_receives_execution_updates() -> Result<()> {
println!("Updating execution {} to 'running' status", execution_id);
// Update execution status - this should trigger PostgreSQL NOTIFY
let _ = sqlx::query(
"UPDATE execution SET status = 'running', start_time = NOW() WHERE id = $1",
)
.bind(execution_id)
.execute(&pool_clone)
.await;
let _ =
sqlx::query("UPDATE execution SET status = 'running', updated = NOW() WHERE id = $1")
.bind(execution_id)
.execute(&pool_clone)
.await;
println!("Update executed, waiting before setting to succeeded");
tokio::time::sleep(Duration::from_millis(500)).await;
// Update to succeeded
let _ = sqlx::query(
"UPDATE execution SET status = 'succeeded', end_time = NOW() WHERE id = $1",
)
.bind(execution_id)
.execute(&pool_clone)
.await;
let _ =
sqlx::query("UPDATE execution SET status = 'succeeded', updated = NOW() WHERE id = $1")
.bind(execution_id)
.execute(&pool_clone)
.await;
println!("Execution {} updated to 'succeeded'", execution_id);
});

View File

@@ -896,7 +896,6 @@ pub mod action {
pub runtime_version_constraint: Option<String>,
pub param_schema: Option<JsonSchema>,
pub out_schema: Option<JsonSchema>,
pub is_workflow: bool,
pub workflow_def: Option<Id>,
pub is_adhoc: bool,
#[sqlx(default)]
@@ -988,7 +987,6 @@ pub mod event {
pub source: Option<Id>,
pub source_ref: Option<String>,
pub created: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub rule: Option<Id>,
pub rule_ref: Option<String>,
}
@@ -1006,7 +1004,7 @@ pub mod event {
pub condition: EnforcementCondition,
pub conditions: JsonValue,
pub created: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub resolved_at: Option<DateTime<Utc>>,
}
}
@@ -1484,8 +1482,6 @@ pub mod entity_history {
pub enum HistoryEntityType {
Execution,
Worker,
Enforcement,
Event,
}
impl HistoryEntityType {
@@ -1494,8 +1490,6 @@ pub mod entity_history {
match self {
Self::Execution => "execution_history",
Self::Worker => "worker_history",
Self::Enforcement => "enforcement_history",
Self::Event => "event_history",
}
}
}
@@ -1505,8 +1499,6 @@ pub mod entity_history {
match self {
Self::Execution => write!(f, "execution"),
Self::Worker => write!(f, "worker"),
Self::Enforcement => write!(f, "enforcement"),
Self::Event => write!(f, "event"),
}
}
}
@@ -1518,10 +1510,8 @@ pub mod entity_history {
match s.to_lowercase().as_str() {
"execution" => Ok(Self::Execution),
"worker" => Ok(Self::Worker),
"enforcement" => Ok(Self::Enforcement),
"event" => Ok(Self::Event),
other => Err(format!(
"unknown history entity type '{}'; expected one of: execution, worker, enforcement, event",
"unknown history entity type '{}'; expected one of: execution, worker",
other
)),
}

View File

@@ -57,7 +57,7 @@ impl FindById for ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE id = $1
"#,
@@ -80,7 +80,7 @@ impl FindByRef for ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE ref = $1
"#,
@@ -103,7 +103,7 @@ impl List for ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
ORDER BY ref ASC
"#,
@@ -142,7 +142,7 @@ impl Create for ActionRepository {
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
"#,
)
.bind(&input.r#ref)
@@ -256,7 +256,7 @@ impl Update for ActionRepository {
query.push(", updated = NOW() WHERE id = ");
query.push_bind(id);
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, runtime_version_constraint, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated");
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, runtime_version_constraint, param_schema, out_schema, workflow_def, is_adhoc, created, updated");
let action = query
.build_query_as::<Action>()
@@ -296,7 +296,7 @@ impl ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE pack = $1
ORDER BY ref ASC
@@ -318,7 +318,7 @@ impl ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE runtime = $1
ORDER BY ref ASC
@@ -341,7 +341,7 @@ impl ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE LOWER(ref) LIKE $1 OR LOWER(label) LIKE $1 OR LOWER(description) LIKE $1
ORDER BY ref ASC
@@ -354,7 +354,7 @@ impl ActionRepository {
Ok(actions)
}
/// Find all workflow actions (actions where is_workflow = true)
/// Find all workflow actions (actions linked to a workflow definition)
pub async fn find_workflows<'e, E>(executor: E) -> Result<Vec<Action>>
where
E: Executor<'e, Database = Postgres> + 'e,
@@ -363,9 +363,9 @@ impl ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE is_workflow = true
WHERE workflow_def IS NOT NULL
ORDER BY ref ASC
"#,
)
@@ -387,7 +387,7 @@ impl ActionRepository {
r#"
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
FROM action
WHERE workflow_def = $1
"#,
@@ -411,11 +411,11 @@ impl ActionRepository {
let action = sqlx::query_as::<_, Action>(
r#"
UPDATE action
SET is_workflow = true, workflow_def = $2, updated = NOW()
SET workflow_def = $2, updated = NOW()
WHERE id = $1
RETURNING id, ref, pack, pack_ref, label, description, entrypoint,
runtime, runtime_version_constraint,
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
param_schema, out_schema, workflow_def, is_adhoc, created, updated
"#,
)
.bind(action_id)

View File

@@ -80,6 +80,19 @@ pub struct EnforcementVolumeBucket {
pub enforcement_count: i64,
}
/// A single hourly bucket of execution volume (from execution hypertable directly).
///
/// Rows are produced by the `execution_volume_hourly` queries below: the
/// all-actions variant emits a NULL `action_ref` and sums counts per bucket,
/// while the per-action variant carries the filtered action's ref through.
#[derive(Debug, Clone, Serialize, FromRow)]
pub struct ExecutionVolumeBucket {
    /// Start of the 1-hour bucket
    pub bucket: DateTime<Utc>,
    /// Action ref; NULL when grouped across all actions
    pub action_ref: Option<String>,
    /// The initial status at creation time
    pub initial_status: Option<String>,
    /// Number of executions created in this bucket
    pub execution_count: i64,
}
/// Aggregated failure rate over a time range.
#[derive(Debug, Clone, Serialize)]
pub struct FailureRateSummary {
@@ -454,6 +467,69 @@ impl AnalyticsRepository {
Ok(rows)
}
// =======================================================================
// Execution volume (from execution hypertable directly)
// =======================================================================
/// Read hourly execution-creation volume across all actions.
///
/// Queries the `execution_volume_hourly` continuous aggregate over the
/// given time range, summing counts over every action per bucket and
/// initial status. Returned buckets therefore carry a NULL `action_ref`.
pub async fn execution_volume_hourly<'e, E>(
    executor: E,
    range: &AnalyticsTimeRange,
) -> Result<Vec<ExecutionVolumeBucket>>
where
    E: Executor<'e, Database = Postgres> + 'e,
{
    // Collapse the per-action rows of the aggregate into one row per
    // (bucket, initial_status); the NULL action_ref marks the rollup.
    let sql = r#"
        SELECT
            bucket,
            NULL::text AS action_ref,
            initial_status::text AS initial_status,
            SUM(execution_count)::bigint AS execution_count
        FROM execution_volume_hourly
        WHERE bucket >= $1 AND bucket <= $2
        GROUP BY bucket, initial_status
        ORDER BY bucket ASC, initial_status
        "#;
    let buckets = sqlx::query_as::<_, ExecutionVolumeBucket>(sql)
        .bind(range.since)
        .bind(range.until)
        .fetch_all(executor)
        .await?;
    Ok(buckets)
}
/// Read hourly execution-creation volume for one specific action.
///
/// Queries the `execution_volume_hourly` continuous aggregate over the
/// given time range, filtered to rows whose `action_ref` matches the
/// supplied ref. No re-aggregation is needed since the aggregate already
/// holds one row per (bucket, action_ref, initial_status).
pub async fn execution_volume_hourly_by_action<'e, E>(
    executor: E,
    range: &AnalyticsTimeRange,
    action_ref: &str,
) -> Result<Vec<ExecutionVolumeBucket>>
where
    E: Executor<'e, Database = Postgres> + 'e,
{
    let sql = r#"
        SELECT
            bucket,
            action_ref,
            initial_status::text AS initial_status,
            execution_count
        FROM execution_volume_hourly
        WHERE bucket >= $1 AND bucket <= $2 AND action_ref = $3
        ORDER BY bucket ASC, initial_status
        "#;
    let buckets = sqlx::query_as::<_, ExecutionVolumeBucket>(sql)
        .bind(range.since)
        .bind(range.until)
        .bind(action_ref)
        .fetch_all(executor)
        .await?;
    Ok(buckets)
}
// =======================================================================
// Derived analytics
// =======================================================================

View File

@@ -263,11 +263,6 @@ mod tests {
"execution_history"
);
assert_eq!(HistoryEntityType::Worker.table_name(), "worker_history");
assert_eq!(
HistoryEntityType::Enforcement.table_name(),
"enforcement_history"
);
assert_eq!(HistoryEntityType::Event.table_name(), "event_history");
}
#[test]
@@ -280,14 +275,8 @@ mod tests {
"Worker".parse::<HistoryEntityType>().unwrap(),
HistoryEntityType::Worker
);
assert_eq!(
"ENFORCEMENT".parse::<HistoryEntityType>().unwrap(),
HistoryEntityType::Enforcement
);
assert_eq!(
"event".parse::<HistoryEntityType>().unwrap(),
HistoryEntityType::Event
);
assert!("enforcement".parse::<HistoryEntityType>().is_err());
assert!("event".parse::<HistoryEntityType>().is_err());
assert!("unknown".parse::<HistoryEntityType>().is_err());
}
@@ -295,7 +284,5 @@ mod tests {
fn test_history_entity_type_display() {
assert_eq!(HistoryEntityType::Execution.to_string(), "execution");
assert_eq!(HistoryEntityType::Worker.to_string(), "worker");
assert_eq!(HistoryEntityType::Enforcement.to_string(), "enforcement");
assert_eq!(HistoryEntityType::Event.to_string(), "event");
}
}

View File

@@ -1,6 +1,9 @@
//! Event and Enforcement repository for database operations
//!
//! This module provides CRUD operations and queries for Event and Enforcement entities.
//! Note: Events are immutable time-series data — there is no Update impl for EventRepository.
use chrono::{DateTime, Utc};
use crate::models::{
enums::{EnforcementCondition, EnforcementStatus},
@@ -36,13 +39,6 @@ pub struct CreateEventInput {
pub rule_ref: Option<String>,
}
/// Input for updating an event
///
/// Each field is optional: `None` means "leave the column unchanged"
/// (the Update impl only pushes SET clauses for `Some` fields).
#[derive(Debug, Clone, Default)]
pub struct UpdateEventInput {
    // New event config; None leaves `config` untouched
    pub config: Option<JsonDict>,
    // New event payload; None leaves `payload` untouched
    pub payload: Option<JsonDict>,
}
#[async_trait::async_trait]
impl FindById for EventRepository {
async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<Self::Entity>>
@@ -52,7 +48,7 @@ impl FindById for EventRepository {
let event = sqlx::query_as::<_, Event>(
r#"
SELECT id, trigger, trigger_ref, config, payload, source, source_ref,
rule, rule_ref, created, updated
rule, rule_ref, created
FROM event
WHERE id = $1
"#,
@@ -74,7 +70,7 @@ impl List for EventRepository {
let events = sqlx::query_as::<_, Event>(
r#"
SELECT id, trigger, trigger_ref, config, payload, source, source_ref,
rule, rule_ref, created, updated
rule, rule_ref, created
FROM event
ORDER BY created DESC
LIMIT 1000
@@ -100,7 +96,7 @@ impl Create for EventRepository {
INSERT INTO event (trigger, trigger_ref, config, payload, source, source_ref, rule, rule_ref)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
RETURNING id, trigger, trigger_ref, config, payload, source, source_ref,
rule, rule_ref, created, updated
rule, rule_ref, created
"#,
)
.bind(input.trigger)
@@ -118,49 +114,6 @@ impl Create for EventRepository {
}
}
#[async_trait::async_trait]
impl Update for EventRepository {
    type UpdateInput = UpdateEventInput;

    /// Partially update an event's `config` and/or `payload`.
    ///
    /// Builds the UPDATE statement dynamically so only the fields present
    /// in `input` are written. If no fields are set, the existing row is
    /// fetched and returned unchanged (no write occurs).
    async fn update<'e, E>(executor: E, id: i64, input: Self::UpdateInput) -> Result<Self::Entity>
    where
        E: Executor<'e, Database = Postgres> + 'e,
    {
        // Build update query
        let mut query = QueryBuilder::new("UPDATE event SET ");
        let mut has_updates = false;

        if let Some(config) = &input.config {
            query.push("config = ");
            query.push_bind(config);
            has_updates = true;
        }

        if let Some(payload) = &input.payload {
            // Separator is only needed when a prior SET clause was pushed.
            if has_updates {
                query.push(", ");
            }
            query.push("payload = ");
            query.push_bind(payload);
            has_updates = true;
        }

        if !has_updates {
            // No updates requested, fetch and return existing entity
            return Self::get_by_id(executor, id).await;
        }

        // The leading ", " is safe: has_updates guarantees at least one
        // SET clause precedes the `updated = NOW()` bump.
        query.push(", updated = NOW() WHERE id = ");
        query.push_bind(id);
        // RETURNING the full column list so the row maps onto `Event`.
        query.push(" RETURNING id, trigger, trigger_ref, config, payload, source, source_ref, rule, rule_ref, created, updated");

        let event = query.build_query_as::<Event>().fetch_one(executor).await?;
        Ok(event)
    }
}
#[async_trait::async_trait]
impl Delete for EventRepository {
async fn delete<'e, E>(executor: E, id: i64) -> Result<bool>
@@ -185,7 +138,7 @@ impl EventRepository {
let events = sqlx::query_as::<_, Event>(
r#"
SELECT id, trigger, trigger_ref, config, payload, source, source_ref,
rule, rule_ref, created, updated
rule, rule_ref, created
FROM event
WHERE trigger = $1
ORDER BY created DESC
@@ -207,7 +160,7 @@ impl EventRepository {
let events = sqlx::query_as::<_, Event>(
r#"
SELECT id, trigger, trigger_ref, config, payload, source, source_ref,
rule, rule_ref, created, updated
rule, rule_ref, created
FROM event
WHERE trigger_ref = $1
ORDER BY created DESC
@@ -256,6 +209,7 @@ pub struct CreateEnforcementInput {
pub struct UpdateEnforcementInput {
pub status: Option<EnforcementStatus>,
pub payload: Option<JsonDict>,
pub resolved_at: Option<DateTime<Utc>>,
}
#[async_trait::async_trait]
@@ -267,7 +221,7 @@ impl FindById for EnforcementRepository {
let enforcement = sqlx::query_as::<_, Enforcement>(
r#"
SELECT id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
FROM enforcement
WHERE id = $1
"#,
@@ -289,7 +243,7 @@ impl List for EnforcementRepository {
let enforcements = sqlx::query_as::<_, Enforcement>(
r#"
SELECT id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
FROM enforcement
ORDER BY created DESC
LIMIT 1000
@@ -316,7 +270,7 @@ impl Create for EnforcementRepository {
payload, condition, conditions)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
"#,
)
.bind(input.rule)
@@ -363,14 +317,23 @@ impl Update for EnforcementRepository {
has_updates = true;
}
if let Some(resolved_at) = input.resolved_at {
if has_updates {
query.push(", ");
}
query.push("resolved_at = ");
query.push_bind(resolved_at);
has_updates = true;
}
if !has_updates {
// No updates requested, fetch and return existing entity
return Self::get_by_id(executor, id).await;
}
query.push(", updated = NOW() WHERE id = ");
query.push(" WHERE id = ");
query.push_bind(id);
query.push(" RETURNING id, rule, rule_ref, trigger_ref, config, event, status, payload, condition, conditions, created, updated");
query.push(" RETURNING id, rule, rule_ref, trigger_ref, config, event, status, payload, condition, conditions, created, resolved_at");
let enforcement = query
.build_query_as::<Enforcement>()
@@ -405,7 +368,7 @@ impl EnforcementRepository {
let enforcements = sqlx::query_as::<_, Enforcement>(
r#"
SELECT id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
FROM enforcement
WHERE rule = $1
ORDER BY created DESC
@@ -429,7 +392,7 @@ impl EnforcementRepository {
let enforcements = sqlx::query_as::<_, Enforcement>(
r#"
SELECT id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
FROM enforcement
WHERE status = $1
ORDER BY created DESC
@@ -450,7 +413,7 @@ impl EnforcementRepository {
let enforcements = sqlx::query_as::<_, Enforcement>(
r#"
SELECT id, rule, rule_ref, trigger_ref, config, event, status, payload,
condition, conditions, created, updated
condition, conditions, created, resolved_at
FROM enforcement
WHERE event = $1
ORDER BY created DESC

View File

@@ -6,6 +6,15 @@ use sqlx::{Executor, Postgres, QueryBuilder};
use super::{Create, Delete, FindById, List, Repository, Update};
/// Column list for SELECT queries on the execution table.
///
/// Defined once to avoid drift between queries and the `Execution` model.
/// The execution table has DB-only columns (`is_workflow`, `workflow_def`) that
/// are NOT in the Rust struct, so `SELECT *` must never be used.
pub const SELECT_COLUMNS: &str = "\
id, action, action_ref, config, env_vars, parent, enforcement, \
executor, status, result, workflow_task, created, updated";
pub struct ExecutionRepository;
impl Repository for ExecutionRepository {
@@ -54,9 +63,12 @@ impl FindById for ExecutionRepository {
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE id = $1"
).bind(id).fetch_optional(executor).await.map_err(Into::into)
let sql = format!("SELECT {SELECT_COLUMNS} FROM execution WHERE id = $1");
sqlx::query_as::<_, Execution>(&sql)
.bind(id)
.fetch_optional(executor)
.await
.map_err(Into::into)
}
}
@@ -66,9 +78,12 @@ impl List for ExecutionRepository {
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution ORDER BY created DESC LIMIT 1000"
).fetch_all(executor).await.map_err(Into::into)
let sql =
format!("SELECT {SELECT_COLUMNS} FROM execution ORDER BY created DESC LIMIT 1000");
sqlx::query_as::<_, Execution>(&sql)
.fetch_all(executor)
.await
.map_err(Into::into)
}
}
@@ -79,9 +94,26 @@ impl Create for ExecutionRepository {
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"INSERT INTO execution (action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated"
).bind(input.action).bind(&input.action_ref).bind(&input.config).bind(&input.env_vars).bind(input.parent).bind(input.enforcement).bind(input.executor).bind(input.status).bind(&input.result).bind(sqlx::types::Json(&input.workflow_task)).fetch_one(executor).await.map_err(Into::into)
let sql = format!(
"INSERT INTO execution \
(action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task) \
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
RETURNING {SELECT_COLUMNS}"
);
sqlx::query_as::<_, Execution>(&sql)
.bind(input.action)
.bind(&input.action_ref)
.bind(&input.config)
.bind(&input.env_vars)
.bind(input.parent)
.bind(input.enforcement)
.bind(input.executor)
.bind(input.status)
.bind(&input.result)
.bind(sqlx::types::Json(&input.workflow_task))
.fetch_one(executor)
.await
.map_err(Into::into)
}
}
@@ -130,7 +162,8 @@ impl Update for ExecutionRepository {
}
query.push(", updated = NOW() WHERE id = ").push_bind(id);
query.push(" RETURNING id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated");
query.push(" RETURNING ");
query.push(SELECT_COLUMNS);
query
.build_query_as::<Execution>()
@@ -162,9 +195,14 @@ impl ExecutionRepository {
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE status = $1 ORDER BY created DESC"
).bind(status).fetch_all(executor).await.map_err(Into::into)
let sql = format!(
"SELECT {SELECT_COLUMNS} FROM execution WHERE status = $1 ORDER BY created DESC"
);
sqlx::query_as::<_, Execution>(&sql)
.bind(status)
.fetch_all(executor)
.await
.map_err(Into::into)
}
pub async fn find_by_enforcement<'e, E>(
@@ -174,8 +212,31 @@ impl ExecutionRepository {
where
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE enforcement = $1 ORDER BY created DESC"
).bind(enforcement_id).fetch_all(executor).await.map_err(Into::into)
let sql = format!(
"SELECT {SELECT_COLUMNS} FROM execution WHERE enforcement = $1 ORDER BY created DESC"
);
sqlx::query_as::<_, Execution>(&sql)
.bind(enforcement_id)
.fetch_all(executor)
.await
.map_err(Into::into)
}
/// Find all child executions for a given parent execution ID.
///
/// Children are returned ordered by creation time (ascending), which is
/// the natural task execution order for workflows.
pub async fn find_by_parent<'e, E>(executor: E, parent_id: Id) -> Result<Vec<Execution>>
where
    E: Executor<'e, Database = Postgres> + 'e,
{
    // SELECT_COLUMNS keeps the column list in one place for all queries.
    let query = format!(
        "SELECT {SELECT_COLUMNS} FROM execution WHERE parent = $1 ORDER BY created ASC"
    );
    let children = sqlx::query_as::<_, Execution>(&query)
        .bind(parent_id)
        .fetch_all(executor)
        .await?;
    Ok(children)
}
}

View File

@@ -194,7 +194,7 @@ impl WorkflowRegistrar {
///
/// This ensures the workflow appears in action lists and the action palette
/// in the workflow builder. The action is linked to the workflow definition
/// via `is_workflow = true` and `workflow_def` FK.
/// via the `workflow_def` FK.
async fn create_companion_action(
&self,
workflow_def_id: i64,
@@ -221,7 +221,7 @@ impl WorkflowRegistrar {
let action = ActionRepository::create(&self.pool, action_input).await?;
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
// Link the action to the workflow definition (sets workflow_def FK)
ActionRepository::link_workflow_def(&self.pool, action.id, workflow_def_id).await?;
info!(

View File

@@ -54,8 +54,8 @@ async fn test_create_enforcement_minimal() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -89,7 +89,7 @@ async fn test_create_enforcement_minimal() {
assert_eq!(enforcement.condition, EnforcementCondition::All);
assert_eq!(enforcement.conditions, json!([]));
assert!(enforcement.created.timestamp() > 0);
assert!(enforcement.updated.timestamp() > 0);
assert_eq!(enforcement.resolved_at, None); // Not yet resolved
}
#[tokio::test]
@@ -125,8 +125,8 @@ async fn test_create_enforcement_with_event() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -192,8 +192,8 @@ async fn test_create_enforcement_with_conditions() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -257,8 +257,8 @@ async fn test_create_enforcement_with_any_condition() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -333,10 +333,12 @@ async fn test_create_enforcement_with_invalid_rule_fails() {
}
#[tokio::test]
async fn test_create_enforcement_with_invalid_event_fails() {
async fn test_create_enforcement_with_nonexistent_event_succeeds() {
let pool = create_test_pool().await.unwrap();
// Try to create enforcement with non-existent event ID
// The enforcement.event column has no FK constraint (event is a hypertable
// and hypertables cannot be FK targets). A non-existent event ID is accepted
// as a dangling reference.
let input = CreateEnforcementInput {
rule: None,
rule_ref: "some.rule".to_string(),
@@ -351,8 +353,9 @@ async fn test_create_enforcement_with_invalid_event_fails() {
let result = EnforcementRepository::create(&pool, input).await;
assert!(result.is_err());
// Foreign key constraint violation
assert!(result.is_ok());
let enforcement = result.unwrap();
assert_eq!(enforcement.event, Some(99999));
}
// ============================================================================
@@ -392,8 +395,8 @@ async fn test_find_enforcement_by_id() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -464,8 +467,8 @@ async fn test_get_enforcement_by_id() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -542,8 +545,8 @@ async fn test_list_enforcements() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -613,8 +616,8 @@ async fn test_update_enforcement_status() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -628,9 +631,11 @@ async fn test_update_enforcement_status() {
.await
.unwrap();
let now = chrono::Utc::now();
let input = UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: None,
resolved_at: Some(now),
};
let updated = EnforcementRepository::update(&pool, enforcement.id, input)
@@ -639,7 +644,8 @@ async fn test_update_enforcement_status() {
assert_eq!(updated.id, enforcement.id);
assert_eq!(updated.status, EnforcementStatus::Processed);
assert!(updated.updated > enforcement.updated);
assert!(updated.resolved_at.is_some());
assert!(updated.resolved_at.unwrap() >= enforcement.created);
}
#[tokio::test]
@@ -675,8 +681,8 @@ async fn test_update_enforcement_status_transitions() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -689,26 +695,30 @@ async fn test_update_enforcement_status_transitions() {
.await
.unwrap();
// Test status transitions: Created -> Succeeded
// Test status transitions: Created -> Processed
let now = chrono::Utc::now();
let updated = EnforcementRepository::update(
&pool,
enforcement.id,
UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: None,
resolved_at: Some(now),
},
)
.await
.unwrap();
assert_eq!(updated.status, EnforcementStatus::Processed);
assert!(updated.resolved_at.is_some());
// Test status transition: Succeeded -> Failed (although unusual)
// Test status transition: Processed -> Disabled (although unusual)
let updated = EnforcementRepository::update(
&pool,
enforcement.id,
UpdateEnforcementInput {
status: Some(EnforcementStatus::Disabled),
payload: None,
resolved_at: None,
},
)
.await
@@ -749,8 +759,8 @@ async fn test_update_enforcement_payload() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -768,6 +778,7 @@ async fn test_update_enforcement_payload() {
let input = UpdateEnforcementInput {
status: None,
payload: Some(new_payload.clone()),
resolved_at: None,
};
let updated = EnforcementRepository::update(&pool, enforcement.id, input)
@@ -810,8 +821,8 @@ async fn test_update_enforcement_both_fields() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -824,10 +835,12 @@ async fn test_update_enforcement_both_fields() {
.await
.unwrap();
let now = chrono::Utc::now();
let new_payload = json!({"result": "success"});
let input = UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: Some(new_payload.clone()),
resolved_at: Some(now),
};
let updated = EnforcementRepository::update(&pool, enforcement.id, input)
@@ -871,8 +884,8 @@ async fn test_update_enforcement_no_changes() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -889,6 +902,7 @@ async fn test_update_enforcement_no_changes() {
let input = UpdateEnforcementInput {
status: None,
payload: None,
resolved_at: None,
};
let result = EnforcementRepository::update(&pool, enforcement.id, input)
@@ -907,6 +921,7 @@ async fn test_update_enforcement_not_found() {
let input = UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: None,
resolved_at: Some(chrono::Utc::now()),
};
let result = EnforcementRepository::update(&pool, 99999, input).await;
@@ -952,8 +967,8 @@ async fn test_delete_enforcement() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1025,8 +1040,8 @@ async fn test_find_enforcements_by_rule() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1047,8 +1062,8 @@ async fn test_find_enforcements_by_rule() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1117,8 +1132,8 @@ async fn test_find_enforcements_by_status() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1206,8 +1221,8 @@ async fn test_find_enforcements_by_event() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1290,8 +1305,8 @@ async fn test_delete_rule_sets_enforcement_rule_to_null() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1323,7 +1338,7 @@ async fn test_delete_rule_sets_enforcement_rule_to_null() {
// ============================================================================
#[tokio::test]
async fn test_enforcement_timestamps_auto_managed() {
async fn test_enforcement_resolved_at_lifecycle() {
let pool = create_test_pool().await.unwrap();
let pack = PackFixture::new_unique("timestamp_pack")
@@ -1355,8 +1370,8 @@ async fn test_enforcement_timestamps_auto_managed() {
trigger: trigger.id,
trigger_ref: trigger.r#ref.clone(),
conditions: json!({}),
action_params: json!({}),
trigger_params: json!({}),
action_params: json!({}),
trigger_params: json!({}),
enabled: true,
is_adhoc: false,
},
@@ -1369,24 +1384,23 @@ async fn test_enforcement_timestamps_auto_managed() {
.await
.unwrap();
let created_time = enforcement.created;
let updated_time = enforcement.updated;
assert!(created_time.timestamp() > 0);
assert_eq!(created_time, updated_time);
// Update and verify timestamp changed
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
// Initially, resolved_at is NULL
assert!(enforcement.created.timestamp() > 0);
assert_eq!(enforcement.resolved_at, None);
// Resolve the enforcement and verify resolved_at is set
let resolved_time = chrono::Utc::now();
let input = UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: None,
resolved_at: Some(resolved_time),
};
let updated = EnforcementRepository::update(&pool, enforcement.id, input)
.await
.unwrap();
assert_eq!(updated.created, created_time); // created unchanged
assert!(updated.updated > updated_time); // updated changed
assert_eq!(updated.created, enforcement.created); // created unchanged
assert!(updated.resolved_at.is_some());
assert!(updated.resolved_at.unwrap() >= enforcement.created);
}

View File

@@ -2,13 +2,14 @@
//!
//! These tests verify CRUD operations, queries, and constraints
//! for the Event repository.
//! Note: Events are immutable time-series data — there are no update tests.
mod helpers;
use attune_common::{
repositories::{
event::{CreateEventInput, EventRepository, UpdateEventInput},
Create, Delete, FindById, List, Update,
event::{CreateEventInput, EventRepository},
Create, Delete, FindById, List,
},
Error,
};
@@ -56,7 +57,6 @@ async fn test_create_event_minimal() {
assert_eq!(event.source, None);
assert_eq!(event.source_ref, None);
assert!(event.created.timestamp() > 0);
assert!(event.updated.timestamp() > 0);
}
#[tokio::test]
@@ -363,162 +363,6 @@ async fn test_list_events_respects_limit() {
assert!(events.len() <= 1000);
}
// ============================================================================
// UPDATE Tests
// ============================================================================
// Verifies that updating only an event's `config` replaces it, leaves the
// payload untouched, and bumps the `updated` timestamp.
#[tokio::test]
async fn test_update_event_config() {
    let pool = create_test_pool().await.unwrap();
    // Fixture chain: pack -> trigger -> event (event seeded with an old config).
    let pack = PackFixture::new_unique("update_pack")
        .create(&pool)
        .await
        .unwrap();
    let trigger = TriggerFixture::new_unique(Some(pack.id), Some(pack.r#ref.clone()), "webhook")
        .create(&pool)
        .await
        .unwrap();
    let event = EventFixture::new_unique(Some(trigger.id), &trigger.r#ref)
        .with_config(json!({"old": "config"}))
        .create(&pool)
        .await
        .unwrap();
    // Update only the config field; payload stays None in the input.
    let new_config = json!({"new": "config", "updated": true});
    let input = UpdateEventInput {
        config: Some(new_config.clone()),
        payload: None,
    };
    let updated = EventRepository::update(&pool, event.id, input)
        .await
        .unwrap();
    assert_eq!(updated.id, event.id);
    assert_eq!(updated.config, Some(new_config));
    // The repository bumps `updated` on every successful update.
    assert!(updated.updated > event.updated);
}
// Verifies that updating only an event's `payload` replaces it and bumps
// the `updated` timestamp.
#[tokio::test]
async fn test_update_event_payload() {
    let pool = create_test_pool().await.unwrap();
    // Fixture chain: pack -> trigger -> event (event seeded with an initial payload).
    let pack = PackFixture::new_unique("payload_update_pack")
        .create(&pool)
        .await
        .unwrap();
    let trigger = TriggerFixture::new_unique(Some(pack.id), Some(pack.r#ref.clone()), "webhook")
        .create(&pool)
        .await
        .unwrap();
    let event = EventFixture::new_unique(Some(trigger.id), &trigger.r#ref)
        .with_payload(json!({"initial": "payload"}))
        .create(&pool)
        .await
        .unwrap();
    // Update only the payload field; config stays None in the input.
    let new_payload = json!({"updated": "payload", "version": 2});
    let input = UpdateEventInput {
        config: None,
        payload: Some(new_payload.clone()),
    };
    let updated = EventRepository::update(&pool, event.id, input)
        .await
        .unwrap();
    assert_eq!(updated.payload, Some(new_payload));
    // The repository bumps `updated` on every successful update.
    assert!(updated.updated > event.updated);
}
// Verifies that `config` and `payload` can be updated together in a
// single call.
#[tokio::test]
async fn test_update_event_both_fields() {
    let pool = create_test_pool().await.unwrap();
    // Fixture chain: pack -> trigger -> event (event created with defaults).
    let pack = PackFixture::new_unique("both_update_pack")
        .create(&pool)
        .await
        .unwrap();
    let trigger = TriggerFixture::new_unique(Some(pack.id), Some(pack.r#ref.clone()), "webhook")
        .create(&pool)
        .await
        .unwrap();
    let event = EventFixture::new_unique(Some(trigger.id), &trigger.r#ref)
        .create(&pool)
        .await
        .unwrap();
    // Supply both fields in one update input.
    let new_config = json!({"setting": "value"});
    let new_payload = json!({"data": "value"});
    let input = UpdateEventInput {
        config: Some(new_config.clone()),
        payload: Some(new_payload.clone()),
    };
    let updated = EventRepository::update(&pool, event.id, input)
        .await
        .unwrap();
    assert_eq!(updated.config, Some(new_config));
    assert_eq!(updated.payload, Some(new_payload));
}
// Verifies that an update input with all fields set to None is a no-op:
// the existing event is returned unchanged.
#[tokio::test]
async fn test_update_event_no_changes() {
    let pool = create_test_pool().await.unwrap();
    // Fixture chain: pack -> trigger -> event (event seeded with a payload).
    let pack = PackFixture::new_unique("nochange_pack")
        .create(&pool)
        .await
        .unwrap();
    let trigger = TriggerFixture::new_unique(Some(pack.id), Some(pack.r#ref.clone()), "webhook")
        .create(&pool)
        .await
        .unwrap();
    let event = EventFixture::new_unique(Some(trigger.id), &trigger.r#ref)
        .with_payload(json!({"test": "data"}))
        .create(&pool)
        .await
        .unwrap();
    // Empty update input: nothing to change.
    let input = UpdateEventInput {
        config: None,
        payload: None,
    };
    let result = EventRepository::update(&pool, event.id, input)
        .await
        .unwrap();
    // Should return existing event without updating
    assert_eq!(result.id, event.id);
    assert_eq!(result.payload, event.payload);
}
// Verifies that updating a non-existent event ID yields an error rather
// than silently succeeding.
#[tokio::test]
async fn test_update_event_not_found() {
    let pool = create_test_pool().await.unwrap();
    // 99999 is an ID no fixture creates, so the UPDATE matches no row.
    let input = UpdateEventInput {
        config: Some(json!({"test": "config"})),
        payload: None,
    };
    let result = EventRepository::update(&pool, 99999, input).await;
    // When updating non-existent entity with changes, SQLx returns RowNotFound error
    assert!(result.is_err());
}
// ============================================================================
// DELETE Tests
// ============================================================================
@@ -561,7 +405,7 @@ async fn test_delete_event_not_found() {
}
#[tokio::test]
async fn test_delete_event_sets_enforcement_event_to_null() {
async fn test_delete_event_enforcement_retains_event_id() {
let pool = create_test_pool().await.unwrap();
// Create pack, trigger, action, rule, and event
@@ -616,17 +460,19 @@ async fn test_delete_event_sets_enforcement_event_to_null() {
.await
.unwrap();
// Delete the event - enforcement.event should be set to NULL (ON DELETE SET NULL)
// Delete the event — since the event table is a TimescaleDB hypertable, the FK
// constraint from enforcement.event was dropped (hypertables cannot be FK targets).
// The enforcement.event column retains the old ID as a dangling reference.
EventRepository::delete(&pool, event.id).await.unwrap();
// Enforcement should still exist but with NULL event
// Enforcement still exists with the original event ID (now a dangling reference)
use attune_common::repositories::event::EnforcementRepository;
let found_enforcement = EnforcementRepository::find_by_id(&pool, enforcement.id)
.await
.unwrap()
.unwrap();
assert_eq!(found_enforcement.event, None);
assert_eq!(found_enforcement.event, Some(event.id));
}
// ============================================================================
@@ -756,7 +602,7 @@ async fn test_find_events_by_trigger_ref_preserves_after_trigger_deletion() {
// ============================================================================
#[tokio::test]
async fn test_event_timestamps_auto_managed() {
async fn test_event_created_timestamp_auto_set() {
let pool = create_test_pool().await.unwrap();
let pack = PackFixture::new_unique("timestamp_pack")
@@ -774,24 +620,5 @@ async fn test_event_timestamps_auto_managed() {
.await
.unwrap();
let created_time = event.created;
let updated_time = event.updated;
assert!(created_time.timestamp() > 0);
assert_eq!(created_time, updated_time);
// Update and verify timestamp changed
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
let input = UpdateEventInput {
config: Some(json!({"updated": true})),
payload: None,
};
let updated = EventRepository::update(&pool, event.id, input)
.await
.unwrap();
assert_eq!(updated.created, created_time); // created unchanged
assert!(updated.updated > updated_time); // updated changed
assert!(event.created.timestamp() > 0);
}

View File

@@ -7,6 +7,7 @@
//! - Detecting inquiry requests in execution results
//! - Creating inquiries for human-in-the-loop workflows
//! - Enabling FIFO execution ordering by notifying waiting executions
//! - Advancing workflow orchestration when child task executions complete
use anyhow::Result;
use attune_common::{
@@ -14,10 +15,14 @@ use attune_common::{
repositories::{execution::ExecutionRepository, FindById},
};
use sqlx::PgPool;
use std::sync::atomic::AtomicUsize;
use std::sync::Arc;
use tracing::{debug, error, info, warn};
use crate::{inquiry_handler::InquiryHandler, queue_manager::ExecutionQueueManager};
use crate::{
inquiry_handler::InquiryHandler, queue_manager::ExecutionQueueManager,
scheduler::ExecutionScheduler,
};
/// Completion listener that handles execution completion messages
pub struct CompletionListener {
@@ -25,6 +30,9 @@ pub struct CompletionListener {
consumer: Arc<Consumer>,
publisher: Arc<Publisher>,
queue_manager: Arc<ExecutionQueueManager>,
/// Round-robin counter shared with the scheduler for dispatching workflow
/// successor tasks to workers.
round_robin_counter: Arc<AtomicUsize>,
}
impl CompletionListener {
@@ -40,6 +48,7 @@ impl CompletionListener {
consumer,
publisher,
queue_manager,
round_robin_counter: Arc::new(AtomicUsize::new(0)),
}
}
@@ -50,6 +59,7 @@ impl CompletionListener {
let pool = self.pool.clone();
let publisher = self.publisher.clone();
let queue_manager = self.queue_manager.clone();
let round_robin_counter = self.round_robin_counter.clone();
// Use the handler pattern to consume messages
self.consumer
@@ -58,12 +68,14 @@ impl CompletionListener {
let pool = pool.clone();
let publisher = publisher.clone();
let queue_manager = queue_manager.clone();
let round_robin_counter = round_robin_counter.clone();
async move {
if let Err(e) = Self::process_execution_completed(
&pool,
&publisher,
&queue_manager,
&round_robin_counter,
&envelope,
)
.await
@@ -88,6 +100,7 @@ impl CompletionListener {
pool: &PgPool,
publisher: &Publisher,
queue_manager: &ExecutionQueueManager,
round_robin_counter: &AtomicUsize,
envelope: &MessageEnvelope<ExecutionCompletedPayload>,
) -> Result<()> {
debug!("Processing execution completed message: {:?}", envelope);
@@ -115,6 +128,26 @@ impl CompletionListener {
execution_id, exec.status
);
// Check if this execution is a workflow child task and advance the
// workflow orchestration (schedule successor tasks or complete the
// workflow).
if exec.workflow_task.is_some() {
info!(
"Execution {} is a workflow task, advancing workflow",
execution_id
);
if let Err(e) =
ExecutionScheduler::advance_workflow(pool, publisher, round_robin_counter, exec)
.await
{
error!(
"Failed to advance workflow for execution {}: {}",
execution_id, e
);
// Continue processing — don't fail the entire completion
}
}
// Check if execution result contains an inquiry request
if let Some(result) = &exec.result {
if InquiryHandler::has_inquiry_request(result) {

View File

@@ -152,6 +152,7 @@ impl EnforcementProcessor {
UpdateEnforcementInput {
status: Some(EnforcementStatus::Processed),
payload: None,
resolved_at: Some(chrono::Utc::now()),
},
)
.await?;
@@ -170,6 +171,7 @@ impl EnforcementProcessor {
UpdateEnforcementInput {
status: Some(EnforcementStatus::Disabled),
payload: None,
resolved_at: Some(chrono::Utc::now()),
},
)
.await?;
@@ -356,7 +358,7 @@ mod tests {
condition: attune_common::models::enums::EnforcementCondition::Any,
conditions: json!({}),
created: chrono::Utc::now(),
updated: chrono::Utc::now(),
resolved_at: Some(chrono::Utc::now()),
};
let mut rule = Rule {

View File

@@ -21,6 +21,7 @@ mod scheduler;
mod service;
mod timeout_monitor;
mod worker_health;
mod workflow;
use anyhow::Result;
use attune_common::config::Config;

File diff suppressed because it is too large Load Diff

View File

@@ -12,6 +12,7 @@ use anyhow::Result;
use attune_common::{
models::{enums::ExecutionStatus, Execution},
mq::{MessageEnvelope, MessageType, Publisher},
repositories::execution::SELECT_COLUMNS as EXECUTION_COLUMNS,
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -105,17 +106,16 @@ impl ExecutionTimeoutMonitor {
);
// Find executions stuck in SCHEDULED status
let stale_executions = sqlx::query_as::<_, Execution>(
"SELECT * FROM execution
WHERE status = $1
AND updated < $2
ORDER BY updated ASC
LIMIT 100", // Process in batches to avoid overwhelming system
)
.bind(ExecutionStatus::Scheduled)
.bind(cutoff)
.fetch_all(&self.pool)
.await?;
let sql = format!(
"SELECT {EXECUTION_COLUMNS} FROM execution \
WHERE status = $1 AND updated < $2 \
ORDER BY updated ASC LIMIT 100"
);
let stale_executions = sqlx::query_as::<_, Execution>(&sql)
.bind(ExecutionStatus::Scheduled)
.bind(cutoff)
.fetch_all(&self.pool)
.await?;
if stale_executions.is_empty() {
debug!("No stale scheduled executions found");

View File

@@ -2,6 +2,22 @@
//!
//! This module manages workflow execution context, including variables,
//! template rendering, and data flow between tasks.
//!
//! ## Function-call expressions
//!
//! Templates support Orquesta-style function calls:
//! - `{{ result() }}` — the last completed task's result
//! - `{{ result().field }}` — nested access into the result
//! - `{{ succeeded() }}` — `true` if the last task succeeded
//! - `{{ failed() }}` — `true` if the last task failed
//! - `{{ timed_out() }}` — `true` if the last task timed out
//!
//! ## Type-preserving rendering
//!
//! When a JSON string value is a *pure* template expression (the entire value
//! is `{{ expr }}`), `render_json` returns the raw `JsonValue` from the
//! expression instead of stringifying it. This means `"{{ item }}"` resolving
//! to integer `5` stays as `5`, not the string `"5"`.
use dashmap::DashMap;
use serde_json::{json, Value as JsonValue};
@@ -31,6 +47,15 @@ pub enum ContextError {
JsonError(#[from] serde_json::Error),
}
/// The status of the last completed task, used by `succeeded()` / `failed()` /
/// `timed_out()` function expressions.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TaskOutcome {
    /// The task completed successfully.
    Succeeded,
    /// The task completed with a failure.
    Failed,
    /// The task timed out.
    TimedOut,
}
/// Workflow execution context
///
/// Uses Arc for shared immutable data to enable efficient cloning.
@@ -55,6 +80,12 @@ pub struct WorkflowContext {
/// Current item index (for with-items iteration) - per-item data
current_index: Option<usize>,
/// The result of the last completed task (for `result()` expressions)
last_task_result: Option<JsonValue>,
/// The outcome of the last completed task (for `succeeded()` / `failed()`)
last_task_outcome: Option<TaskOutcome>,
}
impl WorkflowContext {
@@ -75,6 +106,46 @@ impl WorkflowContext {
system: Arc::new(system),
current_item: None,
current_index: None,
last_task_result: None,
last_task_outcome: None,
}
}
/// Rebuild a workflow context from persisted workflow execution state.
///
/// Used when advancing a workflow after a child task completes: the
/// scheduler reconstructs the context from the `workflow_execution`
/// record's stored `variables` plus the results of all completed child
/// executions.
pub fn rebuild(
    parameters: JsonValue,
    stored_variables: &JsonValue,
    task_results: HashMap<String, JsonValue>,
) -> Self {
    // Restore published workflow variables from the persisted JSON object.
    // A non-object `stored_variables` simply yields no entries.
    let restored_vars = DashMap::new();
    for (name, value) in stored_variables.as_object().into_iter().flatten() {
        restored_vars.insert(name.clone(), value.clone());
    }

    // Seed task results from the completed child executions.
    let restored_results = DashMap::new();
    for (task_name, result) in task_results {
        restored_results.insert(task_name, result);
    }

    // System namespace gets a fresh start-time marker.
    let system_vars = DashMap::new();
    system_vars.insert("workflow_start".to_string(), json!(chrono::Utc::now()));

    Self {
        variables: Arc::new(restored_vars),
        parameters: Arc::new(parameters),
        task_results: Arc::new(restored_results),
        system: Arc::new(system_vars),
        current_item: None,
        current_index: None,
        last_task_result: None,
        last_task_outcome: None,
    }
}
@@ -112,7 +183,28 @@ impl WorkflowContext {
self.current_index = None;
}
/// Render a template string
/// Record the result and outcome of the most recently completed task.
///
/// Subsequent `result()`, `succeeded()`, `failed()`, and `timed_out()`
/// template expressions resolve against the values recorded here.
pub fn set_last_task_outcome(&mut self, result: JsonValue, outcome: TaskOutcome) {
    self.last_task_outcome = Some(outcome);
    self.last_task_result = Some(result);
}
/// Export workflow variables as a JSON object suitable for persisting
/// back to the `workflow_execution.variables` column.
pub fn export_variables(&self) -> JsonValue {
let map: HashMap<String, JsonValue> = self
.variables
.iter()
.map(|entry| (entry.key().clone(), entry.value().clone()))
.collect();
json!(map)
}
/// Render a template string, always returning a `String`.
///
/// For type-preserving rendering of JSON values use [`render_json`].
pub fn render_template(&self, template: &str) -> ContextResult<String> {
// Simple template rendering (Jinja2-like syntax)
// Supports: {{ variable }}, {{ task.result }}, {{ parameters.key }}
@@ -143,10 +235,49 @@ impl WorkflowContext {
Ok(result)
}
/// Render a JSON value (recursively render templates in strings)
/// Try to evaluate a string as a single pure template expression.
///
/// Returns `Some(JsonValue)` only when the **entire** string (modulo
/// surrounding whitespace) is exactly one `{{ expr }}`, preserving the
/// JSON type of the evaluated expression. Returns `None` when the string
/// has literal text around the template or multiple template
/// expressions — callers then fall back to `render_template`, which
/// always stringifies.
fn try_evaluate_pure_expression(&self, s: &str) -> Option<ContextResult<JsonValue>> {
    let candidate = s.trim();
    // Must be delimited by an outer `{{ … }}` pair.
    let inner = candidate.strip_prefix("{{")?.strip_suffix("}}")?;
    // A second `{{` inside means multiple expressions — not a pure one.
    if inner.contains("{{") {
        return None;
    }
    let expr = inner.trim();
    if expr.is_empty() {
        return None;
    }
    Some(self.evaluate_expression(expr))
}
/// Render a JSON value, recursively resolving `{{ }}` templates in
/// strings.
///
/// **Type-preserving**: when a string value is a *pure* template
/// expression (the entire string is `{{ expr }}`), the raw `JsonValue`
/// from the expression is returned. For example, if `item` is `5`
/// (a JSON number), then `"{{ item }}"` resolves to `5` not `"5"`.
pub fn render_json(&self, value: &JsonValue) -> ContextResult<JsonValue> {
match value {
JsonValue::String(s) => {
// Fast path: try as a pure expression to preserve type
if let Some(result) = self.try_evaluate_pure_expression(s) {
return result;
}
// Fallback: render as string (interpolation with surrounding text)
let rendered = self.render_template(s)?;
Ok(JsonValue::String(rendered))
}
@@ -170,6 +301,28 @@ impl WorkflowContext {
/// Evaluate a template expression
fn evaluate_expression(&self, expr: &str) -> ContextResult<JsonValue> {
// ---------------------------------------------------------------
// Function-call expressions: result(), succeeded(), failed(), timed_out()
// ---------------------------------------------------------------
// We handle these *before* splitting on `.` because the function
// name contains parentheses which would confuse the dot-split.
//
// Supported patterns:
// result() → last task result
// result().foo.bar → nested access into result
// result().data.items → nested access into result
// succeeded() → boolean
// failed() → boolean
// timed_out() → boolean
// ---------------------------------------------------------------
if let Some(result_val) = self.try_evaluate_function_call(expr)? {
return Ok(result_val);
}
// ---------------------------------------------------------------
// Dot-path expressions
// ---------------------------------------------------------------
let parts: Vec<&str> = expr.split('.').collect();
if parts.is_empty() {
@@ -244,7 +397,8 @@ impl WorkflowContext {
Err(ContextError::VariableNotFound(format!("system.{}", key)))
}
}
// Direct variable reference
// Direct variable reference (e.g., `number_list` published by a
// previous task's transition)
var_name => {
if let Some(entry) = self.variables.get(var_name) {
let value = entry.value().clone();
@@ -261,6 +415,56 @@ impl WorkflowContext {
}
}
/// Try to evaluate `expr` as a function-call expression.
///
/// Returns `Ok(Some(value))` when the expression is one of the
/// recognised call forms, `Ok(None)` when it is not, and `Err` only
/// if navigating a `result().path` fails.
fn try_evaluate_function_call(&self, expr: &str) -> ContextResult<Option<JsonValue>> {
    // Outcome predicates: succeeded() / failed() / timed_out().
    // With no recorded outcome, each evaluates to `false`.
    let expected_outcome = match expr {
        "succeeded()" => Some(TaskOutcome::Succeeded),
        "failed()" => Some(TaskOutcome::Failed),
        "timed_out()" => Some(TaskOutcome::TimedOut),
        _ => None,
    };
    if let Some(expected) = expected_outcome {
        let matched = self.last_task_outcome == Some(expected);
        return Ok(Some(json!(matched)));
    }

    // result() — the whole last result; result().a.b — nested access.
    if let Some(remainder) = expr.strip_prefix("result()") {
        let base = self.last_task_result.clone().unwrap_or(JsonValue::Null);
        if remainder.is_empty() {
            return Ok(Some(base));
        }
        if let Some(path) = remainder.strip_prefix('.') {
            let segments: Vec<&str> = path.split('.').collect();
            return Ok(Some(self.get_nested_value(&base, &segments)?));
        }
        // e.g. "result()x" — not a recognised form; fall through.
    }

    Ok(None)
}
/// Get nested value from JSON
fn get_nested_value(&self, value: &JsonValue, path: &[&str]) -> ContextResult<JsonValue> {
let mut current = value;
@@ -313,7 +517,12 @@ impl WorkflowContext {
}
}
/// Publish variables from a task result
/// Publish variables from a task result.
///
/// Each publish directive is a `(name, expression)` pair where the
/// expression is a template string like `"{{ result().data.items }}"`.
/// The expression is rendered with `render_json`-style type preservation
/// so that non-string values (arrays, numbers, booleans) keep their type.
pub fn publish_from_result(
&mut self,
result: &JsonValue,
@@ -323,16 +532,11 @@ impl WorkflowContext {
// If publish map is provided, use it
if let Some(map) = publish_map {
for (var_name, template) in map {
// Create temporary context with result
let mut temp_ctx = self.clone();
temp_ctx.set_var("result", result.clone());
let value_str = temp_ctx.render_template(template)?;
// Try to parse as JSON, otherwise store as string
let value = serde_json::from_str(&value_str)
.unwrap_or_else(|_| JsonValue::String(value_str));
// Use type-preserving rendering: if the entire template is a
// single expression like `{{ result().data.items }}`, preserve
// the underlying JsonValue type (e.g. an array stays an array).
let json_value = JsonValue::String(template.clone());
let value = self.render_json(&json_value)?;
self.set_var(var_name, value);
}
} else {
@@ -405,6 +609,8 @@ impl WorkflowContext {
system: Arc::new(system),
current_item: None,
current_index: None,
last_task_result: None,
last_task_outcome: None,
})
}
}
@@ -513,6 +719,122 @@ mod tests {
assert_eq!(result["nested"]["value"], "Name is test");
}
#[test]
fn test_render_json_type_preserving_number() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_current_item(json!(5), 0);

    // A template that is nothing but a single expression must keep the
    // underlying JSON type: integer in, integer out (never the string "5").
    let rendered = ctx.render_json(&json!({"seconds": "{{ item }}"})).unwrap();
    assert!(rendered["seconds"].is_number());
    assert_eq!(rendered["seconds"], json!(5));
}
#[test]
fn test_render_json_type_preserving_array() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_last_task_outcome(
        json!({"data": {"items": [0, 1, 2, 3, 4]}}),
        TaskOutcome::Succeeded,
    );

    // A pure `result()` expression hands back the array itself,
    // not its string rendering.
    let rendered = ctx
        .render_json(&json!({"list": "{{ result().data.items }}"}))
        .unwrap();
    assert!(rendered["list"].is_array());
    assert_eq!(rendered["list"], json!([0, 1, 2, 3, 4]));
}
#[test]
fn test_render_json_mixed_template_stays_string() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_current_item(json!(5), 0);

    // Literal text around the expression forces string interpolation.
    let rendered = ctx
        .render_json(&json!({"msg": "Sleeping for {{ item }} seconds"}))
        .unwrap();
    assert!(rendered["msg"].is_string());
    assert_eq!(rendered["msg"], json!("Sleeping for 5 seconds"));
}
#[test]
fn test_render_json_type_preserving_bool() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_last_task_outcome(json!({}), TaskOutcome::Succeeded);

    // succeeded() yields a real JSON boolean, not the string "true".
    let rendered = ctx
        .render_json(&json!({"ok": "{{ succeeded() }}"}))
        .unwrap();
    assert!(rendered["ok"].is_boolean());
    assert_eq!(rendered["ok"], json!(true));
}
#[test]
fn test_result_function() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_last_task_outcome(
        json!({"data": {"items": [10, 20]}, "stdout": "hello"}),
        TaskOutcome::Succeeded,
    );

    // Bare result() hands back the entire last task result.
    let whole = ctx.evaluate_expression("result()").unwrap();
    assert_eq!(whole["data"]["items"], json!([10, 20]));

    // One level of nesting resolves through the result…
    assert_eq!(
        ctx.evaluate_expression("result().stdout").unwrap(),
        json!("hello")
    );

    // …and so do deeper paths.
    assert_eq!(
        ctx.evaluate_expression("result().data.items").unwrap(),
        json!([10, 20])
    );
}
#[test]
fn test_succeeded_failed_functions() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    let eval = |ctx: &WorkflowContext, expr: &str| ctx.evaluate_expression(expr).unwrap();

    // After a success, only succeeded() is true.
    ctx.set_last_task_outcome(json!({}), TaskOutcome::Succeeded);
    assert_eq!(eval(&ctx, "succeeded()"), json!(true));
    assert_eq!(eval(&ctx, "failed()"), json!(false));
    assert_eq!(eval(&ctx, "timed_out()"), json!(false));

    // After a failure, failed() flips on and succeeded() off.
    ctx.set_last_task_outcome(json!({}), TaskOutcome::Failed);
    assert_eq!(eval(&ctx, "succeeded()"), json!(false));
    assert_eq!(eval(&ctx, "failed()"), json!(true));

    // After a timeout, timed_out() reports true.
    ctx.set_last_task_outcome(json!({}), TaskOutcome::TimedOut);
    assert_eq!(eval(&ctx, "timed_out()"), json!(true));
}
#[test]
fn test_publish_with_result_function() {
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_last_task_outcome(
        json!({"data": {"items": [0, 1, 2]}}),
        TaskOutcome::Succeeded,
    );

    // Publish a variable whose template is a pure result() expression.
    let publish_map = HashMap::from([(
        "number_list".to_string(),
        "{{ result().data.items }}".to_string(),
    )]);
    ctx.publish_from_result(&json!({}), &[], Some(&publish_map))
        .unwrap();

    // The published value must keep its array type.
    let published = ctx.get_var("number_list").unwrap();
    assert!(published.is_array());
    assert_eq!(published, json!([0, 1, 2]));
}
#[test]
fn test_publish_variables() {
let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
@@ -524,6 +846,23 @@ mod tests {
assert_eq!(ctx.get_var("my_var").unwrap(), result);
}
#[test]
fn test_rebuild_context() {
    // Rebuild a context from stored variables plus recorded task results.
    let mut task_results = HashMap::new();
    task_results.insert("task1".to_string(), json!({"data": {"items": [0, 1, 2]}}));
    let stored_vars = json!({"number_list": [0, 1, 2]});
    let ctx = WorkflowContext::rebuild(json!({"count": 5}), &stored_vars, task_results);

    // Published variables survive the rebuild…
    assert_eq!(ctx.get_var("number_list").unwrap(), json!([0, 1, 2]));
    // …as do per-task results…
    assert_eq!(
        ctx.get_task_result("task1").unwrap(),
        json!({"data": {"items": [0, 1, 2]}})
    );
    // …and the original parameters are still renderable.
    assert_eq!(ctx.render_template("{{ parameters.count }}").unwrap(), "5");
}
#[test]
fn test_export_import() {
let mut ctx = WorkflowContext::new(json!({"key": "value"}), HashMap::new());
@@ -539,4 +878,28 @@ mod tests {
json!({"result": "ok"})
);
}
#[test]
fn test_with_items_integer_type_preservation() {
    // Mirrors the sleep_2 task from hello_workflow:
    //   input:      { seconds: "{{ item }}" }
    //   with_items: [0, 1, 2, 3, 4]
    let mut ctx = WorkflowContext::new(json!({}), HashMap::new());
    ctx.set_current_item(json!(3), 3);

    let rendered = ctx
        .render_json(&json!({
            "message": "Sleeping for {{ item }} seconds ",
            "seconds": "{{item}}"
        }))
        .unwrap();

    // Pure expression: stays integer 3, never the string "3".
    assert!(rendered["seconds"].is_number());
    assert_eq!(rendered["seconds"], json!(3));

    // Mixed text + expression: interpolated into a string.
    assert!(rendered["message"].is_string());
    assert_eq!(rendered["message"], json!("Sleeping for 3 seconds "));
}
}

View File

@@ -196,7 +196,7 @@ impl WorkflowRegistrar {
///
/// This ensures the workflow appears in action lists and the action palette
/// in the workflow builder. The action is linked to the workflow definition
/// via `is_workflow = true` and `workflow_def` FK.
/// via the `workflow_def` FK.
async fn create_companion_action(
&self,
workflow_def_id: i64,
@@ -223,7 +223,7 @@ impl WorkflowRegistrar {
let action = ActionRepository::create(&self.pool, action_input).await?;
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
// Link the action to the workflow definition (sets workflow_def FK)
ActionRepository::link_workflow_def(&self.pool, action.id, workflow_def_id).await?;
info!(