[WIP] workflow builder

This commit is contained in:
2026-02-23 20:45:10 -06:00
parent d629da32fa
commit 53a3fbb6b1
66 changed files with 7887 additions and 1608 deletions

View File

@@ -40,7 +40,9 @@ use crate::{
#[derive(Debug, Clone, Serialize, Deserialize, Validate, ToSchema)]
pub struct CreateEventRequest {
/// Trigger reference (e.g., "core.timer", "core.webhook")
/// Also accepts "trigger_type" for compatibility with the sensor interface spec.
#[validate(length(min = 1))]
#[serde(alias = "trigger_type")]
#[schema(example = "core.timer")]
pub trigger_ref: String,

View File

@@ -10,9 +10,13 @@ use axum::{
use std::sync::Arc;
use validator::Validate;
use attune_common::models::OwnerType;
use attune_common::repositories::{
action::ActionRepository,
key::{CreateKeyInput, KeyRepository, UpdateKeyInput},
Create, Delete, List, Update,
pack::PackRepository,
trigger::SensorRepository,
Create, Delete, FindByRef, List, Update,
};
use crate::auth::RequireAuth;
@@ -157,6 +161,78 @@ pub async fn create_key(
)));
}
// Auto-resolve owner IDs from refs when only the ref is provided.
// This makes the API more ergonomic for sensors and other clients that
// know the owner ref but not the numeric database ID.
let mut owner_sensor = request.owner_sensor;
let mut owner_action = request.owner_action;
let mut owner_pack = request.owner_pack;
match request.owner_type {
OwnerType::Sensor => {
if owner_sensor.is_none() {
if let Some(ref sensor_ref) = request.owner_sensor_ref {
if let Some(sensor) =
SensorRepository::find_by_ref(&state.db, sensor_ref).await?
{
tracing::debug!(
"Auto-resolved owner_sensor from ref '{}' to id {}",
sensor_ref,
sensor.id
);
owner_sensor = Some(sensor.id);
} else {
return Err(ApiError::BadRequest(format!(
"Sensor with ref '{}' not found",
sensor_ref
)));
}
}
}
}
OwnerType::Action => {
if owner_action.is_none() {
if let Some(ref action_ref) = request.owner_action_ref {
if let Some(action) =
ActionRepository::find_by_ref(&state.db, action_ref).await?
{
tracing::debug!(
"Auto-resolved owner_action from ref '{}' to id {}",
action_ref,
action.id
);
owner_action = Some(action.id);
} else {
return Err(ApiError::BadRequest(format!(
"Action with ref '{}' not found",
action_ref
)));
}
}
}
}
OwnerType::Pack => {
if owner_pack.is_none() {
if let Some(ref pack_ref) = request.owner_pack_ref {
if let Some(pack) = PackRepository::find_by_ref(&state.db, pack_ref).await? {
tracing::debug!(
"Auto-resolved owner_pack from ref '{}' to id {}",
pack_ref,
pack.id
);
owner_pack = Some(pack.id);
} else {
return Err(ApiError::BadRequest(format!(
"Pack with ref '{}' not found",
pack_ref
)));
}
}
}
}
_ => {}
}
// Encrypt value if requested
let (value, encryption_key_hash) = if request.encrypted {
let encryption_key = state
@@ -190,11 +266,11 @@ pub async fn create_key(
owner_type: request.owner_type,
owner: request.owner,
owner_identity: request.owner_identity,
owner_pack: request.owner_pack,
owner_pack,
owner_pack_ref: request.owner_pack_ref,
owner_action: request.owner_action,
owner_action,
owner_action_ref: request.owner_action_ref,
owner_sensor: request.owner_sensor,
owner_sensor,
owner_sensor_ref: request.owner_sensor_ref,
name: request.name,
encrypted: request.encrypted,

View File

@@ -14,7 +14,10 @@ use validator::Validate;
use attune_common::models::pack_test::PackTestResult;
use attune_common::mq::{MessageEnvelope, MessageType, PackRegisteredPayload};
use attune_common::repositories::{
action::ActionRepository,
pack::{CreatePackInput, UpdatePackInput},
rule::{RestoreRuleInput, RuleRepository},
trigger::TriggerRepository,
Create, Delete, FindById, FindByRef, PackRepository, PackTestRepository, Pagination, Update,
};
use attune_common::workflow::{PackWorkflowService, PackWorkflowServiceConfig};
@@ -545,6 +548,9 @@ async fn register_pack_internal(
.and_then(|v| v.as_str())
.map(|s| s.to_string());
// Ad-hoc rules to restore after pack reinstallation
let mut saved_adhoc_rules: Vec<attune_common::models::rule::Rule> = Vec::new();
// Check if pack already exists
if !force {
if PackRepository::exists_by_ref(&state.db, &pack_ref).await? {
@@ -554,8 +560,20 @@ async fn register_pack_internal(
)));
}
} else {
// Delete existing pack if force is true
// Delete existing pack if force is true, preserving ad-hoc (user-created) rules
if let Some(existing_pack) = PackRepository::find_by_ref(&state.db, &pack_ref).await? {
// Save ad-hoc rules before deletion — CASCADE on pack FK would destroy them
saved_adhoc_rules = RuleRepository::find_adhoc_by_pack(&state.db, existing_pack.id)
.await
.unwrap_or_default();
if !saved_adhoc_rules.is_empty() {
tracing::info!(
"Preserving {} ad-hoc rule(s) during reinstall of pack '{}'",
saved_adhoc_rules.len(),
pack_ref
);
}
PackRepository::delete(&state.db, existing_pack.id).await?;
tracing::info!("Deleted existing pack '{}' for forced reinstall", pack_ref);
}
@@ -671,6 +689,123 @@ async fn register_pack_internal(
}
}
// Restore ad-hoc rules that were saved before pack deletion, and
// re-link any rules from other packs whose action/trigger FKs were
// set to NULL when the old pack's entities were cascade-deleted.
{
// Phase 1: Restore saved ad-hoc rules
if !saved_adhoc_rules.is_empty() {
let mut restored = 0u32;
for saved_rule in &saved_adhoc_rules {
// Resolve action and trigger IDs by ref (they may have been recreated)
let action_id = ActionRepository::find_by_ref(&state.db, &saved_rule.action_ref)
.await
.ok()
.flatten()
.map(|a| a.id);
let trigger_id = TriggerRepository::find_by_ref(&state.db, &saved_rule.trigger_ref)
.await
.ok()
.flatten()
.map(|t| t.id);
let input = RestoreRuleInput {
r#ref: saved_rule.r#ref.clone(),
pack: pack.id,
pack_ref: pack.r#ref.clone(),
label: saved_rule.label.clone(),
description: saved_rule.description.clone(),
action: action_id,
action_ref: saved_rule.action_ref.clone(),
trigger: trigger_id,
trigger_ref: saved_rule.trigger_ref.clone(),
conditions: saved_rule.conditions.clone(),
action_params: saved_rule.action_params.clone(),
trigger_params: saved_rule.trigger_params.clone(),
enabled: saved_rule.enabled,
};
match RuleRepository::restore_rule(&state.db, input).await {
Ok(rule) => {
restored += 1;
if rule.action.is_none() || rule.trigger.is_none() {
tracing::warn!(
"Restored ad-hoc rule '{}' with unresolved references \
(action: {}, trigger: {})",
rule.r#ref,
if rule.action.is_some() {
"linked"
} else {
"NULL"
},
if rule.trigger.is_some() {
"linked"
} else {
"NULL"
},
);
}
}
Err(e) => {
tracing::warn!(
"Failed to restore ad-hoc rule '{}': {}",
saved_rule.r#ref,
e
);
}
}
}
tracing::info!(
"Restored {}/{} ad-hoc rule(s) for pack '{}'",
restored,
saved_adhoc_rules.len(),
pack.r#ref
);
}
// Phase 2: Re-link rules from other packs whose action/trigger FKs
// were set to NULL when the old pack's entities were cascade-deleted
let new_actions = ActionRepository::find_by_pack(&state.db, pack.id)
.await
.unwrap_or_default();
let new_triggers = TriggerRepository::find_by_pack(&state.db, pack.id)
.await
.unwrap_or_default();
for action in &new_actions {
match RuleRepository::relink_action_by_ref(&state.db, &action.r#ref, action.id).await {
Ok(count) if count > 0 => {
tracing::info!("Re-linked {} rule(s) to action '{}'", count, action.r#ref);
}
Err(e) => {
tracing::warn!(
"Failed to re-link rules to action '{}': {}",
action.r#ref,
e
);
}
_ => {}
}
}
for trigger in &new_triggers {
match RuleRepository::relink_trigger_by_ref(&state.db, &trigger.r#ref, trigger.id).await
{
Ok(count) if count > 0 => {
tracing::info!("Re-linked {} rule(s) to trigger '{}'", count, trigger.r#ref);
}
Err(e) => {
tracing::warn!(
"Failed to re-link rules to trigger '{}': {}",
trigger.r#ref,
e
);
}
_ => {}
}
}
}
// Set up runtime environments for the pack's actions.
// This creates virtualenvs, installs dependencies, etc. based on each
// runtime's execution_config from the database.
@@ -964,7 +1099,6 @@ async fn register_pack_internal(
responses(
(status = 201, description = "Pack installed successfully", body = ApiResponse<PackInstallResponse>),
(status = 400, description = "Invalid request or tests failed", body = ApiResponse<String>),
(status = 409, description = "Pack already exists", body = ApiResponse<String>),
(status = 501, description = "Not implemented yet", body = ApiResponse<String>),
),
security(("bearer_auth" = []))
@@ -1122,12 +1256,14 @@ pub async fn install_pack(
tracing::info!("Pack moved to permanent storage: {:?}", final_path);
// Register the pack in database (from permanent storage location)
// Register the pack in database (from permanent storage location).
// Remote installs always force-overwrite: if you're pulling from a remote,
// the intent is to get that pack installed regardless of local state.
let pack_id = register_pack_internal(
state.clone(),
user_sub,
final_path.to_string_lossy().to_string(),
request.force,
true, // always force for remote installs
request.skip_tests,
)
.await

View File

@@ -4,9 +4,10 @@ use axum::{
extract::{Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::get,
routing::{get, post, put},
Json, Router,
};
use std::path::PathBuf;
use std::sync::Arc;
use validator::Validate;
@@ -23,8 +24,8 @@ use crate::{
dto::{
common::{PaginatedResponse, PaginationParams},
workflow::{
CreateWorkflowRequest, UpdateWorkflowRequest, WorkflowResponse, WorkflowSearchParams,
WorkflowSummary,
CreateWorkflowRequest, SaveWorkflowFileRequest, UpdateWorkflowRequest,
WorkflowResponse, WorkflowSearchParams, WorkflowSummary,
},
ApiResponse, SuccessResponse,
},
@@ -340,6 +341,202 @@ pub async fn delete_workflow(
Ok((StatusCode::OK, Json(response)))
}
/// Save a workflow file to disk and sync it to the database
///
/// Writes a `{name}.workflow.yaml` file to `{packs_base_dir}/{pack_ref}/actions/workflows/`
/// and creates or updates the corresponding workflow_definition record in the database.
#[utoipa::path(
post,
path = "/api/v1/packs/{pack_ref}/workflow-files",
tag = "workflows",
params(
("pack_ref" = String, Path, description = "Pack reference identifier")
),
request_body = SaveWorkflowFileRequest,
responses(
(status = 201, description = "Workflow file saved and synced", body = inline(ApiResponse<WorkflowResponse>)),
(status = 400, description = "Validation error"),
(status = 404, description = "Pack not found"),
(status = 409, description = "Workflow with same ref already exists"),
(status = 500, description = "Failed to write workflow file")
),
security(("bearer_auth" = []))
)]
pub async fn save_workflow_file(
State(state): State<Arc<AppState>>,
RequireAuth(_user): RequireAuth,
Path(pack_ref): Path<String>,
Json(request): Json<SaveWorkflowFileRequest>,
) -> ApiResult<impl IntoResponse> {
request.validate()?;
// Verify pack exists
let pack = PackRepository::find_by_ref(&state.db, &pack_ref)
.await?
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", pack_ref)))?;
let workflow_ref = format!("{}.{}", pack_ref, request.name);
// Check if workflow already exists
if WorkflowDefinitionRepository::find_by_ref(&state.db, &workflow_ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Workflow with ref '{}' already exists",
workflow_ref
)));
}
// Write YAML file to disk
let packs_base_dir = PathBuf::from(&state.config.packs_base_dir);
write_workflow_yaml(&packs_base_dir, &pack_ref, &request).await?;
// Create workflow in database
let definition_json = serde_json::to_value(&request.definition).map_err(|e| {
ApiError::BadRequest(format!("Failed to serialize workflow definition: {}", e))
})?;
let workflow_input = CreateWorkflowDefinitionInput {
r#ref: workflow_ref,
pack: pack.id,
pack_ref: pack.r#ref.clone(),
label: request.label,
description: request.description,
version: request.version,
param_schema: request.param_schema,
out_schema: request.out_schema,
definition: definition_json,
tags: request.tags.unwrap_or_default(),
enabled: request.enabled.unwrap_or(true),
};
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
let response = ApiResponse::with_message(
WorkflowResponse::from(workflow),
"Workflow file saved and synced successfully",
);
Ok((StatusCode::CREATED, Json(response)))
}
/// Update a workflow file on disk and sync changes to the database
#[utoipa::path(
put,
path = "/api/v1/workflows/{ref}/file",
tag = "workflows",
params(
("ref" = String, Path, description = "Workflow reference identifier")
),
request_body = SaveWorkflowFileRequest,
responses(
(status = 200, description = "Workflow file updated and synced", body = inline(ApiResponse<WorkflowResponse>)),
(status = 400, description = "Validation error"),
(status = 404, description = "Workflow not found"),
(status = 500, description = "Failed to write workflow file")
),
security(("bearer_auth" = []))
)]
pub async fn update_workflow_file(
    State(state): State<Arc<AppState>>,
    RequireAuth(_user): RequireAuth,
    Path(workflow_ref): Path<String>,
    Json(request): Json<SaveWorkflowFileRequest>,
) -> ApiResult<impl IntoResponse> {
    request.validate()?;

    // The workflow must already exist; we need its id for the DB update.
    let existing_workflow = WorkflowDefinitionRepository::find_by_ref(&state.db, &workflow_ref)
        .await?
        .ok_or_else(|| ApiError::NotFound(format!("Workflow '{}' not found", workflow_ref)))?;

    // The pack named in the request must exist (the file lives under it).
    let _pack = PackRepository::find_by_ref(&state.db, &request.pack_ref)
        .await?
        .ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", request.pack_ref)))?;

    // Serialize the definition BEFORE touching the file so a serialization
    // failure cannot clobber the on-disk YAML while the DB keeps stale data.
    let definition_json = serde_json::to_value(&request.definition).map_err(|e| {
        ApiError::BadRequest(format!("Failed to serialize workflow definition: {}", e))
    })?;

    // Overwrite the YAML file on disk.
    let packs_base_dir = PathBuf::from(&state.config.packs_base_dir);
    write_workflow_yaml(&packs_base_dir, &request.pack_ref, &request).await?;

    // Sync the same changes into the workflow_definition record.
    let update_input = UpdateWorkflowDefinitionInput {
        label: Some(request.label),
        description: request.description,
        version: Some(request.version),
        param_schema: request.param_schema,
        out_schema: request.out_schema,
        definition: Some(definition_json),
        tags: request.tags,
        enabled: request.enabled,
    };
    let workflow =
        WorkflowDefinitionRepository::update(&state.db, existing_workflow.id, update_input).await?;

    let response = ApiResponse::with_message(
        WorkflowResponse::from(workflow),
        "Workflow file updated and synced successfully",
    );
    Ok((StatusCode::OK, Json(response)))
}
/// Write a workflow definition to disk as YAML
async fn write_workflow_yaml(
packs_base_dir: &PathBuf,
pack_ref: &str,
request: &SaveWorkflowFileRequest,
) -> Result<(), ApiError> {
let workflows_dir = packs_base_dir
.join(pack_ref)
.join("actions")
.join("workflows");
// Ensure the directory exists
tokio::fs::create_dir_all(&workflows_dir)
.await
.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to create workflows directory '{}': {}",
workflows_dir.display(),
e
))
})?;
let filename = format!("{}.workflow.yaml", request.name);
let filepath = workflows_dir.join(&filename);
// Serialize definition to YAML
let yaml_content = serde_yaml_ng::to_string(&request.definition).map_err(|e| {
ApiError::BadRequest(format!("Failed to serialize workflow to YAML: {}", e))
})?;
// Write file
tokio::fs::write(&filepath, yaml_content)
.await
.map_err(|e| {
ApiError::InternalServerError(format!(
"Failed to write workflow file '{}': {}",
filepath.display(),
e
))
})?;
tracing::info!(
"Wrote workflow file: {} ({} bytes)",
filepath.display(),
filepath.metadata().map(|m| m.len()).unwrap_or(0)
);
Ok(())
}
/// Create workflow routes
pub fn routes() -> Router<Arc<AppState>> {
Router::new()
@@ -350,7 +547,9 @@ pub fn routes() -> Router<Arc<AppState>> {
.put(update_workflow)
.delete(delete_workflow),
)
.route("/workflows/{ref}/file", put(update_workflow_file))
.route("/packs/{pack_ref}/workflows", get(list_workflows_by_pack))
.route("/packs/{pack_ref}/workflow-files", post(save_workflow_file))
}
#[cfg(test)]
@@ -362,4 +561,43 @@ mod tests {
// Just verify the router can be constructed
let _router = routes();
}
#[test]
fn test_save_request_validation() {
    // A fully populated request should pass validation.
    let request = SaveWorkflowFileRequest {
        name: String::from("test_workflow"),
        label: String::from("Test Workflow"),
        description: Some(String::from("A test workflow")),
        version: String::from("1.0.0"),
        pack_ref: String::from("core"),
        definition: serde_json::json!({
            "ref": "core.test_workflow",
            "label": "Test Workflow",
            "version": "1.0.0",
            "tasks": [{"name": "task1", "action": "core.echo"}]
        }),
        param_schema: None,
        out_schema: None,
        tags: None,
        enabled: None,
    };
    let outcome = request.validate();
    assert!(outcome.is_ok());
}
#[test]
fn test_save_request_validation_empty_name() {
    // An empty name must fail validation.
    let request = SaveWorkflowFileRequest {
        name: String::new(), // invalid: empty
        label: String::from("Test"),
        description: None,
        version: String::from("1.0.0"),
        pack_ref: String::from("core"),
        definition: serde_json::json!({}),
        param_schema: None,
        out_schema: None,
        tags: None,
        enabled: None,
    };
    let outcome = request.validate();
    assert!(outcome.is_err());
}
}