artifact management

This commit is contained in:
2026-03-03 14:16:23 -06:00
parent 8299e5efcb
commit b54aa3ec26
15 changed files with 980 additions and 411 deletions

View File

@@ -105,6 +105,9 @@ pub struct UpdateArtifactRequest {
/// Updated content type
pub content_type: Option<String>,
/// Updated execution ID (re-links artifact to a different execution)
pub execution: Option<i64>,
/// Updated structured data (replaces existing data entirely)
pub data: Option<JsonValue>,
}

View File

@@ -237,6 +237,9 @@ pub async fn update_action(
runtime_version_constraint: request.runtime_version_constraint,
param_schema: request.param_schema,
out_schema: request.out_schema,
parameter_delivery: None,
parameter_format: None,
output_format: None,
};
let action = ActionRepository::update(&state.db, existing_action.id, update_input).await?;

View File

@@ -8,6 +8,7 @@
//! - Progress append for progress-type artifacts (streaming updates)
//! - Listing artifacts by execution
//! - Version history and retrieval
//! - Upsert-and-upload: create-or-reuse an artifact by ref and upload a version in one call
use axum::{
body::Body,
@@ -20,7 +21,9 @@ use axum::{
use std::sync::Arc;
use tracing::warn;
use attune_common::models::enums::{ArtifactType, ArtifactVisibility};
use attune_common::models::enums::{
ArtifactType, ArtifactVisibility, OwnerType, RetentionPolicyType,
};
use attune_common::repositories::{
artifact::{
ArtifactRepository, ArtifactSearchFilters, ArtifactVersionRepository, CreateArtifactInput,
@@ -251,6 +254,7 @@ pub async fn update_artifact(
description: request.description,
content_type: request.content_type,
size_bytes: None, // Managed by version creation trigger
execution: request.execution.map(Some),
data: request.data,
};
@@ -970,6 +974,282 @@ pub async fn delete_version(
))
}
// ============================================================================
// Upsert-and-upload by ref
// ============================================================================
/// Upload a file version to an artifact identified by ref, creating the artifact if it does not
/// already exist.
///
/// This is the recommended way for actions to produce versioned file artifacts. The caller
/// provides the artifact ref and file content in a single multipart request. The server:
///
/// 1. Looks up the artifact by `ref`.
/// 2. If not found, creates it using the metadata fields in the multipart body.
/// 3. If found, optionally updates the `execution` link to the current execution.
/// 4. Uploads the file bytes as a new version (version number is auto-assigned).
///
/// **Multipart fields:**
/// - `file` (required) — the binary file content
/// - `ref` (required for creation) — artifact reference (ignored if artifact already exists)
/// - `scope` — owner scope: `system`, `pack`, `action`, `sensor`, `rule` (default: `action`)
/// - `owner` — owner identifier (default: empty string)
/// - `type` — artifact type: `file_text`, `file_image`, etc. (default: `file_text`)
/// - `visibility` — `public` or `private` (default: type-aware server default)
/// - `name` — human-readable name
/// - `description` — optional description
/// - `content_type` — MIME type (default: auto-detected from multipart or `application/octet-stream`)
/// - `execution` — execution ID to link this artifact to (updates existing artifacts too)
/// - `retention_policy` — `versions`, `days`, `hours`, `minutes` (default: `versions`)
/// - `retention_limit` — limit value (default: `10`)
/// - `created_by` — who created this version
/// - `meta` — JSON metadata for this version
#[utoipa::path(
    post,
    path = "/api/v1/artifacts/ref/{ref}/versions/upload",
    tag = "artifacts",
    params(("ref" = String, Path, description = "Artifact reference (created if not found)")),
    request_body(content = String, content_type = "multipart/form-data"),
    responses(
        (status = 201, description = "Version created (artifact may have been created too)", body = inline(ApiResponse<ArtifactVersionResponse>)),
        (status = 400, description = "Missing file field or invalid metadata"),
        (status = 413, description = "File too large"),
    ),
    security(("bearer_auth" = []))
)]
pub async fn upload_version_by_ref(
    RequireAuth(_user): RequireAuth,
    State(state): State<Arc<AppState>>,
    Path(artifact_ref): Path<String>,
    mut multipart: Multipart,
) -> ApiResult<impl IntoResponse> {
    // Hard cap on uploaded file size: 50 MB. Checked after the field is buffered,
    // so axum's own body limit (if configured) should be >= this value.
    const MAX_FILE_SIZE: usize = 50 * 1024 * 1024;

    // Collect all multipart fields. Field order is not guaranteed, so everything
    // is gathered into locals first and validated after the stream is exhausted.
    let mut file_data: Option<Vec<u8>> = None;
    let mut file_content_type: Option<String> = None;
    let mut content_type_field: Option<String> = None;
    let mut meta: Option<serde_json::Value> = None;
    let mut created_by: Option<String> = None;

    // Artifact-creation metadata (used only when creating a new artifact)
    let mut scope: Option<String> = None;
    let mut owner: Option<String> = None;
    let mut artifact_type: Option<String> = None;
    let mut visibility: Option<String> = None;
    let mut name: Option<String> = None;
    let mut description: Option<String> = None;
    let mut execution: Option<String> = None;
    let mut retention_policy: Option<String> = None;
    let mut retention_limit: Option<String> = None;

    while let Some(field) = multipart
        .next_field()
        .await
        .map_err(|e| ApiError::BadRequest(format!("Multipart error: {}", e)))?
    {
        let field_name = field.name().unwrap_or("").to_string();
        match field_name.as_str() {
            "file" => {
                // Remember the part's own Content-Type header; it is the fallback
                // when no explicit `content_type` field is supplied.
                file_content_type = field.content_type().map(|s| s.to_string());
                let bytes = field
                    .bytes()
                    .await
                    .map_err(|e| ApiError::BadRequest(format!("Failed to read file: {}", e)))?;
                if bytes.len() > MAX_FILE_SIZE {
                    return Err(ApiError::BadRequest(format!(
                        "File exceeds maximum size of {} bytes",
                        MAX_FILE_SIZE
                    )));
                }
                file_data = Some(bytes.to_vec());
            }
            "content_type" => {
                let t = field.text().await.unwrap_or_default();
                if !t.is_empty() {
                    content_type_field = Some(t);
                }
            }
            "meta" => {
                let t = field.text().await.unwrap_or_default();
                if !t.is_empty() {
                    meta =
                        Some(serde_json::from_str(&t).map_err(|e| {
                            ApiError::BadRequest(format!("Invalid meta JSON: {}", e))
                        })?);
                }
            }
            "created_by" => {
                let t = field.text().await.unwrap_or_default();
                if !t.is_empty() {
                    created_by = Some(t);
                }
            }
            // The remaining fields are stored raw and parsed/validated below,
            // after we know whether the artifact already exists.
            "scope" => {
                scope = Some(field.text().await.unwrap_or_default());
            }
            "owner" => {
                owner = Some(field.text().await.unwrap_or_default());
            }
            "type" => {
                artifact_type = Some(field.text().await.unwrap_or_default());
            }
            "visibility" => {
                visibility = Some(field.text().await.unwrap_or_default());
            }
            "name" => {
                name = Some(field.text().await.unwrap_or_default());
            }
            "description" => {
                description = Some(field.text().await.unwrap_or_default());
            }
            "execution" => {
                execution = Some(field.text().await.unwrap_or_default());
            }
            "retention_policy" => {
                retention_policy = Some(field.text().await.unwrap_or_default());
            }
            "retention_limit" => {
                retention_limit = Some(field.text().await.unwrap_or_default());
            }
            _ => { /* skip unknown fields */ }
        }
    }

    let file_bytes = file_data.ok_or_else(|| {
        ApiError::BadRequest("Missing required 'file' field in multipart upload".to_string())
    })?;

    // Parse execution ID (empty string is treated the same as absent)
    let execution_id: Option<i64> = match &execution {
        Some(s) if !s.is_empty() => Some(
            s.parse::<i64>()
                .map_err(|_| ApiError::BadRequest(format!("Invalid execution ID: '{}'", s)))?,
        ),
        _ => None,
    };

    // Upsert: find existing artifact or create a new one
    let artifact = match ArtifactRepository::find_by_ref(&state.db, &artifact_ref).await? {
        Some(existing) => {
            // Re-link to the current execution only when a new, different execution
            // ID was supplied; otherwise avoid a needless write.
            if execution_id.is_some() && execution_id != existing.execution {
                let update_input = UpdateArtifactInput {
                    r#ref: None,
                    scope: None,
                    owner: None,
                    r#type: None,
                    visibility: None,
                    retention_policy: None,
                    retention_limit: None,
                    name: None,
                    description: None,
                    content_type: None,
                    size_bytes: None,
                    // Double-Option: outer Some means "set the column", inner value
                    // is the new execution link.
                    execution: execution_id.map(Some),
                    data: None,
                };
                ArtifactRepository::update(&state.db, existing.id, update_input).await?
            } else {
                existing
            }
        }
        None => {
            // Parse artifact type. An empty `type` field falls back to the documented
            // default (`file_text`), consistent with scope/visibility/retention below.
            let a_type: ArtifactType = match &artifact_type {
                Some(t) if !t.is_empty() => {
                    serde_json::from_value(serde_json::Value::String(t.clone()))
                        .map_err(|_| ApiError::BadRequest(format!("Invalid artifact type: '{}'", t)))?
                }
                _ => ArtifactType::FileText,
            };
            // Parse scope (default: `action`)
            let a_scope: OwnerType = match &scope {
                Some(s) if !s.is_empty() => {
                    serde_json::from_value(serde_json::Value::String(s.clone()))
                        .map_err(|_| ApiError::BadRequest(format!("Invalid scope: '{}'", s)))?
                }
                _ => OwnerType::Action,
            };
            // Parse visibility with type-aware default: progress artifacts are meant
            // to be streamed to clients, so they default to public.
            let a_visibility: ArtifactVisibility = match &visibility {
                Some(v) if !v.is_empty() => {
                    serde_json::from_value(serde_json::Value::String(v.clone()))
                        .map_err(|_| ApiError::BadRequest(format!("Invalid visibility: '{}'", v)))?
                }
                _ => {
                    if a_type == ArtifactType::Progress {
                        ArtifactVisibility::Public
                    } else {
                        ArtifactVisibility::Private
                    }
                }
            };
            // Parse retention (defaults: policy `versions`, limit 10)
            let a_retention_policy: RetentionPolicyType = match &retention_policy {
                Some(rp) if !rp.is_empty() => {
                    serde_json::from_value(serde_json::Value::String(rp.clone())).map_err(|_| {
                        ApiError::BadRequest(format!("Invalid retention_policy: '{}'", rp))
                    })?
                }
                _ => RetentionPolicyType::Versions,
            };
            let a_retention_limit: i32 = match &retention_limit {
                Some(rl) if !rl.is_empty() => rl.parse::<i32>().map_err(|_| {
                    ApiError::BadRequest(format!("Invalid retention_limit: '{}'", rl))
                })?,
                _ => 10,
            };
            let create_input = CreateArtifactInput {
                r#ref: artifact_ref.clone(),
                scope: a_scope,
                owner: owner.unwrap_or_default(),
                r#type: a_type,
                visibility: a_visibility,
                retention_policy: a_retention_policy,
                retention_limit: a_retention_limit,
                name: name.filter(|s| !s.is_empty()),
                description: description.filter(|s| !s.is_empty()),
                content_type: content_type_field
                    .clone()
                    .or_else(|| file_content_type.clone()),
                execution: execution_id,
                data: None,
            };
            ArtifactRepository::create(&state.db, create_input).await?
        }
    };

    // Resolve content type: explicit field > multipart header > fallback
    let resolved_ct = content_type_field
        .or(file_content_type)
        .unwrap_or_else(|| "application/octet-stream".to_string());

    let version_input = CreateArtifactVersionInput {
        artifact: artifact.id,
        content_type: Some(resolved_ct),
        content: Some(file_bytes),
        content_json: None,
        file_path: None,
        meta,
        created_by,
    };
    let version = ArtifactVersionRepository::create(&state.db, version_input).await?;

    Ok((
        StatusCode::CREATED,
        Json(ApiResponse::with_message(
            ArtifactVersionResponse::from(version),
            "Version uploaded successfully",
        )),
    ))
}
// ============================================================================
// Helpers
// ============================================================================
@@ -1219,6 +1499,10 @@ pub fn routes() -> Router<Arc<AppState>> {
.delete(delete_artifact),
)
.route("/artifacts/ref/{ref}", get(get_artifact_by_ref))
.route(
"/artifacts/ref/{ref}/versions/upload",
post(upload_version_by_ref),
)
// Progress / data
.route("/artifacts/{id}/progress", post(append_progress))
.route(

View File

@@ -14,10 +14,7 @@ use validator::Validate;
use attune_common::models::pack_test::PackTestResult;
use attune_common::mq::{MessageEnvelope, MessageType, PackRegisteredPayload};
use attune_common::repositories::{
action::ActionRepository,
pack::{CreatePackInput, UpdatePackInput},
rule::{RestoreRuleInput, RuleRepository},
trigger::TriggerRepository,
Create, Delete, FindById, FindByRef, PackRepository, PackTestRepository, Pagination, Update,
};
use attune_common::workflow::{PackWorkflowService, PackWorkflowServiceConfig};
@@ -732,85 +729,100 @@ async fn register_pack_internal(
.and_then(|v| v.as_str())
.map(|s| s.to_string());
// Ad-hoc rules to restore after pack reinstallation
let mut saved_adhoc_rules: Vec<attune_common::models::rule::Rule> = Vec::new();
// Extract common metadata fields used for both create and update
let conf_schema = pack_yaml
.get("config_schema")
.and_then(|v| serde_json::to_value(v).ok())
.unwrap_or_else(|| serde_json::json!({}));
let meta = pack_yaml
.get("metadata")
.and_then(|v| serde_json::to_value(v).ok())
.unwrap_or_else(|| serde_json::json!({}));
let tags: Vec<String> = pack_yaml
.get("keywords")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default();
let runtime_deps: Vec<String> = pack_yaml
.get("runtime_deps")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default();
let dependencies: Vec<String> = pack_yaml
.get("dependencies")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default();
// Check if pack already exists
if !force {
if PackRepository::exists_by_ref(&state.db, &pack_ref).await? {
// Check if pack already exists — update in place to preserve IDs
let existing_pack = PackRepository::find_by_ref(&state.db, &pack_ref).await?;
let is_new_pack;
let pack = if let Some(existing) = existing_pack {
if !force {
return Err(ApiError::Conflict(format!(
"Pack '{}' already exists. Use force=true to reinstall.",
pack_ref
)));
}
// Update existing pack in place — preserves pack ID and all child entity IDs
let update_input = UpdatePackInput {
label: Some(label),
description: Some(description.unwrap_or_default()),
version: Some(version.clone()),
conf_schema: Some(conf_schema),
config: None, // preserve user-set config
meta: Some(meta),
tags: Some(tags),
runtime_deps: Some(runtime_deps),
dependencies: Some(dependencies),
is_standard: None,
installers: None,
};
let updated = PackRepository::update(&state.db, existing.id, update_input).await?;
tracing::info!(
"Updated existing pack '{}' (ID: {}) in place",
pack_ref,
updated.id
);
is_new_pack = false;
updated
} else {
// Delete existing pack if force is true, preserving ad-hoc (user-created) rules
if let Some(existing_pack) = PackRepository::find_by_ref(&state.db, &pack_ref).await? {
// Save ad-hoc rules before deletion — CASCADE on pack FK would destroy them
saved_adhoc_rules = RuleRepository::find_adhoc_by_pack(&state.db, existing_pack.id)
.await
.unwrap_or_default();
if !saved_adhoc_rules.is_empty() {
tracing::info!(
"Preserving {} ad-hoc rule(s) during reinstall of pack '{}'",
saved_adhoc_rules.len(),
pack_ref
);
}
// Create new pack
let pack_input = CreatePackInput {
r#ref: pack_ref.clone(),
label,
description,
version: version.clone(),
conf_schema,
config: serde_json::json!({}),
meta,
tags,
runtime_deps,
dependencies,
is_standard: false,
installers: serde_json::json!({}),
};
PackRepository::delete(&state.db, existing_pack.id).await?;
tracing::info!("Deleted existing pack '{}' for forced reinstall", pack_ref);
}
}
// Create pack input
let pack_input = CreatePackInput {
r#ref: pack_ref.clone(),
label,
description,
version: version.clone(),
conf_schema: pack_yaml
.get("config_schema")
.and_then(|v| serde_json::to_value(v).ok())
.unwrap_or_else(|| serde_json::json!({})),
config: serde_json::json!({}),
meta: pack_yaml
.get("metadata")
.and_then(|v| serde_json::to_value(v).ok())
.unwrap_or_else(|| serde_json::json!({})),
tags: pack_yaml
.get("keywords")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default(),
runtime_deps: pack_yaml
.get("runtime_deps")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default(),
dependencies: pack_yaml
.get("dependencies")
.and_then(|v| v.as_sequence())
.map(|seq| {
seq.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect()
})
.unwrap_or_default(),
is_standard: false,
installers: serde_json::json!({}),
is_new_pack = true;
PackRepository::create(&state.db, pack_input).await?
};
let pack = PackRepository::create(&state.db, pack_input).await?;
// Auto-sync workflows after pack creation
let packs_base_dir = PathBuf::from(&state.config.packs_base_dir);
let service_config = PackWorkflowServiceConfig {
@@ -850,14 +862,18 @@ async fn register_pack_internal(
match component_loader.load_all(&pack_path).await {
Ok(load_result) => {
tracing::info!(
"Pack '{}' components loaded: {} runtimes, {} triggers, {} actions, {} sensors ({} skipped, {} warnings)",
"Pack '{}' components loaded: {} created, {} updated, {} skipped, {} removed, {} warnings \
(runtimes: {}/{}, triggers: {}/{}, actions: {}/{}, sensors: {}/{})",
pack.r#ref,
load_result.runtimes_loaded,
load_result.triggers_loaded,
load_result.actions_loaded,
load_result.sensors_loaded,
load_result.total_loaded(),
load_result.total_updated(),
load_result.total_skipped(),
load_result.warnings.len()
load_result.removed,
load_result.warnings.len(),
load_result.runtimes_loaded, load_result.runtimes_updated,
load_result.triggers_loaded, load_result.triggers_updated,
load_result.actions_loaded, load_result.actions_updated,
load_result.sensors_loaded, load_result.sensors_updated,
);
for warning in &load_result.warnings {
tracing::warn!("Pack component warning: {}", warning);
@@ -873,122 +889,9 @@ async fn register_pack_internal(
}
}
// Restore ad-hoc rules that were saved before pack deletion, and
// re-link any rules from other packs whose action/trigger FKs were
// set to NULL when the old pack's entities were cascade-deleted.
{
// Phase 1: Restore saved ad-hoc rules
if !saved_adhoc_rules.is_empty() {
let mut restored = 0u32;
for saved_rule in &saved_adhoc_rules {
// Resolve action and trigger IDs by ref (they may have been recreated)
let action_id = ActionRepository::find_by_ref(&state.db, &saved_rule.action_ref)
.await
.ok()
.flatten()
.map(|a| a.id);
let trigger_id = TriggerRepository::find_by_ref(&state.db, &saved_rule.trigger_ref)
.await
.ok()
.flatten()
.map(|t| t.id);
let input = RestoreRuleInput {
r#ref: saved_rule.r#ref.clone(),
pack: pack.id,
pack_ref: pack.r#ref.clone(),
label: saved_rule.label.clone(),
description: saved_rule.description.clone(),
action: action_id,
action_ref: saved_rule.action_ref.clone(),
trigger: trigger_id,
trigger_ref: saved_rule.trigger_ref.clone(),
conditions: saved_rule.conditions.clone(),
action_params: saved_rule.action_params.clone(),
trigger_params: saved_rule.trigger_params.clone(),
enabled: saved_rule.enabled,
};
match RuleRepository::restore_rule(&state.db, input).await {
Ok(rule) => {
restored += 1;
if rule.action.is_none() || rule.trigger.is_none() {
tracing::warn!(
"Restored ad-hoc rule '{}' with unresolved references \
(action: {}, trigger: {})",
rule.r#ref,
if rule.action.is_some() {
"linked"
} else {
"NULL"
},
if rule.trigger.is_some() {
"linked"
} else {
"NULL"
},
);
}
}
Err(e) => {
tracing::warn!(
"Failed to restore ad-hoc rule '{}': {}",
saved_rule.r#ref,
e
);
}
}
}
tracing::info!(
"Restored {}/{} ad-hoc rule(s) for pack '{}'",
restored,
saved_adhoc_rules.len(),
pack.r#ref
);
}
// Phase 2: Re-link rules from other packs whose action/trigger FKs
// were set to NULL when the old pack's entities were cascade-deleted
let new_actions = ActionRepository::find_by_pack(&state.db, pack.id)
.await
.unwrap_or_default();
let new_triggers = TriggerRepository::find_by_pack(&state.db, pack.id)
.await
.unwrap_or_default();
for action in &new_actions {
match RuleRepository::relink_action_by_ref(&state.db, &action.r#ref, action.id).await {
Ok(count) if count > 0 => {
tracing::info!("Re-linked {} rule(s) to action '{}'", count, action.r#ref);
}
Err(e) => {
tracing::warn!(
"Failed to re-link rules to action '{}': {}",
action.r#ref,
e
);
}
_ => {}
}
}
for trigger in &new_triggers {
match RuleRepository::relink_trigger_by_ref(&state.db, &trigger.r#ref, trigger.id).await
{
Ok(count) if count > 0 => {
tracing::info!("Re-linked {} rule(s) to trigger '{}'", count, trigger.r#ref);
}
Err(e) => {
tracing::warn!(
"Failed to re-link rules to trigger '{}': {}",
trigger.r#ref,
e
);
}
_ => {}
}
}
}
// Since entities are now updated in place (IDs preserved), ad-hoc rules
// and cross-pack FK references survive reinstallation automatically.
// No need to save/restore rules or re-link FKs.
// Set up runtime environments for the pack's actions.
// This creates virtualenvs, installs dependencies, etc. based on each
@@ -1199,8 +1102,11 @@ async fn register_pack_internal(
let test_passed = result.status == "passed";
if !test_passed && !force {
// Tests failed and force is not set - rollback pack creation
let _ = PackRepository::delete(&state.db, pack.id).await;
// Tests failed and force is not set — only delete if we just created this pack.
// If we updated an existing pack, deleting would destroy the original.
if is_new_pack {
let _ = PackRepository::delete(&state.db, pack.id).await;
}
return Err(ApiError::BadRequest(format!(
"Pack registration failed: tests did not pass. Use force=true to register anyway."
)));
@@ -1217,7 +1123,9 @@ async fn register_pack_internal(
tracing::warn!("Failed to execute tests for pack '{}': {}", pack.r#ref, e);
// If tests can't be executed and force is not set, fail the registration
if !force {
let _ = PackRepository::delete(&state.db, pack.id).await;
if is_new_pack {
let _ = PackRepository::delete(&state.db, pack.id).await;
}
return Err(ApiError::BadRequest(format!(
"Pack registration failed: could not execute tests. Error: {}. Use force=true to register anyway.",
e

View File

@@ -669,6 +669,9 @@ async fn update_companion_action(
runtime_version_constraint: None,
param_schema: param_schema.cloned(),
out_schema: out_schema.cloned(),
parameter_delivery: None,
parameter_format: None,
output_format: None,
};
ActionRepository::update(db, action.id, update_input)
@@ -731,6 +734,9 @@ async fn ensure_companion_action(
runtime_version_constraint: None,
param_schema: param_schema.cloned(),
out_schema: out_schema.cloned(),
parameter_delivery: None,
parameter_format: None,
output_format: None,
};
ActionRepository::update(db, action.id, update_input)