node running, runtime version awareness
This commit is contained in:
@@ -38,6 +38,11 @@ pub struct CreateActionRequest {
|
||||
#[schema(example = 1)]
|
||||
pub runtime: Option<i64>,
|
||||
|
||||
/// Optional semver version constraint for the runtime (e.g., ">=3.12", ">=3.12,<4.0", "~18.0")
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schema(example = ">=3.12", nullable = true)]
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
|
||||
/// Parameter schema (StackStorm-style) defining expected inputs with inline required/secret
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schema(value_type = Object, nullable = true, example = json!({"channel": {"type": "string", "description": "Slack channel", "required": true}, "message": {"type": "string", "description": "Message text", "required": true}}))]
|
||||
@@ -71,6 +76,10 @@ pub struct UpdateActionRequest {
|
||||
#[schema(example = 1)]
|
||||
pub runtime: Option<i64>,
|
||||
|
||||
/// Optional semver version constraint for the runtime (e.g., ">=3.12", ">=3.12,<4.0", "~18.0")
|
||||
#[schema(example = ">=3.12", nullable = true)]
|
||||
pub runtime_version_constraint: Option<Option<String>>,
|
||||
|
||||
/// Parameter schema (StackStorm-style with inline required/secret)
|
||||
#[schema(value_type = Object, nullable = true)]
|
||||
pub param_schema: Option<JsonValue>,
|
||||
@@ -115,6 +124,11 @@ pub struct ActionResponse {
|
||||
#[schema(example = 1)]
|
||||
pub runtime: Option<i64>,
|
||||
|
||||
/// Semver version constraint for the runtime (e.g., ">=3.12", ">=3.12,<4.0", "~18.0")
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schema(example = ">=3.12", nullable = true)]
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
|
||||
/// Parameter schema (StackStorm-style with inline required/secret)
|
||||
#[schema(value_type = Object, nullable = true)]
|
||||
pub param_schema: Option<JsonValue>,
|
||||
@@ -167,6 +181,11 @@ pub struct ActionSummary {
|
||||
#[schema(example = 1)]
|
||||
pub runtime: Option<i64>,
|
||||
|
||||
/// Semver version constraint for the runtime
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schema(example = ">=3.12", nullable = true)]
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
|
||||
/// Creation timestamp
|
||||
#[schema(example = "2024-01-13T10:30:00Z")]
|
||||
pub created: DateTime<Utc>,
|
||||
@@ -188,6 +207,7 @@ impl From<attune_common::models::action::Action> for ActionResponse {
|
||||
description: action.description,
|
||||
entrypoint: action.entrypoint,
|
||||
runtime: action.runtime,
|
||||
runtime_version_constraint: action.runtime_version_constraint,
|
||||
param_schema: action.param_schema,
|
||||
out_schema: action.out_schema,
|
||||
is_adhoc: action.is_adhoc,
|
||||
@@ -208,6 +228,7 @@ impl From<attune_common::models::action::Action> for ActionSummary {
|
||||
description: action.description,
|
||||
entrypoint: action.entrypoint,
|
||||
runtime: action.runtime,
|
||||
runtime_version_constraint: action.runtime_version_constraint,
|
||||
created: action.created,
|
||||
updated: action.updated,
|
||||
}
|
||||
@@ -284,6 +305,7 @@ mod tests {
|
||||
description: "Test description".to_string(),
|
||||
entrypoint: "/actions/test.py".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
};
|
||||
@@ -300,6 +322,7 @@ mod tests {
|
||||
description: "Test description".to_string(),
|
||||
entrypoint: "/actions/test.py".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
};
|
||||
@@ -314,6 +337,7 @@ mod tests {
|
||||
description: None,
|
||||
entrypoint: None,
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
};
|
||||
|
||||
@@ -186,6 +186,7 @@ pub async fn create_action(
|
||||
description: request.description,
|
||||
entrypoint: request.entrypoint,
|
||||
runtime: request.runtime,
|
||||
runtime_version_constraint: request.runtime_version_constraint,
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
is_adhoc: true, // Actions created via API are ad-hoc (not from pack installation)
|
||||
@@ -235,6 +236,7 @@ pub async fn update_action(
|
||||
description: request.description,
|
||||
entrypoint: request.entrypoint,
|
||||
runtime: request.runtime,
|
||||
runtime_version_constraint: request.runtime_version_constraint,
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
};
|
||||
|
||||
@@ -669,6 +669,7 @@ pub async fn create_sensor(
|
||||
entrypoint: request.entrypoint,
|
||||
runtime: runtime.id,
|
||||
runtime_ref: runtime.r#ref.clone(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: trigger.id,
|
||||
trigger_ref: trigger.r#ref.clone(),
|
||||
enabled: request.enabled,
|
||||
@@ -721,6 +722,7 @@ pub async fn update_sensor(
|
||||
entrypoint: request.entrypoint,
|
||||
runtime: None,
|
||||
runtime_ref: None,
|
||||
runtime_version_constraint: None,
|
||||
trigger: None,
|
||||
trigger_ref: None,
|
||||
enabled: request.enabled,
|
||||
@@ -806,6 +808,7 @@ pub async fn enable_sensor(
|
||||
entrypoint: None,
|
||||
runtime: None,
|
||||
runtime_ref: None,
|
||||
runtime_version_constraint: None,
|
||||
trigger: None,
|
||||
trigger_ref: None,
|
||||
enabled: Some(true),
|
||||
@@ -852,6 +855,7 @@ pub async fn disable_sensor(
|
||||
entrypoint: None,
|
||||
runtime: None,
|
||||
runtime_ref: None,
|
||||
runtime_version_constraint: None,
|
||||
trigger: None,
|
||||
trigger_ref: None,
|
||||
enabled: Some(false),
|
||||
|
||||
@@ -12,6 +12,7 @@ use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
use attune_common::repositories::{
|
||||
action::{ActionRepository, CreateActionInput, UpdateActionInput},
|
||||
pack::PackRepository,
|
||||
workflow::{
|
||||
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput, WorkflowDefinitionRepository,
|
||||
@@ -225,21 +226,36 @@ pub async fn create_workflow(
|
||||
|
||||
// Create workflow input
|
||||
let workflow_input = CreateWorkflowDefinitionInput {
|
||||
r#ref: request.r#ref,
|
||||
r#ref: request.r#ref.clone(),
|
||||
pack: pack.id,
|
||||
pack_ref: pack.r#ref.clone(),
|
||||
label: request.label,
|
||||
description: request.description,
|
||||
version: request.version,
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
label: request.label.clone(),
|
||||
description: request.description.clone(),
|
||||
version: request.version.clone(),
|
||||
param_schema: request.param_schema.clone(),
|
||||
out_schema: request.out_schema.clone(),
|
||||
definition: request.definition,
|
||||
tags: request.tags.unwrap_or_default(),
|
||||
tags: request.tags.clone().unwrap_or_default(),
|
||||
enabled: request.enabled.unwrap_or(true),
|
||||
};
|
||||
|
||||
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
||||
|
||||
// Create a companion action record so the workflow appears in action lists
|
||||
create_companion_action(
|
||||
&state.db,
|
||||
&workflow.r#ref,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&request.label,
|
||||
&request.description.clone().unwrap_or_default(),
|
||||
"workflow",
|
||||
request.param_schema.as_ref(),
|
||||
request.out_schema.as_ref(),
|
||||
workflow.id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let response = ApiResponse::with_message(
|
||||
WorkflowResponse::from(workflow),
|
||||
"Workflow created successfully",
|
||||
@@ -280,11 +296,11 @@ pub async fn update_workflow(
|
||||
|
||||
// Create update input
|
||||
let update_input = UpdateWorkflowDefinitionInput {
|
||||
label: request.label,
|
||||
description: request.description,
|
||||
version: request.version,
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
label: request.label.clone(),
|
||||
description: request.description.clone(),
|
||||
version: request.version.clone(),
|
||||
param_schema: request.param_schema.clone(),
|
||||
out_schema: request.out_schema.clone(),
|
||||
definition: request.definition,
|
||||
tags: request.tags,
|
||||
enabled: request.enabled,
|
||||
@@ -293,6 +309,17 @@ pub async fn update_workflow(
|
||||
let workflow =
|
||||
WorkflowDefinitionRepository::update(&state.db, existing_workflow.id, update_input).await?;
|
||||
|
||||
// Update the companion action record if it exists
|
||||
update_companion_action(
|
||||
&state.db,
|
||||
existing_workflow.id,
|
||||
request.label.as_deref(),
|
||||
request.description.as_deref(),
|
||||
request.param_schema.as_ref(),
|
||||
request.out_schema.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let response = ApiResponse::with_message(
|
||||
WorkflowResponse::from(workflow),
|
||||
"Workflow updated successfully",
|
||||
@@ -325,7 +352,7 @@ pub async fn delete_workflow(
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::NotFound(format!("Workflow '{}' not found", workflow_ref)))?;
|
||||
|
||||
// Delete the workflow
|
||||
// Delete the workflow (companion action is cascade-deleted via FK on action.workflow_def)
|
||||
let deleted = WorkflowDefinitionRepository::delete(&state.db, workflow.id).await?;
|
||||
|
||||
if !deleted {
|
||||
@@ -345,6 +372,7 @@ pub async fn delete_workflow(
|
||||
///
|
||||
/// Writes a `{name}.workflow.yaml` file to `{packs_base_dir}/{pack_ref}/actions/workflows/`
|
||||
/// and creates or updates the corresponding workflow_definition record in the database.
|
||||
/// Also creates a companion action record so the workflow appears in action lists and palettes.
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/api/v1/packs/{pack_ref}/workflow-files",
|
||||
@@ -398,21 +426,37 @@ pub async fn save_workflow_file(
|
||||
})?;
|
||||
|
||||
let workflow_input = CreateWorkflowDefinitionInput {
|
||||
r#ref: workflow_ref,
|
||||
r#ref: workflow_ref.clone(),
|
||||
pack: pack.id,
|
||||
pack_ref: pack.r#ref.clone(),
|
||||
label: request.label,
|
||||
description: request.description,
|
||||
version: request.version,
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
label: request.label.clone(),
|
||||
description: request.description.clone(),
|
||||
version: request.version.clone(),
|
||||
param_schema: request.param_schema.clone(),
|
||||
out_schema: request.out_schema.clone(),
|
||||
definition: definition_json,
|
||||
tags: request.tags.unwrap_or_default(),
|
||||
tags: request.tags.clone().unwrap_or_default(),
|
||||
enabled: request.enabled.unwrap_or(true),
|
||||
};
|
||||
|
||||
let workflow = WorkflowDefinitionRepository::create(&state.db, workflow_input).await?;
|
||||
|
||||
// Create a companion action record so the workflow appears in action lists and palettes
|
||||
let entrypoint = format!("workflows/{}.workflow.yaml", request.name);
|
||||
create_companion_action(
|
||||
&state.db,
|
||||
&workflow_ref,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&request.label,
|
||||
&request.description.clone().unwrap_or_default(),
|
||||
&entrypoint,
|
||||
request.param_schema.as_ref(),
|
||||
request.out_schema.as_ref(),
|
||||
workflow.id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let response = ApiResponse::with_message(
|
||||
WorkflowResponse::from(workflow),
|
||||
"Workflow file saved and synced successfully",
|
||||
@@ -452,7 +496,7 @@ pub async fn update_workflow_file(
|
||||
.ok_or_else(|| ApiError::NotFound(format!("Workflow '{}' not found", workflow_ref)))?;
|
||||
|
||||
// Verify pack exists
|
||||
let _pack = PackRepository::find_by_ref(&state.db, &request.pack_ref)
|
||||
let pack = PackRepository::find_by_ref(&state.db, &request.pack_ref)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::NotFound(format!("Pack '{}' not found", request.pack_ref)))?;
|
||||
|
||||
@@ -466,11 +510,11 @@ pub async fn update_workflow_file(
|
||||
})?;
|
||||
|
||||
let update_input = UpdateWorkflowDefinitionInput {
|
||||
label: Some(request.label),
|
||||
description: request.description,
|
||||
label: Some(request.label.clone()),
|
||||
description: request.description.clone(),
|
||||
version: Some(request.version),
|
||||
param_schema: request.param_schema,
|
||||
out_schema: request.out_schema,
|
||||
param_schema: request.param_schema.clone(),
|
||||
out_schema: request.out_schema.clone(),
|
||||
definition: Some(definition_json),
|
||||
tags: request.tags,
|
||||
enabled: request.enabled,
|
||||
@@ -479,6 +523,23 @@ pub async fn update_workflow_file(
|
||||
let workflow =
|
||||
WorkflowDefinitionRepository::update(&state.db, existing_workflow.id, update_input).await?;
|
||||
|
||||
// Update the companion action record, or create it if it doesn't exist yet
|
||||
// (handles workflows that were created before this fix was deployed)
|
||||
let entrypoint = format!("workflows/{}.workflow.yaml", request.name);
|
||||
ensure_companion_action(
|
||||
&state.db,
|
||||
existing_workflow.id,
|
||||
&workflow_ref,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&request.label,
|
||||
&request.description.unwrap_or_default(),
|
||||
&entrypoint,
|
||||
request.param_schema.as_ref(),
|
||||
request.out_schema.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let response = ApiResponse::with_message(
|
||||
WorkflowResponse::from(workflow),
|
||||
"Workflow file updated and synced successfully",
|
||||
@@ -537,6 +598,204 @@ async fn write_workflow_yaml(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a companion action record for a workflow definition.
|
||||
///
|
||||
/// This ensures the workflow appears in action lists and the action palette in the
|
||||
/// workflow builder. The action is created with `is_workflow = true` and linked to
|
||||
/// the workflow definition via the `workflow_def` FK.
|
||||
async fn create_companion_action(
|
||||
db: &sqlx::PgPool,
|
||||
workflow_ref: &str,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
label: &str,
|
||||
description: &str,
|
||||
entrypoint: &str,
|
||||
param_schema: Option<&serde_json::Value>,
|
||||
out_schema: Option<&serde_json::Value>,
|
||||
workflow_def_id: i64,
|
||||
) -> Result<(), ApiError> {
|
||||
let action_input = CreateActionInput {
|
||||
r#ref: workflow_ref.to_string(),
|
||||
pack: pack_id,
|
||||
pack_ref: pack_ref.to_string(),
|
||||
label: label.to_string(),
|
||||
description: description.to_string(),
|
||||
entrypoint: entrypoint.to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: param_schema.cloned(),
|
||||
out_schema: out_schema.cloned(),
|
||||
is_adhoc: false,
|
||||
};
|
||||
|
||||
let action = ActionRepository::create(db, action_input)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!(
|
||||
"Failed to create companion action for workflow '{}': {}",
|
||||
workflow_ref,
|
||||
e
|
||||
);
|
||||
ApiError::InternalServerError(format!(
|
||||
"Failed to create companion action for workflow: {}",
|
||||
e
|
||||
))
|
||||
})?;
|
||||
|
||||
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
|
||||
ActionRepository::link_workflow_def(db, action.id, workflow_def_id)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!(
|
||||
"Failed to link action to workflow definition '{}': {}",
|
||||
workflow_ref,
|
||||
e
|
||||
);
|
||||
ApiError::InternalServerError(format!(
|
||||
"Failed to link action to workflow definition: {}",
|
||||
e
|
||||
))
|
||||
})?;
|
||||
|
||||
tracing::info!(
|
||||
"Created companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
workflow_ref,
|
||||
action.id,
|
||||
workflow_def_id
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Update the companion action record for a workflow definition.
|
||||
///
|
||||
/// Finds the action linked to the workflow definition and updates its metadata
|
||||
/// to stay in sync with the workflow definition.
|
||||
async fn update_companion_action(
|
||||
db: &sqlx::PgPool,
|
||||
workflow_def_id: i64,
|
||||
label: Option<&str>,
|
||||
description: Option<&str>,
|
||||
param_schema: Option<&serde_json::Value>,
|
||||
out_schema: Option<&serde_json::Value>,
|
||||
) -> Result<(), ApiError> {
|
||||
let existing_action = ActionRepository::find_by_workflow_def(db, workflow_def_id)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::warn!(
|
||||
"Failed to look up companion action for workflow_def {}: {}",
|
||||
workflow_def_id,
|
||||
e
|
||||
);
|
||||
ApiError::InternalServerError(format!("Failed to look up companion action: {}", e))
|
||||
})?;
|
||||
|
||||
if let Some(action) = existing_action {
|
||||
let update_input = UpdateActionInput {
|
||||
label: label.map(|s| s.to_string()),
|
||||
description: description.map(|s| s.to_string()),
|
||||
entrypoint: None,
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: param_schema.cloned(),
|
||||
out_schema: out_schema.cloned(),
|
||||
};
|
||||
|
||||
ActionRepository::update(db, action.id, update_input)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::warn!(
|
||||
"Failed to update companion action (ID: {}) for workflow_def {}: {}",
|
||||
action.id,
|
||||
workflow_def_id,
|
||||
e
|
||||
);
|
||||
ApiError::InternalServerError(format!("Failed to update companion action: {}", e))
|
||||
})?;
|
||||
|
||||
tracing::debug!(
|
||||
"Updated companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
action.r#ref,
|
||||
action.id,
|
||||
workflow_def_id
|
||||
);
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"No companion action found for workflow_def {}; skipping update",
|
||||
workflow_def_id
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensure a companion action record exists for a workflow definition.
|
||||
///
|
||||
/// If the action already exists, update it. If it doesn't exist (e.g., for workflows
|
||||
/// created before the companion-action fix), create it.
|
||||
async fn ensure_companion_action(
|
||||
db: &sqlx::PgPool,
|
||||
workflow_def_id: i64,
|
||||
workflow_ref: &str,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
label: &str,
|
||||
description: &str,
|
||||
entrypoint: &str,
|
||||
param_schema: Option<&serde_json::Value>,
|
||||
out_schema: Option<&serde_json::Value>,
|
||||
) -> Result<(), ApiError> {
|
||||
let existing_action = ActionRepository::find_by_workflow_def(db, workflow_def_id)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
ApiError::InternalServerError(format!("Failed to look up companion action: {}", e))
|
||||
})?;
|
||||
|
||||
if let Some(action) = existing_action {
|
||||
// Update existing companion action
|
||||
let update_input = UpdateActionInput {
|
||||
label: Some(label.to_string()),
|
||||
description: Some(description.to_string()),
|
||||
entrypoint: Some(entrypoint.to_string()),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: param_schema.cloned(),
|
||||
out_schema: out_schema.cloned(),
|
||||
};
|
||||
|
||||
ActionRepository::update(db, action.id, update_input)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
ApiError::InternalServerError(format!("Failed to update companion action: {}", e))
|
||||
})?;
|
||||
|
||||
tracing::debug!(
|
||||
"Updated companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
action.r#ref,
|
||||
action.id,
|
||||
workflow_def_id
|
||||
);
|
||||
} else {
|
||||
// Create new companion action (backfill for pre-fix workflows)
|
||||
create_companion_action(
|
||||
db,
|
||||
workflow_ref,
|
||||
pack_id,
|
||||
pack_ref,
|
||||
label,
|
||||
description,
|
||||
entrypoint,
|
||||
param_schema,
|
||||
out_schema,
|
||||
workflow_def_id,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create workflow routes
|
||||
pub fn routes() -> Router<Arc<AppState>> {
|
||||
Router::new()
|
||||
@@ -551,53 +810,3 @@ pub fn routes() -> Router<Arc<AppState>> {
|
||||
.route("/packs/{pack_ref}/workflows", get(list_workflows_by_pack))
|
||||
.route("/packs/{pack_ref}/workflow-files", post(save_workflow_file))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_workflow_routes_structure() {
|
||||
// Just verify the router can be constructed
|
||||
let _router = routes();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_request_validation() {
|
||||
let req = SaveWorkflowFileRequest {
|
||||
name: "test_workflow".to_string(),
|
||||
label: "Test Workflow".to_string(),
|
||||
description: Some("A test workflow".to_string()),
|
||||
version: "1.0.0".to_string(),
|
||||
pack_ref: "core".to_string(),
|
||||
definition: serde_json::json!({
|
||||
"ref": "core.test_workflow",
|
||||
"label": "Test Workflow",
|
||||
"version": "1.0.0",
|
||||
"tasks": [{"name": "task1", "action": "core.echo"}]
|
||||
}),
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
tags: None,
|
||||
enabled: None,
|
||||
};
|
||||
assert!(req.validate().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_request_validation_empty_name() {
|
||||
let req = SaveWorkflowFileRequest {
|
||||
name: "".to_string(), // Invalid: empty
|
||||
label: "Test".to_string(),
|
||||
description: None,
|
||||
version: "1.0.0".to_string(),
|
||||
pack_ref: "core".to_string(),
|
||||
definition: serde_json::json!({}),
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
tags: None,
|
||||
enabled: None,
|
||||
};
|
||||
assert!(req.validate().is_err());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -365,6 +365,7 @@ mod tests {
|
||||
description: "Test action".to_string(),
|
||||
entrypoint: "test.sh".to_string(),
|
||||
runtime: Some(1),
|
||||
runtime_version_constraint: None,
|
||||
param_schema: schema,
|
||||
out_schema: None,
|
||||
is_workflow: false,
|
||||
|
||||
@@ -452,6 +452,7 @@ pub async fn create_test_action(pool: &PgPool, pack_id: i64, ref_name: &str) ->
|
||||
description: format!("Test action for {}", ref_name),
|
||||
entrypoint: "main.py".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
is_adhoc: false,
|
||||
|
||||
@@ -55,6 +55,7 @@ async fn setup_test_pack_and_action(pool: &PgPool) -> Result<(Pack, Action)> {
|
||||
description: "Test action for SSE tests".to_string(),
|
||||
entrypoint: "test.sh".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
is_adhoc: false,
|
||||
|
||||
@@ -66,6 +66,9 @@ walkdir = { workspace = true }
|
||||
# Regular expressions
|
||||
regex = { workspace = true }
|
||||
|
||||
# Version matching
|
||||
semver = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
mockall = { workspace = true }
|
||||
tracing-subscriber = { workspace = true }
|
||||
|
||||
@@ -20,6 +20,7 @@ pub mod schema;
|
||||
pub mod template_resolver;
|
||||
pub mod test_executor;
|
||||
pub mod utils;
|
||||
pub mod version_matching;
|
||||
pub mod workflow;
|
||||
|
||||
// Re-export commonly used types
|
||||
|
||||
@@ -443,6 +443,16 @@ pub mod runtime {
|
||||
/// Optional dependency management configuration
|
||||
#[serde(default)]
|
||||
pub dependencies: Option<DependencyConfig>,
|
||||
|
||||
/// Optional environment variables to set during action execution.
|
||||
///
|
||||
/// Values support the same template variables as other fields:
|
||||
/// `{pack_dir}`, `{env_dir}`, `{interpreter}`, `{manifest_path}`.
|
||||
///
|
||||
/// Example: `{"NODE_PATH": "{env_dir}/node_modules"}` ensures Node.js
|
||||
/// can find packages installed in the isolated runtime environment.
|
||||
#[serde(default)]
|
||||
pub env_vars: HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Describes the interpreter binary and how it invokes action scripts.
|
||||
@@ -756,6 +766,51 @@ pub mod runtime {
|
||||
}
|
||||
}
|
||||
|
||||
/// A specific version of a runtime (e.g., Python 3.12.1, Node.js 20.11.0).
|
||||
///
|
||||
/// Each version stores its own complete `execution_config` so the worker can
|
||||
/// use a version-specific interpreter binary, environment commands, etc.
|
||||
/// Actions and sensors declare an optional version constraint (semver range)
|
||||
/// which is matched against available `RuntimeVersion` rows at execution time.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||
pub struct RuntimeVersion {
|
||||
pub id: Id,
|
||||
/// Parent runtime ID (FK → runtime.id)
|
||||
pub runtime: Id,
|
||||
/// Parent runtime ref for display/filtering (e.g., "core.python")
|
||||
pub runtime_ref: String,
|
||||
/// Semantic version string (e.g., "3.12.1", "20.11.0")
|
||||
pub version: String,
|
||||
/// Major version component (nullable for non-numeric schemes)
|
||||
pub version_major: Option<i32>,
|
||||
/// Minor version component
|
||||
pub version_minor: Option<i32>,
|
||||
/// Patch version component
|
||||
pub version_patch: Option<i32>,
|
||||
/// Complete execution configuration for this version
|
||||
/// (same structure as `runtime.execution_config`)
|
||||
pub execution_config: JsonDict,
|
||||
/// Version-specific distribution/verification metadata
|
||||
pub distributions: JsonDict,
|
||||
/// Whether this is the default version for the parent runtime
|
||||
pub is_default: bool,
|
||||
/// Whether this version is verified as available on the system
|
||||
pub available: bool,
|
||||
/// When this version was last verified
|
||||
pub verified_at: Option<DateTime<Utc>>,
|
||||
/// Arbitrary version-specific metadata
|
||||
pub meta: JsonDict,
|
||||
pub created: DateTime<Utc>,
|
||||
pub updated: DateTime<Utc>,
|
||||
}
|
||||
|
||||
impl RuntimeVersion {
|
||||
/// Parse the `execution_config` JSONB into a typed `RuntimeExecutionConfig`.
|
||||
pub fn parsed_execution_config(&self) -> RuntimeExecutionConfig {
|
||||
serde_json::from_value(self.execution_config.clone()).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
|
||||
pub struct Worker {
|
||||
pub id: Id,
|
||||
@@ -808,6 +863,9 @@ pub mod trigger {
|
||||
pub entrypoint: String,
|
||||
pub runtime: Id,
|
||||
pub runtime_ref: String,
|
||||
/// Optional semver version constraint for the runtime
|
||||
/// (e.g., ">=3.12", ">=3.12,<4.0", "~18.0"). NULL means any version.
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
pub trigger: Id,
|
||||
pub trigger_ref: String,
|
||||
pub enabled: bool,
|
||||
@@ -832,6 +890,9 @@ pub mod action {
|
||||
pub description: String,
|
||||
pub entrypoint: String,
|
||||
pub runtime: Option<Id>,
|
||||
/// Optional semver version constraint for the runtime
|
||||
/// (e.g., ">=3.12", ">=3.12,<4.0", "~18.0"). NULL means any version.
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
pub param_schema: Option<JsonSchema>,
|
||||
pub out_schema: Option<JsonSchema>,
|
||||
pub is_workflow: bool,
|
||||
|
||||
@@ -20,10 +20,12 @@ use crate::error::{Error, Result};
|
||||
use crate::models::Id;
|
||||
use crate::repositories::action::ActionRepository;
|
||||
use crate::repositories::runtime::{CreateRuntimeInput, RuntimeRepository};
|
||||
use crate::repositories::runtime_version::{CreateRuntimeVersionInput, RuntimeVersionRepository};
|
||||
use crate::repositories::trigger::{
|
||||
CreateSensorInput, CreateTriggerInput, SensorRepository, TriggerRepository,
|
||||
};
|
||||
use crate::repositories::{Create, FindById, FindByRef, Update};
|
||||
use crate::version_matching::extract_version_components;
|
||||
|
||||
/// Result of loading pack components into the database.
|
||||
#[derive(Debug, Default)]
|
||||
@@ -201,6 +203,10 @@ impl<'a> PackComponentLoader<'a> {
|
||||
Ok(rt) => {
|
||||
info!("Created runtime '{}' (ID: {})", runtime_ref, rt.id);
|
||||
result.runtimes_loaded += 1;
|
||||
|
||||
// Load version entries from the optional `versions` array
|
||||
self.load_runtime_versions(&data, rt.id, &runtime_ref, result)
|
||||
.await;
|
||||
}
|
||||
Err(e) => {
|
||||
// Check for unique constraint violation (race condition)
|
||||
@@ -226,6 +232,141 @@ impl<'a> PackComponentLoader<'a> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load version entries from the `versions` array in a runtime YAML.
|
||||
///
|
||||
/// Each entry in the array describes a specific version of the runtime
|
||||
/// with its own `execution_config` and `distributions`. Example:
|
||||
///
|
||||
/// ```yaml
|
||||
/// versions:
|
||||
/// - version: "3.12"
|
||||
/// is_default: true
|
||||
/// execution_config:
|
||||
/// interpreter:
|
||||
/// binary: python3.12
|
||||
/// ...
|
||||
/// distributions:
|
||||
/// verification:
|
||||
/// commands:
|
||||
/// - binary: python3.12
|
||||
/// args: ["--version"]
|
||||
/// ...
|
||||
/// ```
|
||||
async fn load_runtime_versions(
|
||||
&self,
|
||||
data: &serde_yaml_ng::Value,
|
||||
runtime_id: Id,
|
||||
runtime_ref: &str,
|
||||
result: &mut PackLoadResult,
|
||||
) {
|
||||
let versions = match data.get("versions").and_then(|v| v.as_sequence()) {
|
||||
Some(seq) => seq,
|
||||
None => return, // No versions defined — that's fine
|
||||
};
|
||||
|
||||
info!(
|
||||
"Loading {} version(s) for runtime '{}'",
|
||||
versions.len(),
|
||||
runtime_ref
|
||||
);
|
||||
|
||||
for entry in versions {
|
||||
let version_str = match entry.get("version").and_then(|v| v.as_str()) {
|
||||
Some(v) => v.to_string(),
|
||||
None => {
|
||||
let msg = format!(
|
||||
"Runtime '{}' has a version entry without a 'version' field, skipping",
|
||||
runtime_ref
|
||||
);
|
||||
warn!("{}", msg);
|
||||
result.warnings.push(msg);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
// Check if this version already exists
|
||||
if let Ok(Some(_existing)) = RuntimeVersionRepository::find_by_runtime_and_version(
|
||||
self.pool,
|
||||
runtime_id,
|
||||
&version_str,
|
||||
)
|
||||
.await
|
||||
{
|
||||
info!(
|
||||
"Version '{}' for runtime '{}' already exists, skipping",
|
||||
version_str, runtime_ref
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
let (version_major, version_minor, version_patch) =
|
||||
extract_version_components(&version_str);
|
||||
|
||||
let execution_config = entry
|
||||
.get("execution_config")
|
||||
.and_then(|v| serde_json::to_value(v).ok())
|
||||
.unwrap_or_else(|| serde_json::json!({}));
|
||||
|
||||
let distributions = entry
|
||||
.get("distributions")
|
||||
.and_then(|v| serde_json::to_value(v).ok())
|
||||
.unwrap_or_else(|| serde_json::json!({}));
|
||||
|
||||
let is_default = entry
|
||||
.get("is_default")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
let meta = entry
|
||||
.get("meta")
|
||||
.and_then(|v| serde_json::to_value(v).ok())
|
||||
.unwrap_or_else(|| serde_json::json!({}));
|
||||
|
||||
let input = CreateRuntimeVersionInput {
|
||||
runtime: runtime_id,
|
||||
runtime_ref: runtime_ref.to_string(),
|
||||
version: version_str.clone(),
|
||||
version_major,
|
||||
version_minor,
|
||||
version_patch,
|
||||
execution_config,
|
||||
distributions,
|
||||
is_default,
|
||||
available: true, // Assume available until verification runs
|
||||
meta,
|
||||
};
|
||||
|
||||
match RuntimeVersionRepository::create(self.pool, input).await {
|
||||
Ok(rv) => {
|
||||
info!(
|
||||
"Created version '{}' for runtime '{}' (ID: {})",
|
||||
version_str, runtime_ref, rv.id
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
// Check for unique constraint violation (race condition)
|
||||
if let Error::Database(ref db_err) = e {
|
||||
if let sqlx::Error::Database(ref inner) = db_err {
|
||||
if inner.is_unique_violation() {
|
||||
info!(
|
||||
"Version '{}' for runtime '{}' already exists (concurrent), skipping",
|
||||
version_str, runtime_ref
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
let msg = format!(
|
||||
"Failed to create version '{}' for runtime '{}': {}",
|
||||
version_str, runtime_ref, e
|
||||
);
|
||||
warn!("{}", msg);
|
||||
result.warnings.push(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn load_triggers(
|
||||
&self,
|
||||
pack_dir: &Path,
|
||||
@@ -424,16 +565,22 @@ impl<'a> PackComponentLoader<'a> {
|
||||
.unwrap_or("text")
|
||||
.to_lowercase();
|
||||
|
||||
// Optional runtime version constraint (e.g., ">=3.12", "~18.0")
|
||||
let runtime_version_constraint = data
|
||||
.get("runtime_version")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
// Use raw SQL to include parameter_delivery, parameter_format,
|
||||
// output_format which are not in CreateActionInput
|
||||
let create_result = sqlx::query_scalar::<_, i64>(
|
||||
r#"
|
||||
INSERT INTO action (
|
||||
ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_adhoc,
|
||||
runtime, runtime_version_constraint, param_schema, out_schema, is_adhoc,
|
||||
parameter_delivery, parameter_format, output_format
|
||||
)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
|
||||
RETURNING id
|
||||
"#,
|
||||
)
|
||||
@@ -444,6 +591,7 @@ impl<'a> PackComponentLoader<'a> {
|
||||
.bind(&description)
|
||||
.bind(&entrypoint)
|
||||
.bind(runtime_id)
|
||||
.bind(&runtime_version_constraint)
|
||||
.bind(¶m_schema)
|
||||
.bind(&out_schema)
|
||||
.bind(false) // is_adhoc
|
||||
@@ -601,6 +749,12 @@ impl<'a> PackComponentLoader<'a> {
|
||||
.and_then(|v| serde_json::to_value(v).ok())
|
||||
.unwrap_or_else(|| serde_json::json!({}));
|
||||
|
||||
// Optional runtime version constraint (e.g., ">=3.12", "~18.0")
|
||||
let runtime_version_constraint = data
|
||||
.get("runtime_version")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
// Upsert: update existing sensors so re-registration corrects
|
||||
// stale metadata (especially runtime assignments).
|
||||
if let Some(existing) = SensorRepository::find_by_ref(self.pool, &sensor_ref).await? {
|
||||
@@ -612,6 +766,7 @@ impl<'a> PackComponentLoader<'a> {
|
||||
entrypoint: Some(entrypoint),
|
||||
runtime: Some(sensor_runtime_id),
|
||||
runtime_ref: Some(sensor_runtime_ref.clone()),
|
||||
runtime_version_constraint: Some(runtime_version_constraint.clone()),
|
||||
trigger: Some(trigger_id.unwrap_or(existing.trigger)),
|
||||
trigger_ref: Some(trigger_ref.unwrap_or(existing.trigger_ref.clone())),
|
||||
enabled: Some(enabled),
|
||||
@@ -645,6 +800,7 @@ impl<'a> PackComponentLoader<'a> {
|
||||
entrypoint,
|
||||
runtime: sensor_runtime_id,
|
||||
runtime_ref: sensor_runtime_ref.clone(),
|
||||
runtime_version_constraint,
|
||||
trigger: trigger_id.unwrap_or(0),
|
||||
trigger_ref: trigger_ref.unwrap_or_default(),
|
||||
enabled,
|
||||
|
||||
@@ -29,6 +29,7 @@ pub struct CreateActionInput {
|
||||
pub description: String,
|
||||
pub entrypoint: String,
|
||||
pub runtime: Option<Id>,
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
pub param_schema: Option<JsonSchema>,
|
||||
pub out_schema: Option<JsonSchema>,
|
||||
pub is_adhoc: bool,
|
||||
@@ -41,6 +42,7 @@ pub struct UpdateActionInput {
|
||||
pub description: Option<String>,
|
||||
pub entrypoint: Option<String>,
|
||||
pub runtime: Option<Id>,
|
||||
pub runtime_version_constraint: Option<Option<String>>,
|
||||
pub param_schema: Option<JsonSchema>,
|
||||
pub out_schema: Option<JsonSchema>,
|
||||
}
|
||||
@@ -54,7 +56,8 @@ impl FindById for ActionRepository {
|
||||
let action = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE id = $1
|
||||
"#,
|
||||
@@ -76,7 +79,8 @@ impl FindByRef for ActionRepository {
|
||||
let action = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE ref = $1
|
||||
"#,
|
||||
@@ -98,7 +102,8 @@ impl List for ActionRepository {
|
||||
let actions = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
ORDER BY ref ASC
|
||||
"#,
|
||||
@@ -133,10 +138,11 @@ impl Create for ActionRepository {
|
||||
let action = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
INSERT INTO action (ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_adhoc)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||
runtime, runtime_version_constraint, param_schema, out_schema, is_adhoc)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
||||
RETURNING id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
"#,
|
||||
)
|
||||
.bind(&input.r#ref)
|
||||
@@ -146,6 +152,7 @@ impl Create for ActionRepository {
|
||||
.bind(&input.description)
|
||||
.bind(&input.entrypoint)
|
||||
.bind(input.runtime)
|
||||
.bind(&input.runtime_version_constraint)
|
||||
.bind(&input.param_schema)
|
||||
.bind(&input.out_schema)
|
||||
.bind(input.is_adhoc)
|
||||
@@ -213,6 +220,15 @@ impl Update for ActionRepository {
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(runtime_version_constraint) = &input.runtime_version_constraint {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("runtime_version_constraint = ");
|
||||
query.push_bind(runtime_version_constraint);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(param_schema) = &input.param_schema {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
@@ -240,7 +256,7 @@ impl Update for ActionRepository {
|
||||
|
||||
query.push(", updated = NOW() WHERE id = ");
|
||||
query.push_bind(id);
|
||||
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated");
|
||||
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, runtime_version_constraint, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated");
|
||||
|
||||
let action = query
|
||||
.build_query_as::<Action>()
|
||||
@@ -279,7 +295,8 @@ impl ActionRepository {
|
||||
let actions = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE pack = $1
|
||||
ORDER BY ref ASC
|
||||
@@ -300,7 +317,8 @@ impl ActionRepository {
|
||||
let actions = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE runtime = $1
|
||||
ORDER BY ref ASC
|
||||
@@ -322,7 +340,8 @@ impl ActionRepository {
|
||||
let actions = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE LOWER(ref) LIKE $1 OR LOWER(label) LIKE $1 OR LOWER(description) LIKE $1
|
||||
ORDER BY ref ASC
|
||||
@@ -343,7 +362,8 @@ impl ActionRepository {
|
||||
let actions = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE is_workflow = true
|
||||
ORDER BY ref ASC
|
||||
@@ -366,7 +386,8 @@ impl ActionRepository {
|
||||
let action = sqlx::query_as::<_, Action>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
FROM action
|
||||
WHERE workflow_def = $1
|
||||
"#,
|
||||
@@ -393,7 +414,8 @@ impl ActionRepository {
|
||||
SET is_workflow = true, workflow_def = $2, updated = NOW()
|
||||
WHERE id = $1
|
||||
RETURNING id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
runtime, runtime_version_constraint,
|
||||
param_schema, out_schema, is_workflow, workflow_def, is_adhoc, created, updated
|
||||
"#,
|
||||
)
|
||||
.bind(action_id)
|
||||
|
||||
@@ -40,6 +40,7 @@ pub mod pack_test;
|
||||
pub mod queue_stats;
|
||||
pub mod rule;
|
||||
pub mod runtime;
|
||||
pub mod runtime_version;
|
||||
pub mod trigger;
|
||||
pub mod workflow;
|
||||
|
||||
@@ -57,6 +58,7 @@ pub use pack_test::PackTestRepository;
|
||||
pub use queue_stats::QueueStatsRepository;
|
||||
pub use rule::RuleRepository;
|
||||
pub use runtime::{RuntimeRepository, WorkerRepository};
|
||||
pub use runtime_version::RuntimeVersionRepository;
|
||||
pub use trigger::{SensorRepository, TriggerRepository};
|
||||
pub use workflow::{WorkflowDefinitionRepository, WorkflowExecutionRepository};
|
||||
|
||||
|
||||
447
crates/common/src/repositories/runtime_version.rs
Normal file
447
crates/common/src/repositories/runtime_version.rs
Normal file
@@ -0,0 +1,447 @@
|
||||
//! Repository for runtime version operations
|
||||
//!
|
||||
//! Provides CRUD operations and specialized queries for the `runtime_version`
|
||||
//! table, which stores version-specific execution configurations for runtimes.
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::models::{Id, RuntimeVersion};
|
||||
use crate::repositories::{Create, Delete, FindById, List, Repository, Update};
|
||||
use sqlx::{Executor, Postgres, QueryBuilder};
|
||||
|
||||
/// Repository for runtime version database operations
|
||||
pub struct RuntimeVersionRepository;
|
||||
|
||||
impl Repository for RuntimeVersionRepository {
|
||||
type Entity = RuntimeVersion;
|
||||
|
||||
fn table_name() -> &'static str {
|
||||
"runtime_version"
|
||||
}
|
||||
}
|
||||
|
||||
/// Input for creating a new runtime version
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CreateRuntimeVersionInput {
|
||||
pub runtime: Id,
|
||||
pub runtime_ref: String,
|
||||
pub version: String,
|
||||
pub version_major: Option<i32>,
|
||||
pub version_minor: Option<i32>,
|
||||
pub version_patch: Option<i32>,
|
||||
pub execution_config: serde_json::Value,
|
||||
pub distributions: serde_json::Value,
|
||||
pub is_default: bool,
|
||||
pub available: bool,
|
||||
pub meta: serde_json::Value,
|
||||
}
|
||||
|
||||
/// Input for updating an existing runtime version
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct UpdateRuntimeVersionInput {
|
||||
pub version: Option<String>,
|
||||
pub version_major: Option<Option<i32>>,
|
||||
pub version_minor: Option<Option<i32>>,
|
||||
pub version_patch: Option<Option<i32>>,
|
||||
pub execution_config: Option<serde_json::Value>,
|
||||
pub distributions: Option<serde_json::Value>,
|
||||
pub is_default: Option<bool>,
|
||||
pub available: Option<bool>,
|
||||
pub verified_at: Option<Option<chrono::DateTime<chrono::Utc>>>,
|
||||
pub meta: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
const SELECT_COLUMNS: &str = r#"
|
||||
id, runtime, runtime_ref, version,
|
||||
version_major, version_minor, version_patch,
|
||||
execution_config, distributions,
|
||||
is_default, available, verified_at, meta,
|
||||
created, updated
|
||||
"#;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl FindById for RuntimeVersionRepository {
|
||||
async fn find_by_id<'e, E>(executor: E, id: i64) -> Result<Option<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let row = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
"SELECT {} FROM runtime_version WHERE id = $1",
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(id)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
Ok(row)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl List for RuntimeVersionRepository {
|
||||
async fn list<'e, E>(executor: E) -> Result<Vec<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let rows = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
"SELECT {} FROM runtime_version ORDER BY runtime_ref ASC, version ASC",
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.fetch_all(executor)
|
||||
.await?;
|
||||
|
||||
Ok(rows)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl Create for RuntimeVersionRepository {
|
||||
type CreateInput = CreateRuntimeVersionInput;
|
||||
|
||||
async fn create<'e, E>(executor: E, input: Self::CreateInput) -> Result<RuntimeVersion>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let row = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
INSERT INTO runtime_version (
|
||||
runtime, runtime_ref, version,
|
||||
version_major, version_minor, version_patch,
|
||||
execution_config, distributions,
|
||||
is_default, available, meta
|
||||
)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
||||
RETURNING {}
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(input.runtime)
|
||||
.bind(&input.runtime_ref)
|
||||
.bind(&input.version)
|
||||
.bind(input.version_major)
|
||||
.bind(input.version_minor)
|
||||
.bind(input.version_patch)
|
||||
.bind(&input.execution_config)
|
||||
.bind(&input.distributions)
|
||||
.bind(input.is_default)
|
||||
.bind(input.available)
|
||||
.bind(&input.meta)
|
||||
.fetch_one(executor)
|
||||
.await?;
|
||||
|
||||
Ok(row)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl Update for RuntimeVersionRepository {
|
||||
type UpdateInput = UpdateRuntimeVersionInput;
|
||||
|
||||
async fn update<'e, E>(executor: E, id: i64, input: Self::UpdateInput) -> Result<RuntimeVersion>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let mut query: QueryBuilder<Postgres> = QueryBuilder::new("UPDATE runtime_version SET ");
|
||||
let mut has_updates = false;
|
||||
|
||||
if let Some(version) = &input.version {
|
||||
query.push("version = ");
|
||||
query.push_bind(version);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(version_major) = &input.version_major {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("version_major = ");
|
||||
query.push_bind(*version_major);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(version_minor) = &input.version_minor {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("version_minor = ");
|
||||
query.push_bind(*version_minor);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(version_patch) = &input.version_patch {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("version_patch = ");
|
||||
query.push_bind(*version_patch);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(execution_config) = &input.execution_config {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("execution_config = ");
|
||||
query.push_bind(execution_config);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(distributions) = &input.distributions {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("distributions = ");
|
||||
query.push_bind(distributions);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(is_default) = input.is_default {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("is_default = ");
|
||||
query.push_bind(is_default);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(available) = input.available {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("available = ");
|
||||
query.push_bind(available);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(verified_at) = &input.verified_at {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("verified_at = ");
|
||||
query.push_bind(*verified_at);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(meta) = &input.meta {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("meta = ");
|
||||
query.push_bind(meta);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if !has_updates {
|
||||
// Nothing to update — just fetch the current row
|
||||
return Self::find_by_id(executor, id)
|
||||
.await?
|
||||
.ok_or_else(|| crate::Error::not_found("runtime_version", "id", id.to_string()));
|
||||
}
|
||||
|
||||
query.push(" WHERE id = ");
|
||||
query.push_bind(id);
|
||||
query.push(&format!(" RETURNING {}", SELECT_COLUMNS));
|
||||
|
||||
let row = query
|
||||
.build_query_as::<RuntimeVersion>()
|
||||
.fetch_optional(executor)
|
||||
.await?
|
||||
.ok_or_else(|| crate::Error::not_found("runtime_version", "id", id.to_string()))?;
|
||||
|
||||
Ok(row)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl Delete for RuntimeVersionRepository {
|
||||
async fn delete<'e, E>(executor: E, id: i64) -> Result<bool>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let result = sqlx::query("DELETE FROM runtime_version WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(executor)
|
||||
.await?;
|
||||
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Specialized queries
|
||||
impl RuntimeVersionRepository {
|
||||
/// Find all versions for a given runtime ID.
|
||||
///
|
||||
/// Returns versions ordered by major, minor, patch descending
|
||||
/// (newest version first).
|
||||
pub async fn find_by_runtime<'e, E>(executor: E, runtime_id: Id) -> Result<Vec<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let rows = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
SELECT {}
|
||||
FROM runtime_version
|
||||
WHERE runtime = $1
|
||||
ORDER BY version_major DESC NULLS LAST,
|
||||
version_minor DESC NULLS LAST,
|
||||
version_patch DESC NULLS LAST
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(runtime_id)
|
||||
.fetch_all(executor)
|
||||
.await?;
|
||||
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Find all versions for a given runtime ref (e.g., "core.python").
|
||||
pub async fn find_by_runtime_ref<'e, E>(
|
||||
executor: E,
|
||||
runtime_ref: &str,
|
||||
) -> Result<Vec<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let rows = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
SELECT {}
|
||||
FROM runtime_version
|
||||
WHERE runtime_ref = $1
|
||||
ORDER BY version_major DESC NULLS LAST,
|
||||
version_minor DESC NULLS LAST,
|
||||
version_patch DESC NULLS LAST
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(runtime_ref)
|
||||
.fetch_all(executor)
|
||||
.await?;
|
||||
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Find all available versions for a given runtime ID.
|
||||
///
|
||||
/// Only returns versions where `available = true`.
|
||||
pub async fn find_available_by_runtime<'e, E>(
|
||||
executor: E,
|
||||
runtime_id: Id,
|
||||
) -> Result<Vec<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let rows = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
SELECT {}
|
||||
FROM runtime_version
|
||||
WHERE runtime = $1 AND available = TRUE
|
||||
ORDER BY version_major DESC NULLS LAST,
|
||||
version_minor DESC NULLS LAST,
|
||||
version_patch DESC NULLS LAST
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(runtime_id)
|
||||
.fetch_all(executor)
|
||||
.await?;
|
||||
|
||||
Ok(rows)
|
||||
}
|
||||
|
||||
/// Find the default version for a given runtime ID.
|
||||
///
|
||||
/// Returns `None` if no version is marked as default.
|
||||
pub async fn find_default_by_runtime<'e, E>(
|
||||
executor: E,
|
||||
runtime_id: Id,
|
||||
) -> Result<Option<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let row = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
SELECT {}
|
||||
FROM runtime_version
|
||||
WHERE runtime = $1 AND is_default = TRUE
|
||||
LIMIT 1
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(runtime_id)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
Ok(row)
|
||||
}
|
||||
|
||||
/// Find a specific version by runtime ID and version string.
|
||||
pub async fn find_by_runtime_and_version<'e, E>(
|
||||
executor: E,
|
||||
runtime_id: Id,
|
||||
version: &str,
|
||||
) -> Result<Option<RuntimeVersion>>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let row = sqlx::query_as::<_, RuntimeVersion>(&format!(
|
||||
r#"
|
||||
SELECT {}
|
||||
FROM runtime_version
|
||||
WHERE runtime = $1 AND version = $2
|
||||
"#,
|
||||
SELECT_COLUMNS
|
||||
))
|
||||
.bind(runtime_id)
|
||||
.bind(version)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
Ok(row)
|
||||
}
|
||||
|
||||
/// Clear the `is_default` flag on all versions for a runtime.
|
||||
///
|
||||
/// Useful before setting a new default version.
|
||||
pub async fn clear_default_for_runtime<'e, E>(executor: E, runtime_id: Id) -> Result<u64>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let result = sqlx::query(
|
||||
"UPDATE runtime_version SET is_default = FALSE WHERE runtime = $1 AND is_default = TRUE",
|
||||
)
|
||||
.bind(runtime_id)
|
||||
.execute(executor)
|
||||
.await?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
|
||||
/// Mark a version's availability and update the verification timestamp.
|
||||
pub async fn set_availability<'e, E>(executor: E, id: Id, available: bool) -> Result<bool>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let result = sqlx::query(
|
||||
"UPDATE runtime_version SET available = $1, verified_at = NOW() WHERE id = $2",
|
||||
)
|
||||
.bind(available)
|
||||
.bind(id)
|
||||
.execute(executor)
|
||||
.await?;
|
||||
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
/// Delete all versions for a given runtime ID.
|
||||
pub async fn delete_by_runtime<'e, E>(executor: E, runtime_id: Id) -> Result<u64>
|
||||
where
|
||||
E: Executor<'e, Database = Postgres> + 'e,
|
||||
{
|
||||
let result = sqlx::query("DELETE FROM runtime_version WHERE runtime = $1")
|
||||
.bind(runtime_id)
|
||||
.execute(executor)
|
||||
.await?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
}
|
||||
@@ -518,6 +518,7 @@ pub struct CreateSensorInput {
|
||||
pub entrypoint: String,
|
||||
pub runtime: Id,
|
||||
pub runtime_ref: String,
|
||||
pub runtime_version_constraint: Option<String>,
|
||||
pub trigger: Id,
|
||||
pub trigger_ref: String,
|
||||
pub enabled: bool,
|
||||
@@ -533,6 +534,7 @@ pub struct UpdateSensorInput {
|
||||
pub entrypoint: Option<String>,
|
||||
pub runtime: Option<Id>,
|
||||
pub runtime_ref: Option<String>,
|
||||
pub runtime_version_constraint: Option<Option<String>>,
|
||||
pub trigger: Option<Id>,
|
||||
pub trigger_ref: Option<String>,
|
||||
pub enabled: Option<bool>,
|
||||
@@ -549,7 +551,8 @@ impl FindById for SensorRepository {
|
||||
let sensor = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
WHERE id = $1
|
||||
@@ -572,7 +575,8 @@ impl FindByRef for SensorRepository {
|
||||
let sensor = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
WHERE ref = $1
|
||||
@@ -595,7 +599,8 @@ impl List for SensorRepository {
|
||||
let sensors = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
ORDER BY ref ASC
|
||||
@@ -619,11 +624,13 @@ impl Create for SensorRepository {
|
||||
let sensor = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
INSERT INTO sensor (ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)
|
||||
RETURNING id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
"#,
|
||||
)
|
||||
@@ -635,6 +642,7 @@ impl Create for SensorRepository {
|
||||
.bind(&input.entrypoint)
|
||||
.bind(input.runtime)
|
||||
.bind(&input.runtime_ref)
|
||||
.bind(&input.runtime_version_constraint)
|
||||
.bind(input.trigger)
|
||||
.bind(&input.trigger_ref)
|
||||
.bind(input.enabled)
|
||||
@@ -711,6 +719,15 @@ impl Update for SensorRepository {
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(runtime_version_constraint) = &input.runtime_version_constraint {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
}
|
||||
query.push("runtime_version_constraint = ");
|
||||
query.push_bind(runtime_version_constraint);
|
||||
has_updates = true;
|
||||
}
|
||||
|
||||
if let Some(trigger) = input.trigger {
|
||||
if has_updates {
|
||||
query.push(", ");
|
||||
@@ -754,7 +771,7 @@ impl Update for SensorRepository {
|
||||
|
||||
query.push(", updated = NOW() WHERE id = ");
|
||||
query.push_bind(id);
|
||||
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, runtime_ref, trigger, trigger_ref, enabled, param_schema, config, created, updated");
|
||||
query.push(" RETURNING id, ref, pack, pack_ref, label, description, entrypoint, runtime, runtime_ref, runtime_version_constraint, trigger, trigger_ref, enabled, param_schema, config, created, updated");
|
||||
|
||||
let sensor = query.build_query_as::<Sensor>().fetch_one(executor).await?;
|
||||
|
||||
@@ -786,7 +803,8 @@ impl SensorRepository {
|
||||
let sensors = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
WHERE trigger = $1
|
||||
@@ -808,7 +826,8 @@ impl SensorRepository {
|
||||
let sensors = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
WHERE enabled = true
|
||||
@@ -829,7 +848,8 @@ impl SensorRepository {
|
||||
let sensors = sqlx::query_as::<_, Sensor>(
|
||||
r#"
|
||||
SELECT id, ref, pack, pack_ref, label, description, entrypoint,
|
||||
runtime, runtime_ref, trigger, trigger_ref, enabled,
|
||||
runtime, runtime_ref, runtime_version_constraint,
|
||||
trigger, trigger_ref, enabled,
|
||||
param_schema, config, created, updated
|
||||
FROM sensor
|
||||
WHERE pack = $1
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
//! 1. Environment variable override (highest priority)
|
||||
//! 2. Config file specification (medium priority)
|
||||
//! 3. Database-driven detection with verification (lowest priority)
|
||||
//!
|
||||
//! Also provides [`normalize_runtime_name`] for alias-aware runtime name
|
||||
//! comparison across the codebase (worker filters, env setup, etc.).
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::error::Result;
|
||||
@@ -15,6 +18,49 @@ use std::collections::HashMap;
|
||||
use std::process::Command;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Normalize a runtime name to its canonical short form.
|
||||
///
|
||||
/// This ensures that different ways of referring to the same runtime
|
||||
/// (e.g., "node", "nodejs", "node.js") all resolve to a single canonical
|
||||
/// name. Used by worker runtime filters and environment setup to match
|
||||
/// database runtime names against short filter values.
|
||||
///
|
||||
/// The canonical names mirror the alias groups in
|
||||
/// `PackComponentLoader::resolve_runtime`.
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use attune_common::runtime_detection::normalize_runtime_name;
|
||||
/// assert_eq!(normalize_runtime_name("node.js"), "node");
|
||||
/// assert_eq!(normalize_runtime_name("nodejs"), "node");
|
||||
/// assert_eq!(normalize_runtime_name("python3"), "python");
|
||||
/// assert_eq!(normalize_runtime_name("shell"), "shell");
|
||||
/// ```
|
||||
pub fn normalize_runtime_name(name: &str) -> &str {
|
||||
match name {
|
||||
"node" | "nodejs" | "node.js" => "node",
|
||||
"python" | "python3" => "python",
|
||||
"bash" | "sh" | "shell" => "shell",
|
||||
"native" | "builtin" | "standalone" => "native",
|
||||
other => other,
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a runtime name matches a filter entry, supporting common aliases.
|
||||
///
|
||||
/// Both sides are lowercased and then normalized before comparison so that,
|
||||
/// e.g., a filter value of `"node"` matches a database runtime name `"Node.js"`.
|
||||
pub fn runtime_matches_filter(rt_name: &str, filter_entry: &str) -> bool {
|
||||
let rt_lower = rt_name.to_ascii_lowercase();
|
||||
let filter_lower = filter_entry.to_ascii_lowercase();
|
||||
normalize_runtime_name(&rt_lower) == normalize_runtime_name(&filter_lower)
|
||||
}
|
||||
|
||||
/// Check if a runtime name matches any entry in a filter list.
|
||||
pub fn runtime_in_filter(rt_name: &str, filter: &[String]) -> bool {
|
||||
filter.iter().any(|f| runtime_matches_filter(rt_name, f))
|
||||
}
|
||||
|
||||
/// Runtime detection service
|
||||
pub struct RuntimeDetector {
|
||||
pool: PgPool,
|
||||
@@ -290,6 +336,72 @@ mod tests {
|
||||
use super::*;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
fn test_normalize_runtime_name_node_variants() {
    // Every Node.js spelling collapses to the canonical "node".
    assert_eq!(normalize_runtime_name("node"), "node");
    assert_eq!(normalize_runtime_name("nodejs"), "node");
    assert_eq!(normalize_runtime_name("node.js"), "node");
}
|
||||
|
||||
#[test]
fn test_normalize_runtime_name_python_variants() {
    // "python3" is an alias of the canonical "python".
    assert_eq!(normalize_runtime_name("python"), "python");
    assert_eq!(normalize_runtime_name("python3"), "python");
}
|
||||
|
||||
#[test]
fn test_normalize_runtime_name_shell_variants() {
    // "bash" and "sh" both normalize to the canonical "shell".
    assert_eq!(normalize_runtime_name("shell"), "shell");
    assert_eq!(normalize_runtime_name("bash"), "shell");
    assert_eq!(normalize_runtime_name("sh"), "shell");
}
|
||||
|
||||
#[test]
fn test_normalize_runtime_name_native_variants() {
    // "builtin" and "standalone" both normalize to the canonical "native".
    assert_eq!(normalize_runtime_name("native"), "native");
    assert_eq!(normalize_runtime_name("builtin"), "native");
    assert_eq!(normalize_runtime_name("standalone"), "native");
}
|
||||
|
||||
#[test]
fn test_normalize_runtime_name_passthrough() {
    // Names outside the known alias groups are returned unchanged.
    assert_eq!(normalize_runtime_name("custom_runtime"), "custom_runtime");
}
|
||||
|
||||
#[test]
fn test_runtime_matches_filter() {
    // Aliases match in either direction:
    // Node.js DB name lowercased vs worker filter "node"
    assert!(runtime_matches_filter("node.js", "node"));
    assert!(runtime_matches_filter("node", "nodejs"));
    assert!(runtime_matches_filter("nodejs", "node.js"));
    // Exact match
    assert!(runtime_matches_filter("shell", "shell"));
    // No match
    assert!(!runtime_matches_filter("python", "node"));
}
|
||||
|
||||
#[test]
fn test_runtime_matches_filter_case_insensitive() {
    // Matching must be case-insensitive:
    // Database stores capitalized names (e.g., "Node.js", "Python")
    // Worker capabilities store lowercase (e.g., "node", "python")
    assert!(runtime_matches_filter("Node.js", "node"));
    assert!(runtime_matches_filter("node", "Node.js"));
    assert!(runtime_matches_filter("Python", "python"));
    assert!(runtime_matches_filter("python", "Python"));
    assert!(runtime_matches_filter("Shell", "shell"));
    assert!(runtime_matches_filter("NODEJS", "node"));
    // Case-insensitivity must not create false positives.
    assert!(!runtime_matches_filter("Python", "node"));
}
|
||||
|
||||
#[test]
fn test_runtime_in_filter() {
    // A runtime matches a filter list when ANY entry matches (alias-aware).
    let filter = vec!["shell".to_string(), "node".to_string()];
    assert!(runtime_in_filter("shell", &filter));
    assert!(runtime_in_filter("node.js", &filter));
    assert!(runtime_in_filter("nodejs", &filter));
    assert!(!runtime_in_filter("python", &filter));
}
|
||||
|
||||
#[test]
|
||||
fn test_verification_command_structure() {
|
||||
let cmd = json!({
|
||||
|
||||
638
crates/common/src/version_matching.rs
Normal file
638
crates/common/src/version_matching.rs
Normal file
@@ -0,0 +1,638 @@
|
||||
//! Runtime version constraint matching
|
||||
//!
|
||||
//! Provides utilities for parsing and evaluating semver version constraints
|
||||
//! against available runtime versions. Used by the worker to select the
|
||||
//! appropriate runtime version when an action or sensor declares a
|
||||
//! `runtime_version_constraint`.
|
||||
//!
|
||||
//! # Constraint Syntax
|
||||
//!
|
||||
//! Constraints follow standard semver range syntax:
|
||||
//!
|
||||
//! | Constraint | Meaning |
|
||||
//! |-----------------|----------------------------------------|
|
||||
//! | `3.12` | Exactly 3.12.x (any patch) |
|
||||
//! | `=3.12.1` | Exactly 3.12.1 |
|
||||
//! | `>=3.12` | 3.12.0 or newer |
|
||||
//! | `>=3.12,<4.0` | 3.12.0 or newer, but below 4.0.0 |
|
||||
//! | `~3.12` | Compatible with 3.12.x (>=3.12.0, <3.13.0) |
|
||||
//! | `^3.12` | Compatible with 3.x.x (>=3.12.0, <4.0.0) |
|
||||
//!
|
||||
//! Multiple constraints can be separated by commas (AND logic).
|
||||
//!
|
||||
//! # Lenient Parsing
|
||||
//!
|
||||
//! Version strings are parsed leniently to handle real-world formats:
|
||||
//! - `3.12` → `3.12.0`
|
||||
//! - `3` → `3.0.0`
|
||||
//! - `v3.12.1` → `3.12.1` (leading 'v' stripped)
|
||||
//! - `3.12.1-beta.1` → parsed with pre-release info
|
||||
//!
|
||||
//! # Examples
|
||||
//!
|
||||
//! ```
|
||||
//! use attune_common::version_matching::{parse_version, matches_constraint, select_best_version};
|
||||
//! use attune_common::models::RuntimeVersion;
|
||||
//!
|
||||
//! // Simple constraint matching
|
||||
//! assert!(matches_constraint("3.12.1", ">=3.12").unwrap());
|
||||
//! assert!(!matches_constraint("3.11.0", ">=3.12").unwrap());
|
||||
//!
|
||||
//! // Range constraints
|
||||
//! assert!(matches_constraint("3.12.5", ">=3.12,<4.0").unwrap());
|
||||
//! assert!(!matches_constraint("4.0.0", ">=3.12,<4.0").unwrap());
|
||||
//!
|
||||
//! // Tilde (patch-level compatibility)
|
||||
//! assert!(matches_constraint("3.12.5", "~3.12").unwrap());
|
||||
//! assert!(!matches_constraint("3.13.0", "~3.12").unwrap());
|
||||
//!
|
||||
//! // Caret (minor-level compatibility)
|
||||
//! assert!(matches_constraint("3.15.0", "^3.12").unwrap());
|
||||
//! assert!(!matches_constraint("4.0.0", "^3.12").unwrap());
|
||||
//! ```
|
||||
|
||||
use semver::{Version, VersionReq};
|
||||
use tracing::{debug, warn};
|
||||
|
||||
use crate::models::RuntimeVersion;
|
||||
|
||||
/// Error type for version matching operations.
#[derive(Debug, thiserror::Error)]
pub enum VersionError {
    /// The version string could not be parsed, even with lenient padding.
    /// Fields: (original input, underlying parser message).
    #[error("Invalid version string '{0}': {1}")]
    InvalidVersion(String, String),

    /// The constraint string could not be parsed into a `VersionReq`.
    /// Fields: (original input, underlying parser message).
    #[error("Invalid version constraint '{0}': {1}")]
    InvalidConstraint(String, String),
}
|
||||
|
||||
/// Result type for version matching operations.
/// Shorthand for `Result<T, VersionError>` used throughout this module.
pub type VersionResult<T> = std::result::Result<T, VersionError>;
|
||||
|
||||
/// Parse a version string leniently into a [`semver::Version`].
|
||||
///
|
||||
/// Handles common real-world formats:
|
||||
/// - `"3.12"` → `Version { major: 3, minor: 12, patch: 0 }`
|
||||
/// - `"3"` → `Version { major: 3, minor: 0, patch: 0 }`
|
||||
/// - `"v3.12.1"` → `Version { major: 3, minor: 12, patch: 1 }`
|
||||
/// - `"3.12.1"` → `Version { major: 3, minor: 12, patch: 1 }`
|
||||
pub fn parse_version(version_str: &str) -> VersionResult<Version> {
|
||||
let trimmed = version_str.trim();
|
||||
|
||||
// Strip leading 'v' or 'V'
|
||||
let stripped = trimmed
|
||||
.strip_prefix('v')
|
||||
.or_else(|| trimmed.strip_prefix('V'))
|
||||
.unwrap_or(trimmed);
|
||||
|
||||
// Try direct parse first (handles full semver like "3.12.1" and pre-release)
|
||||
if let Ok(v) = Version::parse(stripped) {
|
||||
return Ok(v);
|
||||
}
|
||||
|
||||
// Try adding missing components
|
||||
let parts: Vec<&str> = stripped.split('.').collect();
|
||||
let padded = match parts.len() {
|
||||
1 => format!("{}.0.0", parts[0]),
|
||||
2 => format!("{}.{}.0", parts[0], parts[1]),
|
||||
_ => {
|
||||
// More than 3 parts or other issues — try joining first 3
|
||||
if parts.len() >= 3 {
|
||||
format!("{}.{}.{}", parts[0], parts[1], parts[2])
|
||||
} else {
|
||||
stripped.to_string()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Version::parse(&padded)
|
||||
.map_err(|e| VersionError::InvalidVersion(version_str.to_string(), e.to_string()))
|
||||
}
|
||||
|
||||
/// Parse a version constraint string into a [`semver::VersionReq`].
|
||||
///
|
||||
/// Handles comma-separated constraints (AND logic) and the standard
|
||||
/// semver operators: `=`, `>=`, `<=`, `>`, `<`, `~`, `^`.
|
||||
///
|
||||
/// If a bare version is given (no operator), it is treated as a
|
||||
/// compatibility constraint: `"3.12"` becomes `">=3.12.0, <3.13.0"` (tilde behavior).
|
||||
///
|
||||
/// Note: The `semver` crate's `VersionReq` natively handles comma-separated
|
||||
/// constraints and all standard operators.
|
||||
pub fn parse_constraint(constraint_str: &str) -> VersionResult<VersionReq> {
|
||||
let trimmed = constraint_str.trim();
|
||||
|
||||
if trimmed.is_empty() {
|
||||
// Empty constraint matches everything
|
||||
return Ok(VersionReq::STAR);
|
||||
}
|
||||
|
||||
// Preprocess each comma-separated part to handle lenient input.
|
||||
// For each part, if it looks like a bare version (no operator prefix),
|
||||
// we treat it as a tilde constraint so "3.12" means "~3.12".
|
||||
let parts: Vec<String> = trimmed
|
||||
.split(',')
|
||||
.map(|part| {
|
||||
let p = part.trim();
|
||||
if p.is_empty() {
|
||||
return String::new();
|
||||
}
|
||||
|
||||
// Check if the first character is an operator
|
||||
let first_char = p.chars().next().unwrap_or(' ');
|
||||
if first_char.is_ascii_digit() || first_char == 'v' || first_char == 'V' {
|
||||
// Bare version — treat as tilde range (compatible within minor)
|
||||
let stripped = p
|
||||
.strip_prefix('v')
|
||||
.or_else(|| p.strip_prefix('V'))
|
||||
.unwrap_or(p);
|
||||
|
||||
// Pad to at least major.minor for tilde semantics
|
||||
let dot_count = stripped.chars().filter(|c| *c == '.').count();
|
||||
let padded = match dot_count {
|
||||
0 => format!("{}.0", stripped),
|
||||
_ => stripped.to_string(),
|
||||
};
|
||||
|
||||
format!("~{}", padded)
|
||||
} else {
|
||||
// Has operator prefix — normalize version part if needed
|
||||
// Find where the version number starts
|
||||
let version_start = p.find(|c: char| c.is_ascii_digit()).unwrap_or(p.len());
|
||||
|
||||
let (op, ver) = p.split_at(version_start);
|
||||
let ver = ver
|
||||
.strip_prefix('v')
|
||||
.or_else(|| ver.strip_prefix('V'))
|
||||
.unwrap_or(ver);
|
||||
|
||||
// Pad version if needed
|
||||
let dot_count = ver.chars().filter(|c| *c == '.').count();
|
||||
let padded = match dot_count {
|
||||
0 if !ver.is_empty() => format!("{}.0.0", ver),
|
||||
1 => format!("{}.0", ver),
|
||||
_ => ver.to_string(),
|
||||
};
|
||||
|
||||
format!("{}{}", op.trim(), padded)
|
||||
}
|
||||
})
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
|
||||
if parts.is_empty() {
|
||||
return Ok(VersionReq::STAR);
|
||||
}
|
||||
|
||||
let normalized = parts.join(", ");
|
||||
|
||||
VersionReq::parse(&normalized)
|
||||
.map_err(|e| VersionError::InvalidConstraint(constraint_str.to_string(), e.to_string()))
|
||||
}
|
||||
|
||||
/// Check whether a version string satisfies a constraint string.
|
||||
///
|
||||
/// Returns `true` if the version matches the constraint.
|
||||
/// Returns an error if either the version or constraint cannot be parsed.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use attune_common::version_matching::matches_constraint;
|
||||
///
|
||||
/// assert!(matches_constraint("3.12.1", ">=3.12").unwrap());
|
||||
/// assert!(!matches_constraint("3.11.0", ">=3.12").unwrap());
|
||||
/// assert!(matches_constraint("3.12.5", ">=3.12,<4.0").unwrap());
|
||||
/// ```
|
||||
pub fn matches_constraint(version_str: &str, constraint_str: &str) -> VersionResult<bool> {
|
||||
let version = parse_version(version_str)?;
|
||||
let constraint = parse_constraint(constraint_str)?;
|
||||
Ok(constraint.matches(&version))
|
||||
}
|
||||
|
||||
/// Select the best matching runtime version from a list of candidates.
///
/// "Best" is defined as the highest version that satisfies the constraint
/// and is marked as available. If no constraint is given, the default version
/// is preferred; if no default exists, the highest available version is returned.
///
/// # Arguments
///
/// * `versions` - All registered versions for a runtime (any order)
/// * `constraint` - Optional version constraint string (e.g., `">=3.12"`)
///
/// # Returns
///
/// The best matching `RuntimeVersion`, or `None` if no version matches.
/// An unparseable constraint is logged and treated as unsatisfiable
/// (returns `None`) rather than panicking or matching everything.
pub fn select_best_version<'a>(
    versions: &'a [RuntimeVersion],
    constraint: Option<&str>,
) -> Option<&'a RuntimeVersion> {
    if versions.is_empty() {
        return None;
    }

    // Only consider available versions
    let available: Vec<&RuntimeVersion> = versions.iter().filter(|v| v.available).collect();

    if available.is_empty() {
        debug!("No available versions found");
        return None;
    }

    match constraint {
        // A whitespace-only constraint is treated the same as no constraint.
        Some(constraint_str) if !constraint_str.trim().is_empty() => {
            let req = match parse_constraint(constraint_str) {
                Ok(r) => r,
                Err(e) => {
                    warn!("Invalid version constraint '{}': {}", constraint_str, e);
                    return None;
                }
            };

            // Filter to versions that match the constraint, then pick the highest.
            // Stored versions that fail to parse are skipped (with a warning)
            // rather than failing the whole selection.
            let mut matching: Vec<(&RuntimeVersion, Version)> = available
                .iter()
                .filter_map(|rv| match parse_version(&rv.version) {
                    Ok(v) if req.matches(&v) => Some((*rv, v)),
                    Ok(_) => {
                        debug!(
                            "Version {} does not match constraint '{}'",
                            rv.version, constraint_str
                        );
                        None
                    }
                    Err(e) => {
                        warn!("Cannot parse version '{}' for matching: {}", rv.version, e);
                        None
                    }
                })
                .collect();

            if matching.is_empty() {
                debug!(
                    "No available versions match constraint '{}'",
                    constraint_str
                );
                return None;
            }

            // Sort by semver descending — highest version first.
            // Stable sort means ties keep input order, so the first-listed
            // of two equal versions wins.
            matching.sort_by(|a, b| b.1.cmp(&a.1));

            Some(matching[0].0)
        }

        _ => {
            // No constraint — prefer the default version, else the highest available
            if let Some(default) = available.iter().find(|v| v.is_default) {
                return Some(default);
            }

            // Pick highest available version
            // NOTE(review): versions that fail to parse are dropped here, so
            // if none of the available versions parse this returns None even
            // though `available` is non-empty.
            let mut with_parsed: Vec<(&RuntimeVersion, Version)> = available
                .iter()
                .filter_map(|rv| parse_version(&rv.version).ok().map(|v| (*rv, v)))
                .collect();

            with_parsed.sort_by(|a, b| b.1.cmp(&a.1));
            with_parsed.first().map(|(rv, _)| *rv)
        }
    }
}
|
||||
|
||||
/// Extract semver components from a version string.
|
||||
///
|
||||
/// Returns `(major, minor, patch)` as `Option<i32>` values.
|
||||
/// Useful for populating the `version_major`, `version_minor`, `version_patch`
|
||||
/// columns in the `runtime_version` table.
|
||||
pub fn extract_version_components(version_str: &str) -> (Option<i32>, Option<i32>, Option<i32>) {
|
||||
match parse_version(version_str) {
|
||||
Ok(v) => (
|
||||
i32::try_from(v.major).ok(),
|
||||
i32::try_from(v.minor).ok(),
|
||||
i32::try_from(v.patch).ok(),
|
||||
),
|
||||
Err(_) => (None, None, None),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for lenient version parsing, constraint parsing,
    //! constraint matching, component extraction, and best-version selection.
    use super::*;
    use serde_json::json;

    // ========================================================================
    // parse_version tests
    // ========================================================================

    #[test]
    fn test_parse_version_full() {
        let v = parse_version("3.12.1").unwrap();
        assert_eq!(v, Version::new(3, 12, 1));
    }

    #[test]
    fn test_parse_version_two_parts() {
        // Missing patch component is padded with 0.
        let v = parse_version("3.12").unwrap();
        assert_eq!(v, Version::new(3, 12, 0));
    }

    #[test]
    fn test_parse_version_one_part() {
        // Missing minor and patch components are padded with 0.
        let v = parse_version("3").unwrap();
        assert_eq!(v, Version::new(3, 0, 0));
    }

    #[test]
    fn test_parse_version_leading_v() {
        let v = parse_version("v3.12.1").unwrap();
        assert_eq!(v, Version::new(3, 12, 1));
    }

    #[test]
    fn test_parse_version_leading_v_uppercase() {
        let v = parse_version("V20.11.0").unwrap();
        assert_eq!(v, Version::new(20, 11, 0));
    }

    #[test]
    fn test_parse_version_with_whitespace() {
        let v = parse_version(" 3.12.1 ").unwrap();
        assert_eq!(v, Version::new(3, 12, 1));
    }

    #[test]
    fn test_parse_version_invalid() {
        assert!(parse_version("not-a-version").is_err());
    }

    // ========================================================================
    // parse_constraint tests
    // ========================================================================

    #[test]
    fn test_parse_constraint_gte() {
        let req = parse_constraint(">=3.12").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 13, 0)));
        assert!(req.matches(&Version::new(4, 0, 0)));
        assert!(!req.matches(&Version::new(3, 11, 9)));
    }

    #[test]
    fn test_parse_constraint_exact_with_eq() {
        let req = parse_constraint("=3.12.1").unwrap();
        assert!(req.matches(&Version::new(3, 12, 1)));
        assert!(!req.matches(&Version::new(3, 12, 2)));
    }

    #[test]
    fn test_parse_constraint_bare_version() {
        // Bare "3.12" is treated as ~3.12 → >=3.12.0, <3.13.0
        let req = parse_constraint("3.12").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 12, 9)));
        assert!(!req.matches(&Version::new(3, 13, 0)));
        assert!(!req.matches(&Version::new(3, 11, 0)));
    }

    #[test]
    fn test_parse_constraint_tilde() {
        // Tilde: patch-level compatibility within the given minor.
        let req = parse_constraint("~3.12").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 12, 99)));
        assert!(!req.matches(&Version::new(3, 13, 0)));
    }

    #[test]
    fn test_parse_constraint_caret() {
        // Caret: minor-level compatibility within the given major.
        let req = parse_constraint("^3.12").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 99, 0)));
        assert!(!req.matches(&Version::new(4, 0, 0)));
    }

    #[test]
    fn test_parse_constraint_range() {
        // Comma-separated constraints combine with AND logic.
        let req = parse_constraint(">=3.12,<4.0").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 99, 0)));
        assert!(!req.matches(&Version::new(4, 0, 0)));
        assert!(!req.matches(&Version::new(3, 11, 0)));
    }

    #[test]
    fn test_parse_constraint_empty() {
        // Empty constraint matches everything (VersionReq::STAR).
        let req = parse_constraint("").unwrap();
        assert!(req.matches(&Version::new(0, 0, 1)));
        assert!(req.matches(&Version::new(999, 0, 0)));
    }

    #[test]
    fn test_parse_constraint_lt() {
        let req = parse_constraint("<4.0").unwrap();
        assert!(req.matches(&Version::new(3, 99, 99)));
        assert!(!req.matches(&Version::new(4, 0, 0)));
    }

    #[test]
    fn test_parse_constraint_lte() {
        let req = parse_constraint("<=3.12").unwrap();
        assert!(req.matches(&Version::new(3, 12, 0)));
        // Note: semver <=3.12.0 means exactly ≤3.12.0
        assert!(!req.matches(&Version::new(3, 12, 1)));
        assert!(!req.matches(&Version::new(3, 13, 0)));
    }

    #[test]
    fn test_parse_constraint_gt() {
        // ">3.12" is padded to ">3.12.0" — strictly greater, 3.12.0 excluded.
        let req = parse_constraint(">3.12").unwrap();
        assert!(!req.matches(&Version::new(3, 12, 0)));
        assert!(req.matches(&Version::new(3, 12, 1)));
        assert!(req.matches(&Version::new(3, 13, 0)));
    }

    // ========================================================================
    // matches_constraint tests
    // ========================================================================

    #[test]
    fn test_matches_constraint_basic() {
        assert!(matches_constraint("3.12.1", ">=3.12").unwrap());
        assert!(!matches_constraint("3.11.0", ">=3.12").unwrap());
    }

    #[test]
    fn test_matches_constraint_range() {
        assert!(matches_constraint("3.12.5", ">=3.12,<4.0").unwrap());
        assert!(!matches_constraint("4.0.0", ">=3.12,<4.0").unwrap());
    }

    #[test]
    fn test_matches_constraint_tilde() {
        assert!(matches_constraint("3.12.5", "~3.12").unwrap());
        assert!(!matches_constraint("3.13.0", "~3.12").unwrap());
    }

    #[test]
    fn test_matches_constraint_caret() {
        assert!(matches_constraint("3.15.0", "^3.12").unwrap());
        assert!(!matches_constraint("4.0.0", "^3.12").unwrap());
    }

    #[test]
    fn test_matches_constraint_node_versions() {
        // Node-style major-only constraint: ">=18" pads to ">=18.0.0".
        assert!(matches_constraint("20.11.0", ">=18").unwrap());
        assert!(matches_constraint("18.0.0", ">=18").unwrap());
        assert!(!matches_constraint("16.20.0", ">=18").unwrap());
    }

    // ========================================================================
    // extract_version_components tests
    // ========================================================================

    #[test]
    fn test_extract_components_full() {
        let (maj, min, pat) = extract_version_components("3.12.1");
        assert_eq!(maj, Some(3));
        assert_eq!(min, Some(12));
        assert_eq!(pat, Some(1));
    }

    #[test]
    fn test_extract_components_partial() {
        // Missing patch is padded to 0 by lenient parsing.
        let (maj, min, pat) = extract_version_components("20.11");
        assert_eq!(maj, Some(20));
        assert_eq!(min, Some(11));
        assert_eq!(pat, Some(0));
    }

    #[test]
    fn test_extract_components_invalid() {
        // Unparseable input yields all-None components.
        let (maj, min, pat) = extract_version_components("not-a-version");
        assert_eq!(maj, None);
        assert_eq!(min, None);
        assert_eq!(pat, None);
    }

    // ========================================================================
    // select_best_version tests
    // ========================================================================

    // Test helper: build a RuntimeVersion with the given identity/flags and
    // placeholder values for the remaining (irrelevant) fields.
    fn make_version(
        id: i64,
        runtime: i64,
        version: &str,
        is_default: bool,
        available: bool,
    ) -> RuntimeVersion {
        let (major, minor, patch) = extract_version_components(version);
        RuntimeVersion {
            id,
            runtime,
            runtime_ref: "core.python".to_string(),
            version: version.to_string(),
            version_major: major,
            version_minor: minor,
            version_patch: patch,
            execution_config: json!({}),
            distributions: json!({}),
            is_default,
            available,
            verified_at: None,
            meta: json!({}),
            created: chrono::Utc::now(),
            updated: chrono::Utc::now(),
        }
    }

    #[test]
    fn test_select_best_no_constraint_prefers_default() {
        let versions = vec![
            make_version(1, 1, "3.11.0", false, true),
            make_version(2, 1, "3.12.0", true, true), // default
            make_version(3, 1, "3.14.0", false, true),
        ];

        let best = select_best_version(&versions, None).unwrap();
        assert_eq!(best.id, 2); // default version
    }

    #[test]
    fn test_select_best_no_constraint_no_default_picks_highest() {
        let versions = vec![
            make_version(1, 1, "3.11.0", false, true),
            make_version(2, 1, "3.12.0", false, true),
            make_version(3, 1, "3.14.0", false, true),
        ];

        let best = select_best_version(&versions, None).unwrap();
        assert_eq!(best.id, 3); // highest version
    }

    #[test]
    fn test_select_best_with_constraint() {
        let versions = vec![
            make_version(1, 1, "3.11.0", false, true),
            make_version(2, 1, "3.12.0", false, true),
            make_version(3, 1, "3.14.0", false, true),
        ];

        // >=3.12,<3.14 should pick 3.12.0 (3.14.0 is excluded)
        let best = select_best_version(&versions, Some(">=3.12,<3.14")).unwrap();
        assert_eq!(best.id, 2);
    }

    #[test]
    fn test_select_best_with_constraint_picks_highest_match() {
        let versions = vec![
            make_version(1, 1, "3.11.0", false, true),
            make_version(2, 1, "3.12.0", false, true),
            make_version(3, 1, "3.12.5", false, true),
            make_version(4, 1, "3.13.0", false, true),
        ];

        // ~3.12 → >=3.12.0, <3.13.0 → should pick 3.12.5
        let best = select_best_version(&versions, Some("~3.12")).unwrap();
        assert_eq!(best.id, 3);
    }

    #[test]
    fn test_select_best_skips_unavailable() {
        let versions = vec![
            make_version(1, 1, "3.12.0", false, true),
            make_version(2, 1, "3.14.0", false, false), // not available
        ];

        let best = select_best_version(&versions, Some(">=3.12")).unwrap();
        assert_eq!(best.id, 1); // 3.14 is unavailable
    }

    #[test]
    fn test_select_best_no_match() {
        let versions = vec![
            make_version(1, 1, "3.11.0", false, true),
            make_version(2, 1, "3.12.0", false, true),
        ];

        let best = select_best_version(&versions, Some(">=4.0"));
        assert!(best.is_none());
    }

    #[test]
    fn test_select_best_empty_versions() {
        let versions: Vec<RuntimeVersion> = vec![];
        assert!(select_best_version(&versions, None).is_none());
    }

    #[test]
    fn test_select_best_all_unavailable() {
        let versions = vec![
            make_version(1, 1, "3.12.0", false, false),
            make_version(2, 1, "3.14.0", false, false),
        ];

        assert!(select_best_version(&versions, None).is_none());
    }
}
|
||||
@@ -2,9 +2,11 @@
|
||||
//!
|
||||
//! This module handles registering workflows as workflow definitions in the database.
|
||||
//! Workflows are stored in the `workflow_definition` table with their full YAML definition
|
||||
//! as JSON. Optionally, actions can be created that reference workflow definitions.
|
||||
//! as JSON. A companion action record is also created so that workflows appear in
|
||||
//! action lists and the workflow builder's action palette.
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
|
||||
use crate::repositories::workflow::{CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput};
|
||||
use crate::repositories::{
|
||||
Create, Delete, FindByRef, PackRepository, Update, WorkflowDefinitionRepository,
|
||||
@@ -102,12 +104,34 @@ impl WorkflowRegistrar {
|
||||
let workflow_def_id = self
|
||||
.update_workflow(&existing.id, &loaded.workflow, &pack.r#ref)
|
||||
.await?;
|
||||
|
||||
// Update or create the companion action record
|
||||
self.ensure_companion_action(
|
||||
workflow_def_id,
|
||||
&loaded.workflow,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&loaded.file.name,
|
||||
)
|
||||
.await?;
|
||||
|
||||
(workflow_def_id, false)
|
||||
} else {
|
||||
info!("Creating new workflow: {}", loaded.file.ref_name);
|
||||
let workflow_def_id = self
|
||||
.create_workflow(&loaded.workflow, &loaded.file.pack, pack.id, &pack.r#ref)
|
||||
.await?;
|
||||
|
||||
// Create a companion action record so the workflow appears in action lists
|
||||
self.create_companion_action(
|
||||
workflow_def_id,
|
||||
&loaded.workflow,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&loaded.file.name,
|
||||
)
|
||||
.await?;
|
||||
|
||||
(workflow_def_id, true)
|
||||
};
|
||||
|
||||
@@ -158,13 +182,104 @@ impl WorkflowRegistrar {
|
||||
.await?
|
||||
.ok_or_else(|| Error::not_found("workflow", "ref", ref_name))?;
|
||||
|
||||
// Delete workflow definition (cascades to workflow_execution and related executions)
|
||||
// Delete workflow definition (cascades to workflow_execution, and the companion
|
||||
// action is cascade-deleted via the FK on action.workflow_def)
|
||||
WorkflowDefinitionRepository::delete(&self.pool, workflow.id).await?;
|
||||
|
||||
info!("Unregistered workflow: {}", ref_name);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a companion action record for a workflow definition.
|
||||
///
|
||||
/// This ensures the workflow appears in action lists and the action palette
|
||||
/// in the workflow builder. The action is linked to the workflow definition
|
||||
/// via `is_workflow = true` and `workflow_def` FK.
|
||||
async fn create_companion_action(
|
||||
&self,
|
||||
workflow_def_id: i64,
|
||||
workflow: &WorkflowYaml,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
workflow_name: &str,
|
||||
) -> Result<()> {
|
||||
let entrypoint = format!("workflows/{}.workflow.yaml", workflow_name);
|
||||
|
||||
let action_input = CreateActionInput {
|
||||
r#ref: workflow.r#ref.clone(),
|
||||
pack: pack_id,
|
||||
pack_ref: pack_ref.to_string(),
|
||||
label: workflow.label.clone(),
|
||||
description: workflow.description.clone().unwrap_or_default(),
|
||||
entrypoint,
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: workflow.parameters.clone(),
|
||||
out_schema: workflow.output.clone(),
|
||||
is_adhoc: false,
|
||||
};
|
||||
|
||||
let action = ActionRepository::create(&self.pool, action_input).await?;
|
||||
|
||||
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
|
||||
ActionRepository::link_workflow_def(&self.pool, action.id, workflow_def_id).await?;
|
||||
|
||||
info!(
|
||||
"Created companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
workflow.r#ref, action.id, workflow_def_id
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensure a companion action record exists for a workflow definition.
|
||||
///
|
||||
/// If the action already exists, update it. If it doesn't exist (e.g., for
|
||||
/// workflows registered before the companion-action fix), create it.
|
||||
async fn ensure_companion_action(
|
||||
&self,
|
||||
workflow_def_id: i64,
|
||||
workflow: &WorkflowYaml,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
workflow_name: &str,
|
||||
) -> Result<()> {
|
||||
let existing_action =
|
||||
ActionRepository::find_by_workflow_def(&self.pool, workflow_def_id).await?;
|
||||
|
||||
if let Some(action) = existing_action {
|
||||
// Update the existing companion action to stay in sync
|
||||
let update_input = UpdateActionInput {
|
||||
label: Some(workflow.label.clone()),
|
||||
description: workflow.description.clone(),
|
||||
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: workflow.parameters.clone(),
|
||||
out_schema: workflow.output.clone(),
|
||||
};
|
||||
|
||||
ActionRepository::update(&self.pool, action.id, update_input).await?;
|
||||
|
||||
debug!(
|
||||
"Updated companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
action.r#ref, action.id, workflow_def_id
|
||||
);
|
||||
} else {
|
||||
// Backfill: create companion action for pre-fix workflows
|
||||
self.create_companion_action(
|
||||
workflow_def_id,
|
||||
workflow,
|
||||
pack_id,
|
||||
pack_ref,
|
||||
workflow_name,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a new workflow definition
|
||||
async fn create_workflow(
|
||||
&self,
|
||||
|
||||
@@ -198,6 +198,7 @@ async fn test_update_action() {
|
||||
description: Some("Updated description".to_string()),
|
||||
entrypoint: None,
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
};
|
||||
@@ -329,6 +330,7 @@ async fn test_action_foreign_key_constraint() {
|
||||
description: "Test".to_string(),
|
||||
entrypoint: "main.py".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
is_adhoc: false,
|
||||
|
||||
@@ -457,6 +457,7 @@ impl ActionFixture {
|
||||
description: self.description,
|
||||
entrypoint: self.entrypoint,
|
||||
runtime: self.runtime,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: self.param_schema,
|
||||
out_schema: self.out_schema,
|
||||
is_adhoc: false,
|
||||
@@ -1088,6 +1089,7 @@ impl SensorFixture {
|
||||
entrypoint: self.entrypoint,
|
||||
runtime: self.runtime_id,
|
||||
runtime_ref: self.runtime_ref,
|
||||
runtime_version_constraint: None,
|
||||
trigger: self.trigger_id,
|
||||
trigger_ref: self.trigger_ref,
|
||||
enabled: self.enabled,
|
||||
|
||||
@@ -179,6 +179,7 @@ async fn test_create_sensor_duplicate_ref_fails() {
|
||||
entrypoint: "sensors/dup.py".to_string(),
|
||||
runtime: runtime.id,
|
||||
runtime_ref: runtime.r#ref.clone(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: trigger.id,
|
||||
trigger_ref: trigger.r#ref.clone(),
|
||||
enabled: true,
|
||||
@@ -233,6 +234,7 @@ async fn test_create_sensor_invalid_ref_format_fails() {
|
||||
entrypoint: "sensors/invalid.py".to_string(),
|
||||
runtime: runtime.id,
|
||||
runtime_ref: runtime.r#ref.clone(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: trigger.id,
|
||||
trigger_ref: trigger.r#ref.clone(),
|
||||
enabled: true,
|
||||
@@ -272,6 +274,7 @@ async fn test_create_sensor_invalid_pack_fails() {
|
||||
entrypoint: "sensors/invalid.py".to_string(),
|
||||
runtime: runtime.id,
|
||||
runtime_ref: runtime.r#ref.clone(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: trigger.id,
|
||||
trigger_ref: trigger.r#ref.clone(),
|
||||
enabled: true,
|
||||
@@ -302,6 +305,7 @@ async fn test_create_sensor_invalid_trigger_fails() {
|
||||
entrypoint: "sensors/invalid.py".to_string(),
|
||||
runtime: runtime.id,
|
||||
runtime_ref: runtime.r#ref.clone(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: 99999, // Non-existent trigger
|
||||
trigger_ref: "invalid.trigger".to_string(),
|
||||
enabled: true,
|
||||
@@ -332,6 +336,7 @@ async fn test_create_sensor_invalid_runtime_fails() {
|
||||
entrypoint: "sensors/invalid.py".to_string(),
|
||||
runtime: 99999, // Non-existent runtime
|
||||
runtime_ref: "invalid.runtime".to_string(),
|
||||
runtime_version_constraint: None,
|
||||
trigger: trigger.id,
|
||||
trigger_ref: trigger.r#ref.clone(),
|
||||
enabled: true,
|
||||
|
||||
@@ -17,6 +17,7 @@ use attune_common::{
|
||||
runtime::{RuntimeRepository, WorkerRepository},
|
||||
FindById, FindByRef, Update,
|
||||
},
|
||||
runtime_detection::runtime_matches_filter,
|
||||
};
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -263,13 +264,13 @@ impl ExecutionScheduler {
|
||||
if let Some(ref capabilities) = worker.capabilities {
|
||||
if let Some(runtimes) = capabilities.get("runtimes") {
|
||||
if let Some(runtime_array) = runtimes.as_array() {
|
||||
// Check if any runtime in the array matches (case-insensitive)
|
||||
// Check if any runtime in the array matches (alias-aware)
|
||||
for runtime_value in runtime_array {
|
||||
if let Some(runtime_str) = runtime_value.as_str() {
|
||||
if runtime_str.eq_ignore_ascii_case(runtime_name) {
|
||||
if runtime_matches_filter(runtime_name, runtime_str) {
|
||||
debug!(
|
||||
"Worker {} supports runtime '{}' via capabilities",
|
||||
worker.name, runtime_name
|
||||
"Worker {} supports runtime '{}' via capabilities (matched '{}')",
|
||||
worker.name, runtime_name, runtime_str
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -2,9 +2,11 @@
|
||||
//!
|
||||
//! This module handles registering workflows as workflow definitions in the database.
|
||||
//! Workflows are stored in the `workflow_definition` table with their full YAML definition
|
||||
//! as JSON. Optionally, actions can be created that reference workflow definitions.
|
||||
//! as JSON. A companion action record is also created so that workflows appear in
|
||||
//! action lists and the workflow builder's action palette.
|
||||
|
||||
use attune_common::error::{Error, Result};
|
||||
use attune_common::repositories::action::{ActionRepository, CreateActionInput, UpdateActionInput};
|
||||
use attune_common::repositories::workflow::{
|
||||
CreateWorkflowDefinitionInput, UpdateWorkflowDefinitionInput,
|
||||
};
|
||||
@@ -104,12 +106,34 @@ impl WorkflowRegistrar {
|
||||
let workflow_def_id = self
|
||||
.update_workflow(&existing.id, &loaded.workflow, &pack.r#ref)
|
||||
.await?;
|
||||
|
||||
// Update or create the companion action record
|
||||
self.ensure_companion_action(
|
||||
workflow_def_id,
|
||||
&loaded.workflow,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&loaded.file.name,
|
||||
)
|
||||
.await?;
|
||||
|
||||
(workflow_def_id, false)
|
||||
} else {
|
||||
info!("Creating new workflow: {}", loaded.file.ref_name);
|
||||
let workflow_def_id = self
|
||||
.create_workflow(&loaded.workflow, &loaded.file.pack, pack.id, &pack.r#ref)
|
||||
.await?;
|
||||
|
||||
// Create a companion action record so the workflow appears in action lists
|
||||
self.create_companion_action(
|
||||
workflow_def_id,
|
||||
&loaded.workflow,
|
||||
pack.id,
|
||||
&pack.r#ref,
|
||||
&loaded.file.name,
|
||||
)
|
||||
.await?;
|
||||
|
||||
(workflow_def_id, true)
|
||||
};
|
||||
|
||||
@@ -160,13 +184,104 @@ impl WorkflowRegistrar {
|
||||
.await?
|
||||
.ok_or_else(|| Error::not_found("workflow", "ref", ref_name))?;
|
||||
|
||||
// Delete workflow definition (cascades to workflow_execution and related executions)
|
||||
// Delete workflow definition (cascades to workflow_execution, and the companion
|
||||
// action is cascade-deleted via the FK on action.workflow_def)
|
||||
WorkflowDefinitionRepository::delete(&self.pool, workflow.id).await?;
|
||||
|
||||
info!("Unregistered workflow: {}", ref_name);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a companion action record for a workflow definition.
|
||||
///
|
||||
/// This ensures the workflow appears in action lists and the action palette
|
||||
/// in the workflow builder. The action is linked to the workflow definition
|
||||
/// via `is_workflow = true` and `workflow_def` FK.
|
||||
async fn create_companion_action(
|
||||
&self,
|
||||
workflow_def_id: i64,
|
||||
workflow: &WorkflowYaml,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
workflow_name: &str,
|
||||
) -> Result<()> {
|
||||
let entrypoint = format!("workflows/{}.workflow.yaml", workflow_name);
|
||||
|
||||
let action_input = CreateActionInput {
|
||||
r#ref: workflow.r#ref.clone(),
|
||||
pack: pack_id,
|
||||
pack_ref: pack_ref.to_string(),
|
||||
label: workflow.label.clone(),
|
||||
description: workflow.description.clone().unwrap_or_default(),
|
||||
entrypoint,
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: workflow.parameters.clone(),
|
||||
out_schema: workflow.output.clone(),
|
||||
is_adhoc: false,
|
||||
};
|
||||
|
||||
let action = ActionRepository::create(&self.pool, action_input).await?;
|
||||
|
||||
// Link the action to the workflow definition (sets is_workflow = true and workflow_def)
|
||||
ActionRepository::link_workflow_def(&self.pool, action.id, workflow_def_id).await?;
|
||||
|
||||
info!(
|
||||
"Created companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
workflow.r#ref, action.id, workflow_def_id
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensure a companion action record exists for a workflow definition.
|
||||
///
|
||||
/// If the action already exists, update it. If it doesn't exist (e.g., for
|
||||
/// workflows registered before the companion-action fix), create it.
|
||||
async fn ensure_companion_action(
|
||||
&self,
|
||||
workflow_def_id: i64,
|
||||
workflow: &WorkflowYaml,
|
||||
pack_id: i64,
|
||||
pack_ref: &str,
|
||||
workflow_name: &str,
|
||||
) -> Result<()> {
|
||||
let existing_action =
|
||||
ActionRepository::find_by_workflow_def(&self.pool, workflow_def_id).await?;
|
||||
|
||||
if let Some(action) = existing_action {
|
||||
// Update the existing companion action to stay in sync
|
||||
let update_input = UpdateActionInput {
|
||||
label: Some(workflow.label.clone()),
|
||||
description: workflow.description.clone(),
|
||||
entrypoint: Some(format!("workflows/{}.workflow.yaml", workflow_name)),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: workflow.parameters.clone(),
|
||||
out_schema: workflow.output.clone(),
|
||||
};
|
||||
|
||||
ActionRepository::update(&self.pool, action.id, update_input).await?;
|
||||
|
||||
debug!(
|
||||
"Updated companion action '{}' (ID: {}) for workflow definition (ID: {})",
|
||||
action.r#ref, action.id, workflow_def_id
|
||||
);
|
||||
} else {
|
||||
// Backfill: create companion action for pre-fix workflows
|
||||
self.create_companion_action(
|
||||
workflow_def_id,
|
||||
workflow,
|
||||
pack_id,
|
||||
pack_ref,
|
||||
workflow_name,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a new workflow definition
|
||||
async fn create_workflow(
|
||||
&self,
|
||||
|
||||
@@ -99,6 +99,7 @@ async fn create_test_action(pool: &PgPool, pack_id: i64, pack_ref: &str, suffix:
|
||||
description: format!("Test action {}", suffix),
|
||||
entrypoint: "echo test".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
is_adhoc: false,
|
||||
|
||||
@@ -94,6 +94,7 @@ async fn create_test_action(pool: &PgPool, pack_id: i64, suffix: &str) -> i64 {
|
||||
description: format!("Test action {}", suffix),
|
||||
entrypoint: "echo test".to_string(),
|
||||
runtime: None,
|
||||
runtime_version_constraint: None,
|
||||
param_schema: None,
|
||||
out_schema: None,
|
||||
is_adhoc: false,
|
||||
|
||||
@@ -26,6 +26,7 @@ clap = { workspace = true }
|
||||
lapin = { workspace = true }
|
||||
reqwest = { workspace = true }
|
||||
hostname = "0.4"
|
||||
regex = { workspace = true }
|
||||
async-trait = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
aes-gcm = { workspace = true }
|
||||
|
||||
@@ -8,6 +8,19 @@
|
||||
//! The goal is to ensure environments are ready *before* the first execution,
|
||||
//! eliminating the first-run penalty and potential permission errors that occur
|
||||
//! when setup is deferred to execution time.
|
||||
//!
|
||||
//! ## Version-Aware Environments
|
||||
//!
|
||||
//! When runtime versions are registered (e.g., Python 3.11, 3.12, 3.13), this
|
||||
//! module creates per-version environments at:
|
||||
//! `{runtime_envs_dir}/{pack_ref}/{runtime_name}-{version}`
|
||||
//!
|
||||
//! For example: `/opt/attune/runtime_envs/my_pack/python-3.12`
|
||||
//!
|
||||
//! This ensures that different versions maintain isolated environments with
|
||||
//! their own interpreter binaries and installed dependencies. A base (unversioned)
|
||||
//! environment is also created for backward compatibility with actions that don't
|
||||
//! declare a version constraint.
|
||||
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::path::Path;
|
||||
@@ -15,11 +28,14 @@ use std::path::Path;
|
||||
use sqlx::PgPool;
|
||||
use tracing::{debug, error, info, warn};
|
||||
|
||||
use attune_common::models::RuntimeVersion;
|
||||
use attune_common::mq::PackRegisteredPayload;
|
||||
use attune_common::repositories::action::ActionRepository;
|
||||
use attune_common::repositories::pack::PackRepository;
|
||||
use attune_common::repositories::runtime::RuntimeRepository;
|
||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||
use attune_common::repositories::{FindById, List};
|
||||
use attune_common::runtime_detection::runtime_in_filter;
|
||||
|
||||
// Re-export the utility that the API also uses so callers can reach it from
|
||||
// either crate without adding a direct common dependency for this one function.
|
||||
@@ -96,6 +112,26 @@ pub async fn scan_and_setup_all_environments(
|
||||
}
|
||||
};
|
||||
|
||||
// Load all runtime versions, indexed by runtime ID
|
||||
let version_map: HashMap<i64, Vec<RuntimeVersion>> =
|
||||
match RuntimeVersionRepository::list(db_pool).await {
|
||||
Ok(versions) => {
|
||||
let mut map: HashMap<i64, Vec<RuntimeVersion>> = HashMap::new();
|
||||
for v in versions {
|
||||
map.entry(v.runtime).or_default().push(v);
|
||||
}
|
||||
map
|
||||
}
|
||||
Err(e) => {
|
||||
warn!(
|
||||
"Failed to load runtime versions from database: {}. \
|
||||
Version-specific environments will not be created.",
|
||||
e
|
||||
);
|
||||
HashMap::new()
|
||||
}
|
||||
};
|
||||
|
||||
info!("Found {} registered pack(s) to scan", packs.len());
|
||||
|
||||
for pack in &packs {
|
||||
@@ -109,6 +145,7 @@ pub async fn scan_and_setup_all_environments(
|
||||
packs_base_dir,
|
||||
runtime_envs_dir,
|
||||
&runtime_map,
|
||||
&version_map,
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -164,13 +201,13 @@ pub async fn setup_environments_for_registered_pack(
|
||||
return pack_result;
|
||||
}
|
||||
|
||||
// Filter to runtimes this worker supports
|
||||
// Filter to runtimes this worker supports (alias-aware matching)
|
||||
let target_runtimes: Vec<&String> = event
|
||||
.runtime_names
|
||||
.iter()
|
||||
.filter(|name| {
|
||||
if let Some(filter) = runtime_filter {
|
||||
filter.contains(name)
|
||||
runtime_in_filter(name, filter)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
@@ -219,6 +256,7 @@ pub async fn setup_environments_for_registered_pack(
|
||||
continue;
|
||||
}
|
||||
|
||||
// Set up base (unversioned) environment
|
||||
let env_dir = runtime_envs_dir.join(&event.pack_ref).join(rt_name);
|
||||
|
||||
let process_runtime = ProcessRuntime::new(
|
||||
@@ -248,6 +286,19 @@ pub async fn setup_environments_for_registered_pack(
|
||||
pack_result.errors.push(msg);
|
||||
}
|
||||
}
|
||||
|
||||
// Set up per-version environments for available runtime versions
|
||||
setup_version_environments(
|
||||
db_pool,
|
||||
rt.id,
|
||||
rt_name,
|
||||
&event.pack_ref,
|
||||
&pack_dir,
|
||||
packs_base_dir,
|
||||
runtime_envs_dir,
|
||||
&mut pack_result,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
pack_result
|
||||
@@ -256,7 +307,8 @@ pub async fn setup_environments_for_registered_pack(
|
||||
/// Internal helper: set up environments for a single pack during the startup scan.
|
||||
///
|
||||
/// Discovers which runtimes the pack's actions use, filters by this worker's
|
||||
/// capabilities, and creates any missing environments.
|
||||
/// capabilities, and creates any missing environments. Also creates per-version
|
||||
/// environments for runtimes that have registered versions.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn setup_environments_for_pack(
|
||||
db_pool: &PgPool,
|
||||
@@ -266,6 +318,7 @@ async fn setup_environments_for_pack(
|
||||
packs_base_dir: &Path,
|
||||
runtime_envs_dir: &Path,
|
||||
runtime_map: &HashMap<i64, attune_common::models::Runtime>,
|
||||
version_map: &HashMap<i64, Vec<RuntimeVersion>>,
|
||||
) -> PackEnvSetupResult {
|
||||
let mut pack_result = PackEnvSetupResult {
|
||||
pack_ref: pack_ref.to_string(),
|
||||
@@ -327,6 +380,25 @@ async fn setup_environments_for_pack(
|
||||
&mut pack_result,
|
||||
)
|
||||
.await;
|
||||
// Also set up version-specific environments
|
||||
let versions = match RuntimeVersionRepository::find_available_by_runtime(
|
||||
db_pool, runtime_id,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(v) => v,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
setup_version_environments_from_list(
|
||||
&versions,
|
||||
&rt_name,
|
||||
pack_ref,
|
||||
&pack_dir,
|
||||
packs_base_dir,
|
||||
runtime_envs_dir,
|
||||
&mut pack_result,
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
Ok(None) => {
|
||||
@@ -353,6 +425,22 @@ async fn setup_environments_for_pack(
|
||||
&mut pack_result,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Set up per-version environments for available versions of this runtime
|
||||
if let Some(versions) = version_map.get(&runtime_id) {
|
||||
let available_versions: Vec<RuntimeVersion> =
|
||||
versions.iter().filter(|v| v.available).cloned().collect();
|
||||
setup_version_environments_from_list(
|
||||
&available_versions,
|
||||
&rt_name,
|
||||
pack_ref,
|
||||
&pack_dir,
|
||||
packs_base_dir,
|
||||
runtime_envs_dir,
|
||||
&mut pack_result,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
if !pack_result.environments_created.is_empty() {
|
||||
@@ -377,9 +465,9 @@ async fn process_runtime_for_pack(
|
||||
runtime_envs_dir: &Path,
|
||||
pack_result: &mut PackEnvSetupResult,
|
||||
) {
|
||||
// Apply worker runtime filter
|
||||
// Apply worker runtime filter (alias-aware matching)
|
||||
if let Some(filter) = runtime_filter {
|
||||
if !filter.iter().any(|f| f == rt_name) {
|
||||
if !runtime_in_filter(rt_name, filter) {
|
||||
debug!(
|
||||
"Runtime '{}' not in worker filter, skipping for pack '{}'",
|
||||
rt_name, pack_ref,
|
||||
@@ -430,6 +518,115 @@ async fn process_runtime_for_pack(
|
||||
}
|
||||
}
|
||||
|
||||
/// Set up per-version environments for a runtime, given a list of available versions.
|
||||
///
|
||||
/// For each available version, creates an environment at:
|
||||
/// `{runtime_envs_dir}/{pack_ref}/{runtime_name}-{version}`
|
||||
///
|
||||
/// This uses the version's own `execution_config` (which may specify a different
|
||||
/// interpreter binary, environment create command, etc.).
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn setup_version_environments_from_list(
|
||||
versions: &[RuntimeVersion],
|
||||
rt_name: &str,
|
||||
pack_ref: &str,
|
||||
pack_dir: &Path,
|
||||
packs_base_dir: &Path,
|
||||
runtime_envs_dir: &Path,
|
||||
pack_result: &mut PackEnvSetupResult,
|
||||
) {
|
||||
if versions.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
for version in versions {
|
||||
let version_exec_config = version.parsed_execution_config();
|
||||
|
||||
// Skip versions with no environment config and no dependencies
|
||||
if version_exec_config.environment.is_none()
|
||||
&& !version_exec_config.has_dependencies(pack_dir)
|
||||
{
|
||||
debug!(
|
||||
"Version '{}' {} has no environment config, skipping for pack '{}'",
|
||||
version.runtime_ref, version.version, pack_ref,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
let version_env_suffix = format!("{}-{}", rt_name, version.version);
|
||||
let version_env_dir = runtime_envs_dir.join(pack_ref).join(&version_env_suffix);
|
||||
|
||||
let version_runtime = ProcessRuntime::new(
|
||||
rt_name.to_string(),
|
||||
version_exec_config,
|
||||
packs_base_dir.to_path_buf(),
|
||||
runtime_envs_dir.to_path_buf(),
|
||||
);
|
||||
|
||||
match version_runtime
|
||||
.setup_pack_environment(pack_dir, &version_env_dir)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
info!(
|
||||
"Version environment '{}' ready for pack '{}'",
|
||||
version_env_suffix, pack_ref,
|
||||
);
|
||||
pack_result.environments_created.push(version_env_suffix);
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!(
|
||||
"Failed to set up version environment '{}' for pack '{}': {}",
|
||||
version_env_suffix, pack_ref, e,
|
||||
);
|
||||
warn!("{}", msg);
|
||||
pack_result.errors.push(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Set up per-version environments for a runtime by querying the database.
|
||||
///
|
||||
/// This is a convenience wrapper around `setup_version_environments_from_list`
|
||||
/// that queries available versions from the database first. Used in the
|
||||
/// pack.registered event handler where we don't have a pre-loaded version map.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn setup_version_environments(
|
||||
db_pool: &PgPool,
|
||||
runtime_id: i64,
|
||||
rt_name: &str,
|
||||
pack_ref: &str,
|
||||
pack_dir: &Path,
|
||||
packs_base_dir: &Path,
|
||||
runtime_envs_dir: &Path,
|
||||
pack_result: &mut PackEnvSetupResult,
|
||||
) {
|
||||
let versions =
|
||||
match RuntimeVersionRepository::find_available_by_runtime(db_pool, runtime_id).await {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
debug!(
|
||||
"Failed to load versions for runtime '{}' (id {}): {}. \
|
||||
Skipping version-specific environments.",
|
||||
rt_name, runtime_id, e,
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
setup_version_environments_from_list(
|
||||
&versions,
|
||||
rt_name,
|
||||
pack_ref,
|
||||
pack_dir,
|
||||
packs_base_dir,
|
||||
runtime_envs_dir,
|
||||
pack_result,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
/// Determine the runtime filter from the `ATTUNE_WORKER_RUNTIMES` environment variable.
|
||||
///
|
||||
/// Returns `None` if the variable is not set (meaning all runtimes are accepted).
|
||||
|
||||
@@ -2,11 +2,24 @@
|
||||
//!
|
||||
//! Coordinates the execution of actions by managing the runtime,
|
||||
//! loading action data, preparing execution context, and collecting results.
|
||||
//!
|
||||
//! ## Runtime Version Selection
|
||||
//!
|
||||
//! When an action declares a `runtime_version_constraint` (e.g., `">=3.12"`),
|
||||
//! the executor queries the `runtime_version` table for all versions of the
|
||||
//! action's runtime and uses [`select_best_version`] to pick the highest
|
||||
//! available version satisfying the constraint. The selected version's
|
||||
//! `execution_config` is passed through the `ExecutionContext` as an override
|
||||
//! so the `ProcessRuntime` uses version-specific interpreter binaries,
|
||||
//! environment commands, etc.
|
||||
|
||||
use attune_common::error::{Error, Result};
|
||||
use attune_common::models::runtime::RuntimeExecutionConfig;
|
||||
use attune_common::models::{runtime::Runtime as RuntimeModel, Action, Execution, ExecutionStatus};
|
||||
use attune_common::repositories::execution::{ExecutionRepository, UpdateExecutionInput};
|
||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||
use attune_common::repositories::{FindById, Update};
|
||||
use attune_common::version_matching::select_best_version;
|
||||
use std::path::PathBuf as StdPathBuf;
|
||||
|
||||
use serde_json::Value as JsonValue;
|
||||
@@ -365,6 +378,15 @@ impl ActionExecutor {
|
||||
|
||||
let runtime_name = runtime_record.as_ref().map(|r| r.name.to_lowercase());
|
||||
|
||||
// --- Runtime Version Resolution ---
|
||||
// If the action declares a runtime_version_constraint (e.g., ">=3.12"),
|
||||
// query all registered versions for this runtime and select the best
|
||||
// match. The selected version's execution_config overrides the parent
|
||||
// runtime's config so the ProcessRuntime uses a version-specific
|
||||
// interpreter binary, environment commands, etc.
|
||||
let (runtime_config_override, runtime_env_dir_suffix, selected_runtime_version) =
|
||||
self.resolve_runtime_version(&runtime_record, action).await;
|
||||
|
||||
// Determine the pack directory for this action
|
||||
let pack_dir = self.packs_base_dir.join(&action.pack_ref);
|
||||
|
||||
@@ -446,6 +468,9 @@ impl ActionExecutor {
|
||||
code,
|
||||
code_path,
|
||||
runtime_name,
|
||||
runtime_config_override,
|
||||
runtime_env_dir_suffix,
|
||||
selected_runtime_version,
|
||||
max_stdout_bytes: self.max_stdout_bytes,
|
||||
max_stderr_bytes: self.max_stderr_bytes,
|
||||
parameter_delivery: action.parameter_delivery,
|
||||
@@ -456,6 +481,101 @@ impl ActionExecutor {
|
||||
Ok(context)
|
||||
}
|
||||
|
||||
/// Resolve the best runtime version for an action, if applicable.
|
||||
///
|
||||
/// Returns a tuple of:
|
||||
/// - Optional `RuntimeExecutionConfig` override (from the selected version)
|
||||
/// - Optional env dir suffix (e.g., `"python-3.12"`) for per-version isolation
|
||||
/// - Optional version string for logging (e.g., `"3.12"`)
|
||||
///
|
||||
/// If the action has no `runtime_version_constraint`, or no versions are
|
||||
/// registered for its runtime, all three are `None` and the parent runtime's
|
||||
/// config is used as-is.
|
||||
async fn resolve_runtime_version(
|
||||
&self,
|
||||
runtime_record: &Option<RuntimeModel>,
|
||||
action: &Action,
|
||||
) -> (
|
||||
Option<RuntimeExecutionConfig>,
|
||||
Option<String>,
|
||||
Option<String>,
|
||||
) {
|
||||
let runtime = match runtime_record {
|
||||
Some(r) => r,
|
||||
None => return (None, None, None),
|
||||
};
|
||||
|
||||
// Query all versions for this runtime
|
||||
let versions = match RuntimeVersionRepository::find_by_runtime(&self.pool, runtime.id).await
|
||||
{
|
||||
Ok(v) if !v.is_empty() => v,
|
||||
Ok(_) => {
|
||||
// No versions registered — use parent runtime config as-is
|
||||
if action.runtime_version_constraint.is_some() {
|
||||
warn!(
|
||||
"Action '{}' declares runtime_version_constraint '{}' but runtime '{}' \
|
||||
has no registered versions. Using parent runtime config.",
|
||||
action.r#ref,
|
||||
action.runtime_version_constraint.as_deref().unwrap_or(""),
|
||||
runtime.name,
|
||||
);
|
||||
}
|
||||
return (None, None, None);
|
||||
}
|
||||
Err(e) => {
|
||||
warn!(
|
||||
"Failed to load runtime versions for runtime '{}' (id {}): {}. \
|
||||
Using parent runtime config.",
|
||||
runtime.name, runtime.id, e,
|
||||
);
|
||||
return (None, None, None);
|
||||
}
|
||||
};
|
||||
|
||||
let constraint = action.runtime_version_constraint.as_deref();
|
||||
|
||||
match select_best_version(&versions, constraint) {
|
||||
Some(selected) => {
|
||||
let version_config = selected.parsed_execution_config();
|
||||
let rt_name = runtime.name.to_lowercase();
|
||||
let env_suffix = format!("{}-{}", rt_name, selected.version);
|
||||
|
||||
info!(
|
||||
"Selected runtime version '{}' (id {}) for action '{}' \
|
||||
(constraint: {}, runtime: '{}'). Env dir suffix: '{}'",
|
||||
selected.version,
|
||||
selected.id,
|
||||
action.r#ref,
|
||||
constraint.unwrap_or("none"),
|
||||
runtime.name,
|
||||
env_suffix,
|
||||
);
|
||||
|
||||
(
|
||||
Some(version_config),
|
||||
Some(env_suffix),
|
||||
Some(selected.version.clone()),
|
||||
)
|
||||
}
|
||||
None => {
|
||||
if let Some(c) = constraint {
|
||||
warn!(
|
||||
"No available runtime version matches constraint '{}' for action '{}' \
|
||||
(runtime: '{}'). Using parent runtime config as fallback.",
|
||||
c, action.r#ref, runtime.name,
|
||||
);
|
||||
} else {
|
||||
debug!(
|
||||
"No default or available version found for runtime '{}'. \
|
||||
Using parent runtime config.",
|
||||
runtime.name,
|
||||
);
|
||||
}
|
||||
(None, None, None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute the action using the runtime registry
|
||||
async fn execute_action(&self, context: ExecutionContext) -> Result<ExecutionResult> {
|
||||
debug!("Executing action: {}", context.action_ref);
|
||||
|
||||
@@ -11,6 +11,7 @@ pub mod registration;
|
||||
pub mod runtime;
|
||||
pub mod secrets;
|
||||
pub mod service;
|
||||
pub mod version_verify;
|
||||
|
||||
// Re-export commonly used types
|
||||
pub use executor::ActionExecutor;
|
||||
|
||||
@@ -36,6 +36,7 @@ impl LocalRuntime {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
|
||||
Self {
|
||||
@@ -168,6 +169,9 @@ mod tests {
|
||||
code: Some("#!/bin/bash\necho 'hello from shell'".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -197,6 +201,9 @@ mod tests {
|
||||
code: Some("some code".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("unknown".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
|
||||
@@ -9,6 +9,18 @@
|
||||
//! as separate Rust types. Instead, the `ProcessRuntime` handles all
|
||||
//! languages by using the interpreter, environment, and dependency
|
||||
//! configuration stored in the database.
|
||||
//!
|
||||
//! ## Runtime Version Selection
|
||||
//!
|
||||
//! When an action declares a `runtime_version_constraint` (e.g., `">=3.12"`),
|
||||
//! the executor resolves the best matching `RuntimeVersion` from the database
|
||||
//! and passes its `execution_config` through `ExecutionContext::runtime_config_override`.
|
||||
//! The `ProcessRuntime` uses this override instead of its built-in config,
|
||||
//! enabling version-specific interpreter binaries, environment commands, etc.
|
||||
//!
|
||||
//! The environment directory is also overridden to include the version suffix
|
||||
//! (e.g., `python-3.12` instead of `python`) so that different versions
|
||||
//! maintain isolated environments.
|
||||
|
||||
pub mod dependency;
|
||||
pub mod local;
|
||||
@@ -26,6 +38,7 @@ pub use process::ProcessRuntime;
|
||||
pub use shell::ShellRuntime;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use attune_common::models::runtime::RuntimeExecutionConfig;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
@@ -112,6 +125,24 @@ pub struct ExecutionContext {
|
||||
/// Runtime name (python, shell, etc.) - used to select the correct runtime
|
||||
pub runtime_name: Option<String>,
|
||||
|
||||
/// Optional override of the runtime's execution config, set when a specific
|
||||
/// runtime version has been selected (e.g., Python 3.12 vs the parent
|
||||
/// "Python" runtime). When present, `ProcessRuntime` uses this config
|
||||
/// instead of its built-in one for interpreter resolution, environment
|
||||
/// setup, and dependency management.
|
||||
#[serde(skip)]
|
||||
pub runtime_config_override: Option<RuntimeExecutionConfig>,
|
||||
|
||||
/// Optional override of the environment directory suffix. When a specific
|
||||
/// runtime version is selected, the env dir includes the version
|
||||
/// (e.g., `python-3.12` instead of `python`) for per-version isolation.
|
||||
/// Format: just the directory name, not the full path.
|
||||
pub runtime_env_dir_suffix: Option<String>,
|
||||
|
||||
/// The selected runtime version string for logging/diagnostics
|
||||
/// (e.g., "3.12.1"). `None` means the parent runtime config is used as-is.
|
||||
pub selected_runtime_version: Option<String>,
|
||||
|
||||
/// Maximum stdout size in bytes (for log truncation)
|
||||
#[serde(default = "default_max_log_bytes")]
|
||||
pub max_stdout_bytes: usize,
|
||||
@@ -154,6 +185,9 @@ impl ExecutionContext {
|
||||
code,
|
||||
code_path: None,
|
||||
runtime_name: None,
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
|
||||
@@ -94,6 +94,7 @@ impl ProcessRuntime {
|
||||
}
|
||||
|
||||
/// Get the interpreter path, checking for an external pack environment first.
|
||||
#[cfg(test)]
|
||||
fn resolve_interpreter(&self, pack_dir: &Path, env_dir: Option<&Path>) -> PathBuf {
|
||||
self.config.resolve_interpreter_with_env(pack_dir, env_dir)
|
||||
}
|
||||
@@ -472,24 +473,52 @@ impl Runtime for ProcessRuntime {
|
||||
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing action '{}' (execution_id: {}) with runtime '{}', \
|
||||
parameter delivery: {:?}, format: {:?}, output format: {:?}",
|
||||
context.action_ref,
|
||||
context.execution_id,
|
||||
self.runtime_name,
|
||||
context.parameter_delivery,
|
||||
context.parameter_format,
|
||||
context.output_format,
|
||||
);
|
||||
// Determine the effective execution config: use the version-specific
|
||||
// override if the executor resolved a specific runtime version for this
|
||||
// action, otherwise fall back to this ProcessRuntime's built-in config.
|
||||
let effective_config: &RuntimeExecutionConfig = context
|
||||
.runtime_config_override
|
||||
.as_ref()
|
||||
.unwrap_or(&self.config);
|
||||
|
||||
if let Some(ref ver) = context.selected_runtime_version {
|
||||
info!(
|
||||
"Executing action '{}' (execution_id: {}) with runtime '{}' version {}, \
|
||||
parameter delivery: {:?}, format: {:?}, output format: {:?}",
|
||||
context.action_ref,
|
||||
context.execution_id,
|
||||
self.runtime_name,
|
||||
ver,
|
||||
context.parameter_delivery,
|
||||
context.parameter_format,
|
||||
context.output_format,
|
||||
);
|
||||
} else {
|
||||
info!(
|
||||
"Executing action '{}' (execution_id: {}) with runtime '{}', \
|
||||
parameter delivery: {:?}, format: {:?}, output format: {:?}",
|
||||
context.action_ref,
|
||||
context.execution_id,
|
||||
self.runtime_name,
|
||||
context.parameter_delivery,
|
||||
context.parameter_format,
|
||||
context.output_format,
|
||||
);
|
||||
}
|
||||
|
||||
let pack_ref = self.extract_pack_ref(&context.action_ref);
|
||||
let pack_dir = self.packs_base_dir.join(pack_ref);
|
||||
|
||||
// Compute external env_dir for this pack/runtime combination.
|
||||
// Pattern: {runtime_envs_dir}/{pack_ref}/{runtime_name}
|
||||
let env_dir = self.env_dir_for_pack(pack_ref);
|
||||
let env_dir_opt = if self.config.environment.is_some() {
|
||||
// When a specific runtime version is selected, the env dir includes a
|
||||
// version suffix (e.g., "python-3.12") for per-version isolation.
|
||||
// Pattern: {runtime_envs_dir}/{pack_ref}/{runtime_name[-version]}
|
||||
let env_dir = if let Some(ref suffix) = context.runtime_env_dir_suffix {
|
||||
self.runtime_envs_dir.join(pack_ref).join(suffix)
|
||||
} else {
|
||||
self.env_dir_for_pack(pack_ref)
|
||||
};
|
||||
let env_dir_opt = if effective_config.environment.is_some() {
|
||||
Some(env_dir.as_path())
|
||||
} else {
|
||||
None
|
||||
@@ -499,7 +528,7 @@ impl Runtime for ProcessRuntime {
|
||||
// (scanning all registered packs) or via pack.registered MQ events when a
|
||||
// new pack is installed. We only log a warning here if the expected
|
||||
// environment directory is missing so operators can investigate.
|
||||
if self.config.environment.is_some() && pack_dir.exists() && !env_dir.exists() {
|
||||
if effective_config.environment.is_some() && pack_dir.exists() && !env_dir.exists() {
|
||||
warn!(
|
||||
"Runtime environment for pack '{}' not found at {}. \
|
||||
The environment should have been created at startup or on pack registration. \
|
||||
@@ -512,8 +541,8 @@ impl Runtime for ProcessRuntime {
|
||||
// If the environment directory exists but contains a broken interpreter
|
||||
// (e.g. broken symlinks from a venv created in a different container),
|
||||
// attempt to recreate it before resolving the interpreter.
|
||||
if self.config.environment.is_some() && env_dir.exists() && pack_dir.exists() {
|
||||
if let Some(ref env_cfg) = self.config.environment {
|
||||
if effective_config.environment.is_some() && env_dir.exists() && pack_dir.exists() {
|
||||
if let Some(ref env_cfg) = effective_config.environment {
|
||||
if let Some(ref interp_template) = env_cfg.interpreter_path {
|
||||
let mut vars = std::collections::HashMap::new();
|
||||
vars.insert("env_dir", env_dir.to_string_lossy().to_string());
|
||||
@@ -550,8 +579,18 @@ impl Runtime for ProcessRuntime {
|
||||
e,
|
||||
);
|
||||
} else {
|
||||
// Recreate the environment
|
||||
match self.setup_pack_environment(&pack_dir, &env_dir).await {
|
||||
// Recreate the environment using a temporary ProcessRuntime
|
||||
// with the effective (possibly version-specific) config.
|
||||
let setup_runtime = ProcessRuntime::new(
|
||||
self.runtime_name.clone(),
|
||||
effective_config.clone(),
|
||||
self.packs_base_dir.clone(),
|
||||
self.runtime_envs_dir.clone(),
|
||||
);
|
||||
match setup_runtime
|
||||
.setup_pack_environment(&pack_dir, &env_dir)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
info!(
|
||||
"Successfully recreated environment for pack '{}' at {}",
|
||||
@@ -575,18 +614,37 @@ impl Runtime for ProcessRuntime {
|
||||
}
|
||||
}
|
||||
|
||||
let interpreter = self.resolve_interpreter(&pack_dir, env_dir_opt);
|
||||
let interpreter = effective_config.resolve_interpreter_with_env(&pack_dir, env_dir_opt);
|
||||
|
||||
info!(
|
||||
"Resolved interpreter: {} (env_dir: {}, env_exists: {}, pack_dir: {})",
|
||||
"Resolved interpreter: {} (env_dir: {}, env_exists: {}, pack_dir: {}, version: {})",
|
||||
interpreter.display(),
|
||||
env_dir.display(),
|
||||
env_dir.exists(),
|
||||
pack_dir.display(),
|
||||
context
|
||||
.selected_runtime_version
|
||||
.as_deref()
|
||||
.unwrap_or("default"),
|
||||
);
|
||||
|
||||
// Prepare environment and parameters according to delivery method
|
||||
let mut env = context.env.clone();
|
||||
|
||||
// Inject runtime-specific environment variables from execution_config.
|
||||
// These are template-based (e.g., NODE_PATH={env_dir}/node_modules) and
|
||||
// resolved against the current pack/env directories.
|
||||
if !effective_config.env_vars.is_empty() {
|
||||
let vars = effective_config.build_template_vars_with_env(&pack_dir, env_dir_opt);
|
||||
for (key, value_template) in &effective_config.env_vars {
|
||||
let resolved = RuntimeExecutionConfig::resolve_template(value_template, &vars);
|
||||
debug!(
|
||||
"Setting runtime env var: {}={} (template: {})",
|
||||
key, resolved, value_template
|
||||
);
|
||||
env.insert(key.clone(), resolved);
|
||||
}
|
||||
}
|
||||
let param_config = ParameterDeliveryConfig {
|
||||
delivery: context.parameter_delivery,
|
||||
format: context.parameter_format,
|
||||
@@ -614,7 +672,7 @@ impl Runtime for ProcessRuntime {
|
||||
debug!("Executing file: {}", code_path.display());
|
||||
process_executor::build_action_command(
|
||||
&interpreter,
|
||||
&self.config.interpreter.args,
|
||||
&effective_config.interpreter.args,
|
||||
code_path,
|
||||
working_dir,
|
||||
&env,
|
||||
@@ -635,7 +693,7 @@ impl Runtime for ProcessRuntime {
|
||||
debug!("Executing action file: {}", action_file.display());
|
||||
process_executor::build_action_command(
|
||||
&interpreter,
|
||||
&self.config.interpreter.args,
|
||||
&effective_config.interpreter.args,
|
||||
&action_file,
|
||||
working_dir,
|
||||
&env,
|
||||
@@ -781,6 +839,7 @@ mod tests {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -813,6 +872,7 @@ mod tests {
|
||||
"{manifest_path}".to_string(),
|
||||
],
|
||||
}),
|
||||
env_vars: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -837,6 +897,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024,
|
||||
max_stderr_bytes: 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -868,6 +931,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: Some(PathBuf::from("/tmp/packs/mypack/actions/hello.py")),
|
||||
runtime_name: None,
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024,
|
||||
max_stderr_bytes: 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -899,6 +965,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: Some(PathBuf::from("/tmp/packs/mypack/actions/hello.sh")),
|
||||
runtime_name: None,
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024,
|
||||
max_stderr_bytes: 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -986,6 +1055,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: Some(script_path),
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024 * 1024,
|
||||
max_stderr_bytes: 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -1018,6 +1090,7 @@ mod tests {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: HashMap::new(),
|
||||
};
|
||||
|
||||
let runtime = ProcessRuntime::new(
|
||||
@@ -1039,6 +1112,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: Some(script_path),
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024 * 1024,
|
||||
max_stderr_bytes: 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -1074,6 +1150,9 @@ mod tests {
|
||||
code: Some("echo 'inline shell code'".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024 * 1024,
|
||||
max_stderr_bytes: 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -1121,6 +1200,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024 * 1024,
|
||||
max_stderr_bytes: 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -1226,6 +1308,9 @@ mod tests {
|
||||
code: None,
|
||||
code_path: Some(script_path),
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 1024 * 1024,
|
||||
max_stderr_bytes: 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
|
||||
@@ -665,6 +665,9 @@ def run(x, y):
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -701,6 +704,9 @@ def run():
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -737,6 +743,9 @@ def run():
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -786,6 +795,9 @@ def run():
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
|
||||
@@ -615,6 +615,9 @@ mod tests {
|
||||
code: Some("echo 'Hello, World!'".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -648,6 +651,9 @@ mod tests {
|
||||
code: Some("echo \"Hello, $name!\"".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -676,6 +682,9 @@ mod tests {
|
||||
code: Some("sleep 10".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -706,6 +715,9 @@ mod tests {
|
||||
code: Some("exit 1".to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -751,6 +763,9 @@ echo "missing=$missing"
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -791,6 +806,9 @@ echo '{"id": 3, "name": "Charlie"}'
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -854,6 +872,9 @@ printf '{"status_code":200,"body":"hello","json":{\n "args": {\n "hello": "w
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
@@ -906,6 +927,9 @@ echo '{"result": "success", "count": 42}'
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_common::models::ParameterDelivery::default(),
|
||||
|
||||
@@ -2,6 +2,16 @@
|
||||
//!
|
||||
//! Main service orchestration for the Attune Worker Service.
|
||||
//! Manages worker registration, heartbeat, message consumption, and action execution.
|
||||
//!
|
||||
//! ## Startup Sequence
|
||||
//!
|
||||
//! 1. Connect to database and message queue
|
||||
//! 2. Load runtimes from database → create `ProcessRuntime` instances
|
||||
//! 3. Register worker and set up MQ infrastructure
|
||||
//! 4. **Verify runtime versions** — run verification commands for each registered
|
||||
//! `RuntimeVersion` to determine which are available on this host/container
|
||||
//! 5. **Set up runtime environments** — create per-version environments for packs
|
||||
//! 6. Start heartbeat, execution consumer, and pack registration consumer
|
||||
|
||||
use attune_common::config::Config;
|
||||
use attune_common::db::Database;
|
||||
@@ -13,6 +23,7 @@ use attune_common::mq::{
|
||||
PackRegisteredPayload, Publisher, PublisherConfig,
|
||||
};
|
||||
use attune_common::repositories::{execution::ExecutionRepository, FindById};
|
||||
use attune_common::runtime_detection::runtime_in_filter;
|
||||
use chrono::Utc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
@@ -34,6 +45,7 @@ use crate::runtime::process::ProcessRuntime;
|
||||
use crate::runtime::shell::ShellRuntime;
|
||||
use crate::runtime::RuntimeRegistry;
|
||||
use crate::secrets::SecretManager;
|
||||
use crate::version_verify;
|
||||
|
||||
use attune_common::repositories::runtime::RuntimeRepository;
|
||||
use attune_common::repositories::List;
|
||||
@@ -187,9 +199,11 @@ impl WorkerService {
|
||||
for rt in executable_runtimes {
|
||||
let rt_name = rt.name.to_lowercase();
|
||||
|
||||
// Apply filter if ATTUNE_WORKER_RUNTIMES is set
|
||||
// Apply filter if ATTUNE_WORKER_RUNTIMES is set.
|
||||
// Uses alias-aware matching so that e.g. filter "node"
|
||||
// matches DB runtime name "Node.js" (lowercased to "node.js").
|
||||
if let Some(ref filter) = runtime_filter {
|
||||
if !filter.contains(&rt_name) {
|
||||
if !runtime_in_filter(&rt_name, filter) {
|
||||
debug!(
|
||||
"Skipping runtime '{}' (not in ATTUNE_WORKER_RUNTIMES filter)",
|
||||
rt_name
|
||||
@@ -353,9 +367,15 @@ impl WorkerService {
|
||||
})?;
|
||||
info!("Worker-specific message queue infrastructure setup completed");
|
||||
|
||||
// Verify which runtime versions are available on this system.
|
||||
// This updates the `available` flag in the database so that
|
||||
// `select_best_version()` only considers genuinely present versions.
|
||||
self.verify_runtime_versions().await;
|
||||
|
||||
// Proactively set up runtime environments for all registered packs.
|
||||
// This runs before we start consuming execution messages so that
|
||||
// environments are ready by the time the first execution arrives.
|
||||
// Now version-aware: creates per-version environments where needed.
|
||||
self.scan_and_setup_environments().await;
|
||||
|
||||
// Start heartbeat
|
||||
@@ -380,6 +400,33 @@ impl WorkerService {
|
||||
/// 3. Wait for in-flight tasks with timeout
|
||||
/// 4. Close MQ connection
|
||||
/// 5. Close DB connection
|
||||
/// Verify which runtime versions are available on this host/container.
|
||||
///
|
||||
/// Runs each version's verification commands (from `distributions` JSONB)
|
||||
/// and updates the `available` flag in the database. This ensures that
|
||||
/// `select_best_version()` only considers versions whose interpreters
|
||||
/// are genuinely present.
|
||||
async fn verify_runtime_versions(&self) {
|
||||
let filter_refs: Option<Vec<String>> = self.runtime_filter.clone();
|
||||
let filter_slice: Option<&[String]> = filter_refs.as_deref();
|
||||
|
||||
let result = version_verify::verify_all_runtime_versions(&self.db_pool, filter_slice).await;
|
||||
|
||||
if !result.errors.is_empty() {
|
||||
warn!(
|
||||
"Runtime version verification completed with {} error(s): {:?}",
|
||||
result.errors.len(),
|
||||
result.errors,
|
||||
);
|
||||
} else {
|
||||
info!(
|
||||
"Runtime version verification complete: {} checked, \
|
||||
{} available, {} unavailable",
|
||||
result.total_checked, result.available, result.unavailable,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Scan all registered packs and create missing runtime environments.
|
||||
async fn scan_and_setup_environments(&self) {
|
||||
let filter_refs: Option<Vec<String>> = self.runtime_filter.clone();
|
||||
|
||||
485
crates/worker/src/version_verify.rs
Normal file
485
crates/worker/src/version_verify.rs
Normal file
@@ -0,0 +1,485 @@
|
||||
//! Runtime Version Verification
|
||||
//!
|
||||
//! At worker startup, this module verifies which runtime versions are actually
|
||||
//! available on the system by running each version's verification commands
|
||||
//! (from the `distributions` JSONB column). Versions that pass verification
|
||||
//! are marked `available = true`; those that fail are marked `available = false`.
|
||||
//!
|
||||
//! This ensures the worker has an accurate picture of what it can execute,
|
||||
//! and `select_best_version()` only considers versions whose interpreters
|
||||
//! are genuinely present on this particular host/container.
|
||||
|
||||
use attune_common::repositories::List;
|
||||
use sqlx::PgPool;
|
||||
use std::time::Duration;
|
||||
use tokio::process::Command;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
use attune_common::models::RuntimeVersion;
|
||||
use attune_common::repositories::runtime_version::RuntimeVersionRepository;
|
||||
use attune_common::runtime_detection::runtime_in_filter;
|
||||
|
||||
/// Result of verifying all runtime versions at startup.
|
||||
#[derive(Debug)]
|
||||
pub struct VersionVerificationResult {
|
||||
/// Total number of versions checked.
|
||||
pub total_checked: usize,
|
||||
/// Number of versions marked as available.
|
||||
pub available: usize,
|
||||
/// Number of versions marked as unavailable.
|
||||
pub unavailable: usize,
|
||||
/// Errors encountered during verification (non-fatal).
|
||||
pub errors: Vec<String>,
|
||||
}
|
||||
|
||||
/// A single verification command extracted from the `distributions` JSONB.
|
||||
#[derive(Debug)]
|
||||
struct VerificationCommand {
|
||||
binary: String,
|
||||
args: Vec<String>,
|
||||
expected_exit_code: i32,
|
||||
pattern: Option<String>,
|
||||
#[allow(dead_code)]
|
||||
priority: i32,
|
||||
}
|
||||
|
||||
/// Verify all registered runtime versions and update their `available` flag.
|
||||
///
|
||||
/// For each `RuntimeVersion` row in the database:
|
||||
/// 1. Extract verification commands from `distributions.verification.commands`
|
||||
/// 2. Run each command (in priority order) until one succeeds
|
||||
/// 3. Update `available` and `verified_at` in the database
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `pool` - Database connection pool
|
||||
/// * `runtime_filter` - Optional runtime name filter (from `ATTUNE_WORKER_RUNTIMES`)
|
||||
pub async fn verify_all_runtime_versions(
|
||||
pool: &PgPool,
|
||||
runtime_filter: Option<&[String]>,
|
||||
) -> VersionVerificationResult {
|
||||
info!("Starting runtime version verification");
|
||||
|
||||
let mut result = VersionVerificationResult {
|
||||
total_checked: 0,
|
||||
available: 0,
|
||||
unavailable: 0,
|
||||
errors: Vec::new(),
|
||||
};
|
||||
|
||||
// Load all runtime versions
|
||||
let versions: Vec<RuntimeVersion> = match RuntimeVersionRepository::list(pool).await {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
let msg = format!("Failed to load runtime versions from database: {}", e);
|
||||
warn!("{}", msg);
|
||||
result.errors.push(msg);
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
if versions.is_empty() {
|
||||
debug!("No runtime versions registered, skipping verification");
|
||||
return result;
|
||||
}
|
||||
|
||||
info!("Found {} runtime version(s) to verify", versions.len());
|
||||
|
||||
for version in &versions {
|
||||
// Apply runtime filter: extract the runtime base name from the ref
|
||||
// e.g., "core.python" → "python"
|
||||
let rt_base_name = version
|
||||
.runtime_ref
|
||||
.split('.')
|
||||
.last()
|
||||
.unwrap_or(&version.runtime_ref)
|
||||
.to_lowercase();
|
||||
|
||||
if let Some(filter) = runtime_filter {
|
||||
if !runtime_in_filter(&rt_base_name, filter) {
|
||||
debug!(
|
||||
"Skipping version '{}' of runtime '{}' (not in worker runtime filter)",
|
||||
version.version, version.runtime_ref,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
result.total_checked += 1;
|
||||
|
||||
let is_available = verify_single_version(version).await;
|
||||
|
||||
// Update the database
|
||||
match RuntimeVersionRepository::set_availability(pool, version.id, is_available).await {
|
||||
Ok(_) => {
|
||||
if is_available {
|
||||
info!(
|
||||
"Runtime version '{}' {} is available",
|
||||
version.runtime_ref, version.version,
|
||||
);
|
||||
result.available += 1;
|
||||
} else {
|
||||
info!(
|
||||
"Runtime version '{}' {} is NOT available on this system",
|
||||
version.runtime_ref, version.version,
|
||||
);
|
||||
result.unavailable += 1;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let msg = format!(
|
||||
"Failed to update availability for version '{}' {}: {}",
|
||||
version.runtime_ref, version.version, e,
|
||||
);
|
||||
warn!("{}", msg);
|
||||
result.errors.push(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Runtime version verification complete: {} checked, {} available, {} unavailable, {} error(s)",
|
||||
result.total_checked,
|
||||
result.available,
|
||||
result.unavailable,
|
||||
result.errors.len(),
|
||||
);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Verify a single runtime version by running its verification commands.
|
||||
///
|
||||
/// Returns `true` if at least one verification command succeeds.
|
||||
async fn verify_single_version(version: &RuntimeVersion) -> bool {
|
||||
let commands = extract_verification_commands(&version.distributions);
|
||||
|
||||
if commands.is_empty() {
|
||||
// No verification commands — try using the version's execution_config
|
||||
// interpreter binary with --version as a basic check.
|
||||
let exec_config = version.parsed_execution_config();
|
||||
let binary = &exec_config.interpreter.binary;
|
||||
if binary.is_empty() {
|
||||
debug!(
|
||||
"No verification commands and no interpreter for '{}' {}. \
|
||||
Assuming available (will fail at execution time if not).",
|
||||
version.runtime_ref, version.version,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
debug!(
|
||||
"No verification commands for '{}' {}. \
|
||||
Falling back to '{} --version' check.",
|
||||
version.runtime_ref, version.version, binary,
|
||||
);
|
||||
|
||||
return run_basic_binary_check(binary).await;
|
||||
}
|
||||
|
||||
// Run commands in priority order (lowest priority number = highest priority)
|
||||
for cmd in &commands {
|
||||
match run_verification_command(cmd).await {
|
||||
Ok(true) => {
|
||||
debug!(
|
||||
"Verification passed for '{}' {} using binary '{}'",
|
||||
version.runtime_ref, version.version, cmd.binary,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
Ok(false) => {
|
||||
debug!(
|
||||
"Verification failed for '{}' {} using binary '{}' \
|
||||
(pattern mismatch or non-zero exit)",
|
||||
version.runtime_ref, version.version, cmd.binary,
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
debug!(
|
||||
"Verification command '{}' for '{}' {} failed: {}",
|
||||
cmd.binary, version.runtime_ref, version.version, e,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Extract verification commands from the `distributions` JSONB.
|
||||
///
|
||||
/// Expected structure:
|
||||
/// ```json
|
||||
/// {
|
||||
/// "verification": {
|
||||
/// "commands": [
|
||||
/// {
|
||||
/// "binary": "python3.12",
|
||||
/// "args": ["--version"],
|
||||
/// "exit_code": 0,
|
||||
/// "pattern": "Python 3\\.12\\.",
|
||||
/// "priority": 1
|
||||
/// }
|
||||
/// ]
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
fn extract_verification_commands(distributions: &serde_json::Value) -> Vec<VerificationCommand> {
|
||||
let mut commands = Vec::new();
|
||||
|
||||
let cmds = match distributions
|
||||
.get("verification")
|
||||
.and_then(|v| v.get("commands"))
|
||||
.and_then(|v| v.as_array())
|
||||
{
|
||||
Some(arr) => arr,
|
||||
None => return commands,
|
||||
};
|
||||
|
||||
for cmd_val in cmds {
|
||||
let binary = match cmd_val.get("binary").and_then(|v| v.as_str()) {
|
||||
Some(b) => b.to_string(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
let args: Vec<String> = cmd_val
|
||||
.get("args")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(String::from))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let expected_exit_code = cmd_val
|
||||
.get("exit_code")
|
||||
.and_then(|v| v.as_i64())
|
||||
.unwrap_or(0) as i32;
|
||||
|
||||
let pattern = cmd_val
|
||||
.get("pattern")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(String::from);
|
||||
|
||||
let priority = cmd_val
|
||||
.get("priority")
|
||||
.and_then(|v| v.as_i64())
|
||||
.unwrap_or(100) as i32;
|
||||
|
||||
commands.push(VerificationCommand {
|
||||
binary,
|
||||
args,
|
||||
expected_exit_code,
|
||||
pattern,
|
||||
priority,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by priority (lowest number = highest priority)
|
||||
commands.sort_by_key(|c| c.priority);
|
||||
commands
|
||||
}
|
||||
|
||||
/// Run a single verification command and check exit code + output pattern.
|
||||
async fn run_verification_command(cmd: &VerificationCommand) -> std::result::Result<bool, String> {
|
||||
let output = Command::new(&cmd.binary)
|
||||
.args(&cmd.args)
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to spawn '{}': {}", cmd.binary, e))?;
|
||||
|
||||
let result = tokio::time::timeout(Duration::from_secs(10), output.wait_with_output())
|
||||
.await
|
||||
.map_err(|_| format!("Verification command '{}' timed out after 10s", cmd.binary))?
|
||||
.map_err(|e| format!("Failed to wait for '{}': {}", cmd.binary, e))?;
|
||||
|
||||
// Check exit code
|
||||
let actual_exit = result.status.code().unwrap_or(-1);
|
||||
if actual_exit != cmd.expected_exit_code {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
// Check output pattern if specified
|
||||
if let Some(ref pattern) = cmd.pattern {
|
||||
let stdout = String::from_utf8_lossy(&result.stdout);
|
||||
let stderr = String::from_utf8_lossy(&result.stderr);
|
||||
let combined = format!("{}{}", stdout, stderr);
|
||||
|
||||
let re = regex::Regex::new(pattern)
|
||||
.map_err(|e| format!("Invalid verification pattern '{}': {}", pattern, e))?;
|
||||
|
||||
if !re.is_match(&combined) {
|
||||
debug!(
|
||||
"Pattern '{}' did not match output of '{}': stdout='{}', stderr='{}'",
|
||||
pattern,
|
||||
cmd.binary,
|
||||
stdout.trim(),
|
||||
stderr.trim(),
|
||||
);
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Basic binary availability check: run `binary --version` and check for exit 0.
|
||||
async fn run_basic_binary_check(binary: &str) -> bool {
|
||||
match Command::new(binary)
|
||||
.arg("--version")
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.kill_on_drop(true)
|
||||
.spawn()
|
||||
{
|
||||
Ok(child) => {
|
||||
match tokio::time::timeout(Duration::from_secs(10), child.wait_with_output()).await {
|
||||
Ok(Ok(output)) => output.status.success(),
|
||||
Ok(Err(e)) => {
|
||||
debug!("Binary check for '{}' failed: {}", binary, e);
|
||||
false
|
||||
}
|
||||
Err(_) => {
|
||||
debug!("Binary check for '{}' timed out", binary);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
debug!("Failed to spawn '{}': {}", binary, e);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    /// Fully-specified commands round-trip with every field intact.
    #[test]
    fn test_extract_verification_commands_full() {
        let dist = json!({
            "verification": {
                "commands": [
                    {
                        "binary": "python3.12",
                        "args": ["--version"],
                        "exit_code": 0,
                        "pattern": "Python 3\\.12\\.",
                        "priority": 1
                    },
                    {
                        "binary": "python3",
                        "args": ["--version"],
                        "exit_code": 0,
                        "pattern": "Python 3\\.12\\.",
                        "priority": 2
                    }
                ]
            }
        });

        let extracted = extract_verification_commands(&dist);
        assert_eq!(extracted.len(), 2);

        // First entry: check every field.
        assert_eq!(extracted[0].binary, "python3.12");
        assert_eq!(extracted[0].args, vec!["--version"]);
        assert_eq!(extracted[0].expected_exit_code, 0);
        assert_eq!(extracted[0].pattern.as_deref(), Some("Python 3\\.12\\."));
        assert_eq!(extracted[0].priority, 1);

        // Second entry keeps its own identity and ordering key.
        assert_eq!(extracted[1].binary, "python3");
        assert_eq!(extracted[1].priority, 2);
    }

    /// An empty distributions object yields no commands.
    #[test]
    fn test_extract_verification_commands_empty() {
        let extracted = extract_verification_commands(&json!({}));
        assert!(extracted.is_empty());
    }

    /// A `verification` object without a `commands` array yields no commands.
    #[test]
    fn test_extract_verification_commands_no_commands_array() {
        let dist = json!({ "verification": {} });
        let extracted = extract_verification_commands(&dist);
        assert!(extracted.is_empty());
    }

    /// Entries lacking the mandatory `binary` key are silently dropped.
    #[test]
    fn test_extract_verification_commands_missing_binary() {
        let dist = json!({
            "verification": {
                "commands": [
                    {
                        "args": ["--version"],
                        "exit_code": 0
                    }
                ]
            }
        });
        let extracted = extract_verification_commands(&dist);
        assert!(extracted.is_empty(), "Commands without binary should be skipped");
    }

    /// A bare `binary` gets defaults: no args, exit 0, no pattern, priority 100.
    #[test]
    fn test_extract_verification_commands_defaults() {
        let dist = json!({
            "verification": {
                "commands": [
                    { "binary": "node" }
                ]
            }
        });

        let extracted = extract_verification_commands(&dist);
        assert_eq!(extracted.len(), 1);

        let cmd = &extracted[0];
        assert_eq!(cmd.binary, "node");
        assert!(cmd.args.is_empty());
        assert_eq!(cmd.expected_exit_code, 0);
        assert!(cmd.pattern.is_none());
        assert_eq!(cmd.priority, 100);
    }

    /// Commands come back ordered by ascending priority regardless of input order.
    #[test]
    fn test_extract_verification_commands_sorted_by_priority() {
        let dist = json!({
            "verification": {
                "commands": [
                    { "binary": "low", "priority": 10 },
                    { "binary": "high", "priority": 1 },
                    { "binary": "mid", "priority": 5 }
                ]
            }
        });

        let extracted = extract_verification_commands(&dist);
        assert_eq!(extracted.len(), 3);

        let order: Vec<&str> = extracted.iter().map(|c| c.binary.as_str()).collect();
        assert_eq!(order[0], "high");
        assert_eq!(order[1], "mid");
        assert_eq!(order[2], "low");
    }

    /// The basic availability probe reports false for a missing binary.
    #[tokio::test]
    async fn test_run_basic_binary_check_nonexistent() {
        // A binary that definitely doesn't exist
        assert!(!run_basic_binary_check("__nonexistent_binary_12345__").await);
    }

    /// A verification command against a missing binary surfaces as an error
    /// (spawn failure), not a false result.
    #[tokio::test]
    async fn test_run_verification_command_nonexistent() {
        let cmd = VerificationCommand {
            binary: "__nonexistent_binary_12345__".to_string(),
            args: vec!["--version".to_string()],
            expected_exit_code: 0,
            pattern: None,
            priority: 1,
        };
        assert!(run_verification_command(&cmd).await.is_err());
    }
}
|
||||
@@ -48,6 +48,7 @@ fn make_python_config() -> RuntimeExecutionConfig {
|
||||
"{manifest_path}".to_string(),
|
||||
],
|
||||
}),
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +61,7 @@ fn make_shell_config() -> RuntimeExecutionConfig {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -76,6 +78,9 @@ fn make_context(action_ref: &str, entry_point: &str, runtime_name: &str) -> Exec
|
||||
code: None,
|
||||
code_path: None,
|
||||
runtime_name: Some(runtime_name.to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
@@ -108,7 +113,10 @@ async fn test_python_venv_creation_via_process_runtime() {
|
||||
.expect("Failed to create venv environment");
|
||||
|
||||
// Verify venv was created at the external runtime_envs location
|
||||
assert!(env_dir.exists(), "Virtualenv directory should exist at external location");
|
||||
assert!(
|
||||
env_dir.exists(),
|
||||
"Virtualenv directory should exist at external location"
|
||||
);
|
||||
|
||||
let venv_python = env_dir.join("bin").join("python3");
|
||||
assert!(
|
||||
@@ -319,11 +327,20 @@ async fn test_multiple_pack_isolation() {
|
||||
// Each pack should have its own venv at the external location
|
||||
assert!(env_dir_a.exists(), "pack_a should have its own venv");
|
||||
assert!(env_dir_b.exists(), "pack_b should have its own venv");
|
||||
assert_ne!(env_dir_a, env_dir_b, "Venvs should be in different directories");
|
||||
assert_ne!(
|
||||
env_dir_a, env_dir_b,
|
||||
"Venvs should be in different directories"
|
||||
);
|
||||
|
||||
// Pack directories should remain clean
|
||||
assert!(!pack_a_dir.join(".venv").exists(), "pack_a dir should not contain .venv");
|
||||
assert!(!pack_b_dir.join(".venv").exists(), "pack_b dir should not contain .venv");
|
||||
assert!(
|
||||
!pack_a_dir.join(".venv").exists(),
|
||||
"pack_a dir should not contain .venv"
|
||||
);
|
||||
assert!(
|
||||
!pack_b_dir.join(".venv").exists(),
|
||||
"pack_b dir should not contain .venv"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
||||
@@ -19,6 +19,7 @@ fn make_python_process_runtime(packs_base_dir: PathBuf) -> ProcessRuntime {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
ProcessRuntime::new("python".to_string(), config, packs_base_dir.clone(), packs_base_dir.join("../runtime_envs"))
|
||||
}
|
||||
@@ -42,6 +43,9 @@ fn make_python_context(
|
||||
code: Some(code.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -121,6 +125,9 @@ done
|
||||
code: Some(code.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 400, // Small limit
|
||||
max_stderr_bytes: 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -258,6 +265,7 @@ async fn test_shell_process_runtime_truncation() {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
let runtime = ProcessRuntime::new("shell".to_string(), config, tmp.path().to_path_buf(), tmp.path().join("runtime_envs"));
|
||||
|
||||
@@ -275,6 +283,9 @@ async fn test_shell_process_runtime_truncation() {
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 500,
|
||||
max_stderr_bytes: 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
|
||||
@@ -20,6 +20,7 @@ fn make_python_process_runtime(packs_base_dir: PathBuf) -> ProcessRuntime {
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
let runtime_envs_dir = packs_base_dir.parent().unwrap_or(&packs_base_dir).join("runtime_envs");
|
||||
ProcessRuntime::new("python".to_string(), config, packs_base_dir, runtime_envs_dir)
|
||||
@@ -68,6 +69,9 @@ print(json.dumps(result))
|
||||
code: Some(code.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -158,6 +162,9 @@ echo "SECURITY_PASS: Secrets not in environment but accessible via get_secret"
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -219,6 +226,9 @@ print(json.dumps({'secret_a': secrets.get('secret_a')}))
|
||||
code: Some(code1.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -261,6 +271,9 @@ print(json.dumps({
|
||||
code: Some(code2.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -312,6 +325,9 @@ print("ok")
|
||||
code: Some(code.to_string()),
|
||||
code_path: None,
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -360,6 +376,9 @@ fi
|
||||
),
|
||||
code_path: None,
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -409,6 +428,7 @@ echo "PASS: No secrets in environment"
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
let runtime = ProcessRuntime::new("shell".to_string(), config, tmp.path().to_path_buf(), tmp.path().join("runtime_envs"));
|
||||
|
||||
@@ -428,6 +448,9 @@ echo "PASS: No secrets in environment"
|
||||
code: None,
|
||||
code_path: Some(actions_dir.join("check_env.sh")),
|
||||
runtime_name: Some("shell".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
@@ -476,6 +499,7 @@ print(json.dumps({"leaked": leaked}))
|
||||
},
|
||||
environment: None,
|
||||
dependencies: None,
|
||||
env_vars: std::collections::HashMap::new(),
|
||||
};
|
||||
let runtime = ProcessRuntime::new("python".to_string(), config, tmp.path().to_path_buf(), tmp.path().join("runtime_envs"));
|
||||
|
||||
@@ -495,6 +519,9 @@ print(json.dumps({"leaked": leaked}))
|
||||
code: None,
|
||||
code_path: Some(actions_dir.join("check_env.py")),
|
||||
runtime_name: Some("python".to_string()),
|
||||
runtime_config_override: None,
|
||||
runtime_env_dir_suffix: None,
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
parameter_delivery: attune_worker::runtime::ParameterDelivery::default(),
|
||||
|
||||
Reference in New Issue
Block a user