working out the worker/execution interface

This commit is contained in:
2026-02-08 12:55:33 -06:00
parent c62f41669d
commit a74e13fa0b
108 changed files with 21162 additions and 674 deletions

View File

@@ -16,6 +16,7 @@ tokio = { workspace = true }
sqlx = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml_ng = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
anyhow = { workspace = true }
@@ -30,6 +31,6 @@ thiserror = { workspace = true }
aes-gcm = { workspace = true }
sha2 = { workspace = true }
base64 = { workspace = true }
tempfile = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }

View File

@@ -27,6 +27,7 @@ pub struct ActionExecutor {
max_stdout_bytes: usize,
max_stderr_bytes: usize,
packs_base_dir: PathBuf,
api_url: String,
}
impl ActionExecutor {
@@ -39,6 +40,7 @@ impl ActionExecutor {
max_stdout_bytes: usize,
max_stderr_bytes: usize,
packs_base_dir: PathBuf,
api_url: String,
) -> Self {
Self {
pool,
@@ -48,6 +50,7 @@ impl ActionExecutor {
max_stdout_bytes,
max_stderr_bytes,
packs_base_dir,
api_url,
}
}
@@ -100,7 +103,16 @@ impl ActionExecutor {
}
// Update execution with result
if result.is_success() {
let is_success = result.is_success();
debug!(
"Execution {} result: exit_code={}, error={:?}, is_success={}",
execution_id,
result.exit_code,
result.error,
is_success
);
if is_success {
self.handle_execution_success(execution_id, &result).await?;
} else {
self.handle_execution_failure(execution_id, Some(&result))
@@ -190,35 +202,63 @@ impl ActionExecutor {
let mut parameters = HashMap::new();
if let Some(config) = &execution.config {
info!("Execution config present: {:?}", config);
// Try to get parameters from config.parameters first
if let Some(params) = config.get("parameters") {
info!("Found config.parameters key");
if let JsonValue::Object(map) = params {
for (key, value) in map {
parameters.insert(key.clone(), value.clone());
}
}
} else if let JsonValue::Object(map) = config {
info!("No config.parameters key, treating entire config as parameters");
// If no parameters key, treat entire config as parameters
// (this handles rule action_params being placed at root level)
for (key, value) in map {
// Skip special keys that aren't action parameters
if key != "context" && key != "env" {
info!("Adding parameter: {} = {:?}", key, value);
parameters.insert(key.clone(), value.clone());
} else {
info!("Skipping special key: {}", key);
}
}
} else {
info!("Config is not an Object, cannot extract parameters");
}
} else {
info!("No execution config present");
}
// Prepare environment variables
let mut env = HashMap::new();
env.insert("ATTUNE_EXECUTION_ID".to_string(), execution.id.to_string());
env.insert(
"ATTUNE_ACTION_REF".to_string(),
execution.action_ref.clone(),
);
info!("Extracted {} parameters: {:?}", parameters.len(), parameters);
if let Some(action_id) = execution.action {
env.insert("ATTUNE_ACTION_ID".to_string(), action_id.to_string());
// Prepare standard environment variables
let mut env = HashMap::new();
// Standard execution context variables (see docs/QUICKREF-execution-environment.md)
env.insert("ATTUNE_EXEC_ID".to_string(), execution.id.to_string());
env.insert("ATTUNE_ACTION".to_string(), execution.action_ref.clone());
env.insert("ATTUNE_API_URL".to_string(), self.api_url.clone());
// TODO: Generate execution-scoped API token
// For now, set placeholder to maintain interface compatibility
env.insert("ATTUNE_API_TOKEN".to_string(), "".to_string());
// Add rule and trigger context if execution was triggered by enforcement
if let Some(enforcement_id) = execution.enforcement {
if let Ok(Some(enforcement)) = sqlx::query_as::<
_,
attune_common::models::event::Enforcement,
>("SELECT * FROM enforcement WHERE id = $1")
.bind(enforcement_id)
.fetch_optional(&self.pool)
.await
{
env.insert("ATTUNE_RULE".to_string(), enforcement.rule_ref);
env.insert("ATTUNE_TRIGGER".to_string(), enforcement.trigger_ref);
}
}
// Add context data as environment variables from config
@@ -341,6 +381,8 @@ impl ActionExecutor {
runtime_name,
max_stdout_bytes: self.max_stdout_bytes,
max_stderr_bytes: self.max_stderr_bytes,
parameter_delivery: action.parameter_delivery,
parameter_format: action.parameter_format,
};
Ok(context)
@@ -392,7 +434,10 @@ impl ActionExecutor {
execution_id: i64,
result: &ExecutionResult,
) -> Result<()> {
info!("Execution {} succeeded", execution_id);
info!(
"Execution {} succeeded (exit_code={}, duration={}ms)",
execution_id, result.exit_code, result.duration_ms
);
// Build comprehensive result with execution metadata
let exec_dir = self.artifact_manager.get_execution_dir(execution_id);
@@ -402,29 +447,15 @@ impl ActionExecutor {
"succeeded": true,
});
// Add log file paths if logs exist
// Include stdout content directly in result
if !result.stdout.is_empty() {
let stdout_path = exec_dir.join("stdout.log");
result_data["stdout_log"] = serde_json::json!(stdout_path.to_string_lossy());
// Include stdout preview (first 1000 chars)
let stdout_preview = if result.stdout.len() > 1000 {
format!("{}...", &result.stdout[..1000])
} else {
result.stdout.clone()
};
result_data["stdout"] = serde_json::json!(stdout_preview);
result_data["stdout"] = serde_json::json!(result.stdout);
}
if !result.stderr.is_empty() {
// Include stderr log path only if stderr is non-empty and non-whitespace
if !result.stderr.trim().is_empty() {
let stderr_path = exec_dir.join("stderr.log");
result_data["stderr_log"] = serde_json::json!(stderr_path.to_string_lossy());
// Include stderr preview (first 1000 chars)
let stderr_preview = if result.stderr.len() > 1000 {
format!("{}...", &result.stderr[..1000])
} else {
result.stderr.clone()
};
result_data["stderr"] = serde_json::json!(stderr_preview);
}
// Include parsed result if available
@@ -450,7 +481,14 @@ impl ActionExecutor {
execution_id: i64,
result: Option<&ExecutionResult>,
) -> Result<()> {
error!("Execution {} failed", execution_id);
if let Some(r) = result {
error!(
"Execution {} failed (exit_code={}, error={:?}, duration={}ms)",
execution_id, r.exit_code, r.error, r.duration_ms
);
} else {
error!("Execution {} failed during preparation", execution_id);
}
let exec_dir = self.artifact_manager.get_execution_dir(execution_id);
let mut result_data = serde_json::json!({
@@ -466,29 +504,15 @@ impl ActionExecutor {
result_data["error"] = serde_json::json!(error);
}
// Add log file paths and previews if logs exist
// Include stdout content directly in result
if !exec_result.stdout.is_empty() {
let stdout_path = exec_dir.join("stdout.log");
result_data["stdout_log"] = serde_json::json!(stdout_path.to_string_lossy());
// Include stdout preview (first 1000 chars)
let stdout_preview = if exec_result.stdout.len() > 1000 {
format!("{}...", &exec_result.stdout[..1000])
} else {
exec_result.stdout.clone()
};
result_data["stdout"] = serde_json::json!(stdout_preview);
result_data["stdout"] = serde_json::json!(exec_result.stdout);
}
if !exec_result.stderr.is_empty() {
// Include stderr log path only if stderr is non-empty and non-whitespace
if !exec_result.stderr.trim().is_empty() {
let stderr_path = exec_dir.join("stderr.log");
result_data["stderr_log"] = serde_json::json!(stderr_path.to_string_lossy());
// Include stderr preview (first 1000 chars)
let stderr_preview = if exec_result.stderr.len() > 1000 {
format!("{}...", &exec_result.stderr[..1000])
} else {
exec_result.stderr.clone()
};
result_data["stderr"] = serde_json::json!(stderr_preview);
}
// Add truncation warnings if applicable
@@ -509,33 +533,23 @@ impl ActionExecutor {
warn!("Execution {} failed without ExecutionResult - this indicates an early/catastrophic failure", execution_id);
// Check if stderr log exists from artifact storage
// Check if stderr log exists and is non-empty from artifact storage
let stderr_path = exec_dir.join("stderr.log");
if stderr_path.exists() {
result_data["stderr_log"] = serde_json::json!(stderr_path.to_string_lossy());
// Try to read a preview if file exists
if let Ok(contents) = tokio::fs::read_to_string(&stderr_path).await {
let preview = if contents.len() > 1000 {
format!("{}...", &contents[..1000])
} else {
contents
};
result_data["stderr"] = serde_json::json!(preview);
if !contents.trim().is_empty() {
result_data["stderr_log"] = serde_json::json!(stderr_path.to_string_lossy());
}
}
}
// Check if stdout log exists from artifact storage
let stdout_path = exec_dir.join("stdout.log");
if stdout_path.exists() {
result_data["stdout_log"] = serde_json::json!(stdout_path.to_string_lossy());
// Try to read a preview if file exists
if let Ok(contents) = tokio::fs::read_to_string(&stdout_path).await {
let preview = if contents.len() > 1000 {
format!("{}...", &contents[..1000])
} else {
contents
};
result_data["stdout"] = serde_json::json!(preview);
if !contents.is_empty() {
result_data["stdout"] = serde_json::json!(contents);
}
}
}
}

View File

@@ -10,7 +10,6 @@ pub mod registration;
pub mod runtime;
pub mod secrets;
pub mod service;
pub mod test_executor;
// Re-export commonly used types
pub use executor::ActionExecutor;
@@ -22,4 +21,5 @@ pub use runtime::{
};
pub use secrets::SecretManager;
pub use service::WorkerService;
pub use test_executor::{TestConfig, TestExecutor};
// Re-export test executor from common (shared business logic)
pub use attune_common::test_executor::{TestConfig, TestExecutor};

View File

@@ -3,6 +3,7 @@
use anyhow::Result;
use attune_common::config::Config;
use clap::Parser;
use tokio::signal::unix::{signal, SignalKind};
use tracing::info;
use attune_worker::service::WorkerService;
@@ -70,8 +71,26 @@ async fn main() -> Result<()> {
info!("Attune Worker Service is ready");
// Run until interrupted
service.run().await?;
// Start the service
service.start().await?;
// Setup signal handlers for graceful shutdown
let mut sigint = signal(SignalKind::interrupt())?;
let mut sigterm = signal(SignalKind::terminate())?;
tokio::select! {
_ = sigint.recv() => {
info!("Received SIGINT signal");
}
_ = sigterm.recv() => {
info!("Received SIGTERM signal");
}
}
info!("Shutting down gracefully...");
// Stop the service and mark worker as inactive
service.stop().await?;
info!("Attune Worker Service shutdown complete");

View File

@@ -7,6 +7,7 @@ pub mod dependency;
pub mod local;
pub mod log_writer;
pub mod native;
pub mod parameter_passing;
pub mod python;
pub mod python_venv;
pub mod shell;
@@ -18,6 +19,7 @@ pub use python::PythonRuntime;
pub use shell::ShellRuntime;
use async_trait::async_trait;
use attune_common::models::{ParameterDelivery, ParameterFormat};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
@@ -29,6 +31,7 @@ pub use dependency::{
DependencySpec, EnvironmentInfo,
};
pub use log_writer::{BoundedLogResult, BoundedLogWriter};
pub use parameter_passing::{ParameterDeliveryConfig, PreparedParameters};
pub use python_venv::PythonVenvManager;
/// Runtime execution result
@@ -108,6 +111,14 @@ pub struct ExecutionContext {
/// Maximum stderr size in bytes (for log truncation)
#[serde(default = "default_max_log_bytes")]
pub max_stderr_bytes: usize,
/// How parameters should be delivered to the action
#[serde(default)]
pub parameter_delivery: ParameterDelivery,
/// Format for parameter serialization
#[serde(default)]
pub parameter_format: ParameterFormat,
}
fn default_max_log_bytes() -> usize {
@@ -133,6 +144,8 @@ impl ExecutionContext {
runtime_name: None,
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: ParameterDelivery::default(),
parameter_format: ParameterFormat::default(),
}
}
}

View File

@@ -4,14 +4,16 @@
//! This runtime is used for Rust binaries and other compiled executables.
use super::{
parameter_passing::{self, ParameterDeliveryConfig},
BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult,
};
use async_trait::async_trait;
use std::path::PathBuf;
use std::process::Stdio;
use std::time::Instant;
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
use tokio::process::Command;
use tokio::time::{timeout, Duration};
use tokio::time::Duration;
use tracing::{debug, info, warn};
/// Native runtime for executing compiled binaries
@@ -35,11 +37,11 @@ impl NativeRuntime {
/// Execute a native binary with parameters and environment variables
async fn execute_binary(
&self,
binary_path: std::path::PathBuf,
parameters: &std::collections::HashMap<String, serde_json::Value>,
binary_path: PathBuf,
secrets: &std::collections::HashMap<String, String>,
env: &std::collections::HashMap<String, String>,
exec_timeout: Option<u64>,
parameters_stdin: Option<&str>,
timeout: Option<u64>,
max_stdout_bytes: usize,
max_stderr_bytes: usize,
) -> RuntimeResult<ExecutionResult> {
@@ -76,22 +78,11 @@ impl NativeRuntime {
cmd.current_dir(work_dir);
}
// Add environment variables
// Add environment variables (including parameter delivery metadata)
for (key, value) in env {
cmd.env(key, value);
}
// Add parameters as environment variables with ATTUNE_ACTION_ prefix
for (key, value) in parameters {
let value_str = match value {
serde_json::Value::String(s) => s.clone(),
serde_json::Value::Number(n) => n.to_string(),
serde_json::Value::Bool(b) => b.to_string(),
_ => serde_json::to_string(value)?,
};
cmd.env(format!("ATTUNE_ACTION_{}", key.to_uppercase()), value_str);
}
// Configure stdio
cmd.stdin(Stdio::piped())
.stdout(Stdio::piped())
@@ -102,29 +93,42 @@ impl NativeRuntime {
.spawn()
.map_err(|e| RuntimeError::ExecutionFailed(format!("Failed to spawn binary: {}", e)))?;
// Write secrets to stdin - if this fails, the process has already started
// so we should continue and capture whatever output we can
let stdin_write_error = if !secrets.is_empty() {
if let Some(mut stdin) = child.stdin.take() {
// Write to stdin - parameters (if using stdin delivery) and/or secrets
// If this fails, the process has already started, so we continue and capture output
let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
let mut error = None;
// Write parameters first if using stdin delivery
if let Some(params_data) = parameters_stdin {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
}
// Write secrets as JSON (always, for backward compatibility)
if error.is_none() && !secrets.is_empty() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
Some(format!("Failed to write secrets to stdin: {}", e))
} else if let Err(e) = stdin.shutdown().await {
Some(format!("Failed to close stdin: {}", e))
} else {
None
error = Some(format!("Failed to write secrets to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
Err(e) => Some(format!("Failed to serialize secrets: {}", e)),
Err(e) => error = Some(format!("Failed to serialize secrets: {}", e)),
}
} else {
None
}
// Close stdin
if let Err(e) = stdin.shutdown().await {
if error.is_none() {
error = Some(format!("Failed to close stdin: {}", e));
}
}
error
} else {
if let Some(stdin) = child.stdin.take() {
drop(stdin); // Close stdin if no secrets
}
None
};
@@ -184,8 +188,8 @@ impl NativeRuntime {
let (stdout_writer, stderr_writer) = tokio::join!(stdout_task, stderr_task);
// Wait for process with timeout
let wait_result = if let Some(timeout_secs) = exec_timeout {
match timeout(Duration::from_secs(timeout_secs), child.wait()).await {
let wait_result = if let Some(timeout_secs) = timeout {
match tokio::time::timeout(Duration::from_secs(timeout_secs), child.wait()).await {
Ok(result) => result,
Err(_) => {
warn!(
@@ -317,10 +321,26 @@ impl Runtime for NativeRuntime {
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
info!(
"Executing native action: {} (execution_id: {})",
context.action_ref, context.execution_id
"Executing native action: {} (execution_id: {}) with parameter delivery: {:?}, format: {:?}",
context.action_ref, context.execution_id, context.parameter_delivery, context.parameter_format
);
// Prepare environment and parameters according to delivery method
let mut env = context.env.clone();
let config = ParameterDeliveryConfig {
delivery: context.parameter_delivery,
format: context.parameter_format,
};
let prepared_params = parameter_passing::prepare_parameters(
&context.parameters,
&mut env,
config,
)?;
// Get stdin content if parameters are delivered via stdin
let parameters_stdin = prepared_params.stdin_content();
// Get the binary path
let binary_path = context.code_path.ok_or_else(|| {
RuntimeError::InvalidAction("Native runtime requires code_path to be set".to_string())
@@ -328,9 +348,9 @@ impl Runtime for NativeRuntime {
self.execute_binary(
binary_path,
&context.parameters,
&context.secrets,
&context.env,
&env,
parameters_stdin,
context.timeout,
context.max_stdout_bytes,
context.max_stderr_bytes,

View File

@@ -0,0 +1,320 @@
//! Parameter Passing Module
//!
//! Provides utilities for formatting and delivering action parameters
//! in different formats (dotenv, JSON, YAML) via different methods
//! (environment variables, stdin, temporary files).
use attune_common::models::{ParameterDelivery, ParameterFormat};
use serde_json::Value as JsonValue;
use std::collections::HashMap;
use std::io::Write;
use std::path::PathBuf;
use tempfile::NamedTempFile;
use tracing::debug;
use super::RuntimeError;
/// Format parameters according to the specified format
pub fn format_parameters(
parameters: &HashMap<String, JsonValue>,
format: ParameterFormat,
) -> Result<String, RuntimeError> {
match format {
ParameterFormat::Dotenv => format_dotenv(parameters),
ParameterFormat::Json => format_json(parameters),
ParameterFormat::Yaml => format_yaml(parameters),
}
}
/// Render parameters as dotenv lines (`key='value'`).
///
/// Parameter names are preserved as-is (case-sensitive). Values are
/// single-quoted with embedded single quotes escaped shell-style
/// (`'` -> `'\''`), so the output can be sourced by a shell.
fn format_dotenv(parameters: &HashMap<String, JsonValue>) -> Result<String, RuntimeError> {
    let rendered: Vec<String> = parameters
        .iter()
        .map(|(key, value)| {
            // Escape single quotes in value
            let escaped = value_to_string(value).replace('\'', "'\\''");
            format!("{}='{}'", key, escaped)
        })
        .collect();
    Ok(rendered.join("\n"))
}
/// Render parameters as pretty-printed JSON.
///
/// Serialization failures are surfaced as `RuntimeError::ExecutionFailed`.
fn format_json(parameters: &HashMap<String, JsonValue>) -> Result<String, RuntimeError> {
    match serde_json::to_string_pretty(parameters) {
        Ok(json) => Ok(json),
        Err(e) => Err(RuntimeError::ExecutionFailed(format!(
            "Failed to serialize parameters to JSON: {}",
            e
        ))),
    }
}
/// Render parameters as a YAML document.
///
/// Serialization failures are surfaced as `RuntimeError::ExecutionFailed`.
fn format_yaml(parameters: &HashMap<String, JsonValue>) -> Result<String, RuntimeError> {
    match serde_yaml_ng::to_string(parameters) {
        Ok(yaml) => Ok(yaml),
        Err(e) => Err(RuntimeError::ExecutionFailed(format!(
            "Failed to serialize parameters to YAML: {}",
            e
        ))),
    }
}
/// Convert a JSON value to its plain-string form for dotenv rendering.
///
/// Scalars are rendered without quotes; `null` becomes the empty string;
/// arrays/objects fall back to compact JSON (empty string if that fails).
fn value_to_string(value: &JsonValue) -> String {
    match value {
        JsonValue::Null => String::new(),
        JsonValue::Bool(b) => b.to_string(),
        JsonValue::Number(n) => n.to_string(),
        JsonValue::String(s) => s.clone(),
        other => serde_json::to_string(other).unwrap_or_default(),
    }
}
/// Create a temporary file containing the formatted parameters.
///
/// The payload is written and flushed first, and only then is the file
/// restricted to owner read-only (0o400 on Unix). The original order
/// (chmod before write) only worked because an already-open descriptor
/// keeps its write access after a chmod; writing first removes that
/// subtle dependency.
///
/// # Errors
/// `RuntimeError::ExecutionFailed` if serialization fails, or
/// `RuntimeError::IoError` for any filesystem failure.
pub fn create_parameter_file(
    parameters: &HashMap<String, JsonValue>,
    format: ParameterFormat,
) -> Result<NamedTempFile, RuntimeError> {
    let formatted = format_parameters(parameters, format)?;
    let mut temp_file = NamedTempFile::new().map_err(RuntimeError::IoError)?;
    // Write the payload while the file is still owner-writable.
    temp_file
        .write_all(formatted.as_bytes())
        .map_err(RuntimeError::IoError)?;
    temp_file.flush().map_err(RuntimeError::IoError)?;
    // Now lock the file down so the action process cannot modify it.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let mut perms = temp_file
            .as_file()
            .metadata()
            .map_err(RuntimeError::IoError)?
            .permissions();
        perms.set_mode(0o400); // Read-only for owner
        temp_file
            .as_file()
            .set_permissions(perms)
            .map_err(RuntimeError::IoError)?;
    }
    debug!(
        "Created parameter file at {:?} with format {:?}",
        temp_file.path(),
        format
    );
    Ok(temp_file)
}
/// Parameter delivery configuration
///
/// Bundles the two knobs controlling how an action receives its
/// parameters: the transport (`delivery`) and the serialization
/// (`format`).
#[derive(Debug, Clone)]
pub struct ParameterDeliveryConfig {
    // Transport used to hand parameters to the action (e.g. stdin, file).
    pub delivery: ParameterDelivery,
    // Serialization format for the parameter payload (dotenv/JSON/YAML).
    pub format: ParameterFormat,
}
/// Prepared parameters ready for execution
#[derive(Debug)]
pub enum PreparedParameters {
    /// Parameters are in environment variables
    Environment,
    /// Parameters will be passed via stdin
    Stdin(String),
    /// Parameters are in a temporary file
    File {
        path: PathBuf,
        // Held only to keep the temp file alive for the duration of the
        // execution; dropping the handle deletes the file on disk.
        #[allow(dead_code)]
        temp_file: NamedTempFile,
    },
}
impl PreparedParameters {
    /// Path of the parameter file, or `None` for non-file delivery.
    pub fn file_path(&self) -> Option<&PathBuf> {
        if let PreparedParameters::File { path, .. } = self {
            Some(path)
        } else {
            None
        }
    }

    /// Stdin payload, or `None` for non-stdin delivery.
    pub fn stdin_content(&self) -> Option<&str> {
        if let PreparedParameters::Stdin(content) = self {
            Some(content.as_str())
        } else {
            None
        }
    }
}
/// Prepare parameters for delivery according to the specified method and format
///
/// Serializes `parameters` with `config.format` and stages them for the
/// chosen transport, recording discovery metadata in `env`:
/// `ATTUNE_PARAMETER_DELIVERY` and `ATTUNE_PARAMETER_FORMAT` always, plus
/// `ATTUNE_PARAMETER_FILE` for file delivery.
///
/// NOTE(review): only `Stdin` and `File` delivery arms are visible here,
/// yet `PreparedParameters` has an `Environment` variant. If
/// `ParameterDelivery` also carries an environment-style variant it must
/// be matched here — confirm against the enum definition.
pub fn prepare_parameters(
    parameters: &HashMap<String, JsonValue>,
    env: &mut HashMap<String, String>,
    config: ParameterDeliveryConfig,
) -> Result<PreparedParameters, RuntimeError> {
    match config.delivery {
        ParameterDelivery::Stdin => {
            // Format parameters for stdin
            let formatted = format_parameters(parameters, config.format)?;
            // Add environment variables to indicate delivery method
            env.insert(
                "ATTUNE_PARAMETER_DELIVERY".to_string(),
                "stdin".to_string(),
            );
            env.insert(
                "ATTUNE_PARAMETER_FORMAT".to_string(),
                config.format.to_string(),
            );
            Ok(PreparedParameters::Stdin(formatted))
        }
        ParameterDelivery::File => {
            // Create temporary file with parameters
            let temp_file = create_parameter_file(parameters, config.format)?;
            let path = temp_file.path().to_path_buf();
            // Add environment variables to indicate delivery method and file location
            env.insert(
                "ATTUNE_PARAMETER_DELIVERY".to_string(),
                "file".to_string(),
            );
            env.insert(
                "ATTUNE_PARAMETER_FORMAT".to_string(),
                config.format.to_string(),
            );
            env.insert(
                "ATTUNE_PARAMETER_FILE".to_string(),
                path.to_string_lossy().to_string(),
            );
            // The NamedTempFile handle travels with the variant so the
            // file outlives this function (see PreparedParameters::File).
            Ok(PreparedParameters::File { path, temp_file })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_format_dotenv() {
        let mut params = HashMap::new();
        params.insert("message".to_string(), json!("Hello, World!"));
        params.insert("count".to_string(), json!(42));
        params.insert("enabled".to_string(), json!(true));
        let result = format_dotenv(&params).unwrap();
        assert!(result.contains("message='Hello, World!'"));
        assert!(result.contains("count='42'"));
        assert!(result.contains("enabled='true'"));
    }

    #[test]
    fn test_format_dotenv_escaping() {
        let mut params = HashMap::new();
        params.insert("message".to_string(), json!("It's a test"));
        let result = format_dotenv(&params).unwrap();
        // Single quotes are escaped shell-style: ' -> '\''
        assert!(result.contains("message='It'\\''s a test'"));
    }

    #[test]
    fn test_format_json() {
        let mut params = HashMap::new();
        params.insert("message".to_string(), json!("Hello"));
        params.insert("count".to_string(), json!(42));
        let result = format_json(&params).unwrap();
        let parsed: HashMap<String, JsonValue> = serde_json::from_str(&result).unwrap();
        assert_eq!(parsed.get("message"), Some(&json!("Hello")));
        assert_eq!(parsed.get("count"), Some(&json!(42)));
    }

    #[test]
    fn test_format_yaml() {
        let mut params = HashMap::new();
        params.insert("message".to_string(), json!("Hello"));
        params.insert("count".to_string(), json!(42));
        let result = format_yaml(&params).unwrap();
        assert!(result.contains("message:"));
        assert!(result.contains("Hello"));
        assert!(result.contains("count:"));
        assert!(result.contains("42"));
    }

    // Fixed: this function previously carried a duplicated `#[test]`
    // attribute, which modern rustc rejects.
    #[test]
    fn test_create_parameter_file() {
        let mut params = HashMap::new();
        params.insert("key".to_string(), json!("value"));
        let temp_file = create_parameter_file(&params, ParameterFormat::Json).unwrap();
        let content = std::fs::read_to_string(temp_file.path()).unwrap();
        assert!(content.contains("key"));
        assert!(content.contains("value"));
    }

    #[test]
    fn test_prepare_parameters_stdin() {
        let mut params = HashMap::new();
        params.insert("test".to_string(), json!("value"));
        let mut env = HashMap::new();
        let config = ParameterDeliveryConfig {
            delivery: ParameterDelivery::Stdin,
            format: ParameterFormat::Json,
        };
        let result = prepare_parameters(&params, &mut env, config).unwrap();
        assert!(matches!(result, PreparedParameters::Stdin(_)));
        assert_eq!(
            env.get("ATTUNE_PARAMETER_DELIVERY"),
            Some(&"stdin".to_string())
        );
        assert_eq!(
            env.get("ATTUNE_PARAMETER_FORMAT"),
            Some(&"json".to_string())
        );
    }

    #[test]
    fn test_prepare_parameters_file() {
        let mut params = HashMap::new();
        params.insert("test".to_string(), json!("value"));
        let mut env = HashMap::new();
        let config = ParameterDeliveryConfig {
            delivery: ParameterDelivery::File,
            format: ParameterFormat::Yaml,
        };
        let result = prepare_parameters(&params, &mut env, config).unwrap();
        assert!(matches!(result, PreparedParameters::File { .. }));
        assert_eq!(
            env.get("ATTUNE_PARAMETER_DELIVERY"),
            Some(&"file".to_string())
        );
        assert_eq!(
            env.get("ATTUNE_PARAMETER_FORMAT"),
            Some(&"yaml".to_string())
        );
        assert!(env.contains_key("ATTUNE_PARAMETER_FILE"));
    }
}

View File

@@ -3,6 +3,7 @@
//! Executes shell scripts and commands using subprocess execution.
use super::{
parameter_passing::{self, ParameterDeliveryConfig},
BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult,
};
use async_trait::async_trait;
@@ -53,6 +54,7 @@ impl ShellRuntime {
&self,
mut cmd: Command,
secrets: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
max_stderr_bytes: usize,
@@ -66,22 +68,36 @@ impl ShellRuntime {
.stderr(Stdio::piped())
.spawn()?;
// Write secrets to stdin - if this fails, the process has already started
// so we should continue and capture whatever output we can
// Write to stdin - parameters (if using stdin delivery) and/or secrets
// If this fails, the process has already started, so we continue and capture output
let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
Some(format!("Failed to write secrets to stdin: {}", e))
} else if let Err(e) = stdin.write_all(b"\n").await {
Some(format!("Failed to write newline to stdin: {}", e))
} else {
drop(stdin);
None
}
let mut error = None;
// Write parameters first if using stdin delivery
if let Some(params_data) = parameters_stdin {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
Err(e) => Some(format!("Failed to serialize secrets: {}", e)),
}
// Write secrets as JSON (always, for backward compatibility)
if error.is_none() && !secrets.is_empty() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
error = Some(format!("Failed to write secrets to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
Err(e) => error = Some(format!("Failed to serialize secrets: {}", e)),
}
}
drop(stdin);
error
} else {
None
};
@@ -315,9 +331,10 @@ impl ShellRuntime {
/// Execute shell script directly
async fn execute_shell_code(
&self,
script: String,
code: String,
secrets: &std::collections::HashMap<String, String>,
env: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
max_stderr_bytes: usize,
@@ -329,7 +346,7 @@ impl ShellRuntime {
// Build command
let mut cmd = Command::new(&self.shell_path);
cmd.arg("-c").arg(&script);
cmd.arg("-c").arg(&code);
// Add environment variables
for (key, value) in env {
@@ -339,6 +356,7 @@ impl ShellRuntime {
self.execute_with_streaming(
cmd,
secrets,
parameters_stdin,
timeout_secs,
max_stdout_bytes,
max_stderr_bytes,
@@ -349,22 +367,23 @@ impl ShellRuntime {
/// Execute shell script from file
async fn execute_shell_file(
&self,
code_path: PathBuf,
script_path: PathBuf,
secrets: &std::collections::HashMap<String, String>,
env: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
max_stderr_bytes: usize,
) -> RuntimeResult<ExecutionResult> {
debug!(
"Executing shell file: {:?} with {} secrets",
code_path,
script_path,
secrets.len()
);
// Build command
let mut cmd = Command::new(&self.shell_path);
cmd.arg(&code_path);
cmd.arg(&script_path);
// Add environment variables
for (key, value) in env {
@@ -374,6 +393,7 @@ impl ShellRuntime {
self.execute_with_streaming(
cmd,
secrets,
parameters_stdin,
timeout_secs,
max_stdout_bytes,
max_stderr_bytes,
@@ -412,29 +432,49 @@ impl Runtime for ShellRuntime {
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
info!(
"Executing shell action: {} (execution_id: {})",
context.action_ref, context.execution_id
"Executing shell action: {} (execution_id: {}) with parameter delivery: {:?}, format: {:?}",
context.action_ref, context.execution_id, context.parameter_delivery, context.parameter_format
);
info!(
"Action parameters (count: {}): {:?}",
context.parameters.len(),
context.parameters
);
// Prepare environment and parameters according to delivery method
let mut env = context.env.clone();
let config = ParameterDeliveryConfig {
delivery: context.parameter_delivery,
format: context.parameter_format,
};
let prepared_params = parameter_passing::prepare_parameters(
&context.parameters,
&mut env,
config,
)?;
// Get stdin content if parameters are delivered via stdin
let parameters_stdin = prepared_params.stdin_content();
if let Some(stdin_data) = parameters_stdin {
info!(
"Parameters to be sent via stdin (length: {} bytes):\n{}",
stdin_data.len(),
stdin_data
);
} else {
info!("No parameters will be sent via stdin");
}
// If code_path is provided, execute the file directly
if let Some(code_path) = &context.code_path {
// Merge parameters into environment variables with ATTUNE_ACTION_ prefix
let mut env = context.env.clone();
for (key, value) in &context.parameters {
let value_str = match value {
serde_json::Value::String(s) => s.clone(),
serde_json::Value::Number(n) => n.to_string(),
serde_json::Value::Bool(b) => b.to_string(),
_ => serde_json::to_string(value)?,
};
env.insert(format!("ATTUNE_ACTION_{}", key.to_uppercase()), value_str);
}
return self
.execute_shell_file(
code_path.clone(),
&context.secrets,
&env,
parameters_stdin,
context.timeout,
context.max_stdout_bytes,
context.max_stderr_bytes,
@@ -447,7 +487,8 @@ impl Runtime for ShellRuntime {
self.execute_shell_code(
script,
&context.secrets,
&context.env,
&env,
parameters_stdin,
context.timeout,
context.max_stdout_bytes,
context.max_stderr_bytes,
@@ -534,6 +575,8 @@ mod tests {
runtime_name: Some("shell".to_string()),
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: attune_common::models::ParameterDelivery::default(),
parameter_format: attune_common::models::ParameterFormat::default(),
};
let result = runtime.execute(context).await.unwrap();
@@ -564,6 +607,8 @@ mod tests {
runtime_name: Some("shell".to_string()),
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: attune_common::models::ParameterDelivery::default(),
parameter_format: attune_common::models::ParameterFormat::default(),
};
let result = runtime.execute(context).await.unwrap();
@@ -589,6 +634,8 @@ mod tests {
runtime_name: Some("shell".to_string()),
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: attune_common::models::ParameterDelivery::default(),
parameter_format: attune_common::models::ParameterFormat::default(),
};
let result = runtime.execute(context).await.unwrap();
@@ -616,6 +663,8 @@ mod tests {
runtime_name: Some("shell".to_string()),
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: attune_common::models::ParameterDelivery::default(),
parameter_format: attune_common::models::ParameterFormat::default(),
};
let result = runtime.execute(context).await.unwrap();
@@ -658,6 +707,8 @@ echo "missing=$missing"
runtime_name: Some("shell".to_string()),
max_stdout_bytes: 10 * 1024 * 1024,
max_stderr_bytes: 10 * 1024 * 1024,
parameter_delivery: attune_common::models::ParameterDelivery::default(),
parameter_format: attune_common::models::ParameterFormat::default(),
};
let result = runtime.execute(context).await.unwrap();

View File

@@ -10,7 +10,7 @@ use attune_common::models::ExecutionStatus;
use attune_common::mq::{
config::MessageQueueConfig as MqConfig, Connection, Consumer, ConsumerConfig,
ExecutionCompletedPayload, ExecutionStatusChangedPayload, MessageEnvelope, MessageType,
Publisher, PublisherConfig, QueueConfig,
Publisher, PublisherConfig,
};
use attune_common::repositories::{execution::ExecutionRepository, FindById};
use chrono::Utc;
@@ -230,6 +230,11 @@ impl WorkerService {
.map(|w| w.max_stderr_bytes)
.unwrap_or(10 * 1024 * 1024);
let packs_base_dir = std::path::PathBuf::from(&config.packs_base_dir);
// Get API URL from environment or construct from server config
let api_url = std::env::var("ATTUNE_API_URL")
.unwrap_or_else(|_| format!("http://{}:{}", config.server.host, config.server.port));
let executor = Arc::new(ActionExecutor::new(
pool.clone(),
runtime_registry,
@@ -238,6 +243,7 @@ impl WorkerService {
max_stdout_bytes,
max_stderr_bytes,
packs_base_dir,
api_url,
));
// Initialize heartbeat manager
@@ -430,8 +436,13 @@ impl WorkerService {
}
// Publish completion notification for queue management
if let Err(e) =
Self::publish_completion_notification(&db_pool, &publisher, execution_id).await
if let Err(e) = Self::publish_completion_notification(
&db_pool,
&publisher,
execution_id,
ExecutionStatus::Completed,
)
.await
{
error!(
"Failed to publish completion notification for execution {}: {}",
@@ -458,8 +469,13 @@ impl WorkerService {
}
// Publish completion notification for queue management
if let Err(e) =
Self::publish_completion_notification(&db_pool, &publisher, execution_id).await
if let Err(e) = Self::publish_completion_notification(
&db_pool,
&publisher,
execution_id,
ExecutionStatus::Failed,
)
.await
{
error!(
"Failed to publish completion notification for execution {}: {}",
@@ -528,6 +544,7 @@ impl WorkerService {
db_pool: &PgPool,
publisher: &Publisher,
execution_id: i64,
final_status: ExecutionStatus,
) -> Result<()> {
// Fetch execution to get action_id and other required fields
let execution = ExecutionRepository::find_by_id(db_pool, execution_id)
@@ -556,7 +573,7 @@ impl WorkerService {
execution_id: execution.id,
action_id,
action_ref: execution.action_ref.clone(),
status: format!("{:?}", execution.status),
status: format!("{:?}", final_status),
result: execution.result.clone(),
completed_at: Utc::now(),
};
@@ -576,21 +593,7 @@ impl WorkerService {
Ok(())
}
/// Run the worker service until interrupted
pub async fn run(&mut self) -> Result<()> {
self.start().await?;
// Wait for shutdown signal
tokio::signal::ctrl_c()
.await
.map_err(|e| Error::Internal(format!("Failed to wait for shutdown signal: {}", e)))?;
info!("Received shutdown signal");
self.stop().await?;
Ok(())
}
}
#[cfg(test)]

View File

@@ -1,507 +0,0 @@
//! Pack Test Executor Module
//!
//! Executes pack tests by running test runners and collecting results.
use attune_common::error::{Error, Result};
use attune_common::models::pack_test::{
PackTestResult, TestCaseResult, TestStatus, TestSuiteResult,
};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::time::{Duration, Instant};
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tracing::{debug, error, info, warn};
/// Test configuration from pack.yaml
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestConfig {
    /// Master switch: `TestExecutor::execute_pack_tests` returns a
    /// validation error when this is false.
    pub enabled: bool,
    /// How test files are discovered.
    // NOTE(review): not read by TestExecutor in this module — presumably
    // consumed by a discovery step elsewhere; confirm before relying on it.
    pub discovery: DiscoveryConfig,
    /// Named test runners; each entry is executed as one test suite.
    pub runners: HashMap<String, RunnerConfig>,
    /// Pack-level default result format.
    // NOTE(review): unused here; per-runner `RunnerConfig::result_format`
    // is what TestExecutor actually reads. Verify against callers.
    pub result_format: Option<String>,
    /// Path to a result artifact — not consumed by this module.
    pub result_path: Option<String>,
    /// Minimum acceptable pass rate — not enforced by this module.
    pub min_pass_rate: Option<f64>,
    /// Policy on failure (e.g. abort/continue) — not consumed here.
    pub on_failure: Option<String>,
}
/// Test discovery configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiscoveryConfig {
    /// Discovery strategy name.
    // NOTE(review): valid values are not visible in this module — confirm
    // against the pack.yaml schema.
    pub method: String,
    /// Optional path used by the discovery method.
    pub path: Option<String>,
}
/// Test runner configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunnerConfig {
    /// Runner kind: "script", "unittest", or "pytest"
    /// (anything else is rejected by `execute_test_suite`).
    pub r#type: String,
    /// Entry-point path, relative to the pack directory.
    pub entry_point: String,
    /// Per-suite timeout in seconds; defaults to 300 when absent.
    pub timeout: Option<u64>,
    /// Output format: "simple" (default) or "json" (not yet implemented).
    pub result_format: Option<String>,
}
/// Test executor for running pack tests
pub struct TestExecutor {
    /// Base directory for pack files; a pack's working directory is
    /// `pack_base_dir.join(pack_ref)`.
    pack_base_dir: PathBuf,
}
impl TestExecutor {
/// Create a new test executor
pub fn new(pack_base_dir: PathBuf) -> Self {
Self { pack_base_dir }
}
/// Execute all tests for a pack
pub async fn execute_pack_tests(
&self,
pack_ref: &str,
pack_version: &str,
test_config: &TestConfig,
) -> Result<PackTestResult> {
info!("Executing tests for pack: {} v{}", pack_ref, pack_version);
if !test_config.enabled {
return Err(Error::Validation(
"Testing is not enabled for this pack".to_string(),
));
}
let pack_dir = self.pack_base_dir.join(pack_ref);
if !pack_dir.exists() {
return Err(Error::not_found(
"pack_directory",
"path",
pack_dir.display().to_string(),
));
}
let start_time = Instant::now();
let execution_time = Utc::now();
let mut test_suites = Vec::new();
// Execute tests for each runner
for (runner_name, runner_config) in &test_config.runners {
info!(
"Running test suite: {} ({})",
runner_name, runner_config.r#type
);
match self
.execute_test_suite(&pack_dir, runner_name, runner_config)
.await
{
Ok(suite_result) => {
info!(
"Test suite '{}' completed: {}/{} passed",
runner_name, suite_result.passed, suite_result.total
);
test_suites.push(suite_result);
}
Err(e) => {
error!("Test suite '{}' failed to execute: {}", runner_name, e);
// Create a failed suite result
test_suites.push(TestSuiteResult {
name: runner_name.clone(),
runner_type: runner_config.r#type.clone(),
total: 0,
passed: 0,
failed: 1,
skipped: 0,
duration_ms: 0,
test_cases: vec![TestCaseResult {
name: format!("{}_execution", runner_name),
status: TestStatus::Error,
duration_ms: 0,
error_message: Some(e.to_string()),
stdout: None,
stderr: None,
}],
});
}
}
}
let total_duration_ms = start_time.elapsed().as_millis() as i64;
// Aggregate results
let total_tests: i32 = test_suites.iter().map(|s| s.total).sum();
let passed: i32 = test_suites.iter().map(|s| s.passed).sum();
let failed: i32 = test_suites.iter().map(|s| s.failed).sum();
let skipped: i32 = test_suites.iter().map(|s| s.skipped).sum();
let pass_rate = if total_tests > 0 {
passed as f64 / total_tests as f64
} else {
0.0
};
info!(
"Pack tests completed: {}/{} passed ({:.1}%)",
passed,
total_tests,
pass_rate * 100.0
);
// Determine overall test status
let status = if failed > 0 {
"failed".to_string()
} else if passed == total_tests {
"passed".to_string()
} else if skipped == total_tests {
"skipped".to_string()
} else {
"partial".to_string()
};
Ok(PackTestResult {
pack_ref: pack_ref.to_string(),
pack_version: pack_version.to_string(),
execution_time,
status,
total_tests,
passed,
failed,
skipped,
pass_rate,
duration_ms: total_duration_ms,
test_suites,
})
}
/// Execute a single test suite
async fn execute_test_suite(
&self,
pack_dir: &Path,
runner_name: &str,
runner_config: &RunnerConfig,
) -> Result<TestSuiteResult> {
let start_time = Instant::now();
// Resolve entry point path
let entry_point = pack_dir.join(&runner_config.entry_point);
if !entry_point.exists() {
return Err(Error::not_found(
"test_entry_point",
"path",
entry_point.display().to_string(),
));
}
// Determine command based on runner type
// Use relative path from pack directory for the entry point
let relative_entry_point = entry_point
.strip_prefix(pack_dir)
.unwrap_or(&entry_point)
.to_string_lossy()
.to_string();
let (command, args) = match runner_config.r#type.as_str() {
"script" => {
// Execute as shell script
let shell = if entry_point.extension().and_then(|s| s.to_str()) == Some("sh") {
"/bin/sh"
} else {
"/bin/bash"
};
(shell.to_string(), vec![relative_entry_point])
}
"unittest" => {
// Execute as Python unittest
(
"python3".to_string(),
vec![
"-m".to_string(),
"unittest".to_string(),
relative_entry_point,
],
)
}
"pytest" => {
// Execute with pytest
(
"pytest".to_string(),
vec![relative_entry_point, "-v".to_string()],
)
}
_ => {
return Err(Error::Validation(format!(
"Unsupported runner type: {}",
runner_config.r#type
)));
}
};
// Execute test command with pack_dir as working directory
let timeout_duration = Duration::from_secs(runner_config.timeout.unwrap_or(300));
let output = self
.run_command(&command, &args, pack_dir, timeout_duration)
.await?;
let duration_ms = start_time.elapsed().as_millis() as i64;
// Parse output based on result format
let result_format = runner_config.result_format.as_deref().unwrap_or("simple");
let mut suite_result = match result_format {
"simple" => self.parse_simple_output(&output, runner_name, &runner_config.r#type)?,
"json" => self.parse_json_output(&output.stdout, runner_name)?,
_ => {
warn!(
"Unknown result format '{}', falling back to simple",
result_format
);
self.parse_simple_output(&output, runner_name, &runner_config.r#type)?
}
};
suite_result.duration_ms = duration_ms;
Ok(suite_result)
}
/// Run a command with timeout
async fn run_command(
&self,
command: &str,
args: &[String],
working_dir: &Path,
timeout: Duration,
) -> Result<CommandOutput> {
debug!(
"Executing command: {} {} (timeout: {:?})",
command,
args.join(" "),
timeout
);
let mut cmd = Command::new(command);
cmd.args(args)
.current_dir(working_dir)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.stdin(Stdio::null());
let start = Instant::now();
let mut child = cmd.spawn().map_err(|e| {
Error::Internal(format!("Failed to spawn command '{}': {}", command, e))
})?;
// Wait for process with timeout
let status = tokio::time::timeout(timeout, child.wait())
.await
.map_err(|_| Error::Timeout(format!("Test execution timed out after {:?}", timeout)))?
.map_err(|e| Error::Internal(format!("Process wait failed: {}", e)))?;
// Read output
let stdout_handle = child.stdout.take();
let stderr_handle = child.stderr.take();
let stdout = if let Some(stdout) = stdout_handle {
self.read_stream(stdout).await?
} else {
String::new()
};
let stderr = if let Some(stderr) = stderr_handle {
self.read_stream(stderr).await?
} else {
String::new()
};
let duration_ms = start.elapsed().as_millis() as u64;
let exit_code = status.code().unwrap_or(-1);
Ok(CommandOutput {
exit_code,
stdout,
stderr,
duration_ms,
})
}
/// Read from an async stream
async fn read_stream(&self, stream: impl tokio::io::AsyncRead + Unpin) -> Result<String> {
let mut reader = BufReader::new(stream);
let mut output = String::new();
let mut line = String::new();
while reader
.read_line(&mut line)
.await
.map_err(|e| Error::Internal(format!("Failed to read stream: {}", e)))?
> 0
{
output.push_str(&line);
line.clear();
}
Ok(output)
}
/// Parse simple test output format
fn parse_simple_output(
&self,
output: &CommandOutput,
runner_name: &str,
runner_type: &str,
) -> Result<TestSuiteResult> {
let text = format!("{}\n{}", output.stdout, output.stderr);
// Parse test counts from output
let total = self.extract_number(&text, "Total Tests:");
let passed = self.extract_number(&text, "Passed:");
let failed = self.extract_number(&text, "Failed:");
let skipped = self.extract_number(&text, "Skipped:").or_else(|| Some(0));
// If we couldn't parse counts, use exit code
let (total, passed, failed, skipped) = if total.is_none() || passed.is_none() {
if output.exit_code == 0 {
(1, 1, 0, 0)
} else {
(1, 0, 1, 0)
}
} else {
(
total.unwrap_or(0),
passed.unwrap_or(0),
failed.unwrap_or(0),
skipped.unwrap_or(0),
)
};
// Create a single test case representing the entire suite
let test_case = TestCaseResult {
name: format!("{}_suite", runner_name),
status: if output.exit_code == 0 {
TestStatus::Passed
} else {
TestStatus::Failed
},
duration_ms: output.duration_ms as i64,
error_message: if output.exit_code != 0 {
Some(format!("Exit code: {}", output.exit_code))
} else {
None
},
stdout: if !output.stdout.is_empty() {
Some(output.stdout.clone())
} else {
None
},
stderr: if !output.stderr.is_empty() {
Some(output.stderr.clone())
} else {
None
},
};
Ok(TestSuiteResult {
name: runner_name.to_string(),
runner_type: runner_type.to_string(),
total,
passed,
failed,
skipped,
duration_ms: output.duration_ms as i64,
test_cases: vec![test_case],
})
}
/// Parse JSON test output format
fn parse_json_output(&self, _json_str: &str, _runner_name: &str) -> Result<TestSuiteResult> {
// TODO: Implement JSON parsing for structured test results
// For now, return a basic result
Err(Error::Validation(
"JSON result format not yet implemented".to_string(),
))
}
/// Extract a number from text after a label
fn extract_number(&self, text: &str, label: &str) -> Option<i32> {
text.lines()
.find(|line| line.contains(label))
.and_then(|line| {
line.split(label)
.nth(1)?
.trim()
.split_whitespace()
.next()?
.parse::<i32>()
.ok()
})
}
}
/// Command execution output
#[derive(Debug)]
struct CommandOutput {
    /// Process exit code; -1 when no code is available
    /// (e.g. terminated by a signal).
    exit_code: i32,
    /// Captured standard output.
    stdout: String,
    /// Captured standard error.
    stderr: String,
    /// Wall-clock duration of the command in milliseconds.
    duration_ms: u64,
}
#[cfg(test)]
mod tests {
    use super::*;

    // All parsing helpers are pure; the base directory is never touched.
    fn make_executor() -> TestExecutor {
        TestExecutor::new(PathBuf::from("/tmp"))
    }

    #[test]
    fn test_extract_number() {
        let exec = make_executor();
        let sample = "Total Tests: 36\nPassed: 35\nFailed: 1";

        // Present labels yield their counts; a missing label yields None.
        let cases = [
            ("Total Tests:", Some(36)),
            ("Passed:", Some(35)),
            ("Failed:", Some(1)),
            ("Skipped:", None),
        ];
        for (label, expected) in cases {
            assert_eq!(exec.extract_number(sample, label), expected);
        }
    }

    #[test]
    fn test_parse_simple_output() {
        let exec = make_executor();
        let captured = CommandOutput {
            exit_code: 0,
            stdout: "Total Tests: 36\nPassed: 36\nFailed: 0\n".to_string(),
            stderr: String::new(),
            duration_ms: 1234,
        };

        let suite = exec.parse_simple_output(&captured, "shell", "script").unwrap();

        // A fully-green run: counts parsed verbatim, no skips.
        assert_eq!(suite.total, 36);
        assert_eq!(suite.passed, 36);
        assert_eq!(suite.failed, 0);
        assert_eq!(suite.skipped, 0);
        assert_eq!(suite.duration_ms, 1234);
    }

    #[test]
    fn test_parse_simple_output_with_failures() {
        let exec = make_executor();
        let captured = CommandOutput {
            exit_code: 1,
            stdout: "Total Tests: 10\nPassed: 8\nFailed: 2\n".to_string(),
            stderr: "Some tests failed\n".to_string(),
            duration_ms: 5000,
        };

        let suite = exec
            .parse_simple_output(&captured, "python", "unittest")
            .unwrap();

        // Counts come from the labels; the synthetic suite-level test case
        // mirrors the non-zero exit code as Failed.
        assert_eq!(suite.total, 10);
        assert_eq!(suite.passed, 8);
        assert_eq!(suite.failed, 2);
        assert_eq!(suite.test_cases.len(), 1);
        assert_eq!(suite.test_cases[0].status, TestStatus::Failed);
    }
}
}