working out the worker/execution interface

This commit is contained in:
2026-02-08 12:55:33 -06:00
parent c62f41669d
commit a74e13fa0b
108 changed files with 21162 additions and 674 deletions

View File

@@ -17,6 +17,7 @@ pub mod pack_registry;
pub mod repositories;
pub mod runtime_detection;
pub mod schema;
pub mod test_executor;
pub mod utils;
pub mod workflow;

View File

@@ -37,8 +37,132 @@ pub type JsonSchema = JsonValue;
pub mod enums {
use serde::{Deserialize, Serialize};
use sqlx::Type;
use std::fmt;
use std::str::FromStr;
use utoipa::ToSchema;
/// How parameters should be delivered to an action
///
/// Serialized by serde and stored via sqlx in lowercase; the `Display` and
/// `FromStr` impls define the canonical lowercase names ("stdin", "file").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "lowercase")]
pub enum ParameterDelivery {
    /// Pass parameters via stdin (secure, recommended for most cases)
    Stdin,
    /// Pass parameters via temporary file (secure, best for large payloads)
    File,
}
impl Default for ParameterDelivery {
    // Stdin is the default: per the enum docs it is the recommended
    // delivery method for most cases.
    fn default() -> Self {
        Self::Stdin
    }
}
impl fmt::Display for ParameterDelivery {
    /// Render the delivery method as its lowercase canonical name,
    /// the same form accepted by `FromStr`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Self::Stdin => "stdin",
            Self::File => "file",
        };
        f.write_str(name)
    }
}
impl FromStr for ParameterDelivery {
    type Err = String;

    /// Parse a delivery-method name, ignoring case; unknown names yield an
    /// error message containing the original input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        if normalized == "stdin" {
            Ok(Self::Stdin)
        } else if normalized == "file" {
            Ok(Self::File)
        } else {
            Err(format!("Invalid parameter delivery method: {}", s))
        }
    }
}
impl sqlx::Type<sqlx::Postgres> for ParameterDelivery {
fn type_info() -> sqlx::postgres::PgTypeInfo {
<String as sqlx::Type<sqlx::Postgres>>::type_info()
}
}
impl<'r> sqlx::Decode<'r, sqlx::Postgres> for ParameterDelivery {
    // Decode the Postgres text value as a String and reuse the `FromStr`
    // parser; an unrecognized name surfaces as a decode error.
    fn decode(value: sqlx::postgres::PgValueRef<'r>) -> Result<Self, sqlx::error::BoxDynError> {
        let s = <String as sqlx::Decode<sqlx::Postgres>>::decode(value)?;
        s.parse().map_err(|e: String| e.into())
    }
}
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for ParameterDelivery {
    /// Encode as the lowercase string form (matching `Display`/`FromStr`),
    /// delegating to the `String` encoder.
    fn encode_by_ref(
        &self,
        buf: &mut sqlx::postgres::PgArgumentBuffer,
    ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
        // The delegated call already returns the required Result type, so
        // re-wrapping with `Ok(...?)` was redundant (clippy::needless_question_mark).
        <String as sqlx::Encode<sqlx::Postgres>>::encode(self.to_string(), buf)
    }
}
/// Format for parameter serialization
///
/// Serialized by serde and stored via sqlx in lowercase; the `Display` and
/// `FromStr` impls define the canonical names ("dotenv", "json", "yaml").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "lowercase")]
pub enum ParameterFormat {
    /// KEY='VALUE' format (one per line)
    Dotenv,
    /// JSON object
    Json,
    /// YAML format
    Yaml,
}
impl Default for ParameterFormat {
    // JSON is the default serialization format.
    fn default() -> Self {
        Self::Json
    }
}
impl fmt::Display for ParameterFormat {
    /// Render the format as its lowercase canonical name, the same form
    /// accepted by `FromStr`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Self::Json => "json",
            Self::Dotenv => "dotenv",
            Self::Yaml => "yaml",
        };
        f.write_str(name)
    }
}
impl FromStr for ParameterFormat {
    type Err = String;

    /// Parse a serialization-format name, ignoring case; unknown names yield
    /// an error message containing the original input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let normalized = s.to_lowercase();
        if normalized == "json" {
            Ok(Self::Json)
        } else if normalized == "dotenv" {
            Ok(Self::Dotenv)
        } else if normalized == "yaml" {
            Ok(Self::Yaml)
        } else {
            Err(format!("Invalid parameter format: {}", s))
        }
    }
}
impl sqlx::Type<sqlx::Postgres> for ParameterFormat {
fn type_info() -> sqlx::postgres::PgTypeInfo {
<String as sqlx::Type<sqlx::Postgres>>::type_info()
}
}
impl<'r> sqlx::Decode<'r, sqlx::Postgres> for ParameterFormat {
    // Decode the Postgres text value as a String and reuse the `FromStr`
    // parser; an unrecognized name surfaces as a decode error.
    fn decode(value: sqlx::postgres::PgValueRef<'r>) -> Result<Self, sqlx::error::BoxDynError> {
        let s = <String as sqlx::Decode<sqlx::Postgres>>::decode(value)?;
        s.parse().map_err(|e: String| e.into())
    }
}
impl<'q> sqlx::Encode<'q, sqlx::Postgres> for ParameterFormat {
    /// Encode as the lowercase string form (matching `Display`/`FromStr`),
    /// delegating to the `String` encoder.
    fn encode_by_ref(
        &self,
        buf: &mut sqlx::postgres::PgArgumentBuffer,
    ) -> Result<sqlx::encode::IsNull, sqlx::error::BoxDynError> {
        // The delegated call already returns the required Result type, so
        // re-wrapping with `Ok(...?)` was redundant (clippy::needless_question_mark).
        <String as sqlx::Encode<sqlx::Postgres>>::encode(self.to_string(), buf)
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Type, ToSchema)]
#[sqlx(type_name = "worker_type_enum", rename_all = "lowercase")]
#[serde(rename_all = "lowercase")]
@@ -310,6 +434,10 @@ pub mod action {
pub is_workflow: bool,
pub workflow_def: Option<Id>,
pub is_adhoc: bool,
#[sqlx(default)]
pub parameter_delivery: ParameterDelivery,
#[sqlx(default)]
pub parameter_format: ParameterFormat,
pub created: DateTime<Utc>,
pub updated: DateTime<Utc>,
}
@@ -493,6 +621,11 @@ pub mod execution {
pub action_ref: String,
pub config: Option<JsonDict>,
/// Environment variables for this execution (string -> string mapping)
/// These are set as environment variables in the action's process.
/// Separate from parameters which are passed via stdin/file.
pub env_vars: Option<JsonDict>,
/// Parent execution ID (generic hierarchy for all execution types)
///
/// Used for:

View File

@@ -20,6 +20,7 @@ pub struct CreateExecutionInput {
pub action: Option<Id>,
pub action_ref: String,
pub config: Option<JsonDict>,
pub env_vars: Option<JsonDict>,
pub parent: Option<Id>,
pub enforcement: Option<Id>,
pub executor: Option<Id>,
@@ -54,7 +55,7 @@ impl FindById for ExecutionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE id = $1"
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE id = $1"
).bind(id).fetch_optional(executor).await.map_err(Into::into)
}
}
@@ -66,7 +67,7 @@ impl List for ExecutionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution ORDER BY created DESC LIMIT 1000"
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution ORDER BY created DESC LIMIT 1000"
).fetch_all(executor).await.map_err(Into::into)
}
}
@@ -79,8 +80,8 @@ impl Create for ExecutionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"INSERT INTO execution (action, action_ref, config, parent, enforcement, executor, status, result, workflow_task) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated"
).bind(input.action).bind(&input.action_ref).bind(&input.config).bind(input.parent).bind(input.enforcement).bind(input.executor).bind(input.status).bind(&input.result).bind(sqlx::types::Json(&input.workflow_task)).fetch_one(executor).await.map_err(Into::into)
"INSERT INTO execution (action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated"
).bind(input.action).bind(&input.action_ref).bind(&input.config).bind(&input.env_vars).bind(input.parent).bind(input.enforcement).bind(input.executor).bind(input.status).bind(&input.result).bind(sqlx::types::Json(&input.workflow_task)).fetch_one(executor).await.map_err(Into::into)
}
}
@@ -129,7 +130,7 @@ impl Update for ExecutionRepository {
}
query.push(", updated = NOW() WHERE id = ").push_bind(id);
query.push(" RETURNING id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated");
query.push(" RETURNING id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated");
query
.build_query_as::<Execution>()
@@ -162,7 +163,7 @@ impl ExecutionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE status = $1 ORDER BY created DESC"
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE status = $1 ORDER BY created DESC"
).bind(status).fetch_all(executor).await.map_err(Into::into)
}
@@ -174,7 +175,7 @@ impl ExecutionRepository {
E: Executor<'e, Database = Postgres> + 'e,
{
sqlx::query_as::<_, Execution>(
"SELECT id, action, action_ref, config, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE enforcement = $1 ORDER BY created DESC"
"SELECT id, action, action_ref, config, env_vars, parent, enforcement, executor, status, result, workflow_task, created, updated FROM execution WHERE enforcement = $1 ORDER BY created DESC"
).bind(enforcement_id).fetch_all(executor).await.map_err(Into::into)
}
}

View File

@@ -0,0 +1,505 @@
//! Pack Test Executor Module
//!
//! Executes pack tests by running test runners and collecting results.
use crate::error::{Error, Result};
use crate::models::pack_test::{PackTestResult, TestCaseResult, TestStatus, TestSuiteResult};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::process::Stdio;
use std::time::{Duration, Instant};
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tracing::{debug, error, info, warn};
/// Test configuration from pack.yaml
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestConfig {
    /// Master switch; `TestExecutor::execute_pack_tests` refuses to run when false.
    pub enabled: bool,
    /// How test files are discovered within the pack.
    pub discovery: DiscoveryConfig,
    /// Named test runners, keyed by suite name; each runs as a separate suite.
    pub runners: HashMap<String, RunnerConfig>,
    /// Pack-level result format — not read by this module (suites use their
    /// own `RunnerConfig::result_format`); presumably a default. TODO confirm.
    pub result_format: Option<String>,
    /// Result artifact path — not read by this module; verify against consumer.
    pub result_path: Option<String>,
    /// Minimum acceptable pass rate — not enforced by this module; presumably
    /// checked by a caller. TODO confirm.
    pub min_pass_rate: Option<f64>,
    /// Policy when tests fail — not read by this module.
    pub on_failure: Option<String>,
}
/// Test discovery configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiscoveryConfig {
    /// Discovery strategy name — not interpreted by this module; TODO confirm
    /// the accepted values against the pack.yaml schema.
    pub method: String,
    /// Optional discovery root path, presumably relative to the pack directory.
    pub path: Option<String>,
}
/// Test runner configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunnerConfig {
    /// Runner kind: "script", "unittest", or "pytest" (anything else is
    /// rejected with a validation error).
    pub r#type: String,
    /// Entry-point file, resolved relative to the pack directory.
    pub entry_point: String,
    /// Per-suite timeout in seconds; defaults to 300 when unset.
    pub timeout: Option<u64>,
    /// Result format: "simple" (default) or "json"; unknown values fall back
    /// to "simple" with a warning.
    pub result_format: Option<String>,
}
/// Test executor for running pack tests
pub struct TestExecutor {
    /// Base directory for pack files; a pack's directory is resolved as
    /// `pack_base_dir/<pack_ref>`.
    pack_base_dir: PathBuf,
}
impl TestExecutor {
    /// Create a new test executor that resolves pack files under `pack_base_dir`.
    pub fn new(pack_base_dir: PathBuf) -> Self {
        Self { pack_base_dir }
    }

    /// Execute every configured test suite for a pack and aggregate the results.
    ///
    /// A suite that fails to even start does NOT abort the run; it is recorded
    /// as an errored suite with a single synthetic test case so it still shows
    /// up in the aggregate counts.
    ///
    /// # Errors
    /// Returns a validation error when testing is not enabled for the pack, or
    /// a not-found error when the pack directory is missing on disk.
    pub async fn execute_pack_tests(
        &self,
        pack_ref: &str,
        pack_version: &str,
        test_config: &TestConfig,
    ) -> Result<PackTestResult> {
        info!("Executing tests for pack: {} v{}", pack_ref, pack_version);

        if !test_config.enabled {
            return Err(Error::Validation(
                "Testing is not enabled for this pack".to_string(),
            ));
        }

        let pack_dir = self.pack_base_dir.join(pack_ref);
        if !pack_dir.exists() {
            return Err(Error::not_found(
                "pack_directory",
                "path",
                pack_dir.display().to_string(),
            ));
        }

        let start_time = Instant::now();
        let execution_time = Utc::now();
        let mut test_suites = Vec::with_capacity(test_config.runners.len());

        // Execute tests for each runner.
        for (runner_name, runner_config) in &test_config.runners {
            info!(
                "Running test suite: {} ({})",
                runner_name, runner_config.r#type
            );
            match self
                .execute_test_suite(&pack_dir, runner_name, runner_config)
                .await
            {
                Ok(suite_result) => {
                    info!(
                        "Test suite '{}' completed: {}/{} passed",
                        runner_name, suite_result.passed, suite_result.total
                    );
                    test_suites.push(suite_result);
                }
                Err(e) => {
                    error!("Test suite '{}' failed to execute: {}", runner_name, e);
                    // Record the execution failure as a one-test errored suite.
                    test_suites.push(TestSuiteResult {
                        name: runner_name.clone(),
                        runner_type: runner_config.r#type.clone(),
                        // Bug fix: `total` was previously 0 while `failed` was
                        // 1, which excluded the errored suite from the
                        // aggregate totals and inflated the pass rate below.
                        total: 1,
                        passed: 0,
                        failed: 1,
                        skipped: 0,
                        duration_ms: 0,
                        test_cases: vec![TestCaseResult {
                            name: format!("{}_execution", runner_name),
                            status: TestStatus::Error,
                            duration_ms: 0,
                            error_message: Some(e.to_string()),
                            stdout: None,
                            stderr: None,
                        }],
                    });
                }
            }
        }

        let total_duration_ms = start_time.elapsed().as_millis() as i64;

        // Aggregate per-suite counts into the pack-level result.
        let total_tests: i32 = test_suites.iter().map(|s| s.total).sum();
        let passed: i32 = test_suites.iter().map(|s| s.passed).sum();
        let failed: i32 = test_suites.iter().map(|s| s.failed).sum();
        let skipped: i32 = test_suites.iter().map(|s| s.skipped).sum();
        let pass_rate = if total_tests > 0 {
            passed as f64 / total_tests as f64
        } else {
            0.0
        };

        info!(
            "Pack tests completed: {}/{} passed ({:.1}%)",
            passed,
            total_tests,
            pass_rate * 100.0
        );

        // Overall status: any failure wins, then all-passed, then all-skipped,
        // otherwise a mixed ("partial") outcome.
        let status = if failed > 0 {
            "failed".to_string()
        } else if passed == total_tests {
            "passed".to_string()
        } else if skipped == total_tests {
            "skipped".to_string()
        } else {
            "partial".to_string()
        };

        Ok(PackTestResult {
            pack_ref: pack_ref.to_string(),
            pack_version: pack_version.to_string(),
            execution_time,
            status,
            total_tests,
            passed,
            failed,
            skipped,
            pass_rate,
            duration_ms: total_duration_ms,
            test_suites,
        })
    }

    /// Execute a single test suite and parse its output into a result.
    ///
    /// # Errors
    /// Fails when the entry point is missing, the runner type is unsupported,
    /// the command cannot be run (spawn failure or timeout), or the output
    /// cannot be parsed.
    async fn execute_test_suite(
        &self,
        pack_dir: &Path,
        runner_name: &str,
        runner_config: &RunnerConfig,
    ) -> Result<TestSuiteResult> {
        let start_time = Instant::now();

        // Resolve and validate the entry point before spawning anything.
        let entry_point = pack_dir.join(&runner_config.entry_point);
        if !entry_point.exists() {
            return Err(Error::not_found(
                "test_entry_point",
                "path",
                entry_point.display().to_string(),
            ));
        }

        // The command runs with `pack_dir` as its working directory, so pass
        // the entry point relative to the pack where possible.
        let relative_entry_point = entry_point
            .strip_prefix(pack_dir)
            .unwrap_or(&entry_point)
            .to_string_lossy()
            .to_string();

        // Determine command based on runner type.
        let (command, args) = match runner_config.r#type.as_str() {
            "script" => {
                // Shell scripts: `.sh` files via /bin/sh, anything else via bash.
                let shell = if entry_point.extension().and_then(|s| s.to_str()) == Some("sh") {
                    "/bin/sh"
                } else {
                    "/bin/bash"
                };
                (shell.to_string(), vec![relative_entry_point])
            }
            "unittest" => (
                // Execute as Python unittest.
                "python3".to_string(),
                vec![
                    "-m".to_string(),
                    "unittest".to_string(),
                    relative_entry_point,
                ],
            ),
            "pytest" => (
                // Execute with pytest, verbose output.
                "pytest".to_string(),
                vec![relative_entry_point, "-v".to_string()],
            ),
            _ => {
                return Err(Error::Validation(format!(
                    "Unsupported runner type: {}",
                    runner_config.r#type
                )));
            }
        };

        // Per-suite timeout, defaulting to 5 minutes.
        let timeout_duration = Duration::from_secs(runner_config.timeout.unwrap_or(300));
        let output = self
            .run_command(&command, &args, pack_dir, timeout_duration)
            .await?;

        let duration_ms = start_time.elapsed().as_millis() as i64;

        // Parse output according to the suite's declared result format.
        let result_format = runner_config.result_format.as_deref().unwrap_or("simple");
        let mut suite_result = match result_format {
            "simple" => self.parse_simple_output(&output, runner_name, &runner_config.r#type)?,
            "json" => self.parse_json_output(&output.stdout, runner_name)?,
            _ => {
                warn!(
                    "Unknown result format '{}', falling back to simple",
                    result_format
                );
                self.parse_simple_output(&output, runner_name, &runner_config.r#type)?
            }
        };
        suite_result.duration_ms = duration_ms;

        Ok(suite_result)
    }

    /// Run a command with a timeout, capturing stdout and stderr.
    ///
    /// # Errors
    /// Fails when the binary cannot be spawned, when the process exceeds
    /// `timeout`, or when waiting on the process fails.
    async fn run_command(
        &self,
        command: &str,
        args: &[String],
        working_dir: &Path,
        timeout: Duration,
    ) -> Result<CommandOutput> {
        debug!(
            "Executing command: {} {} (timeout: {:?})",
            command,
            args.join(" "),
            timeout
        );

        let mut cmd = Command::new(command);
        cmd.args(args)
            .current_dir(working_dir)
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .stdin(Stdio::null())
            // Ensure a timed-out test process is killed when the child handle
            // is dropped instead of leaking past the timeout.
            .kill_on_drop(true);

        let start = Instant::now();
        let child = cmd.spawn().map_err(|e| {
            Error::Internal(format!("Failed to spawn command '{}': {}", command, e))
        })?;

        // Bug fix: the previous implementation awaited `child.wait()` before
        // reading the pipes. A child that writes more output than the OS pipe
        // buffer holds then blocks on write and never exits, deadlocking the
        // run until the timeout fires. `wait_with_output` drains stdout and
        // stderr concurrently while waiting for the process to exit.
        let output = tokio::time::timeout(timeout, child.wait_with_output())
            .await
            .map_err(|_| Error::Timeout(format!("Test execution timed out after {:?}", timeout)))?
            .map_err(|e| Error::Internal(format!("Process wait failed: {}", e)))?;

        let duration_ms = start.elapsed().as_millis() as u64;
        // A process terminated by a signal has no exit code; report -1.
        let exit_code = output.status.code().unwrap_or(-1);

        Ok(CommandOutput {
            exit_code,
            // Lossy conversion so non-UTF-8 bytes in test output degrade to
            // replacement characters instead of failing the whole run.
            stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
            stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
            duration_ms,
        })
    }

    /// Parse the "simple" test output format.
    ///
    /// Looks for `Total Tests:`, `Passed:`, `Failed:` and `Skipped:` labels in
    /// the combined stdout/stderr. When the total/passed counts are absent,
    /// the exit code alone decides between a single pass and a single failure.
    fn parse_simple_output(
        &self,
        output: &CommandOutput,
        runner_name: &str,
        runner_type: &str,
    ) -> Result<TestSuiteResult> {
        let text = format!("{}\n{}", output.stdout, output.stderr);

        let total = self.extract_number(&text, "Total Tests:");
        let passed = self.extract_number(&text, "Passed:");
        let failed = self.extract_number(&text, "Failed:");
        let skipped = self.extract_number(&text, "Skipped:");

        let (total, passed, failed, skipped) = match (total, passed) {
            (Some(total), Some(passed)) => {
                (total, passed, failed.unwrap_or(0), skipped.unwrap_or(0))
            }
            // No parseable counts: fall back to exit-code semantics.
            _ => {
                if output.exit_code == 0 {
                    (1, 1, 0, 0)
                } else {
                    (1, 0, 1, 0)
                }
            }
        };

        // The simple format carries no per-test detail, so represent the whole
        // run as one synthetic test case keyed off the exit code.
        let test_case = TestCaseResult {
            name: format!("{}_suite", runner_name),
            status: if output.exit_code == 0 {
                TestStatus::Passed
            } else {
                TestStatus::Failed
            },
            duration_ms: output.duration_ms as i64,
            error_message: if output.exit_code != 0 {
                Some(format!("Exit code: {}", output.exit_code))
            } else {
                None
            },
            stdout: if !output.stdout.is_empty() {
                Some(output.stdout.clone())
            } else {
                None
            },
            stderr: if !output.stderr.is_empty() {
                Some(output.stderr.clone())
            } else {
                None
            },
        };

        Ok(TestSuiteResult {
            name: runner_name.to_string(),
            runner_type: runner_type.to_string(),
            total,
            passed,
            failed,
            skipped,
            duration_ms: output.duration_ms as i64,
            test_cases: vec![test_case],
        })
    }

    /// Parse JSON test output format.
    ///
    /// Not yet implemented; always returns a validation error so callers fail
    /// loudly rather than receiving fabricated results.
    fn parse_json_output(&self, _json_str: &str, _runner_name: &str) -> Result<TestSuiteResult> {
        // TODO: Implement JSON parsing for structured test results
        Err(Error::Validation(
            "JSON result format not yet implemented".to_string(),
        ))
    }

    /// Extract the first integer token that follows `label` on any line of
    /// `text`; returns `None` when no line contains the label or the token
    /// after it is not a valid `i32`.
    fn extract_number(&self, text: &str, label: &str) -> Option<i32> {
        text.lines()
            .find(|line| line.contains(label))
            .and_then(|line| {
                line.split(label)
                    .nth(1)?
                    .trim()
                    .split_whitespace()
                    .next()?
                    .parse::<i32>()
                    .ok()
            })
    }
}
/// Command execution output
#[derive(Debug)]
struct CommandOutput {
    /// Process exit code; -1 when the process produced none (e.g. killed by
    /// a signal).
    exit_code: i32,
    /// Captured standard output.
    stdout: String,
    /// Captured standard error.
    stderr: String,
    /// Wall-clock duration of the command in milliseconds.
    duration_ms: u64,
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Label-based extraction finds each count and misses absent labels.
    #[test]
    fn test_extract_number() {
        let exec = TestExecutor::new(PathBuf::from("/tmp"));
        let sample = "Total Tests: 36\nPassed: 35\nFailed: 1";

        assert_eq!(exec.extract_number(sample, "Total Tests:"), Some(36));
        assert_eq!(exec.extract_number(sample, "Passed:"), Some(35));
        assert_eq!(exec.extract_number(sample, "Failed:"), Some(1));
        assert_eq!(exec.extract_number(sample, "Skipped:"), None);
    }

    /// A fully passing run parses into matching suite counts and duration.
    #[test]
    fn test_parse_simple_output() {
        let exec = TestExecutor::new(PathBuf::from("/tmp"));
        let cmd_out = CommandOutput {
            exit_code: 0,
            stdout: "Total Tests: 36\nPassed: 36\nFailed: 0\n".to_string(),
            stderr: String::new(),
            duration_ms: 1234,
        };

        let suite = exec
            .parse_simple_output(&cmd_out, "shell", "script")
            .unwrap();
        assert_eq!(
            (suite.total, suite.passed, suite.failed, suite.skipped),
            (36, 36, 0, 0)
        );
        assert_eq!(suite.duration_ms, 1234);
    }

    /// A failing run keeps its counts and marks the synthetic case failed.
    #[test]
    fn test_parse_simple_output_with_failures() {
        let exec = TestExecutor::new(PathBuf::from("/tmp"));
        let cmd_out = CommandOutput {
            exit_code: 1,
            stdout: "Total Tests: 10\nPassed: 8\nFailed: 2\n".to_string(),
            stderr: "Some tests failed\n".to_string(),
            duration_ms: 5000,
        };

        let suite = exec
            .parse_simple_output(&cmd_out, "python", "unittest")
            .unwrap();
        assert_eq!((suite.total, suite.passed, suite.failed), (10, 8, 2));
        assert_eq!(suite.test_cases.len(), 1);
        assert_eq!(suite.test_cases[0].status, TestStatus::Failed);
    }
}

View File

@@ -37,6 +37,7 @@ async fn test_create_execution_basic() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: Some(json!({"param1": "value1"})),
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -69,6 +70,7 @@ async fn test_create_execution_without_action() {
action: None,
action_ref: action_ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -101,6 +103,7 @@ async fn test_create_execution_with_all_fields() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: Some(json!({"timeout": 300, "retry": true})),
env_vars: None,
parent: None,
enforcement: None,
executor: None, // Don't reference non-existent identity
@@ -135,6 +138,7 @@ async fn test_create_execution_with_parent() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -152,6 +156,7 @@ async fn test_create_execution_with_parent() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: Some(parent.id),
enforcement: None,
executor: None,
@@ -189,6 +194,7 @@ async fn test_find_execution_by_id() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -240,6 +246,7 @@ async fn test_list_executions() {
action: Some(action.id),
action_ref: format!("{}_{}", action.r#ref, i),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -284,6 +291,7 @@ async fn test_list_executions_ordered_by_created_desc() {
action: Some(action.id),
action_ref: format!("{}_{}", action.r#ref, i),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -333,6 +341,7 @@ async fn test_update_execution_status() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -376,6 +385,7 @@ async fn test_update_execution_result() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -420,6 +430,7 @@ async fn test_update_execution_executor() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -462,6 +473,7 @@ async fn test_update_execution_status_transitions() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -551,6 +563,7 @@ async fn test_update_execution_failed_status() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -594,6 +607,7 @@ async fn test_update_execution_no_changes() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -636,6 +650,7 @@ async fn test_delete_execution() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -700,6 +715,7 @@ async fn test_find_executions_by_status() {
action: Some(action.id),
action_ref: format!("{}_{}", action.r#ref, i),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -745,6 +761,7 @@ async fn test_find_executions_by_enforcement() {
action: Some(action.id),
action_ref: format!("{}_1", action.r#ref),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -762,6 +779,7 @@ async fn test_find_executions_by_enforcement() {
action: Some(action.id),
action_ref: format!("{}_{}", action.r#ref, i),
config: None,
env_vars: None,
parent: None,
enforcement: if i == 2 { None } else { None }, // Can't reference non-existent enforcement
executor: None,
@@ -804,6 +822,7 @@ async fn test_parent_child_execution_hierarchy() {
action: Some(action.id),
action_ref: format!("{}.parent", action.r#ref),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -823,6 +842,7 @@ async fn test_parent_child_execution_hierarchy() {
action: Some(action.id),
action_ref: format!("{}.child_{}", action.r#ref, i),
config: None,
env_vars: None,
parent: Some(parent.id),
enforcement: None,
executor: None,
@@ -865,6 +885,7 @@ async fn test_nested_execution_hierarchy() {
action: Some(action.id),
action_ref: format!("{}.grandparent", action.r#ref),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -882,6 +903,7 @@ async fn test_nested_execution_hierarchy() {
action: Some(action.id),
action_ref: format!("{}.parent", action.r#ref),
config: None,
env_vars: None,
parent: Some(grandparent.id),
enforcement: None,
executor: None,
@@ -899,6 +921,7 @@ async fn test_nested_execution_hierarchy() {
action: Some(action.id),
action_ref: format!("{}.child", action.r#ref),
config: None,
env_vars: None,
parent: Some(parent.id),
enforcement: None,
executor: None,
@@ -939,6 +962,7 @@ async fn test_execution_timestamps() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1008,6 +1032,7 @@ async fn test_execution_config_json() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: Some(complex_config.clone()),
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1039,6 +1064,7 @@ async fn test_execution_result_json() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,

View File

@@ -44,6 +44,7 @@ async fn test_create_inquiry_minimal() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -102,6 +103,7 @@ async fn test_create_inquiry_with_response_schema() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -158,6 +160,7 @@ async fn test_create_inquiry_with_timeout() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -210,6 +213,7 @@ async fn test_create_inquiry_with_assigned_user() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -296,6 +300,7 @@ async fn test_find_inquiry_by_id() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -355,6 +360,7 @@ async fn test_get_inquiry_by_id() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -422,6 +428,7 @@ async fn test_list_inquiries() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -481,6 +488,7 @@ async fn test_update_inquiry_status() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -535,6 +543,7 @@ async fn test_update_inquiry_status_transitions() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -618,6 +627,7 @@ async fn test_update_inquiry_response() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -674,6 +684,7 @@ async fn test_update_inquiry_with_response_and_status() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -730,6 +741,7 @@ async fn test_update_inquiry_assignment() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -795,6 +807,7 @@ async fn test_update_inquiry_no_changes() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -869,6 +882,7 @@ async fn test_delete_inquiry() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -926,6 +940,7 @@ async fn test_delete_execution_cascades_to_inquiries() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -991,6 +1006,7 @@ async fn test_find_inquiries_by_status() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1068,6 +1084,7 @@ async fn test_find_inquiries_by_execution() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1085,6 +1102,7 @@ async fn test_find_inquiries_by_execution() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1147,6 +1165,7 @@ async fn test_inquiry_timestamps_auto_managed() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,
@@ -1212,6 +1231,7 @@ async fn test_inquiry_complex_response_schema() {
action: Some(action.id),
action_ref: action.r#ref.clone(),
config: None,
env_vars: None,
parent: None,
enforcement: None,
executor: None,