re-uploading work
This commit is contained in:
320
crates/worker/src/runtime/dependency.rs
Normal file
320
crates/worker/src/runtime/dependency.rs
Normal file
@@ -0,0 +1,320 @@
|
||||
//! Runtime Dependency Management
|
||||
//!
|
||||
//! Provides generic abstractions for managing runtime dependencies across
|
||||
//! different languages (Python, Node.js, Java, etc.).
|
||||
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Dependency manager result type
pub type DependencyResult<T> = std::result::Result<T, DependencyError>;

/// Dependency manager errors
///
/// Enumerates every failure mode surfaced by dependency-management
/// operations. IO errors convert automatically via `#[from]`.
#[derive(Debug, Error)]
pub enum DependencyError {
    /// The isolated environment could not be created.
    #[error("Failed to create environment: {0}")]
    CreateEnvironmentFailed(String),

    /// Installing the requested dependencies failed.
    #[error("Failed to install dependencies: {0}")]
    InstallFailed(String),

    /// No environment exists for the given identifier.
    #[error("Environment not found: {0}")]
    EnvironmentNotFound(String),

    /// The dependency specification was malformed or unsupported
    /// (also raised when no manager is registered for a runtime).
    #[error("Invalid dependency specification: {0}")]
    InvalidDependencySpec(String),

    /// Underlying filesystem/IO failure (auto-converted from `std::io::Error`).
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    /// A spawned external process failed to run or exited abnormally.
    // NOTE(review): which tools are spawned depends on the manager
    // implementation — not visible in this file.
    #[error("Process execution error: {0}")]
    ProcessError(String),

    /// Reading or writing a dependency lock file failed.
    #[error("Lock file error: {0}")]
    LockFileError(String),

    /// Validation of an existing environment failed.
    #[error("Environment validation failed: {0}")]
    ValidationFailed(String),
}
|
||||
|
||||
/// Dependency specification for a pack
///
/// Describes which runtime a pack needs and what packages must be
/// installed into its isolated environment. Built fluently via the
/// `with_*` methods on this type.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencySpec {
    /// Runtime type (python, nodejs, java, etc.)
    pub runtime: String,

    /// List of dependencies (e.g., ["requests==2.28.0", "flask>=2.0.0"])
    pub dependencies: Vec<String>,

    /// Requirements file content (alternative to dependencies list)
    pub requirements_file_content: Option<String>,

    /// Minimum runtime version required
    pub min_version: Option<String>,

    /// Maximum runtime version required
    pub max_version: Option<String>,

    /// Additional metadata (free-form, runtime-specific)
    pub metadata: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
impl DependencySpec {
|
||||
/// Create a new dependency specification
|
||||
pub fn new(runtime: impl Into<String>) -> Self {
|
||||
Self {
|
||||
runtime: runtime.into(),
|
||||
dependencies: Vec::new(),
|
||||
requirements_file_content: None,
|
||||
min_version: None,
|
||||
max_version: None,
|
||||
metadata: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a dependency
|
||||
pub fn with_dependency(mut self, dep: impl Into<String>) -> Self {
|
||||
self.dependencies.push(dep.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Add multiple dependencies
|
||||
pub fn with_dependencies(mut self, deps: Vec<String>) -> Self {
|
||||
self.dependencies.extend(deps);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set requirements file content
|
||||
pub fn with_requirements_file(mut self, content: String) -> Self {
|
||||
self.requirements_file_content = Some(content);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set version constraints
|
||||
pub fn with_version_range(
|
||||
mut self,
|
||||
min_version: Option<String>,
|
||||
max_version: Option<String>,
|
||||
) -> Self {
|
||||
self.min_version = min_version;
|
||||
self.max_version = max_version;
|
||||
self
|
||||
}
|
||||
|
||||
/// Check if this spec has any dependencies
|
||||
pub fn has_dependencies(&self) -> bool {
|
||||
!self.dependencies.is_empty() || self.requirements_file_content.is_some()
|
||||
}
|
||||
}
|
||||
|
||||
/// Information about an isolated environment
///
/// Snapshot of a managed environment's state, as reported by a
/// [`DependencyManager`] implementation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnvironmentInfo {
    /// Unique environment identifier (typically pack_ref)
    pub id: String,

    /// Path to the environment directory
    pub path: PathBuf,

    /// Runtime type (e.g. "python")
    pub runtime: String,

    /// Runtime version in the environment
    pub runtime_version: String,

    /// List of installed dependencies
    pub installed_dependencies: Vec<String>,

    /// Timestamp when environment was created
    pub created_at: chrono::DateTime<chrono::Utc>,

    /// Timestamp when environment was last updated
    pub updated_at: chrono::DateTime<chrono::Utc>,

    /// Whether the environment is valid and ready to use
    /// (checked by `DependencyManager::needs_update`)
    pub is_valid: bool,

    /// Environment-specific executable path (e.g., venv/bin/python)
    pub executable_path: PathBuf,
}
|
||||
|
||||
/// Trait for managing isolated runtime environments
///
/// Implemented once per language runtime (e.g. a Python venv manager).
/// Implementations must be `Send + Sync` so a registry can share them
/// across worker tasks.
#[async_trait]
pub trait DependencyManager: Send + Sync {
    /// Get the runtime type this manager handles (e.g., "python", "nodejs")
    fn runtime_type(&self) -> &str;

    /// Create or update an isolated environment for a pack
    ///
    /// # Arguments
    /// * `pack_ref` - Unique identifier for the pack (e.g., "core.http")
    /// * `spec` - Dependency specification
    ///
    /// # Returns
    /// Information about the created/updated environment
    async fn ensure_environment(
        &self,
        pack_ref: &str,
        spec: &DependencySpec,
    ) -> DependencyResult<EnvironmentInfo>;

    /// Get information about an existing environment, or `None` if absent
    async fn get_environment(&self, pack_ref: &str) -> DependencyResult<Option<EnvironmentInfo>>;

    /// Remove an environment
    async fn remove_environment(&self, pack_ref: &str) -> DependencyResult<()>;

    /// Validate an environment is still functional
    async fn validate_environment(&self, pack_ref: &str) -> DependencyResult<bool>;

    /// Get the executable path for running actions in this environment
    ///
    /// # Arguments
    /// * `pack_ref` - Pack identifier
    ///
    /// # Returns
    /// Path to the runtime executable within the isolated environment
    async fn get_executable_path(&self, pack_ref: &str) -> DependencyResult<PathBuf>;

    /// List all managed environments
    async fn list_environments(&self) -> DependencyResult<Vec<EnvironmentInfo>>;

    /// Clean up invalid or unused environments, keeping the `keep_recent`
    /// most recent; returns the identifiers of removed environments
    async fn cleanup(&self, keep_recent: usize) -> DependencyResult<Vec<String>>;

    /// Check if dependencies have changed and environment needs updating
    ///
    /// The default implementation only reports "update needed" when the
    /// environment is missing or flagged invalid; it does not compare the
    /// requested dependencies against what is installed.
    async fn needs_update(&self, pack_ref: &str, _spec: &DependencySpec) -> DependencyResult<bool> {
        // Default implementation: check if environment exists and validate it
        match self.get_environment(pack_ref).await? {
            None => Ok(true), // Doesn't exist, needs creation
            Some(env_info) => {
                // Check if environment is valid
                if !env_info.is_valid {
                    return Ok(true);
                }

                // Could add more sophisticated checks here (dependency hash comparison, etc.)
                Ok(false)
            }
        }
    }
}
|
||||
|
||||
/// Registry for managing multiple dependency managers
|
||||
pub struct DependencyManagerRegistry {
|
||||
managers: HashMap<String, Box<dyn DependencyManager>>,
|
||||
}
|
||||
|
||||
impl DependencyManagerRegistry {
|
||||
/// Create a new registry
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
managers: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Register a dependency manager
|
||||
pub fn register(&mut self, manager: Box<dyn DependencyManager>) {
|
||||
let runtime_type = manager.runtime_type().to_string();
|
||||
self.managers.insert(runtime_type, manager);
|
||||
}
|
||||
|
||||
/// Get a dependency manager by runtime type
|
||||
pub fn get(&self, runtime_type: &str) -> Option<&dyn DependencyManager> {
|
||||
self.managers.get(runtime_type).map(|m| m.as_ref())
|
||||
}
|
||||
|
||||
/// Check if a runtime type is supported
|
||||
pub fn supports(&self, runtime_type: &str) -> bool {
|
||||
self.managers.contains_key(runtime_type)
|
||||
}
|
||||
|
||||
/// List all supported runtime types
|
||||
pub fn supported_runtimes(&self) -> Vec<String> {
|
||||
self.managers.keys().cloned().collect()
|
||||
}
|
||||
|
||||
/// Ensure environment for a pack with given spec
|
||||
pub async fn ensure_environment(
|
||||
&self,
|
||||
pack_ref: &str,
|
||||
spec: &DependencySpec,
|
||||
) -> DependencyResult<EnvironmentInfo> {
|
||||
let manager = self.get(&spec.runtime).ok_or_else(|| {
|
||||
DependencyError::InvalidDependencySpec(format!(
|
||||
"No dependency manager found for runtime: {}",
|
||||
spec.runtime
|
||||
))
|
||||
})?;
|
||||
|
||||
manager.ensure_environment(pack_ref, spec).await
|
||||
}
|
||||
|
||||
/// Get executable path for a pack
|
||||
pub async fn get_executable_path(
|
||||
&self,
|
||||
pack_ref: &str,
|
||||
runtime_type: &str,
|
||||
) -> DependencyResult<PathBuf> {
|
||||
let manager = self.get(runtime_type).ok_or_else(|| {
|
||||
DependencyError::InvalidDependencySpec(format!(
|
||||
"No dependency manager found for runtime: {}",
|
||||
runtime_type
|
||||
))
|
||||
})?;
|
||||
|
||||
manager.get_executable_path(pack_ref).await
|
||||
}
|
||||
|
||||
/// Cleanup all managers
|
||||
pub async fn cleanup_all(&self, keep_recent: usize) -> DependencyResult<Vec<String>> {
|
||||
let mut removed = Vec::new();
|
||||
|
||||
for manager in self.managers.values() {
|
||||
let mut cleaned = manager.cleanup(keep_recent).await?;
|
||||
removed.append(&mut cleaned);
|
||||
}
|
||||
|
||||
Ok(removed)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DependencyManagerRegistry {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Builder methods accumulate dependencies and version bounds.
    #[test]
    fn test_dependency_spec_builder() {
        let spec = DependencySpec::new("python")
            .with_dependency("requests==2.28.0")
            .with_dependency("flask>=2.0.0")
            .with_version_range(Some("3.8".to_string()), Some("3.11".to_string()));

        assert_eq!(spec.runtime, "python");
        assert_eq!(
            spec.dependencies,
            vec!["requests==2.28.0".to_string(), "flask>=2.0.0".to_string()]
        );
        assert!(spec.has_dependencies());
        assert_eq!(spec.min_version.as_deref(), Some("3.8"));
    }

    /// A freshly created spec carries no dependencies.
    #[test]
    fn test_dependency_spec_empty() {
        assert!(!DependencySpec::new("nodejs").has_dependencies());
    }

    /// An empty registry supports no runtimes.
    #[test]
    fn test_dependency_manager_registry() {
        let registry = DependencyManagerRegistry::new();
        assert!(registry.supported_runtimes().is_empty());
        assert!(!registry.supports("python"));
    }
}
|
||||
207
crates/worker/src/runtime/local.rs
Normal file
207
crates/worker/src/runtime/local.rs
Normal file
@@ -0,0 +1,207 @@
|
||||
//! Local Runtime Module
|
||||
//!
|
||||
//! Provides local execution capabilities by combining Python and Shell runtimes.
|
||||
//! This module serves as a facade for all local process-based execution.
|
||||
|
||||
use super::native::NativeRuntime;
|
||||
use super::python::PythonRuntime;
|
||||
use super::shell::ShellRuntime;
|
||||
use super::{ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult};
|
||||
use async_trait::async_trait;
|
||||
use tracing::{debug, info};
|
||||
|
||||
/// Local runtime that delegates to Python, Shell, or Native based on action type
|
||||
pub struct LocalRuntime {
|
||||
native: NativeRuntime,
|
||||
python: PythonRuntime,
|
||||
shell: ShellRuntime,
|
||||
}
|
||||
|
||||
impl LocalRuntime {
|
||||
/// Create a new local runtime with default settings
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
native: NativeRuntime::new(),
|
||||
python: PythonRuntime::new(),
|
||||
shell: ShellRuntime::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a local runtime with custom runtimes
|
||||
pub fn with_runtimes(
|
||||
native: NativeRuntime,
|
||||
python: PythonRuntime,
|
||||
shell: ShellRuntime,
|
||||
) -> Self {
|
||||
Self {
|
||||
native,
|
||||
python,
|
||||
shell,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the appropriate runtime for the given context
|
||||
fn select_runtime(&self, context: &ExecutionContext) -> RuntimeResult<&dyn Runtime> {
|
||||
if self.native.can_execute(context) {
|
||||
debug!("Selected Native runtime for action: {}", context.action_ref);
|
||||
Ok(&self.native)
|
||||
} else if self.python.can_execute(context) {
|
||||
debug!("Selected Python runtime for action: {}", context.action_ref);
|
||||
Ok(&self.python)
|
||||
} else if self.shell.can_execute(context) {
|
||||
debug!("Selected Shell runtime for action: {}", context.action_ref);
|
||||
Ok(&self.shell)
|
||||
} else {
|
||||
Err(RuntimeError::RuntimeNotFound(format!(
|
||||
"No suitable local runtime found for action: {}",
|
||||
context.action_ref
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for LocalRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Runtime for LocalRuntime {
|
||||
fn name(&self) -> &str {
|
||||
"local"
|
||||
}
|
||||
|
||||
fn can_execute(&self, context: &ExecutionContext) -> bool {
|
||||
self.native.can_execute(context)
|
||||
|| self.python.can_execute(context)
|
||||
|| self.shell.can_execute(context)
|
||||
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing local action: {} (execution_id: {})",
|
||||
context.action_ref, context.execution_id
|
||||
);
|
||||
|
||||
let runtime = self.select_runtime(&context)?;
|
||||
runtime.execute(context).await
|
||||
}
|
||||
|
||||
async fn setup(&self) -> RuntimeResult<()> {
|
||||
info!("Setting up Local runtime");
|
||||
|
||||
self.native.setup().await?;
|
||||
self.python.setup().await?;
|
||||
self.shell.setup().await?;
|
||||
|
||||
info!("Local runtime setup complete");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cleanup(&self) -> RuntimeResult<()> {
|
||||
info!("Cleaning up Local runtime");
|
||||
|
||||
self.native.cleanup().await?;
|
||||
self.python.cleanup().await?;
|
||||
self.shell.cleanup().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate(&self) -> RuntimeResult<()> {
|
||||
debug!("Validating Local runtime");
|
||||
|
||||
self.native.validate().await?;
|
||||
self.python.validate().await?;
|
||||
self.shell.validate().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    // Delegation to the Python runtime: inline code with a `run` entry
    // point and runtime_name = "python".
    #[tokio::test]
    async fn test_local_runtime_python() {
        let runtime = LocalRuntime::new();

        let context = ExecutionContext {
            execution_id: 1,
            action_ref: "test.python_action".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "run".to_string(),
            // NOTE(review): indentation inside this raw string is
            // Python-significant; reconstructed as 4 spaces — confirm.
            code: Some(
                r#"
def run():
    return "hello from python"
"#
                .to_string(),
            ),
            code_path: None,
            runtime_name: Some("python".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        assert!(runtime.can_execute(&context));
        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
    }

    // Delegation to the Shell runtime; also checks captured stdout.
    #[tokio::test]
    async fn test_local_runtime_shell() {
        let runtime = LocalRuntime::new();

        let context = ExecutionContext {
            execution_id: 2,
            action_ref: "test.shell_action".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "shell".to_string(),
            code: Some("echo 'hello from shell'".to_string()),
            code_path: None,
            runtime_name: Some("shell".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        assert!(runtime.can_execute(&context));
        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert!(result.stdout.contains("hello from shell"));
    }

    // With no runtime_name and an unrecognized entry point, no delegate
    // should accept the context.
    #[tokio::test]
    async fn test_local_runtime_unknown() {
        let runtime = LocalRuntime::new();

        let context = ExecutionContext {
            execution_id: 3,
            action_ref: "test.unknown_action".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "unknown".to_string(),
            code: Some("some code".to_string()),
            code_path: None,
            runtime_name: None,
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        assert!(!runtime.can_execute(&context));
    }
}
|
||||
300
crates/worker/src/runtime/log_writer.rs
Normal file
300
crates/worker/src/runtime/log_writer.rs
Normal file
@@ -0,0 +1,300 @@
|
||||
//! Log Writer Module
|
||||
//!
|
||||
//! Provides bounded log writers that limit output size to prevent OOM issues.
|
||||
|
||||
use std::pin::Pin;
|
||||
use std::task::{Context, Poll};
|
||||
use tokio::io::AsyncWrite;
|
||||
|
||||
// Notices appended to the captured output when the size limit is hit.
const TRUNCATION_NOTICE_STDOUT: &str = "\n\n[OUTPUT TRUNCATED: stdout exceeded size limit]\n";
const TRUNCATION_NOTICE_STDERR: &str = "\n\n[OUTPUT TRUNCATED: stderr exceeded size limit]\n";

// Reserve space for truncation notice so it can always fit
// (both notices above are well under 128 bytes).
const NOTICE_RESERVE_BYTES: usize = 128;

/// Result of bounded log writing
#[derive(Debug, Clone)]
pub struct BoundedLogResult {
    /// The captured log content (may end with a truncation notice)
    pub content: String,

    /// Whether the log was truncated
    pub truncated: bool,

    /// Number of bytes truncated (0 if not truncated)
    pub bytes_truncated: usize,

    /// Total bytes attempted to write (including discarded bytes)
    pub total_bytes_attempted: usize,
}
|
||||
|
||||
impl BoundedLogResult {
|
||||
/// Create a new result with no truncation
|
||||
pub fn new(content: String) -> Self {
|
||||
let len = content.len();
|
||||
Self {
|
||||
content,
|
||||
truncated: false,
|
||||
bytes_truncated: 0,
|
||||
total_bytes_attempted: len,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a truncated result
|
||||
pub fn truncated(
|
||||
content: String,
|
||||
bytes_truncated: usize,
|
||||
total_bytes_attempted: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
content,
|
||||
truncated: true,
|
||||
bytes_truncated,
|
||||
total_bytes_attempted,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A writer that limits the amount of data captured and adds a truncation notice
///
/// Bytes beyond the limit are silently discarded while still being
/// reported as written, so the producing process never sees backpressure.
pub struct BoundedLogWriter {
    /// Internal buffer for captured data
    buffer: Vec<u8>,

    /// Maximum bytes to capture (includes the reserved notice space)
    max_bytes: usize,

    /// Whether we've already truncated and added the notice
    truncated: bool,

    /// Total bytes attempted to write (including truncated)
    total_bytes_attempted: usize,

    /// Actual data bytes written to buffer (excluding truncation notice)
    data_bytes_written: usize,

    /// Truncation notice to append when limit is reached
    truncation_notice: &'static str,
}
|
||||
|
||||
impl BoundedLogWriter {
|
||||
/// Create a new bounded log writer for stdout
|
||||
pub fn new_stdout(max_bytes: usize) -> Self {
|
||||
Self {
|
||||
buffer: Vec::with_capacity(std::cmp::min(max_bytes, 1024 * 1024)),
|
||||
max_bytes,
|
||||
truncated: false,
|
||||
total_bytes_attempted: 0,
|
||||
data_bytes_written: 0,
|
||||
truncation_notice: TRUNCATION_NOTICE_STDOUT,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new bounded log writer for stderr
|
||||
pub fn new_stderr(max_bytes: usize) -> Self {
|
||||
Self {
|
||||
buffer: Vec::with_capacity(std::cmp::min(max_bytes, 1024 * 1024)),
|
||||
max_bytes,
|
||||
truncated: false,
|
||||
total_bytes_attempted: 0,
|
||||
data_bytes_written: 0,
|
||||
truncation_notice: TRUNCATION_NOTICE_STDERR,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the result with truncation information
|
||||
pub fn into_result(self) -> BoundedLogResult {
|
||||
let content = String::from_utf8_lossy(&self.buffer).to_string();
|
||||
|
||||
if self.truncated {
|
||||
BoundedLogResult::truncated(
|
||||
content,
|
||||
self.total_bytes_attempted
|
||||
.saturating_sub(self.data_bytes_written),
|
||||
self.total_bytes_attempted,
|
||||
)
|
||||
} else {
|
||||
BoundedLogResult::new(content)
|
||||
}
|
||||
}
|
||||
|
||||
/// Write data to the buffer, respecting size limits
|
||||
fn write_bounded(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.total_bytes_attempted = self.total_bytes_attempted.saturating_add(buf.len());
|
||||
|
||||
// If already truncated, discard all further writes
|
||||
if self.truncated {
|
||||
return Ok(buf.len()); // Pretend we wrote it all
|
||||
}
|
||||
|
||||
let current_size = self.buffer.len();
|
||||
// Reserve space for truncation notice
|
||||
let effective_limit = self.max_bytes.saturating_sub(NOTICE_RESERVE_BYTES);
|
||||
let remaining_space = effective_limit.saturating_sub(current_size);
|
||||
|
||||
if remaining_space == 0 {
|
||||
// Already at limit, add truncation notice if not already added
|
||||
if !self.truncated {
|
||||
self.add_truncation_notice();
|
||||
}
|
||||
return Ok(buf.len()); // Pretend we wrote it all
|
||||
}
|
||||
|
||||
// Calculate how much we can actually write
|
||||
let bytes_to_write = std::cmp::min(buf.len(), remaining_space);
|
||||
|
||||
if bytes_to_write < buf.len() {
|
||||
// We're about to hit the limit
|
||||
self.buffer.extend_from_slice(&buf[..bytes_to_write]);
|
||||
self.data_bytes_written += bytes_to_write;
|
||||
self.add_truncation_notice();
|
||||
} else {
|
||||
// We can write everything
|
||||
self.buffer.extend_from_slice(&buf[..bytes_to_write]);
|
||||
self.data_bytes_written += bytes_to_write;
|
||||
}
|
||||
|
||||
Ok(buf.len()) // Always report full write to avoid backpressure issues
|
||||
}
|
||||
|
||||
/// Add truncation notice to the buffer
|
||||
fn add_truncation_notice(&mut self) {
|
||||
self.truncated = true;
|
||||
|
||||
let notice_bytes = self.truncation_notice.as_bytes();
|
||||
// We reserved space, so the notice should always fit
|
||||
self.buffer.extend_from_slice(notice_bytes);
|
||||
}
|
||||
}
|
||||
|
||||
impl AsyncWrite for BoundedLogWriter {
    /// Never pending: delegates synchronously to `write_bounded`, which
    /// always reports the full buffer length (excess bytes are discarded).
    fn poll_write(
        mut self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<std::io::Result<usize>> {
        Poll::Ready(self.write_bounded(buf))
    }

    /// Nothing to flush: data lives in an in-memory buffer.
    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {
        Poll::Ready(Ok(()))
    }

    /// Shutdown is a no-op for an in-memory writer.
    fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {
        Poll::Ready(Ok(()))
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use tokio::io::AsyncWriteExt;

    // Output below the limit is captured verbatim with no truncation flag.
    #[tokio::test]
    async fn test_bounded_writer_under_limit() {
        let mut writer = BoundedLogWriter::new_stdout(1024);
        let data = b"Hello, world!";

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert_eq!(result.content, "Hello, world!");
        assert!(!result.truncated);
        assert_eq!(result.bytes_truncated, 0);
        assert_eq!(result.total_bytes_attempted, 13);
    }

    // Filling the buffer exactly to the effective limit is not truncation.
    #[tokio::test]
    async fn test_bounded_writer_at_limit() {
        // With 178 bytes, we can fit 50 bytes (178 - 128 reserve = 50)
        let mut writer = BoundedLogWriter::new_stdout(178);
        let data = b"12345678901234567890123456789012345678901234567890"; // 50 bytes

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert_eq!(result.content.len(), 50);
        assert!(!result.truncated);
        assert_eq!(result.bytes_truncated, 0);
    }

    // Exceeding the limit sets the flag and appends the notice.
    #[tokio::test]
    async fn test_bounded_writer_exceeds_limit() {
        // 148 bytes means effective limit is 20 (148 - 128 = 20)
        let mut writer = BoundedLogWriter::new_stdout(148);
        let data = b"This is a long message that exceeds the limit";

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert!(result.truncated);
        assert!(result.content.contains("[OUTPUT TRUNCATED"));
        assert!(result.bytes_truncated > 0);
        assert_eq!(result.total_bytes_attempted, 45);
    }

    // Truncation triggers mid-stream across several small writes.
    #[tokio::test]
    async fn test_bounded_writer_multiple_writes() {
        // 148 bytes means effective limit is 20 (148 - 128 = 20)
        let mut writer = BoundedLogWriter::new_stdout(148);

        writer.write_all(b"First ").await.unwrap(); // 6 bytes
        writer.write_all(b"Second ").await.unwrap(); // 7 bytes = 13 total
        writer.write_all(b"Third ").await.unwrap(); // 6 bytes = 19 total
        writer.write_all(b"Fourth ").await.unwrap(); // 7 bytes = 26 total, exceeds 20 limit

        let result = writer.into_result();
        assert!(result.truncated);
        assert!(result.content.contains("[OUTPUT TRUNCATED"));
        assert_eq!(result.total_bytes_attempted, 26);
    }

    // The stderr constructor uses the stderr-specific notice text.
    #[tokio::test]
    async fn test_bounded_writer_stderr_notice() {
        // 143 bytes means effective limit is 15 (143 - 128 = 15)
        let mut writer = BoundedLogWriter::new_stderr(143);
        let data = b"Error message that is too long";

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert!(result.truncated);
        assert!(result.content.contains("stderr exceeded size limit"));
    }

    // A writer that never receives data yields an empty, untruncated result.
    #[tokio::test]
    async fn test_bounded_writer_empty() {
        let writer = BoundedLogWriter::new_stdout(1024);

        let result = writer.into_result();
        assert_eq!(result.content, "");
        assert!(!result.truncated);
        assert_eq!(result.bytes_truncated, 0);
        assert_eq!(result.total_bytes_attempted, 0);
    }

    // Boundary: writing exactly the effective limit does not truncate.
    #[tokio::test]
    async fn test_bounded_writer_exact_limit_no_truncation_notice() {
        // 138 bytes means effective limit is 10 (138 - 128 = 10)
        let mut writer = BoundedLogWriter::new_stdout(138);
        let data = b"1234567890"; // Exactly 10 bytes

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert_eq!(result.content, "1234567890");
        assert!(!result.truncated);
    }

    // Boundary: a single byte over the effective limit counts as truncated.
    #[tokio::test]
    async fn test_bounded_writer_one_byte_over() {
        // 138 bytes means effective limit is 10 (138 - 128 = 10)
        let mut writer = BoundedLogWriter::new_stdout(138);
        let data = b"12345678901"; // 11 bytes

        writer.write_all(data).await.unwrap();

        let result = writer.into_result();
        assert!(result.truncated);
        assert_eq!(result.bytes_truncated, 1);
    }
}
|
||||
330
crates/worker/src/runtime/mod.rs
Normal file
330
crates/worker/src/runtime/mod.rs
Normal file
@@ -0,0 +1,330 @@
|
||||
//! Runtime Module
|
||||
//!
|
||||
//! Provides runtime abstraction and implementations for executing actions
|
||||
//! in different environments (Python, Shell, Node.js, Containers).
|
||||
|
||||
// Submodules of the runtime layer.
pub mod dependency;
pub mod local;
pub mod log_writer;
pub mod native;
pub mod python;
pub mod python_venv;
pub mod shell;

// Re-export runtime implementations
pub use local::LocalRuntime;
pub use native::NativeRuntime;
pub use python::PythonRuntime;
pub use shell::ShellRuntime;

use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use thiserror::Error;

// Re-export dependency management types
pub use dependency::{
    DependencyError, DependencyManager, DependencyManagerRegistry, DependencyResult,
    DependencySpec, EnvironmentInfo,
};
pub use log_writer::{BoundedLogResult, BoundedLogWriter};
pub use python_venv::PythonVenvManager;
|
||||
|
||||
/// Runtime execution result
pub type RuntimeResult<T> = std::result::Result<T, RuntimeError>;

/// Runtime execution errors
///
/// IO and serde-JSON errors convert automatically via `#[from]`.
#[derive(Debug, Error)]
pub enum RuntimeError {
    /// The action ran but failed.
    #[error("Execution failed: {0}")]
    ExecutionFailed(String),

    /// The action exceeded its timeout (seconds).
    #[error("Timeout after {0} seconds")]
    Timeout(u64),

    /// No runtime was able or willing to execute the action.
    #[error("Runtime not found: {0}")]
    RuntimeNotFound(String),

    /// The action definition itself was invalid.
    #[error("Invalid action: {0}")]
    InvalidAction(String),

    /// Underlying filesystem/IO failure.
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),

    /// JSON (de)serialization failure.
    #[error("Serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),

    /// The spawned process misbehaved (could not start, crashed, etc.).
    #[error("Process error: {0}")]
    ProcessError(String),

    /// Runtime setup failed.
    #[error("Setup error: {0}")]
    SetupError(String),

    /// Runtime cleanup failed.
    #[error("Cleanup error: {0}")]
    CleanupError(String),
}
|
||||
|
||||
/// Action execution context
///
/// Everything a [`Runtime`] needs to execute one action: identity,
/// parameters, environment, the code itself, and output limits.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionContext {
    /// Execution ID
    pub execution_id: i64,

    /// Action reference (pack.action)
    pub action_ref: String,

    /// Action parameters
    pub parameters: HashMap<String, serde_json::Value>,

    /// Environment variables
    pub env: HashMap<String, String>,

    /// Secrets (passed securely via stdin, not environment variables)
    pub secrets: HashMap<String, String>,

    /// Execution timeout in seconds (None = no timeout)
    pub timeout: Option<u64>,

    /// Working directory (None = runtime default)
    pub working_dir: Option<PathBuf>,

    /// Action entry point (script, function, etc.)
    pub entry_point: String,

    /// Action code/script content
    pub code: Option<String>,

    /// Action code file path (alternative to code)
    pub code_path: Option<PathBuf>,

    /// Runtime name (python, shell, etc.) - used to select the correct runtime
    pub runtime_name: Option<String>,

    /// Maximum stdout size in bytes (for log truncation);
    /// defaults to 10 MB when absent from serialized input
    #[serde(default = "default_max_log_bytes")]
    pub max_stdout_bytes: usize,

    /// Maximum stderr size in bytes (for log truncation);
    /// defaults to 10 MB when absent from serialized input
    #[serde(default = "default_max_log_bytes")]
    pub max_stderr_bytes: usize,
}

/// Serde default for the stdout/stderr byte limits.
fn default_max_log_bytes() -> usize {
    10 * 1024 * 1024 // 10MB
}
|
||||
|
||||
impl ExecutionContext {
|
||||
/// Create a test context with default values (for tests)
|
||||
#[cfg(test)]
|
||||
pub fn test_context(action_ref: String, code: Option<String>) -> Self {
|
||||
use std::collections::HashMap;
|
||||
Self {
|
||||
execution_id: 1,
|
||||
action_ref,
|
||||
parameters: HashMap::new(),
|
||||
env: HashMap::new(),
|
||||
secrets: HashMap::new(),
|
||||
timeout: Some(10),
|
||||
working_dir: None,
|
||||
entry_point: "run".to_string(),
|
||||
code,
|
||||
code_path: None,
|
||||
runtime_name: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Action execution result
///
/// Captures process outcome, raw output streams, parsed result data, and
/// output-truncation bookkeeping.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionResult {
    /// Exit code (0 = success)
    pub exit_code: i32,

    /// Standard output
    pub stdout: String,

    /// Standard error
    pub stderr: String,

    /// Execution result data (parsed from stdout or returned by action)
    pub result: Option<serde_json::Value>,

    /// Execution duration in milliseconds
    pub duration_ms: u64,

    /// Error message if execution failed
    pub error: Option<String>,

    /// Whether stdout was truncated due to size limits
    #[serde(default)]
    pub stdout_truncated: bool,

    /// Whether stderr was truncated due to size limits
    #[serde(default)]
    pub stderr_truncated: bool,

    /// Number of bytes truncated from stdout (0 if not truncated)
    #[serde(default)]
    pub stdout_bytes_truncated: usize,

    /// Number of bytes truncated from stderr (0 if not truncated)
    #[serde(default)]
    pub stderr_bytes_truncated: usize,
}
|
||||
|
||||
impl ExecutionResult {
|
||||
/// Check if execution was successful
|
||||
pub fn is_success(&self) -> bool {
|
||||
self.exit_code == 0 && self.error.is_none()
|
||||
}
|
||||
|
||||
/// Create a success result
|
||||
pub fn success(stdout: String, result: Option<serde_json::Value>, duration_ms: u64) -> Self {
|
||||
Self {
|
||||
exit_code: 0,
|
||||
stdout,
|
||||
stderr: String::new(),
|
||||
result,
|
||||
duration_ms,
|
||||
error: None,
|
||||
stdout_truncated: false,
|
||||
stderr_truncated: false,
|
||||
stdout_bytes_truncated: 0,
|
||||
stderr_bytes_truncated: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a failure result
|
||||
pub fn failure(exit_code: i32, stderr: String, error: String, duration_ms: u64) -> Self {
|
||||
Self {
|
||||
exit_code,
|
||||
stdout: String::new(),
|
||||
stderr,
|
||||
result: None,
|
||||
duration_ms,
|
||||
error: Some(error),
|
||||
stdout_truncated: false,
|
||||
stderr_truncated: false,
|
||||
stdout_bytes_truncated: 0,
|
||||
stderr_bytes_truncated: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Runtime trait for executing actions
///
/// Implemented once per execution strategy (native binaries, Python, ...).
/// `setup`/`cleanup`/`validate` have no-op defaults, so a simple runtime only
/// needs to provide `name`, `can_execute`, and `execute`.
#[async_trait]
pub trait Runtime: Send + Sync {
    /// Get the runtime name
    ///
    /// Matched against `ExecutionContext::runtime_name` by the registry.
    fn name(&self) -> &str;

    /// Check if this runtime can execute the given action
    ///
    /// Used as a fallback by the registry when no explicit runtime name is
    /// set on the context.
    fn can_execute(&self, context: &ExecutionContext) -> bool;

    /// Execute an action
    async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult>;

    /// Setup the runtime environment (called once on worker startup)
    async fn setup(&self) -> RuntimeResult<()> {
        Ok(())
    }

    /// Cleanup the runtime environment (called on worker shutdown)
    async fn cleanup(&self) -> RuntimeResult<()> {
        Ok(())
    }

    /// Validate the runtime is properly configured
    async fn validate(&self) -> RuntimeResult<()> {
        Ok(())
    }
}
|
||||
|
||||
/// Runtime registry for managing multiple runtime implementations
///
/// Registration order matters: when no explicit runtime name is given,
/// `get_runtime` returns the first runtime whose `can_execute` accepts the
/// context.
pub struct RuntimeRegistry {
    // Boxed trait objects so heterogeneous runtime implementations can share
    // one collection.
    runtimes: Vec<Box<dyn Runtime>>,
}
|
||||
|
||||
impl RuntimeRegistry {
|
||||
/// Create a new runtime registry
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
runtimes: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Register a runtime
|
||||
pub fn register(&mut self, runtime: Box<dyn Runtime>) {
|
||||
self.runtimes.push(runtime);
|
||||
}
|
||||
|
||||
/// Get a runtime that can execute the given context
|
||||
pub fn get_runtime(&self, context: &ExecutionContext) -> RuntimeResult<&dyn Runtime> {
|
||||
// If runtime_name is specified, use it to select the runtime directly
|
||||
if let Some(ref runtime_name) = context.runtime_name {
|
||||
return self
|
||||
.runtimes
|
||||
.iter()
|
||||
.find(|r| r.name() == runtime_name)
|
||||
.map(|r| r.as_ref())
|
||||
.ok_or_else(|| {
|
||||
RuntimeError::RuntimeNotFound(format!(
|
||||
"Runtime '{}' not found for action: {} (available: {})",
|
||||
runtime_name,
|
||||
context.action_ref,
|
||||
self.list_runtimes().join(", ")
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
// Otherwise, fall back to can_execute check
|
||||
self.runtimes
|
||||
.iter()
|
||||
.find(|r| r.can_execute(context))
|
||||
.map(|r| r.as_ref())
|
||||
.ok_or_else(|| {
|
||||
RuntimeError::RuntimeNotFound(format!(
|
||||
"No runtime found for action: {} (available: {})",
|
||||
context.action_ref,
|
||||
self.list_runtimes().join(", ")
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
/// Setup all registered runtimes
|
||||
pub async fn setup_all(&self) -> RuntimeResult<()> {
|
||||
for runtime in &self.runtimes {
|
||||
runtime.setup().await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Cleanup all registered runtimes
|
||||
pub async fn cleanup_all(&self) -> RuntimeResult<()> {
|
||||
for runtime in &self.runtimes {
|
||||
runtime.cleanup().await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Validate all registered runtimes
|
||||
pub async fn validate_all(&self) -> RuntimeResult<()> {
|
||||
for runtime in &self.runtimes {
|
||||
runtime.validate().await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// List all registered runtimes
|
||||
pub fn list_runtimes(&self) -> Vec<&str> {
|
||||
self.runtimes.iter().map(|r| r.name()).collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for RuntimeRegistry {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
493
crates/worker/src/runtime/native.rs
Normal file
493
crates/worker/src/runtime/native.rs
Normal file
@@ -0,0 +1,493 @@
|
||||
//! Native Runtime
|
||||
//!
|
||||
//! Executes compiled native binaries directly without any shell or interpreter wrapper.
|
||||
//! This runtime is used for Rust binaries and other compiled executables.
|
||||
|
||||
use super::{
|
||||
BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use std::process::Stdio;
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::process::Command;
|
||||
use tokio::time::{timeout, Duration};
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Native runtime for executing compiled binaries
///
/// Runs the binary at the context's `code_path` directly, with no shell or
/// interpreter in between.
pub struct NativeRuntime {
    // Optional working directory for spawned processes; when `None` the
    // child inherits the worker's current directory.
    work_dir: Option<std::path::PathBuf>,
}
|
||||
|
||||
impl NativeRuntime {
|
||||
/// Create a new native runtime
|
||||
pub fn new() -> Self {
|
||||
Self { work_dir: None }
|
||||
}
|
||||
|
||||
/// Create a native runtime with custom working directory
|
||||
pub fn with_work_dir(work_dir: std::path::PathBuf) -> Self {
|
||||
Self {
|
||||
work_dir: Some(work_dir),
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute a native binary with parameters and environment variables
|
||||
async fn execute_binary(
|
||||
&self,
|
||||
binary_path: std::path::PathBuf,
|
||||
parameters: &std::collections::HashMap<String, serde_json::Value>,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
exec_timeout: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
let start = Instant::now();
|
||||
|
||||
// Check if binary exists and is executable
|
||||
if !binary_path.exists() {
|
||||
return Err(RuntimeError::ExecutionFailed(format!(
|
||||
"Binary not found: {}",
|
||||
binary_path.display()
|
||||
)));
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let metadata = std::fs::metadata(&binary_path)?;
|
||||
let permissions = metadata.permissions();
|
||||
if permissions.mode() & 0o111 == 0 {
|
||||
return Err(RuntimeError::ExecutionFailed(format!(
|
||||
"Binary is not executable: {}",
|
||||
binary_path.display()
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
debug!("Executing native binary: {}", binary_path.display());
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&binary_path);
|
||||
|
||||
// Set working directory
|
||||
if let Some(ref work_dir) = self.work_dir {
|
||||
cmd.current_dir(work_dir);
|
||||
}
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
// Add parameters as environment variables with ATTUNE_ACTION_ prefix
|
||||
for (key, value) in parameters {
|
||||
let value_str = match value {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
_ => serde_json::to_string(value)?,
|
||||
};
|
||||
cmd.env(format!("ATTUNE_ACTION_{}", key.to_uppercase()), value_str);
|
||||
}
|
||||
|
||||
// Configure stdio
|
||||
cmd.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped());
|
||||
|
||||
// Spawn process
|
||||
let mut child = cmd
|
||||
.spawn()
|
||||
.map_err(|e| RuntimeError::ExecutionFailed(format!("Failed to spawn binary: {}", e)))?;
|
||||
|
||||
// Write secrets to stdin - if this fails, the process has already started
|
||||
// so we should continue and capture whatever output we can
|
||||
let stdin_write_error = if !secrets.is_empty() {
|
||||
if let Some(mut stdin) = child.stdin.take() {
|
||||
match serde_json::to_string(secrets) {
|
||||
Ok(secrets_json) => {
|
||||
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
|
||||
Some(format!("Failed to write secrets to stdin: {}", e))
|
||||
} else if let Err(e) = stdin.shutdown().await {
|
||||
Some(format!("Failed to close stdin: {}", e))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Err(e) => Some(format!("Failed to serialize secrets: {}", e)),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
if let Some(stdin) = child.stdin.take() {
|
||||
drop(stdin); // Close stdin if no secrets
|
||||
}
|
||||
None
|
||||
};
|
||||
|
||||
// Capture stdout and stderr with size limits
|
||||
let stdout_handle = child
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| RuntimeError::ProcessError("Failed to capture stdout".to_string()))?;
|
||||
let stderr_handle = child
|
||||
.stderr
|
||||
.take()
|
||||
.ok_or_else(|| RuntimeError::ProcessError("Failed to capture stderr".to_string()))?;
|
||||
|
||||
let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
|
||||
let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);
|
||||
|
||||
// Create buffered readers
|
||||
let mut stdout_reader = BufReader::new(stdout_handle);
|
||||
let mut stderr_reader = BufReader::new(stderr_handle);
|
||||
|
||||
// Stream both outputs concurrently
|
||||
let stdout_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stdout_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stdout_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stdout_writer
|
||||
};
|
||||
|
||||
let stderr_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stderr_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stderr_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stderr_writer
|
||||
};
|
||||
|
||||
// Wait for both streams to complete
|
||||
let (stdout_writer, stderr_writer) = tokio::join!(stdout_task, stderr_task);
|
||||
|
||||
// Wait for process with timeout
|
||||
let wait_result = if let Some(timeout_secs) = exec_timeout {
|
||||
match timeout(Duration::from_secs(timeout_secs), child.wait()).await {
|
||||
Ok(result) => result,
|
||||
Err(_) => {
|
||||
warn!(
|
||||
"Native binary execution timed out after {} seconds",
|
||||
timeout_secs
|
||||
);
|
||||
let _ = child.kill().await;
|
||||
return Err(RuntimeError::Timeout(timeout_secs));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
child.wait().await
|
||||
};
|
||||
|
||||
let status = wait_result.map_err(|e| {
|
||||
RuntimeError::ExecutionFailed(format!("Failed to wait for process: {}", e))
|
||||
})?;
|
||||
|
||||
let duration_ms = start.elapsed().as_millis() as u64;
|
||||
let exit_code = status.code().unwrap_or(-1);
|
||||
|
||||
// Extract logs with truncation info
|
||||
let stdout_log = stdout_writer.into_result();
|
||||
let stderr_log = stderr_writer.into_result();
|
||||
|
||||
debug!(
|
||||
"Native binary completed with exit code {} in {}ms",
|
||||
exit_code, duration_ms
|
||||
);
|
||||
|
||||
if stdout_log.truncated {
|
||||
warn!(
|
||||
"stdout truncated: {} bytes over limit",
|
||||
stdout_log.bytes_truncated
|
||||
);
|
||||
}
|
||||
if stderr_log.truncated {
|
||||
warn!(
|
||||
"stderr truncated: {} bytes over limit",
|
||||
stderr_log.bytes_truncated
|
||||
);
|
||||
}
|
||||
|
||||
// Parse result from stdout if successful
|
||||
let result = if exit_code == 0 {
|
||||
serde_json::from_str(&stdout_log.content).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Determine error message
|
||||
let error = if exit_code != 0 {
|
||||
Some(format!(
|
||||
"Native binary exited with code {}: {}",
|
||||
exit_code,
|
||||
stderr_log.content.trim()
|
||||
))
|
||||
} else if let Some(stdin_err) = stdin_write_error {
|
||||
// Ignore broken pipe errors for fast-exiting successful actions
|
||||
// These occur when the process exits before we finish writing secrets to stdin
|
||||
let is_broken_pipe =
|
||||
stdin_err.contains("Broken pipe") || stdin_err.contains("os error 32");
|
||||
let is_fast_exit = duration_ms < 500;
|
||||
let is_success = exit_code == 0;
|
||||
|
||||
if is_broken_pipe && is_fast_exit && is_success {
|
||||
debug!(
|
||||
"Ignoring broken pipe error for fast-exiting successful action ({}ms)",
|
||||
duration_ms
|
||||
);
|
||||
None
|
||||
} else {
|
||||
Some(stdin_err)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(ExecutionResult {
|
||||
exit_code,
|
||||
stdout: stdout_log.content,
|
||||
stderr: stderr_log.content,
|
||||
result,
|
||||
duration_ms,
|
||||
error,
|
||||
stdout_truncated: stdout_log.truncated,
|
||||
stderr_truncated: stderr_log.truncated,
|
||||
stdout_bytes_truncated: stdout_log.bytes_truncated,
|
||||
stderr_bytes_truncated: stderr_log.bytes_truncated,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for NativeRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Runtime for NativeRuntime {
|
||||
fn name(&self) -> &str {
|
||||
"native"
|
||||
}
|
||||
|
||||
fn can_execute(&self, context: &ExecutionContext) -> bool {
|
||||
// Check if runtime_name is explicitly set to "native"
|
||||
if let Some(ref runtime_name) = context.runtime_name {
|
||||
return runtime_name.to_lowercase() == "native";
|
||||
}
|
||||
|
||||
// Otherwise, check if code_path points to an executable binary
|
||||
// This is a heuristic - native binaries typically don't have common script extensions
|
||||
if let Some(ref code_path) = context.code_path {
|
||||
let extension = code_path.extension().and_then(|e| e.to_str()).unwrap_or("");
|
||||
|
||||
// Exclude common script extensions
|
||||
let is_script = matches!(
|
||||
extension,
|
||||
"py" | "js" | "sh" | "bash" | "rb" | "pl" | "php" | "lua"
|
||||
);
|
||||
|
||||
// If it's not a script and the file exists, it might be a native binary
|
||||
!is_script && code_path.exists()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing native action: {} (execution_id: {})",
|
||||
context.action_ref, context.execution_id
|
||||
);
|
||||
|
||||
// Get the binary path
|
||||
let binary_path = context.code_path.ok_or_else(|| {
|
||||
RuntimeError::InvalidAction("Native runtime requires code_path to be set".to_string())
|
||||
})?;
|
||||
|
||||
self.execute_binary(
|
||||
binary_path,
|
||||
&context.parameters,
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn setup(&self) -> RuntimeResult<()> {
|
||||
info!("Setting up Native runtime");
|
||||
|
||||
// Verify we can execute native binaries (basic check)
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::process::Command;
|
||||
let output = Command::new("uname").arg("-s").output().map_err(|e| {
|
||||
RuntimeError::SetupError(format!("Failed to verify native runtime: {}", e))
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Failed to execute native commands".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
debug!("Native runtime setup complete");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cleanup(&self) -> RuntimeResult<()> {
|
||||
info!("Cleaning up Native runtime");
|
||||
// No cleanup needed for native runtime
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate(&self) -> RuntimeResult<()> {
|
||||
debug!("Validating Native runtime");
|
||||
|
||||
// Basic validation - ensure we can execute commands
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::process::Command;
|
||||
Command::new("echo").arg("test").output().map_err(|e| {
|
||||
RuntimeError::SetupError(format!("Native runtime validation failed: {}", e))
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Unit tests for NativeRuntime: name/dispatch checks plus an end-to-end
// execution of a small executable script.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn test_native_runtime_name() {
        let runtime = NativeRuntime::new();
        assert_eq!(runtime.name(), "native");
    }

    // can_execute must match "native" case-insensitively and reject other
    // explicit runtime names.
    #[tokio::test]
    async fn test_native_runtime_can_execute() {
        let runtime = NativeRuntime::new();

        // Test with explicit runtime_name
        let mut context = ExecutionContext::test_context("test.action".to_string(), None);
        context.runtime_name = Some("native".to_string());
        assert!(runtime.can_execute(&context));

        // Test with uppercase runtime_name
        context.runtime_name = Some("NATIVE".to_string());
        assert!(runtime.can_execute(&context));

        // Test with wrong runtime_name
        context.runtime_name = Some("python".to_string());
        assert!(!runtime.can_execute(&context));
    }

    #[tokio::test]
    async fn test_native_runtime_setup() {
        let runtime = NativeRuntime::new();
        let result = runtime.setup().await;
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_native_runtime_validate() {
        let runtime = NativeRuntime::new();
        let result = runtime.validate().await;
        assert!(result.is_ok());
    }

    // End-to-end: a tiny shell script stands in for a compiled binary (the
    // runtime only checks the execute bit, not the file format).
    #[cfg(unix)]
    #[tokio::test]
    async fn test_native_runtime_execute_simple() {
        use std::fs;
        use std::os::unix::fs::PermissionsExt;
        use tempfile::TempDir;

        let temp_dir = TempDir::new().unwrap();
        let binary_path = temp_dir.path().join("test_binary.sh");

        // Create a simple shell script as our "binary"
        fs::write(
            &binary_path,
            "#!/bin/bash\necho 'Hello from native runtime'",
        )
        .unwrap();

        // Make it executable
        let metadata = fs::metadata(&binary_path).unwrap();
        let mut permissions = metadata.permissions();
        permissions.set_mode(0o755);
        fs::set_permissions(&binary_path, permissions).unwrap();

        let runtime = NativeRuntime::new();
        let mut context = ExecutionContext::test_context("test.native".to_string(), None);
        context.code_path = Some(binary_path);
        context.runtime_name = Some("native".to_string());

        let result = runtime.execute(context).await;
        assert!(result.is_ok());

        let exec_result = result.unwrap();
        assert_eq!(exec_result.exit_code, 0);
        assert!(exec_result.stdout.contains("Hello from native runtime"));
    }

    // A nonexistent binary path must surface as ExecutionFailed.
    #[tokio::test]
    async fn test_native_runtime_missing_binary() {
        let runtime = NativeRuntime::new();
        let mut context = ExecutionContext::test_context("test.native".to_string(), None);
        context.code_path = Some(std::path::PathBuf::from("/nonexistent/binary"));
        context.runtime_name = Some("native".to_string());

        let result = runtime.execute(context).await;
        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            RuntimeError::ExecutionFailed(_)
        ));
    }

    // Missing code_path is an InvalidAction error, not an execution failure.
    #[tokio::test]
    async fn test_native_runtime_no_code_path() {
        let runtime = NativeRuntime::new();
        let mut context = ExecutionContext::test_context("test.native".to_string(), None);
        context.runtime_name = Some("native".to_string());
        // code_path is None

        let result = runtime.execute(context).await;
        assert!(result.is_err());
        assert!(matches!(
            result.unwrap_err(),
            RuntimeError::InvalidAction(_)
        ));
    }
}
|
||||
752
crates/worker/src/runtime/python.rs
Normal file
752
crates/worker/src/runtime/python.rs
Normal file
@@ -0,0 +1,752 @@
|
||||
//! Python Runtime Implementation
|
||||
//!
|
||||
//! Executes Python actions using subprocess execution.
|
||||
|
||||
use super::{
|
||||
BoundedLogWriter, DependencyManagerRegistry, DependencySpec, ExecutionContext, ExecutionResult,
|
||||
Runtime, RuntimeError, RuntimeResult,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Stdio;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::process::Command;
|
||||
use tokio::time::timeout;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Python runtime for executing Python scripts and functions
pub struct PythonRuntime {
    /// Python interpreter path (fallback when no venv exists)
    python_path: PathBuf,

    /// Base directory for storing action code
    work_dir: PathBuf,

    /// Optional dependency manager registry for isolated environments
    /// (per-pack virtualenvs); `None` disables venv lookup entirely.
    dependency_manager: Option<Arc<DependencyManagerRegistry>>,
}
|
||||
|
||||
impl PythonRuntime {
|
||||
/// Create a new Python runtime
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
python_path: PathBuf::from("python3"),
|
||||
work_dir: PathBuf::from("/tmp/attune/actions"),
|
||||
dependency_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Python runtime with custom settings
|
||||
pub fn with_config(python_path: PathBuf, work_dir: PathBuf) -> Self {
|
||||
Self {
|
||||
python_path,
|
||||
work_dir,
|
||||
dependency_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Python runtime with dependency manager support
|
||||
pub fn with_dependency_manager(
|
||||
python_path: PathBuf,
|
||||
work_dir: PathBuf,
|
||||
dependency_manager: Arc<DependencyManagerRegistry>,
|
||||
) -> Self {
|
||||
Self {
|
||||
python_path,
|
||||
work_dir,
|
||||
dependency_manager: Some(dependency_manager),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the Python executable path to use for a given context
|
||||
///
|
||||
/// If the action has a pack_ref with dependencies, use the venv Python.
|
||||
/// Otherwise, use the default Python interpreter.
|
||||
async fn get_python_executable(&self, context: &ExecutionContext) -> RuntimeResult<PathBuf> {
|
||||
// Check if we have a dependency manager and can extract pack_ref
|
||||
if let Some(ref dep_mgr) = self.dependency_manager {
|
||||
// Extract pack_ref from action_ref (format: "pack_ref.action_name")
|
||||
if let Some(pack_ref) = context.action_ref.split('.').next() {
|
||||
// Try to get the executable path for this pack
|
||||
match dep_mgr.get_executable_path(pack_ref, "python").await {
|
||||
Ok(python_path) => {
|
||||
debug!(
|
||||
"Using pack-specific Python from venv: {}",
|
||||
python_path.display()
|
||||
);
|
||||
return Ok(python_path);
|
||||
}
|
||||
Err(e) => {
|
||||
// Venv doesn't exist or failed - this is OK if pack has no dependencies
|
||||
debug!(
|
||||
"No venv found for pack {} ({}), using default Python",
|
||||
pack_ref, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to default Python interpreter
|
||||
debug!("Using default Python interpreter: {:?}", self.python_path);
|
||||
Ok(self.python_path.clone())
|
||||
}
|
||||
|
||||
    /// Generate Python wrapper script that loads parameters and executes the action
    ///
    /// The generated script reads secrets as one JSON line from stdin (never
    /// the environment), decodes the action code from base64, `exec`s it in
    /// a controlled namespace that exposes `parameters` and `get_secret`,
    /// then calls the configured entry point (falling back to `run`, then
    /// `main`). Results are printed as JSON on stdout; failures print a JSON
    /// error object to stderr and exit 1. Note: the template is a `format!`
    /// string, so literal Python braces are doubled (`{{`/`}}`).
    fn generate_wrapper_script(&self, context: &ExecutionContext) -> RuntimeResult<String> {
        let params_json = serde_json::to_string(&context.parameters)?;

        // Use base64 encoding for code to avoid any quote/escape issues
        let code_bytes = context.code.as_deref().unwrap_or("").as_bytes();
        let code_base64 =
            base64::Engine::encode(&base64::engine::general_purpose::STANDARD, code_bytes);

        let wrapper = format!(
            r#"#!/usr/bin/env python3
import sys
import json
import traceback
import base64
from pathlib import Path

# Global secrets storage (read from stdin, NOT from environment)
_attune_secrets = {{}}

def get_secret(name):
    """
    Get a secret value by name.

    Secrets are passed securely via stdin and are never exposed in
    environment variables or process listings.

    Args:
        name (str): The name of the secret to retrieve

    Returns:
        str: The secret value, or None if not found
    """
    return _attune_secrets.get(name)

def main():
    global _attune_secrets

    try:
        # Read secrets from stdin FIRST (before executing action code)
        # This prevents secrets from being visible in process environment
        secrets_line = sys.stdin.readline().strip()
        if secrets_line:
            _attune_secrets = json.loads(secrets_line)

        # Parse parameters
        parameters = json.loads('''{}''')

        # Decode action code from base64 (avoids quote/escape issues)
        action_code = base64.b64decode('{}').decode('utf-8')

        # Execute the code in a controlled namespace
        # Include get_secret helper function
        namespace = {{
            '__name__': '__main__',
            'parameters': parameters,
            'get_secret': get_secret
        }}
        exec(action_code, namespace)

        # Look for main function or run function
        if '{}' in namespace:
            result = namespace['{}'](**parameters)
        elif 'run' in namespace:
            result = namespace['run'](**parameters)
        elif 'main' in namespace:
            result = namespace['main'](**parameters)
        else:
            # No entry point found, return the namespace (only JSON-serializable values)
            def is_json_serializable(obj):
                """Check if an object is JSON serializable"""
                if obj is None:
                    return True
                if isinstance(obj, (bool, int, float, str)):
                    return True
                if isinstance(obj, (list, tuple)):
                    return all(is_json_serializable(item) for item in obj)
                if isinstance(obj, dict):
                    return all(is_json_serializable(k) and is_json_serializable(v)
                               for k, v in obj.items())
                return False

            result = {{k: v for k, v in namespace.items()
                      if not k.startswith('__') and is_json_serializable(v)}}

        # Output result as JSON
        if result is not None:
            print(json.dumps({{'result': result, 'status': 'success'}}))
        else:
            print(json.dumps({{'status': 'success'}}))

        sys.exit(0)

    except Exception as e:
        error_info = {{
            'status': 'error',
            'error': str(e),
            'error_type': type(e).__name__,
            'traceback': traceback.format_exc()
        }}
        print(json.dumps(error_info), file=sys.stderr)
        sys.exit(1)

if __name__ == '__main__':
    main()
"#,
            params_json, code_base64, context.entry_point, context.entry_point
        );

        Ok(wrapper)
    }
|
||||
|
||||
/// Execute with streaming and bounded log collection
|
||||
async fn execute_with_streaming(
|
||||
&self,
|
||||
mut cmd: Command,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
let start = Instant::now();
|
||||
|
||||
// Spawn process with piped I/O
|
||||
let mut child = cmd
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
// Write secrets to stdin
|
||||
if let Some(mut stdin) = child.stdin.take() {
|
||||
let secrets_json = serde_json::to_string(secrets)?;
|
||||
stdin.write_all(secrets_json.as_bytes()).await?;
|
||||
stdin.write_all(b"\n").await?;
|
||||
drop(stdin);
|
||||
}
|
||||
|
||||
// Create bounded writers
|
||||
let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
|
||||
let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);
|
||||
|
||||
// Take stdout and stderr streams
|
||||
let stdout = child.stdout.take().expect("stdout not captured");
|
||||
let stderr = child.stderr.take().expect("stderr not captured");
|
||||
|
||||
// Create buffered readers
|
||||
let mut stdout_reader = BufReader::new(stdout);
|
||||
let mut stderr_reader = BufReader::new(stderr);
|
||||
|
||||
// Stream both outputs concurrently
|
||||
let stdout_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stdout_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stdout_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stdout_writer
|
||||
};
|
||||
|
||||
let stderr_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stderr_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stderr_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stderr_writer
|
||||
};
|
||||
|
||||
// Wait for both streams and the process
|
||||
let (stdout_writer, stderr_writer, wait_result) =
|
||||
tokio::join!(stdout_task, stderr_task, async {
|
||||
if let Some(timeout_secs) = timeout_secs {
|
||||
timeout(std::time::Duration::from_secs(timeout_secs), child.wait()).await
|
||||
} else {
|
||||
Ok(child.wait().await)
|
||||
}
|
||||
});
|
||||
|
||||
let duration_ms = start.elapsed().as_millis() as u64;
|
||||
|
||||
// Handle timeout
|
||||
let status = match wait_result {
|
||||
Ok(Ok(status)) => status,
|
||||
Ok(Err(e)) => {
|
||||
return Err(RuntimeError::ProcessError(format!(
|
||||
"Process wait failed: {}",
|
||||
e
|
||||
)));
|
||||
}
|
||||
Err(_) => {
|
||||
return Ok(ExecutionResult {
|
||||
exit_code: -1,
|
||||
stdout: String::new(),
|
||||
stderr: String::new(),
|
||||
result: None,
|
||||
duration_ms,
|
||||
error: Some(format!(
|
||||
"Execution timed out after {} seconds",
|
||||
timeout_secs.unwrap()
|
||||
)),
|
||||
stdout_truncated: false,
|
||||
stderr_truncated: false,
|
||||
stdout_bytes_truncated: 0,
|
||||
stderr_bytes_truncated: 0,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Get results from bounded writers
|
||||
let stdout_result = stdout_writer.into_result();
|
||||
let stderr_result = stderr_writer.into_result();
|
||||
|
||||
let exit_code = status.code().unwrap_or(-1);
|
||||
|
||||
debug!(
|
||||
"Python execution completed: exit_code={}, duration={}ms, stdout_truncated={}, stderr_truncated={}",
|
||||
exit_code, duration_ms, stdout_result.truncated, stderr_result.truncated
|
||||
);
|
||||
|
||||
// Try to parse result from stdout
|
||||
let result = if exit_code == 0 {
|
||||
stdout_result
|
||||
.content
|
||||
.lines()
|
||||
.last()
|
||||
.and_then(|line| serde_json::from_str(line).ok())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(ExecutionResult {
|
||||
exit_code,
|
||||
stdout: stdout_result.content.clone(),
|
||||
stderr: stderr_result.content.clone(),
|
||||
result,
|
||||
duration_ms,
|
||||
error: if exit_code != 0 {
|
||||
Some(stderr_result.content)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
stdout_truncated: stdout_result.truncated,
|
||||
stderr_truncated: stderr_result.truncated,
|
||||
stdout_bytes_truncated: stdout_result.bytes_truncated,
|
||||
stderr_bytes_truncated: stderr_result.bytes_truncated,
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute_python_code(
|
||||
&self,
|
||||
script: String,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
python_path: PathBuf,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing Python script with {} secrets (passed via stdin)",
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&python_path);
|
||||
cmd.arg("-c").arg(&script);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Execute Python script from file
|
||||
async fn execute_python_file(
|
||||
&self,
|
||||
code_path: PathBuf,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
python_path: PathBuf,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing Python file: {:?} with {} secrets",
|
||||
code_path,
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&python_path);
|
||||
cmd.arg(&code_path);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PythonRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PythonRuntime {
|
||||
/// Ensure pack dependencies are installed (called before execution if needed)
|
||||
///
|
||||
/// This is a helper method that can be called by the worker service to ensure
|
||||
/// a pack's Python dependencies are set up before executing actions.
|
||||
pub async fn ensure_pack_dependencies(
|
||||
&self,
|
||||
pack_ref: &str,
|
||||
spec: &DependencySpec,
|
||||
) -> RuntimeResult<()> {
|
||||
if let Some(ref dep_mgr) = self.dependency_manager {
|
||||
if spec.has_dependencies() {
|
||||
info!(
|
||||
"Ensuring Python dependencies for pack: {} ({} dependencies)",
|
||||
pack_ref,
|
||||
spec.dependencies.len()
|
||||
);
|
||||
|
||||
dep_mgr
|
||||
.ensure_environment(pack_ref, spec)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
RuntimeError::SetupError(format!(
|
||||
"Failed to setup Python environment for {}: {}",
|
||||
pack_ref, e
|
||||
))
|
||||
})?;
|
||||
|
||||
info!("Python dependencies ready for pack: {}", pack_ref);
|
||||
} else {
|
||||
debug!("Pack {} has no Python dependencies", pack_ref);
|
||||
}
|
||||
} else {
|
||||
warn!("Dependency manager not configured, skipping dependency isolation");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Runtime for PythonRuntime {
|
||||
fn name(&self) -> &str {
|
||||
"python"
|
||||
}
|
||||
|
||||
fn can_execute(&self, context: &ExecutionContext) -> bool {
|
||||
// Check if action reference suggests Python
|
||||
let is_python = context.action_ref.contains(".py")
|
||||
|| context.entry_point.ends_with(".py")
|
||||
|| context
|
||||
.code_path
|
||||
.as_ref()
|
||||
.map(|p| p.extension().and_then(|e| e.to_str()) == Some("py"))
|
||||
.unwrap_or(false);
|
||||
|
||||
is_python
|
||||
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing Python action: {} (execution_id: {})",
|
||||
context.action_ref, context.execution_id
|
||||
);
|
||||
|
||||
// Get the appropriate Python executable (venv or default)
|
||||
let python_path = self.get_python_executable(&context).await?;
|
||||
|
||||
// If code_path is provided, execute the file directly
|
||||
if let Some(code_path) = &context.code_path {
|
||||
return self
|
||||
.execute_python_file(
|
||||
code_path.clone(),
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
python_path,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
// Otherwise, generate wrapper script and execute
|
||||
let script = self.generate_wrapper_script(&context)?;
|
||||
self.execute_python_code(
|
||||
script,
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
python_path,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn setup(&self) -> RuntimeResult<()> {
|
||||
info!("Setting up Python runtime");
|
||||
|
||||
// Ensure work directory exists
|
||||
tokio::fs::create_dir_all(&self.work_dir)
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Failed to create work dir: {}", e)))?;
|
||||
|
||||
// Verify Python is available
|
||||
let output = Command::new(&self.python_path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
RuntimeError::SetupError(format!(
|
||||
"Python not found at {:?}: {}",
|
||||
self.python_path, e
|
||||
))
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Python interpreter is not working".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let version = String::from_utf8_lossy(&output.stdout);
|
||||
info!("Python runtime ready: {}", version.trim());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cleanup(&self) -> RuntimeResult<()> {
|
||||
info!("Cleaning up Python runtime");
|
||||
// Could clean up temporary files here
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate(&self) -> RuntimeResult<()> {
|
||||
debug!("Validating Python runtime");
|
||||
|
||||
// Check if Python is available
|
||||
let output = Command::new(&self.python_path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Python validation failed: {}", e)))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Python interpreter validation failed".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Integration-style tests for `PythonRuntime`.
    //!
    //! NOTE(review): these execute a real Python interpreter through the
    //! runtime's configured `python_path` — they require Python on the host
    //! (presumably `python3`; confirm against `PythonRuntime::new`).
    use super::*;
    use std::collections::HashMap;

    // Happy path: inline `run(x, y)` returning a sum should exit 0.
    #[tokio::test]
    async fn test_python_runtime_simple() {
        let runtime = PythonRuntime::new();

        let context = ExecutionContext {
            execution_id: 1,
            action_ref: "test.simple".to_string(),
            parameters: {
                let mut map = HashMap::new();
                map.insert("x".to_string(), serde_json::json!(5));
                map.insert("y".to_string(), serde_json::json!(10));
                map
            },
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "run".to_string(),
            code: Some(
                r#"
def run(x, y):
    return x + y
"#
                .to_string(),
            ),
            code_path: None,
            runtime_name: Some("python".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);
    }

    // A 10s sleep against a 1s timeout must fail with a timeout error.
    #[tokio::test]
    async fn test_python_runtime_timeout() {
        let runtime = PythonRuntime::new();

        let context = ExecutionContext {
            execution_id: 2,
            action_ref: "test.timeout".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(1),
            working_dir: None,
            entry_point: "run".to_string(),
            code: Some(
                r#"
import time
def run():
    time.sleep(10)
    return "done"
"#
                .to_string(),
            ),
            code_path: None,
            runtime_name: Some("python".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        assert!(result.error.is_some());
        let error_msg = result.error.unwrap();
        assert!(error_msg.contains("timeout") || error_msg.contains("timed out"));
    }

    // A raising action should produce a non-zero exit and an error message.
    #[tokio::test]
    async fn test_python_runtime_error() {
        let runtime = PythonRuntime::new();

        let context = ExecutionContext {
            execution_id: 3,
            action_ref: "test.error".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "run".to_string(),
            code: Some(
                r#"
def run():
    raise ValueError("Test error")
"#
                .to_string(),
            ),
            code_path: None,
            runtime_name: Some("python".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        assert!(result.error.is_some());
    }

    // Secrets must reach the action via the injected get_secret() helper,
    // and unknown keys must come back as null/None.
    #[tokio::test]
    async fn test_python_runtime_with_secrets() {
        let runtime = PythonRuntime::new();

        let context = ExecutionContext {
            execution_id: 4,
            action_ref: "test.secrets".to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: {
                let mut s = HashMap::new();
                s.insert("api_key".to_string(), "secret_key_12345".to_string());
                s.insert("db_password".to_string(), "super_secret_pass".to_string());
                s
            },
            timeout: Some(10),
            working_dir: None,
            entry_point: "run".to_string(),
            code: Some(
                r#"
def run():
    # Access secrets via get_secret() helper
    api_key = get_secret('api_key')
    db_pass = get_secret('db_password')
    missing = get_secret('nonexistent')

    return {
        'api_key': api_key,
        'db_pass': db_pass,
        'missing': missing
    }
"#
                .to_string(),
            ),
            code_path: None,
            runtime_name: Some("python".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        };

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);

        // Verify secrets are accessible in action code
        let result_data = result.result.unwrap();
        let result_obj = result_data.get("result").unwrap();
        assert_eq!(result_obj.get("api_key").unwrap(), "secret_key_12345");
        assert_eq!(result_obj.get("db_pass").unwrap(), "super_secret_pass");
        assert_eq!(result_obj.get("missing"), Some(&serde_json::Value::Null));
    }
}
|
||||
653
crates/worker/src/runtime/python_venv.rs
Normal file
653
crates/worker/src/runtime/python_venv.rs
Normal file
@@ -0,0 +1,653 @@
|
||||
//! Python Virtual Environment Manager
|
||||
//!
|
||||
//! Manages isolated Python virtual environments for packs with Python dependencies.
|
||||
//! Each pack gets its own venv to prevent dependency conflicts.
|
||||
|
||||
use super::dependency::{
|
||||
DependencyError, DependencyManager, DependencyResult, DependencySpec, EnvironmentInfo,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::process::Command;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Python virtual environment manager
///
/// Maintains one isolated venv per pack under `base_dir`, so different
/// packs' Python dependencies cannot conflict with each other.
pub struct PythonVenvManager {
    /// Base directory for all virtual environments
    base_dir: PathBuf,

    /// Python interpreter to use for creating venvs
    /// (defaults to `python3` on PATH — see `new`)
    python_path: PathBuf,

    /// Cache of environment info, keyed by pack_ref; populated on
    /// ensure/get and invalidated on remove.
    env_cache: tokio::sync::RwLock<HashMap<String, EnvironmentInfo>>,
}
|
||||
|
||||
/// Metadata stored with each environment
///
/// Serialized as JSON to `attune_metadata.json` inside the venv directory
/// (see `get_metadata_path`), so reuse decisions survive process restarts.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct VenvMetadata {
    /// Pack this venv belongs to.
    pack_ref: String,
    /// Dependency list the venv was built with.
    dependencies: Vec<String>,
    /// When the venv was first created.
    created_at: chrono::DateTime<chrono::Utc>,
    /// When the metadata was last written.
    updated_at: chrono::DateTime<chrono::Utc>,
    /// Output of `python --version` for the venv interpreter.
    python_version: String,
    /// Hash of the dependency spec; compared to detect changes.
    dependency_hash: String,
}
|
||||
|
||||
impl PythonVenvManager {
    /// Create a new Python venv manager
    ///
    /// Uses `python3` from PATH to build venvs; see `with_python_path` to
    /// supply a specific interpreter.
    pub fn new(base_dir: PathBuf) -> Self {
        Self {
            base_dir,
            python_path: PathBuf::from("python3"),
            env_cache: tokio::sync::RwLock::new(HashMap::new()),
        }
    }

    /// Create a new Python venv manager with custom Python path
    pub fn with_python_path(base_dir: PathBuf, python_path: PathBuf) -> Self {
        Self {
            base_dir,
            python_path,
            env_cache: tokio::sync::RwLock::new(HashMap::new()),
        }
    }

    /// Get the directory path for a pack's venv
    ///
    /// NOTE(review): `/`, `\` and `.` all map to `_`, so distinct refs such
    /// as "a.b" and "a/b" collide onto the same directory — confirm pack
    /// references can never differ only by separator characters.
    fn get_venv_path(&self, pack_ref: &str) -> PathBuf {
        // Sanitize pack_ref to create a valid directory name
        let safe_name = pack_ref.replace(['/', '\\', '.'], "_");
        self.base_dir.join(safe_name)
    }

    /// Get the Python executable path within a venv
    /// (Windows uses `Scripts\python.exe`, POSIX uses `bin/python`)
    fn get_venv_python(&self, venv_path: &Path) -> PathBuf {
        if cfg!(windows) {
            venv_path.join("Scripts").join("python.exe")
        } else {
            venv_path.join("bin").join("python")
        }
    }

    /// Get the pip executable path within a venv
    fn get_venv_pip(&self, venv_path: &Path) -> PathBuf {
        if cfg!(windows) {
            venv_path.join("Scripts").join("pip.exe")
        } else {
            venv_path.join("bin").join("pip")
        }
    }

    /// Get the metadata file path for a venv
    fn get_metadata_path(&self, venv_path: &Path) -> PathBuf {
        venv_path.join("attune_metadata.json")
    }

    /// Calculate a hash of dependencies for change detection
    ///
    /// Dependencies are sorted first, so order does not affect the hash.
    /// NOTE(review): `DefaultHasher` output is deterministic within one Rust
    /// release but not guaranteed stable across releases — a toolchain
    /// upgrade may invalidate existing venvs (causing a rebuild, not a
    /// correctness issue).
    fn calculate_dependency_hash(&self, spec: &DependencySpec) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};

        let mut hasher = DefaultHasher::new();

        // Sort dependencies for consistent hashing
        let mut deps = spec.dependencies.clone();
        deps.sort();

        for dep in &deps {
            dep.hash(&mut hasher);
        }

        if let Some(ref content) = spec.requirements_file_content {
            content.hash(&mut hasher);
        }

        format!("{:x}", hasher.finish())
    }

    /// Create a new virtual environment
    ///
    /// Runs `python -m venv --clear <path>` (clearing any previous venv at
    /// that path), then best-effort upgrades pip inside the new venv.
    async fn create_venv(&self, venv_path: &Path) -> DependencyResult<()> {
        info!(
            "Creating Python virtual environment at: {}",
            venv_path.display()
        );

        // Ensure base directory exists
        if let Some(parent) = venv_path.parent() {
            fs::create_dir_all(parent).await?;
        }

        // Create venv using python -m venv
        let output = Command::new(&self.python_path)
            .arg("-m")
            .arg("venv")
            .arg(venv_path)
            .arg("--clear") // Clear if exists
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await
            .map_err(|e| {
                DependencyError::CreateEnvironmentFailed(format!(
                    "Failed to spawn venv command: {}",
                    e
                ))
            })?;

        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(DependencyError::CreateEnvironmentFailed(format!(
                "venv creation failed: {}",
                stderr
            )));
        }

        // Upgrade pip to latest version
        let pip_path = self.get_venv_pip(venv_path);
        let output = Command::new(&pip_path)
            .arg("install")
            .arg("--upgrade")
            .arg("pip")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await
            .map_err(|e| DependencyError::InstallFailed(format!("Failed to upgrade pip: {}", e)))?;

        if !output.status.success() {
            // Deliberately non-fatal: the bundled pip usually still works.
            warn!("Failed to upgrade pip, continuing anyway");
        }

        info!("Virtual environment created successfully");
        Ok(())
    }

    /// Install dependencies in a venv
    ///
    /// Prefers `requirements_file_content` (written to a requirements.txt
    /// inside the venv and installed with `pip install -r`); otherwise
    /// installs the explicit dependency list. No-op when the spec is empty.
    async fn install_dependencies(
        &self,
        venv_path: &Path,
        spec: &DependencySpec,
    ) -> DependencyResult<()> {
        if !spec.has_dependencies() {
            debug!("No dependencies to install");
            return Ok(());
        }

        info!("Installing dependencies in venv: {}", venv_path.display());

        let pip_path = self.get_venv_pip(venv_path);

        // Install from requirements file content if provided
        if let Some(ref requirements_content) = spec.requirements_file_content {
            let req_file = venv_path.join("requirements.txt");
            fs::write(&req_file, requirements_content).await?;

            let output = Command::new(&pip_path)
                .arg("install")
                .arg("-r")
                .arg(&req_file)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .output()
                .await
                .map_err(|e| {
                    DependencyError::InstallFailed(format!(
                        "Failed to install from requirements.txt: {}",
                        e
                    ))
                })?;

            if !output.status.success() {
                let stderr = String::from_utf8_lossy(&output.stderr);
                return Err(DependencyError::InstallFailed(format!(
                    "pip install failed: {}",
                    stderr
                )));
            }

            info!("Dependencies installed from requirements.txt");
        } else if !spec.dependencies.is_empty() {
            // Install individual dependencies
            let output = Command::new(&pip_path)
                .arg("install")
                .args(&spec.dependencies)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .output()
                .await
                .map_err(|e| {
                    DependencyError::InstallFailed(format!("Failed to install dependencies: {}", e))
                })?;

            if !output.status.success() {
                let stderr = String::from_utf8_lossy(&output.stderr);
                return Err(DependencyError::InstallFailed(format!(
                    "pip install failed: {}",
                    stderr
                )));
            }

            info!("Installed {} dependencies", spec.dependencies.len());
        }

        Ok(())
    }

    /// Get Python version from a venv
    /// (trimmed stdout of `<venv>/bin/python --version`)
    async fn get_python_version(&self, venv_path: &Path) -> DependencyResult<String> {
        let python_path = self.get_venv_python(venv_path);

        let output = Command::new(&python_path)
            .arg("--version")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await
            .map_err(|e| {
                DependencyError::ProcessError(format!("Failed to get Python version: {}", e))
            })?;

        if !output.status.success() {
            return Err(DependencyError::ProcessError(
                "Failed to get Python version".to_string(),
            ));
        }

        let version = String::from_utf8_lossy(&output.stdout);
        Ok(version.trim().to_string())
    }

    /// List installed packages in a venv
    ///
    /// Uses `pip list --format=freeze`; a failing pip yields an empty list
    /// rather than an error (best-effort introspection).
    async fn list_installed_packages(&self, venv_path: &Path) -> DependencyResult<Vec<String>> {
        let pip_path = self.get_venv_pip(venv_path);

        let output = Command::new(&pip_path)
            .arg("list")
            .arg("--format=freeze")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .output()
            .await
            .map_err(|e| {
                DependencyError::ProcessError(format!("Failed to list packages: {}", e))
            })?;

        if !output.status.success() {
            return Ok(Vec::new());
        }

        let packages = String::from_utf8_lossy(&output.stdout)
            .lines()
            .map(|s| s.to_string())
            .collect();

        Ok(packages)
    }

    /// Save metadata for a venv
    /// (pretty-printed JSON written to `attune_metadata.json`)
    async fn save_metadata(
        &self,
        venv_path: &Path,
        metadata: &VenvMetadata,
    ) -> DependencyResult<()> {
        let metadata_path = self.get_metadata_path(venv_path);
        let json = serde_json::to_string_pretty(metadata).map_err(|e| {
            DependencyError::LockFileError(format!("Failed to serialize metadata: {}", e))
        })?;

        let mut file = fs::File::create(&metadata_path).await.map_err(|e| {
            DependencyError::LockFileError(format!("Failed to create metadata file: {}", e))
        })?;

        file.write_all(json.as_bytes()).await.map_err(|e| {
            DependencyError::LockFileError(format!("Failed to write metadata: {}", e))
        })?;

        Ok(())
    }

    /// Load metadata for a venv
    ///
    /// Returns `Ok(None)` when no metadata file exists; a present-but-corrupt
    /// file is an error.
    async fn load_metadata(&self, venv_path: &Path) -> DependencyResult<Option<VenvMetadata>> {
        let metadata_path = self.get_metadata_path(venv_path);

        // NOTE: `Path::exists` is a synchronous stat call inside an async fn;
        // acceptable for a small local file, but worth knowing it blocks.
        if !metadata_path.exists() {
            return Ok(None);
        }

        let content = fs::read_to_string(&metadata_path).await.map_err(|e| {
            DependencyError::LockFileError(format!("Failed to read metadata: {}", e))
        })?;

        let metadata: VenvMetadata = serde_json::from_str(&content).map_err(|e| {
            DependencyError::LockFileError(format!("Failed to parse metadata: {}", e))
        })?;

        Ok(Some(metadata))
    }

    /// Check if a venv exists and is valid
    /// (directory and interpreter exist, and `python --version` succeeds)
    async fn is_valid_venv(&self, venv_path: &Path) -> bool {
        if !venv_path.exists() {
            return false;
        }

        let python_path = self.get_venv_python(venv_path);
        if !python_path.exists() {
            return false;
        }

        // Try to run python --version to verify it works
        let result = Command::new(&python_path)
            .arg("--version")
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()
            .await;

        matches!(result, Ok(status) if status.success())
    }

    /// Build environment info from a venv
    ///
    /// For a valid venv this probes version and installed packages and falls
    /// back to "Unknown"/now() where probing or metadata fails; invalid venvs
    /// get placeholder values with `is_valid = false`.
    async fn build_env_info(
        &self,
        pack_ref: &str,
        venv_path: &Path,
    ) -> DependencyResult<EnvironmentInfo> {
        let is_valid = self.is_valid_venv(venv_path).await;
        let python_path = self.get_venv_python(venv_path);

        let (python_version, installed_deps, created_at, updated_at) = if is_valid {
            let version = self
                .get_python_version(venv_path)
                .await
                .unwrap_or_else(|_| "Unknown".to_string());
            let deps = self
                .list_installed_packages(venv_path)
                .await
                .unwrap_or_default();

            let metadata = self.load_metadata(venv_path).await.ok().flatten();
            let created = metadata
                .as_ref()
                .map(|m| m.created_at)
                .unwrap_or_else(chrono::Utc::now);
            let updated = metadata
                .as_ref()
                .map(|m| m.updated_at)
                .unwrap_or_else(chrono::Utc::now);

            (version, deps, created, updated)
        } else {
            (
                "Unknown".to_string(),
                Vec::new(),
                chrono::Utc::now(),
                chrono::Utc::now(),
            )
        };

        Ok(EnvironmentInfo {
            id: pack_ref.to_string(),
            path: venv_path.to_path_buf(),
            runtime: "python".to_string(),
            runtime_version: python_version,
            installed_dependencies: installed_deps,
            created_at,
            updated_at,
            is_valid,
            executable_path: python_path,
        })
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl DependencyManager for PythonVenvManager {
|
||||
fn runtime_type(&self) -> &str {
|
||||
"python"
|
||||
}
|
||||
|
||||
async fn ensure_environment(
|
||||
&self,
|
||||
pack_ref: &str,
|
||||
spec: &DependencySpec,
|
||||
) -> DependencyResult<EnvironmentInfo> {
|
||||
info!("Ensuring Python environment for pack: {}", pack_ref);
|
||||
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
let dependency_hash = self.calculate_dependency_hash(spec);
|
||||
|
||||
// Check if environment exists and is up to date
|
||||
if venv_path.exists() {
|
||||
if let Some(metadata) = self.load_metadata(&venv_path).await? {
|
||||
if metadata.dependency_hash == dependency_hash
|
||||
&& self.is_valid_venv(&venv_path).await
|
||||
{
|
||||
debug!("Using existing venv (dependencies unchanged)");
|
||||
let env_info = self.build_env_info(pack_ref, &venv_path).await?;
|
||||
|
||||
// Update cache
|
||||
let mut cache = self.env_cache.write().await;
|
||||
cache.insert(pack_ref.to_string(), env_info.clone());
|
||||
|
||||
return Ok(env_info);
|
||||
}
|
||||
info!("Dependencies changed or venv invalid, recreating environment");
|
||||
}
|
||||
}
|
||||
|
||||
// Create or recreate the venv
|
||||
self.create_venv(&venv_path).await?;
|
||||
|
||||
// Install dependencies
|
||||
self.install_dependencies(&venv_path, spec).await?;
|
||||
|
||||
// Get Python version
|
||||
let python_version = self.get_python_version(&venv_path).await?;
|
||||
|
||||
// Save metadata
|
||||
let metadata = VenvMetadata {
|
||||
pack_ref: pack_ref.to_string(),
|
||||
dependencies: spec.dependencies.clone(),
|
||||
created_at: chrono::Utc::now(),
|
||||
updated_at: chrono::Utc::now(),
|
||||
python_version: python_version.clone(),
|
||||
dependency_hash,
|
||||
};
|
||||
self.save_metadata(&venv_path, &metadata).await?;
|
||||
|
||||
// Build environment info
|
||||
let env_info = self.build_env_info(pack_ref, &venv_path).await?;
|
||||
|
||||
// Update cache
|
||||
let mut cache = self.env_cache.write().await;
|
||||
cache.insert(pack_ref.to_string(), env_info.clone());
|
||||
|
||||
info!("Python environment ready for pack: {}", pack_ref);
|
||||
Ok(env_info)
|
||||
}
|
||||
|
||||
async fn get_environment(&self, pack_ref: &str) -> DependencyResult<Option<EnvironmentInfo>> {
|
||||
// Check cache first
|
||||
{
|
||||
let cache = self.env_cache.read().await;
|
||||
if let Some(env_info) = cache.get(pack_ref) {
|
||||
return Ok(Some(env_info.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
if !venv_path.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let env_info = self.build_env_info(pack_ref, &venv_path).await?;
|
||||
|
||||
// Update cache
|
||||
let mut cache = self.env_cache.write().await;
|
||||
cache.insert(pack_ref.to_string(), env_info.clone());
|
||||
|
||||
Ok(Some(env_info))
|
||||
}
|
||||
|
||||
async fn remove_environment(&self, pack_ref: &str) -> DependencyResult<()> {
|
||||
info!("Removing Python environment for pack: {}", pack_ref);
|
||||
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
if venv_path.exists() {
|
||||
fs::remove_dir_all(&venv_path).await?;
|
||||
}
|
||||
|
||||
// Remove from cache
|
||||
let mut cache = self.env_cache.write().await;
|
||||
cache.remove(pack_ref);
|
||||
|
||||
info!("Environment removed");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate_environment(&self, pack_ref: &str) -> DependencyResult<bool> {
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
Ok(self.is_valid_venv(&venv_path).await)
|
||||
}
|
||||
|
||||
async fn get_executable_path(&self, pack_ref: &str) -> DependencyResult<PathBuf> {
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
let python_path = self.get_venv_python(&venv_path);
|
||||
|
||||
if !python_path.exists() {
|
||||
return Err(DependencyError::EnvironmentNotFound(format!(
|
||||
"Python executable not found for pack: {}",
|
||||
pack_ref
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(python_path)
|
||||
}
|
||||
|
||||
async fn list_environments(&self) -> DependencyResult<Vec<EnvironmentInfo>> {
|
||||
let mut environments = Vec::new();
|
||||
|
||||
let mut entries = fs::read_dir(&self.base_dir).await?;
|
||||
while let Some(entry) = entries.next_entry().await? {
|
||||
if entry.file_type().await?.is_dir() {
|
||||
let venv_path = entry.path();
|
||||
if self.is_valid_venv(&venv_path).await {
|
||||
// Extract pack_ref from directory name
|
||||
if let Some(dir_name) = venv_path.file_name().and_then(|n| n.to_str()) {
|
||||
if let Ok(env_info) = self.build_env_info(dir_name, &venv_path).await {
|
||||
environments.push(env_info);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(environments)
|
||||
}
|
||||
|
||||
async fn cleanup(&self, keep_recent: usize) -> DependencyResult<Vec<String>> {
|
||||
info!(
|
||||
"Cleaning up Python virtual environments (keeping {} most recent)",
|
||||
keep_recent
|
||||
);
|
||||
|
||||
let mut environments = self.list_environments().await?;
|
||||
|
||||
// Sort by updated_at, newest first
|
||||
environments.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
|
||||
|
||||
let mut removed = Vec::new();
|
||||
|
||||
// Remove environments beyond keep_recent threshold
|
||||
for env in environments.iter().skip(keep_recent) {
|
||||
// Also skip if environment is invalid
|
||||
if !env.is_valid {
|
||||
if let Err(e) = self.remove_environment(&env.id).await {
|
||||
warn!("Failed to remove environment {}: {}", env.id, e);
|
||||
} else {
|
||||
removed.push(env.id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
info!("Cleaned up {} environments", removed.len());
|
||||
Ok(removed)
|
||||
}
|
||||
|
||||
async fn needs_update(&self, pack_ref: &str, spec: &DependencySpec) -> DependencyResult<bool> {
|
||||
let venv_path = self.get_venv_path(pack_ref);
|
||||
|
||||
if !venv_path.exists() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
if !self.is_valid_venv(&venv_path).await {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
// Check if dependency hash matches
|
||||
if let Some(metadata) = self.load_metadata(&venv_path).await? {
|
||||
let current_hash = self.calculate_dependency_hash(spec);
|
||||
Ok(metadata.dependency_hash != current_hash)
|
||||
} else {
|
||||
// No metadata, assume needs update
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    //! Unit tests for `PythonVenvManager` path sanitization and dependency
    //! hashing. These do not create real venvs — no Python is required.
    use super::*;
    use tempfile::TempDir;

    // `.` and `/` in pack refs must be mapped to `_` in the venv dir name.
    #[tokio::test]
    async fn test_venv_path_sanitization() {
        let temp_dir = TempDir::new().unwrap();
        let manager = PythonVenvManager::new(temp_dir.path().to_path_buf());

        let path = manager.get_venv_path("core.http");
        assert!(path.to_string_lossy().contains("core_http"));

        let path = manager.get_venv_path("my/pack");
        assert!(path.to_string_lossy().contains("my_pack"));
    }

    // Dependency order must not affect the hash (deps are sorted first).
    #[test]
    fn test_dependency_hash_consistency() {
        let temp_dir = TempDir::new().unwrap();
        let manager = PythonVenvManager::new(temp_dir.path().to_path_buf());

        let spec1 = DependencySpec::new("python")
            .with_dependency("requests==2.28.0")
            .with_dependency("flask==2.0.0");

        let spec2 = DependencySpec::new("python")
            .with_dependency("flask==2.0.0")
            .with_dependency("requests==2.28.0");

        // Hashes should be the same regardless of order (we sort)
        let hash1 = manager.calculate_dependency_hash(&spec1);
        let hash2 = manager.calculate_dependency_hash(&spec2);
        assert_eq!(hash1, hash2);
    }

    // Different pinned versions must produce different hashes.
    #[test]
    fn test_dependency_hash_different() {
        let temp_dir = TempDir::new().unwrap();
        let manager = PythonVenvManager::new(temp_dir.path().to_path_buf());

        let spec1 = DependencySpec::new("python").with_dependency("requests==2.28.0");

        let spec2 = DependencySpec::new("python").with_dependency("requests==2.29.0");

        let hash1 = manager.calculate_dependency_hash(&spec1);
        let hash2 = manager.calculate_dependency_hash(&spec2);
        assert_ne!(hash1, hash2);
    }
}
|
||||
672
crates/worker/src/runtime/shell.rs
Normal file
672
crates/worker/src/runtime/shell.rs
Normal file
@@ -0,0 +1,672 @@
|
||||
//! Shell Runtime Implementation
|
||||
//!
|
||||
//! Executes shell scripts and commands using subprocess execution.
|
||||
|
||||
use super::{
|
||||
BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Stdio;
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::process::Command;
|
||||
use tokio::time::timeout;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Shell runtime for executing shell scripts and commands
///
/// Runs action code through a configurable interpreter (bash by default),
/// either as an on-disk script file or as inline code wrapped in a
/// generated bootstrap script.
pub struct ShellRuntime {
    /// Shell interpreter path (bash, sh, zsh, etc.)
    shell_path: PathBuf,

    /// Base directory for storing action code
    /// (created on `setup()`; defaults to /tmp/attune/actions)
    work_dir: PathBuf,
}
|
||||
|
||||
impl ShellRuntime {
|
||||
/// Create a new Shell runtime with bash
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
shell_path: PathBuf::from("/bin/bash"),
|
||||
work_dir: PathBuf::from("/tmp/attune/actions"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Shell runtime with custom shell
|
||||
pub fn with_shell(shell_path: PathBuf) -> Self {
|
||||
Self {
|
||||
shell_path,
|
||||
work_dir: PathBuf::from("/tmp/attune/actions"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Shell runtime with custom settings
|
||||
pub fn with_config(shell_path: PathBuf, work_dir: PathBuf) -> Self {
|
||||
Self {
|
||||
shell_path,
|
||||
work_dir,
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute with streaming and bounded log collection
///
/// Spawns `cmd` with all three stdio streams piped, writes the `secrets`
/// map to the child's stdin as a single JSON line, then concurrently drains
/// stdout/stderr into size-bounded buffers while waiting for the process
/// (optionally bounded by `timeout_secs`).
///
/// Child misbehavior (timeout, non-zero exit, stdin write failure) is
/// reported inside the returned `ExecutionResult` (`exit_code` / `error`
/// fields) rather than as `Err`; `Err` is reserved for spawn failures.
///
/// NOTE(review): on timeout the child process is not killed — it may keep
/// running after this function returns; consider `child.kill()`. Also,
/// `tokio::join!` waits for stdout/stderr EOF, so a timed-out child that
/// keeps its pipes open can delay the return past the timeout — confirm
/// whether that is acceptable.
async fn execute_with_streaming(
    &self,
    mut cmd: Command,
    secrets: &std::collections::HashMap<String, String>,
    timeout_secs: Option<u64>,
    max_stdout_bytes: usize,
    max_stderr_bytes: usize,
) -> RuntimeResult<ExecutionResult> {
    let start = Instant::now();

    // Spawn process with piped I/O
    let mut child = cmd
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;

    // Write secrets to stdin - if this fails, the process has already started
    // so we should continue and capture whatever output we can
    let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
        match serde_json::to_string(secrets) {
            Ok(secrets_json) => {
                if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
                    Some(format!("Failed to write secrets to stdin: {}", e))
                } else if let Err(e) = stdin.write_all(b"\n").await {
                    Some(format!("Failed to write newline to stdin: {}", e))
                } else {
                    // Dropping stdin closes the pipe so the child's `read`
                    // sees EOF instead of blocking forever.
                    drop(stdin);
                    None
                }
            }
            Err(e) => Some(format!("Failed to serialize secrets: {}", e)),
        }
    } else {
        None
    };

    // Create bounded writers
    let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
    let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);

    // Take stdout and stderr streams
    // (piped above, so `take()` cannot return None here)
    let stdout = child.stdout.take().expect("stdout not captured");
    let stderr = child.stderr.take().expect("stderr not captured");

    // Create buffered readers
    let mut stdout_reader = BufReader::new(stdout);
    let mut stderr_reader = BufReader::new(stderr);

    // Stream both outputs concurrently.
    // `read_until` (not `read_line`) is used so non-UTF-8 output does not
    // abort the stream; the bounded writer takes the raw bytes.
    let stdout_task = async {
        let mut line = Vec::new();
        loop {
            line.clear();
            match stdout_reader.read_until(b'\n', &mut line).await {
                Ok(0) => break, // EOF
                Ok(_) => {
                    if stdout_writer.write_all(&line).await.is_err() {
                        break;
                    }
                }
                Err(_) => break,
            }
        }
        stdout_writer
    };

    let stderr_task = async {
        let mut line = Vec::new();
        loop {
            line.clear();
            match stderr_reader.read_until(b'\n', &mut line).await {
                Ok(0) => break, // EOF
                Ok(_) => {
                    if stderr_writer.write_all(&line).await.is_err() {
                        break;
                    }
                }
                Err(_) => break,
            }
        }
        stderr_writer
    };

    // Wait for both streams and the process
    let (stdout_writer, stderr_writer, wait_result) =
        tokio::join!(stdout_task, stderr_task, async {
            if let Some(timeout_secs) = timeout_secs {
                timeout(std::time::Duration::from_secs(timeout_secs), child.wait()).await
            } else {
                // No timeout configured: wrap in Ok so both arms share the
                // same `Result<io::Result<ExitStatus>, Elapsed>` shape.
                Ok(child.wait().await)
            }
        });

    let duration_ms = start.elapsed().as_millis() as u64;

    // Get results from bounded writers - we have these regardless of wait() success
    let stdout_result = stdout_writer.into_result();
    let stderr_result = stderr_writer.into_result();

    // Handle process wait result
    let (exit_code, process_error) = match wait_result {
        // -1 stands in for "killed by signal" (no exit code available)
        Ok(Ok(status)) => (status.code().unwrap_or(-1), None),
        Ok(Err(e)) => {
            // Process wait failed, but we have the output - return it with an error
            warn!("Process wait failed but captured output: {}", e);
            (-1, Some(format!("Process wait failed: {}", e)))
        }
        Err(_) => {
            // Timeout occurred.
            // This branch is only reachable when `timeout_secs` was `Some`,
            // so the `unwrap()` below cannot panic.
            return Ok(ExecutionResult {
                exit_code: -1,
                stdout: stdout_result.content.clone(),
                stderr: stderr_result.content.clone(),
                result: None,
                duration_ms,
                error: Some(format!(
                    "Execution timed out after {} seconds",
                    timeout_secs.unwrap()
                )),
                stdout_truncated: stdout_result.truncated,
                stderr_truncated: stderr_result.truncated,
                stdout_bytes_truncated: stdout_result.bytes_truncated,
                stderr_bytes_truncated: stderr_result.bytes_truncated,
            });
        }
    };

    debug!(
        "Shell execution completed: exit_code={}, duration={}ms, stdout_truncated={}, stderr_truncated={}",
        exit_code, duration_ms, stdout_result.truncated, stderr_result.truncated
    );

    // Try to parse result from stdout as JSON.
    // Only the LAST stdout line is considered, and only on success; a
    // non-JSON last line silently yields `None`.
    let result = if exit_code == 0 && !stdout_result.content.trim().is_empty() {
        stdout_result
            .content
            .trim()
            .lines()
            .last()
            .and_then(|line| serde_json::from_str(line).ok())
    } else {
        None
    };

    // Determine error message (priority: wait failure > stdin failure >
    // non-zero exit code).
    let error = if let Some(proc_err) = process_error {
        Some(proc_err)
    } else if let Some(stdin_err) = stdin_write_error {
        // Ignore broken pipe errors for fast-exiting successful actions
        // These occur when the process exits before we finish writing secrets to stdin
        let is_broken_pipe =
            stdin_err.contains("Broken pipe") || stdin_err.contains("os error 32");
        let is_fast_exit = duration_ms < 500;
        let is_success = exit_code == 0;

        if is_broken_pipe && is_fast_exit && is_success {
            debug!(
                "Ignoring broken pipe error for fast-exiting successful action ({}ms)",
                duration_ms
            );
            None
        } else {
            Some(stdin_err)
        }
    } else if exit_code != 0 {
        Some(if stderr_result.content.is_empty() {
            format!("Command exited with code {}", exit_code)
        } else {
            // Use last line of stderr as error, or full stderr if short
            if stderr_result.content.lines().count() > 5 {
                stderr_result
                    .content
                    .lines()
                    .last()
                    .unwrap_or("")
                    .to_string()
            } else {
                stderr_result.content.clone()
            }
        })
    } else {
        None
    };

    Ok(ExecutionResult {
        exit_code,
        stdout: stdout_result.content.clone(),
        stderr: stderr_result.content.clone(),
        result,
        duration_ms,
        error,
        stdout_truncated: stdout_result.truncated,
        stderr_truncated: stderr_result.truncated,
        stdout_bytes_truncated: stdout_result.bytes_truncated,
        stderr_bytes_truncated: stderr_result.bytes_truncated,
    })
}
|
||||
|
||||
/// Generate shell wrapper script that injects parameters as environment variables
|
||||
fn generate_wrapper_script(&self, context: &ExecutionContext) -> RuntimeResult<String> {
|
||||
let mut script = String::new();
|
||||
|
||||
// Add shebang
|
||||
script.push_str("#!/bin/bash\n");
|
||||
script.push_str("set -e\n\n"); // Exit on error
|
||||
|
||||
// Read secrets from stdin and store in associative array
|
||||
script.push_str("# Read secrets from stdin (passed securely, not via environment)\n");
|
||||
script.push_str("declare -A ATTUNE_SECRETS\n");
|
||||
script.push_str("read -r ATTUNE_SECRETS_JSON\n");
|
||||
script.push_str("if [ -n \"$ATTUNE_SECRETS_JSON\" ]; then\n");
|
||||
script.push_str(" # Parse JSON secrets using Python (always available)\n");
|
||||
script.push_str(" eval \"$(echo \"$ATTUNE_SECRETS_JSON\" | python3 -c \"\n");
|
||||
script.push_str("import sys, json\n");
|
||||
script.push_str("try:\n");
|
||||
script.push_str(" secrets = json.load(sys.stdin)\n");
|
||||
script.push_str(" for key, value in secrets.items():\n");
|
||||
script.push_str(" # Escape single quotes in value\n");
|
||||
script.push_str(
|
||||
" safe_value = value.replace(\\\"'\\\", \\\"'\\\\\\\\\\\\\\\\'\\\") \n",
|
||||
);
|
||||
script.push_str(" print(f\\\"ATTUNE_SECRETS['{key}']='{safe_value}'\\\")\n");
|
||||
script.push_str("except: pass\n");
|
||||
script.push_str("\")\"\n");
|
||||
script.push_str("fi\n\n");
|
||||
|
||||
// Helper function to get secrets
|
||||
script.push_str("# Helper function to access secrets\n");
|
||||
script.push_str("get_secret() {\n");
|
||||
script.push_str(" local name=\"$1\"\n");
|
||||
script.push_str(" echo \"${ATTUNE_SECRETS[$name]}\"\n");
|
||||
script.push_str("}\n\n");
|
||||
|
||||
// Export parameters as environment variables
|
||||
script.push_str("# Action parameters\n");
|
||||
for (key, value) in &context.parameters {
|
||||
let value_str = match value {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
_ => serde_json::to_string(value)?,
|
||||
};
|
||||
// Export with PARAM_ prefix for consistency
|
||||
script.push_str(&format!(
|
||||
"export PARAM_{}='{}'\n",
|
||||
key.to_uppercase(),
|
||||
value_str
|
||||
));
|
||||
// Also export without prefix for easier shell script writing
|
||||
script.push_str(&format!("export {}='{}'\n", key, value_str));
|
||||
}
|
||||
script.push_str("\n");
|
||||
|
||||
// Add the action code
|
||||
script.push_str("# Action code\n");
|
||||
if let Some(code) = &context.code {
|
||||
script.push_str(code);
|
||||
}
|
||||
|
||||
Ok(script)
|
||||
}
|
||||
|
||||
/// Execute shell script directly
|
||||
async fn execute_shell_code(
|
||||
&self,
|
||||
script: String,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing shell script with {} secrets (passed via stdin)",
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&self.shell_path);
|
||||
cmd.arg("-c").arg(&script);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Execute shell script from file
|
||||
async fn execute_shell_file(
|
||||
&self,
|
||||
code_path: PathBuf,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing shell file: {:?} with {} secrets",
|
||||
code_path,
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&self.shell_path);
|
||||
cmd.arg(&code_path);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ShellRuntime {
    /// Equivalent to [`ShellRuntime::new`]: bash interpreter, default work dir.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
#[async_trait]
|
||||
impl Runtime for ShellRuntime {
|
||||
/// Stable identifier for this runtime, used in logs and registry lookups.
fn name(&self) -> &str {
    "shell"
}
|
||||
|
||||
/// Heuristic: accept the action if anything about it looks like a shell
/// script (entry point name, `.sh` in the ref/entry point, or a `.sh`
/// file extension on the code path).
fn can_execute(&self, context: &ExecutionContext) -> bool {
    // An explicit shell-ish entry point is the strongest signal.
    if matches!(context.entry_point.as_str(), "bash" | "sh" | "shell") {
        return true;
    }

    // Otherwise look for `.sh` in the action ref, entry point, or code path.
    let path_is_sh = context
        .code_path
        .as_ref()
        .and_then(|p| p.extension())
        .and_then(|e| e.to_str())
        == Some("sh");

    context.action_ref.contains(".sh") || context.entry_point.ends_with(".sh") || path_is_sh
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing shell action: {} (execution_id: {})",
|
||||
context.action_ref, context.execution_id
|
||||
);
|
||||
|
||||
// If code_path is provided, execute the file directly
|
||||
if let Some(code_path) = &context.code_path {
|
||||
// Merge parameters into environment variables with ATTUNE_ACTION_ prefix
|
||||
let mut env = context.env.clone();
|
||||
for (key, value) in &context.parameters {
|
||||
let value_str = match value {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
_ => serde_json::to_string(value)?,
|
||||
};
|
||||
env.insert(format!("ATTUNE_ACTION_{}", key.to_uppercase()), value_str);
|
||||
}
|
||||
|
||||
return self
|
||||
.execute_shell_file(
|
||||
code_path.clone(),
|
||||
&context.secrets,
|
||||
&env,
|
||||
context.timeout,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
// Otherwise, generate wrapper script and execute
|
||||
let script = self.generate_wrapper_script(&context)?;
|
||||
self.execute_shell_code(
|
||||
script,
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn setup(&self) -> RuntimeResult<()> {
|
||||
info!("Setting up Shell runtime");
|
||||
|
||||
// Ensure work directory exists
|
||||
tokio::fs::create_dir_all(&self.work_dir)
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Failed to create work dir: {}", e)))?;
|
||||
|
||||
// Verify shell is available
|
||||
let output = Command::new(&self.shell_path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
RuntimeError::SetupError(format!("Shell not found at {:?}: {}", self.shell_path, e))
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Shell interpreter is not working".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let version = String::from_utf8_lossy(&output.stdout);
|
||||
info!("Shell runtime ready: {}", version.trim());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Tear down the runtime. Currently a no-op placeholder (it only logs);
/// temporary files under `work_dir` are left in place.
async fn cleanup(&self) -> RuntimeResult<()> {
    info!("Cleaning up Shell runtime")
    ;
    // Could clean up temporary files here
    Ok(())
}
|
||||
|
||||
async fn validate(&self) -> RuntimeResult<()> {
|
||||
debug!("Validating Shell runtime");
|
||||
|
||||
// Check if shell is available
|
||||
let output = Command::new(&self.shell_path)
|
||||
.arg("-c")
|
||||
.arg("echo 'test'")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Shell validation failed: {}", e)))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Shell interpreter validation failed".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    /// Build a minimal inline-code ExecutionContext for the shell runtime.
    ///
    /// Defaults: no parameters/env/secrets, 10s timeout, 10 MiB log caps,
    /// `execution_id` 0. Individual tests override fields as needed.
    fn shell_context(action_ref: &str, code: &str) -> ExecutionContext {
        ExecutionContext {
            execution_id: 0,
            action_ref: action_ref.to_string(),
            parameters: HashMap::new(),
            env: HashMap::new(),
            secrets: HashMap::new(),
            timeout: Some(10),
            working_dir: None,
            entry_point: "shell".to_string(),
            code: Some(code.to_string()),
            code_path: None,
            runtime_name: Some("shell".to_string()),
            max_stdout_bytes: 10 * 1024 * 1024,
            max_stderr_bytes: 10 * 1024 * 1024,
        }
    }

    #[tokio::test]
    async fn test_shell_runtime_simple() {
        let runtime = ShellRuntime::new();
        let mut context = shell_context("test.simple", "echo 'Hello, World!'");
        context.execution_id = 1;

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);
        assert!(result.stdout.contains("Hello, World!"));
    }

    #[tokio::test]
    async fn test_shell_runtime_with_params() {
        let runtime = ShellRuntime::new();
        let mut context = shell_context("test.params", "echo \"Hello, $name!\"");
        context.execution_id = 2;
        context
            .parameters
            .insert("name".to_string(), serde_json::json!("Alice"));

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert!(result.stdout.contains("Hello, Alice!"));
    }

    #[tokio::test]
    async fn test_shell_runtime_timeout() {
        let runtime = ShellRuntime::new();
        let mut context = shell_context("test.timeout", "sleep 10");
        context.execution_id = 3;
        // 1s budget against a 10s sleep must trip the timeout path.
        context.timeout = Some(1);

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        let error_msg = result.error.expect("timeout should surface an error");
        assert!(error_msg.contains("timeout") || error_msg.contains("timed out"));
    }

    #[tokio::test]
    async fn test_shell_runtime_error() {
        let runtime = ShellRuntime::new();
        let mut context = shell_context("test.error", "exit 1");
        context.execution_id = 4;

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        assert_eq!(result.exit_code, 1);
    }

    #[tokio::test]
    async fn test_shell_runtime_with_secrets() {
        let runtime = ShellRuntime::new();
        let mut context = shell_context(
            "test.secrets",
            r#"
# Access secrets via get_secret function
api_key=$(get_secret 'api_key')
db_pass=$(get_secret 'db_password')
missing=$(get_secret 'nonexistent')

echo "api_key=$api_key"
echo "db_pass=$db_pass"
echo "missing=$missing"
"#,
        );
        context.execution_id = 5;
        context
            .secrets
            .insert("api_key".to_string(), "secret_key_12345".to_string());
        context
            .secrets
            .insert("db_password".to_string(), "super_secret_pass".to_string());

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);

        // Secrets must be retrievable inside the action code, and unknown
        // names must expand to the empty string.
        assert!(result.stdout.contains("api_key=secret_key_12345"));
        assert!(result.stdout.contains("db_pass=super_secret_pass"));
        assert!(result.stdout.contains("missing="));
    }
}
|
||||
Reference in New Issue
Block a user