[WIP] client action streaming
This commit is contained in:
@@ -543,6 +543,16 @@ impl ActionExecutor {
|
||||
selected_runtime_version,
|
||||
max_stdout_bytes: self.max_stdout_bytes,
|
||||
max_stderr_bytes: self.max_stderr_bytes,
|
||||
stdout_log_path: Some(
|
||||
self.artifact_manager
|
||||
.get_execution_dir(execution.id)
|
||||
.join("stdout.log"),
|
||||
),
|
||||
stderr_log_path: Some(
|
||||
self.artifact_manager
|
||||
.get_execution_dir(execution.id)
|
||||
.join("stderr.log"),
|
||||
),
|
||||
parameter_delivery: action.parameter_delivery,
|
||||
parameter_format: action.parameter_format,
|
||||
output_format: action.output_format,
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
//!
|
||||
//! Provides bounded log writers that limit output size to prevent OOM issues.
|
||||
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
use std::task::{Context, Poll};
|
||||
use tokio::io::AsyncWrite;
|
||||
use tokio::io::{AsyncWrite, AsyncWriteExt};
|
||||
|
||||
const TRUNCATION_NOTICE_STDOUT: &str = "\n\n[OUTPUT TRUNCATED: stdout exceeded size limit]\n";
|
||||
const TRUNCATION_NOTICE_STDERR: &str = "\n\n[OUTPUT TRUNCATED: stderr exceeded size limit]\n";
|
||||
@@ -76,6 +77,15 @@ pub struct BoundedLogWriter {
|
||||
truncation_notice: &'static str,
|
||||
}
|
||||
|
||||
/// A file-backed writer that applies the same truncation policy as `BoundedLogWriter`.
|
||||
pub struct BoundedLogFileWriter {
|
||||
file: tokio::fs::File,
|
||||
max_bytes: usize,
|
||||
truncated: bool,
|
||||
data_bytes_written: usize,
|
||||
truncation_notice: &'static str,
|
||||
}
|
||||
|
||||
impl BoundedLogWriter {
|
||||
/// Create a new bounded log writer for stdout
|
||||
pub fn new_stdout(max_bytes: usize) -> Self {
|
||||
@@ -166,6 +176,76 @@ impl BoundedLogWriter {
|
||||
}
|
||||
}
|
||||
|
||||
impl BoundedLogFileWriter {
|
||||
pub async fn new_stdout(path: &Path, max_bytes: usize) -> std::io::Result<Self> {
|
||||
Self::create(path, max_bytes, TRUNCATION_NOTICE_STDOUT).await
|
||||
}
|
||||
|
||||
pub async fn new_stderr(path: &Path, max_bytes: usize) -> std::io::Result<Self> {
|
||||
Self::create(path, max_bytes, TRUNCATION_NOTICE_STDERR).await
|
||||
}
|
||||
|
||||
async fn create(
|
||||
path: &Path,
|
||||
max_bytes: usize,
|
||||
truncation_notice: &'static str,
|
||||
) -> std::io::Result<Self> {
|
||||
if let Some(parent) = path.parent() {
|
||||
tokio::fs::create_dir_all(parent).await?;
|
||||
}
|
||||
|
||||
let file = tokio::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(path)
|
||||
.await?;
|
||||
|
||||
Ok(Self {
|
||||
file,
|
||||
max_bytes,
|
||||
truncated: false,
|
||||
data_bytes_written: 0,
|
||||
truncation_notice,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
|
||||
if self.truncated {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let effective_limit = self.max_bytes.saturating_sub(NOTICE_RESERVE_BYTES);
|
||||
let remaining_space = effective_limit.saturating_sub(self.data_bytes_written);
|
||||
|
||||
if remaining_space == 0 {
|
||||
self.add_truncation_notice().await?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let bytes_to_write = std::cmp::min(buf.len(), remaining_space);
|
||||
if bytes_to_write > 0 {
|
||||
self.file.write_all(&buf[..bytes_to_write]).await?;
|
||||
self.data_bytes_written += bytes_to_write;
|
||||
}
|
||||
|
||||
if bytes_to_write < buf.len() {
|
||||
self.add_truncation_notice().await?;
|
||||
}
|
||||
|
||||
self.file.flush().await
|
||||
}
|
||||
|
||||
async fn add_truncation_notice(&mut self) -> std::io::Result<()> {
|
||||
if self.truncated {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.truncated = true;
|
||||
self.file.write_all(self.truncation_notice.as_bytes()).await
|
||||
}
|
||||
}
|
||||
|
||||
impl AsyncWrite for BoundedLogWriter {
|
||||
fn poll_write(
|
||||
mut self: Pin<&mut Self>,
|
||||
|
||||
@@ -48,7 +48,7 @@ pub use dependency::{
|
||||
DependencyError, DependencyManager, DependencyManagerRegistry, DependencyResult,
|
||||
DependencySpec, EnvironmentInfo,
|
||||
};
|
||||
pub use log_writer::{BoundedLogResult, BoundedLogWriter};
|
||||
pub use log_writer::{BoundedLogFileWriter, BoundedLogResult, BoundedLogWriter};
|
||||
pub use parameter_passing::{ParameterDeliveryConfig, PreparedParameters};
|
||||
|
||||
// Re-export parameter types from common
|
||||
@@ -148,6 +148,12 @@ pub struct ExecutionContext {
|
||||
/// Maximum stderr size in bytes (for log truncation)
|
||||
pub max_stderr_bytes: usize,
|
||||
|
||||
/// Optional live stdout log path for incremental writes during execution.
|
||||
pub stdout_log_path: Option<PathBuf>,
|
||||
|
||||
/// Optional live stderr log path for incremental writes during execution.
|
||||
pub stderr_log_path: Option<PathBuf>,
|
||||
|
||||
/// How parameters should be delivered to the action
|
||||
pub parameter_delivery: ParameterDelivery,
|
||||
|
||||
@@ -185,6 +191,8 @@ impl ExecutionContext {
|
||||
selected_runtime_version: None,
|
||||
max_stdout_bytes: 10 * 1024 * 1024,
|
||||
max_stderr_bytes: 10 * 1024 * 1024,
|
||||
stdout_log_path: None,
|
||||
stderr_log_path: None,
|
||||
parameter_delivery: ParameterDelivery::default(),
|
||||
parameter_format: ParameterFormat::default(),
|
||||
output_format: OutputFormat::default(),
|
||||
|
||||
@@ -5,10 +5,11 @@
|
||||
|
||||
use super::{
|
||||
parameter_passing::{self, ParameterDeliveryConfig},
|
||||
BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime, RuntimeError, RuntimeResult,
|
||||
BoundedLogFileWriter, BoundedLogWriter, ExecutionContext, ExecutionResult, Runtime,
|
||||
RuntimeError, RuntimeResult,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use std::path::PathBuf;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
@@ -45,6 +46,8 @@ impl NativeRuntime {
|
||||
timeout: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
stdout_log_path: Option<&Path>,
|
||||
stderr_log_path: Option<&Path>,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
let start = Instant::now();
|
||||
|
||||
@@ -131,6 +134,8 @@ impl NativeRuntime {
|
||||
|
||||
let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
|
||||
let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);
|
||||
let mut stdout_file = open_live_log_file(stdout_log_path, max_stdout_bytes, true).await?;
|
||||
let mut stderr_file = open_live_log_file(stderr_log_path, max_stderr_bytes, false).await?;
|
||||
|
||||
// Create buffered readers
|
||||
let mut stdout_reader = BufReader::new(stdout_handle);
|
||||
@@ -147,6 +152,9 @@ impl NativeRuntime {
|
||||
if stdout_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
if let Some(file) = stdout_file.as_mut() {
|
||||
let _ = file.write_all(&line).await;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
@@ -164,6 +172,9 @@ impl NativeRuntime {
|
||||
if stderr_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
if let Some(file) = stderr_file.as_mut() {
|
||||
let _ = file.write_all(&line).await;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
@@ -352,6 +363,8 @@ impl Runtime for NativeRuntime {
|
||||
context.timeout,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
context.stdout_log_path.as_deref(),
|
||||
context.stderr_log_path.as_deref(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -401,6 +414,23 @@ impl Runtime for NativeRuntime {
|
||||
}
|
||||
}
|
||||
|
||||
async fn open_live_log_file(
|
||||
path: Option<&Path>,
|
||||
max_bytes: usize,
|
||||
is_stdout: bool,
|
||||
) -> std::io::Result<Option<BoundedLogFileWriter>> {
|
||||
let Some(path) = path else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let writer = if is_stdout {
|
||||
BoundedLogFileWriter::new_stdout(path, max_bytes).await?
|
||||
} else {
|
||||
BoundedLogFileWriter::new_stderr(path, max_bytes).await?
|
||||
};
|
||||
Ok(Some(writer))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -962,6 +962,8 @@ impl Runtime for ProcessRuntime {
|
||||
context.max_stderr_bytes,
|
||||
context.output_format,
|
||||
context.cancel_token.clone(),
|
||||
context.stdout_log_path.as_deref(),
|
||||
context.stderr_log_path.as_deref(),
|
||||
)
|
||||
.await;
|
||||
|
||||
|
||||
@@ -12,10 +12,10 @@
|
||||
//! 1. SIGTERM is sent to the process immediately
|
||||
//! 2. After a 5-second grace period, SIGKILL is sent as a last resort
|
||||
|
||||
use super::{BoundedLogWriter, ExecutionResult, OutputFormat, RuntimeResult};
|
||||
use super::{BoundedLogFileWriter, BoundedLogWriter, ExecutionResult, OutputFormat, RuntimeResult};
|
||||
use std::collections::HashMap;
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::process::Command;
|
||||
@@ -59,6 +59,8 @@ pub async fn execute_streaming(
|
||||
max_stderr_bytes,
|
||||
output_format,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -93,6 +95,8 @@ pub async fn execute_streaming_cancellable(
|
||||
max_stderr_bytes: usize,
|
||||
output_format: OutputFormat,
|
||||
cancel_token: Option<CancellationToken>,
|
||||
stdout_log_path: Option<&Path>,
|
||||
stderr_log_path: Option<&Path>,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
let start = Instant::now();
|
||||
|
||||
@@ -130,6 +134,8 @@ pub async fn execute_streaming_cancellable(
|
||||
// Create bounded writers
|
||||
let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
|
||||
let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);
|
||||
let mut stdout_file = open_live_log_file(stdout_log_path, max_stdout_bytes, true).await?;
|
||||
let mut stderr_file = open_live_log_file(stderr_log_path, max_stderr_bytes, false).await?;
|
||||
|
||||
// Take stdout and stderr streams
|
||||
let stdout = child.stdout.take().expect("stdout not captured");
|
||||
@@ -150,6 +156,9 @@ pub async fn execute_streaming_cancellable(
|
||||
if stdout_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
if let Some(file) = stdout_file.as_mut() {
|
||||
let _ = file.write_all(&line).await;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
@@ -167,6 +176,9 @@ pub async fn execute_streaming_cancellable(
|
||||
if stderr_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
if let Some(file) = stderr_file.as_mut() {
|
||||
let _ = file.write_all(&line).await;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
@@ -351,6 +363,24 @@ pub async fn execute_streaming_cancellable(
|
||||
})
|
||||
}
|
||||
|
||||
async fn open_live_log_file(
|
||||
path: Option<&Path>,
|
||||
max_bytes: usize,
|
||||
is_stdout: bool,
|
||||
) -> io::Result<Option<BoundedLogFileWriter>> {
|
||||
let Some(path) = path else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let path: PathBuf = path.to_path_buf();
|
||||
let writer = if is_stdout {
|
||||
BoundedLogFileWriter::new_stdout(&path, max_bytes).await?
|
||||
} else {
|
||||
BoundedLogFileWriter::new_stderr(&path, max_bytes).await?
|
||||
};
|
||||
Ok(Some(writer))
|
||||
}
|
||||
|
||||
/// Parse stdout content according to the specified output format.
|
||||
fn configure_child_process(cmd: &mut Command) -> io::Result<()> {
|
||||
#[cfg(unix)]
|
||||
|
||||
@@ -1,819 +0,0 @@
|
||||
//! Python Runtime Implementation
|
||||
//!
|
||||
//! Executes Python actions using subprocess execution.
|
||||
|
||||
use super::{
|
||||
BoundedLogWriter, DependencyManagerRegistry, DependencySpec, ExecutionContext, ExecutionResult,
|
||||
OutputFormat, Runtime, RuntimeError, RuntimeResult,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Stdio;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::process::Command;
|
||||
use tokio::time::timeout;
|
||||
use tracing::{debug, info, warn};
|
||||
|
||||
/// Python runtime for executing Python scripts and functions
|
||||
pub struct PythonRuntime {
|
||||
/// Python interpreter path (fallback when no venv exists)
|
||||
python_path: PathBuf,
|
||||
|
||||
/// Base directory for storing action code
|
||||
work_dir: PathBuf,
|
||||
|
||||
/// Optional dependency manager registry for isolated environments
|
||||
dependency_manager: Option<Arc<DependencyManagerRegistry>>,
|
||||
}
|
||||
|
||||
impl PythonRuntime {
|
||||
/// Create a new Python runtime
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
python_path: PathBuf::from("python3"),
|
||||
work_dir: PathBuf::from("/tmp/attune/actions"),
|
||||
dependency_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Python runtime with custom settings
|
||||
pub fn with_config(python_path: PathBuf, work_dir: PathBuf) -> Self {
|
||||
Self {
|
||||
python_path,
|
||||
work_dir,
|
||||
dependency_manager: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a Python runtime with dependency manager support
|
||||
pub fn with_dependency_manager(
|
||||
python_path: PathBuf,
|
||||
work_dir: PathBuf,
|
||||
dependency_manager: Arc<DependencyManagerRegistry>,
|
||||
) -> Self {
|
||||
Self {
|
||||
python_path,
|
||||
work_dir,
|
||||
dependency_manager: Some(dependency_manager),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the Python executable path to use for a given context
|
||||
///
|
||||
/// If the action has a pack_ref with dependencies, use the venv Python.
|
||||
/// Otherwise, use the default Python interpreter.
|
||||
async fn get_python_executable(&self, context: &ExecutionContext) -> RuntimeResult<PathBuf> {
|
||||
// Check if we have a dependency manager and can extract pack_ref
|
||||
if let Some(ref dep_mgr) = self.dependency_manager {
|
||||
// Extract pack_ref from action_ref (format: "pack_ref.action_name")
|
||||
if let Some(pack_ref) = context.action_ref.split('.').next() {
|
||||
// Try to get the executable path for this pack
|
||||
match dep_mgr.get_executable_path(pack_ref, "python").await {
|
||||
Ok(python_path) => {
|
||||
debug!(
|
||||
"Using pack-specific Python from venv: {}",
|
||||
python_path.display()
|
||||
);
|
||||
return Ok(python_path);
|
||||
}
|
||||
Err(e) => {
|
||||
// Venv doesn't exist or failed - this is OK if pack has no dependencies
|
||||
debug!(
|
||||
"No venv found for pack {} ({}), using default Python",
|
||||
pack_ref, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to default Python interpreter
|
||||
debug!("Using default Python interpreter: {:?}", self.python_path);
|
||||
Ok(self.python_path.clone())
|
||||
}
|
||||
|
||||
/// Generate Python wrapper script that loads parameters and executes the action
|
||||
fn generate_wrapper_script(&self, context: &ExecutionContext) -> RuntimeResult<String> {
|
||||
let params_json = serde_json::to_string(&context.parameters)?;
|
||||
|
||||
// Use base64 encoding for code to avoid any quote/escape issues
|
||||
let code_bytes = context.code.as_deref().unwrap_or("").as_bytes();
|
||||
let code_base64 =
|
||||
base64::Engine::encode(&base64::engine::general_purpose::STANDARD, code_bytes);
|
||||
|
||||
let wrapper = format!(
|
||||
r#"#!/usr/bin/env python3
|
||||
import sys
|
||||
import json
|
||||
import traceback
|
||||
import base64
|
||||
from pathlib import Path
|
||||
|
||||
# Global secrets storage (read from stdin, NOT from environment)
|
||||
_attune_secrets = {{}}
|
||||
|
||||
def get_secret(name):
|
||||
"""
|
||||
Get a secret value by name.
|
||||
|
||||
Secrets are passed securely via stdin and are never exposed in
|
||||
environment variables or process listings.
|
||||
|
||||
Args:
|
||||
name (str): The name of the secret to retrieve
|
||||
|
||||
Returns:
|
||||
str: The secret value, or None if not found
|
||||
"""
|
||||
return _attune_secrets.get(name)
|
||||
|
||||
def main():
|
||||
global _attune_secrets
|
||||
|
||||
try:
|
||||
# Read secrets from stdin FIRST (before executing action code)
|
||||
# This prevents secrets from being visible in process environment
|
||||
secrets_line = sys.stdin.readline().strip()
|
||||
if secrets_line:
|
||||
_attune_secrets = json.loads(secrets_line)
|
||||
|
||||
# Parse parameters
|
||||
parameters = json.loads('''{}''')
|
||||
|
||||
# Decode action code from base64 (avoids quote/escape issues)
|
||||
action_code = base64.b64decode('{}').decode('utf-8')
|
||||
|
||||
# Execute the code in a controlled namespace
|
||||
# Include get_secret helper function
|
||||
namespace = {{
|
||||
'__name__': '__main__',
|
||||
'parameters': parameters,
|
||||
'get_secret': get_secret
|
||||
}}
|
||||
exec(action_code, namespace)
|
||||
|
||||
# Look for main function or run function
|
||||
if '{}' in namespace:
|
||||
result = namespace['{}'](**parameters)
|
||||
elif 'run' in namespace:
|
||||
result = namespace['run'](**parameters)
|
||||
elif 'main' in namespace:
|
||||
result = namespace['main'](**parameters)
|
||||
else:
|
||||
# No entry point found, return the namespace (only JSON-serializable values)
|
||||
def is_json_serializable(obj):
|
||||
"""Check if an object is JSON serializable"""
|
||||
if obj is None:
|
||||
return True
|
||||
if isinstance(obj, (bool, int, float, str)):
|
||||
return True
|
||||
if isinstance(obj, (list, tuple)):
|
||||
return all(is_json_serializable(item) for item in obj)
|
||||
if isinstance(obj, dict):
|
||||
return all(is_json_serializable(k) and is_json_serializable(v)
|
||||
for k, v in obj.items())
|
||||
return False
|
||||
|
||||
result = {{k: v for k, v in namespace.items()
|
||||
if not k.startswith('__') and is_json_serializable(v)}}
|
||||
|
||||
# Output result as JSON
|
||||
if result is not None:
|
||||
print(json.dumps({{'result': result, 'status': 'success'}}))
|
||||
else:
|
||||
print(json.dumps({{'status': 'success'}}))
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
except Exception as e:
|
||||
error_info = {{
|
||||
'status': 'error',
|
||||
'error': str(e),
|
||||
'error_type': type(e).__name__,
|
||||
'traceback': traceback.format_exc()
|
||||
}}
|
||||
print(json.dumps(error_info), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
"#,
|
||||
params_json, code_base64, context.entry_point, context.entry_point
|
||||
);
|
||||
|
||||
Ok(wrapper)
|
||||
}
|
||||
|
||||
/// Execute with streaming and bounded log collection
|
||||
async fn execute_with_streaming(
|
||||
&self,
|
||||
mut cmd: Command,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
output_format: OutputFormat,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
let start = Instant::now();
|
||||
|
||||
// Spawn process with piped I/O
|
||||
let mut child = cmd
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
// Write secrets to stdin
|
||||
if let Some(mut stdin) = child.stdin.take() {
|
||||
let secrets_json = serde_json::to_string(secrets)?;
|
||||
stdin.write_all(secrets_json.as_bytes()).await?;
|
||||
stdin.write_all(b"\n").await?;
|
||||
drop(stdin);
|
||||
}
|
||||
|
||||
// Create bounded writers
|
||||
let mut stdout_writer = BoundedLogWriter::new_stdout(max_stdout_bytes);
|
||||
let mut stderr_writer = BoundedLogWriter::new_stderr(max_stderr_bytes);
|
||||
|
||||
// Take stdout and stderr streams
|
||||
let stdout = child.stdout.take().expect("stdout not captured");
|
||||
let stderr = child.stderr.take().expect("stderr not captured");
|
||||
|
||||
// Create buffered readers
|
||||
let mut stdout_reader = BufReader::new(stdout);
|
||||
let mut stderr_reader = BufReader::new(stderr);
|
||||
|
||||
// Stream both outputs concurrently
|
||||
let stdout_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stdout_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stdout_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stdout_writer
|
||||
};
|
||||
|
||||
let stderr_task = async {
|
||||
let mut line = Vec::new();
|
||||
loop {
|
||||
line.clear();
|
||||
match stderr_reader.read_until(b'\n', &mut line).await {
|
||||
Ok(0) => break, // EOF
|
||||
Ok(_) => {
|
||||
if stderr_writer.write_all(&line).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
stderr_writer
|
||||
};
|
||||
|
||||
// Wait for both streams and the process
|
||||
let (stdout_writer, stderr_writer, wait_result) =
|
||||
tokio::join!(stdout_task, stderr_task, async {
|
||||
if let Some(timeout_secs) = timeout_secs {
|
||||
timeout(std::time::Duration::from_secs(timeout_secs), child.wait()).await
|
||||
} else {
|
||||
Ok(child.wait().await)
|
||||
}
|
||||
});
|
||||
|
||||
let duration_ms = start.elapsed().as_millis() as u64;
|
||||
|
||||
// Handle timeout
|
||||
let status = match wait_result {
|
||||
Ok(Ok(status)) => status,
|
||||
Ok(Err(e)) => {
|
||||
return Err(RuntimeError::ProcessError(format!(
|
||||
"Process wait failed: {}",
|
||||
e
|
||||
)));
|
||||
}
|
||||
Err(_) => {
|
||||
return Ok(ExecutionResult {
|
||||
exit_code: -1,
|
||||
stdout: String::new(),
|
||||
stderr: String::new(),
|
||||
result: None,
|
||||
duration_ms,
|
||||
error: Some(format!(
|
||||
"Execution timed out after {} seconds",
|
||||
timeout_secs.unwrap()
|
||||
)),
|
||||
stdout_truncated: false,
|
||||
stderr_truncated: false,
|
||||
stdout_bytes_truncated: 0,
|
||||
stderr_bytes_truncated: 0,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Get results from bounded writers
|
||||
let stdout_result = stdout_writer.into_result();
|
||||
let stderr_result = stderr_writer.into_result();
|
||||
|
||||
let exit_code = status.code().unwrap_or(-1);
|
||||
|
||||
debug!(
|
||||
"Python execution completed: exit_code={}, duration={}ms, stdout_truncated={}, stderr_truncated={}",
|
||||
exit_code, duration_ms, stdout_result.truncated, stderr_result.truncated
|
||||
);
|
||||
|
||||
// Parse result from stdout based on output_format
|
||||
let result = if exit_code == 0 && !stdout_result.content.trim().is_empty() {
|
||||
match output_format {
|
||||
OutputFormat::Text => {
|
||||
// No parsing - text output is captured in stdout field
|
||||
None
|
||||
}
|
||||
OutputFormat::Json => {
|
||||
// Try to parse full stdout as JSON first (handles multi-line JSON),
|
||||
// then fall back to last line only (for scripts that log before output)
|
||||
let trimmed = stdout_result.content.trim();
|
||||
serde_json::from_str(trimmed).ok().or_else(|| {
|
||||
trimmed
|
||||
.lines()
|
||||
.last()
|
||||
.and_then(|line| serde_json::from_str(line).ok())
|
||||
})
|
||||
}
|
||||
OutputFormat::Yaml => {
|
||||
// Try to parse stdout as YAML
|
||||
serde_yaml_ng::from_str(stdout_result.content.trim()).ok()
|
||||
}
|
||||
OutputFormat::Jsonl => {
|
||||
// Parse each line as JSON and collect into array
|
||||
let mut items = Vec::new();
|
||||
for line in stdout_result.content.trim().lines() {
|
||||
if let Ok(value) = serde_json::from_str::<serde_json::Value>(line) {
|
||||
items.push(value);
|
||||
}
|
||||
}
|
||||
if items.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(serde_json::Value::Array(items))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(ExecutionResult {
|
||||
exit_code,
|
||||
// Only populate stdout if result wasn't parsed (avoid duplication)
|
||||
stdout: if result.is_some() {
|
||||
String::new()
|
||||
} else {
|
||||
stdout_result.content.clone()
|
||||
},
|
||||
stderr: stderr_result.content.clone(),
|
||||
result,
|
||||
duration_ms,
|
||||
error: if exit_code != 0 {
|
||||
Some(stderr_result.content)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
stdout_truncated: stdout_result.truncated,
|
||||
stderr_truncated: stderr_result.truncated,
|
||||
stdout_bytes_truncated: stdout_result.bytes_truncated,
|
||||
stderr_bytes_truncated: stderr_result.bytes_truncated,
|
||||
})
|
||||
}
|
||||
|
||||
async fn execute_python_code(
|
||||
&self,
|
||||
script: String,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
python_path: PathBuf,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
output_format: OutputFormat,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing Python script with {} secrets (passed via stdin)",
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&python_path);
|
||||
cmd.arg("-c").arg(&script);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
output_format,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Execute Python script from file
|
||||
async fn execute_python_file(
|
||||
&self,
|
||||
code_path: PathBuf,
|
||||
secrets: &std::collections::HashMap<String, String>,
|
||||
env: &std::collections::HashMap<String, String>,
|
||||
timeout_secs: Option<u64>,
|
||||
python_path: PathBuf,
|
||||
max_stdout_bytes: usize,
|
||||
max_stderr_bytes: usize,
|
||||
output_format: OutputFormat,
|
||||
) -> RuntimeResult<ExecutionResult> {
|
||||
debug!(
|
||||
"Executing Python file: {:?} with {} secrets",
|
||||
code_path,
|
||||
secrets.len()
|
||||
);
|
||||
|
||||
// Build command
|
||||
let mut cmd = Command::new(&python_path);
|
||||
cmd.arg(&code_path);
|
||||
|
||||
// Add environment variables
|
||||
for (key, value) in env {
|
||||
cmd.env(key, value);
|
||||
}
|
||||
|
||||
self.execute_with_streaming(
|
||||
cmd,
|
||||
secrets,
|
||||
timeout_secs,
|
||||
max_stdout_bytes,
|
||||
max_stderr_bytes,
|
||||
output_format,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PythonRuntime {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PythonRuntime {
|
||||
/// Ensure pack dependencies are installed (called before execution if needed)
|
||||
///
|
||||
/// This is a helper method that can be called by the worker service to ensure
|
||||
/// a pack's Python dependencies are set up before executing actions.
|
||||
pub async fn ensure_pack_dependencies(
|
||||
&self,
|
||||
pack_ref: &str,
|
||||
spec: &DependencySpec,
|
||||
) -> RuntimeResult<()> {
|
||||
if let Some(ref dep_mgr) = self.dependency_manager {
|
||||
if spec.has_dependencies() {
|
||||
info!(
|
||||
"Ensuring Python dependencies for pack: {} ({} dependencies)",
|
||||
pack_ref,
|
||||
spec.dependencies.len()
|
||||
);
|
||||
|
||||
dep_mgr
|
||||
.ensure_environment(pack_ref, spec)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
RuntimeError::SetupError(format!(
|
||||
"Failed to setup Python environment for {}: {}",
|
||||
pack_ref, e
|
||||
))
|
||||
})?;
|
||||
|
||||
info!("Python dependencies ready for pack: {}", pack_ref);
|
||||
} else {
|
||||
debug!("Pack {} has no Python dependencies", pack_ref);
|
||||
}
|
||||
} else {
|
||||
warn!("Dependency manager not configured, skipping dependency isolation");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Runtime for PythonRuntime {
|
||||
fn name(&self) -> &str {
|
||||
"python"
|
||||
}
|
||||
|
||||
fn can_execute(&self, context: &ExecutionContext) -> bool {
|
||||
// Check if action reference suggests Python
|
||||
let is_python = context.action_ref.contains(".py")
|
||||
|| context.entry_point.ends_with(".py")
|
||||
|| context
|
||||
.code_path
|
||||
.as_ref()
|
||||
.map(|p| p.extension().and_then(|e| e.to_str()) == Some("py"))
|
||||
.unwrap_or(false);
|
||||
|
||||
is_python
|
||||
}
|
||||
|
||||
async fn execute(&self, context: ExecutionContext) -> RuntimeResult<ExecutionResult> {
|
||||
info!(
|
||||
"Executing Python action: {} (execution_id: {})",
|
||||
context.action_ref, context.execution_id
|
||||
);
|
||||
|
||||
// Get the appropriate Python executable (venv or default)
|
||||
let python_path = self.get_python_executable(&context).await?;
|
||||
|
||||
// If code_path is provided, execute the file directly
|
||||
if let Some(code_path) = &context.code_path {
|
||||
return self
|
||||
.execute_python_file(
|
||||
code_path.clone(),
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
python_path,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
context.output_format,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
// Otherwise, generate wrapper script and execute
|
||||
let script = self.generate_wrapper_script(&context)?;
|
||||
self.execute_python_code(
|
||||
script,
|
||||
&context.secrets,
|
||||
&context.env,
|
||||
context.timeout,
|
||||
python_path,
|
||||
context.max_stdout_bytes,
|
||||
context.max_stderr_bytes,
|
||||
context.output_format,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn setup(&self) -> RuntimeResult<()> {
|
||||
info!("Setting up Python runtime");
|
||||
|
||||
// Ensure work directory exists
|
||||
tokio::fs::create_dir_all(&self.work_dir)
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Failed to create work dir: {}", e)))?;
|
||||
|
||||
// Verify Python is available
|
||||
let output = Command::new(&self.python_path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
RuntimeError::SetupError(format!(
|
||||
"Python not found at {:?}: {}",
|
||||
self.python_path, e
|
||||
))
|
||||
})?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Python interpreter is not working".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let version = String::from_utf8_lossy(&output.stdout);
|
||||
info!("Python runtime ready: {}", version.trim());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cleanup(&self) -> RuntimeResult<()> {
|
||||
info!("Cleaning up Python runtime");
|
||||
// Could clean up temporary files here
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn validate(&self) -> RuntimeResult<()> {
|
||||
debug!("Validating Python runtime");
|
||||
|
||||
// Check if Python is available
|
||||
let output = Command::new(&self.python_path)
|
||||
.arg("--version")
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| RuntimeError::SetupError(format!("Python validation failed: {}", e)))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(RuntimeError::SetupError(
|
||||
"Python interpreter validation failed".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    /// Build an `ExecutionContext` for runtime tests.
    ///
    /// Only the fields that actually vary between tests are passed in; every
    /// other field gets the defaults that each test previously spelled out by
    /// hand (this removes four near-identical ~19-field struct literals).
    /// Implemented as a macro rather than a helper `fn` so the id/timeout
    /// arguments stay untyped integer literals and adopt whatever concrete
    /// types the struct fields declare.
    macro_rules! test_context {
        (
            id: $id:expr,
            action_ref: $action_ref:expr,
            timeout: $timeout:expr,
            parameters: $parameters:expr,
            secrets: $secrets:expr,
            code: $code:expr $(,)?
        ) => {
            ExecutionContext {
                execution_id: $id,
                action_ref: $action_ref.to_string(),
                parameters: $parameters,
                env: HashMap::new(),
                secrets: $secrets,
                timeout: $timeout,
                working_dir: None,
                entry_point: "run".to_string(),
                code: Some($code.to_string()),
                code_path: None,
                runtime_name: Some("python".to_string()),
                runtime_config_override: None,
                runtime_env_dir_suffix: None,
                selected_runtime_version: None,
                max_stdout_bytes: 10 * 1024 * 1024,
                max_stderr_bytes: 10 * 1024 * 1024,
                parameter_delivery: attune_common::models::ParameterDelivery::default(),
                parameter_format: attune_common::models::ParameterFormat::default(),
                output_format: attune_common::models::OutputFormat::default(),
            }
        };
    }

    /// Happy path: a trivial action returns success with exit code 0.
    #[tokio::test]
    async fn test_python_runtime_simple() {
        let runtime = PythonRuntime::new();

        let context = test_context!(
            id: 1,
            action_ref: "test.simple",
            timeout: Some(10),
            parameters: {
                let mut map = HashMap::new();
                map.insert("x".to_string(), serde_json::json!(5));
                map.insert("y".to_string(), serde_json::json!(10));
                map
            },
            secrets: HashMap::new(),
            code: r#"
def run(x, y):
    return x + y
"#,
        );

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);
    }

    /// A one-second timeout against a ten-second sleep must abort the run
    /// and report a timeout in the error message.
    #[tokio::test]
    async fn test_python_runtime_timeout() {
        let runtime = PythonRuntime::new();

        let context = test_context!(
            id: 2,
            action_ref: "test.timeout",
            timeout: Some(1),
            parameters: HashMap::new(),
            secrets: HashMap::new(),
            code: r#"
import time
def run():
    time.sleep(10)
    return "done"
"#,
        );

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        assert!(result.error.is_some());
        let error_msg = result.error.unwrap();
        assert!(error_msg.contains("timeout") || error_msg.contains("timed out"));
    }

    /// An exception raised inside the action surfaces as a failed result
    /// (with an error payload), not as an `Err` from `execute()` itself.
    #[tokio::test]
    async fn test_python_runtime_error() {
        let runtime = PythonRuntime::new();

        let context = test_context!(
            id: 3,
            action_ref: "test.error",
            timeout: Some(10),
            parameters: HashMap::new(),
            secrets: HashMap::new(),
            code: r#"
def run():
    raise ValueError("Test error")
"#,
        );

        let result = runtime.execute(context).await.unwrap();
        assert!(!result.is_success());
        assert!(result.error.is_some());
    }

    /// Secrets passed in the context should be readable from action code via
    /// the injected `get_secret()` helper; missing keys yield `None`/null.
    #[tokio::test]
    #[ignore = "Pre-existing failure - secrets not being passed correctly"]
    async fn test_python_runtime_with_secrets() {
        let runtime = PythonRuntime::new();

        let context = test_context!(
            id: 4,
            action_ref: "test.secrets",
            timeout: Some(10),
            parameters: HashMap::new(),
            secrets: {
                let mut s = HashMap::new();
                s.insert("api_key".to_string(), "secret_key_12345".to_string());
                s.insert("db_password".to_string(), "super_secret_pass".to_string());
                s
            },
            code: r#"
def run():
    # Access secrets via get_secret() helper
    api_key = get_secret('api_key')
    db_pass = get_secret('db_password')
    missing = get_secret('nonexistent')

    return {
        'api_key': api_key,
        'db_pass': db_pass,
        'missing': missing
    }
"#,
        );

        let result = runtime.execute(context).await.unwrap();
        assert!(result.is_success());
        assert_eq!(result.exit_code, 0);

        // Verify secrets are accessible in action code
        let result_data = result.result.unwrap();
        let result_obj = result_data.get("result").unwrap();
        assert_eq!(result_obj.get("api_key").unwrap(), "secret_key_12345");
        assert_eq!(result_obj.get("db_pass").unwrap(), "super_secret_pass");
        assert_eq!(result_obj.get("missing"), Some(&serde_json::Value::Null));
    }
}
|
||||
Reference in New Issue
Block a user