[wip] cli capability parity
Some checks failed
CI / Rustfmt (push) Successful in 23s
CI / Cargo Audit & Deny (push) Successful in 30s
CI / Web Blocking Checks (push) Successful in 48s
CI / Security Blocking Checks (push) Successful in 8s
CI / Clippy (push) Failing after 1m55s
CI / Web Advisory Checks (push) Successful in 35s
CI / Security Advisory Checks (push) Successful in 37s
CI / Tests (push) Successful in 8m5s

This commit is contained in:
2026-03-06 16:58:50 -06:00
parent 48b6ca6bd7
commit 87d830f952
94 changed files with 3694 additions and 734 deletions

View File

@@ -6,9 +6,6 @@ authors.workspace = true
license.workspace = true
repository.workspace = true
[features]
integration-tests = []
[lib]
name = "attune_api"
path = "src/lib.rs"

View File

@@ -2,6 +2,7 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use utoipa::{IntoParams, ToSchema};
use validator::Validate;
@@ -61,9 +62,9 @@ pub struct KeyResponse {
#[schema(example = true)]
pub encrypted: bool,
/// The secret value (decrypted if encrypted)
#[schema(example = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")]
pub value: String,
/// The secret value (decrypted if encrypted). Can be a string, object, array, number, or boolean.
#[schema(value_type = Value, example = json!("ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"))]
pub value: JsonValue,
/// Creation timestamp
#[schema(example = "2024-01-13T10:30:00Z")]
@@ -194,21 +195,16 @@ pub struct CreateKeyRequest {
#[schema(example = "GitHub API Token")]
pub name: String,
/// The secret value to store
#[validate(length(min = 1, max = 10000))]
#[schema(example = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")]
pub value: String,
/// The secret value to store. Can be a string, object, array, number, or boolean.
#[schema(value_type = Value, example = json!("ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"))]
pub value: JsonValue,
/// Whether to encrypt the value (recommended: true)
#[serde(default = "default_encrypted")]
#[schema(example = true)]
/// Whether to encrypt the value at rest (default: false; use --encrypt / -e from CLI)
#[serde(default)]
#[schema(example = false)]
pub encrypted: bool,
}
fn default_encrypted() -> bool {
true
}
/// Request to update an existing key/secret
#[derive(Debug, Clone, Serialize, Deserialize, Validate, ToSchema)]
pub struct UpdateKeyRequest {
@@ -217,10 +213,9 @@ pub struct UpdateKeyRequest {
#[schema(example = "GitHub API Token (Updated)")]
pub name: Option<String>,
/// Update the secret value
#[validate(length(min = 1, max = 10000))]
#[schema(example = "ghp_new_token_xxxxxxxxxxxxxxxxxxxxxxxx")]
pub value: Option<String>,
/// Update the secret value. Can be a string, object, array, number, or boolean.
#[schema(value_type = Option<Value>, example = json!("ghp_new_token_xxxxxxxxxxxxxxxxxxxxxxxx"))]
pub value: Option<JsonValue>,
/// Update encryption status (re-encrypts if changing from false to true)
#[schema(example = true)]

View File

@@ -115,6 +115,9 @@ async fn mq_reconnect_loop(state: Arc<AppState>, mq_url: String) {
#[tokio::main]
async fn main() -> Result<()> {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
// Initialize tracing subscriber
tracing_subscriber::fmt()
.with_target(false)

View File

@@ -102,8 +102,8 @@ pub async fn get_key(
ApiError::InternalServerError("Encryption key not configured on server".to_string())
})?;
let decrypted_value =
attune_common::crypto::decrypt(&key.value, encryption_key).map_err(|e| {
let decrypted_value = attune_common::crypto::decrypt_json(&key.value, encryption_key)
.map_err(|e| {
tracing::error!("Failed to decrypt key '{}': {}", key_ref, e);
ApiError::InternalServerError(format!("Failed to decrypt key: {}", e))
})?;
@@ -233,11 +233,11 @@ pub async fn create_key(
)
})?;
let encrypted_value = attune_common::crypto::encrypt(&request.value, encryption_key)
let encrypted_value = attune_common::crypto::encrypt_json(&request.value, encryption_key)
.map_err(|e| {
tracing::error!("Failed to encrypt key value: {}", e);
ApiError::InternalServerError(format!("Failed to encrypt value: {}", e))
})?;
tracing::error!("Failed to encrypt key value: {}", e);
ApiError::InternalServerError(format!("Failed to encrypt value: {}", e))
})?;
let key_hash = attune_common::crypto::hash_encryption_key(encryption_key);
@@ -270,10 +270,11 @@ pub async fn create_key(
// Return decrypted value in response
if key.encrypted {
let encryption_key = state.config.security.encryption_key.as_ref().unwrap();
key.value = attune_common::crypto::decrypt(&key.value, encryption_key).map_err(|e| {
tracing::error!("Failed to decrypt newly created key: {}", e);
ApiError::InternalServerError(format!("Failed to decrypt value: {}", e))
})?;
key.value =
attune_common::crypto::decrypt_json(&key.value, encryption_key).map_err(|e| {
tracing::error!("Failed to decrypt newly created key: {}", e);
ApiError::InternalServerError(format!("Failed to decrypt value: {}", e))
})?;
}
let response = ApiResponse::with_message(KeyResponse::from(key), "Key created successfully");
@@ -328,11 +329,11 @@ pub async fn update_key(
)
})?;
let encrypted_value = attune_common::crypto::encrypt(&new_value, encryption_key)
let encrypted_value = attune_common::crypto::encrypt_json(&new_value, encryption_key)
.map_err(|e| {
tracing::error!("Failed to encrypt key value: {}", e);
ApiError::InternalServerError(format!("Failed to encrypt value: {}", e))
})?;
tracing::error!("Failed to encrypt key value: {}", e);
ApiError::InternalServerError(format!("Failed to encrypt value: {}", e))
})?;
let key_hash = attune_common::crypto::hash_encryption_key(encryption_key);
@@ -366,7 +367,7 @@ pub async fn update_key(
ApiError::InternalServerError("Encryption key not configured on server".to_string())
})?;
updated_key.value = attune_common::crypto::decrypt(&updated_key.value, encryption_key)
updated_key.value = attune_common::crypto::decrypt_json(&updated_key.value, encryption_key)
.map_err(|e| {
tracing::error!("Failed to decrypt updated key '{}': {}", key_ref, e);
ApiError::InternalServerError(format!("Failed to decrypt value: {}", e))

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for health check and authentication endpoints
use axum::http::StatusCode;
@@ -8,6 +7,7 @@ use serde_json::json;
mod helpers;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_register_debug() {
let ctx = TestContext::new()
.await
@@ -37,6 +37,7 @@ async fn test_register_debug() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_health_check() {
let ctx = TestContext::new()
.await
@@ -55,6 +56,7 @@ async fn test_health_check() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_health_detailed() {
let ctx = TestContext::new()
.await
@@ -75,6 +77,7 @@ async fn test_health_detailed() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_health_ready() {
let ctx = TestContext::new()
.await
@@ -91,6 +94,7 @@ async fn test_health_ready() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_health_live() {
let ctx = TestContext::new()
.await
@@ -107,6 +111,7 @@ async fn test_health_live() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_register_user() {
let ctx = TestContext::new()
.await
@@ -138,6 +143,7 @@ async fn test_register_user() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_register_duplicate_user() {
let ctx = TestContext::new()
.await
@@ -175,6 +181,7 @@ async fn test_register_duplicate_user() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_register_invalid_password() {
let ctx = TestContext::new()
.await
@@ -197,6 +204,7 @@ async fn test_register_invalid_password() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_login_success() {
let ctx = TestContext::new()
.await
@@ -239,6 +247,7 @@ async fn test_login_success() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_login_wrong_password() {
let ctx = TestContext::new()
.await
@@ -275,6 +284,7 @@ async fn test_login_wrong_password() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_login_nonexistent_user() {
let ctx = TestContext::new()
.await
@@ -296,6 +306,7 @@ async fn test_login_nonexistent_user() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_current_user() {
let ctx = TestContext::new()
.await
@@ -319,6 +330,7 @@ async fn test_get_current_user() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_current_user_unauthorized() {
let ctx = TestContext::new()
.await
@@ -333,6 +345,7 @@ async fn test_get_current_user_unauthorized() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_current_user_invalid_token() {
let ctx = TestContext::new()
.await
@@ -347,6 +360,7 @@ async fn test_get_current_user_invalid_token() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_refresh_token() {
let ctx = TestContext::new()
.await
@@ -397,6 +411,7 @@ async fn test_refresh_token() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_refresh_with_invalid_token() {
let ctx = TestContext::new()
.await

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for pack registry system
//!
//! This module tests:
@@ -128,6 +127,7 @@ actions:
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_from_local_directory() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -167,6 +167,7 @@ async fn test_install_pack_from_local_directory() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_with_dependency_validation_success() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -217,6 +218,7 @@ async fn test_install_pack_with_dependency_validation_success() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_with_missing_dependency_fails() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -256,6 +258,7 @@ async fn test_install_pack_with_missing_dependency_fails() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_skip_deps_bypasses_validation() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -291,6 +294,7 @@ async fn test_install_pack_skip_deps_bypasses_validation() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_with_runtime_validation() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -324,6 +328,7 @@ async fn test_install_pack_with_runtime_validation() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_metadata_tracking() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -373,6 +378,7 @@ async fn test_install_pack_metadata_tracking() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_force_reinstall() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -425,6 +431,7 @@ async fn test_install_pack_force_reinstall() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_storage_path_created() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -475,6 +482,7 @@ async fn test_install_pack_storage_path_created() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_invalid_source() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -505,6 +513,7 @@ async fn test_install_pack_invalid_source() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_missing_pack_yaml() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -539,6 +548,7 @@ async fn test_install_pack_missing_pack_yaml() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_invalid_pack_yaml() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -567,6 +577,7 @@ async fn test_install_pack_invalid_pack_yaml() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_without_auth_fails() -> Result<()> {
let ctx = TestContext::new().await?; // No auth
@@ -592,6 +603,7 @@ async fn test_install_pack_without_auth_fails() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_pack_installations() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();
@@ -639,6 +651,7 @@ async fn test_multiple_pack_installations() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_install_pack_version_upgrade() -> Result<()> {
let ctx = TestContext::new().await?.with_auth().await?;
let token = ctx.token().unwrap();

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for pack workflow sync and validation
mod helpers;
@@ -59,6 +58,7 @@ tasks:
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sync_pack_workflows_endpoint() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -95,6 +95,7 @@ async fn test_sync_pack_workflows_endpoint() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_validate_pack_workflows_endpoint() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -121,6 +122,7 @@ async fn test_validate_pack_workflows_endpoint() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sync_nonexistent_pack_returns_404() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -137,6 +139,7 @@ async fn test_sync_nonexistent_pack_returns_404() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_validate_nonexistent_pack_returns_404() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -153,6 +156,7 @@ async fn test_validate_nonexistent_pack_returns_404() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sync_workflows_requires_authentication() {
let ctx = TestContext::new().await.unwrap();
@@ -180,6 +184,7 @@ async fn test_sync_workflows_requires_authentication() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_validate_workflows_requires_authentication() {
let ctx = TestContext::new().await.unwrap();
@@ -207,6 +212,7 @@ async fn test_validate_workflows_requires_authentication() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_creation_with_auto_sync() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -237,6 +243,7 @@ async fn test_pack_creation_with_auto_sync() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_update_with_auto_resync() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for SSE execution stream endpoint
//!
//! These tests verify that:
@@ -87,6 +86,7 @@ async fn create_test_execution(pool: &PgPool, action_id: i64) -> Result<Executio
/// Run with: cargo test test_sse_stream_receives_execution_updates -- --ignored --nocapture
/// After starting: cargo run -p attune-api -- -c config.test.yaml
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sse_stream_receives_execution_updates() -> Result<()> {
// Set up test context with auth
let ctx = TestContext::new().await?.with_auth().await?;
@@ -225,6 +225,7 @@ async fn test_sse_stream_receives_execution_updates() -> Result<()> {
/// Test that SSE stream correctly filters by execution_id
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sse_stream_filters_by_execution_id() -> Result<()> {
// Set up test context with auth
let ctx = TestContext::new().await?.with_auth().await?;
@@ -326,6 +327,7 @@ async fn test_sse_stream_filters_by_execution_id() -> Result<()> {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sse_stream_requires_authentication() -> Result<()> {
// Try to connect without token
let sse_url = "http://localhost:8080/api/v1/executions/stream";
@@ -371,6 +373,7 @@ async fn test_sse_stream_requires_authentication() -> Result<()> {
/// Test streaming all executions (no filter)
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sse_stream_all_executions() -> Result<()> {
// Set up test context with auth
let ctx = TestContext::new().await?.with_auth().await?;
@@ -463,6 +466,7 @@ async fn test_sse_stream_all_executions() -> Result<()> {
/// Test that PostgreSQL NOTIFY triggers actually fire
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_postgresql_notify_trigger_fires() -> Result<()> {
let ctx = TestContext::new().await?;

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for webhook API endpoints
use attune_api::{AppState, Server};
@@ -109,6 +108,7 @@ async fn get_auth_token(app: &axum::Router, username: &str, password: &str) -> S
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_enable_webhook() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -151,6 +151,7 @@ async fn test_enable_webhook() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_disable_webhook() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -201,6 +202,7 @@ async fn test_disable_webhook() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_regenerate_webhook_key() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -252,6 +254,7 @@ async fn test_regenerate_webhook_key() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_regenerate_webhook_key_not_enabled() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -288,6 +291,7 @@ async fn test_regenerate_webhook_key_not_enabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_receive_webhook() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -358,6 +362,7 @@ async fn test_receive_webhook() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_receive_webhook_invalid_key() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state));
@@ -387,6 +392,7 @@ async fn test_receive_webhook_invalid_key() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_receive_webhook_disabled() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -436,6 +442,7 @@ async fn test_receive_webhook_disabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_requires_auth_for_management() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -468,6 +475,7 @@ async fn test_webhook_requires_auth_for_management() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_receive_webhook_minimal_payload() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Comprehensive integration tests for webhook security features (Phase 3)
//!
//! Tests cover:
@@ -123,6 +122,7 @@ fn generate_hmac_signature(payload: &[u8], secret: &str, algorithm: &str) -> Str
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_hmac_sha256_valid() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -189,6 +189,7 @@ async fn test_webhook_hmac_sha256_valid() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_hmac_sha512_valid() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -245,6 +246,7 @@ async fn test_webhook_hmac_sha512_valid() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_hmac_invalid_signature() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -300,6 +302,7 @@ async fn test_webhook_hmac_invalid_signature() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_hmac_missing_signature() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -352,6 +355,7 @@ async fn test_webhook_hmac_missing_signature() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_hmac_wrong_secret() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -414,6 +418,7 @@ async fn test_webhook_hmac_wrong_secret() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_rate_limit_enforced() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -489,6 +494,7 @@ async fn test_webhook_rate_limit_enforced() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_rate_limit_disabled() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -535,6 +541,7 @@ async fn test_webhook_rate_limit_disabled() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_ip_whitelist_allowed() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -605,6 +612,7 @@ async fn test_webhook_ip_whitelist_allowed() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_ip_whitelist_blocked() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -661,6 +669,7 @@ async fn test_webhook_ip_whitelist_blocked() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_payload_size_limit_enforced() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));
@@ -711,6 +720,7 @@ async fn test_webhook_payload_size_limit_enforced() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_payload_size_within_limit() {
let state = setup_test_state().await;
let server = Server::new(std::sync::Arc::new(state.clone()));

View File

@@ -1,4 +1,3 @@
#![cfg(feature = "integration-tests")]
//! Integration tests for workflow API endpoints
use attune_common::repositories::{
@@ -20,6 +19,7 @@ fn unique_pack_name() -> String {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_workflow_success() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -65,6 +65,7 @@ async fn test_create_workflow_success() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_workflow_duplicate_ref() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -110,6 +111,7 @@ async fn test_create_workflow_duplicate_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_workflow_pack_not_found() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -132,6 +134,7 @@ async fn test_create_workflow_pack_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_workflow_by_ref() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -170,6 +173,7 @@ async fn test_get_workflow_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_workflow_not_found() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -182,6 +186,7 @@ async fn test_get_workflow_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_workflows() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -228,6 +233,7 @@ async fn test_list_workflows() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_workflows_by_pack() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -295,6 +301,7 @@ async fn test_list_workflows_by_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_workflows_with_filters() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -362,6 +369,7 @@ async fn test_list_workflows_with_filters() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_workflow() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -410,6 +418,7 @@ async fn test_update_workflow() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_workflow_not_found() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -428,6 +437,7 @@ async fn test_update_workflow_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_workflow() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -469,6 +479,7 @@ async fn test_delete_workflow() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_workflow_not_found() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();
@@ -481,6 +492,7 @@ async fn test_delete_workflow_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_workflow_requires_auth() {
let ctx = TestContext::new().await.unwrap();
@@ -505,6 +517,7 @@ async fn test_create_workflow_requires_auth() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_workflow_validation() {
let ctx = TestContext::new().await.unwrap().with_auth().await.unwrap();

View File

@@ -51,9 +51,12 @@ flate2 = { workspace = true }
# WebSocket client (for notifier integration)
tokio-tungstenite = { workspace = true }
# Hashing
sha2 = { workspace = true }
# Terminal UI
colored = "3.1"
comfy-table = "7.2"
comfy-table = { version = "7.2", features = ["custom_styling"] }
dialoguer = "0.12"
# Authentication

View File

@@ -1,5 +1,5 @@
use anyhow::{Context, Result};
use reqwest::{multipart, Client as HttpClient, Method, RequestBuilder, StatusCode};
use reqwest::{header, multipart, Client as HttpClient, Method, RequestBuilder, StatusCode};
use serde::{de::DeserializeOwned, Serialize};
use std::path::PathBuf;
use std::time::Duration;
@@ -347,6 +347,80 @@ impl ApiClient {
.await
}
/// GET request that returns raw bytes and optional filename from Content-Disposition.
///
/// Used for downloading binary content (e.g., artifact files).
/// Returns `(bytes, content_type, optional_filename)`.
pub async fn download_bytes(
    &mut self,
    path: &str,
) -> Result<(Vec<u8>, String, Option<String>)> {
    // Initial attempt with the current access token.
    let response = self
        .build_request(Method::GET, path)
        .send()
        .await
        .context("Failed to send request to API")?;

    // On 401, attempt a one-shot token refresh; if it succeeds, repeat
    // the request once with the refreshed credentials.
    let should_retry = response.status() == StatusCode::UNAUTHORIZED
        && self.refresh_token.is_some()
        && self.refresh_auth_token().await?;
    if should_retry {
        let retried = self
            .build_request(Method::GET, path)
            .send()
            .await
            .context("Failed to send request to API (retry)")?;
        return self.handle_bytes_response(retried).await;
    }

    self.handle_bytes_response(response).await
}
/// Parse a binary response, extracting content type and optional filename.
async fn handle_bytes_response(
&self,
response: reqwest::Response,
) -> Result<(Vec<u8>, String, Option<String>)> {
let status = response.status();
if status.is_success() {
let content_type = response
.headers()
.get(header::CONTENT_TYPE)
.and_then(|v| v.to_str().ok())
.unwrap_or("application/octet-stream")
.to_string();
let filename = response
.headers()
.get(header::CONTENT_DISPOSITION)
.and_then(|v| v.to_str().ok())
.and_then(|v| {
// Parse filename from Content-Disposition: attachment; filename="name.ext"
v.split("filename=")
.nth(1)
.map(|f| f.trim_matches('"').trim_matches('\'').to_string())
});
let bytes = response
.bytes()
.await
.context("Failed to read response bytes")?;
Ok((bytes.to_vec(), content_type, filename))
} else {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
if let Ok(api_error) = serde_json::from_str::<ApiError>(&error_text) {
anyhow::bail!("API error ({}): {}", status, api_error.error);
} else {
anyhow::bail!("API error ({}): {}", status, error_text);
}
}
}
/// POST a multipart/form-data request with a file field and optional text fields.
///
/// - `file_field_name`: the multipart field name for the file

View File

@@ -241,7 +241,7 @@ async fn handle_list(
let mut table = output::create_table();
output::add_header(
&mut table,
vec!["ID", "Pack", "Name", "Runner", "Enabled", "Description"],
vec!["ID", "Pack", "Name", "Runner", "Description"],
);
for action in actions {
@@ -253,7 +253,6 @@ async fn handle_list(
.runtime
.map(|r| r.to_string())
.unwrap_or_else(|| "none".to_string()),
"".to_string(),
output::truncate(&action.description, 40),
]);
}

File diff suppressed because it is too large Load Diff

View File

@@ -175,7 +175,7 @@ async fn handle_current(output_format: OutputFormat) -> Result<()> {
match output_format {
OutputFormat::Json | OutputFormat::Yaml => {
let result = serde_json::json!({
"current_profile": config.current_profile
"profile": config.current_profile
});
output::print_output(&result, output_format)?;
}
@@ -194,7 +194,7 @@ async fn handle_use(name: String, output_format: OutputFormat) -> Result<()> {
match output_format {
OutputFormat::Json | OutputFormat::Yaml => {
let result = serde_json::json!({
"current_profile": name,
"profile": name,
"message": "Switched profile"
});
output::print_output(&result, output_format)?;
@@ -299,10 +299,6 @@ async fn handle_show_profile(name: String, output_format: OutputFormat) -> Resul
),
];
if let Some(output_format) = &profile.output_format {
pairs.push(("Output Format", output_format.clone()));
}
if let Some(description) = &profile.description {
pairs.push(("Description", description.clone()));
}

View File

@@ -50,7 +50,7 @@ pub enum ExecutionCommands {
execution_id: i64,
/// Skip confirmation prompt
#[arg(short = 'y', long)]
#[arg(long)]
yes: bool,
},
/// Get raw execution result

View File

@@ -0,0 +1,605 @@
use anyhow::Result;
use clap::Subcommand;
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use sha2::{Digest, Sha256};
use crate::client::ApiClient;
use crate::config::CliConfig;
use crate::output::{self, OutputFormat};
// Subcommands for `attune key …`.
// NOTE: the `///` doc comments below double as clap-generated help text and
// are user-visible, so they must not be reworded casually.
#[derive(Subcommand)]
pub enum KeyCommands {
    /// List all keys (values redacted)
    List {
        /// Filter by owner type (system, identity, pack, action, sensor)
        #[arg(long)]
        owner_type: Option<String>,

        /// Filter by owner string
        #[arg(long)]
        owner: Option<String>,

        /// Page number
        #[arg(long, default_value = "1")]
        page: u32,

        /// Items per page
        #[arg(long, default_value = "50")]
        per_page: u32,
    },

    /// Show details of a specific key
    Show {
        /// Key reference identifier
        key_ref: String,

        /// Decrypt and display the actual value (otherwise a SHA-256 hash is shown)
        #[arg(short = 'd', long)]
        decrypt: bool,
    },

    /// Create a new key/secret
    Create {
        // `ref` is a Rust keyword, hence the raw identifier.
        /// Unique reference for the key (e.g., "github_token")
        #[arg(long)]
        r#ref: String,

        /// Human-readable name for the key
        #[arg(long)]
        name: String,

        /// The secret value to store. Plain strings are stored as JSON strings.
        /// Use JSON syntax for structured values (e.g., '{"user":"admin","pass":"s3cret"}').
        #[arg(long)]
        value: String,

        /// Owner type (system, identity, pack, action, sensor)
        #[arg(long, default_value = "system")]
        owner_type: String,

        /// Owner string identifier
        #[arg(long)]
        owner: Option<String>,

        /// Owner pack reference (auto-resolves pack ID)
        #[arg(long)]
        owner_pack_ref: Option<String>,

        /// Owner action reference (auto-resolves action ID)
        #[arg(long)]
        owner_action_ref: Option<String>,

        /// Owner sensor reference (auto-resolves sensor ID)
        #[arg(long)]
        owner_sensor_ref: Option<String>,

        /// Encrypt the value before storing (default: unencrypted)
        #[arg(short = 'e', long)]
        encrypt: bool,
    },

    /// Update an existing key/secret
    Update {
        /// Key reference identifier
        key_ref: String,

        /// Update the human-readable name
        #[arg(long)]
        name: Option<String>,

        /// Update the secret value. Plain strings are stored as JSON strings.
        /// Use JSON syntax for structured values (e.g., '{"user":"admin","pass":"s3cret"}').
        #[arg(long)]
        value: Option<String>,

        /// Update encryption status
        #[arg(long)]
        encrypted: Option<bool>,
    },

    /// Delete a key/secret
    Delete {
        /// Key reference identifier
        key_ref: String,

        /// Skip confirmation prompt
        #[arg(long)]
        yes: bool,
    },
}
// ── Response / request types used for (de)serialization against the API ────
/// Full key record as returned by the API (includes the stored value).
#[derive(Debug, Serialize, Deserialize)]
struct KeyResponse {
    id: i64,
    // `ref` is a Rust keyword, so the JSON field name is remapped.
    #[serde(rename = "ref")]
    key_ref: String,
    owner_type: String,
    // Owner fields are optional in the payload; `#[serde(default)]` makes a
    // missing field deserialize as None instead of erroring.
    #[serde(default)]
    owner: Option<String>,
    #[serde(default)]
    owner_identity: Option<i64>,
    #[serde(default)]
    owner_pack: Option<i64>,
    #[serde(default)]
    owner_pack_ref: Option<String>,
    #[serde(default)]
    owner_action: Option<i64>,
    #[serde(default)]
    owner_action_ref: Option<String>,
    #[serde(default)]
    owner_sensor: Option<i64>,
    #[serde(default)]
    owner_sensor_ref: Option<String>,
    name: String,
    encrypted: bool,
    // Defaults to JSON null when the server omits the value.
    #[serde(default)]
    value: JsonValue,
    // Timestamps are kept as raw strings; formatting happens at display time.
    created: String,
    updated: String,
}
/// Trimmed-down key record used by the list endpoint — deliberately has no
/// `value` field, so listing keys never transports secrets.
#[derive(Debug, Serialize, Deserialize)]
struct KeySummary {
    id: i64,
    // `ref` is a Rust keyword, so the JSON field name is remapped.
    #[serde(rename = "ref")]
    key_ref: String,
    owner_type: String,
    #[serde(default)]
    owner: Option<String>,
    name: String,
    encrypted: bool,
    created: String,
}
/// Request body for `POST /keys`.
///
/// Optional owner fields are omitted from the serialized JSON entirely when
/// `None`, rather than being sent as null.
#[derive(Debug, Serialize)]
struct CreateKeyRequestBody {
    r#ref: String,
    owner_type: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    owner: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    owner_pack_ref: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    owner_action_ref: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    owner_sensor_ref: Option<String>,
    name: String,
    value: JsonValue,
    encrypted: bool,
}
/// Request body for `PUT /keys/{ref}` — a partial update.
///
/// Fields left as `None` are omitted from the JSON so the server only
/// touches what the user asked to change.
#[derive(Debug, Serialize)]
struct UpdateKeyRequestBody {
    #[serde(skip_serializing_if = "Option::is_none")]
    name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    value: Option<JsonValue>,
    #[serde(skip_serializing_if = "Option::is_none")]
    encrypted: Option<bool>,
}
// ── Command dispatch ───────────────────────────────────────────────────────
/// Entry point for `attune key …`: fans each subcommand out to its
/// dedicated handler. Every handler loads its own config/profile and
/// performs the HTTP call itself.
pub async fn handle_key_command(
    profile: &Option<String>,
    command: KeyCommands,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    match command {
        KeyCommands::List { owner_type, owner, page, per_page } => {
            handle_list(profile, owner_type, owner, page, per_page, api_url, output_format).await
        }
        KeyCommands::Show { key_ref, decrypt } => {
            handle_show(profile, key_ref, decrypt, api_url, output_format).await
        }
        KeyCommands::Create {
            r#ref,
            name,
            value,
            owner_type,
            owner,
            owner_pack_ref,
            owner_action_ref,
            owner_sensor_ref,
            encrypt,
        } => {
            handle_create(
                profile,
                r#ref,
                name,
                value,
                owner_type,
                owner,
                owner_pack_ref,
                owner_action_ref,
                owner_sensor_ref,
                encrypt,
                api_url,
                output_format,
            )
            .await
        }
        KeyCommands::Update { key_ref, name, value, encrypted } => {
            handle_update(profile, key_ref, name, value, encrypted, api_url, output_format).await
        }
        KeyCommands::Delete { key_ref, yes } => {
            handle_delete(profile, key_ref, yes, api_url, output_format).await
        }
    }
}
// ── Handlers ───────────────────────────────────────────────────────────────
#[allow(clippy::too_many_arguments)]
async fn handle_list(
profile: &Option<String>,
owner_type: Option<String>,
owner: Option<String>,
page: u32,
per_page: u32,
api_url: &Option<String>,
output_format: OutputFormat,
) -> Result<()> {
let config = CliConfig::load_with_profile(profile.as_deref())?;
let mut client = ApiClient::from_config(&config, api_url);
let mut query_params = vec![format!("page={}", page), format!("per_page={}", per_page)];
if let Some(ot) = owner_type {
query_params.push(format!("owner_type={}", ot));
}
if let Some(o) = owner {
query_params.push(format!("owner={}", o));
}
let path = format!("/keys?{}", query_params.join("&"));
let keys: Vec<KeySummary> = client.get(&path).await?;
match output_format {
OutputFormat::Json | OutputFormat::Yaml => {
output::print_output(&keys, output_format)?;
}
OutputFormat::Table => {
if keys.is_empty() {
output::print_info("No keys found");
} else {
let mut table = output::create_table();
output::add_header(
&mut table,
vec![
"ID",
"Ref",
"Name",
"Owner Type",
"Owner",
"Encrypted",
"Created",
],
);
for key in keys {
table.add_row(vec![
key.id.to_string(),
key.key_ref.clone(),
key.name.clone(),
key.owner_type.clone(),
key.owner.clone().unwrap_or_else(|| "-".to_string()),
output::format_bool(key.encrypted),
output::format_timestamp(&key.created),
]);
}
println!("{}", table);
}
}
}
Ok(())
}
/// Fetch a single key via `GET /keys/{ref}` and display it.
///
/// Unless `decrypt` is set, the stored value is replaced in the output by
/// its SHA-256 hash, so the secret itself is never printed by default.
async fn handle_show(
    profile: &Option<String>,
    key_ref: String,
    decrypt: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);

    // The key ref is user input and lands in a path segment — encode it.
    let path = format!("/keys/{}", urlencoding::encode(&key_ref));
    let key: KeyResponse = client.get(&path).await?;

    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            if decrypt {
                output::print_output(&key, output_format)?;
            } else {
                // Redact value — replace with hash
                let mut redacted = serde_json::to_value(&key)?;
                if let Some(obj) = redacted.as_object_mut() {
                    obj.insert(
                        "value".to_string(),
                        JsonValue::String(hash_value_for_display(&key.value)),
                    );
                }
                output::print_output(&redacted, output_format)?;
            }
        }
        OutputFormat::Table => {
            output::print_section(&format!("Key: {}", key.key_ref));
            let mut pairs = vec![
                ("ID", key.id.to_string()),
                ("Reference", key.key_ref.clone()),
                ("Name", key.name.clone()),
                ("Owner Type", key.owner_type.clone()),
                (
                    "Owner",
                    key.owner.clone().unwrap_or_else(|| "-".to_string()),
                ),
            ];
            // Owner detail rows are only shown when the API resolved them.
            if let Some(ref pack_ref) = key.owner_pack_ref {
                pairs.push(("Owner Pack", pack_ref.clone()));
            }
            if let Some(ref action_ref) = key.owner_action_ref {
                pairs.push(("Owner Action", action_ref.clone()));
            }
            if let Some(ref sensor_ref) = key.owner_sensor_ref {
                pairs.push(("Owner Sensor", sensor_ref.clone()));
            }
            pairs.push(("Encrypted", output::format_bool(key.encrypted)));
            if decrypt {
                pairs.push(("Value", format_value_for_display(&key.value)));
            } else {
                // Hash lets the user verify the value without revealing it.
                pairs.push(("Value (SHA-256)", hash_value_for_display(&key.value)));
                pairs.push((
                    "",
                    "(use --decrypt / -d to reveal the actual value)".to_string(),
                ));
            }
            pairs.push(("Created", output::format_timestamp(&key.created)));
            pairs.push(("Updated", output::format_timestamp(&key.updated)));
            output::print_key_value_table(pairs);
        }
    }
    Ok(())
}
/// Create a new key/secret via `POST /keys`.
///
/// The owner type is validated client-side before any network traffic, and
/// the raw `--value` string is parsed into JSON (falling back to a plain
/// JSON string) so structured secrets can be stored.
#[allow(clippy::too_many_arguments)]
async fn handle_create(
    profile: &Option<String>,
    key_ref: String,
    name: String,
    value: String,
    owner_type: String,
    owner: Option<String>,
    owner_pack_ref: Option<String>,
    owner_action_ref: Option<String>,
    owner_sensor_ref: Option<String>,
    encrypted: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    // Fail fast on a bad owner type before touching config or the network.
    validate_owner_type(&owner_type)?;

    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);

    let body = CreateKeyRequestBody {
        r#ref: key_ref,
        owner_type,
        owner,
        owner_pack_ref,
        owner_action_ref,
        owner_sensor_ref,
        name,
        value: parse_value_as_json(&value),
        encrypted,
    };
    let key: KeyResponse = client.post("/keys", &body).await?;

    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => output::print_output(&key, output_format)?,
        OutputFormat::Table => {
            output::print_success(&format!("Key '{}' created successfully", key.key_ref));
            let owner_display = key.owner.clone().unwrap_or_else(|| "-".to_string());
            output::print_key_value_table(vec![
                ("ID", key.id.to_string()),
                ("Reference", key.key_ref.clone()),
                ("Name", key.name.clone()),
                ("Owner Type", key.owner_type.clone()),
                ("Owner", owner_display),
                ("Encrypted", output::format_bool(key.encrypted)),
                ("Created", output::format_timestamp(&key.created)),
            ]);
        }
    }
    Ok(())
}
/// Update an existing key via `PUT /keys/{ref}`.
///
/// Rejects a request where nothing would change; otherwise only the fields
/// the user supplied are serialized into the partial-update body.
async fn handle_update(
    profile: &Option<String>,
    key_ref: String,
    name: Option<String>,
    value: Option<String>,
    encrypted: Option<bool>,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    // Refuse a no-op update up front.
    if let (None, None, None) = (&name, &value, &encrypted) {
        anyhow::bail!(
            "At least one field must be provided to update (--name, --value, or --encrypted)"
        );
    }

    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);

    let body = UpdateKeyRequestBody {
        name,
        // Raw CLI string → JSON value (plain strings become JSON strings).
        value: value.map(|v| parse_value_as_json(&v)),
        encrypted,
    };
    let path = format!("/keys/{}", urlencoding::encode(&key_ref));
    let key: KeyResponse = client.put(&path, &body).await?;

    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => output::print_output(&key, output_format)?,
        OutputFormat::Table => {
            output::print_success(&format!("Key '{}' updated successfully", key.key_ref));
            let owner_display = key.owner.clone().unwrap_or_else(|| "-".to_string());
            output::print_key_value_table(vec![
                ("ID", key.id.to_string()),
                ("Reference", key.key_ref.clone()),
                ("Name", key.name.clone()),
                ("Owner Type", key.owner_type.clone()),
                ("Owner", owner_display),
                ("Encrypted", output::format_bool(key.encrypted)),
                ("Updated", output::format_timestamp(&key.updated)),
            ]);
        }
    }
    Ok(())
}
/// Delete a key via `DELETE /keys/{ref}`.
///
/// In table mode the user is prompted to confirm unless `--yes` was given;
/// JSON/YAML output never prompts, so scripted use stays non-interactive.
async fn handle_delete(
    profile: &Option<String>,
    key_ref: String,
    yes: bool,
    api_url: &Option<String>,
    output_format: OutputFormat,
) -> Result<()> {
    let config = CliConfig::load_with_profile(profile.as_deref())?;
    let mut client = ApiClient::from_config(&config, api_url);

    // Interactive confirmation only makes sense for human (table) output.
    if !yes && matches!(output_format, OutputFormat::Table) {
        let proceed = dialoguer::Confirm::new()
            .with_prompt(format!("Are you sure you want to delete key '{}'?", key_ref))
            .default(false)
            .interact()?;
        if !proceed {
            output::print_info("Deletion cancelled");
            return Ok(());
        }
    }

    let path = format!("/keys/{}", urlencoding::encode(&key_ref));
    client.delete_no_response(&path).await?;

    match output_format {
        OutputFormat::Json | OutputFormat::Yaml => {
            let msg =
                serde_json::json!({"message": format!("Key '{}' deleted successfully", key_ref)});
            output::print_output(&msg, output_format)?;
        }
        OutputFormat::Table => {
            output::print_success(&format!("Key '{}' deleted successfully", key_ref));
        }
    }
    Ok(())
}
// ── Helpers ────────────────────────────────────────────────────────────────
/// Validate that `owner_type` is one of the accepted owner kinds.
///
/// Returns an error naming the allowed values when the input is not in the
/// accepted set.
fn validate_owner_type(owner_type: &str) -> Result<()> {
    const VALID: &[&str] = &["system", "identity", "pack", "action", "sensor"];
    if VALID.iter().any(|v| *v == owner_type) {
        Ok(())
    } else {
        anyhow::bail!(
            "Invalid owner type '{}'. Must be one of: {}",
            owner_type,
            VALID.join(", ")
        )
    }
}
/// Parse a CLI string value into a [`JsonValue`].
///
/// Valid JSON input (object, array, number, boolean, null, or quoted
/// string) is kept as-is; anything else is wrapped as a JSON string, so a
/// plain secret like `hunter2` never fails to parse.
fn parse_value_as_json(input: &str) -> JsonValue {
    serde_json::from_str::<JsonValue>(input)
        .unwrap_or_else(|_| JsonValue::String(input.to_string()))
}
/// Format a [`JsonValue`] for table display.
///
/// JSON strings are shown bare (no surrounding quotes); all other value
/// kinds are pretty-printed.
fn format_value_for_display(value: &JsonValue) -> String {
    if let JsonValue::String(s) = value {
        return s.clone();
    }
    serde_json::to_string_pretty(value).unwrap_or_else(|_| value.to_string())
}
/// Compute a SHA-256 hash of the JSON value for display purposes.
///
/// Lets users confirm a stored value matches expectations (e.g., that it
/// hasn't changed) without ever revealing the content itself.
fn hash_value_for_display(value: &JsonValue) -> String {
    // Hash the compact JSON serialization so the digest is stable for a
    // given value regardless of how it is later displayed.
    let serialized = serde_json::to_string(value).unwrap_or_default();
    let digest = Sha256::digest(serialized.as_bytes());
    format!("sha256:{:x}", digest)
}

View File

@@ -1,7 +1,9 @@
pub mod action;
pub mod artifact;
pub mod auth;
pub mod config;
pub mod execution;
pub mod key;
pub mod pack;
pub mod pack_index;
pub mod rule;

View File

@@ -95,10 +95,6 @@ pub enum PackCommands {
/// Update version
#[arg(long)]
version: Option<String>,
/// Update enabled status
#[arg(long)]
enabled: Option<bool>,
},
/// Uninstall a pack
Uninstall {
@@ -246,8 +242,6 @@ struct Pack {
#[serde(default)]
keywords: Option<Vec<String>>,
#[serde(default)]
enabled: Option<bool>,
#[serde(default)]
metadata: Option<serde_json::Value>,
created: String,
updated: String,
@@ -273,8 +267,6 @@ struct PackDetail {
#[serde(default)]
keywords: Option<Vec<String>>,
#[serde(default)]
enabled: Option<bool>,
#[serde(default)]
metadata: Option<serde_json::Value>,
created: String,
updated: String,
@@ -404,7 +396,6 @@ pub async fn handle_pack_command(
label,
description,
version,
enabled,
} => {
handle_update(
profile,
@@ -412,7 +403,6 @@ pub async fn handle_pack_command(
label,
description,
version,
enabled,
api_url,
output_format,
)
@@ -651,17 +641,13 @@ async fn handle_list(
output::print_info("No packs found");
} else {
let mut table = output::create_table();
output::add_header(
&mut table,
vec!["ID", "Name", "Version", "Enabled", "Description"],
);
output::add_header(&mut table, vec!["ID", "Name", "Version", "Description"]);
for pack in packs {
table.add_row(vec![
pack.id.to_string(),
pack.pack_ref,
pack.version,
output::format_bool(pack.enabled.unwrap_or(true)),
output::truncate(&pack.description.unwrap_or_default(), 50),
]);
}
@@ -705,7 +691,6 @@ async fn handle_show(
"Description",
pack.description.unwrap_or_else(|| "None".to_string()),
),
("Enabled", output::format_bool(pack.enabled.unwrap_or(true))),
("Actions", pack.action_count.unwrap_or(0).to_string()),
("Triggers", pack.trigger_count.unwrap_or(0).to_string()),
("Rules", pack.rule_count.unwrap_or(0).to_string()),
@@ -1779,7 +1764,6 @@ async fn handle_update(
label: Option<String>,
description: Option<String>,
version: Option<String>,
enabled: Option<bool>,
api_url: &Option<String>,
output_format: OutputFormat,
) -> Result<()> {
@@ -1787,7 +1771,7 @@ async fn handle_update(
let mut client = ApiClient::from_config(&config, api_url);
// Check that at least one field is provided
if label.is_none() && description.is_none() && version.is_none() && enabled.is_none() {
if label.is_none() && description.is_none() && version.is_none() {
anyhow::bail!("At least one field must be provided to update");
}
@@ -1799,15 +1783,12 @@ async fn handle_update(
description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
version: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
enabled: Option<bool>,
}
let request = UpdatePackRequest {
label,
description,
version,
enabled,
};
let path = format!("/packs/{}", pack_ref);
@@ -1824,7 +1805,6 @@ async fn handle_update(
("Ref", pack.pack_ref.clone()),
("Label", pack.label.clone()),
("Version", pack.version.clone()),
("Enabled", output::format_bool(pack.enabled.unwrap_or(true))),
("Updated", output::format_timestamp(&pack.updated)),
]);
}

View File

@@ -98,7 +98,7 @@ pub enum RuleCommands {
rule_ref: String,
/// Skip confirmation prompt
#[arg(short = 'y', long)]
#[arg(long)]
yes: bool,
},
}
@@ -275,12 +275,13 @@ async fn handle_list(
let mut table = output::create_table();
output::add_header(
&mut table,
vec!["ID", "Pack", "Name", "Trigger", "Action", "Enabled"],
vec!["ID", "Ref", "Pack", "Label", "Trigger", "Action", "Enabled"],
);
for rule in rules {
table.add_row(vec![
rule.id.to_string(),
rule.rule_ref.clone(),
rule.pack_ref.clone(),
rule.label.clone(),
rule.trigger_ref.clone(),

View File

@@ -5,25 +5,35 @@ use std::env;
use std::fs;
use std::path::PathBuf;
use crate::output::OutputFormat;
/// CLI configuration stored in user's home directory
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CliConfig {
/// Current active profile name
#[serde(default = "default_profile_name")]
#[serde(
default = "default_profile_name",
rename = "profile",
alias = "current_profile"
)]
pub current_profile: String,
/// Named profiles (like SSH hosts)
#[serde(default)]
pub profiles: HashMap<String, Profile>,
/// Default output format (can be overridden per-profile)
#[serde(default = "default_output_format")]
pub default_output_format: String,
/// Output format (table, json, yaml)
#[serde(
default = "default_format",
rename = "format",
alias = "default_output_format"
)]
pub format: String,
}
fn default_profile_name() -> String {
"default".to_string()
}
fn default_output_format() -> String {
fn default_format() -> String {
"table".to_string()
}
@@ -38,8 +48,9 @@ pub struct Profile {
/// Refresh token
#[serde(skip_serializing_if = "Option::is_none")]
pub refresh_token: Option<String>,
/// Output format override for this profile
#[serde(skip_serializing_if = "Option::is_none")]
/// Output format override for this profile (deprecated — ignored, kept for deserialization compat)
#[serde(skip_serializing)]
#[allow(dead_code)]
pub output_format: Option<String>,
/// Optional description
#[serde(skip_serializing_if = "Option::is_none")]
@@ -63,7 +74,7 @@ impl Default for CliConfig {
Self {
current_profile: "default".to_string(),
profiles,
default_output_format: default_output_format(),
format: default_format(),
}
}
}
@@ -193,6 +204,29 @@ impl CliConfig {
self.save()
}
/// Resolve the effective output format.
///
/// Priority (highest to lowest):
/// 1. Explicit CLI flag (`--json`, `--yaml`, `--output`)
/// 2. Config `format` field
///
/// The `cli_override` parameter should be `None` when the user did not pass an
/// explicit flag (i.e. clap returned the default value `table` *without*
/// the user typing it). Callers should pass `Some(format)` only when the
/// user actually supplied the flag.
pub fn effective_format(&self, cli_override: Option<OutputFormat>) -> OutputFormat {
if let Some(fmt) = cli_override {
return fmt;
}
// Fall back to config value
match self.format.to_lowercase().as_str() {
"json" => OutputFormat::Json,
"yaml" => OutputFormat::Yaml,
_ => OutputFormat::Table,
}
}
/// Set a configuration value by key
pub fn set_value(&mut self, key: &str, value: String) -> Result<()> {
match key {
@@ -200,14 +234,18 @@ impl CliConfig {
let profile = self.current_profile_mut()?;
profile.api_url = value;
}
"output_format" => {
let profile = self.current_profile_mut()?;
profile.output_format = Some(value);
"format" | "output_format" | "default_output_format" => {
// Validate the value
match value.to_lowercase().as_str() {
"table" | "json" | "yaml" => {}
_ => anyhow::bail!(
"Invalid format '{}'. Must be one of: table, json, yaml",
value
),
}
self.format = value.to_lowercase();
}
"default_output_format" => {
self.default_output_format = value;
}
"current_profile" => {
"profile" | "current_profile" => {
self.switch_profile(value)?;
return Ok(());
}
@@ -223,15 +261,8 @@ impl CliConfig {
let profile = self.current_profile()?;
Ok(profile.api_url.clone())
}
"output_format" => {
let profile = self.current_profile()?;
Ok(profile
.output_format
.clone()
.unwrap_or_else(|| self.default_output_format.clone()))
}
"default_output_format" => Ok(self.default_output_format.clone()),
"current_profile" => Ok(self.current_profile.clone()),
"format" | "output_format" | "default_output_format" => Ok(self.format.clone()),
"profile" | "current_profile" => Ok(self.current_profile.clone()),
"auth_token" => {
let profile = self.current_profile()?;
Ok(profile
@@ -262,19 +293,9 @@ impl CliConfig {
};
vec![
("current_profile".to_string(), self.current_profile.clone()),
("profile".to_string(), self.current_profile.clone()),
("format".to_string(), self.format.clone()),
("api_url".to_string(), profile.api_url.clone()),
(
"output_format".to_string(),
profile
.output_format
.clone()
.unwrap_or_else(|| self.default_output_format.clone()),
),
(
"default_output_format".to_string(),
self.default_output_format.clone(),
),
(
"auth_token".to_string(),
profile
@@ -354,7 +375,7 @@ mod tests {
fn test_default_config() {
let config = CliConfig::default();
assert_eq!(config.current_profile, "default");
assert_eq!(config.default_output_format, "table");
assert_eq!(config.format, "table");
assert!(config.profiles.contains_key("default"));
let profile = config.current_profile().unwrap();
@@ -378,6 +399,33 @@ mod tests {
);
}
#[test]
fn test_effective_format_defaults_to_config() {
let mut config = CliConfig::default();
config.format = "json".to_string();
// No CLI override → uses config
assert_eq!(config.effective_format(None), OutputFormat::Json);
}
#[test]
fn test_effective_format_cli_overrides_config() {
let mut config = CliConfig::default();
config.format = "json".to_string();
// CLI override wins
assert_eq!(
config.effective_format(Some(OutputFormat::Yaml)),
OutputFormat::Yaml
);
}
#[test]
fn test_effective_format_default_table() {
let config = CliConfig::default();
assert_eq!(config.effective_format(None), OutputFormat::Table);
}
#[test]
fn test_profile_management() {
let mut config = CliConfig::default();
@@ -387,7 +435,7 @@ mod tests {
api_url: "https://staging.example.com".to_string(),
auth_token: None,
refresh_token: None,
output_format: Some("json".to_string()),
output_format: None,
description: Some("Staging environment".to_string()),
};
config
@@ -442,7 +490,7 @@ mod tests {
config.get_value("api_url").unwrap(),
"http://localhost:8080"
);
assert_eq!(config.get_value("output_format").unwrap(), "table");
assert_eq!(config.get_value("format").unwrap(), "table");
// Set API URL for current profile
config
@@ -450,10 +498,53 @@ mod tests {
.unwrap();
assert_eq!(config.get_value("api_url").unwrap(), "http://test.com");
// Set output format for current profile
config
// Set format
config.set_value("format", "json".to_string()).unwrap();
assert_eq!(config.get_value("format").unwrap(), "json");
}
#[test]
fn test_set_value_validates_format() {
let mut config = CliConfig::default();
// Valid values
assert!(config.set_value("format", "table".to_string()).is_ok());
assert!(config.set_value("format", "json".to_string()).is_ok());
assert!(config.set_value("format", "yaml".to_string()).is_ok());
assert!(config.set_value("format", "JSON".to_string()).is_ok()); // case-insensitive
// Invalid value
assert!(config.set_value("format", "xml".to_string()).is_err());
}
#[test]
fn test_backward_compat_aliases() {
let mut config = CliConfig::default();
// Old key names should still work for get/set
assert!(config
.set_value("output_format", "json".to_string())
.unwrap();
.is_ok());
assert_eq!(config.get_value("output_format").unwrap(), "json");
assert_eq!(config.get_value("format").unwrap(), "json");
assert!(config
.set_value("default_output_format", "yaml".to_string())
.is_ok());
assert_eq!(config.get_value("default_output_format").unwrap(), "yaml");
assert_eq!(config.get_value("format").unwrap(), "yaml");
}
#[test]
fn test_deserialize_legacy_default_output_format() {
let yaml = r#"
profile: default
default_output_format: json
profiles:
default:
api_url: http://localhost:8080
"#;
let config: CliConfig = serde_yaml_ng::from_str(yaml).unwrap();
assert_eq!(config.format, "json");
}
}

View File

@@ -9,9 +9,11 @@ mod wait;
use commands::{
action::{handle_action_command, ActionCommands},
artifact::ArtifactCommands,
auth::AuthCommands,
config::ConfigCommands,
execution::ExecutionCommands,
key::KeyCommands,
pack::PackCommands,
rule::RuleCommands,
sensor::SensorCommands,
@@ -33,8 +35,8 @@ struct Cli {
api_url: Option<String>,
/// Output format
#[arg(long, value_enum, default_value = "table", global = true, conflicts_with_all = ["json", "yaml"])]
output: output::OutputFormat,
#[arg(long, value_enum, global = true, conflicts_with_all = ["json", "yaml"])]
output: Option<output::OutputFormat>,
/// Output as JSON (shorthand for --output json)
#[arg(short = 'j', long, global = true, conflicts_with_all = ["output", "yaml"])]
@@ -74,6 +76,11 @@ enum Commands {
#[command(subcommand)]
command: RuleCommands,
},
/// Key/secret management
Key {
#[command(subcommand)]
command: KeyCommands,
},
/// Execution monitoring
Execution {
#[command(subcommand)]
@@ -94,6 +101,11 @@ enum Commands {
#[command(subcommand)]
command: SensorCommands,
},
/// Artifact management (list, upload, download, delete)
Artifact {
#[command(subcommand)]
command: ArtifactCommands,
},
/// Configuration management
Config {
#[command(subcommand)]
@@ -129,6 +141,9 @@ enum Commands {
#[tokio::main]
async fn main() {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
let cli = Cli::parse();
// Initialize logging
@@ -138,14 +153,17 @@ async fn main() {
.init();
}
// Determine output format from flags
let output_format = if cli.json {
output::OutputFormat::Json
// Determine output format: explicit CLI flags > config file > default (table)
let cli_override = if cli.json {
Some(output::OutputFormat::Json)
} else if cli.yaml {
output::OutputFormat::Yaml
Some(output::OutputFormat::Yaml)
} else {
cli.output
};
let config_for_format =
config::CliConfig::load_with_profile(cli.profile.as_deref()).unwrap_or_default();
let output_format = config_for_format.effective_format(cli_override);
let result = match cli.command {
Commands::Auth { command } => {
@@ -169,6 +187,10 @@ async fn main() {
commands::rule::handle_rule_command(&cli.profile, command, &cli.api_url, output_format)
.await
}
Commands::Key { command } => {
commands::key::handle_key_command(&cli.profile, command, &cli.api_url, output_format)
.await
}
Commands::Execution { command } => {
commands::execution::handle_execution_command(
&cli.profile,
@@ -205,6 +227,15 @@ async fn main() {
)
.await
}
Commands::Artifact { command } => {
commands::artifact::handle_artifact_command(
&cli.profile,
command,
&cli.api_url,
output_format,
)
.await
}
Commands::Config { command } => {
commands::config::handle_config_command(&cli.profile, command, output_format).await
}

View File

@@ -115,11 +115,33 @@ fn create_test_index(packs: &[(&str, &str)]) -> TempDir {
temp_dir
}
/// Create an isolated CLI command that never touches the user's real config.
///
/// Returns `(Command, TempDir)` — the `TempDir` must be kept alive for the
/// duration of the test so the config directory isn't deleted prematurely.
fn isolated_cmd() -> (Command, TempDir) {
let config_dir = TempDir::new().expect("Failed to create temp config dir");
// Write a minimal default config so the CLI doesn't try to create one
let attune_dir = config_dir.path().join("attune");
fs::create_dir_all(&attune_dir).expect("Failed to create attune config dir");
fs::write(
attune_dir.join("config.yaml"),
"profile: default\nformat: table\nprofiles:\n default:\n api_url: http://localhost:8080\n",
)
.expect("Failed to write test config");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", config_dir.path())
.env("HOME", config_dir.path());
(cmd, config_dir)
}
#[test]
fn test_pack_checksum_directory() {
let pack_dir = create_test_pack("checksum-test", "1.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("table")
.arg("pack")
@@ -135,7 +157,7 @@ fn test_pack_checksum_directory() {
fn test_pack_checksum_json_output() {
let pack_dir = create_test_pack("checksum-json", "1.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("json")
.arg("pack")
@@ -153,7 +175,7 @@ fn test_pack_checksum_json_output() {
#[test]
fn test_pack_checksum_nonexistent_path() {
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack").arg("checksum").arg("/nonexistent/path");
cmd.assert().failure().stderr(
@@ -165,7 +187,7 @@ fn test_pack_checksum_nonexistent_path() {
fn test_pack_index_entry_generates_valid_json() {
let pack_dir = create_test_pack("index-entry-test", "1.2.3", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("json")
.arg("pack")
@@ -199,7 +221,7 @@ fn test_pack_index_entry_generates_valid_json() {
fn test_pack_index_entry_with_archive_url() {
let pack_dir = create_test_pack("archive-test", "2.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("json")
.arg("pack")
@@ -227,7 +249,7 @@ fn test_pack_index_entry_missing_pack_yaml() {
let temp_dir = TempDir::new().unwrap();
fs::write(temp_dir.path().join("readme.txt"), "No pack.yaml here").unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-entry")
.arg(temp_dir.path().to_str().unwrap());
@@ -244,7 +266,7 @@ fn test_pack_index_update_adds_new_entry() {
let pack_dir = create_test_pack("new-pack", "1.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-update")
.arg("--index")
@@ -273,7 +295,7 @@ fn test_pack_index_update_prevents_duplicate_without_flag() {
let pack_dir = create_test_pack("existing-pack", "1.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-update")
.arg("--index")
@@ -294,7 +316,7 @@ fn test_pack_index_update_with_update_flag() {
let pack_dir = create_test_pack("existing-pack", "2.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-update")
.arg("--index")
@@ -327,7 +349,7 @@ fn test_pack_index_update_invalid_index_file() {
let pack_dir = create_test_pack("test-pack", "1.0.0", &[]);
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-update")
.arg("--index")
@@ -345,8 +367,10 @@ fn test_pack_index_merge_combines_indexes() {
let output_dir = TempDir::new().unwrap();
let output_path = output_dir.path().join("merged.json");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.arg("pack")
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("table")
.arg("pack")
.arg("index-merge")
.arg("--file")
.arg(output_path.to_str().unwrap())
@@ -372,8 +396,10 @@ fn test_pack_index_merge_deduplicates() {
let output_dir = TempDir::new().unwrap();
let output_path = output_dir.path().join("merged.json");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.arg("pack")
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("table")
.arg("pack")
.arg("index-merge")
.arg("--file")
.arg(output_path.to_str().unwrap())
@@ -403,7 +429,7 @@ fn test_pack_index_merge_output_exists_without_force() {
let output_path = output_dir.path().join("merged.json");
fs::write(&output_path, "existing content").unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-merge")
.arg("--file")
@@ -423,7 +449,7 @@ fn test_pack_index_merge_with_force_flag() {
let output_path = output_dir.path().join("merged.json");
fs::write(&output_path, "existing content").unwrap();
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-merge")
.arg("--file")
@@ -443,7 +469,7 @@ fn test_pack_index_merge_empty_input_list() {
let output_dir = TempDir::new().unwrap();
let output_path = output_dir.path().join("merged.json");
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("pack")
.arg("index-merge")
.arg("--file")
@@ -459,8 +485,10 @@ fn test_pack_index_merge_missing_input_file() {
let output_dir = TempDir::new().unwrap();
let output_path = output_dir.path().join("merged.json");
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.arg("pack")
let (mut cmd, _config_dir) = isolated_cmd();
cmd.arg("--output")
.arg("table")
.arg("pack")
.arg("index-merge")
.arg("--file")
.arg(output_path.to_str().unwrap())
@@ -483,7 +511,7 @@ fn test_pack_commands_help() {
];
for args in commands {
let mut cmd = Command::cargo_bin("attune").unwrap();
let (mut cmd, _config_dir) = isolated_cmd();
for arg in &args {
cmd.arg(arg);
}

View File

@@ -20,7 +20,7 @@ async fn test_config_show_default() {
cmd.assert()
.success()
.stdout(predicate::str::contains("current_profile"))
.stdout(predicate::str::contains("profile"))
.stdout(predicate::str::contains("api_url"));
}
@@ -38,7 +38,7 @@ async fn test_config_show_json_output() {
cmd.assert()
.success()
.stdout(predicate::str::contains(r#""current_profile""#))
.stdout(predicate::str::contains(r#""profile""#))
.stdout(predicate::str::contains(r#""api_url""#));
}
@@ -56,7 +56,7 @@ async fn test_config_show_yaml_output() {
cmd.assert()
.success()
.stdout(predicate::str::contains("current_profile:"))
.stdout(predicate::str::contains("profile:"))
.stdout(predicate::str::contains("api_url:"));
}
@@ -118,7 +118,7 @@ async fn test_config_set_api_url() {
}
#[tokio::test]
async fn test_config_set_output_format() {
async fn test_config_set_format() {
let fixture = TestFixture::new().await;
fixture.write_default_config();
@@ -127,7 +127,7 @@ async fn test_config_set_output_format() {
.env("HOME", fixture.config_dir_path())
.arg("config")
.arg("set")
.arg("output_format")
.arg("format")
.arg("json");
cmd.assert()
@@ -137,7 +137,7 @@ async fn test_config_set_output_format() {
// Verify the change was persisted
let config_content =
std::fs::read_to_string(&fixture.config_path).expect("Failed to read config");
assert!(config_content.contains("output_format: json"));
assert!(config_content.contains("format: json"));
}
#[tokio::test]
@@ -273,7 +273,7 @@ async fn test_profile_use_switch() {
// Verify the current profile was changed
let config_content =
std::fs::read_to_string(&fixture.config_path).expect("Failed to read config");
assert!(config_content.contains("current_profile: staging"));
assert!(config_content.contains("profile: staging"));
}
#[tokio::test]
@@ -384,7 +384,7 @@ async fn test_profile_override_with_flag() {
// Verify current profile wasn't changed in the config file
let config_content =
std::fs::read_to_string(&fixture.config_path).expect("Failed to read config");
assert!(config_content.contains("current_profile: default"));
assert!(config_content.contains("profile: default"));
}
#[tokio::test]
@@ -405,28 +405,35 @@ async fn test_profile_override_with_env_var() {
// Verify current profile wasn't changed in the config file
let config_content =
std::fs::read_to_string(&fixture.config_path).expect("Failed to read config");
assert!(config_content.contains("current_profile: default"));
assert!(config_content.contains("profile: default"));
}
#[tokio::test]
async fn test_profile_with_custom_output_format() {
async fn test_config_format_respected_by_commands() {
let fixture = TestFixture::new().await;
fixture.write_multi_profile_config();
// Write a config with format set to json
let config = format!(
r#"
profile: default
format: json
profiles:
default:
api_url: {}
description: Test server
"#,
fixture.server_url()
);
fixture.write_config(&config);
// Switch to production which has json output format
// Run config list without --json flag; should output JSON because config says so
let mut cmd = Command::cargo_bin("attune").unwrap();
cmd.env("XDG_CONFIG_HOME", fixture.config_dir_path())
.env("HOME", fixture.config_dir_path())
.arg("config")
.arg("use")
.arg("production");
.arg("list");
cmd.assert().success();
// Verify the profile has custom output format
let config_content =
std::fs::read_to_string(&fixture.config_path).expect("Failed to read config");
assert!(config_content.contains("output_format: json"));
// JSON output contains curly braces
cmd.assert().success().stdout(predicate::str::contains("{"));
}
#[tokio::test]
@@ -443,7 +450,7 @@ async fn test_config_list_all_keys() {
cmd.assert()
.success()
.stdout(predicate::str::contains("api_url"))
.stdout(predicate::str::contains("output_format"))
.stdout(predicate::str::contains("format"))
.stdout(predicate::str::contains("auth_token"));
}

View File

@@ -55,6 +55,8 @@ utoipa = { workspace = true }
# JWT
jsonwebtoken = { workspace = true }
hmac = { workspace = true }
signature = { workspace = true }
# Encryption
argon2 = { workspace = true }

View File

@@ -0,0 +1,193 @@
//! HMAC-only CryptoProvider for jsonwebtoken v10.
//!
//! The `jsonwebtoken` crate v10 requires a `CryptoProvider` to be installed
//! before any signing/verification operations. The built-in `rust_crypto`
//! feature pulls in the `rsa` crate, which has an unpatched advisory
//! (RUSTSEC-2023-0071 — Marvin Attack timing sidechannel).
//!
//! Since Attune only uses HMAC-SHA2 (HS256/HS384/HS512) for JWT signing,
//! this module provides a minimal CryptoProvider that supports only those
//! algorithms, avoiding the `rsa` dependency entirely.
//!
//! Call [`install()`] once at process startup (before any JWT operations).
use hmac::{Hmac, Mac};
use jsonwebtoken::crypto::{CryptoProvider, JwkUtils, JwtSigner, JwtVerifier};
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey};
use sha2::{Sha256, Sha384, Sha512};
use signature::{Signer, Verifier};
use std::sync::Once;
type HmacSha256 = Hmac<Sha256>;
type HmacSha384 = Hmac<Sha384>;
type HmacSha512 = Hmac<Sha512>;
// ---------------------------------------------------------------------------
// Signers
// ---------------------------------------------------------------------------
// Generates a signer newtype for one fixed HMAC algorithm.
//
// `$name` wraps a keyed `$hmac_type` MAC instance. The generated type provides:
//   * `new()` — builds the MAC from the HMAC secret held in an `EncodingKey`;
//     an invalid secret maps to `InvalidKeyFormat`.
//   * `Signer<Vec<u8>>` — computes the MAC over the signing input.
//   * `JwtSigner` — reports the fixed `$alg` this signer implements.
macro_rules! define_hmac_signer {
    ($name:ident, $alg:expr, $hmac_type:ty) => {
        struct $name($hmac_type);

        impl $name {
            // Build a signer from the HMAC secret carried by `key`.
            fn new(key: &EncodingKey) -> jsonwebtoken::errors::Result<Self> {
                let inner = <$hmac_type>::new_from_slice(key.try_get_hmac_secret()?)
                    .map_err(|_| jsonwebtoken::errors::ErrorKind::InvalidKeyFormat)?;
                Ok(Self(inner))
            }
        }

        impl Signer<Vec<u8>> for $name {
            fn try_sign(&self, msg: &[u8]) -> std::result::Result<Vec<u8>, signature::Error> {
                // Clone the keyed state so signing can take `&self`; reset
                // guards against any residual buffered input on the clone.
                let mut mac = self.0.clone();
                mac.reset();
                mac.update(msg);
                Ok(mac.finalize().into_bytes().to_vec())
            }
        }

        impl JwtSigner for $name {
            fn algorithm(&self) -> Algorithm {
                $alg
            }
        }
    };
}

define_hmac_signer!(Hs256Signer, Algorithm::HS256, HmacSha256);
define_hmac_signer!(Hs384Signer, Algorithm::HS384, HmacSha384);
define_hmac_signer!(Hs512Signer, Algorithm::HS512, HmacSha512);
// ---------------------------------------------------------------------------
// Verifiers
// ---------------------------------------------------------------------------
// Generates a verifier newtype for one fixed HMAC algorithm.
//
// Mirror image of `define_hmac_signer`: the generated type recomputes the
// MAC over the message and compares the result against the presented
// signature via `verify_slice`.
macro_rules! define_hmac_verifier {
    ($name:ident, $alg:expr, $hmac_type:ty) => {
        struct $name($hmac_type);

        impl $name {
            // Build a verifier from the HMAC secret carried by `key`.
            fn new(key: &DecodingKey) -> jsonwebtoken::errors::Result<Self> {
                let inner = <$hmac_type>::new_from_slice(key.try_get_hmac_secret()?)
                    .map_err(|_| jsonwebtoken::errors::ErrorKind::InvalidKeyFormat)?;
                Ok(Self(inner))
            }
        }

        impl Verifier<Vec<u8>> for $name {
            // NOTE: `&Vec<u8>` (not `&[u8]`) is dictated by the
            // `Verifier<S>` trait signature with `S = Vec<u8>`.
            fn verify(
                &self,
                msg: &[u8],
                sig: &Vec<u8>,
            ) -> std::result::Result<(), signature::Error> {
                // Recompute the MAC over `msg`, then compare with `sig`.
                let mut mac = self.0.clone();
                mac.reset();
                mac.update(msg);
                mac.verify_slice(sig).map_err(signature::Error::from_source)
            }
        }

        impl JwtVerifier for $name {
            fn algorithm(&self) -> Algorithm {
                $alg
            }
        }
    };
}

define_hmac_verifier!(Hs256Verifier, Algorithm::HS256, HmacSha256);
define_hmac_verifier!(Hs384Verifier, Algorithm::HS384, HmacSha384);
define_hmac_verifier!(Hs512Verifier, Algorithm::HS512, HmacSha512);
// ---------------------------------------------------------------------------
// Provider
// ---------------------------------------------------------------------------
/// Construct a boxed [`JwtSigner`] for the requested algorithm.
///
/// Only the HMAC family (HS256/HS384/HS512) is supported; any other
/// algorithm yields `ErrorKind::InvalidAlgorithm`.
fn hmac_signer_factory(
    algorithm: &Algorithm,
    key: &EncodingKey,
) -> jsonwebtoken::errors::Result<Box<dyn JwtSigner>> {
    let signer: Box<dyn JwtSigner> = match algorithm {
        Algorithm::HS256 => Box::new(Hs256Signer::new(key)?),
        Algorithm::HS384 => Box::new(Hs384Signer::new(key)?),
        Algorithm::HS512 => Box::new(Hs512Signer::new(key)?),
        _ => return Err(jsonwebtoken::errors::ErrorKind::InvalidAlgorithm.into()),
    };
    Ok(signer)
}
/// Construct a boxed [`JwtVerifier`] for the requested algorithm.
///
/// Only the HMAC family (HS256/HS384/HS512) is supported; any other
/// algorithm yields `ErrorKind::InvalidAlgorithm`.
fn hmac_verifier_factory(
    algorithm: &Algorithm,
    key: &DecodingKey,
) -> jsonwebtoken::errors::Result<Box<dyn JwtVerifier>> {
    let verifier: Box<dyn JwtVerifier> = match algorithm {
        Algorithm::HS256 => Box::new(Hs256Verifier::new(key)?),
        Algorithm::HS384 => Box::new(Hs384Verifier::new(key)?),
        Algorithm::HS512 => Box::new(Hs512Verifier::new(key)?),
        _ => return Err(jsonwebtoken::errors::ErrorKind::InvalidAlgorithm.into()),
    };
    Ok(verifier)
}
/// HMAC-only [`CryptoProvider`]. Supports HS256, HS384, HS512 only.
/// JWK utility functions (RSA/EC key extraction) are stubbed out since
/// Attune never uses asymmetric JWKs.
static HMAC_PROVIDER: CryptoProvider = CryptoProvider {
    signer_factory: hmac_signer_factory,
    verifier_factory: hmac_verifier_factory,
    jwk_utils: JwkUtils::new_unimplemented(),
};

// Guards `install()` so the installation attempt happens at most once
// per process, regardless of how many call sites invoke it.
static INIT: Once = Once::new();

/// Install the HMAC-only crypto provider for jsonwebtoken.
///
/// Safe to call multiple times — only the first call takes effect.
/// Must be called before any JWT encode/decode operations.
pub fn install() {
    INIT.call_once(|| {
        // install_default returns Err if already installed (e.g., by a feature-based
        // provider). That's fine — we only care that *some* provider is present.
        let _ = HMAC_PROVIDER.install_default();
    });
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `install()` may be invoked any number of times without panicking.
    #[test]
    fn test_install_idempotent() {
        install();
        install(); // second call should not panic
    }

    /// A message signed through the HS256 signer factory must verify
    /// through the matching verifier factory built from the same secret.
    #[test]
    fn test_hmac_sign_and_verify() {
        install();

        let secret = b"test-secret-key";
        let enc = EncodingKey::from_secret(secret);
        let dec = DecodingKey::from_secret(secret);
        let msg = b"hello world";

        let signer = hmac_signer_factory(&Algorithm::HS256, &enc).expect("should create signer");
        let produced = signer.try_sign(msg).expect("should sign");

        let verifier =
            hmac_verifier_factory(&Algorithm::HS256, &dec).expect("should create verifier");
        verifier
            .verify(msg, &produced)
            .expect("signature should verify");
    }

    /// Asymmetric algorithms must be rejected by the HMAC-only provider.
    #[test]
    fn test_unsupported_algorithm_rejected() {
        install();
        let key = EncodingKey::from_secret(b"key");
        assert!(hmac_signer_factory(&Algorithm::RS256, &key).is_err());
    }
}

View File

@@ -248,8 +248,10 @@ pub fn extract_token_from_header(auth_header: &str) -> Option<&str> {
#[cfg(test)]
mod tests {
use super::*;
use crate::auth::crypto_provider;
fn test_config() -> JwtConfig {
crypto_provider::install();
JwtConfig {
secret: "test_secret_key_for_testing".to_string(),
access_token_expiration: 3600,
@@ -260,6 +262,7 @@ mod tests {
#[test]
fn test_generate_and_validate_access_token() {
let config = test_config();
let token =
generate_access_token(123, "testuser", &config).expect("Failed to generate token");
@@ -293,6 +296,7 @@ mod tests {
#[test]
fn test_token_with_wrong_secret() {
let config = test_config();
let token = generate_access_token(789, "user", &config).expect("Failed to generate token");
let wrong_config = JwtConfig {
@@ -306,6 +310,7 @@ mod tests {
#[test]
fn test_expired_token() {
crypto_provider::install();
let now = Utc::now().timestamp();
let expired_claims = Claims {
sub: "999".to_string(),

View File

@@ -4,8 +4,10 @@
//! that are used by the API (for all token types), the worker (for execution-scoped
//! tokens), and the sensor service (for sensor tokens).
pub mod crypto_provider;
pub mod jwt;
pub use crypto_provider::install as install_crypto_provider;
pub use jwt::{
extract_token_from_header, generate_access_token, generate_execution_token,
generate_refresh_token, generate_sensor_token, generate_token, validate_token, Claims,

View File

@@ -2,6 +2,14 @@
//!
//! This module provides functions for encrypting and decrypting secret values
//! using AES-256-GCM encryption with randomly generated nonces.
//!
//! ## JSON value encryption
//!
//! [`encrypt_json`] / [`decrypt_json`] operate on [`serde_json::Value`] values.
//! The JSON value is serialised to its compact string form before encryption,
//! and the resulting ciphertext is stored as a JSON string (`Value::String`).
//! This means the JSONB column always holds a plain JSON string when encrypted,
//! and the original structured value is recovered after decryption.
use crate::{Error, Result};
use aes_gcm::{
@@ -9,6 +17,7 @@ use aes_gcm::{
Aes256Gcm, Key, Nonce,
};
use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
use serde_json::Value as JsonValue;
use sha2::{Digest, Sha256};
/// Size of the nonce in bytes (96 bits for AES-GCM)
@@ -55,6 +64,33 @@ pub fn encrypt(plaintext: &str, encryption_key: &str) -> Result<String> {
Ok(BASE64.encode(&result))
}
/// Encrypt a [`JsonValue`] using AES-256-GCM.
///
/// The value is serialised to compact JSON text first, then that text is
/// encrypted with [`encrypt`]. The result is wrapped in a
/// [`JsonValue::String`] holding the base64 ciphertext, which is what the
/// JSONB column stores for encrypted keys.
pub fn encrypt_json(value: &JsonValue, encryption_key: &str) -> Result<JsonValue> {
    let serialised = serde_json::to_string(value)
        .map_err(|e| Error::encryption(format!("Failed to serialise JSON for encryption: {e}")))?;
    encrypt(&serialised, encryption_key).map(JsonValue::String)
}
/// Decrypt a [`JsonValue`] that was previously encrypted with [`encrypt_json`].
///
/// The input must be a [`JsonValue::String`] containing a base64 ciphertext.
/// After decryption the JSON string is parsed back into the original
/// structured [`JsonValue`].
pub fn decrypt_json(value: &JsonValue, encryption_key: &str) -> Result<JsonValue> {
let ciphertext = value
.as_str()
.ok_or_else(|| Error::encryption("Encrypted JSON value must be a string"))?;
let plaintext = decrypt(ciphertext, encryption_key)?;
serde_json::from_str(&plaintext)
.map_err(|e| Error::encryption(format!("Failed to parse decrypted JSON: {e}")))
}
/// Decrypt a ciphertext value using AES-256-GCM
///
/// The ciphertext should be base64-encoded and contain: nonce || encrypted_data || tag
@@ -226,4 +262,61 @@ mod tests {
assert_eq!(key1, key2);
assert_eq!(key1.len(), 32); // 256 bits
}
// ── JSON encryption tests ──────────────────────────────────────
#[test]
fn test_encrypt_decrypt_json_string() {
let value = serde_json::json!("my_secret_token");
let encrypted = encrypt_json(&value, TEST_KEY).expect("encrypt_json should succeed");
assert!(encrypted.is_string(), "encrypted JSON should be a string");
let decrypted = decrypt_json(&encrypted, TEST_KEY).expect("decrypt_json should succeed");
assert_eq!(value, decrypted);
}
#[test]
fn test_encrypt_decrypt_json_object() {
let value = serde_json::json!({"user": "admin", "password": "s3cret", "port": 5432});
let encrypted = encrypt_json(&value, TEST_KEY).expect("encrypt_json should succeed");
let decrypted = decrypt_json(&encrypted, TEST_KEY).expect("decrypt_json should succeed");
assert_eq!(value, decrypted);
}
#[test]
fn test_encrypt_decrypt_json_array() {
let value = serde_json::json!(["token1", "token2", 42, true, null]);
let encrypted = encrypt_json(&value, TEST_KEY).expect("encrypt_json should succeed");
let decrypted = decrypt_json(&encrypted, TEST_KEY).expect("decrypt_json should succeed");
assert_eq!(value, decrypted);
}
#[test]
fn test_encrypt_decrypt_json_number() {
let value = serde_json::json!(42);
let encrypted = encrypt_json(&value, TEST_KEY).unwrap();
let decrypted = decrypt_json(&encrypted, TEST_KEY).unwrap();
assert_eq!(value, decrypted);
}
#[test]
fn test_encrypt_decrypt_json_bool() {
let value = serde_json::json!(true);
let encrypted = encrypt_json(&value, TEST_KEY).unwrap();
let decrypted = decrypt_json(&encrypted, TEST_KEY).unwrap();
assert_eq!(value, decrypted);
}
#[test]
fn test_decrypt_json_wrong_key_fails() {
let value = serde_json::json!({"secret": "data"});
let encrypted = encrypt_json(&value, TEST_KEY).unwrap();
let wrong = "wrong_key_that_is_also_32_chars_long!!!";
assert!(decrypt_json(&encrypted, wrong).is_err());
}
#[test]
fn test_decrypt_json_non_string_fails() {
let not_encrypted = serde_json::json!(42);
assert!(decrypt_json(&not_encrypted, TEST_KEY).is_err());
}
}

View File

@@ -1232,7 +1232,7 @@ pub mod key {
pub name: String,
pub encrypted: bool,
pub encryption_key_hash: Option<String>,
pub value: String,
pub value: JsonValue,
pub created: DateTime<Utc>,
pub updated: DateTime<Utc>,
}

View File

@@ -2,6 +2,7 @@
use crate::models::{key::*, Id, OwnerType};
use crate::Result;
use serde_json::Value as JsonValue;
use sqlx::{Executor, Postgres, QueryBuilder};
use super::{Create, Delete, FindById, List, Repository, Update};
@@ -48,13 +49,13 @@ pub struct CreateKeyInput {
pub name: String,
pub encrypted: bool,
pub encryption_key_hash: Option<String>,
pub value: String,
pub value: JsonValue,
}
#[derive(Debug, Clone, Default)]
pub struct UpdateKeyInput {
pub name: Option<String>,
pub value: Option<String>,
pub value: Option<JsonValue>,
pub encrypted: Option<bool>,
pub encryption_key_hash: Option<String>,
}

View File

@@ -13,6 +13,7 @@ use helpers::*;
use serde_json::json;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_action() {
let pool = create_test_pool().await.unwrap();
@@ -35,6 +36,7 @@ async fn test_create_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_action_with_optional_fields() {
let pool = create_test_pool().await.unwrap();
@@ -71,6 +73,7 @@ async fn test_create_action_with_optional_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_action_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -95,6 +98,7 @@ async fn test_find_action_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_action_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -104,6 +108,7 @@ async fn test_find_action_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_action_by_ref() {
let pool = create_test_pool().await.unwrap();
@@ -127,6 +132,7 @@ async fn test_find_action_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_action_by_ref_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -138,6 +144,7 @@ async fn test_find_action_by_ref_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_actions() {
let pool = create_test_pool().await.unwrap();
@@ -167,6 +174,7 @@ async fn test_list_actions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_actions_empty() {
let pool = create_test_pool().await.unwrap();
@@ -176,6 +184,7 @@ async fn test_list_actions_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_action() {
let pool = create_test_pool().await.unwrap();
@@ -211,6 +220,7 @@ async fn test_update_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_action_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -225,6 +235,7 @@ async fn test_update_action_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_action_partial() {
let pool = create_test_pool().await.unwrap();
@@ -254,6 +265,7 @@ async fn test_update_action_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_action() {
let pool = create_test_pool().await.unwrap();
@@ -278,6 +290,7 @@ async fn test_delete_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_action_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -287,6 +300,7 @@ async fn test_delete_action_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_actions_cascade_delete_with_pack() {
let pool = create_test_pool().await.unwrap();
@@ -314,6 +328,7 @@ async fn test_actions_cascade_delete_with_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_foreign_key_constraint() {
let pool = create_test_pool().await.unwrap();
@@ -338,6 +353,7 @@ async fn test_action_foreign_key_constraint() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_actions_same_pack() {
let pool = create_test_pool().await.unwrap();
@@ -362,6 +378,7 @@ async fn test_multiple_actions_same_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_unique_ref_constraint() {
let pool = create_test_pool().await.unwrap();
@@ -386,6 +403,7 @@ async fn test_action_unique_ref_constraint() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_with_json_schemas() {
let pool = create_test_pool().await.unwrap();
@@ -423,6 +441,7 @@ async fn test_action_with_json_schemas() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_timestamps_auto_populated() {
let pool = create_test_pool().await.unwrap();
@@ -443,6 +462,7 @@ async fn test_action_timestamps_auto_populated() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_updated_changes_on_update() {
let pool = create_test_pool().await.unwrap();

View File

@@ -21,6 +21,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_minimal() {
let pool = create_test_pool().await.unwrap();
@@ -93,6 +94,7 @@ async fn test_create_enforcement_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_with_event() {
let pool = create_test_pool().await.unwrap();
@@ -160,6 +162,7 @@ async fn test_create_enforcement_with_event() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_with_conditions() {
let pool = create_test_pool().await.unwrap();
@@ -225,6 +228,7 @@ async fn test_create_enforcement_with_conditions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_with_any_condition() {
let pool = create_test_pool().await.unwrap();
@@ -287,6 +291,7 @@ async fn test_create_enforcement_with_any_condition() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_without_rule_id() {
let pool = create_test_pool().await.unwrap();
@@ -310,6 +315,7 @@ async fn test_create_enforcement_without_rule_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_with_invalid_rule_fails() {
let pool = create_test_pool().await.unwrap();
@@ -333,6 +339,7 @@ async fn test_create_enforcement_with_invalid_rule_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_enforcement_with_nonexistent_event_succeeds() {
let pool = create_test_pool().await.unwrap();
@@ -363,6 +370,7 @@ async fn test_create_enforcement_with_nonexistent_event_succeeds() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enforcement_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -424,6 +432,7 @@ async fn test_find_enforcement_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enforcement_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -435,6 +444,7 @@ async fn test_find_enforcement_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_enforcement_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -490,6 +500,7 @@ async fn test_get_enforcement_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_enforcement_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -504,6 +515,7 @@ async fn test_get_enforcement_by_id_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_enforcements_empty() {
let pool = create_test_pool().await.unwrap();
@@ -513,6 +525,7 @@ async fn test_list_enforcements_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_enforcements() {
let pool = create_test_pool().await.unwrap();
@@ -584,6 +597,7 @@ async fn test_list_enforcements() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_status() {
let pool = create_test_pool().await.unwrap();
@@ -649,6 +663,7 @@ async fn test_update_enforcement_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_status_transitions() {
let pool = create_test_pool().await.unwrap();
@@ -727,6 +742,7 @@ async fn test_update_enforcement_status_transitions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_payload() {
let pool = create_test_pool().await.unwrap();
@@ -789,6 +805,7 @@ async fn test_update_enforcement_payload() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_both_fields() {
let pool = create_test_pool().await.unwrap();
@@ -852,6 +869,7 @@ async fn test_update_enforcement_both_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -915,6 +933,7 @@ async fn test_update_enforcement_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enforcement_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -935,6 +954,7 @@ async fn test_update_enforcement_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_enforcement() {
let pool = create_test_pool().await.unwrap();
@@ -995,6 +1015,7 @@ async fn test_delete_enforcement() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_enforcement_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -1008,6 +1029,7 @@ async fn test_delete_enforcement_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enforcements_by_rule() {
let pool = create_test_pool().await.unwrap();
@@ -1100,6 +1122,7 @@ async fn test_find_enforcements_by_rule() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enforcements_by_status() {
let pool = create_test_pool().await.unwrap();
@@ -1189,6 +1212,7 @@ async fn test_find_enforcements_by_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enforcements_by_event() {
let pool = create_test_pool().await.unwrap();
@@ -1273,6 +1297,7 @@ async fn test_find_enforcements_by_event() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_rule_sets_enforcement_rule_to_null() {
let pool = create_test_pool().await.unwrap();
@@ -1338,6 +1363,7 @@ async fn test_delete_rule_sets_enforcement_rule_to_null() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_enforcement_resolved_at_lifecycle() {
let pool = create_test_pool().await.unwrap();

View File

@@ -21,6 +21,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_minimal() {
let pool = create_test_pool().await.unwrap();
@@ -60,6 +61,7 @@ async fn test_create_event_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_with_payload() {
let pool = create_test_pool().await.unwrap();
@@ -101,6 +103,7 @@ async fn test_create_event_with_payload() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_with_config() {
let pool = create_test_pool().await.unwrap();
@@ -136,6 +139,7 @@ async fn test_create_event_with_config() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_without_trigger_id() {
let pool = create_test_pool().await.unwrap();
@@ -158,6 +162,7 @@ async fn test_create_event_without_trigger_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_with_source() {
let pool = create_test_pool().await.unwrap();
@@ -191,6 +196,7 @@ async fn test_create_event_with_source() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_event_with_invalid_trigger_fails() {
let pool = create_test_pool().await.unwrap();
@@ -217,6 +223,7 @@ async fn test_create_event_with_invalid_trigger_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_event_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -249,6 +256,7 @@ async fn test_find_event_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_event_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -258,6 +266,7 @@ async fn test_find_event_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_event_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -284,6 +293,7 @@ async fn test_get_event_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_event_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -298,6 +308,7 @@ async fn test_get_event_by_id_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_events_empty() {
let pool = create_test_pool().await.unwrap();
@@ -307,6 +318,7 @@ async fn test_list_events_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_events() {
let pool = create_test_pool().await.unwrap();
@@ -345,6 +357,7 @@ async fn test_list_events() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_events_respects_limit() {
let pool = create_test_pool().await.unwrap();
@@ -368,6 +381,7 @@ async fn test_list_events_respects_limit() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_event() {
let pool = create_test_pool().await.unwrap();
@@ -396,6 +410,7 @@ async fn test_delete_event() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_event_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -405,6 +420,7 @@ async fn test_delete_event_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_event_enforcement_retains_event_id() {
let pool = create_test_pool().await.unwrap();
@@ -480,6 +496,7 @@ async fn test_delete_event_enforcement_retains_event_id() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_events_by_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -527,6 +544,7 @@ async fn test_find_events_by_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_events_by_trigger_ref() {
let pool = create_test_pool().await.unwrap();
@@ -561,6 +579,7 @@ async fn test_find_events_by_trigger_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_events_by_trigger_ref_preserves_after_trigger_deletion() {
let pool = create_test_pool().await.unwrap();
@@ -602,6 +621,7 @@ async fn test_find_events_by_trigger_ref_preserves_after_trigger_deletion() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_event_created_timestamp_auto_set() {
let pool = create_test_pool().await.unwrap();

View File

@@ -20,6 +20,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_execution_basic() {
let pool = create_test_pool().await.unwrap();
@@ -61,6 +62,7 @@ async fn test_create_execution_basic() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_execution_without_action() {
let pool = create_test_pool().await.unwrap();
@@ -86,6 +88,7 @@ async fn test_create_execution_without_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_execution_with_all_fields() {
let pool = create_test_pool().await.unwrap();
@@ -120,6 +123,7 @@ async fn test_create_execution_with_all_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_execution_with_parent() {
let pool = create_test_pool().await.unwrap();
@@ -177,6 +181,7 @@ async fn test_create_execution_with_parent() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_execution_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -216,6 +221,7 @@ async fn test_find_execution_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_execution_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -227,6 +233,7 @@ async fn test_find_execution_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_executions() {
let pool = create_test_pool().await.unwrap();
@@ -270,6 +277,7 @@ async fn test_list_executions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_executions_ordered_by_created_desc() {
let pool = create_test_pool().await.unwrap();
@@ -324,6 +332,7 @@ async fn test_list_executions_ordered_by_created_desc() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_status() {
let pool = create_test_pool().await.unwrap();
@@ -368,6 +377,7 @@ async fn test_update_execution_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_result() {
let pool = create_test_pool().await.unwrap();
@@ -413,6 +423,7 @@ async fn test_update_execution_result() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_executor() {
let pool = create_test_pool().await.unwrap();
@@ -456,6 +467,7 @@ async fn test_update_execution_executor() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_status_transitions() {
let pool = create_test_pool().await.unwrap();
@@ -546,6 +558,7 @@ async fn test_update_execution_status_transitions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_failed_status() {
let pool = create_test_pool().await.unwrap();
@@ -590,6 +603,7 @@ async fn test_update_execution_failed_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_execution_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -633,6 +647,7 @@ async fn test_update_execution_no_changes() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_execution() {
let pool = create_test_pool().await.unwrap();
@@ -675,6 +690,7 @@ async fn test_delete_execution() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_execution_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -688,6 +704,7 @@ async fn test_delete_execution_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_executions_by_status() {
let pool = create_test_pool().await.unwrap();
@@ -743,6 +760,7 @@ async fn test_find_executions_by_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_executions_by_enforcement() {
let pool = create_test_pool().await.unwrap();
@@ -804,6 +822,7 @@ async fn test_find_executions_by_enforcement() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_parent_child_execution_hierarchy() {
let pool = create_test_pool().await.unwrap();
@@ -867,6 +886,7 @@ async fn test_parent_child_execution_hierarchy() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_nested_execution_hierarchy() {
let pool = create_test_pool().await.unwrap();
@@ -945,6 +965,7 @@ async fn test_nested_execution_hierarchy() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_execution_timestamps() {
let pool = create_test_pool().await.unwrap();
@@ -1000,6 +1021,7 @@ async fn test_execution_timestamps() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_execution_config_json() {
let pool = create_test_pool().await.unwrap();
@@ -1047,6 +1069,7 @@ async fn test_execution_config_json() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_execution_result_json() {
let pool = create_test_pool().await.unwrap();

View File

@@ -1116,7 +1116,7 @@ pub struct KeyFixture {
pub name: String,
pub encrypted: bool,
pub encryption_key_hash: Option<String>,
pub value: String,
pub value: serde_json::Value,
}
impl KeyFixture {
@@ -1136,7 +1136,7 @@ impl KeyFixture {
name: name.to_string(),
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1157,7 +1157,7 @@ impl KeyFixture {
name: unique_name,
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1177,7 +1177,7 @@ impl KeyFixture {
name: name.to_string(),
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1198,7 +1198,7 @@ impl KeyFixture {
name: unique_name,
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1218,7 +1218,7 @@ impl KeyFixture {
name: name.to_string(),
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1239,7 +1239,7 @@ impl KeyFixture {
name: unique_name,
encrypted: false,
encryption_key_hash: None,
value: value.to_string(),
value: serde_json::json!(value),
}
}
@@ -1254,7 +1254,7 @@ impl KeyFixture {
}
pub fn with_value(mut self, value: &str) -> Self {
self.value = value.to_string();
self.value = serde_json::json!(value);
self
}

View File

@@ -16,6 +16,7 @@ use helpers::*;
use serde_json::json;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_identity() {
let pool = create_test_pool().await.unwrap();
@@ -38,6 +39,7 @@ async fn test_create_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_identity_minimal() {
let pool = create_test_pool().await.unwrap();
@@ -56,6 +58,7 @@ async fn test_create_identity_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_identity_duplicate_login() {
let pool = create_test_pool().await.unwrap();
@@ -92,6 +95,7 @@ async fn test_create_identity_duplicate_login() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_identity_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -116,6 +120,7 @@ async fn test_find_identity_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_identity_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -125,6 +130,7 @@ async fn test_find_identity_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_identity_by_login() {
let pool = create_test_pool().await.unwrap();
@@ -148,6 +154,7 @@ async fn test_find_identity_by_login() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_identity_by_login_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -159,6 +166,7 @@ async fn test_find_identity_by_login_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_identities() {
let pool = create_test_pool().await.unwrap();
@@ -190,6 +198,7 @@ async fn test_list_identities() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_identity() {
let pool = create_test_pool().await.unwrap();
@@ -225,6 +234,7 @@ async fn test_update_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_identity_partial() {
let pool = create_test_pool().await.unwrap();
@@ -256,6 +266,7 @@ async fn test_update_identity_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_identity_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -279,6 +290,7 @@ async fn test_update_identity_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_identity() {
let pool = create_test_pool().await.unwrap();
@@ -311,6 +323,7 @@ async fn test_delete_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_identity_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -320,6 +333,7 @@ async fn test_delete_identity_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_identity_timestamps_auto_populated() {
let pool = create_test_pool().await.unwrap();
@@ -344,6 +358,7 @@ async fn test_identity_timestamps_auto_populated() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_identity_updated_changes_on_update() {
let pool = create_test_pool().await.unwrap();
@@ -379,6 +394,7 @@ async fn test_identity_updated_changes_on_update() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_identity_with_complex_attributes() {
let pool = create_test_pool().await.unwrap();
@@ -419,6 +435,7 @@ async fn test_identity_with_complex_attributes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_identity_login_case_sensitive() {
let pool = create_test_pool().await.unwrap();

View File

@@ -22,6 +22,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_inquiry_minimal() {
let pool = create_test_pool().await.unwrap();
@@ -83,6 +84,7 @@ async fn test_create_inquiry_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_inquiry_with_response_schema() {
let pool = create_test_pool().await.unwrap();
@@ -140,6 +142,7 @@ async fn test_create_inquiry_with_response_schema() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_inquiry_with_timeout() {
let pool = create_test_pool().await.unwrap();
@@ -193,6 +196,7 @@ async fn test_create_inquiry_with_timeout() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_inquiry_with_assigned_user() {
let pool = create_test_pool().await.unwrap();
@@ -255,6 +259,7 @@ async fn test_create_inquiry_with_assigned_user() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_inquiry_with_invalid_execution_fails() {
let pool = create_test_pool().await.unwrap();
@@ -280,6 +285,7 @@ async fn test_create_inquiry_with_invalid_execution_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_inquiry_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -331,6 +337,7 @@ async fn test_find_inquiry_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_inquiry_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -340,6 +347,7 @@ async fn test_find_inquiry_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_inquiry_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -385,6 +393,7 @@ async fn test_get_inquiry_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_inquiry_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -399,6 +408,7 @@ async fn test_get_inquiry_by_id_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_inquiries_empty() {
let pool = create_test_pool().await.unwrap();
@@ -408,6 +418,7 @@ async fn test_list_inquiries_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_inquiries() {
let pool = create_test_pool().await.unwrap();
@@ -468,6 +479,7 @@ async fn test_list_inquiries() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_status() {
let pool = create_test_pool().await.unwrap();
@@ -523,6 +535,7 @@ async fn test_update_inquiry_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_status_transitions() {
let pool = create_test_pool().await.unwrap();
@@ -607,6 +620,7 @@ async fn test_update_inquiry_status_transitions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_response() {
let pool = create_test_pool().await.unwrap();
@@ -664,6 +678,7 @@ async fn test_update_inquiry_response() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_with_response_and_status() {
let pool = create_test_pool().await.unwrap();
@@ -721,6 +736,7 @@ async fn test_update_inquiry_with_response_and_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_assignment() {
let pool = create_test_pool().await.unwrap();
@@ -787,6 +803,7 @@ async fn test_update_inquiry_assignment() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -841,6 +858,7 @@ async fn test_update_inquiry_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_inquiry_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -862,6 +880,7 @@ async fn test_update_inquiry_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_inquiry() {
let pool = create_test_pool().await.unwrap();
@@ -911,6 +930,7 @@ async fn test_delete_inquiry() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_inquiry_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -920,6 +940,7 @@ async fn test_delete_inquiry_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_execution_cascades_to_inquiries() {
let pool = create_test_pool().await.unwrap();
@@ -986,6 +1007,7 @@ async fn test_delete_execution_cascades_to_inquiries() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_inquiries_by_status() {
let pool = create_test_pool().await.unwrap();
@@ -1064,6 +1086,7 @@ async fn test_find_inquiries_by_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_inquiries_by_execution() {
let pool = create_test_pool().await.unwrap();
@@ -1145,6 +1168,7 @@ async fn test_find_inquiries_by_execution() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_inquiry_timestamps_auto_managed() {
let pool = create_test_pool().await.unwrap();
@@ -1211,6 +1235,7 @@ async fn test_inquiry_timestamps_auto_managed() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_inquiry_complex_response_schema() {
let pool = create_test_pool().await.unwrap();

View File

@@ -20,6 +20,7 @@ use helpers::*;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_system_owner() {
let pool = create_test_pool().await.unwrap();
@@ -36,12 +37,13 @@ async fn test_create_key_system_owner() {
assert_eq!(key.owner_action, None);
assert_eq!(key.owner_sensor, None);
assert!(!key.encrypted);
assert_eq!(key.value, "test_value");
assert_eq!(key.value, serde_json::json!("test_value"));
assert!(key.created.timestamp() > 0);
assert!(key.updated.timestamp() > 0);
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_system_encrypted() {
let pool = create_test_pool().await.unwrap();
@@ -61,6 +63,7 @@ async fn test_create_key_system_encrypted() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_identity_owner() {
let pool = create_test_pool().await.unwrap();
@@ -79,7 +82,7 @@ async fn test_create_key_identity_owner() {
assert_eq!(key.owner, Some(identity.id.to_string()));
assert_eq!(key.owner_identity, Some(identity.id));
assert_eq!(key.owner_pack, None);
assert_eq!(key.value, "secret_token");
assert_eq!(key.value, serde_json::json!("secret_token"));
}
// ============================================================================
@@ -87,6 +90,7 @@ async fn test_create_key_identity_owner() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_pack_owner() {
let pool = create_test_pool().await.unwrap();
@@ -104,7 +108,7 @@ async fn test_create_key_pack_owner() {
assert_eq!(key.owner, Some(pack.id.to_string()));
assert_eq!(key.owner_pack, Some(pack.id));
assert_eq!(key.owner_pack_ref, Some(pack.r#ref.clone()));
assert_eq!(key.value, "config_value");
assert_eq!(key.value, serde_json::json!("config_value"));
}
// ============================================================================
@@ -112,6 +116,7 @@ async fn test_create_key_pack_owner() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_duplicate_ref_fails() {
let pool = create_test_pool().await.unwrap();
@@ -132,7 +137,7 @@ async fn test_create_key_duplicate_ref_fails() {
name: key_ref.clone(),
encrypted: false,
encryption_key_hash: None,
value: "value1".to_string(),
value: serde_json::json!("value1"),
};
KeyRepository::create(&pool, input.clone()).await.unwrap();
@@ -143,6 +148,7 @@ async fn test_create_key_duplicate_ref_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_system_with_owner_fields_fails() {
let pool = create_test_pool().await.unwrap();
@@ -167,7 +173,7 @@ async fn test_create_key_system_with_owner_fields_fails() {
name: "invalid".to_string(),
encrypted: false,
encryption_key_hash: None,
value: "value".to_string(),
value: serde_json::json!("value"),
};
let result = KeyRepository::create(&pool, input).await;
@@ -175,6 +181,7 @@ async fn test_create_key_system_with_owner_fields_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_identity_without_owner_id_fails() {
let pool = create_test_pool().await.unwrap();
@@ -193,7 +200,7 @@ async fn test_create_key_identity_without_owner_id_fails() {
name: "invalid".to_string(),
encrypted: false,
encryption_key_hash: None,
value: "value".to_string(),
value: serde_json::json!("value"),
};
let result = KeyRepository::create(&pool, input).await;
@@ -201,6 +208,7 @@ async fn test_create_key_identity_without_owner_id_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_multiple_owners_fails() {
let pool = create_test_pool().await.unwrap();
@@ -229,7 +237,7 @@ async fn test_create_key_multiple_owners_fails() {
name: "invalid".to_string(),
encrypted: false,
encryption_key_hash: None,
value: "value".to_string(),
value: serde_json::json!("value"),
};
let result = KeyRepository::create(&pool, input).await;
@@ -237,6 +245,7 @@ async fn test_create_key_multiple_owners_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_key_invalid_ref_format_fails() {
let pool = create_test_pool().await.unwrap();
@@ -255,7 +264,7 @@ async fn test_create_key_invalid_ref_format_fails() {
name: "uppercase".to_string(),
encrypted: false,
encryption_key_hash: None,
value: "value".to_string(),
value: serde_json::json!("value"),
};
let result = KeyRepository::create(&pool, input).await;
@@ -267,6 +276,7 @@ async fn test_create_key_invalid_ref_format_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_exists() {
let pool = create_test_pool().await.unwrap();
@@ -285,6 +295,7 @@ async fn test_find_by_id_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_not_exists() {
let pool = create_test_pool().await.unwrap();
@@ -293,6 +304,7 @@ async fn test_find_by_id_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_id_exists() {
let pool = create_test_pool().await.unwrap();
@@ -308,6 +320,7 @@ async fn test_get_by_id_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_id_not_exists_fails() {
let pool = create_test_pool().await.unwrap();
@@ -317,6 +330,7 @@ async fn test_get_by_id_not_exists_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_exists() {
let pool = create_test_pool().await.unwrap();
@@ -334,6 +348,7 @@ async fn test_find_by_ref_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_not_exists() {
let pool = create_test_pool().await.unwrap();
@@ -344,6 +359,7 @@ async fn test_find_by_ref_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_all_keys() {
let pool = create_test_pool().await.unwrap();
@@ -373,6 +389,7 @@ async fn test_list_all_keys() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_value() {
let pool = create_test_pool().await.unwrap();
@@ -387,17 +404,18 @@ async fn test_update_value() {
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
let input = UpdateKeyInput {
value: Some("new_value".to_string()),
value: Some(serde_json::json!("new_value")),
..Default::default()
};
let updated = KeyRepository::update(&pool, key.id, input).await.unwrap();
assert_eq!(updated.value, "new_value");
assert_eq!(updated.value, serde_json::json!("new_value"));
assert!(updated.updated > original_updated);
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_name() {
let pool = create_test_pool().await.unwrap();
@@ -419,6 +437,7 @@ async fn test_update_name() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_encrypted_status() {
let pool = create_test_pool().await.unwrap();
@@ -432,7 +451,7 @@ async fn test_update_encrypted_status() {
let input = UpdateKeyInput {
encrypted: Some(true),
encryption_key_hash: Some("sha256:xyz789".to_string()),
value: Some("encrypted_value".to_string()),
value: Some(serde_json::json!("encrypted_value")),
..Default::default()
};
@@ -443,10 +462,11 @@ async fn test_update_encrypted_status() {
updated.encryption_key_hash,
Some("sha256:xyz789".to_string())
);
assert_eq!(updated.value, "encrypted_value");
assert_eq!(updated.value, serde_json::json!("encrypted_value"));
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_multiple_fields() {
let pool = create_test_pool().await.unwrap();
@@ -459,7 +479,7 @@ async fn test_update_multiple_fields() {
let new_name = format!("updated_name_{}", unique_test_id());
let input = UpdateKeyInput {
name: Some(new_name.clone()),
value: Some("updated_value".to_string()),
value: Some(serde_json::json!("updated_value")),
encrypted: Some(true),
encryption_key_hash: Some("hash123".to_string()),
};
@@ -467,12 +487,13 @@ async fn test_update_multiple_fields() {
let updated = KeyRepository::update(&pool, key.id, input).await.unwrap();
assert_eq!(updated.name, new_name);
assert_eq!(updated.value, "updated_value");
assert_eq!(updated.value, serde_json::json!("updated_value"));
assert!(updated.encrypted);
assert_eq!(updated.encryption_key_hash, Some("hash123".to_string()));
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -495,11 +516,12 @@ async fn test_update_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_nonexistent_key_fails() {
let pool = create_test_pool().await.unwrap();
let input = UpdateKeyInput {
value: Some("new_value".to_string()),
value: Some(serde_json::json!("new_value")),
..Default::default()
};
@@ -512,6 +534,7 @@ async fn test_update_nonexistent_key_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_existing_key() {
let pool = create_test_pool().await.unwrap();
@@ -529,6 +552,7 @@ async fn test_delete_existing_key() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_nonexistent_key() {
let pool = create_test_pool().await.unwrap();
@@ -537,6 +561,7 @@ async fn test_delete_nonexistent_key() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_key_when_identity_deleted() {
let pool = create_test_pool().await.unwrap();
@@ -563,6 +588,7 @@ async fn test_delete_key_when_identity_deleted() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_key_when_pack_deleted() {
let pool = create_test_pool().await.unwrap();
@@ -593,6 +619,7 @@ async fn test_delete_key_when_pack_deleted() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_owner_type_system() {
let pool = create_test_pool().await.unwrap();
@@ -616,6 +643,7 @@ async fn test_find_by_owner_type_system() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_owner_type_identity() {
let pool = create_test_pool().await.unwrap();
@@ -650,6 +678,7 @@ async fn test_find_by_owner_type_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_owner_type_pack() {
let pool = create_test_pool().await.unwrap();
@@ -683,6 +712,7 @@ async fn test_find_by_owner_type_pack() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_created_timestamp_set_automatically() {
let pool = create_test_pool().await.unwrap();
@@ -701,6 +731,7 @@ async fn test_created_timestamp_set_automatically() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_updated_timestamp_changes_on_update() {
let pool = create_test_pool().await.unwrap();
@@ -715,7 +746,7 @@ async fn test_updated_timestamp_changes_on_update() {
tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;
let input = UpdateKeyInput {
value: Some("new_value".to_string()),
value: Some(serde_json::json!("new_value")),
..Default::default()
};
@@ -726,6 +757,7 @@ async fn test_updated_timestamp_changes_on_update() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_updated_timestamp_unchanged_on_read() {
let pool = create_test_pool().await.unwrap();
@@ -753,6 +785,7 @@ async fn test_updated_timestamp_unchanged_on_read() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_key_encrypted_flag() {
let pool = create_test_pool().await.unwrap();
@@ -779,6 +812,7 @@ async fn test_key_encrypted_flag() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_encryption_status() {
let pool = create_test_pool().await.unwrap();
@@ -794,7 +828,7 @@ async fn test_update_encryption_status() {
let input = UpdateKeyInput {
encrypted: Some(true),
encryption_key_hash: Some("sha256:newkey".to_string()),
value: Some("encrypted_value".to_string()),
value: Some(serde_json::json!("encrypted_value")),
..Default::default()
};
@@ -805,20 +839,20 @@ async fn test_update_encryption_status() {
encrypted.encryption_key_hash,
Some("sha256:newkey".to_string())
);
assert_eq!(encrypted.value, "encrypted_value");
assert_eq!(encrypted.value, serde_json::json!("encrypted_value"));
// Decrypt it
let input = UpdateKeyInput {
encrypted: Some(false),
encryption_key_hash: None,
value: Some("plain_value".to_string()),
value: Some(serde_json::json!("plain_value")),
..Default::default()
};
let decrypted = KeyRepository::update(&pool, key.id, input).await.unwrap();
assert!(!decrypted.encrypted);
assert_eq!(decrypted.value, "plain_value");
assert_eq!(decrypted.value, serde_json::json!("plain_value"));
}
// ============================================================================
@@ -826,6 +860,7 @@ async fn test_update_encryption_status() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_keys_same_pack_different_names() {
let pool = create_test_pool().await.unwrap();
@@ -851,6 +886,7 @@ async fn test_multiple_keys_same_pack_different_names() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_same_key_name_different_owners() {
let pool = create_test_pool().await.unwrap();

View File

@@ -9,6 +9,7 @@ use helpers::*;
use sqlx::Row;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_migrations_applied() {
let pool = create_test_pool().await.unwrap();
@@ -41,6 +42,7 @@ async fn test_migrations_applied() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -62,6 +64,7 @@ async fn test_pack_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -83,6 +86,7 @@ async fn test_action_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_trigger_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -104,6 +108,7 @@ async fn test_trigger_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_sensor_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -125,6 +130,7 @@ async fn test_sensor_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_rule_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -146,6 +152,7 @@ async fn test_rule_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_execution_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -167,6 +174,7 @@ async fn test_execution_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_event_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -188,6 +196,7 @@ async fn test_event_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_enforcement_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -209,6 +218,7 @@ async fn test_enforcement_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_inquiry_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -230,6 +240,7 @@ async fn test_inquiry_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_identity_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -251,6 +262,7 @@ async fn test_identity_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_key_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -272,6 +284,7 @@ async fn test_key_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -293,6 +306,7 @@ async fn test_notification_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_runtime_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -314,6 +328,7 @@ async fn test_runtime_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_table_exists() {
let pool = create_test_pool().await.unwrap();
@@ -335,6 +350,7 @@ async fn test_worker_table_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_columns() {
let pool = create_test_pool().await.unwrap();
@@ -381,6 +397,7 @@ async fn test_pack_columns() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_action_columns() {
let pool = create_test_pool().await.unwrap();
@@ -425,6 +442,7 @@ async fn test_action_columns() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_timestamps_auto_populated() {
let pool = create_test_pool().await.unwrap();
clean_database(&pool).await.unwrap();
@@ -443,6 +461,7 @@ async fn test_timestamps_auto_populated() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_json_column_storage() {
let pool = create_test_pool().await.unwrap();
clean_database(&pool).await.unwrap();
@@ -461,6 +480,7 @@ async fn test_json_column_storage() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_array_column_storage() {
let pool = create_test_pool().await.unwrap();
clean_database(&pool).await.unwrap();
@@ -484,6 +504,7 @@ async fn test_array_column_storage() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_unique_constraints() {
let pool = create_test_pool().await.unwrap();
clean_database(&pool).await.unwrap();
@@ -498,6 +519,7 @@ async fn test_unique_constraints() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_foreign_key_constraints() {
let pool = create_test_pool().await.unwrap();
clean_database(&pool).await.unwrap();
@@ -525,6 +547,7 @@ async fn test_foreign_key_constraints() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_enum_types_exist() {
let pool = create_test_pool().await.unwrap();

View File

@@ -89,6 +89,7 @@ impl NotificationFixture {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_notification_minimal() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -119,6 +120,7 @@ async fn test_create_notification_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_notification_with_content() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -152,6 +154,7 @@ async fn test_create_notification_with_content() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_notification_all_states() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -185,6 +188,7 @@ async fn test_create_notification_all_states() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_notification_by_id() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -205,6 +209,7 @@ async fn test_find_notification_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_notification_by_id_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -216,6 +221,7 @@ async fn test_find_notification_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_state() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -238,6 +244,7 @@ async fn test_update_notification_state() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_content() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -265,6 +272,7 @@ async fn test_update_notification_content() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_state_and_content() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -289,6 +297,7 @@ async fn test_update_notification_state_and_content() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_no_changes() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -310,6 +319,7 @@ async fn test_update_notification_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_timestamps() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -337,6 +347,7 @@ async fn test_update_notification_timestamps() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_notification() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -357,6 +368,7 @@ async fn test_delete_notification() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_notification_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -368,6 +380,7 @@ async fn test_delete_notification_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_notifications() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -408,6 +421,7 @@ async fn test_list_notifications() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_state() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -467,6 +481,7 @@ async fn test_find_by_state() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_state_empty() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -485,6 +500,7 @@ async fn test_find_by_state_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_channel() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -541,6 +557,7 @@ async fn test_find_by_channel() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_channel_empty() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -555,6 +572,7 @@ async fn test_find_by_channel_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_with_complex_content() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -589,6 +607,7 @@ async fn test_notification_with_complex_content() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_entity_types() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -615,6 +634,7 @@ async fn test_notification_entity_types() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_activity_types() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -641,6 +661,7 @@ async fn test_notification_activity_types() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_ordering_by_created() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -702,6 +723,7 @@ async fn test_notification_ordering_by_created() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_timestamps_auto_set() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -724,6 +746,7 @@ async fn test_notification_timestamps_auto_set() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_notifications_same_entity() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -776,6 +799,7 @@ async fn test_multiple_notifications_same_entity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_content_null_vs_empty_json() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -794,6 +818,7 @@ async fn test_notification_content_null_vs_empty_json() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_notification_content_to_null() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -817,6 +842,7 @@ async fn test_update_notification_content_to_null() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_state_transition_workflow() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -867,6 +893,7 @@ async fn test_notification_state_transition_workflow() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_list_limit() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -879,6 +906,7 @@ async fn test_notification_list_limit() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_with_special_characters() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -911,6 +939,7 @@ async fn test_notification_with_special_characters() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_with_long_strings() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -944,6 +973,7 @@ async fn test_notification_with_long_strings() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_state_with_multiple_states() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1018,6 +1048,7 @@ async fn test_find_by_state_with_multiple_states() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_content_array() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1034,6 +1065,7 @@ async fn test_notification_content_array() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_content_string_value() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1046,6 +1078,7 @@ async fn test_notification_content_string_value() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_content_number_value() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1058,6 +1091,7 @@ async fn test_notification_content_number_value() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_parallel_creation() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -1096,6 +1130,7 @@ async fn test_notification_parallel_creation() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_channel_case_sensitive() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1143,6 +1178,7 @@ async fn test_notification_channel_case_sensitive() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_entity_type_variations() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1181,6 +1217,7 @@ async fn test_notification_entity_type_variations() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_update_same_state() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1207,6 +1244,7 @@ async fn test_notification_update_same_state() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_multiple_updates() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());
@@ -1230,6 +1268,7 @@ async fn test_notification_multiple_updates() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_notification_get_by_id_alias() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = NotificationFixture::new(pool.clone());

View File

@@ -12,6 +12,7 @@ use helpers::*;
use serde_json::json;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_pack() {
let pool = create_test_pool().await.unwrap();
@@ -32,6 +33,7 @@ async fn test_create_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_pack_duplicate_ref() {
let pool = create_test_pool().await.unwrap();
@@ -48,6 +50,7 @@ async fn test_create_pack_duplicate_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_pack_with_tags() {
let pool = create_test_pool().await.unwrap();
@@ -63,6 +66,7 @@ async fn test_create_pack_with_tags() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_pack_standard() {
let pool = create_test_pool().await.unwrap();
@@ -76,6 +80,7 @@ async fn test_create_pack_standard() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_pack_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -95,6 +100,7 @@ async fn test_find_pack_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_pack_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -104,6 +110,7 @@ async fn test_find_pack_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_pack_by_ref() {
let pool = create_test_pool().await.unwrap();
@@ -122,6 +129,7 @@ async fn test_find_pack_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_pack_by_ref_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -133,6 +141,7 @@ async fn test_find_pack_by_ref_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_packs() {
let pool = create_test_pool().await.unwrap();
@@ -163,6 +172,7 @@ async fn test_list_packs() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_packs_with_pagination() {
let pool = create_test_pool().await.unwrap();
@@ -190,6 +200,7 @@ async fn test_list_packs_with_pagination() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_pack() {
let pool = create_test_pool().await.unwrap();
@@ -219,6 +230,7 @@ async fn test_update_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_pack_partial() {
let pool = create_test_pool().await.unwrap();
@@ -246,6 +258,7 @@ async fn test_update_pack_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_pack_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -261,6 +274,7 @@ async fn test_update_pack_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_pack_tags() {
let pool = create_test_pool().await.unwrap();
@@ -286,6 +300,7 @@ async fn test_update_pack_tags() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_pack() {
let pool = create_test_pool().await.unwrap();
@@ -307,6 +322,7 @@ async fn test_delete_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_pack_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -348,6 +364,7 @@ async fn test_delete_pack_not_found() {
// }
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_count_packs() {
let pool = create_test_pool().await.unwrap();
@@ -374,6 +391,7 @@ async fn test_count_packs() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_transaction_commit() {
let pool = create_test_pool().await.unwrap();
@@ -412,6 +430,7 @@ async fn test_pack_transaction_commit() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_transaction_rollback() {
let pool = create_test_pool().await.unwrap();
@@ -446,6 +465,7 @@ async fn test_pack_transaction_rollback() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_invalid_ref_format() {
let pool = create_test_pool().await.unwrap();
@@ -471,6 +491,7 @@ async fn test_pack_invalid_ref_format() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_valid_ref_formats() {
let pool = create_test_pool().await.unwrap();

View File

@@ -168,6 +168,7 @@ impl PermissionSetFixture {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_set_minimal() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -196,6 +197,7 @@ async fn test_create_permission_set_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_set_with_pack() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -226,6 +228,7 @@ async fn test_create_permission_set_with_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_set_with_complex_grants() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -250,6 +253,7 @@ async fn test_create_permission_set_with_complex_grants() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_ref_format_validation() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -282,6 +286,7 @@ async fn test_permission_set_ref_format_validation() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_ref_lowercase() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -301,6 +306,7 @@ async fn test_permission_set_ref_lowercase() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_duplicate_ref() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -325,6 +331,7 @@ async fn test_permission_set_duplicate_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_permission_set_by_id() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -342,6 +349,7 @@ async fn test_find_permission_set_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_permission_set_by_id_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -353,6 +361,7 @@ async fn test_find_permission_set_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_permission_sets() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -372,6 +381,7 @@ async fn test_list_permission_sets() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_permission_set_label() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -393,6 +403,7 @@ async fn test_update_permission_set_label() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_permission_set_grants() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -418,6 +429,7 @@ async fn test_update_permission_set_grants() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_permission_set_all_fields() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -441,6 +453,7 @@ async fn test_update_permission_set_all_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_permission_set_no_changes() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -462,6 +475,7 @@ async fn test_update_permission_set_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_permission_set_timestamps() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -487,6 +501,7 @@ async fn test_update_permission_set_timestamps() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_permission_set() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -507,6 +522,7 @@ async fn test_delete_permission_set() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_permission_set_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -518,6 +534,7 @@ async fn test_delete_permission_set_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_cascade_from_pack() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -538,6 +555,7 @@ async fn test_permission_set_cascade_from_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_timestamps_auto_set() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -557,6 +575,7 @@ async fn test_permission_set_timestamps_auto_set() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_assignment() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -572,6 +591,7 @@ async fn test_create_permission_assignment() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_assignment_duplicate() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -593,6 +613,7 @@ async fn test_create_permission_assignment_duplicate() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_assignment_invalid_identity() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -609,6 +630,7 @@ async fn test_create_permission_assignment_invalid_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_permission_assignment_invalid_permset() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -625,6 +647,7 @@ async fn test_create_permission_assignment_invalid_permset() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_permission_assignment_by_id() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -644,6 +667,7 @@ async fn test_find_permission_assignment_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_permission_assignment_by_id_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -655,6 +679,7 @@ async fn test_find_permission_assignment_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_permission_assignments() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -676,6 +701,7 @@ async fn test_list_permission_assignments() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_assignments_by_identity() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -700,6 +726,7 @@ async fn test_find_assignments_by_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_assignments_by_identity_empty() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -714,6 +741,7 @@ async fn test_find_assignments_by_identity_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_permission_assignment() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -736,6 +764,7 @@ async fn test_delete_permission_assignment() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_permission_assignment_not_found() {
let pool = create_test_pool().await.expect("Failed to create pool");
@@ -747,6 +776,7 @@ async fn test_delete_permission_assignment_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_assignment_cascade_from_identity() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -769,6 +799,7 @@ async fn test_permission_assignment_cascade_from_identity() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_assignment_cascade_from_permset() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -791,6 +822,7 @@ async fn test_permission_assignment_cascade_from_permset() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_assignment_timestamp_auto_set() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -807,6 +839,7 @@ async fn test_permission_assignment_timestamp_auto_set() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_identities_same_permset() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -832,6 +865,7 @@ async fn test_multiple_identities_same_permset() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_one_identity_multiple_permsets() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -864,6 +898,7 @@ async fn test_one_identity_multiple_permsets() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_set_ordering() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());
@@ -904,6 +939,7 @@ async fn test_permission_set_ordering() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_permission_assignment_ordering() {
let pool = create_test_pool().await.expect("Failed to create pool");
let fixture = PermissionSetFixture::new(pool.clone());

View File

@@ -9,6 +9,7 @@ mod helpers;
use helpers::{ActionFixture, PackFixture};
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_upsert_queue_stats() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -66,6 +67,7 @@ async fn test_upsert_queue_stats() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_queue_stats_by_action() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -107,6 +109,7 @@ async fn test_find_queue_stats_by_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_active_queue_stats() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -171,6 +174,7 @@ async fn test_list_active_queue_stats() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_queue_stats() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -220,6 +224,7 @@ async fn test_delete_queue_stats() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_batch_upsert_queue_stats() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -262,6 +267,7 @@ async fn test_batch_upsert_queue_stats() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_clear_stale_queue_stats() {
let pool = helpers::create_test_pool().await.unwrap();
@@ -301,6 +307,7 @@ async fn test_clear_stale_queue_stats() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_queue_stats_cascade_delete() {
let pool = helpers::create_test_pool().await.unwrap();

View File

@@ -90,6 +90,7 @@ async fn setup_db() -> PgPool {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_artifact() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("create_artifact");
@@ -109,6 +110,7 @@ async fn test_create_artifact() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_exists() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_id_exists");
@@ -130,6 +132,7 @@ async fn test_find_by_id_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_not_exists() {
let pool = setup_db().await;
let non_existent_id = 999_999_999_999i64;
@@ -142,6 +145,7 @@ async fn test_find_by_id_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_id_not_found_error() {
let pool = setup_db().await;
let non_existent_id = 999_999_999_998i64;
@@ -158,6 +162,7 @@ async fn test_get_by_id_not_found_error() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_exists() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_ref_exists");
@@ -177,6 +182,7 @@ async fn test_find_by_ref_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_not_exists() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_ref_not_exists");
@@ -189,6 +195,7 @@ async fn test_find_by_ref_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_artifacts() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("list");
@@ -215,6 +222,7 @@ async fn test_list_artifacts() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_artifact_ref() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("update_ref");
@@ -241,6 +249,7 @@ async fn test_update_artifact_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_artifact_all_fields() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("update_all");
@@ -285,6 +294,7 @@ async fn test_update_artifact_all_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_artifact_no_changes() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("update_no_changes");
@@ -306,6 +316,7 @@ async fn test_update_artifact_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_artifact() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("delete");
@@ -329,6 +340,7 @@ async fn test_delete_artifact() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_artifact_not_exists() {
let pool = setup_db().await;
let non_existent_id = 999_999_999_997i64;
@@ -345,6 +357,7 @@ async fn test_delete_artifact_not_exists() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_all_types() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("all_types");
@@ -372,6 +385,7 @@ async fn test_artifact_all_types() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_all_scopes() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("all_scopes");
@@ -397,6 +411,7 @@ async fn test_artifact_all_scopes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_all_retention_policies() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("all_retention");
@@ -425,6 +440,7 @@ async fn test_artifact_all_retention_policies() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_scope() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_scope");
@@ -456,6 +472,7 @@ async fn test_find_by_scope() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_owner() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_owner");
@@ -486,6 +503,7 @@ async fn test_find_by_owner() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_type() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_type");
@@ -515,6 +533,7 @@ async fn test_find_by_type() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_scope_and_owner() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_scope_and_owner");
@@ -550,6 +569,7 @@ async fn test_find_by_scope_and_owner() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_retention_policy() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("find_by_retention");
@@ -584,6 +604,7 @@ async fn test_find_by_retention_policy() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_timestamps_auto_set_on_create() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("timestamps_create");
@@ -599,6 +620,7 @@ async fn test_timestamps_auto_set_on_create() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_updated_timestamp_changes_on_update() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("timestamps_update");
@@ -629,6 +651,7 @@ async fn test_updated_timestamp_changes_on_update() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_empty_owner() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("empty_owner");
@@ -643,6 +666,7 @@ async fn test_artifact_with_empty_owner() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_special_characters_in_ref() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("special_chars");
@@ -660,6 +684,7 @@ async fn test_artifact_with_special_characters_in_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_zero_retention_limit() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("zero_retention");
@@ -674,6 +699,7 @@ async fn test_artifact_with_zero_retention_limit() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_negative_retention_limit() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("negative_retention");
@@ -688,6 +714,7 @@ async fn test_artifact_with_negative_retention_limit() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_large_retention_limit() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("large_retention");
@@ -702,6 +729,7 @@ async fn test_artifact_with_large_retention_limit() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_artifact_with_long_ref() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("long_ref");
@@ -716,6 +744,7 @@ async fn test_artifact_with_long_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_artifacts_same_ref_allowed() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("duplicate_ref");
@@ -744,6 +773,7 @@ async fn test_multiple_artifacts_same_ref_allowed() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_scope_ordered_by_created() {
let pool = setup_db().await;
let fixture = ArtifactFixture::new("scope_ordering");

View File

@@ -117,6 +117,7 @@ async fn setup_db() -> PgPool {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_runtime() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("create_runtime");
@@ -139,6 +140,7 @@ async fn test_create_runtime() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_runtime_minimal() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("create_runtime_minimal");
@@ -157,6 +159,7 @@ async fn test_create_runtime_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_runtime_by_id() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("find_by_id");
@@ -176,6 +179,7 @@ async fn test_find_runtime_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_runtime_by_id_not_found() {
let pool = setup_db().await;
@@ -187,6 +191,7 @@ async fn test_find_runtime_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_runtime_by_ref() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("find_by_ref");
@@ -206,6 +211,7 @@ async fn test_find_runtime_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_runtime_by_ref_not_found() {
let pool = setup_db().await;
@@ -217,6 +223,7 @@ async fn test_find_runtime_by_ref_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_runtimes() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("list_runtimes");
@@ -241,6 +248,7 @@ async fn test_list_runtimes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_runtime() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("update_runtime");
@@ -275,6 +283,7 @@ async fn test_update_runtime() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_runtime_partial() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("update_partial");
@@ -303,6 +312,7 @@ async fn test_update_runtime_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_runtime_empty() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("update_empty");
@@ -325,6 +335,7 @@ async fn test_update_runtime_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_runtime() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("delete_runtime");
@@ -348,6 +359,7 @@ async fn test_delete_runtime() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_runtime_not_found() {
let pool = setup_db().await;
@@ -373,6 +385,7 @@ async fn test_delete_runtime_not_found() {
// }
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_pack() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("find_by_pack");
@@ -434,6 +447,7 @@ async fn test_find_by_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_pack_empty() {
let pool = setup_db().await;
@@ -445,6 +459,7 @@ async fn test_find_by_pack_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_runtime_created_successfully() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("created_test");
@@ -467,6 +482,7 @@ async fn test_runtime_created_successfully() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_duplicate_ref_fails() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("duplicate_ref");
@@ -482,6 +498,7 @@ async fn test_duplicate_ref_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_json_fields() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("json_fields");
@@ -500,6 +517,7 @@ async fn test_json_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_empty_json_distributions() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("empty_json");
@@ -516,6 +534,7 @@ async fn test_empty_json_distributions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_ordering() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("list_ordering");
@@ -558,6 +577,7 @@ async fn test_list_ordering() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_timestamps() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("timestamps");
@@ -577,6 +597,7 @@ async fn test_timestamps() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_changes_timestamp() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("timestamp_update");
@@ -602,6 +623,7 @@ async fn test_update_changes_timestamp() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_pack_ref_without_pack_id() {
let pool = setup_db().await;
let fixture = RuntimeFixture::new("pack_ref_only");

View File

@@ -101,6 +101,7 @@ async fn setup_db() -> PgPool {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_worker() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("create_worker");
@@ -125,6 +126,7 @@ async fn test_create_worker() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_worker_minimal() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("create_worker_minimal");
@@ -145,6 +147,7 @@ async fn test_create_worker_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_worker_by_id() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_id");
@@ -165,6 +168,7 @@ async fn test_find_worker_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_worker_by_id_not_found() {
let pool = setup_db().await;
@@ -176,6 +180,7 @@ async fn test_find_worker_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_worker_by_name() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_name");
@@ -195,6 +200,7 @@ async fn test_find_worker_by_name() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_worker_by_name_not_found() {
let pool = setup_db().await;
@@ -206,6 +212,7 @@ async fn test_find_worker_by_name_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_workers() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("list_workers");
@@ -230,6 +237,7 @@ async fn test_list_workers() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_worker() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("update_worker");
@@ -267,6 +275,7 @@ async fn test_update_worker() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_worker_partial() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("update_partial");
@@ -298,6 +307,7 @@ async fn test_update_worker_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_worker_empty() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("update_empty");
@@ -320,6 +330,7 @@ async fn test_update_worker_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_worker() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("delete_worker");
@@ -343,6 +354,7 @@ async fn test_delete_worker() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_worker_not_found() {
let pool = setup_db().await;
@@ -358,6 +370,7 @@ async fn test_delete_worker_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_status_active() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_status_active");
@@ -393,6 +406,7 @@ async fn test_find_by_status_active() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_status_all_statuses() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_status_all");
@@ -421,6 +435,7 @@ async fn test_find_by_status_all_statuses() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_type_local() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_type_local");
@@ -451,6 +466,7 @@ async fn test_find_by_type_local() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_type_all_types() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("find_by_type_all");
@@ -474,6 +490,7 @@ async fn test_find_by_type_all_types() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_heartbeat() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("update_heartbeat");
@@ -503,6 +520,7 @@ async fn test_update_heartbeat() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_heartbeat_multiple_times() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("heartbeat_multiple");
@@ -544,6 +562,7 @@ async fn test_update_heartbeat_multiple_times() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_with_runtime() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("with_runtime");
@@ -593,6 +612,7 @@ async fn test_worker_with_runtime() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_type_local() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("type_local");
@@ -606,6 +626,7 @@ async fn test_worker_type_local() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_type_remote() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("type_remote");
@@ -619,6 +640,7 @@ async fn test_worker_type_remote() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_type_container() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("type_container");
@@ -632,6 +654,7 @@ async fn test_worker_type_container() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_status_active() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("status_active");
@@ -646,6 +669,7 @@ async fn test_worker_status_active() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_status_inactive() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("status_inactive");
@@ -660,6 +684,7 @@ async fn test_worker_status_inactive() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_status_busy() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("status_busy");
@@ -674,6 +699,7 @@ async fn test_worker_status_busy() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_worker_status_error() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("status_error");
@@ -692,6 +718,7 @@ async fn test_worker_status_error() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_duplicate_name_allowed() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("duplicate_name");
@@ -718,6 +745,7 @@ async fn test_duplicate_name_allowed() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_json_fields() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("json_fields");
@@ -737,6 +765,7 @@ async fn test_json_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_null_json_fields() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("null_json");
@@ -751,6 +780,7 @@ async fn test_null_json_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_null_status() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("null_status");
@@ -765,6 +795,7 @@ async fn test_null_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_ordering() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("list_ordering");
@@ -807,6 +838,7 @@ async fn test_list_ordering() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_timestamps() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("timestamps");
@@ -826,6 +858,7 @@ async fn test_timestamps() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_changes_timestamp() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("timestamp_update");
@@ -851,6 +884,7 @@ async fn test_update_changes_timestamp() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_heartbeat_updates_timestamp() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("heartbeat_updates");
@@ -879,6 +913,7 @@ async fn test_heartbeat_updates_timestamp() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_port_range() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("port_range");
@@ -899,6 +934,7 @@ async fn test_port_range() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_status_lifecycle() {
let pool = setup_db().await;
let fixture = WorkerFixture::new("status_lifecycle");

View File

@@ -20,6 +20,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule() {
let pool = create_test_pool().await.unwrap();
@@ -80,6 +81,7 @@ async fn test_create_rule() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule_disabled() {
let pool = create_test_pool().await.unwrap();
@@ -121,6 +123,7 @@ async fn test_create_rule_disabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule_with_complex_conditions() {
let pool = create_test_pool().await.unwrap();
@@ -170,6 +173,7 @@ async fn test_create_rule_with_complex_conditions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule_duplicate_ref() {
let pool = create_test_pool().await.unwrap();
@@ -246,6 +250,7 @@ async fn test_create_rule_duplicate_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule_invalid_ref_format_uppercase() {
let pool = create_test_pool().await.unwrap();
@@ -287,6 +292,7 @@ async fn test_create_rule_invalid_ref_format_uppercase() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_rule_invalid_ref_format_no_dot() {
let pool = create_test_pool().await.unwrap();
@@ -332,6 +338,7 @@ async fn test_create_rule_invalid_ref_format_no_dot() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rule_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -380,6 +387,7 @@ async fn test_find_rule_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rule_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -389,6 +397,7 @@ async fn test_find_rule_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rule_by_ref() {
let pool = create_test_pool().await.unwrap();
@@ -437,6 +446,7 @@ async fn test_find_rule_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rule_by_ref_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -448,6 +458,7 @@ async fn test_find_rule_by_ref_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_rules() {
let pool = create_test_pool().await.unwrap();
@@ -500,6 +511,7 @@ async fn test_list_rules() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_rules_ordered_by_ref() {
let pool = create_test_pool().await.unwrap();
@@ -558,6 +570,7 @@ async fn test_list_rules_ordered_by_ref() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_label() {
let pool = create_test_pool().await.unwrap();
@@ -610,6 +623,7 @@ async fn test_update_rule_label() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_description() {
let pool = create_test_pool().await.unwrap();
@@ -660,6 +674,7 @@ async fn test_update_rule_description() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_conditions() {
let pool = create_test_pool().await.unwrap();
@@ -711,6 +726,7 @@ async fn test_update_rule_conditions() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_enabled() {
let pool = create_test_pool().await.unwrap();
@@ -763,6 +779,7 @@ async fn test_update_rule_enabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_multiple_fields() {
let pool = create_test_pool().await.unwrap();
@@ -820,6 +837,7 @@ async fn test_update_rule_multiple_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_rule_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -872,6 +890,7 @@ async fn test_update_rule_no_changes() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_rule() {
let pool = create_test_pool().await.unwrap();
@@ -919,6 +938,7 @@ async fn test_delete_rule() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_rule_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -932,6 +952,7 @@ async fn test_delete_rule_not_found() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rules_by_pack() {
let pool = create_test_pool().await.unwrap();
@@ -1021,6 +1042,7 @@ async fn test_find_rules_by_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rules_by_action() {
let pool = create_test_pool().await.unwrap();
@@ -1102,6 +1124,7 @@ async fn test_find_rules_by_action() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_rules_by_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -1185,6 +1208,7 @@ async fn test_find_rules_by_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enabled_rules() {
let pool = create_test_pool().await.unwrap();
@@ -1264,6 +1288,7 @@ async fn test_find_enabled_rules() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_cascade_delete_pack_deletes_rules() {
let pool = create_test_pool().await.unwrap();
@@ -1319,6 +1344,7 @@ async fn test_cascade_delete_pack_deletes_rules() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_rule_timestamps() {
let pool = create_test_pool().await.unwrap();

View File

@@ -20,6 +20,7 @@ use serde_json::json;
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_minimal() {
let pool = create_test_pool().await.unwrap();
@@ -68,6 +69,7 @@ async fn test_create_sensor_minimal() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_with_param_schema() {
let pool = create_test_pool().await.unwrap();
@@ -119,6 +121,7 @@ async fn test_create_sensor_with_param_schema() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_without_pack() {
let pool = create_test_pool().await.unwrap();
@@ -150,6 +153,7 @@ async fn test_create_sensor_without_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_duplicate_ref_fails() {
let pool = create_test_pool().await.unwrap();
@@ -199,6 +203,7 @@ async fn test_create_sensor_duplicate_ref_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_invalid_ref_format_fails() {
let pool = create_test_pool().await.unwrap();
@@ -252,6 +257,7 @@ async fn test_create_sensor_invalid_ref_format_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_invalid_pack_fails() {
let pool = create_test_pool().await.unwrap();
@@ -288,6 +294,7 @@ async fn test_create_sensor_invalid_pack_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_invalid_trigger_fails() {
let pool = create_test_pool().await.unwrap();
@@ -319,6 +326,7 @@ async fn test_create_sensor_invalid_trigger_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_sensor_invalid_runtime_fails() {
let pool = create_test_pool().await.unwrap();
@@ -354,6 +362,7 @@ async fn test_create_sensor_invalid_runtime_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_exists() {
let pool = create_test_pool().await.unwrap();
@@ -397,6 +406,7 @@ async fn test_find_by_id_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_id_not_exists() {
let pool = create_test_pool().await.unwrap();
@@ -405,6 +415,7 @@ async fn test_find_by_id_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_id_exists() {
let pool = create_test_pool().await.unwrap();
@@ -443,6 +454,7 @@ async fn test_get_by_id_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_id_not_exists_fails() {
let pool = create_test_pool().await.unwrap();
@@ -452,6 +464,7 @@ async fn test_get_by_id_not_exists_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_exists() {
let pool = create_test_pool().await.unwrap();
@@ -494,6 +507,7 @@ async fn test_find_by_ref_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_ref_not_exists() {
let pool = create_test_pool().await.unwrap();
@@ -504,6 +518,7 @@ async fn test_find_by_ref_not_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_ref_exists() {
let pool = create_test_pool().await.unwrap();
@@ -544,6 +559,7 @@ async fn test_get_by_ref_exists() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_get_by_ref_not_exists_fails() {
let pool = create_test_pool().await.unwrap();
@@ -553,6 +569,7 @@ async fn test_get_by_ref_not_exists_fails() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_all_sensors() {
let pool = create_test_pool().await.unwrap();
@@ -610,6 +627,7 @@ async fn test_list_all_sensors() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_empty() {
let pool = create_test_pool().await.unwrap();
@@ -624,6 +642,7 @@ async fn test_list_empty() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_label() {
let pool = create_test_pool().await.unwrap();
@@ -676,6 +695,7 @@ async fn test_update_label() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_description() {
let pool = create_test_pool().await.unwrap();
@@ -720,6 +740,7 @@ async fn test_update_description() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_entrypoint() {
let pool = create_test_pool().await.unwrap();
@@ -764,6 +785,7 @@ async fn test_update_entrypoint() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_enabled_status() {
let pool = create_test_pool().await.unwrap();
@@ -823,6 +845,7 @@ async fn test_update_enabled_status() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_param_schema() {
let pool = create_test_pool().await.unwrap();
@@ -877,6 +900,7 @@ async fn test_update_param_schema() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_multiple_fields() {
let pool = create_test_pool().await.unwrap();
@@ -929,6 +953,7 @@ async fn test_update_multiple_fields() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_no_changes() {
let pool = create_test_pool().await.unwrap();
@@ -978,6 +1003,7 @@ async fn test_update_no_changes() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_nonexistent_sensor_fails() {
let pool = create_test_pool().await.unwrap();
@@ -995,6 +1021,7 @@ async fn test_update_nonexistent_sensor_fails() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_existing_sensor() {
let pool = create_test_pool().await.unwrap();
@@ -1037,6 +1064,7 @@ async fn test_delete_existing_sensor() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_nonexistent_sensor() {
let pool = create_test_pool().await.unwrap();
@@ -1045,6 +1073,7 @@ async fn test_delete_nonexistent_sensor() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_sensor_when_pack_deleted() {
let pool = create_test_pool().await.unwrap();
@@ -1088,6 +1117,7 @@ async fn test_delete_sensor_when_pack_deleted() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_sensor_when_trigger_deleted() {
let pool = create_test_pool().await.unwrap();
@@ -1131,6 +1161,7 @@ async fn test_delete_sensor_when_trigger_deleted() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_sensor_when_runtime_deleted() {
let pool = create_test_pool().await.unwrap();
@@ -1178,6 +1209,7 @@ async fn test_delete_sensor_when_runtime_deleted() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -1252,6 +1284,7 @@ async fn test_find_by_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_trigger_no_sensors() {
let pool = create_test_pool().await.unwrap();
@@ -1273,6 +1306,7 @@ async fn test_find_by_trigger_no_sensors() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enabled() {
let pool = create_test_pool().await.unwrap();
@@ -1329,6 +1363,7 @@ async fn test_find_enabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enabled_empty() {
let pool = create_test_pool().await.unwrap();
@@ -1368,6 +1403,7 @@ async fn test_find_enabled_empty() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_pack() {
let pool = create_test_pool().await.unwrap();
@@ -1453,6 +1489,7 @@ async fn test_find_by_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_pack_no_sensors() {
let pool = create_test_pool().await.unwrap();
@@ -1473,6 +1510,7 @@ async fn test_find_by_pack_no_sensors() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_created_timestamp_set_automatically() {
let pool = create_test_pool().await.unwrap();
@@ -1514,6 +1552,7 @@ async fn test_created_timestamp_set_automatically() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_updated_timestamp_changes_on_update() {
let pool = create_test_pool().await.unwrap();
@@ -1564,6 +1603,7 @@ async fn test_updated_timestamp_changes_on_update() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_updated_timestamp_unchanged_on_read() {
let pool = create_test_pool().await.unwrap();
@@ -1614,6 +1654,7 @@ async fn test_updated_timestamp_unchanged_on_read() {
// ============================================================================
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_param_schema_complex_structure() {
let pool = create_test_pool().await.unwrap();
@@ -1688,6 +1729,7 @@ async fn test_param_schema_complex_structure() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_param_schema_can_be_null() {
let pool = create_test_pool().await.unwrap();

View File

@@ -16,6 +16,7 @@ use helpers::*;
use serde_json::json;
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -48,6 +49,7 @@ async fn test_create_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_trigger_without_pack() {
let pool = create_test_pool().await.unwrap();
@@ -72,6 +74,7 @@ async fn test_create_trigger_without_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_trigger_with_schemas() {
let pool = create_test_pool().await.unwrap();
@@ -116,6 +119,7 @@ async fn test_create_trigger_with_schemas() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_trigger_disabled() {
let pool = create_test_pool().await.unwrap();
@@ -138,6 +142,7 @@ async fn test_create_trigger_disabled() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_create_trigger_duplicate_ref() {
let pool = create_test_pool().await.unwrap();
@@ -182,6 +187,7 @@ async fn test_create_trigger_duplicate_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_trigger_by_id() {
let pool = create_test_pool().await.unwrap();
@@ -215,6 +221,7 @@ async fn test_find_trigger_by_id() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_trigger_by_id_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -224,6 +231,7 @@ async fn test_find_trigger_by_id_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_trigger_by_ref() {
let pool = create_test_pool().await.unwrap();
@@ -257,6 +265,7 @@ async fn test_find_trigger_by_ref() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_trigger_by_ref_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -268,6 +277,7 @@ async fn test_find_trigger_by_ref_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_list_triggers() {
let pool = create_test_pool().await.unwrap();
@@ -314,6 +324,7 @@ async fn test_list_triggers() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_triggers_by_pack() {
let pool = create_test_pool().await.unwrap();
@@ -384,6 +395,7 @@ async fn test_find_triggers_by_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_enabled_triggers() {
let pool = create_test_pool().await.unwrap();
@@ -436,6 +448,7 @@ async fn test_find_enabled_triggers() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -483,6 +496,7 @@ async fn test_update_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_trigger_partial() {
let pool = create_test_pool().await.unwrap();
@@ -520,6 +534,7 @@ async fn test_update_trigger_partial() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_trigger_schemas() {
let pool = create_test_pool().await.unwrap();
@@ -569,6 +584,7 @@ async fn test_update_trigger_schemas() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_update_trigger_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -593,6 +609,7 @@ async fn test_update_trigger_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_trigger() {
let pool = create_test_pool().await.unwrap();
@@ -629,6 +646,7 @@ async fn test_delete_trigger() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_delete_trigger_not_found() {
let pool = create_test_pool().await.unwrap();
@@ -638,6 +656,7 @@ async fn test_delete_trigger_not_found() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_trigger_timestamps_auto_populated() {
let pool = create_test_pool().await.unwrap();
@@ -666,6 +685,7 @@ async fn test_trigger_timestamps_auto_populated() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_trigger_updated_changes_on_update() {
let pool = create_test_pool().await.unwrap();
@@ -709,6 +729,7 @@ async fn test_trigger_updated_changes_on_update() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_multiple_triggers_same_pack() {
let pool = create_test_pool().await.unwrap();
@@ -754,6 +775,7 @@ async fn test_multiple_triggers_same_pack() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_trigger_cascade_delete_with_pack() {
let pool = create_test_pool().await.unwrap();

View File

@@ -36,6 +36,7 @@ async fn create_test_trigger(pool: &PgPool) -> Trigger {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_enable() {
let pool = setup_test_db().await;
let trigger = create_test_trigger(&pool).await;
@@ -76,6 +77,7 @@ async fn test_webhook_enable() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_disable() {
let pool = setup_test_db().await;
let trigger = create_test_trigger(&pool).await;
@@ -113,6 +115,7 @@ async fn test_webhook_disable() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_key_regeneration() {
let pool = setup_test_db().await;
let trigger = create_test_trigger(&pool).await;
@@ -153,6 +156,7 @@ async fn test_webhook_key_regeneration() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_find_by_webhook_key() {
let pool = setup_test_db().await;
let trigger = create_test_trigger(&pool).await;
@@ -189,6 +193,7 @@ async fn test_find_by_webhook_key() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_webhook_key_uniqueness() {
let pool = setup_test_db().await;
let trigger1 = create_test_trigger(&pool).await;
@@ -220,6 +225,7 @@ async fn test_webhook_key_uniqueness() {
}
#[tokio::test]
#[ignore = "integration test — requires database"]
async fn test_enable_webhook_idempotent() {
let pool = setup_test_db().await;
let trigger = create_test_trigger(&pool).await;

View File

@@ -1,7 +1,8 @@
use attune_executor::workflow::context::WorkflowContext;
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use serde_json::json;
use std::collections::HashMap;
use std::hint::black_box;
fn bench_context_clone_empty(c: &mut Criterion) {
let ctx = WorkflowContext::new(json!({}), HashMap::new());

View File

@@ -44,6 +44,9 @@ struct Args {
#[tokio::main]
async fn main() -> Result<()> {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
let args = Args::parse();
// Initialize tracing with specified log level

View File

@@ -27,6 +27,9 @@ struct Args {
#[tokio::main]
async fn main() -> Result<()> {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
let args = Args::parse();
// Initialize tracing with specified log level

View File

@@ -26,6 +26,9 @@ struct Args {
#[tokio::main]
async fn main() -> Result<()> {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
let args = Args::parse();
// Initialize tracing with specified log level

View File

@@ -30,9 +30,7 @@ hostname = "0.4"
regex = { workspace = true }
async-trait = { workspace = true }
thiserror = { workspace = true }
aes-gcm = { workspace = true }
sha2 = { workspace = true }
base64 = { workspace = true }
tempfile = { workspace = true }
jsonwebtoken = { workspace = true }
libc = "0.2"

View File

@@ -23,6 +23,9 @@ struct Args {
#[tokio::main]
async fn main() -> Result<()> {
// Install HMAC-only JWT crypto provider (must be before any token operations)
attune_common::auth::install_crypto_provider();
// Initialize tracing
tracing_subscriber::fmt()
.with_target(false)

View File

@@ -105,8 +105,9 @@ pub struct ExecutionContext {
/// Environment variables
pub env: HashMap<String, String>,
/// Secrets (passed securely via stdin, not environment variables)
pub secrets: HashMap<String, String>,
/// Secrets (passed securely via stdin, not environment variables).
/// Values are JSON — strings, objects, arrays, numbers, or booleans.
pub secrets: HashMap<String, serde_json::Value>,
/// Execution timeout in seconds
pub timeout: Option<u64>,

View File

@@ -39,7 +39,7 @@ impl NativeRuntime {
async fn execute_binary(
&self,
binary_path: PathBuf,
secrets: &std::collections::HashMap<String, String>,
_secrets: &std::collections::HashMap<String, serde_json::Value>,
env: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout: Option<u64>,
@@ -94,31 +94,17 @@ impl NativeRuntime {
.spawn()
.map_err(|e| RuntimeError::ExecutionFailed(format!("Failed to spawn binary: {}", e)))?;
// Write to stdin - parameters (if using stdin delivery) and/or secrets
// If this fails, the process has already started, so we continue and capture output
// Write parameters to stdin as a single JSON line.
// Secrets are merged into the parameters map by the caller, so the
// action reads everything with a single readline().
let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
let mut error = None;
// Write parameters first if using stdin delivery
if let Some(params_data) = parameters_stdin {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
}
// Write secrets as JSON (always, for backward compatibility)
if error.is_none() && !secrets.is_empty() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
error = Some(format!("Failed to write secrets to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
Err(e) => error = Some(format!("Failed to serialize secrets: {}", e)),
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
@@ -331,6 +317,15 @@ impl Runtime for NativeRuntime {
context.action_ref, context.execution_id, context.parameter_delivery, context.parameter_format
);
// Merge secrets into parameters as a single JSON document.
// Actions receive everything via one readline() on stdin.
// Secret values are already JsonValue (string, object, array, etc.)
// so they are inserted directly without wrapping.
let mut merged_parameters = context.parameters.clone();
for (key, value) in &context.secrets {
merged_parameters.insert(key.clone(), value.clone());
}
// Prepare environment and parameters according to delivery method
let mut env = context.env.clone();
let config = ParameterDeliveryConfig {
@@ -339,7 +334,7 @@ impl Runtime for NativeRuntime {
};
let prepared_params =
parameter_passing::prepare_parameters(&context.parameters, &mut env, config)?;
parameter_passing::prepare_parameters(&merged_parameters, &mut env, config)?;
// Get stdin content if parameters are delivered via stdin
let parameters_stdin = prepared_params.stdin_content();
@@ -351,7 +346,7 @@ impl Runtime for NativeRuntime {
self.execute_binary(
binary_path,
&context.secrets,
&std::collections::HashMap::new(),
&env,
parameters_stdin,
context.timeout,

View File

@@ -20,6 +20,7 @@ use super::{
};
use async_trait::async_trait;
use attune_common::models::runtime::{EnvironmentConfig, RuntimeExecutionConfig};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::process::Command;
use tracing::{debug, error, info, warn};
@@ -645,12 +646,21 @@ impl Runtime for ProcessRuntime {
env.insert(key.clone(), resolved);
}
}
// Merge secrets into parameters as a single JSON document.
// Actions receive everything via one readline() on stdin.
// Secret values are already JsonValue (string, object, array, etc.)
// so they are inserted directly without wrapping.
let mut merged_parameters = context.parameters.clone();
for (key, value) in &context.secrets {
merged_parameters.insert(key.clone(), value.clone());
}
let param_config = ParameterDeliveryConfig {
delivery: context.parameter_delivery,
format: context.parameter_format,
};
let prepared_params =
parameter_passing::prepare_parameters(&context.parameters, &mut env, param_config)?;
parameter_passing::prepare_parameters(&merged_parameters, &mut env, param_config)?;
let parameters_stdin = prepared_params.stdin_content();
// Determine working directory: use context override, or pack dir
@@ -725,10 +735,11 @@ impl Runtime for ProcessRuntime {
.unwrap_or_else(|| "<none>".to_string()),
);
// Execute with streaming output capture (with optional cancellation support)
// Execute with streaming output capture (with optional cancellation support).
// Secrets are already merged into parameters — no separate secrets arg needed.
process_executor::execute_streaming_cancellable(
cmd,
&context.secrets,
&HashMap::new(),
parameters_stdin,
context.timeout,
context.max_stdout_bytes,

View File

@@ -2,7 +2,7 @@
//!
//! Provides common subprocess execution infrastructure used by all runtime
//! implementations. Handles streaming stdout/stderr capture, bounded log
//! collection, timeout management, stdin parameter/secret delivery, and
//! collection, timeout management, stdin parameter delivery, and
//! output format parsing.
//!
//! ## Cancellation Support
@@ -28,22 +28,22 @@ use tracing::{debug, info, warn};
/// This is the core execution function used by all runtime implementations.
/// It handles:
/// - Spawning the process with piped I/O
/// - Writing parameters and secrets to stdin
/// - Writing parameters (with secrets merged in) to stdin
/// - Streaming stdout/stderr with bounded log collection
/// - Timeout management
/// - Output format parsing (JSON, YAML, JSONL, text)
///
/// # Arguments
/// * `cmd` - Pre-configured `Command` (interpreter, args, env vars, working dir already set)
/// * `secrets` - Secrets to pass via stdin (as JSON)
/// * `parameters_stdin` - Optional parameter data to write to stdin before secrets
/// * `secrets` - Deprecated/unused — secrets are now merged into parameters by the caller
/// * `parameters_stdin` - Optional parameter data (including secrets) to write to stdin
/// * `timeout_secs` - Optional execution timeout in seconds
/// * `max_stdout_bytes` - Maximum stdout size before truncation
/// * `max_stderr_bytes` - Maximum stderr size before truncation
/// * `output_format` - How to parse stdout (Text, Json, Yaml, Jsonl)
pub async fn execute_streaming(
cmd: Command,
secrets: &HashMap<String, String>,
_secrets: &HashMap<String, serde_json::Value>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
@@ -52,7 +52,7 @@ pub async fn execute_streaming(
) -> RuntimeResult<ExecutionResult> {
execute_streaming_cancellable(
cmd,
secrets,
_secrets,
parameters_stdin,
timeout_secs,
max_stdout_bytes,
@@ -68,7 +68,7 @@ pub async fn execute_streaming(
/// This is the core execution function used by all runtime implementations.
/// It handles:
/// - Spawning the process with piped I/O
/// - Writing parameters and secrets to stdin
/// - Writing parameters (with secrets merged in) to stdin
/// - Streaming stdout/stderr with bounded log collection
/// - Timeout management
/// - Graceful cancellation via SIGINT → SIGTERM → SIGKILL escalation
@@ -76,8 +76,8 @@ pub async fn execute_streaming(
///
/// # Arguments
/// * `cmd` - Pre-configured `Command` (interpreter, args, env vars, working dir already set)
/// * `secrets` - Secrets to pass via stdin (as JSON)
/// * `parameters_stdin` - Optional parameter data to write to stdin before secrets
/// * `secrets` - Deprecated/unused — secrets are now merged into parameters by the caller
/// * `parameters_stdin` - Optional parameter data (including secrets) to write to stdin
/// * `timeout_secs` - Optional execution timeout in seconds
/// * `max_stdout_bytes` - Maximum stdout size before truncation
/// * `max_stderr_bytes` - Maximum stderr size before truncation
@@ -86,7 +86,7 @@ pub async fn execute_streaming(
#[allow(clippy::too_many_arguments)]
pub async fn execute_streaming_cancellable(
mut cmd: Command,
secrets: &HashMap<String, String>,
_secrets: &HashMap<String, serde_json::Value>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
@@ -103,34 +103,19 @@ pub async fn execute_streaming_cancellable(
.stderr(std::process::Stdio::piped())
.spawn()?;
// Write to stdin - parameters (if using stdin delivery) and/or secrets.
// Write to stdin - parameters (with secrets already merged in by the caller).
// If this fails, the process has already started, so we continue and capture output.
let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
let mut error = None;
// Write parameters first if using stdin delivery.
// When the caller provides parameters_stdin (i.e. the action uses
// stdin delivery), always write the content — even if it's "{}" —
// because the script expects to read valid JSON from stdin.
// Write parameters to stdin as a single JSON line.
// Secrets are merged into the parameters map by the caller, so the
// action reads everything with a single readline().
if let Some(params_data) = parameters_stdin {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
}
// Write secrets as JSON (always, for backward compatibility)
if error.is_none() && !secrets.is_empty() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
error = Some(format!("Failed to write secrets to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
Err(e) => error = Some(format!("Failed to serialize secrets: {}", e)),
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}

View File

@@ -65,7 +65,7 @@ impl ShellRuntime {
async fn execute_with_streaming(
&self,
mut cmd: Command,
secrets: &std::collections::HashMap<String, String>,
_secrets: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
max_stdout_bytes: usize,
@@ -81,39 +81,19 @@ impl ShellRuntime {
.stderr(Stdio::piped())
.spawn()?;
// Write to stdin - parameters (if using stdin delivery) and/or secrets
// If this fails, the process has already started, so we continue and capture output
// Write to stdin - parameters (with secrets already merged in by the caller).
// If this fails, the process has already started, so we continue and capture output.
let stdin_write_error = if let Some(mut stdin) = child.stdin.take() {
let mut error = None;
// Write parameters first if using stdin delivery.
// Skip empty/trivial content ("{}","","[]") to avoid polluting stdin
// before secrets — scripts that read secrets via readline() expect
// the secrets JSON as the first line.
let has_real_params = parameters_stdin
.map(|s| !matches!(s.trim(), "" | "{}" | "[]"))
.unwrap_or(false);
// Write parameters to stdin as a single JSON line.
// Secrets are merged into the parameters map by the caller, so the
// action reads everything with a single readline().
if let Some(params_data) = parameters_stdin {
if has_real_params {
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n---ATTUNE_PARAMS_END---\n").await {
error = Some(format!("Failed to write parameter delimiter: {}", e));
}
}
}
// Write secrets as JSON (always, for backward compatibility)
if error.is_none() && !secrets.is_empty() {
match serde_json::to_string(secrets) {
Ok(secrets_json) => {
if let Err(e) = stdin.write_all(secrets_json.as_bytes()).await {
error = Some(format!("Failed to write secrets to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
Err(e) => error = Some(format!("Failed to serialize secrets: {}", e)),
if let Err(e) = stdin.write_all(params_data.as_bytes()).await {
error = Some(format!("Failed to write parameters to stdin: {}", e));
} else if let Err(e) = stdin.write_all(b"\n").await {
error = Some(format!("Failed to write newline to stdin: {}", e));
}
}
@@ -338,7 +318,12 @@ impl ShellRuntime {
script.push_str("declare -A ATTUNE_SECRETS\n");
for (key, value) in &context.secrets {
let escaped_key = bash_single_quote_escape(key);
let escaped_val = bash_single_quote_escape(value);
// Serialize structured JSON values to string for bash; plain strings used directly.
let val_str = match value {
serde_json::Value::String(s) => s.clone(),
other => other.to_string(),
};
let escaped_val = bash_single_quote_escape(&val_str);
script.push_str(&format!(
"ATTUNE_SECRETS['{}']='{}'\n",
escaped_key, escaped_val
@@ -388,7 +373,7 @@ impl ShellRuntime {
async fn execute_shell_file(
&self,
script_path: PathBuf,
secrets: &std::collections::HashMap<String, String>,
_secrets: &std::collections::HashMap<String, String>,
env: &std::collections::HashMap<String, String>,
parameters_stdin: Option<&str>,
timeout_secs: Option<u64>,
@@ -396,11 +381,7 @@ impl ShellRuntime {
max_stderr_bytes: usize,
output_format: OutputFormat,
) -> RuntimeResult<ExecutionResult> {
debug!(
"Executing shell file: {:?} with {} secrets",
script_path,
secrets.len()
);
debug!("Executing shell file: {:?}", script_path,);
// Build command
let mut cmd = Command::new(&self.shell_path);
@@ -413,7 +394,7 @@ impl ShellRuntime {
self.execute_with_streaming(
cmd,
secrets,
&std::collections::HashMap::new(),
parameters_stdin,
timeout_secs,
max_stdout_bytes,
@@ -463,6 +444,13 @@ impl Runtime for ShellRuntime {
context.parameters
);
// Merge secrets into parameters as a single JSON document.
// Actions receive everything via one readline() on stdin.
let mut merged_parameters = context.parameters.clone();
for (key, value) in &context.secrets {
merged_parameters.insert(key.clone(), value.clone());
}
// Prepare environment and parameters according to delivery method
let mut env = context.env.clone();
let config = ParameterDeliveryConfig {
@@ -471,7 +459,7 @@ impl Runtime for ShellRuntime {
};
let prepared_params =
parameter_passing::prepare_parameters(&context.parameters, &mut env, config)?;
parameter_passing::prepare_parameters(&merged_parameters, &mut env, config)?;
// Get stdin content if parameters are delivered via stdin
let parameters_stdin = prepared_params.stdin_content();
@@ -486,12 +474,13 @@ impl Runtime for ShellRuntime {
info!("No parameters will be sent via stdin");
}
// If code_path is provided, execute the file directly
// If code_path is provided, execute the file directly.
// Secrets are already merged into parameters — no separate secrets arg needed.
if let Some(code_path) = &context.code_path {
return self
.execute_shell_file(
code_path.clone(),
&context.secrets,
&HashMap::new(),
&env,
parameters_stdin,
context.timeout,
@@ -747,8 +736,11 @@ mod tests {
env: HashMap::new(),
secrets: {
let mut s = HashMap::new();
s.insert("api_key".to_string(), "secret_key_12345".to_string());
s.insert("db_password".to_string(), "super_secret_pass".to_string());
s.insert("api_key".to_string(), serde_json::json!("secret_key_12345"));
s.insert(
"db_password".to_string(),
serde_json::json!("super_secret_pass"),
);
s
},
timeout: Some(10),

View File

@@ -2,31 +2,42 @@
//!
//! Handles fetching, decrypting, and injecting secrets into execution environments.
//! Secrets are stored encrypted in the database and decrypted on-demand for execution.
//!
//! Key values are stored as JSONB — they can be plain strings, objects, arrays,
//! numbers, or booleans. When encrypted, the JSON value is serialised to a
//! compact string, encrypted, and stored as a JSON string. Decryption reverses
//! this process, recovering the original structured value.
//!
//! Encryption and decryption use the shared `attune_common::crypto` module
//! (`encrypt_json` / `decrypt_json`) which stores ciphertext in the format
//! `BASE64(nonce ++ ciphertext)`. This is the same format used by the API
//! service, so keys encrypted by the API can be decrypted by the worker and
//! vice versa.
use aes_gcm::{
aead::{Aead, AeadCore, KeyInit, OsRng},
Aes256Gcm, Key as AesKey, Nonce,
};
use attune_common::error::{Error, Result};
use attune_common::models::{key::Key, Action, OwnerType};
use attune_common::repositories::key::KeyRepository;
use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
use sha2::{Digest, Sha256};
use serde_json::Value as JsonValue;
use sqlx::PgPool;
use std::collections::HashMap;
use tracing::{debug, warn};
/// Secret manager for handling secret operations
/// Secret manager for handling secret operations.
///
/// Holds the database connection pool and the raw encryption key string.
/// The encryption key is passed through to `attune_common::crypto` which
/// derives the AES-256 key internally via SHA-256.
pub struct SecretManager {
pool: PgPool,
encryption_key: Option<Vec<u8>>,
encryption_key: Option<String>,
}
impl SecretManager {
/// Create a new secret manager
/// Create a new secret manager.
///
/// `encryption_key` is the raw key string (≥ 32 characters) used for
/// AES-256-GCM encryption/decryption via `attune_common::crypto`.
pub fn new(pool: PgPool, encryption_key: Option<String>) -> Result<Self> {
let encryption_key = encryption_key.map(|key| Self::derive_key(&key));
if encryption_key.is_none() {
warn!("No encryption key configured - encrypted secrets will fail to decrypt");
}
@@ -37,14 +48,7 @@ impl SecretManager {
})
}
/// Derive a 32-byte AES-256 key by hashing the key string with SHA-256.
///
/// SHA-256 always yields exactly 32 bytes, which matches the AES-256 key size.
fn derive_key(key: &str) -> Vec<u8> {
    Sha256::digest(key.as_bytes()).to_vec()
}
/// Fetch all secrets relevant to an action execution
/// Fetch all secrets relevant to an action execution.
///
/// Secrets are fetched in order of precedence:
/// 1. System-level secrets (owner_type='system')
@@ -52,10 +56,12 @@ impl SecretManager {
/// 3. Action-level secrets (owner_type='action')
///
/// More specific secrets override less specific ones with the same name.
/// Values are returned as [`JsonValue`] — they may be strings, objects,
/// arrays, numbers, or booleans.
pub async fn fetch_secrets_for_action(
&self,
action: &Action,
) -> Result<HashMap<String, String>> {
) -> Result<HashMap<String, JsonValue>> {
debug!("Fetching secrets for action: {}", action.r#ref);
let mut secrets = HashMap::new();
@@ -126,13 +132,17 @@ impl SecretManager {
.map_err(Into::into)
}
/// Decrypt a secret if it's encrypted, otherwise return the value as-is
fn decrypt_if_needed(&self, key: &Key) -> Result<String> {
/// Decrypt a secret if it's encrypted, otherwise return the value as-is.
///
/// For unencrypted keys the JSONB value is returned directly.
/// For encrypted keys the value (a JSON string containing base64 ciphertext)
/// is decrypted via `attune_common::crypto::decrypt_json` and parsed back
/// into the original [`JsonValue`].
fn decrypt_if_needed(&self, key: &Key) -> Result<JsonValue> {
if !key.encrypted {
return Ok(key.value.clone());
}
// Encrypted secret requires encryption key
let encryption_key = self
.encryption_key
.as_ref()
@@ -140,7 +150,7 @@ impl SecretManager {
// Verify encryption key hash if present
if let Some(expected_hash) = &key.encryption_key_hash {
let actual_hash = Self::compute_key_hash_from_bytes(encryption_key);
let actual_hash = attune_common::crypto::hash_encryption_key(encryption_key);
if &actual_hash != expected_hash {
return Err(Error::Internal(format!(
"Encryption key hash mismatch for secret '{}'",
@@ -149,100 +159,23 @@ impl SecretManager {
}
}
Self::decrypt_value(&key.value, encryption_key)
attune_common::crypto::decrypt_json(&key.value, encryption_key)
.map_err(|e| Error::Internal(format!("Failed to decrypt key '{}': {}", key.name, e)))
}
/// Decrypt an encrypted value
/// Compute hash of the encryption key.
///
/// Format: "nonce:ciphertext" (both base64-encoded)
/// Decrypt an AES-256-GCM encrypted value.
///
/// Expected input format: `"nonce:ciphertext"` where both halves are
/// base64-encoded. `key` must be exactly 32 bytes (an AES-256 key,
/// e.g. produced by `derive_key`).
///
/// Returns the decrypted plaintext as a `String`, or `Error::Internal`
/// if the format is malformed, base64 decoding fails, the key length is
/// wrong, GCM authentication/decryption fails, or the plaintext is not
/// valid UTF-8.
fn decrypt_value(encrypted_value: &str, key: &[u8]) -> Result<String> {
    // Parse format: "nonce:ciphertext" — exactly one ':' separator expected.
    let parts: Vec<&str> = encrypted_value.split(':').collect();
    if parts.len() != 2 {
        return Err(Error::Internal(
            "Invalid encrypted value format. Expected 'nonce:ciphertext'".to_string(),
        ));
    }
    // Decode both halves from base64 before attempting decryption.
    let nonce_bytes = BASE64
        .decode(parts[0])
        .map_err(|e| Error::Internal(format!("Failed to decode nonce: {}", e)))?;
    let ciphertext = BASE64
        .decode(parts[1])
        .map_err(|e| Error::Internal(format!("Failed to decode ciphertext: {}", e)))?;
    // Create cipher — key must be exactly 32 bytes for AES-256.
    let key_array: [u8; 32] = key
        .try_into()
        .map_err(|_| Error::Internal("Invalid key length".to_string()))?;
    let cipher_key = AesKey::<Aes256Gcm>::from_slice(&key_array);
    let cipher = Aes256Gcm::new(cipher_key);
    // Create nonce from the decoded bytes (GCM uses a 96-bit nonce).
    let nonce = Nonce::from_slice(&nonce_bytes);
    // Decrypt; GCM also authenticates, so tampering or a wrong key fails here.
    let plaintext = cipher
        .decrypt(nonce, ciphertext.as_ref())
        .map_err(|e| Error::Internal(format!("Decryption failed: {}", e)))?;
    String::from_utf8(plaintext)
        .map_err(|e| Error::Internal(format!("Invalid UTF-8 in decrypted value: {}", e)))
}
/// Encrypt a value (for testing and future use)
#[allow(dead_code)]
pub fn encrypt_value(&self, plaintext: &str) -> Result<String> {
let encryption_key = self
.encryption_key
.as_ref()
.ok_or_else(|| Error::Internal("No encryption key configured".to_string()))?;
Self::encrypt_value_with_key(plaintext, encryption_key)
}
/// Encrypt a value with a specific key (static method).
///
/// `encryption_key` must be exactly 32 bytes (AES-256). A fresh random
/// 96-bit nonce is generated per call, so encrypting the same plaintext
/// twice yields different ciphertexts.
///
/// Output format: `"nonce:ciphertext"`, both halves base64-encoded —
/// the format consumed by `decrypt_value`.
fn encrypt_value_with_key(plaintext: &str, encryption_key: &[u8]) -> Result<String> {
    // Create cipher — key must be exactly 32 bytes for AES-256.
    let key_array: [u8; 32] = encryption_key
        .try_into()
        .map_err(|_| Error::Internal("Invalid key length".to_string()))?;
    let cipher_key = AesKey::<Aes256Gcm>::from_slice(&key_array);
    let cipher = Aes256Gcm::new(cipher_key);
    // Generate random nonce — never reuse a nonce with the same key under GCM.
    let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
    // Encrypt (GCM appends the authentication tag to the ciphertext).
    let ciphertext = cipher
        .encrypt(&nonce, plaintext.as_bytes())
        .map_err(|e| Error::Internal(format!("Encryption failed: {}", e)))?;
    // Format: "nonce:ciphertext" (both base64-encoded)
    let nonce_b64 = BASE64.encode(nonce);
    let ciphertext_b64 = BASE64.encode(&ciphertext);
    Ok(format!("{}:{}", nonce_b64, ciphertext_b64))
}
/// Compute hash of the encryption key
/// Uses the shared `attune_common::crypto::hash_encryption_key` so the
/// hash format is consistent with values stored by the API.
pub fn compute_key_hash(&self) -> String {
if let Some(key) = &self.encryption_key {
Self::compute_key_hash_from_bytes(key)
attune_common::crypto::hash_encryption_key(key)
} else {
String::new()
}
}
/// Compute hash from key bytes (static method): SHA-256 of the raw bytes,
/// rendered as lowercase hex.
fn compute_key_hash_from_bytes(key: &[u8]) -> String {
    format!("{:x}", Sha256::digest(key))
}
/// Prepare secrets as environment variables
/// Prepare secrets as environment variables.
///
/// **DEPRECATED - SECURITY VULNERABILITY**: This method exposes secrets in the process
/// environment, making them visible in process listings (`ps auxe`) and `/proc/[pid]/environ`.
@@ -252,16 +185,26 @@ impl SecretManager {
///
/// Secret names are converted to uppercase and prefixed with "SECRET_"
/// Example: "api_key" becomes "SECRET_API_KEY"
///
/// String values are used directly; structured values are serialised to
/// compact JSON.
#[deprecated(
since = "0.2.0",
note = "Secrets in environment variables are insecure. Pass secrets via stdin instead."
)]
pub fn prepare_secret_env(&self, secrets: &HashMap<String, String>) -> HashMap<String, String> {
pub fn prepare_secret_env(
&self,
secrets: &HashMap<String, JsonValue>,
) -> HashMap<String, String> {
secrets
.iter()
.map(|(name, value)| {
let env_name = format!("SECRET_{}", name.to_uppercase().replace('-', "_"));
(env_name, value.clone())
let env_value = match value {
JsonValue::String(s) => s.clone(),
other => other.to_string(),
};
(env_name, env_value)
})
.collect()
}
@@ -270,78 +213,79 @@ impl SecretManager {
#[cfg(test)]
mod tests {
use super::*;
use attune_common::crypto;
// Helper to derive a test encryption key
fn derive_test_key(key: &str) -> Vec<u8> {
let mut hasher = Sha256::new();
hasher.update(key.as_bytes());
hasher.finalize().to_vec()
// ── encrypt / decrypt round-trip using shared crypto ───────────
const TEST_KEY: &str = "this_is_a_test_key_that_is_32_chars_long!!!!";
#[test]
fn test_encrypt_decrypt_roundtrip_string() {
    // A plain JSON string must survive an encrypt → decrypt cycle unchanged.
    let original = serde_json::json!("my-secret-value");
    let ciphertext = crypto::encrypt_json(&original, TEST_KEY).unwrap();
    assert_eq!(crypto::decrypt_json(&ciphertext, TEST_KEY).unwrap(), original);
}
#[test]
fn test_encrypt_decrypt_roundtrip() {
let key = derive_test_key("test-encryption-key-12345");
let plaintext = "my-secret-value";
let encrypted = SecretManager::encrypt_value_with_key(plaintext, &key).unwrap();
// Verify format
assert!(encrypted.contains(':'));
let parts: Vec<&str> = encrypted.split(':').collect();
assert_eq!(parts.len(), 2);
// Decrypt and verify
let decrypted = SecretManager::decrypt_value(&encrypted, &key).unwrap();
assert_eq!(decrypted, plaintext);
fn test_encrypt_decrypt_roundtrip_object() {
    // Structured JSON (an object) must round-trip through encryption intact.
    let original = serde_json::json!({"user": "admin", "password": "s3cret"});
    let ciphertext = crypto::encrypt_json(&original, TEST_KEY).unwrap();
    assert_eq!(crypto::decrypt_json(&ciphertext, TEST_KEY).unwrap(), original);
}
#[test]
fn test_encrypt_decrypt_different_values() {
let key = derive_test_key("test-encryption-key-12345");
fn test_encrypt_produces_different_ciphertext() {
let value = serde_json::json!("my-secret-value");
let encrypted1 = crypto::encrypt_json(&value, TEST_KEY).unwrap();
let encrypted2 = crypto::encrypt_json(&value, TEST_KEY).unwrap();
let plaintext1 = "secret1";
let plaintext2 = "secret2";
let encrypted1 = SecretManager::encrypt_value_with_key(plaintext1, &key).unwrap();
let encrypted2 = SecretManager::encrypt_value_with_key(plaintext2, &key).unwrap();
// Encrypted values should be different (due to random nonces)
// Different ciphertexts due to random nonces
assert_ne!(encrypted1, encrypted2);
// Both should decrypt correctly
let decrypted1 = SecretManager::decrypt_value(&encrypted1, &key).unwrap();
let decrypted2 = SecretManager::decrypt_value(&encrypted2, &key).unwrap();
assert_eq!(decrypted1, plaintext1);
assert_eq!(decrypted2, plaintext2);
// Both decrypt to the same value
assert_eq!(crypto::decrypt_json(&encrypted1, TEST_KEY).unwrap(), value);
assert_eq!(crypto::decrypt_json(&encrypted2, TEST_KEY).unwrap(), value);
}
#[test]
fn test_decrypt_with_wrong_key() {
let key1 = derive_test_key("key1");
let key2 = derive_test_key("key2");
fn test_decrypt_with_wrong_key_fails() {
let value = serde_json::json!("secret");
let encrypted = crypto::encrypt_json(&value, TEST_KEY).unwrap();
let plaintext = "secret";
let encrypted = SecretManager::encrypt_value_with_key(plaintext, &key1).unwrap();
// Decrypting with wrong key should fail
let result = SecretManager::decrypt_value(&encrypted, &key2);
assert!(result.is_err());
let wrong_key = "wrong_key_that_is_also_32_chars_long!!!";
assert!(crypto::decrypt_json(&encrypted, wrong_key).is_err());
}
// ── prepare_secret_env ────────────────────────────────────────
#[test]
fn test_prepare_secret_env() {
// Test the static method directly without creating a SecretManager instance
let mut secrets = HashMap::new();
secrets.insert("api_key".to_string(), "secret123".to_string());
secrets.insert("db-password".to_string(), "pass456".to_string());
secrets.insert("oauth_token".to_string(), "token789".to_string());
let mut secrets: HashMap<String, JsonValue> = HashMap::new();
secrets.insert(
"api_key".to_string(),
JsonValue::String("secret123".to_string()),
);
secrets.insert(
"db-password".to_string(),
JsonValue::String("pass456".to_string()),
);
secrets.insert(
"oauth_token".to_string(),
JsonValue::String("token789".to_string()),
);
// Call prepare_secret_env as a static-like method
// Replicate the logic without constructing a full SecretManager
let env: HashMap<String, String> = secrets
.iter()
.map(|(name, value)| {
let env_name = format!("SECRET_{}", name.to_uppercase().replace('-', "_"));
(env_name, value.clone())
let env_value = match value {
JsonValue::String(s) => s.clone(),
other => other.to_string(),
};
(env_name, env_value)
})
.collect();
@@ -352,35 +296,47 @@ mod tests {
}
#[test]
fn test_compute_key_hash() {
let key1 = derive_test_key("test-key");
let key2 = derive_test_key("test-key");
let key3 = derive_test_key("different-key");
fn test_prepare_secret_env_structured_value() {
let mut secrets: HashMap<String, JsonValue> = HashMap::new();
secrets.insert(
"db_config".to_string(),
serde_json::json!({"host": "db.example.com", "port": 5432}),
);
let hash1 = SecretManager::compute_key_hash_from_bytes(&key1);
let hash2 = SecretManager::compute_key_hash_from_bytes(&key2);
let hash3 = SecretManager::compute_key_hash_from_bytes(&key3);
let env: HashMap<String, String> = secrets
.iter()
.map(|(name, value)| {
let env_name = format!("SECRET_{}", name.to_uppercase().replace('-', "_"));
let env_value = match value {
JsonValue::String(s) => s.clone(),
other => other.to_string(),
};
(env_name, env_value)
})
.collect();
// Same key should produce same hash
// Structured values should be serialised to compact JSON
let db_config = env.get("SECRET_DB_CONFIG").unwrap();
let parsed: serde_json::Value = serde_json::from_str(db_config).unwrap();
assert_eq!(parsed["host"], "db.example.com");
assert_eq!(parsed["port"], 5432);
}
// ── compute_key_hash ──────────────────────────────────────────
#[test]
fn test_compute_key_hash_consistent() {
let hash1 = crypto::hash_encryption_key(TEST_KEY);
let hash2 = crypto::hash_encryption_key(TEST_KEY);
assert_eq!(hash1, hash2);
// Different key should produce different hash
assert_ne!(hash1, hash3);
// Hash should not be empty
assert!(!hash1.is_empty());
// SHA-256 → 64 hex characters
assert_eq!(hash1.len(), 64);
}
#[test]
fn test_invalid_encrypted_format() {
let key = derive_test_key("test-key");
// Invalid formats should fail
let result = SecretManager::decrypt_value("no-colon", &key);
assert!(result.is_err());
let result = SecretManager::decrypt_value("too:many:colons", &key);
assert!(result.is_err());
let result = SecretManager::decrypt_value("invalid-base64:also-invalid", &key);
assert!(result.is_err());
fn test_compute_key_hash_different_keys() {
    // Distinct key strings must produce distinct fingerprints.
    let other = "different_key_that_is_32_chars_long!!";
    assert_ne!(
        crypto::hash_encryption_key(TEST_KEY),
        crypto::hash_encryption_key(other)
    );
}
}

View File

@@ -66,9 +66,9 @@ print(json.dumps(result))
let mut s = HashMap::new();
s.insert(
"api_key".to_string(),
"super_secret_key_do_not_expose".to_string(),
serde_json::json!("super_secret_key_do_not_expose"),
);
s.insert("password".to_string(), "secret_pass_123".to_string());
s.insert("password".to_string(), serde_json::json!("secret_pass_123"));
s
},
timeout: Some(10),
@@ -125,9 +125,9 @@ async fn test_shell_secrets_not_in_environ() {
let mut s = HashMap::new();
s.insert(
"api_key".to_string(),
"super_secret_key_do_not_expose".to_string(),
serde_json::json!("super_secret_key_do_not_expose"),
);
s.insert("password".to_string(), "secret_pass_123".to_string());
s.insert("password".to_string(), serde_json::json!("secret_pass_123"));
s
},
timeout: Some(10),
@@ -227,7 +227,7 @@ print(json.dumps({'secret_a': secrets.get('secret_a')}))
env: HashMap::new(),
secrets: {
let mut s = HashMap::new();
s.insert("secret_a".to_string(), "value_a".to_string());
s.insert("secret_a".to_string(), serde_json::json!("value_a"));
s
},
timeout: Some(10),
@@ -273,7 +273,7 @@ print(json.dumps({
env: HashMap::new(),
secrets: {
let mut s = HashMap::new();
s.insert("secret_b".to_string(), "value_b".to_string());
s.insert("secret_b".to_string(), serde_json::json!("value_b"));
s
},
timeout: Some(10),
@@ -458,7 +458,10 @@ echo "PASS: No secrets in environment"
env: HashMap::new(),
secrets: {
let mut s = HashMap::new();
s.insert("db_password".to_string(), "SUPER_SECRET_VALUE".to_string());
s.insert(
"db_password".to_string(),
serde_json::json!("SUPER_SECRET_VALUE"),
);
s
},
timeout: Some(10),
@@ -535,7 +538,10 @@ print(json.dumps({"leaked": leaked}))
env: HashMap::new(),
secrets: {
let mut s = HashMap::new();
s.insert("api_key".to_string(), "TOP_SECRET_API_KEY".to_string());
s.insert(
"api_key".to_string(),
serde_json::json!("TOP_SECRET_API_KEY"),
);
s
},
timeout: Some(10),