Fix clippy lints: use clamp, is_some, inspect_err; replace needless format! with to_string; allow too_many_arguments on webhook/workflow helpers
Some checks failed
CI / Rust Blocking Checks (push) Failing after 22s
CI / Web Blocking Checks (push) Failing after 26s
CI / Security Blocking Checks (push) Successful in 9s
CI / Web Advisory Checks (push) Successful in 32s
CI / Security Advisory Checks (push) Has been cancelled

This commit is contained in:
2026-03-04 23:44:45 -06:00
parent 6a5a3c2b78
commit 13749409cd
81 changed files with 468 additions and 599 deletions

View File

@@ -107,7 +107,7 @@ impl HistoryQueryParams {
pub fn to_repo_params(
&self,
) -> attune_common::repositories::entity_history::HistoryQueryParams {
let limit = (self.page_size.min(1000).max(1)) as i64;
let limit = (self.page_size.clamp(1, 1000)) as i64;
let offset = ((self.page.saturating_sub(1)) as i64) * limit;
attune_common::repositories::entity_history::HistoryQueryParams {

View File

@@ -160,7 +160,10 @@ pub async fn create_action(
request.validate()?;
// Check if action with same ref already exists
if let Some(_) = ActionRepository::find_by_ref(&state.db, &request.r#ref).await? {
if ActionRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Action with ref '{}' already exists",
request.r#ref

View File

@@ -1877,7 +1877,7 @@ pub async fn stream_artifact(
Some((
Ok(Event::default()
.event("content")
.data(String::from_utf8_lossy(&buf).into_owned())),
.data(String::from_utf8_lossy(&buf))),
TailState::Tailing {
full_path,
file_path,
@@ -1967,7 +1967,7 @@ pub async fn stream_artifact(
Some((
Ok(Event::default()
.event("append")
.data(String::from_utf8_lossy(&new_buf).into_owned())),
.data(String::from_utf8_lossy(&new_buf))),
TailState::Tailing {
full_path,
file_path,

View File

@@ -158,7 +158,10 @@ pub async fn register(
.map_err(|e| ApiError::ValidationError(format!("Invalid registration request: {}", e)))?;
// Check if login already exists
if let Some(_) = IdentityRepository::find_by_login(&state.db, &payload.login).await? {
if IdentityRepository::find_by_login(&state.db, &payload.login)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Identity with login '{}' already exists",
payload.login

View File

@@ -138,7 +138,10 @@ pub async fn create_key(
request.validate()?;
// Check if key with same ref already exists
if let Some(_) = KeyRepository::find_by_ref(&state.db, &request.r#ref).await? {
if KeyRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Key with ref '{}' already exists",
request.r#ref

View File

@@ -585,10 +585,9 @@ pub async fn upload_pack(
skip_tests,
)
.await
.map_err(|e| {
.inspect_err(|_e| {
// Clean up permanent storage on failure
let _ = std::fs::remove_dir_all(&final_path);
e
})?;
// Fetch the registered pack
@@ -947,8 +946,8 @@ async fn register_pack_internal(
// a best-effort optimisation for non-Docker (bare-metal) setups
// where the API host has the interpreter available.
if let Some(ref env_cfg) = exec_config.environment {
if env_cfg.env_type != "none" {
if !env_dir.exists() && !env_cfg.create_command.is_empty() {
if env_cfg.env_type != "none"
&& !env_dir.exists() && !env_cfg.create_command.is_empty() {
// Ensure parent directories exist
if let Some(parent) = env_dir.parent() {
let _ = std::fs::create_dir_all(parent);
@@ -1002,7 +1001,6 @@ async fn register_pack_internal(
}
}
}
}
}
// Attempt to install dependencies if manifest file exists.
@@ -1107,9 +1105,7 @@ async fn register_pack_internal(
if is_new_pack {
let _ = PackRepository::delete(&state.db, pack.id).await;
}
return Err(ApiError::BadRequest(format!(
"Pack registration failed: tests did not pass. Use force=true to register anyway."
)));
return Err(ApiError::BadRequest("Pack registration failed: tests did not pass. Use force=true to register anyway.".to_string()));
}
if !test_passed && force {
@@ -1359,10 +1355,9 @@ pub async fn install_pack(
request.skip_tests,
)
.await
.map_err(|e| {
.inspect_err(|_e| {
// Clean up the permanent storage if registration fails
let _ = std::fs::remove_dir_all(&final_path);
e
})?;
// Fetch the registered pack

View File

@@ -290,7 +290,10 @@ pub async fn create_rule(
request.validate()?;
// Check if rule with same ref already exists
if let Some(_) = RuleRepository::find_by_ref(&state.db, &request.r#ref).await? {
if RuleRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Rule with ref '{}' already exists",
request.r#ref

View File

@@ -198,7 +198,10 @@ pub async fn create_trigger(
request.validate()?;
// Check if trigger with same ref already exists
if let Some(_) = TriggerRepository::find_by_ref(&state.db, &request.r#ref).await? {
if TriggerRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Trigger with ref '{}' already exists",
request.r#ref
@@ -623,7 +626,10 @@ pub async fn create_sensor(
request.validate()?;
// Check if sensor with same ref already exists
if let Some(_) = SensorRepository::find_by_ref(&state.db, &request.r#ref).await? {
if SensorRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Sensor with ref '{}' already exists",
request.r#ref

View File

@@ -714,6 +714,7 @@ pub async fn receive_webhook(
}
// Helper function to log webhook events
#[allow(clippy::too_many_arguments)]
async fn log_webhook_event(
state: &AppState,
trigger: &attune_common::models::trigger::Trigger,
@@ -753,6 +754,7 @@ async fn log_webhook_event(
}
// Helper function to log failures when trigger is not found
#[allow(clippy::too_many_arguments)]
async fn log_webhook_failure(
_state: &AppState,
webhook_key: String,

View File

@@ -181,7 +181,10 @@ pub async fn create_workflow(
request.validate()?;
// Check if workflow with same ref already exists
if let Some(_) = WorkflowDefinitionRepository::find_by_ref(&state.db, &request.r#ref).await? {
if WorkflowDefinitionRepository::find_by_ref(&state.db, &request.r#ref)
.await?
.is_some()
{
return Err(ApiError::Conflict(format!(
"Workflow with ref '{}' already exists",
request.r#ref
@@ -519,7 +522,7 @@ pub async fn update_workflow_file(
/// Write a workflow definition to disk as YAML
async fn write_workflow_yaml(
packs_base_dir: &PathBuf,
packs_base_dir: &std::path::Path,
pack_ref: &str,
request: &SaveWorkflowFileRequest,
) -> Result<(), ApiError> {
@@ -630,9 +633,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
"# Action definition for workflow {}.{}",
pack_ref, request.name
));
lines.push(format!(
"# The workflow graph (tasks, transitions, variables) is in:"
));
lines.push("# The workflow graph (tasks, transitions, variables) is in:".to_string());
lines.push(format!(
"# actions/workflows/{}.workflow.yaml",
request.name
@@ -646,7 +647,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
lines.push(format!("description: \"{}\"", desc.replace('"', "\\\"")));
}
}
lines.push(format!("enabled: true"));
lines.push("enabled: true".to_string());
lines.push(format!(
"workflow_file: workflows/{}.workflow.yaml",
request.name
@@ -658,7 +659,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
if !obj.is_empty() {
lines.push(String::new());
let params_yaml = serde_yaml_ng::to_string(params).unwrap_or_default();
lines.push(format!("parameters:"));
lines.push("parameters:".to_string());
// Indent the YAML output under `parameters:`
for line in params_yaml.lines() {
lines.push(format!(" {}", line));
@@ -673,7 +674,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
if !obj.is_empty() {
lines.push(String::new());
let output_yaml = serde_yaml_ng::to_string(output).unwrap_or_default();
lines.push(format!("output:"));
lines.push("output:".to_string());
for line in output_yaml.lines() {
lines.push(format!(" {}", line));
}
@@ -685,7 +686,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
if let Some(ref tags) = request.tags {
if !tags.is_empty() {
lines.push(String::new());
lines.push(format!("tags:"));
lines.push("tags:".to_string());
for tag in tags {
lines.push(format!(" - {}", tag));
}
@@ -701,6 +702,7 @@ fn build_action_yaml(pack_ref: &str, request: &SaveWorkflowFileRequest) -> Strin
/// This ensures the workflow appears in action lists and the action palette in the
/// workflow builder. The action is linked to the workflow definition via the
/// `workflow_def` FK.
#[allow(clippy::too_many_arguments)]
async fn create_companion_action(
db: &sqlx::PgPool,
workflow_ref: &str,
@@ -835,6 +837,7 @@ async fn update_companion_action(
///
/// If the action already exists, update it. If it doesn't exist (e.g., for workflows
/// created before the companion-action fix), create it.
#[allow(clippy::too_many_arguments)]
async fn ensure_companion_action(
db: &sqlx::PgPool,
workflow_def_id: i64,

View File

@@ -362,11 +362,11 @@ impl Drop for TestContext {
let test_packs_dir = self.test_packs_dir.clone();
// Spawn cleanup task in background
let _ = tokio::spawn(async move {
drop(tokio::spawn(async move {
if let Err(e) = cleanup_test_schema(&schema).await {
eprintln!("Failed to cleanup test schema {}: {}", schema, e);
}
});
}));
// Cleanup the test packs directory synchronously
let _ = std::fs::remove_dir_all(&test_packs_dir);

View File

@@ -64,7 +64,7 @@ async fn test_sync_pack_workflows_endpoint() {
// Use unique pack name to avoid conflicts in parallel tests
let pack_name = format!(
"test_pack_{}",
uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string()
&uuid::Uuid::new_v4().to_string().replace("-", "")[..8]
);
// Create temporary directory for pack workflows
@@ -100,7 +100,7 @@ async fn test_validate_pack_workflows_endpoint() {
// Use unique pack name to avoid conflicts in parallel tests
let pack_name = format!(
"test_pack_{}",
uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string()
&uuid::Uuid::new_v4().to_string().replace("-", "")[..8]
);
// Create pack in database
@@ -158,7 +158,7 @@ async fn test_sync_workflows_requires_authentication() {
// Use unique pack name to avoid conflicts in parallel tests
let pack_name = format!(
"test_pack_{}",
uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string()
&uuid::Uuid::new_v4().to_string().replace("-", "")[..8]
);
// Create pack in database
@@ -185,7 +185,7 @@ async fn test_validate_workflows_requires_authentication() {
// Use unique pack name to avoid conflicts in parallel tests
let pack_name = format!(
"test_pack_{}",
uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string()
&uuid::Uuid::new_v4().to_string().replace("-", "")[..8]
);
// Create pack in database

View File

@@ -14,7 +14,7 @@ use helpers::*;
fn unique_pack_name() -> String {
format!(
"test_pack_{}",
uuid::Uuid::new_v4().to_string().replace("-", "")[..8].to_string()
&uuid::Uuid::new_v4().to_string().replace("-", "")[..8]
)
}