re-uploading work

This commit is contained in:
2026-02-04 17:46:30 -06:00
commit 3b14c65998
1388 changed files with 381262 additions and 0 deletions

View File

@@ -0,0 +1,360 @@
//! Registry client for fetching and parsing pack indices
//!
//! This module provides functionality for:
//! - Fetching index files from HTTP(S) and file:// URLs
//! - Caching indices with TTL-based expiration
//! - Searching packs across multiple registries
//! - Handling authenticated registries
use super::{PackIndex, PackIndexEntry};
use crate::config::{PackRegistryConfig, RegistryIndexConfig};
use crate::error::{Error, Result};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};
use std::time::{Duration, SystemTime};
/// A registry index held in the in-memory cache, together with the
/// bookkeeping needed for TTL-based expiration.
#[derive(Clone)]
struct CachedIndex {
    /// The parsed index as fetched from the registry
    index: PackIndex,
    /// When this cache entry was created (anchor for the TTL check)
    cached_at: SystemTime,
    /// Time-to-live in seconds; once exceeded the entry is considered stale
    ttl: u64,
}
impl CachedIndex {
    /// True when this entry is older than its TTL.
    ///
    /// A system clock that has gone backwards (`duration_since` error) is
    /// treated as expiry, forcing a re-fetch instead of serving stale data.
    fn is_expired(&self) -> bool {
        SystemTime::now()
            .duration_since(self.cached_at)
            .map(|age| age.as_secs() > self.ttl)
            .unwrap_or(true)
    }
}
/// Registry client for fetching and managing pack indices.
///
/// Holds the registry configuration, a shared HTTP client, and an
/// in-memory, TTL-expiring cache of parsed indices keyed by registry URL.
pub struct RegistryClient {
    /// Registry configuration (registries, cache/TTL, timeouts, HTTP policy)
    config: PackRegistryConfig,
    /// HTTP client used for all index fetches (configured timeout + UA)
    http_client: reqwest::Client,
    /// Cache of fetched indices (registry URL -> CachedIndex), shared
    /// behind a read/write lock
    cache: Arc<RwLock<HashMap<String, CachedIndex>>>,
}
impl RegistryClient {
/// Create a new registry client
pub fn new(config: PackRegistryConfig) -> Result<Self> {
let timeout = Duration::from_secs(config.timeout);
let http_client = reqwest::Client::builder()
.timeout(timeout)
.user_agent(format!("attune-registry-client/{}", env!("CARGO_PKG_VERSION")))
.build()
.map_err(|e| Error::Internal(format!("Failed to create HTTP client: {}", e)))?;
Ok(Self {
config,
http_client,
cache: Arc::new(RwLock::new(HashMap::new())),
})
}
/// Get all enabled registries sorted by priority (lower number = higher priority)
pub fn get_registries(&self) -> Vec<RegistryIndexConfig> {
let mut registries: Vec<_> = self.config.indices
.iter()
.filter(|r| r.enabled)
.cloned()
.collect();
// Sort by priority (ascending)
registries.sort_by_key(|r| r.priority);
registries
}
/// Fetch a pack index from a registry
pub async fn fetch_index(&self, registry: &RegistryIndexConfig) -> Result<PackIndex> {
// Check cache first if caching is enabled
if self.config.cache_enabled {
if let Some(cached) = self.get_cached_index(&registry.url) {
if !cached.is_expired() {
tracing::debug!("Using cached index for registry: {}", registry.url);
return Ok(cached.index);
}
}
}
// Fetch fresh index
tracing::info!("Fetching index from registry: {}", registry.url);
let index = self.fetch_index_from_url(registry).await?;
// Cache the result
if self.config.cache_enabled {
self.cache_index(&registry.url, index.clone());
}
Ok(index)
}
/// Fetch index from URL (bypassing cache)
async fn fetch_index_from_url(&self, registry: &RegistryIndexConfig) -> Result<PackIndex> {
let url = &registry.url;
// Handle file:// URLs
if url.starts_with("file://") {
return self.fetch_index_from_file(url).await;
}
// Validate HTTPS if allow_http is false
if !self.config.allow_http && url.starts_with("http://") {
return Err(Error::Configuration(format!(
"HTTP registry not allowed: {}. Set allow_http: true to enable.",
url
)));
}
// Build HTTP request
let mut request = self.http_client.get(url);
// Add custom headers
for (key, value) in &registry.headers {
request = request.header(key, value);
}
// Send request
let response = request
.send()
.await
.map_err(|e| Error::internal(format!("Failed to fetch registry index: {}", e)))?;
// Check status
if !response.status().is_success() {
return Err(Error::internal(format!(
"Registry returned error status {}: {}",
response.status(),
url
)));
}
// Parse JSON
let index: PackIndex = response
.json()
.await
.map_err(|e| Error::internal(format!("Failed to parse registry index: {}", e)))?;
Ok(index)
}
/// Fetch index from file:// URL
async fn fetch_index_from_file(&self, url: &str) -> Result<PackIndex> {
let path = url.strip_prefix("file://")
.ok_or_else(|| Error::Configuration(format!("Invalid file URL: {}", url)))?;
let path = PathBuf::from(path);
let content = tokio::fs::read_to_string(&path)
.await
.map_err(|e| Error::internal(format!("Failed to read index file: {}", e)))?;
let index: PackIndex = serde_json::from_str(&content)
.map_err(|e| Error::internal(format!("Failed to parse index file: {}", e)))?;
Ok(index)
}
/// Get cached index if available
fn get_cached_index(&self, url: &str) -> Option<CachedIndex> {
let cache = self.cache.read().ok()?;
cache.get(url).cloned()
}
/// Cache an index
fn cache_index(&self, url: &str, index: PackIndex) {
let cached = CachedIndex {
index,
cached_at: SystemTime::now(),
ttl: self.config.cache_ttl,
};
if let Ok(mut cache) = self.cache.write() {
cache.insert(url.to_string(), cached);
}
}
/// Clear the cache
pub fn clear_cache(&self) {
if let Ok(mut cache) = self.cache.write() {
cache.clear();
}
}
/// Search for a pack by reference across all registries
pub async fn search_pack(&self, pack_ref: &str) -> Result<Option<(PackIndexEntry, String)>> {
let registries = self.get_registries();
for registry in registries {
match self.fetch_index(&registry).await {
Ok(index) => {
if let Some(pack) = index.packs.iter().find(|p| p.pack_ref == pack_ref) {
return Ok(Some((pack.clone(), registry.url.clone())));
}
}
Err(e) => {
tracing::warn!(
"Failed to fetch registry {}: {}",
registry.url,
e
);
continue;
}
}
}
Ok(None)
}
/// Search for packs by keyword across all registries
pub async fn search_packs(&self, keyword: &str) -> Result<Vec<(PackIndexEntry, String)>> {
let registries = self.get_registries();
let mut results = Vec::new();
let keyword_lower = keyword.to_lowercase();
for registry in registries {
match self.fetch_index(&registry).await {
Ok(index) => {
for pack in index.packs {
// Search in ref, label, description, and keywords
let matches = pack.pack_ref.to_lowercase().contains(&keyword_lower)
|| pack.label.to_lowercase().contains(&keyword_lower)
|| pack.description.to_lowercase().contains(&keyword_lower)
|| pack.keywords.iter().any(|k| k.to_lowercase().contains(&keyword_lower));
if matches {
results.push((pack, registry.url.clone()));
}
}
}
Err(e) => {
tracing::warn!(
"Failed to fetch registry {}: {}",
registry.url,
e
);
continue;
}
}
}
Ok(results)
}
/// Get pack from specific registry
pub async fn get_pack_from_registry(
&self,
pack_ref: &str,
registry_name: &str,
) -> Result<Option<PackIndexEntry>> {
// Find registry by name
let registry = self.config.indices
.iter()
.find(|r| r.name.as_deref() == Some(registry_name))
.ok_or_else(|| Error::not_found("registry", "name", registry_name))?;
let index = self.fetch_index(registry).await?;
Ok(index.packs.into_iter().find(|p| p.pack_ref == pack_ref))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::RegistryIndexConfig;

    #[test]
    fn test_cached_index_expiration() {
        let sample_index = PackIndex {
            registry_name: "Test".to_string(),
            registry_url: "https://example.com".to_string(),
            version: "1.0".to_string(),
            last_updated: "2024-01-20T12:00:00Z".to_string(),
            packs: vec![],
        };

        // A freshly-stamped entry with a one-hour TTL is still valid.
        let fresh = CachedIndex {
            index: sample_index,
            cached_at: SystemTime::now(),
            ttl: 3600,
        };
        assert!(!fresh.is_expired());

        // An entry stamped two hours ago with a one-hour TTL has expired.
        let stale = CachedIndex {
            index: fresh.index.clone(),
            cached_at: SystemTime::now() - Duration::from_secs(7200),
            ttl: 3600,
        };
        assert!(stale.is_expired());
    }

    #[test]
    fn test_get_registries_sorted() {
        // Registries listed out of priority order, plus one disabled entry.
        let indices = vec![
            RegistryIndexConfig {
                url: "https://registry3.example.com".to_string(),
                priority: 3,
                enabled: true,
                name: Some("Registry 3".to_string()),
                headers: HashMap::new(),
            },
            RegistryIndexConfig {
                url: "https://registry1.example.com".to_string(),
                priority: 1,
                enabled: true,
                name: Some("Registry 1".to_string()),
                headers: HashMap::new(),
            },
            RegistryIndexConfig {
                url: "https://registry2.example.com".to_string(),
                priority: 2,
                enabled: true,
                name: Some("Registry 2".to_string()),
                headers: HashMap::new(),
            },
            RegistryIndexConfig {
                url: "https://disabled.example.com".to_string(),
                priority: 0,
                enabled: false,
                name: Some("Disabled".to_string()),
                headers: HashMap::new(),
            },
        ];
        let config = PackRegistryConfig {
            enabled: true,
            indices,
            cache_ttl: 3600,
            cache_enabled: true,
            timeout: 120,
            verify_checksums: true,
            allow_http: false,
        };

        let client = RegistryClient::new(config).unwrap();
        let registries = client.get_registries();

        // The disabled registry is filtered out; the rest come back in
        // ascending priority order.
        assert_eq!(registries.len(), 3);
        let priorities: Vec<_> = registries.iter().map(|r| r.priority).collect();
        assert_eq!(priorities, vec![1, 2, 3]);
    }
}

View File

@@ -0,0 +1,525 @@
//! Pack Dependency Validation
//!
//! This module provides functionality for validating pack dependencies including:
//! - Runtime dependencies (Python, Node.js, shell versions)
//! - Pack dependencies with version constraints
//! - Semver version parsing and comparison
use crate::error::{Error, Result};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::process::Command;
/// Aggregate result of validating a pack's dependencies.
///
/// Produced by `DependencyValidator::validate`; `valid` is the summary
/// flag, while the per-dependency vectors carry the detailed outcomes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DependencyValidation {
    /// Whether all dependencies (runtime and pack) are satisfied
    pub valid: bool,
    /// Per-runtime-dependency validation outcomes
    pub runtime_deps: Vec<RuntimeDepValidation>,
    /// Per-pack-dependency validation outcomes
    pub pack_deps: Vec<PackDepValidation>,
    /// Warnings (non-blocking issues)
    pub warnings: Vec<String>,
    /// Errors (blocking issues); non-empty implies `valid == false`
    pub errors: Vec<String>,
}
/// Outcome of checking a single runtime dependency (e.g. "python3>=3.8").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeDepValidation {
    /// Runtime name (e.g., "python3", "nodejs")
    pub runtime: String,
    /// Required version constraint (e.g., ">=3.8", "^14.0.0"); `None`
    /// means mere presence of the runtime is sufficient
    pub required_version: Option<String>,
    /// Version detected on this system; `None` if the runtime was not found
    pub detected_version: Option<String>,
    /// Whether the requirement is satisfied
    pub satisfied: bool,
    /// Human-readable reason when `satisfied` is false
    pub error: Option<String>,
}
/// Outcome of checking a single pack-to-pack dependency.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackDepValidation {
    /// Pack reference being depended on
    pub pack_ref: String,
    /// Required version constraint (e.g., "1.0.0", ">=1.2.0", "^2.0.0")
    pub required_version: String,
    /// Installed version, if the pack is installed at all
    pub installed_version: Option<String>,
    /// Whether the requirement is satisfied
    pub satisfied: bool,
    /// Human-readable reason when `satisfied` is false
    pub error: Option<String>,
}
/// Validates runtime and pack dependencies, caching runtime probes.
pub struct DependencyValidator {
    /// Cache of runtime probes: runtime name -> detected version (`None`
    /// when probing found nothing). Avoids repeated subprocess spawns when
    /// the same runtime is checked more than once.
    runtime_cache: HashMap<String, Option<String>>,
}
impl DependencyValidator {
    /// Construct a validator with an empty runtime-version cache.
    pub fn new() -> Self {
        Self {
            runtime_cache: HashMap::new(),
        }
    }

    /// Validate every runtime and pack dependency for a pack.
    ///
    /// `pack_deps` is a list of `(pack_ref, version_constraint)` pairs, and
    /// `installed_packs` maps installed pack refs to their versions. The
    /// returned report is `valid` only if every dependency is satisfied;
    /// each unsatisfied dependency also contributes an entry to `errors`.
    pub async fn validate(
        &mut self,
        runtime_deps: &[String],
        pack_deps: &[(String, String)],
        installed_packs: &HashMap<String, String>,
    ) -> Result<DependencyValidation> {
        let mut report = DependencyValidation {
            valid: true,
            runtime_deps: Vec::new(),
            pack_deps: Vec::new(),
            warnings: Vec::new(),
            errors: Vec::new(),
        };

        // Runtimes first: each check may shell out; results are cached.
        for dep in runtime_deps {
            let outcome = self.validate_runtime_dep(dep).await?;
            if !outcome.satisfied {
                report.valid = false;
                if let Some(msg) = outcome.error.as_ref() {
                    report.errors.push(msg.clone());
                }
            }
            report.runtime_deps.push(outcome);
        }

        // Then pack-to-pack dependencies against the installed set.
        for (pack_ref, constraint) in pack_deps {
            let outcome = self.validate_pack_dep(pack_ref, constraint, installed_packs)?;
            if !outcome.satisfied {
                report.valid = false;
                if let Some(msg) = outcome.error.as_ref() {
                    report.errors.push(msg.clone());
                }
            }
            report.pack_deps.push(outcome);
        }

        Ok(report)
    }

    /// Check a single runtime dependency string such as "python3>=3.8".
    async fn validate_runtime_dep(&mut self, runtime_dep: &str) -> Result<RuntimeDepValidation> {
        let (runtime, constraint) = parse_runtime_dep(runtime_dep)?;

        // Probe the system at most once per runtime name.
        let detected_version = match self.runtime_cache.get(&runtime) {
            Some(cached) => cached.clone(),
            None => {
                let probed = detect_runtime_version(&runtime).await;
                self.runtime_cache.insert(runtime.clone(), probed.clone());
                probed
            }
        };

        let satisfied = match (&detected_version, &constraint) {
            (Some(detected), Some(required)) => match_version_constraint(detected, required)?,
            (None, Some(_)) => false,
            // No constraint: presence of the runtime is enough.
            (detected, None) => detected.is_some(),
        };

        let error = if satisfied {
            None
        } else if detected_version.is_none() {
            Some(format!("Runtime '{}' not found on system", runtime))
        } else if let Some(required) = constraint.as_ref() {
            Some(format!(
                "Runtime '{}' version {} does not satisfy constraint '{}'",
                runtime,
                detected_version.as_ref().unwrap(),
                required
            ))
        } else {
            None
        };

        Ok(RuntimeDepValidation {
            runtime,
            required_version: constraint,
            detected_version,
            satisfied,
            error,
        })
    }

    /// Check a single pack dependency against the set of installed packs.
    fn validate_pack_dep(
        &self,
        pack_ref: &str,
        version_constraint: &str,
        installed_packs: &HashMap<String, String>,
    ) -> Result<PackDepValidation> {
        let installed_version = installed_packs.get(pack_ref).cloned();

        let satisfied = match installed_version.as_ref() {
            Some(installed) => match_version_constraint(installed, version_constraint)?,
            None => false,
        };

        let error = if satisfied {
            None
        } else if let Some(installed) = installed_version.as_ref() {
            Some(format!(
                "Pack '{}' version {} does not satisfy constraint '{}'",
                pack_ref, installed, version_constraint
            ))
        } else {
            Some(format!("Required pack '{}' is not installed", pack_ref))
        };

        Ok(PackDepValidation {
            pack_ref: pack_ref.to_string(),
            required_version: version_constraint.to_string(),
            installed_version,
            satisfied,
            error,
        })
    }
}
/// `Default` delegates to `DependencyValidator::new` (empty probe cache).
impl Default for DependencyValidator {
    fn default() -> Self {
        Self::new()
    }
}
/// Parse a runtime dependency string
/// (e.g., "python3>=3.8" -> ("python3", Some(">=3.8"))).
///
/// Two-character operators are probed before their one-character prefixes,
/// so ">=3.8" is never read as ">" followed by "=3.8". Both halves are
/// trimmed, so "python3 >= 3.8" also parses.
fn parse_runtime_dep(runtime_dep: &str) -> Result<(String, Option<String>)> {
    const OPERATORS: [&str; 7] = [">=", "<=", "^", "~", ">", "<", "="];
    match OPERATORS.iter().find_map(|op| runtime_dep.find(op)) {
        Some(pos) => {
            let (name, constraint) = runtime_dep.split_at(pos);
            Ok((name.trim().to_string(), Some(constraint.trim().to_string())))
        }
        // No operator anywhere: bare runtime name, no version constraint.
        None => Ok((runtime_dep.trim().to_string(), None)),
    }
}
/// Detect the installed version of a named runtime, if any.
///
/// Only Python, Node.js, and shells are recognized; any other name is
/// reported as absent (`None`) rather than producing an error.
async fn detect_runtime_version(runtime: &str) -> Option<String> {
    if matches!(runtime, "python3" | "python") {
        detect_python_version().await
    } else if matches!(runtime, "nodejs" | "node") {
        detect_nodejs_version().await
    } else if matches!(runtime, "shell" | "bash" | "sh") {
        detect_shell_version().await
    } else {
        None
    }
}
/// Detect Python version
async fn detect_python_version() -> Option<String> {
// Try python3 first
if let Ok(output) = Command::new("python3").arg("--version").output() {
if output.status.success() {
let version_str = String::from_utf8_lossy(&output.stdout);
return parse_python_version(&version_str);
}
}
// Fallback to python
if let Ok(output) = Command::new("python").arg("--version").output() {
if output.status.success() {
let version_str = String::from_utf8_lossy(&output.stdout);
return parse_python_version(&version_str);
}
}
None
}
/// Extract the version number from `python --version` output
/// (e.g., "Python 3.9.7" -> "3.9.7").
///
/// Returns `None` when the output has fewer than two whitespace-separated
/// tokens.
fn parse_python_version(output: &str) -> Option<String> {
    output
        .split_whitespace()
        .nth(1)
        .map(|version| version.to_string())
}
/// Detect the Node.js version, trying `node` then `nodejs`.
///
/// `node --version` prints something like "v18.16.0"; the leading 'v' is
/// stripped before returning.
async fn detect_nodejs_version() -> Option<String> {
    for binary in ["node", "nodejs"] {
        if let Ok(output) = Command::new(binary).arg("--version").output() {
            if output.status.success() {
                let version_str = String::from_utf8_lossy(&output.stdout);
                return Some(version_str.trim().trim_start_matches('v').to_string());
            }
        }
    }
    None
}
/// Detect the shell version.
///
/// Tries `bash --version` first and parses the version number out of its
/// first output line; falls back to reporting "1.0.0" when a plain `sh`
/// can at least be spawned, and `None` otherwise.
async fn detect_shell_version() -> Option<String> {
    // Bash version
    if let Ok(output) = Command::new("bash").arg("--version").output() {
        if output.status.success() {
            let version_str = String::from_utf8_lossy(&output.stdout);
            if let Some(line) = version_str.lines().next() {
                // Parse "GNU bash, version 5.1.16(1)-release"
                if let Some(start) = line.find("version ") {
                    let version_part = &line[start + 8..];
                    // Fix: the original only returned a version when a
                    // non-numeric terminator was found after it. If the
                    // line ends right after the digits (no "(1)-release"
                    // suffix), `find` returns None and the version was
                    // silently dropped — default to the full remainder.
                    let end = version_part
                        .find(|c: char| !c.is_numeric() && c != '.')
                        .unwrap_or(version_part.len());
                    // Fix: guard against an empty capture (end == 0), which
                    // previously produced a useless Some("").
                    if end > 0 {
                        return Some(version_part[..end].to_string());
                    }
                }
            }
        }
    }
    // Default to "1.0.0" if shell exists (spawn succeeded, regardless of
    // exit status — some shells don't support --version).
    if Command::new("sh").arg("--version").output().is_ok() {
        return Some("1.0.0".to_string());
    }
    None
}
/// Match a version string against a constraint.
///
/// Supported constraint forms: "*" (anything), ">=", "<=", ">", "<", "="
/// comparisons, "^" caret ranges, "~" tilde ranges, and a bare version for
/// exact equality. Whitespace after the operator is tolerated.
///
/// # Errors
/// Propagates version-parse failures from either operand.
fn match_version_constraint(version: &str, constraint: &str) -> Result<bool> {
    // Handle wildcard constraint
    if constraint == "*" {
        return Ok(true);
    }
    // Idiom: `strip_prefix` replaces the original `starts_with` + manual
    // byte slicing; two-character operators are matched before their
    // one-character prefixes so ">=" is not read as ">".
    if let Some(required) = constraint.strip_prefix(">=") {
        Ok(compare_versions(version, required.trim())? >= 0)
    } else if let Some(required) = constraint.strip_prefix("<=") {
        Ok(compare_versions(version, required.trim())? <= 0)
    } else if let Some(required) = constraint.strip_prefix('>') {
        Ok(compare_versions(version, required.trim())? > 0)
    } else if let Some(required) = constraint.strip_prefix('<') {
        Ok(compare_versions(version, required.trim())? < 0)
    } else if let Some(required) = constraint.strip_prefix('=') {
        Ok(compare_versions(version, required.trim())? == 0)
    } else if let Some(required) = constraint.strip_prefix('^') {
        // Caret: compatible with version — ^1.2.3 := >=1.2.3 <2.0.0
        match_caret_constraint(version, required.trim())
    } else if let Some(required) = constraint.strip_prefix('~') {
        // Tilde: approximately equivalent — ~1.2.3 := >=1.2.3 <1.3.0
        match_tilde_constraint(version, required.trim())
    } else {
        // Bare version: exact match
        Ok(compare_versions(version, constraint)? == 0)
    }
}
/// Compare two semver versions (-1: v1 < v2, 0: v1 == v2, 1: v1 > v2).
///
/// # Errors
/// Propagates parse failures from either version string.
fn compare_versions(v1: &str, v2: &str) -> Result<i32> {
    use std::cmp::Ordering;

    let a = parse_version(v1)?;
    let b = parse_version(v2)?;
    // `[u32; 3]` compares lexicographically: major, then minor, then patch.
    Ok(match a.cmp(&b) {
        Ordering::Less => -1,
        Ordering::Equal => 0,
        Ordering::Greater => 1,
    })
}
/// Parse a version string into `[major, minor, patch]`.
///
/// Missing components default to 0 ("1.2" -> [1, 2, 0]) and components
/// beyond the third are ignored. Pre-release/build suffixes (e.g.
/// "1.2.3-beta") are not supported and yield a validation error.
fn parse_version(version: &str) -> Result<[u32; 3]> {
    let parts: Vec<&str> = version.split('.').collect();
    if parts.is_empty() {
        return Err(Error::validation(format!("Invalid version: {}", version)));
    }
    let mut result = [0u32; 3];
    // Fill up to three slots; each component must be a plain integer.
    for (slot, part) in result.iter_mut().zip(parts.iter().take(3)) {
        *slot = part
            .parse()
            .map_err(|_| Error::validation(format!("Invalid version number: {}", part)))?;
    }
    Ok(result)
}
/// Match a caret constraint (^1.2.3 := >=1.2.3 <2.0.0).
///
/// Follows the npm-style caret rule: the leftmost non-zero component pins
/// the compatible range (^0.2.x pins the minor, ^0.0.x pins the patch).
fn match_caret_constraint(version: &str, required: &str) -> Result<bool> {
    let [v_major, v_minor, v_patch] = parse_version(version)?;
    let [r_major, r_minor, r_patch] = parse_version(required)?;

    // Lower bound: the candidate must not precede the required version.
    if compare_versions(version, required)? < 0 {
        return Ok(false);
    }

    let compatible = if r_major > 0 {
        // Major pins the range.
        v_major == r_major
    } else if r_minor > 0 {
        // 0.x.y: minor pins the range.
        v_major == 0 && v_minor == r_minor
    } else {
        // 0.0.z: only the exact patch is compatible.
        v_major == 0 && v_minor == 0 && v_patch == r_patch
    };
    Ok(compatible)
}
/// Match a tilde constraint (~1.2.3 := >=1.2.3 <1.3.0).
///
/// The candidate must be at least the required version and share its
/// major.minor pair; the patch component is free to float upward.
fn match_tilde_constraint(version: &str, required: &str) -> Result<bool> {
    let [v_major, v_minor, _] = parse_version(version)?;
    let [r_major, r_minor, _] = parse_version(required)?;
    Ok(compare_versions(version, required)? >= 0
        && v_major == r_major
        && v_minor == r_minor)
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_runtime_dep() {
        let cases = [
            ("python3>=3.8", "python3", Some(">=3.8")),
            ("nodejs", "nodejs", None),
            ("python3 >= 3.8", "python3", Some(">= 3.8")),
        ];
        for (input, want_runtime, want_version) in cases {
            let (runtime, version) = parse_runtime_dep(input).unwrap();
            assert_eq!(runtime, want_runtime);
            assert_eq!(version, want_version.map(String::from));
        }
    }

    #[test]
    fn test_parse_version() {
        for (input, want) in [
            ("1.2.3", [1, 2, 3]),
            ("1.0.0", [1, 0, 0]),
            ("0.1", [0, 1, 0]),
            ("2", [2, 0, 0]),
        ] {
            assert_eq!(parse_version(input).unwrap(), want);
        }
    }

    #[test]
    fn test_compare_versions() {
        for (a, b, want) in [
            ("1.2.3", "1.2.3", 0),
            ("1.2.3", "1.2.4", -1),
            ("1.3.0", "1.2.9", 1),
            ("2.0.0", "1.9.9", 1),
        ] {
            assert_eq!(compare_versions(a, b).unwrap(), want);
        }
    }

    #[test]
    fn test_match_version_constraint() {
        for (version, constraint, want) in [
            ("1.2.3", ">=1.2.0", true),
            ("1.2.3", "<=1.3.0", true),
            ("1.2.3", ">1.2.2", true),
            ("1.2.3", "<1.2.4", true),
            ("1.2.3", "=1.2.3", true),
            ("1.2.3", "1.2.3", true),
            ("1.2.3", ">=1.2.4", false),
            ("1.2.3", "<1.2.3", false),
        ] {
            assert_eq!(match_version_constraint(version, constraint).unwrap(), want);
        }
    }

    #[test]
    fn test_match_caret_constraint() {
        // ^1.2.3 := >=1.2.3 <2.0.0; ^0.2.3 := >=0.2.3 <0.3.0; ^0.0.3 := =0.0.3
        for (version, required, want) in [
            ("1.2.3", "1.2.3", true),
            ("1.2.4", "1.2.3", true),
            ("1.9.9", "1.2.3", true),
            ("2.0.0", "1.2.3", false),
            ("1.2.2", "1.2.3", false),
            ("0.2.3", "0.2.3", true),
            ("0.2.9", "0.2.3", true),
            ("0.3.0", "0.2.3", false),
            ("0.0.3", "0.0.3", true),
            ("0.0.4", "0.0.3", false),
        ] {
            assert_eq!(match_caret_constraint(version, required).unwrap(), want);
        }
    }

    #[test]
    fn test_match_tilde_constraint() {
        // ~1.2.3 := >=1.2.3 <1.3.0
        for (version, required, want) in [
            ("1.2.3", "1.2.3", true),
            ("1.2.9", "1.2.3", true),
            ("1.3.0", "1.2.3", false),
            ("1.2.2", "1.2.3", false),
        ] {
            assert_eq!(match_tilde_constraint(version, required).unwrap(), want);
        }
    }

    #[test]
    fn test_parse_python_version() {
        for (output, want) in [
            ("Python 3.9.7", Some("3.9.7")),
            ("Python 2.7.18", Some("2.7.18")),
        ] {
            assert_eq!(parse_python_version(output), want.map(String::from));
        }
    }
}

View File

@@ -0,0 +1,722 @@
//! Pack installer module for downloading and extracting packs from various sources
//!
//! This module provides functionality for:
//! - Cloning git repositories
//! - Downloading and extracting archives (zip, tar.gz)
//! - Copying local directories
//! - Verifying checksums
//! - Resolving registry references to install sources
//! - Progress reporting during installation
use super::{Checksum, InstallSource, PackIndexEntry, RegistryClient};
use crate::config::PackRegistryConfig;
use crate::error::{Error, Result};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tokio::fs;
use tokio::process::Command;
/// Progress callback type: a shareable, thread-safe closure invoked with
/// each `ProgressEvent` emitted during installation.
pub type ProgressCallback = Arc<dyn Fn(ProgressEvent) + Send + Sync>;
/// Progress event emitted during pack installation.
///
/// Delivered to the installer's optional `ProgressCallback`; variants map
/// to the phases of an install (step lifecycle, download, extraction,
/// verification) plus free-form warning/info messages.
#[derive(Debug, Clone)]
pub enum ProgressEvent {
    /// A named step has started
    StepStarted {
        step: String,
        message: String,
    },
    /// A named step has completed
    StepCompleted {
        step: String,
        message: String,
    },
    /// Download progress; `total_bytes` is `None` when the size is unknown
    Downloading {
        url: String,
        downloaded_bytes: u64,
        total_bytes: Option<u64>,
    },
    /// A file is being extracted from an archive
    Extracting {
        file: String,
    },
    /// Checksum/content verification progress
    Verifying {
        message: String,
    },
    /// Non-fatal warning
    Warning {
        message: String,
    },
    /// Informational message
    Info {
        message: String,
    },
}
/// Pack installer for handling the various installation sources
/// (git, remote/local archives, local directories, registry references).
pub struct PackInstaller {
    /// Scratch directory where downloads/clones/extractions are staged
    temp_dir: PathBuf,
    /// Registry client for resolving pack references; `None` when no
    /// registry configuration was supplied
    registry_client: Option<RegistryClient>,
    /// Whether to verify archive checksums (taken from the registry config)
    verify_checksums: bool,
    /// Optional callback for progress reporting
    progress_callback: Option<ProgressCallback>,
}
/// Information about a pack that has been staged by the installer.
#[derive(Debug, Clone)]
pub struct InstalledPack {
    /// Path to the pack root (the directory containing pack.yaml)
    pub path: PathBuf,
    /// Where the pack came from
    pub source: PackSource,
    /// Checksum associated with the install source, when one was provided;
    /// `None` for sources without checksums (git clones, local copies)
    pub checksum: Option<String>,
}
/// Pack installation source type, as requested by the caller.
#[derive(Debug, Clone)]
pub enum PackSource {
    /// Git repository, optionally pinned to a branch/tag/commit
    Git {
        url: String,
        git_ref: Option<String>,
    },
    /// Remote archive URL (zip, tar.gz, tgz)
    Archive { url: String },
    /// Local directory (copied into the staging area)
    LocalDirectory { path: PathBuf },
    /// Local archive file (extracted into the staging area)
    LocalArchive { path: PathBuf },
    /// Registry reference, optionally pinned to a version
    Registry {
        pack_ref: String,
        version: Option<String>,
    },
}
impl PackInstaller {
/// Create a new pack installer
pub async fn new(
temp_base_dir: impl AsRef<Path>,
registry_config: Option<PackRegistryConfig>,
) -> Result<Self> {
let temp_dir = temp_base_dir.as_ref().join("pack-installs");
fs::create_dir_all(&temp_dir)
.await
.map_err(|e| Error::internal(format!("Failed to create temp directory: {}", e)))?;
let (registry_client, verify_checksums) = if let Some(config) = registry_config {
let verify_checksums = config.verify_checksums;
(Some(RegistryClient::new(config)?), verify_checksums)
} else {
(None, false)
};
Ok(Self {
temp_dir,
registry_client,
verify_checksums,
progress_callback: None,
})
}
/// Set progress callback
pub fn with_progress_callback(mut self, callback: ProgressCallback) -> Self {
self.progress_callback = Some(callback);
self
}
/// Report progress event
fn report_progress(&self, event: ProgressEvent) {
if let Some(ref callback) = self.progress_callback {
callback(event);
}
}
    /// Install a pack from the given source.
    ///
    /// Pure dispatch: delegates to the source-specific installer and
    /// returns the staged pack location plus provenance information.
    ///
    /// # Errors
    /// Propagates any error from the underlying installer (network, git,
    /// filesystem, registry resolution, or validation failures).
    pub async fn install(&self, source: PackSource) -> Result<InstalledPack> {
        match source {
            PackSource::Git { url, git_ref } => self.install_from_git(&url, git_ref.as_deref()).await,
            PackSource::Archive { url } => self.install_from_archive_url(&url, None).await,
            PackSource::LocalDirectory { path } => self.install_from_local_directory(&path).await,
            PackSource::LocalArchive { path } => self.install_from_local_archive(&path).await,
            PackSource::Registry { pack_ref, version } => {
                self.install_from_registry(&pack_ref, version.as_deref()).await
            }
        }
    }
    /// Install a pack from a git repository.
    ///
    /// Clones `url` into a fresh temp directory (shallow clone when no
    /// specific ref is requested), optionally checks out `git_ref`, then
    /// locates the pack root inside the clone.
    ///
    /// # Errors
    /// Internal errors if the `git` binary cannot be executed, or if the
    /// clone/checkout exits non-zero (stderr is included in the message).
    async fn install_from_git(&self, url: &str, git_ref: Option<&str>) -> Result<InstalledPack> {
        tracing::info!("Installing pack from git: {} (ref: {:?})", url, git_ref);
        self.report_progress(ProgressEvent::StepStarted {
            step: "clone".to_string(),
            message: format!("Cloning git repository: {}", url),
        });
        // Create unique temp directory for this installation
        let install_dir = self.create_temp_dir().await?;
        // Clone the repository
        let mut clone_cmd = Command::new("git");
        clone_cmd.arg("clone");
        // Shallow clone (depth=1) is only used when no specific ref is
        // requested, since an arbitrary commit may be unreachable in a
        // depth-1 history.
        if git_ref.is_none() {
            clone_cmd.arg("--depth").arg("1");
        }
        clone_cmd.arg(&url).arg(&install_dir);
        let output = clone_cmd
            .output()
            .await
            .map_err(|e| Error::internal(format!("Failed to execute git clone: {}", e)))?;
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            return Err(Error::internal(format!("Git clone failed: {}", stderr)));
        }
        // Checkout specific ref if provided
        if let Some(ref_spec) = git_ref {
            let checkout_output = Command::new("git")
                .arg("-C")
                .arg(&install_dir)
                .arg("checkout")
                .arg(ref_spec)
                .output()
                .await
                .map_err(|e| Error::internal(format!("Failed to execute git checkout: {}", e)))?;
            if !checkout_output.status.success() {
                let stderr = String::from_utf8_lossy(&checkout_output.stderr);
                return Err(Error::internal(format!("Git checkout failed: {}", stderr)));
            }
        }
        // Find pack.yaml (could be at root or in pack/ subdirectory)
        let pack_dir = self.find_pack_directory(&install_dir).await?;
        Ok(InstalledPack {
            path: pack_dir,
            source: PackSource::Git {
                url: url.to_string(),
                git_ref: git_ref.map(String::from),
            },
            checksum: None,
        })
    }
    /// Install a pack from a remote archive URL.
    ///
    /// Downloads the archive, optionally verifies its checksum, extracts
    /// it, locates the pack root, and removes the downloaded archive file.
    ///
    /// # Errors
    /// Internal errors for download, checksum, or extraction failures.
    async fn install_from_archive_url(
        &self,
        url: &str,
        expected_checksum: Option<&str>,
    ) -> Result<InstalledPack> {
        tracing::info!("Installing pack from archive: {}", url);
        // Download the archive
        let archive_path = self.download_archive(url).await?;
        // Verify checksum only when both a checksum was provided AND
        // verification is enabled in the configuration.
        if let Some(checksum_str) = expected_checksum {
            if self.verify_checksums {
                self.verify_archive_checksum(&archive_path, checksum_str)
                    .await?;
            }
        }
        // Extract the archive
        let extract_dir = self.extract_archive(&archive_path).await?;
        // Find pack.yaml
        let pack_dir = self.find_pack_directory(&extract_dir).await?;
        // Clean up archive file (best-effort; errors are ignored)
        let _ = fs::remove_file(&archive_path).await;
        Ok(InstalledPack {
            path: pack_dir,
            source: PackSource::Archive {
                url: url.to_string(),
            },
            // NOTE(review): the supplied checksum is recorded even when
            // verification was skipped (verify_checksums == false) — confirm
            // whether callers treat this field as "verified".
            checksum: expected_checksum.map(String::from),
        })
    }
/// Install from local directory
async fn install_from_local_directory(&self, source_path: &Path) -> Result<InstalledPack> {
tracing::info!("Installing pack from local directory: {:?}", source_path);
// Verify source exists and is a directory
if !source_path.exists() {
return Err(Error::not_found("directory", "path", source_path.display().to_string()));
}
if !source_path.is_dir() {
return Err(Error::validation(format!(
"Path is not a directory: {}",
source_path.display()
)));
}
// Create temp directory
let install_dir = self.create_temp_dir().await?;
// Copy directory contents
self.copy_directory(source_path, &install_dir).await?;
// Find pack.yaml
let pack_dir = self.find_pack_directory(&install_dir).await?;
Ok(InstalledPack {
path: pack_dir,
source: PackSource::LocalDirectory {
path: source_path.to_path_buf(),
},
checksum: None,
})
}
/// Install from local archive file
async fn install_from_local_archive(&self, archive_path: &Path) -> Result<InstalledPack> {
tracing::info!("Installing pack from local archive: {:?}", archive_path);
// Verify file exists
if !archive_path.exists() {
return Err(Error::not_found("file", "path", archive_path.display().to_string()));
}
if !archive_path.is_file() {
return Err(Error::validation(format!(
"Path is not a file: {}",
archive_path.display()
)));
}
// Extract the archive
let extract_dir = self.extract_archive(archive_path).await?;
// Find pack.yaml
let pack_dir = self.find_pack_directory(&extract_dir).await?;
Ok(InstalledPack {
path: pack_dir,
source: PackSource::LocalArchive {
path: archive_path.to_path_buf(),
},
checksum: None,
})
}
    /// Install a pack resolved through the configured registries.
    ///
    /// Resolves `pack_ref` via the registry client, validates the
    /// requested version (if any) against the single version the index
    /// advertises, picks an install source (git preferred over archive),
    /// and delegates to the corresponding installer.
    ///
    /// # Errors
    /// Configuration error when no registry client was configured;
    /// not-found when no registry knows the ref; validation error on a
    /// version mismatch; otherwise whatever the delegated installer fails
    /// with.
    async fn install_from_registry(
        &self,
        pack_ref: &str,
        version: Option<&str>,
    ) -> Result<InstalledPack> {
        tracing::info!(
            "Installing pack from registry: {} (version: {:?})",
            pack_ref,
            version
        );
        let registry_client = self
            .registry_client
            .as_ref()
            .ok_or_else(|| Error::configuration("Registry client not configured"))?;
        // Search for the pack across registries (priority order)
        let (pack_entry, _registry_url) = registry_client
            .search_pack(pack_ref)
            .await?
            .ok_or_else(|| Error::not_found("pack", "ref", pack_ref))?;
        // Validate version if specified; "latest" always accepts the
        // index's advertised version.
        if let Some(requested_version) = version {
            if requested_version != "latest" && pack_entry.version != requested_version {
                return Err(Error::validation(format!(
                    "Pack {} version {} not found (available: {})",
                    pack_ref, requested_version, pack_entry.version
                )));
            }
        }
        // Get the preferred install source (try git first, then archive)
        let install_source = self.select_install_source(&pack_entry)?;
        // Install from the selected source
        match install_source {
            InstallSource::Git {
                url,
                git_ref,
                checksum,
            } => {
                let mut installed = self
                    .install_from_git(&url, git_ref.as_deref())
                    .await?;
                // NOTE(review): the index checksum is attached to the git
                // install result but is never verified against the clone —
                // confirm whether that is intentional.
                installed.checksum = Some(checksum);
                Ok(installed)
            }
            InstallSource::Archive { url, checksum } => {
                self.install_from_archive_url(&url, Some(&checksum)).await
            }
        }
    }
/// Select the best install source from a pack entry
fn select_install_source(&self, pack_entry: &PackIndexEntry) -> Result<InstallSource> {
if pack_entry.install_sources.is_empty() {
return Err(Error::validation(format!(
"Pack {} has no install sources",
pack_entry.pack_ref
)));
}
// Prefer git sources for development
for source in &pack_entry.install_sources {
if matches!(source, InstallSource::Git { .. }) {
return Ok(source.clone());
}
}
// Fall back to first archive source
for source in &pack_entry.install_sources {
if matches!(source, InstallSource::Archive { .. }) {
return Ok(source.clone());
}
}
// Return first source if no preference matched
Ok(pack_entry.install_sources[0].clone())
}
/// Download an archive from a URL into the installer's temp directory.
///
/// The local filename is derived from the last path segment of the URL with
/// any query string or fragment stripped, falling back to "archive.zip".
/// Returns the path of the written file.
async fn download_archive(&self, url: &str) -> Result<PathBuf> {
    let client = reqwest::Client::new();
    let response = client
        .get(url)
        .send()
        .await
        .map_err(|e| Error::internal(format!("Failed to download archive: {}", e)))?;
    if !response.status().is_success() {
        return Err(Error::internal(format!(
            "Failed to download archive: HTTP {}",
            response.status()
        )));
    }
    // Derive the filename from the URL path only: "https://x/p.zip?tok=1"
    // must yield "p.zip", not "p.zip?tok=1" (the previous code kept the
    // query string, which broke extension-based format detection in
    // extract_archive). The is_empty filter also handles URLs ending in '/',
    // which previously produced an empty filename.
    let path_part = url.split(|c| c == '?' || c == '#').next().unwrap_or(url);
    let filename = path_part
        .rsplit('/')
        .next()
        .filter(|segment| !segment.is_empty())
        .unwrap_or("archive.zip")
        .to_string();
    let archive_path = self.temp_dir.join(&filename);
    // Buffer the whole body in memory, then persist it in one write.
    let bytes = response
        .bytes()
        .await
        .map_err(|e| Error::internal(format!("Failed to read archive bytes: {}", e)))?;
    fs::write(&archive_path, &bytes)
        .await
        .map_err(|e| Error::internal(format!("Failed to write archive: {}", e)))?;
    Ok(archive_path)
}
/// Extract an archive (zip or tar.gz) into a fresh temp directory.
///
/// The format is chosen by file extension, compared case-insensitively so
/// that e.g. "Pack.ZIP" is handled (the previous exact match rejected it).
/// Returns the extraction directory.
async fn extract_archive(&self, archive_path: &Path) -> Result<PathBuf> {
    let extract_dir = self.create_temp_dir().await?;
    let extension = archive_path
        .extension()
        .and_then(|e| e.to_str())
        .unwrap_or("");
    // Match on a lowercased copy but report the original in errors.
    match extension.to_ascii_lowercase().as_str() {
        "zip" => self.extract_zip(archive_path, &extract_dir).await?,
        // "gz" covers ".tar.gz" as well: extension() yields only the final
        // dotted component.
        "gz" | "tgz" => self.extract_tar_gz(archive_path, &extract_dir).await?,
        _ => {
            return Err(Error::validation(format!(
                "Unsupported archive format: {}",
                extension
            )));
        }
    }
    Ok(extract_dir)
}
/// Extract a zip archive
async fn extract_zip(&self, archive_path: &Path, extract_dir: &Path) -> Result<()> {
let output = Command::new("unzip")
.arg("-q") // Quiet
.arg(archive_path)
.arg("-d")
.arg(extract_dir)
.output()
.await
.map_err(|e| Error::internal(format!("Failed to execute unzip: {}", e)))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(Error::internal(format!("Failed to extract zip: {}", stderr)));
}
Ok(())
}
/// Extract a tar.gz archive
async fn extract_tar_gz(&self, archive_path: &Path, extract_dir: &Path) -> Result<()> {
let output = Command::new("tar")
.arg("xzf")
.arg(archive_path)
.arg("-C")
.arg(extract_dir)
.output()
.await
.map_err(|e| Error::internal(format!("Failed to execute tar: {}", e)))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(Error::internal(format!("Failed to extract tar.gz: {}", stderr)));
}
Ok(())
}
/// Verify that an archive on disk matches an expected "algorithm:hash" string.
///
/// The checksum string is parsed first (invalid strings are a validation
/// error), then the file is hashed with the same algorithm and compared.
async fn verify_archive_checksum(
    &self,
    archive_path: &Path,
    checksum_str: &str,
) -> Result<()> {
    let expected = Checksum::parse(checksum_str)
        .map_err(|e| Error::validation(format!("Invalid checksum: {}", e)))?;
    let actual = self.compute_checksum(archive_path, &expected.algorithm).await?;
    if actual != expected.hash {
        return Err(Error::validation(format!(
            "Checksum mismatch: expected {}, got {}",
            expected.hash, actual
        )));
    }
    tracing::info!("Checksum verified: {}", checksum_str);
    Ok(())
}
/// Compute the checksum of a file by shelling out to the system hashing tool.
///
/// Returns the lowercase hex digest. Supported algorithms map onto the
/// coreutils-style commands `sha256sum`, `sha512sum`, `sha1sum`, `md5sum`.
///
/// NOTE(review): these commands are assumed to be on PATH; they are standard
/// on Linux but absent on macOS/BSD (which ship `shasum`/`md5` instead) —
/// confirm the supported deployment targets, or consider hashing in-process
/// like the storage module does.
async fn compute_checksum(&self, path: &Path, algorithm: &str) -> Result<String> {
    // Map the algorithm name onto its external command.
    let command = match algorithm {
        "sha256" => "sha256sum",
        "sha512" => "sha512sum",
        "sha1" => "sha1sum",
        "md5" => "md5sum",
        _ => {
            return Err(Error::validation(format!(
                "Unsupported hash algorithm: {}",
                algorithm
            )));
        }
    };
    let output = Command::new(command)
        .arg(path)
        .output()
        .await
        .map_err(|e| Error::internal(format!("Failed to compute checksum: {}", e)))?;
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(Error::internal(format!("Checksum computation failed: {}", stderr)));
    }
    // The tools print "<hash>  <filename>"; the first whitespace-separated
    // token is the digest.
    let stdout = String::from_utf8_lossy(&output.stdout);
    let hash = stdout
        .split_whitespace()
        .next()
        .ok_or_else(|| Error::internal("Failed to parse checksum output"))?;
    Ok(hash.to_lowercase())
}
/// Find pack directory (pack.yaml location)
async fn find_pack_directory(&self, base_dir: &Path) -> Result<PathBuf> {
// Check if pack.yaml exists at root
let root_pack_yaml = base_dir.join("pack.yaml");
if root_pack_yaml.exists() {
return Ok(base_dir.to_path_buf());
}
// Check in pack/ subdirectory
let pack_subdir = base_dir.join("pack");
let pack_subdir_yaml = pack_subdir.join("pack.yaml");
if pack_subdir_yaml.exists() {
return Ok(pack_subdir);
}
// Check in first subdirectory (common for GitHub archives)
let mut entries = fs::read_dir(base_dir)
.await
.map_err(|e| Error::internal(format!("Failed to read directory: {}", e)))?;
while let Some(entry) = entries
.next_entry()
.await
.map_err(|e| Error::internal(format!("Failed to read directory entry: {}", e)))?
{
let path = entry.path();
if path.is_dir() {
let subdir_pack_yaml = path.join("pack.yaml");
if subdir_pack_yaml.exists() {
return Ok(path);
}
}
}
Err(Error::validation(format!(
"pack.yaml not found in {}",
base_dir.display()
)))
}
/// Copy directory recursively
#[async_recursion::async_recursion]
async fn copy_directory(&self, src: &Path, dst: &Path) -> Result<()> {
use tokio::fs;
// Create destination directory if it doesn't exist
fs::create_dir_all(dst)
.await
.map_err(|e| Error::internal(format!("Failed to create destination directory: {}", e)))?;
// Read source directory
let mut entries = fs::read_dir(src)
.await
.map_err(|e| Error::internal(format!("Failed to read source directory: {}", e)))?;
// Copy each entry
while let Some(entry) = entries
.next_entry()
.await
.map_err(|e| Error::internal(format!("Failed to read directory entry: {}", e)))?
{
let path = entry.path();
let file_name = entry.file_name();
let dest_path = dst.join(&file_name);
let metadata = entry
.metadata()
.await
.map_err(|e| Error::internal(format!("Failed to read entry metadata: {}", e)))?;
if metadata.is_dir() {
// Recursively copy subdirectory
self.copy_directory(&path, &dest_path).await?;
} else {
// Copy file
fs::copy(&path, &dest_path)
.await
.map_err(|e| Error::internal(format!("Failed to copy file: {}", e)))?;
}
}
Ok(())
}
/// Create a unique temporary directory under the installer's temp root.
///
/// A v4 UUID is used as the directory name, making collisions negligible.
async fn create_temp_dir(&self) -> Result<PathBuf> {
    let dir = self.temp_dir.join(uuid::Uuid::new_v4().to_string());
    fs::create_dir_all(&dir)
        .await
        .map_err(|e| Error::internal(format!("Failed to create temp directory: {}", e)))?;
    Ok(dir)
}
/// Remove an extracted pack directory, but only if it lives under the
/// installer's temp root. Paths outside the temp root are deliberately
/// ignored (no error), as before.
///
/// Paths containing `..` components are now refused too: `Path::starts_with`
/// compares components literally without resolving them, so a path like
/// `<temp>/../elsewhere` previously passed the prefix check while resolving
/// outside the temp root and could delete unrelated directories.
pub async fn cleanup(&self, pack_path: &Path) -> Result<()> {
    use std::path::Component;
    let has_parent_component = pack_path
        .components()
        .any(|c| matches!(c, Component::ParentDir));
    if pack_path.starts_with(&self.temp_dir) && !has_parent_component {
        fs::remove_dir_all(pack_path)
            .await
            .map_err(|e| Error::internal(format!("Failed to cleanup temp directory: {}", e)))?;
    }
    Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    /// `Checksum::parse` is fully synchronous, so a plain `#[test]` suffices;
    /// the previous `#[tokio::test]` spun up an async runtime for nothing.
    #[test]
    fn test_checksum_parsing() {
        let checksum = Checksum::parse("sha256:abc123def456").unwrap();
        assert_eq!(checksum.algorithm, "sha256");
        assert_eq!(checksum.hash, "abc123def456");
    }
    /// Git sources should win over archive sources regardless of list order.
    #[tokio::test]
    async fn test_select_install_source_prefers_git() {
        let entry = PackIndexEntry {
            pack_ref: "test".to_string(),
            label: "Test".to_string(),
            description: "Test pack".to_string(),
            version: "1.0.0".to_string(),
            author: "Test".to_string(),
            email: None,
            homepage: None,
            repository: None,
            license: "MIT".to_string(),
            keywords: vec![],
            runtime_deps: vec![],
            install_sources: vec![
                InstallSource::Archive {
                    url: "https://example.com/archive.zip".to_string(),
                    checksum: "sha256:abc123".to_string(),
                },
                InstallSource::Git {
                    url: "https://github.com/example/pack".to_string(),
                    git_ref: Some("v1.0.0".to_string()),
                    checksum: "sha256:def456".to_string(),
                },
            ],
            contents: Default::default(),
            dependencies: None,
            meta: None,
        };
        let temp_dir = std::env::temp_dir().join("attune-test");
        let installer = PackInstaller::new(&temp_dir, None).await.unwrap();
        let source = installer.select_install_source(&entry).unwrap();
        assert!(matches!(source, InstallSource::Git { .. }));
    }
}

View File

@@ -0,0 +1,389 @@
//! Pack registry module for managing pack indices and installation sources
//!
//! This module provides data structures and functionality for:
//! - Pack registry index files (JSON format)
//! - Pack installation sources (git, archive, local)
//! - Registry client for fetching and parsing indices
//! - Pack search and discovery
pub mod client;
pub mod dependency;
pub mod installer;
pub mod storage;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// Re-export client, installer, storage, and dependency utilities
pub use client::RegistryClient;
pub use dependency::{
DependencyValidation, DependencyValidator, PackDepValidation, RuntimeDepValidation,
};
pub use installer::{InstalledPack, PackInstaller, PackSource};
pub use storage::{
calculate_directory_checksum, calculate_file_checksum, verify_checksum, PackStorage,
};
/// Pack registry index file
///
/// This is the top-level structure of a pack registry index file (typically index.json).
/// It contains metadata about the registry and a list of available packs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackIndex {
    /// Human-readable registry name
    pub registry_name: String,
    /// Registry homepage URL
    pub registry_url: String,
    /// Index format version (semantic versioning)
    pub version: String,
    /// ISO 8601 timestamp of last update
    pub last_updated: String,
    /// List of available packs (one entry per pack; each entry carries only
    /// its latest version)
    pub packs: Vec<PackIndexEntry>,
}
/// Pack entry in a registry index
///
/// Optional fields are omitted from the serialized JSON when `None`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackIndexEntry {
    /// Unique pack identifier (matches pack.yaml ref; serialized as "ref")
    #[serde(rename = "ref")]
    pub pack_ref: String,
    /// Human-readable pack name
    pub label: String,
    /// Brief pack description
    pub description: String,
    /// Semantic version (latest available)
    pub version: String,
    /// Pack author/maintainer name
    pub author: String,
    /// Contact email
    #[serde(skip_serializing_if = "Option::is_none")]
    pub email: Option<String>,
    /// Pack homepage URL
    #[serde(skip_serializing_if = "Option::is_none")]
    pub homepage: Option<String>,
    /// Source repository URL
    #[serde(skip_serializing_if = "Option::is_none")]
    pub repository: Option<String>,
    /// SPDX license identifier
    pub license: String,
    /// Searchable keywords/tags (defaults to empty when absent)
    #[serde(default)]
    pub keywords: Vec<String>,
    /// Required runtimes (python3, nodejs, shell)
    pub runtime_deps: Vec<String>,
    /// Available installation sources (git and/or archive)
    pub install_sources: Vec<InstallSource>,
    /// Pack components summary
    pub contents: PackContents,
    /// Pack dependencies
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dependencies: Option<PackDependencies>,
    /// Additional metadata
    #[serde(skip_serializing_if = "Option::is_none")]
    pub meta: Option<PackMeta>,
}
/// Installation source for a pack
///
/// Serialized as an internally tagged enum: a "type" field of "git" or
/// "archive" selects the variant.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum InstallSource {
    /// Git repository source
    Git {
        /// Git repository URL
        url: String,
        /// Git ref (tag, branch, commit); serialized as "ref", omitted when None
        #[serde(skip_serializing_if = "Option::is_none")]
        #[serde(rename = "ref")]
        git_ref: Option<String>,
        /// Checksum in format "algorithm:hash"
        checksum: String,
    },
    /// Archive (zip, tar.gz) source
    Archive {
        /// Archive URL
        url: String,
        /// Checksum in format "algorithm:hash"
        checksum: String,
    },
}
impl InstallSource {
/// Get the URL for this install source
pub fn url(&self) -> &str {
match self {
InstallSource::Git { url, .. } => url,
InstallSource::Archive { url, .. } => url,
}
}
/// Get the checksum for this install source
pub fn checksum(&self) -> &str {
match self {
InstallSource::Git { checksum, .. } => checksum,
InstallSource::Archive { checksum, .. } => checksum,
}
}
/// Get the source type as a string
pub fn source_type(&self) -> &'static str {
match self {
InstallSource::Git { .. } => "git",
InstallSource::Archive { .. } => "archive",
}
}
}
/// Pack contents summary
///
/// Every list defaults to empty when absent from the JSON, so a minimal
/// index entry can omit `contents` sections entirely.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PackContents {
    /// List of actions
    #[serde(default)]
    pub actions: Vec<ComponentSummary>,
    /// List of sensors
    #[serde(default)]
    pub sensors: Vec<ComponentSummary>,
    /// List of triggers
    #[serde(default)]
    pub triggers: Vec<ComponentSummary>,
    /// List of bundled rules
    #[serde(default)]
    pub rules: Vec<ComponentSummary>,
    /// List of bundled workflows
    #[serde(default)]
    pub workflows: Vec<ComponentSummary>,
}
/// Component summary (action, sensor, trigger, rule, or workflow)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComponentSummary {
    /// Component name
    pub name: String,
    /// Brief description
    pub description: String,
}
/// Pack dependencies
///
/// All fields are optional; omitted fields are skipped on serialization or
/// default to empty on deserialization.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PackDependencies {
    /// Attune version requirement (semver)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub attune_version: Option<String>,
    /// Python version requirement
    #[serde(skip_serializing_if = "Option::is_none")]
    pub python_version: Option<String>,
    /// Node.js version requirement
    #[serde(skip_serializing_if = "Option::is_none")]
    pub nodejs_version: Option<String>,
    /// Pack dependencies (format: "ref@version")
    #[serde(default)]
    pub packs: Vec<String>,
}
/// Additional pack metadata
///
/// Unknown JSON fields are preserved in `extra` via `#[serde(flatten)]`, so
/// registries may attach custom metadata without breaking deserialization.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct PackMeta {
    /// Download count
    #[serde(skip_serializing_if = "Option::is_none")]
    pub downloads: Option<u64>,
    /// Star/rating count
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stars: Option<u64>,
    /// Tested Attune versions
    #[serde(default)]
    pub tested_attune_versions: Vec<String>,
    /// Additional custom fields
    #[serde(flatten)]
    pub extra: HashMap<String, serde_json::Value>,
}
/// Checksum with algorithm
///
/// Both fields are stored lowercase by `parse`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Checksum {
    /// Hash algorithm (sha256, sha512, sha1, md5)
    pub algorithm: String,
    /// Hash value (lowercase hex string, never empty)
    pub hash: String,
}
impl Checksum {
    /// Parse a checksum string in format "algorithm:hash".
    ///
    /// Both parts are lowercased. Errors on a missing ':' separator, an
    /// unsupported algorithm, a non-hex hash, or an empty hash — the old
    /// implementation accepted "sha256:" because `all()` on an empty
    /// iterator is vacuously true.
    pub fn parse(s: &str) -> Result<Self, String> {
        let (algorithm, hash) = s
            .split_once(':')
            .ok_or_else(|| format!("Invalid checksum format: {}. Expected 'algorithm:hash'", s))?;
        let algorithm = algorithm.to_lowercase();
        let hash = hash.to_lowercase();
        // Validate algorithm
        match algorithm.as_str() {
            "sha256" | "sha512" | "sha1" | "md5" => {}
            _ => return Err(format!("Unsupported hash algorithm: {}", algorithm)),
        }
        // Basic validation of hash format (non-empty hex string)
        if hash.is_empty() || !hash.chars().all(|c| c.is_ascii_hexdigit()) {
            return Err(format!("Invalid hash format: {}. Must be hexadecimal", hash));
        }
        Ok(Self { algorithm, hash })
    }
    // The inherent `to_string` was removed: it shadowed the blanket
    // `ToString::to_string` derived from the `Display` impl below (clippy:
    // inherent_to_string_shadow_display). Callers of `checksum.to_string()`
    // keep working through `Display`, with identical output.
}
impl std::fmt::Display for Checksum {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}:{}", self.algorithm, self.hash)
    }
}
impl std::str::FromStr for Checksum {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::parse(s)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// parse() lowercases both parts, so mixed-case input normalizes.
    #[test]
    fn test_checksum_parse() {
        let checksum = Checksum::parse("sha256:abc123def456").unwrap();
        assert_eq!(checksum.algorithm, "sha256");
        assert_eq!(checksum.hash, "abc123def456");
        let checksum = Checksum::parse("SHA256:ABC123DEF456").unwrap();
        assert_eq!(checksum.algorithm, "sha256");
        assert_eq!(checksum.hash, "abc123def456");
    }
    /// Malformed inputs: missing separator, missing hash part, non-hex hash,
    /// unsupported algorithm.
    #[test]
    fn test_checksum_parse_invalid() {
        assert!(Checksum::parse("invalid").is_err());
        assert!(Checksum::parse("sha256").is_err());
        assert!(Checksum::parse("sha256:xyz").is_err()); // non-hex
        assert!(Checksum::parse("unknown:abc123").is_err()); // unknown algorithm
    }
    /// Round-trip formatting back into "algorithm:hash".
    #[test]
    fn test_checksum_to_string() {
        let checksum = Checksum {
            algorithm: "sha256".to_string(),
            hash: "abc123".to_string(),
        };
        assert_eq!(checksum.to_string(), "sha256:abc123");
    }
    /// The accessor methods work identically across both variants.
    #[test]
    fn test_install_source_getters() {
        let git_source = InstallSource::Git {
            url: "https://github.com/example/pack".to_string(),
            git_ref: Some("v1.0.0".to_string()),
            checksum: "sha256:abc123".to_string(),
        };
        assert_eq!(git_source.url(), "https://github.com/example/pack");
        assert_eq!(git_source.checksum(), "sha256:abc123");
        assert_eq!(git_source.source_type(), "git");
        let archive_source = InstallSource::Archive {
            url: "https://example.com/pack.zip".to_string(),
            checksum: "sha256:def456".to_string(),
        };
        assert_eq!(archive_source.url(), "https://example.com/pack.zip");
        assert_eq!(archive_source.checksum(), "sha256:def456");
        assert_eq!(archive_source.source_type(), "archive");
    }
    /// End-to-end deserialization of a representative index document,
    /// exercising the "ref" rename and the tagged InstallSource enum.
    #[test]
    fn test_pack_index_deserialization() {
        let json = r#"{
"registry_name": "Test Registry",
"registry_url": "https://registry.example.com",
"version": "1.0",
"last_updated": "2024-01-20T12:00:00Z",
"packs": [
{
"ref": "test-pack",
"label": "Test Pack",
"description": "A test pack",
"version": "1.0.0",
"author": "Test Author",
"license": "Apache-2.0",
"keywords": ["test"],
"runtime_deps": ["python3"],
"install_sources": [
{
"type": "git",
"url": "https://github.com/example/pack",
"ref": "v1.0.0",
"checksum": "sha256:abc123"
}
],
"contents": {
"actions": [
{
"name": "test_action",
"description": "Test action"
}
],
"sensors": [],
"triggers": [],
"rules": [],
"workflows": []
}
}
]
}"#;
        let index: PackIndex = serde_json::from_str(json).unwrap();
        assert_eq!(index.registry_name, "Test Registry");
        assert_eq!(index.packs.len(), 1);
        assert_eq!(index.packs[0].pack_ref, "test-pack");
        assert_eq!(index.packs[0].install_sources.len(), 1);
    }
}

View File

@@ -0,0 +1,394 @@
//! Pack Storage Management
//!
//! This module provides utilities for managing pack storage, including:
//! - Checksum calculation (SHA256)
//! - Pack directory management
//! - Storage path resolution
//! - Pack content verification
use crate::error::{Error, Result};
use sha2::{Digest, Sha256};
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
/// Pack storage manager
///
/// Resolves pack storage paths under a single base directory and performs
/// install/uninstall/list operations there.
pub struct PackStorage {
    // Root directory under which all packs are stored (e.g. /opt/attune/packs).
    base_dir: PathBuf,
}
impl PackStorage {
    /// Create a new PackStorage instance
    ///
    /// # Arguments
    ///
    /// * `base_dir` - Base directory for pack storage (e.g., /opt/attune/packs)
    pub fn new<P: Into<PathBuf>>(base_dir: P) -> Self {
        Self {
            base_dir: base_dir.into(),
        }
    }
    /// Get the storage path for a pack
    ///
    /// # Arguments
    ///
    /// * `pack_ref` - Pack reference (e.g., "core", "my_pack")
    /// * `version` - Optional version (e.g., "1.0.0")
    ///
    /// # Returns
    ///
    /// `<base>/<ref>-<version>` when a version is given, `<base>/<ref>` otherwise
    pub fn get_pack_path(&self, pack_ref: &str, version: Option<&str>) -> PathBuf {
        match version {
            Some(v) => self.base_dir.join(format!("{}-{}", pack_ref, v)),
            None => self.base_dir.join(pack_ref),
        }
    }
    /// Ensure the base directory exists
    ///
    /// `create_dir_all` already succeeds when the directory exists, so the
    /// previous exists()-then-create sequence (a benign TOCTOU race) was
    /// redundant and has been dropped.
    pub fn ensure_base_dir(&self) -> Result<()> {
        fs::create_dir_all(&self.base_dir).map_err(|e| {
            Error::io(format!(
                "Failed to create pack storage directory {}: {}",
                self.base_dir.display(),
                e
            ))
        })
    }
    /// Copy a pack from a temporary location into permanent storage
    ///
    /// Despite the historical "move" wording, the source directory is copied
    /// and left in place; callers are responsible for cleaning up the
    /// temporary location. Any existing installation at the destination is
    /// removed first.
    ///
    /// # Arguments
    ///
    /// * `source` - Source directory (temporary location)
    /// * `pack_ref` - Pack reference
    /// * `version` - Optional version
    ///
    /// # Returns
    ///
    /// The final storage path
    pub fn install_pack<P: AsRef<Path>>(
        &self,
        source: P,
        pack_ref: &str,
        version: Option<&str>,
    ) -> Result<PathBuf> {
        self.ensure_base_dir()?;
        let dest = self.get_pack_path(pack_ref, version);
        // Remove existing installation if present
        if dest.exists() {
            fs::remove_dir_all(&dest).map_err(|e| {
                Error::io(format!(
                    "Failed to remove existing pack at {}: {}",
                    dest.display(),
                    e
                ))
            })?;
        }
        // Copy the pack to permanent storage
        copy_dir_all(source.as_ref(), &dest)?;
        Ok(dest)
    }
    /// Remove a pack from storage (no-op if it is not installed)
    ///
    /// # Arguments
    ///
    /// * `pack_ref` - Pack reference
    /// * `version` - Optional version
    pub fn uninstall_pack(&self, pack_ref: &str, version: Option<&str>) -> Result<()> {
        let path = self.get_pack_path(pack_ref, version);
        if path.exists() {
            fs::remove_dir_all(&path).map_err(|e| {
                Error::io(format!(
                    "Failed to remove pack at {}: {}",
                    path.display(),
                    e
                ))
            })?;
        }
        Ok(())
    }
    /// Check if a pack is installed (its storage path exists and is a directory)
    pub fn is_installed(&self, pack_ref: &str, version: Option<&str>) -> bool {
        let path = self.get_pack_path(pack_ref, version);
        path.exists() && path.is_dir()
    }
    /// List all installed packs
    ///
    /// Returns the directory names found directly under the base directory;
    /// an absent base directory yields an empty list rather than an error.
    pub fn list_installed(&self) -> Result<Vec<String>> {
        if !self.base_dir.exists() {
            return Ok(Vec::new());
        }
        let entries = fs::read_dir(&self.base_dir).map_err(|e| {
            Error::io(format!(
                "Failed to read pack directory {}: {}",
                self.base_dir.display(),
                e
            ))
        })?;
        let mut packs = Vec::new();
        for entry in entries {
            let entry = entry.map_err(|e| Error::io(format!("Failed to read directory entry: {}", e)))?;
            let path = entry.path();
            // Only directories count as packs; non-UTF-8 names are skipped.
            if path.is_dir() {
                if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                    packs.push(name.to_string());
                }
            }
        }
        Ok(packs)
    }
}
/// Calculate SHA256 checksum of a directory
///
/// This recursively hashes all files in the directory in a deterministic order
/// (siblings sorted by file name via WalkDir) to produce a consistent checksum.
/// Each file's relative path is fed into the hasher before its contents.
///
/// NOTE(review): path bytes and content bytes are concatenated with no
/// length prefix or delimiter, so distinct trees can in principle produce
/// the same digest; and the relative path is hashed with the platform's
/// separator, so the same tree may hash differently on Windows vs Unix.
/// Changing either would invalidate previously stored checksums — confirm
/// before altering the scheme.
///
/// # Arguments
///
/// * `path` - Path to the directory
///
/// # Returns
///
/// Hex-encoded SHA256 checksum
pub fn calculate_directory_checksum<P: AsRef<Path>>(path: P) -> Result<String> {
    let path = path.as_ref();
    if !path.exists() {
        return Err(Error::io(format!(
            "Path does not exist: {}",
            path.display()
        )));
    }
    if !path.is_dir() {
        return Err(Error::validation(format!(
            "Path is not a directory: {}",
            path.display()
        )));
    }
    let mut hasher = Sha256::new();
    let mut files: Vec<PathBuf> = Vec::new();
    // Collect all files in sorted order for deterministic hashing
    for entry in WalkDir::new(path).sort_by_file_name().into_iter() {
        let entry = entry.map_err(|e| Error::io(format!("Failed to walk directory: {}", e)))?;
        if entry.file_type().is_file() {
            files.push(entry.path().to_path_buf());
        }
    }
    // Hash each file
    for file_path in files {
        // Include relative path in hash for structure integrity
        let rel_path = file_path
            .strip_prefix(path)
            .map_err(|e| Error::io(format!("Failed to strip prefix: {}", e)))?;
        hasher.update(rel_path.to_string_lossy().as_bytes());
        // Hash file contents in fixed-size chunks to bound memory use
        let mut file = fs::File::open(&file_path).map_err(|e| {
            Error::io(format!("Failed to open file {}: {}", file_path.display(), e))
        })?;
        let mut buffer = [0u8; 8192];
        loop {
            let n = file.read(&mut buffer).map_err(|e| {
                Error::io(format!("Failed to read file {}: {}", file_path.display(), e))
            })?;
            if n == 0 {
                break;
            }
            hasher.update(&buffer[..n]);
        }
    }
    let result = hasher.finalize();
    Ok(format!("{:x}", result))
}
/// Calculate the SHA256 checksum of a single file.
///
/// The file is streamed through the hasher in 8 KiB chunks, so memory use is
/// bounded regardless of file size.
///
/// # Arguments
///
/// * `path` - Path to the file
///
/// # Returns
///
/// Hex-encoded SHA256 checksum
pub fn calculate_file_checksum<P: AsRef<Path>>(path: P) -> Result<String> {
    let path = path.as_ref();
    if !path.exists() {
        return Err(Error::io(format!(
            "File does not exist: {}",
            path.display()
        )));
    }
    if !path.is_file() {
        return Err(Error::validation(format!(
            "Path is not a file: {}",
            path.display()
        )));
    }
    let mut file = fs::File::open(path).map_err(|e| {
        Error::io(format!("Failed to open file {}: {}", path.display(), e))
    })?;
    let mut hasher = Sha256::new();
    let mut chunk = [0u8; 8192];
    loop {
        match file.read(&mut chunk) {
            Ok(0) => break,
            Ok(n) => hasher.update(&chunk[..n]),
            Err(e) => {
                return Err(Error::io(format!(
                    "Failed to read file {}: {}",
                    path.display(),
                    e
                )))
            }
        }
    }
    Ok(format!("{:x}", hasher.finalize()))
}
/// Recursively copy the directory tree at `src` into `dst`.
///
/// Creates `dst` (and parents) if missing; subdirectories recurse, other
/// entries are copied with `fs::copy`.
fn copy_dir_all(src: &Path, dst: &Path) -> Result<()> {
    fs::create_dir_all(dst).map_err(|e| {
        Error::io(format!(
            "Failed to create destination directory {}: {}",
            dst.display(),
            e
        ))
    })?;
    let entries = fs::read_dir(src).map_err(|e| {
        Error::io(format!(
            "Failed to read source directory {}: {}",
            src.display(),
            e
        ))
    })?;
    for entry in entries {
        let entry = entry.map_err(|e| Error::io(format!("Failed to read directory entry: {}", e)))?;
        let src_path = entry.path();
        let dst_path = dst.join(entry.file_name());
        if src_path.is_dir() {
            copy_dir_all(&src_path, &dst_path)?;
        } else {
            fs::copy(&src_path, &dst_path).map_err(|e| {
                Error::io(format!(
                    "Failed to copy file {} to {}: {}",
                    src_path.display(),
                    dst_path.display(),
                    e
                ))
            })?;
        }
    }
    Ok(())
}
/// Verify a pack directory's checksum against an expected value.
///
/// The expected value is compared case-insensitively, since hex digests may
/// be supplied in either case.
///
/// # Arguments
///
/// * `pack_path` - Path to the pack directory
/// * `expected_checksum` - Expected SHA256 checksum (hex-encoded)
///
/// # Returns
///
/// `Ok(true)` on a match, `Ok(false)` on a mismatch, `Err` on I/O errors
pub fn verify_checksum<P: AsRef<Path>>(pack_path: P, expected_checksum: &str) -> Result<bool> {
    let computed = calculate_directory_checksum(pack_path)?;
    Ok(computed.eq_ignore_ascii_case(expected_checksum))
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;
    use std::io::Write;
    use tempfile::TempDir;
    /// Versioned packs get a "<ref>-<version>" directory, unversioned just "<ref>".
    #[test]
    fn test_pack_storage_paths() {
        let storage = PackStorage::new("/opt/attune/packs");
        let path1 = storage.get_pack_path("core", None);
        assert_eq!(path1, PathBuf::from("/opt/attune/packs/core"));
        let path2 = storage.get_pack_path("core", Some("1.0.0"));
        assert_eq!(path2, PathBuf::from("/opt/attune/packs/core-1.0.0"));
    }
    /// Single-file checksum matches the well-known SHA256 test vector.
    #[test]
    fn test_calculate_file_checksum() {
        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join("test.txt");
        let mut file = File::create(&file_path).unwrap();
        file.write_all(b"Hello, world!").unwrap();
        drop(file);
        let checksum = calculate_file_checksum(&file_path).unwrap();
        // Known SHA256 of "Hello, world!"
        assert_eq!(
            checksum,
            "315f5bdb76d078c43b8ac0064e4a0164612b1fce77c869345bfc94c75894edd3"
        );
    }
    /// Directory checksums must be deterministic across repeated runs.
    #[test]
    fn test_calculate_directory_checksum() {
        let temp_dir = TempDir::new().unwrap();
        // Create a simple directory structure
        let subdir = temp_dir.path().join("subdir");
        fs::create_dir(&subdir).unwrap();
        let file1 = temp_dir.path().join("file1.txt");
        let mut f = File::create(&file1).unwrap();
        f.write_all(b"content1").unwrap();
        drop(f);
        let file2 = subdir.join("file2.txt");
        let mut f = File::create(&file2).unwrap();
        f.write_all(b"content2").unwrap();
        drop(f);
        let checksum1 = calculate_directory_checksum(temp_dir.path()).unwrap();
        // Calculate again - should be deterministic
        let checksum2 = calculate_directory_checksum(temp_dir.path()).unwrap();
        assert_eq!(checksum1, checksum2);
        assert_eq!(checksum1.len(), 64); // SHA256 is 64 hex characters
    }
}