diff --git a/Cargo.lock b/Cargo.lock index bfc3c05..2a98533 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -70,12 +70,6 @@ dependencies = [ "windows-sys", ] -[[package]] -name = "anyhow" -version = "1.0.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" - [[package]] name = "assert_cmd" version = "2.0.17" @@ -96,10 +90,10 @@ dependencies = [ name = "athena" version = "0.1.0" dependencies = [ - "anyhow", "assert_cmd", "chrono", "clap", + "indexmap", "pest", "pest_derive", "predicates", @@ -109,7 +103,6 @@ dependencies = [ "serial_test", "tempfile", "thiserror 1.0.69", - "uuid", ] [[package]] @@ -459,6 +452,7 @@ checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9" dependencies = [ "equivalent", "hashbrown", + "serde", ] [[package]] @@ -964,17 +958,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" -[[package]] -name = "uuid" -version = "1.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" -dependencies = [ - "getrandom", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "version_check" version = "0.9.5" diff --git a/Cargo.toml b/Cargo.toml index 7c17dc0..728a119 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,18 +24,14 @@ pest_derive = "2.7" # Serialization serde = { version = "1.0", features = ["derive"] } serde_yaml = "0.9" +indexmap = { version = "2.2", features = ["serde"] } # Error handling -anyhow = "1.0" thiserror = "1.0" # Utilities -uuid = { version = "1.5", features = ["v4"] } chrono = { version = "0.4", features = ["serde"] } -# Template and file operations (for boilerplate generation) -tempfile = "3.8" - [dev-dependencies] # Testing assert_cmd = "2.0" diff --git a/LICENSE b/LICENSE index 5713225..310c764 100644 --- 
a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2025 - Arnaud FISCHER (alias Jeck0v) +Copyright (c) 2026 - Arnaud FISCHER (alias Jeck0v) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 001a24a..8939c54 100644 --- a/README.md +++ b/README.md @@ -1,33 +1,25 @@ -# Athena - Production-Ready DevOps Toolkit +# Athena - Declarative DevOps Toolkit [![Rust](https://img.shields.io/badge/Rust-1.70+-orange.svg)](https://www.rust-lang.org) [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) [![Version](https://img.shields.io/badge/Version-0.1.0-green.svg)](Cargo.toml) -Athena is a powerful CLI tool for back-end developers and DevOps engineers that transforms a COBOL-inspired DSL into production-ready Docker Compose configurations with minimal effort. +Athena is an open source CLI tool for back-end developers and DevOps engineers that turns a COBOL-inspired DSL into clean, consistent Docker Compose configurations. -Built with performance and maintainability in mind, Athena uses intelligent defaults and modern Docker standards to generate optimized configurations with minimal configuration. +The goal is simple: reduce YAML noise, enforce sane defaults, and make infrastructure definitions easier to read, validate and evolve. -## Why Athena DSL? - -Writing infrastructure in plain YAML often leads to: +Athena focuses on correctness, clarity and automation first. Production hardening is intentionally progressive and opinionated, not magic. -- **Repetition**: ports, env vars, healthchecks duplicated across files -- **Verbosity**: even small projects need hundreds of lines of config -- **Errors**: indentation, misplaced keys, and subtle schema mistakes -- **Low readability**: hard for newcomers to understand what's happening +## Why Athena DSL? 
-Athena introduces a **COBOL-inspired DSL** designed for clarity and speed: +Writing infrastructure directly in YAML often leads to repetitive, fragile and hard to review configurations. -### Advantages over plain YAML -- **Declarative & explicit**: easy to read and understand at a glance -- **Minimal boilerplate**: no need to repeat Docker defaults -- **Error-resistant**: parser catches common mistakes early -- **Smart defaults**: healthchecks, restart policies, and networks added automatically -- **Composable**: same DSL can currently generate Docker Compose, and in the future Kubernetes and Terraform +Athena introduces a DSL designed to be explicit, readable and tooling-friendly, while remaining close to Docker concepts. ### Example + Instead of writing verbose YAML: + ```yaml services: backend: @@ -47,7 +39,8 @@ services: retries: 5 ``` -You just write: +You write: + ```athena DEPLOYMENT-ID MY_APP @@ -63,150 +56,104 @@ IMAGE-ID postgres:15 END SERVICE ``` -Athena expands this into **production-ready Docker Compose** with all the right defaults. +Athena expands this into a complete Docker Compose file with validated structure and consistent defaults. -## Enhanced Error Handling +## Quick Start -Athena now features **revolutionary error handling** with precise location information and intelligent suggestions: +```bash +git clone https://github.com/Jeck0v/Athena +cd athena +cargo install --path . 
-### Before (Cryptic Errors) -``` -Error: Parse error: Expected athena_file rule +athena build deploy.ath ``` -### After (Enhanced Errors) -``` -Error: Parse error at line 8, column 1: Missing 'END SERVICE' statement - | - 8 | # Missing END SERVICE for demonstration - | ^ Error here +## Usage -Suggestion: Each SERVICE block must be closed with 'END SERVICE' +```bash +athena build deploy.ath # Generate docker-compose.yml +athena build deploy.ath -o custom.yml # Custom output file +athena validate deploy.ath # Validate syntax only +athena info # Show DSL information +athena info --examples # Show usage examples +athena info --directives # Show all directives ``` -### Smart Validation with Suggestions -``` -Error: Port conflict detected! Host port 8080 is used by multiple services: app1, app2 -Affected services: app1, app2 +If no file is specified, Athena looks for a `.ath` file in the current directory. -Suggestion: Use different host ports, e.g., 8080, 8081 -``` +## What Athena Handles -**[Learn more about Enhanced Error Handling →](docs/ERROR_HANDLING.md)** +- **Service type detection** - recognizes Database, Cache, WebApp and Proxy patterns from the image name +- **Healthchecks** - generates appropriate healthchecks per service type with tuned intervals +- **Restart policies** - `always` for databases and caches, `unless-stopped` for applications +- **Network isolation** - automatic bridge network shared across services +- **Dependency ordering** - services are sorted topologically in the output +- **Dockerfile fallback** - no image specified means Athena looks for a Dockerfile +- **Resource limits** - CPU and memory constraints via `RESOURCE-LIMITS` +- **Metadata labels** - every service is tagged with project, type and generation date +- **Docker Swarm** - replicas, update config and overlay networks when needed -## Quick Start +Everything Athena adds is visible in the generated output. No hidden behavior. 
-### Installation -```bash -# Install from source -git clone https://github.com/Jeck0v/Athena -cd athena -cargo install --path . +## Error Handling + +Athena provides precise parser errors with line and column information, visual context and actionable suggestions. -# Verify installation -athena --version ``` +Error: Parse error at line 8, column 1: Missing 'END SERVICE' statement + | + 8 | # Missing END SERVICE + | ^ Error here -### Generate Docker Compose -```bash -# Create a simple deploy.ath file -echo 'DEPLOYMENT-ID MY_APP +Suggestion: Each SERVICE block must be closed with 'END SERVICE' +``` -SERVICES SECTION +Validation also catches port conflicts, invalid service references and circular dependencies before any file is generated. -SERVICE backend -PORT-MAPPING 8000 TO 8000 -ENV-VARIABLE {{DATABASE_URL}} -END SERVICE +See [Error Handling documentation](docs/ERROR_HANDLING.md) for the full reference. -SERVICE database -IMAGE-ID postgres:15 -END SERVICE' > deploy.ath +## DSL Reference -# Generate production-ready docker-compose.yml -athena build deploy.ath -``` +| Directive | Example | +|---|---| +| `DEPLOYMENT-ID` | `DEPLOYMENT-ID my_project` | +| `VERSION-ID` | `VERSION-ID 1.0.0` | +| `NETWORK-NAME` | `NETWORK-NAME app_network` | +| `IMAGE-ID` | `IMAGE-ID postgres:15` | +| `PORT-MAPPING` | `PORT-MAPPING 8080 TO 80` | +| `ENV-VARIABLE` | `ENV-VARIABLE {{DATABASE_URL}}` | +| `COMMAND` | `COMMAND "npm start"` | +| `VOLUME-MAPPING` | `VOLUME-MAPPING "./data" TO "/app/data"` | +| `DEPENDS-ON` | `DEPENDS-ON database` | +| `HEALTH-CHECK` | `HEALTH-CHECK "curl -f http://localhost/health"` | +| `RESTART-POLICY` | `RESTART-POLICY always` | +| `RESOURCE-LIMITS` | `RESOURCE-LIMITS CPU "0.5" MEMORY "512M"` | +| `REPLICAS` | `REPLICAS 3` | -## Key Features - -### Enhanced Error Handling System (New!) 
-- **Line & Column Precision** => Exact error locations with visual context -- **Intelligent Suggestions** => Automatic recommendations for common fixes -- **Advanced Validation** => Port conflicts, service references, circular dependencies -- **Fail-Fast Processing** => Immediate feedback with no partial generation - -### Intelligent Defaults 2025+ -- Auto check for the Dockerfile -- Auto-detects service types database, Cache, WebApp, Proxy patterns -- Smart restart policies `always` for databases, `unless-stopped` for apps -- Optimized health checks different intervals per service type -- Container naming follows modern conventions (`project-service`) - -### Docker-First Approach -- Dockerfile by default => No image? Just dont configure it and athena will check for your Dockerfile nativement -- Intelligent networking => Auto-configured networks with proper isolation -- Production-ready => Security, resource limits, and health monitoring -- Standards compliant => Follows Docker Compose 2025 best practices - -### Performance Optimized -- Topological sorting => Services ordered by dependencies automatically -- Iterative validation => Fast circular dependency detection -- Optimized parsing => **<1ms parse time, <2ms generation** -- Memory efficient => Pre-allocated structures for large compositions - -### Syntax Highlighting (SOON) -- **Beautiful DSL highlighting** for `.ath` files with customizable colors -- **Zed editor extension** ready to install in `syntax-highlighting/` -- **Smart color coding** for keywords, directives, template variables, and more -- **Easy customization** via `colors.json` make it your own! +Full syntax documentation: [DSL Reference](docs/DSL_REFERENCE.md) ## Documentation -### Core Documentation -- [Enhanced Error Handling (**New**)](docs/ERROR_HANDLING.md) - Complete guide to Athena's advanced error system. -- [Syntax Highlighting (**New**)](syntax-highlighting/README.md) - Beautiful colors for `.ath` files in Zed editor. 
-- [Installation Guide](docs/INSTALLATION.md) -- [Docker Compose Generator Usage](docs/DSL_REFERENCE.md) -- [Examples](docs/EXAMPLES.md) - -### Development -- [Architecture Overview](docs/ARCHITECTURE.md) -- [Development Guide](docs/DEVELOPMENT.md) -- [Testing Documentation](docs/TESTING.md) - -## Basic Usage - -```bash -athena build deploy.ath # Generate docker-compose.yml -athena build deploy.ath -o custom.yml # Custom output file -athena validate deploy.ath # Validate syntax only -athena info # Show DSL information -athena info --examples # Show usage examples -athena info --directives # Show all directives -``` +- [DSL Reference](docs/DSL_REFERENCE.md) - complete syntax and directives +- [Error Handling](docs/ERROR_HANDLING.md) - error system and validation rules +- [Architecture](docs/ARCHITECTURE.md) - project structure and design +- [Features](docs/FEATURES.md) - detailed feature documentation +- [Examples](docs/EXAMPLES.md) - example configurations +- [Testing](docs/TESTING.md) - test suite and conventions +- [Development](docs/DEVELOPMENT.md) - contributing and development workflow +- [Installation](docs/INSTALLATION.md) - installation options -## What Athena Adds Automatically +## Design Principles -- Smart service detection (Database, Cache, WebApp, Proxy) -- Optimized health checks with service-specific intervals -- Production restart policies based on service type -- Modern container naming (`project-service`) -- Metadata labels for tracking and management -- Resource management with deploy sections -- Network isolation with custom networks -- Dockerfile integration when no image specified -- Dependency ordering with topological sort +Athena is opinionated but transparent. Everything it adds is explicit in the generated output. -## Acknowledgments +Defaults are meant to be reasonable starting points, not final production guarantees. You are expected to review and adapt the output to your actual deployment constraints. 
-- **Pest** for powerful parsing capabilities -- **Clap** for excellent CLI framework -- **Docker Community** for container standards -- **Rust Community** for the amazing ecosystem +Athena does not hide Docker. It reduces friction around it. ## License -This project is licensed under the MIT License see the [LICENSE](LICENSE) file for details. +MIT License. See [LICENSE](LICENSE) for details. --- diff --git a/docker-compose.yml b/docker-compose.yml index 41352d7..abc9dee 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,56 +1,74 @@ # Generated by Athena v0.1.0 from test_no_conflicts deployment -# Developed by UNFAIR Team: https://github.com/Jeck0v/Athena -# Generated: 2025-11-24 09:32:38 UTC +# Developed by UNFAIR Team: https://github.com/Jeck0v/Athena +# Generated: 2026-02-11 00:57:50 UTC # Features: Intelligent defaults, optimized networking, enhanced health checks # Services: 3 configured with intelligent defaults services: - app3: - image: apache:latest - container_name: test-no-conflicts-app3 - ports: - - 9000:80 - restart: always - networks: - - test_no_conflicts_network - pull_policy: missing - labels: - athena.type: proxy - athena.project: test_no_conflicts - athena.service: app3 - athena.generated: 2025-11-24 - app1: image: nginx:alpine - container_name: test-no-conflicts-app1 ports: - 8080:80 + healthcheck: + test: + - CMD-SHELL + - curl -f http://localhost:80/ || exit 1 + interval: 20s + timeout: 5s + retries: 3 + start_period: 30s restart: always networks: - test_no_conflicts_network - pull_policy: missing labels: - athena.type: proxy + athena.generated: 2026-02-11 athena.project: test_no_conflicts athena.service: app1 - athena.generated: 2025-11-24 + athena.type: proxy app2: image: httpd:alpine - container_name: test-no-conflicts-app2 ports: - 8081:8000 + healthcheck: + test: + - CMD-SHELL + - curl -f http://localhost:8000/ || exit 1 + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s restart: unless-stopped networks: - 
test_no_conflicts_network - pull_policy: missing labels: - athena.service: app2 + athena.generated: 2026-02-11 athena.project: test_no_conflicts + athena.service: app2 athena.type: generic - athena.generated: 2025-11-24 + + app3: + image: apache:latest + ports: + - 9000:80 + healthcheck: + test: + - CMD-SHELL + - curl -f http://localhost:80/ || exit 1 + interval: 20s + timeout: 5s + retries: 3 + start_period: 30s + restart: always + networks: + - test_no_conflicts_network + labels: + athena.generated: 2026-02-11 + athena.project: test_no_conflicts + athena.service: app3 + athena.type: proxy networks: test_no_conflicts_network: driver: bridge -name: test_no_conflicts \ No newline at end of file +name: test-no-conflicts \ No newline at end of file diff --git a/src/athena/dockerfile.rs b/src/athena/dockerfile.rs index 048c9c8..e1c28ae 100644 --- a/src/athena/dockerfile.rs +++ b/src/athena/dockerfile.rs @@ -1,7 +1,9 @@ +use std::collections::HashSet; +use std::fmt::Write; use std::fs; use std::path::Path; -use std::collections::HashSet; -use crate::athena::error::{AthenaResult, AthenaError}; + +use crate::athena::error::{AthenaError, AthenaResult}; #[derive(Debug, Clone, PartialEq)] pub struct DockerfileArg { @@ -14,105 +16,84 @@ pub struct DockerfileArg { pub struct DockerfileAnalysis { pub args: Vec, pub dockerfile_path: String, - #[allow(dead_code)] - pub content: String, } -/// Parse Dockerfile and extract ARG declarations +/// Parse Dockerfile and extract ARG declarations. pub fn analyze_dockerfile>(dockerfile_path: P) -> AthenaResult { let path_str = dockerfile_path.as_ref().to_string_lossy().to_string(); - - // Check if file exists + if !dockerfile_path.as_ref().exists() { return Err(AthenaError::config_error(format!( - "Dockerfile not found: {}. CREATE-ARGS validation requires a Dockerfile to be present.", - path_str + "Dockerfile not found: {path_str}. CREATE-ARGS validation requires a Dockerfile to be present." 
))); } - // Read file content - let content = fs::read_to_string(&dockerfile_path) - .map_err(|e| AthenaError::config_error(format!( - "Failed to read Dockerfile '{}': {}", - path_str, e - )))?; + let content = fs::read_to_string(&dockerfile_path).map_err(|e| { + AthenaError::config_error(format!("Failed to read Dockerfile '{path_str}': {e}")) + })?; - // Parse ARG declarations let args = extract_arg_declarations(&content)?; Ok(DockerfileAnalysis { args, dockerfile_path: path_str, - content, }) } -/// Extract ARG declarations from Dockerfile content +/// Extract ARG declarations from Dockerfile content. fn extract_arg_declarations(content: &str) -> AthenaResult> { let mut args = Vec::new(); - + for (line_number, line) in content.lines().enumerate() { let trimmed = line.trim(); - + // Skip comments and empty lines if trimmed.starts_with('#') || trimmed.is_empty() { continue; } - + // Look for ARG instructions if let Some(arg_line) = trimmed.strip_prefix("ARG ") { if let Some(dockerfile_arg) = parse_arg_line(arg_line, line_number + 1)? { args.push(dockerfile_arg); } } - // Handle multi-line ARG (less common but possible) - else if trimmed.eq_ignore_ascii_case("ARG") { - // This would be a syntax error in Dockerfile, but we can handle it gracefully - continue; - } + // Handle bare "ARG" without a name (Dockerfile syntax error, skip gracefully) } Ok(args) } -/// Parse a single ARG line from Dockerfile +/// Parse a single ARG line from Dockerfile. 
fn parse_arg_line(arg_line: &str, line_number: usize) -> AthenaResult> { let arg_line = arg_line.trim(); - - // Handle ARG NAME=default_value + if let Some(eq_pos) = arg_line.find('=') { let name = arg_line[..eq_pos].trim().to_string(); let default_value = arg_line[eq_pos + 1..].trim(); - - // Validate argument name + if !is_valid_arg_name(&name) { return Err(AthenaError::config_error(format!( - "Invalid ARG name '{}' at line {} in Dockerfile", - name, line_number + "Invalid ARG name '{name}' at line {line_number} in Dockerfile" ))); } - - // Remove quotes if present - let cleaned_default = clean_dockerfile_value(default_value); - + + let cleaned_default = strip_quotes(default_value); + Ok(Some(DockerfileArg { name, default_value: Some(cleaned_default), line_number, })) - } - // Handle ARG NAME (no default value) - else { - let name = arg_line.trim().to_string(); - - // Validate argument name + } else { + let name = arg_line.to_string(); + if !is_valid_arg_name(&name) { return Err(AthenaError::config_error(format!( - "Invalid ARG name '{}' at line {} in Dockerfile", - name, line_number + "Invalid ARG name '{name}' at line {line_number} in Dockerfile" ))); } - + Ok(Some(DockerfileArg { name, default_value: None, @@ -121,185 +102,159 @@ fn parse_arg_line(arg_line: &str, line_number: usize) -> AthenaResult bool { - if name.is_empty() { - return false; - } - - // Docker ARG names can contain alphanumeric characters and underscores - // and must start with a letter or underscore - let chars: Vec = name.chars().collect(); - - // Check first character - if !chars[0].is_alphabetic() && chars[0] != '_' { - return false; - } - - // Check remaining characters - for &c in &chars[1..] 
{ - if !c.is_alphanumeric() && c != '_' { - return false; - } + let mut chars = name.chars(); + + match chars.next() { + None => return false, + Some(c) if !c.is_alphabetic() && c != '_' => return false, + _ => {} } - - true + + chars.all(|c| c.is_alphanumeric() || c == '_') } -/// Clean default values by removing surrounding quotes -fn clean_dockerfile_value(value: &str) -> String { +/// Strip surrounding quotes (single or double) from a value. +fn strip_quotes(value: &str) -> String { let trimmed = value.trim(); - - // Remove double quotes - if trimmed.starts_with('"') && trimmed.ends_with('"') && trimmed.len() >= 2 { - return trimmed[1..trimmed.len()-1].to_string(); + + if let Some(inner) = trimmed.strip_prefix('"').and_then(|s| s.strip_suffix('"')) { + return inner.to_string(); } - - // Remove single quotes - if trimmed.starts_with('\'') && trimmed.ends_with('\'') && trimmed.len() >= 2 { - return trimmed[1..trimmed.len()-1].to_string(); + + if let Some(inner) = trimmed.strip_prefix('\'').and_then(|s| s.strip_suffix('\'')) { + return inner.to_string(); } - + trimmed.to_string() } -/// Validate BUILD-ARGS against Dockerfile ARGs +/// Validate BUILD-ARGS against Dockerfile ARGs. 
pub fn validate_build_args_against_dockerfile( build_args: &std::collections::HashMap, dockerfile_analysis: &DockerfileAnalysis, -) -> AthenaResult> { +) -> Vec { let mut warnings = Vec::new(); - - // Create a set of available ARGs for quick lookup + let available_args: HashSet = dockerfile_analysis .args .iter() .map(|arg| arg.name.clone()) .collect(); - - // Check each BUILD-ARG against Dockerfile + for build_arg_name in build_args.keys() { if !available_args.contains(build_arg_name) { - // Find similar ARG names for suggestions let suggestions = find_similar_arg_names(build_arg_name, &available_args); - + let mut warning = format!( - "BUILD-ARG '{}' not found in Dockerfile '{}'", - build_arg_name, dockerfile_analysis.dockerfile_path + "BUILD-ARG '{build_arg_name}' not found in Dockerfile '{}'", + dockerfile_analysis.dockerfile_path ); - + if !available_args.is_empty() { - warning.push_str(&format!( + let _ = write!( + warning, "\nAvailable ARGs in Dockerfile: {}", available_args.iter().cloned().collect::>().join(", ") - )); + ); } - + if !suggestions.is_empty() { - warning.push_str(&format!( - "\nDid you mean: {}?", - suggestions.join(", ") - )); + let _ = write!(warning, "\nDid you mean: {}?", suggestions.join(", ")); } - + warnings.push(warning); } } - - Ok(warnings) + + warnings } -/// Find ARG names similar to the given name (simple Levenshtein-like approach) +/// Find ARG names similar to the given name for suggestion purposes. 
fn find_similar_arg_names(target: &str, available: &HashSet) -> Vec { let mut similar = Vec::new(); let target_lower = target.to_lowercase(); - + for arg_name in available { let arg_lower = arg_name.to_lowercase(); - - // Exact match (case-insensitive) + if arg_lower == target_lower { similar.push(arg_name.clone()); continue; } - - // Contains or is contained + if arg_lower.contains(&target_lower) || target_lower.contains(&arg_lower) { similar.push(arg_name.clone()); continue; } - - // Simple similarity check (common prefix/suffix) - if let Some(similarity) = calculate_similarity(&target_lower, &arg_lower) { - if similarity > 0.6 { - similar.push(arg_name.clone()); - } + + if calculate_similarity(&target_lower, &arg_lower) > 0.6 { + similar.push(arg_name.clone()); } } - - // Sort by similarity (exact matches first, then by name) + similar.sort(); - similar.truncate(3); // Limit suggestions to top 3 - + similar.truncate(3); + similar } -/// Calculate simple similarity score between two strings -fn calculate_similarity(a: &str, b: &str) -> Option { +/// Calculate a simple positional similarity score between two strings. +/// +/// Returns a value between 0.0 (no similarity) and 1.0 (identical). 
+fn calculate_similarity(a: &str, b: &str) -> f32 { if a.is_empty() || b.is_empty() { - return Some(0.0); + return 0.0; } - - let len_a = a.len(); - let len_b = b.len(); - let max_len = len_a.max(len_b); - - // Simple approach: count common characters at same positions - let common_chars = a.chars() - .zip(b.chars()) - .filter(|(a, b)| a == b) - .count(); - - Some(common_chars as f32 / max_len as f32) + + let max_len = a.len().max(b.len()); + + let common_chars = a.chars().zip(b.chars()).filter(|(ca, cb)| ca == cb).count(); + + common_chars as f32 / max_len as f32 } #[cfg(test)] mod tests { use super::*; use std::collections::HashMap; - + #[test] fn test_parse_arg_line_with_default() { let result = parse_arg_line("NODE_VERSION=18", 1).unwrap(); assert!(result.is_some()); - + let arg = result.unwrap(); assert_eq!(arg.name, "NODE_VERSION"); assert_eq!(arg.default_value, Some("18".to_string())); assert_eq!(arg.line_number, 1); } - + #[test] fn test_parse_arg_line_without_default() { let result = parse_arg_line("BUILD_ENV", 2).unwrap(); assert!(result.is_some()); - + let arg = result.unwrap(); assert_eq!(arg.name, "BUILD_ENV"); assert_eq!(arg.default_value, None); assert_eq!(arg.line_number, 2); } - + #[test] fn test_parse_arg_line_with_quoted_default() { let result = parse_arg_line(r#"APP_NAME="my-app""#, 3).unwrap(); assert!(result.is_some()); - + let arg = result.unwrap(); assert_eq!(arg.name, "APP_NAME"); assert_eq!(arg.default_value, Some("my-app".to_string())); } - + #[test] fn test_extract_arg_declarations() { let dockerfile_content = r#" @@ -314,23 +269,23 @@ ARG API_URL ARG DEBUG="false" RUN echo "Building app" "#; - + let args = extract_arg_declarations(dockerfile_content).unwrap(); assert_eq!(args.len(), 4); - + assert_eq!(args[0].name, "NODE_ENV"); assert_eq!(args[0].default_value, Some("production".to_string())); - + assert_eq!(args[1].name, "PORT"); assert_eq!(args[1].default_value, Some("3000".to_string())); - + assert_eq!(args[2].name, "API_URL"); 
assert_eq!(args[2].default_value, None); - + assert_eq!(args[3].name, "DEBUG"); assert_eq!(args[3].default_value, Some("false".to_string())); } - + #[test] fn test_validate_build_args_success() { let mut dockerfile_args = Vec::new(); @@ -344,68 +299,72 @@ RUN echo "Building app" default_value: None, line_number: 2, }); - + let analysis = DockerfileAnalysis { args: dockerfile_args, dockerfile_path: "Dockerfile".to_string(), - content: "".to_string(), }; - + let mut build_args = HashMap::new(); build_args.insert("NODE_VERSION".to_string(), "20".to_string()); build_args.insert("BUILD_ENV".to_string(), "production".to_string()); - - let warnings = validate_build_args_against_dockerfile(&build_args, &analysis).unwrap(); + + let warnings = validate_build_args_against_dockerfile(&build_args, &analysis); assert!(warnings.is_empty()); } - + #[test] fn test_validate_build_args_with_warnings() { - let dockerfile_args = vec![ - DockerfileArg { - name: "NODE_VERSION".to_string(), - default_value: Some("18".to_string()), - line_number: 1, - }, - ]; - + let dockerfile_args = vec![DockerfileArg { + name: "NODE_VERSION".to_string(), + default_value: Some("18".to_string()), + line_number: 1, + }]; + let analysis = DockerfileAnalysis { args: dockerfile_args, dockerfile_path: "Dockerfile".to_string(), - content: "".to_string(), }; - + let mut build_args = HashMap::new(); build_args.insert("NODEJS_VERSION".to_string(), "20".to_string()); // Typo build_args.insert("UNKNOWN_ARG".to_string(), "value".to_string()); - - let warnings = validate_build_args_against_dockerfile(&build_args, &analysis).unwrap(); + + let warnings = validate_build_args_against_dockerfile(&build_args, &analysis); assert_eq!(warnings.len(), 2); - - // Check that both invalid args are mentioned in warnings + let warning_text = warnings.join(" "); assert!(warning_text.contains("NODEJS_VERSION")); assert!(warning_text.contains("UNKNOWN_ARG")); } - + #[test] fn test_is_valid_arg_name() { 
assert!(is_valid_arg_name("NODE_VERSION")); assert!(is_valid_arg_name("API_URL")); assert!(is_valid_arg_name("_PRIVATE")); assert!(is_valid_arg_name("VERSION123")); - + assert!(!is_valid_arg_name("")); assert!(!is_valid_arg_name("123VERSION")); // Starts with number - assert!(!is_valid_arg_name("API-URL")); // Contains hyphen - assert!(!is_valid_arg_name("API.URL")); // Contains dot + assert!(!is_valid_arg_name("API-URL")); // Contains hyphen + assert!(!is_valid_arg_name("API.URL")); // Contains dot } - + + #[test] + fn test_strip_quotes() { + assert_eq!(strip_quotes("\"production\""), "production"); + assert_eq!(strip_quotes("'development'"), "development"); + assert_eq!(strip_quotes("plain"), "plain"); + assert_eq!(strip_quotes(" spaced "), "spaced"); + } + #[test] - fn test_clean_dockerfile_value() { - assert_eq!(clean_dockerfile_value("\"production\""), "production"); - assert_eq!(clean_dockerfile_value("'development'"), "development"); - assert_eq!(clean_dockerfile_value("plain"), "plain"); - assert_eq!(clean_dockerfile_value(" spaced "), "spaced"); + fn test_calculate_similarity() { + assert!(calculate_similarity("", "abc") == 0.0); + assert!(calculate_similarity("abc", "") == 0.0); + assert!(calculate_similarity("abc", "abc") == 1.0); + assert!(calculate_similarity("abc", "axc") > 0.5); + assert!(calculate_similarity("abc", "xyz") == 0.0); } } \ No newline at end of file diff --git a/src/athena/error.rs b/src/athena/error.rs index cff187d..bea4b24 100644 --- a/src/athena/error.rs +++ b/src/athena/error.rs @@ -1,5 +1,5 @@ +use std::fmt::{self, Write}; use thiserror::Error; -use std::fmt; pub type AthenaResult = Result; @@ -20,10 +20,6 @@ pub enum AthenaError { #[error("{0}")] ValidationError(EnhancedValidationError), - - #[error("Template error: {0}")] - #[allow(dead_code)] - TemplateError(String), } #[derive(Debug, Clone)] @@ -36,7 +32,6 @@ pub struct ErrorLocation { pub struct EnhancedParseError { pub message: String, pub location: Option, - pub context: 
Option, pub suggestion: Option, pub file_content: Option, } @@ -44,44 +39,32 @@ pub struct EnhancedParseError { #[derive(Debug, Clone)] pub struct EnhancedValidationError { pub message: String, - #[allow(dead_code)] - pub error_type: ValidationErrorType, pub suggestion: Option, pub related_services: Vec, } -#[derive(Debug, Clone)] -pub enum ValidationErrorType { - PortConflict, - ServiceReference, - CircularDependency, - MissingConfiguration, - InvalidFormat, -} - impl fmt::Display for EnhancedParseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some(location) = &self.location { - write!(f, "Parse error at line {}, column {}: {}", - location.line, location.column, self.message)?; - + write!( + f, + "Parse error at line {}, column {}: {}", + location.line, location.column, self.message + )?; + if let Some(content) = &self.file_content { if let Some(context_display) = self.format_context(content) { - write!(f, "\n{}", context_display)?; + write!(f, "\n{context_display}")?; } } } else { write!(f, "Parse error: {}", self.message)?; } - - if let Some(context) = &self.context { - write!(f, "\n{}", context)?; - } - + if let Some(suggestion) = &self.suggestion { - write!(f, "\n\nSuggestion: {}", suggestion)?; + write!(f, "\n\nSuggestion: {suggestion}")?; } - + Ok(()) } } @@ -89,15 +72,15 @@ impl fmt::Display for EnhancedParseError { impl fmt::Display for EnhancedValidationError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Validation error: {}", self.message)?; - + if !self.related_services.is_empty() { write!(f, "\nAffected services: {}", self.related_services.join(", "))?; } - + if let Some(suggestion) = &self.suggestion { - write!(f, "\n\nSuggestion: {}", suggestion)?; + write!(f, "\n\nSuggestion: {suggestion}")?; } - + Ok(()) } } @@ -107,148 +90,110 @@ impl EnhancedParseError { Self { message, location: None, - context: None, suggestion: None, file_content: None, } } - + pub fn with_location(mut self, line: usize, 
column: usize) -> Self { self.location = Some(ErrorLocation { line, column }); self } - - #[allow(dead_code)] - pub fn with_context(mut self, context: String) -> Self { - self.context = Some(context); - self - } - + pub fn with_suggestion(mut self, suggestion: String) -> Self { self.suggestion = Some(suggestion); self } - + pub fn with_file_content(mut self, content: String) -> Self { self.file_content = Some(content); self } - + fn format_context(&self, content: &str) -> Option { let location = self.location.as_ref()?; let lines: Vec<&str> = content.lines().collect(); - + if location.line == 0 || location.line > lines.len() { return None; } - + let line_idx = location.line - 1; let line = lines[line_idx]; - + let mut result = String::new(); - result.push_str(" |\n"); - result.push_str(&format!("{:2} | {}\n", location.line, line)); - result.push_str(" | "); - + let _ = writeln!(result, " |"); + let _ = writeln!(result, "{:2} | {line}", location.line); + let _ = write!(result, " | "); + for _ in 0..location.column.saturating_sub(1) { result.push(' '); } result.push_str("^ Error here"); - + Some(result) } } impl EnhancedValidationError { - pub fn new(message: String, error_type: ValidationErrorType) -> Self { + pub fn new(message: String) -> Self { Self { message, - error_type, suggestion: None, related_services: Vec::new(), } } - + pub fn with_suggestion(mut self, suggestion: String) -> Self { self.suggestion = Some(suggestion); self } - + pub fn with_services(mut self, services: Vec) -> Self { self.related_services = services; self } - - + pub fn service_reference(service: &str, dependency: &str, available: &[String]) -> Self { let message = format!( - "Service '{}' depends on '{}' which doesn't exist", - service, dependency + "Service '{service}' depends on '{dependency}' which doesn't exist" ); - + let suggestion = format!( "Available services: {}. 
Check the service name in your DEPENDS-ON declaration", available.join(", ") ); - - Self::new(message, ValidationErrorType::ServiceReference) + + Self::new(message) .with_suggestion(suggestion) .with_services(vec![service.to_string(), dependency.to_string()]) } - + pub fn circular_dependency(service: &str) -> Self { let message = format!( - "Circular dependency detected involving service '{}'", - service + "Circular dependency detected involving service '{service}'" ); - - let suggestion = "Check the DEPENDS-ON declarations in your .ath file and remove circular dependencies".to_string(); - - Self::new(message, ValidationErrorType::CircularDependency) + + let suggestion = + "Check the DEPENDS-ON declarations in your .ath file and remove circular dependencies" + .to_string(); + + Self::new(message) .with_suggestion(suggestion) .with_services(vec![service.to_string()]) } } - impl AthenaError { - #[allow(dead_code)] - pub fn parse_error_simple>(msg: T) -> Self { - AthenaError::ParseError(EnhancedParseError::new(msg.into())) - } - - #[allow(dead_code)] - pub fn parse_error_with_location>(msg: T, line: usize, column: usize) -> Self { - AthenaError::ParseError( - EnhancedParseError::new(msg.into()) - .with_location(line, column) - ) - } - - #[allow(dead_code)] pub fn parse_error_enhanced(error: EnhancedParseError) -> Self { AthenaError::ParseError(error) } - #[allow(dead_code)] pub fn config_error>(msg: T) -> Self { AthenaError::ConfigError(msg.into()) } - #[allow(dead_code)] - pub fn validation_error_simple>(msg: T) -> Self { - AthenaError::ValidationError( - EnhancedValidationError::new(msg.into(), ValidationErrorType::InvalidFormat) - ) - } - - #[allow(dead_code)] pub fn validation_error_enhanced(error: EnhancedValidationError) -> Self { AthenaError::ValidationError(error) } - - #[allow(dead_code)] - pub fn template_error>(msg: T) -> Self { - AthenaError::TemplateError(msg.into()) - } } \ No newline at end of file diff --git a/src/athena/generator/compose.rs 
b/src/athena/generator/compose.rs index 0a82938..efb5a9f 100644 --- a/src/athena/generator/compose.rs +++ b/src/athena/generator/compose.rs @@ -1,52 +1,26 @@ +use indexmap::IndexMap; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; +use std::collections::BTreeMap; +use std::fmt::Write; use super::defaults::{DefaultsEngine, EnhancedDockerService}; use crate::athena::dockerfile::{analyze_dockerfile, validate_build_args_against_dockerfile}; use crate::athena::error::{ - AthenaError, AthenaResult, EnhancedValidationError, ValidationErrorType, + AthenaError, AthenaResult, EnhancedValidationError, }; -use crate::athena::parser::ast::*; +use crate::athena::parser::ast::{AthenaFile, NetworkDriver, VolumeDefinition}; #[derive(Debug, Serialize, Deserialize)] pub struct DockerCompose { - services: HashMap, + services: IndexMap, #[serde(skip_serializing_if = "Option::is_none")] - networks: Option>, + networks: Option>, #[serde(skip_serializing_if = "Option::is_none")] - volumes: Option>, + volumes: Option>, #[serde(skip_serializing_if = "Option::is_none")] name: Option, } -// Legacy DockerService - kept for backward compatibility -// Use EnhancedDockerService for new implementations - -#[derive(Debug, Serialize, Deserialize)] -pub struct DockerHealthCheck { - test: Vec, - interval: String, - timeout: String, - retries: u32, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct DockerDeploy { - resources: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct DockerResources { - limits: Option, - reservations: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct ResourceSpec { - cpus: Option, - memory: Option, -} - #[derive(Debug, Serialize, Deserialize)] pub struct DockerNetwork { driver: String, @@ -69,8 +43,8 @@ pub fn generate_docker_compose(athena_file: &AthenaFile) -> AthenaResult let network_name = athena_file.get_network_name(); let mut compose = DockerCompose { - name: Some(project_name.clone()), - services: 
HashMap::new(), + name: Some(project_name.to_lowercase().replace('_', "-")), + services: IndexMap::new(), networks: None, volumes: None, }; @@ -85,8 +59,11 @@ pub fn generate_docker_compose(athena_file: &AthenaFile) -> AthenaResult } } - // Convert services using intelligent defaults - for service in &athena_file.services.services { + // Sort services in dependency order (no-deps first, then dependents) + let sorted_services = topological_sort_services(&athena_file.services.services); + + // Convert services using intelligent defaults, inserting in topological order + for service in &sorted_services { let enhanced_service = DefaultsEngine::create_enhanced_service(service, &network_name, &project_name); compose @@ -106,9 +83,87 @@ pub fn generate_docker_compose(athena_file: &AthenaFile) -> AthenaResult Ok(add_enhanced_yaml_comments(formatted_yaml, athena_file)) } +/// Sort services in topological order: services with no dependencies first, +/// then services that depend on them, etc. Falls back to original order on cycles. 
+fn topological_sort_services(services: &[crate::athena::parser::ast::Service]) -> Vec<&crate::athena::parser::ast::Service> { + use std::collections::{HashMap, HashSet, VecDeque}; + + let name_to_service: HashMap<&str, &crate::athena::parser::ast::Service> = + services.iter().map(|s| (s.name.as_str(), s)).collect(); + + // Build in-degree map + let mut in_degree: HashMap<&str, usize> = HashMap::new(); + let mut dependents: HashMap<&str, Vec<&str>> = HashMap::new(); + + for service in services { + in_degree.entry(service.name.as_str()).or_insert(0); + dependents.entry(service.name.as_str()).or_default(); + for dep in &service.depends_on { + if name_to_service.contains_key(dep.as_str()) { + dependents.entry(dep.as_str()).or_default().push(&service.name); + *in_degree.entry(service.name.as_str()).or_insert(0) += 1; + } + } + } + + // Kahn's algorithm + let mut queue: VecDeque<&str> = in_degree + .iter() + .filter(|(_, °)| deg == 0) + .map(|(&name, _)| name) + .collect(); + + // Sort the initial queue for determinism among peers + let mut queue_vec: Vec<&str> = queue.drain(..).collect(); + queue_vec.sort(); + queue = queue_vec.into_iter().collect(); + + let mut sorted: Vec<&crate::athena::parser::ast::Service> = Vec::with_capacity(services.len()); + let mut visited = HashSet::new(); + + while let Some(current) = queue.pop_front() { + if visited.contains(current) { + continue; + } + visited.insert(current); + + if let Some(&service) = name_to_service.get(current) { + sorted.push(service); + } + + // Collect and sort neighbors for deterministic order among peers + let mut neighbors: Vec<&str> = dependents + .get(current) + .map(|v| v.as_slice()) + .unwrap_or_default() + .to_vec(); + neighbors.sort(); + + for neighbor in neighbors { + if let Some(deg) = in_degree.get_mut(neighbor) { + *deg = deg.saturating_sub(1); + if *deg == 0 { + queue.push_back(neighbor); + } + } + } + } + + // Fallback: if cycle detected, append remaining services in original order + if sorted.len() 
< services.len() { + for service in services { + if !visited.contains(service.name.as_str()) { + sorted.push(service); + } + } + } + + sorted +} + /// Create optimized network configuration with Docker Swarm support -fn create_optimized_networks(athena_file: &AthenaFile) -> HashMap { - let mut networks = HashMap::new(); +fn create_optimized_networks(athena_file: &AthenaFile) -> BTreeMap { + let mut networks = BTreeMap::new(); if let Some(env) = &athena_file.environment { // Use networks defined in environment section @@ -151,8 +206,8 @@ fn create_optimized_networks(athena_file: &AthenaFile) -> HashMap HashMap { - let mut volumes = HashMap::new(); +fn create_optimized_volumes(volume_defs: &[VolumeDefinition]) -> BTreeMap { + let mut volumes = BTreeMap::new(); for vol_def in volume_defs { volumes.insert( vol_def.name.clone(), @@ -179,8 +234,7 @@ fn validate_compose_enhanced( if service.image.is_none() && service.build.is_none() { return Err(AthenaError::validation_error_enhanced( EnhancedValidationError::new( - format!("Service '{}' is missing both image and build configuration", service_name), - ValidationErrorType::MissingConfiguration + format!("Service '{service_name}' is missing both image and build configuration"), ) .with_suggestion("Add IMAGE-ID \"image:tag\" or ensure a Dockerfile exists in the current directory".to_string()) .with_services(vec![service_name.clone()]) @@ -205,8 +259,7 @@ fn validate_compose_enhanced( if !is_valid_port_mapping(port_mapping) { return Err(AthenaError::validation_error_enhanced( EnhancedValidationError::new( - format!("Service '{}' has invalid port mapping: {}", service_name, port_mapping), - ValidationErrorType::InvalidFormat + format!("Service '{service_name}' has invalid port mapping: {port_mapping}"), ) .with_suggestion("Use format: PORT-MAPPING TO , e.g., PORT-MAPPING 8080 TO 80".to_string()) .with_services(vec![service_name.clone()]) @@ -350,7 +403,6 @@ fn detect_port_conflicts(compose: &DockerCompose) -> AthenaResult<()> 
{ port, services.join(", ") ), - ValidationErrorType::PortConflict, ) .with_suggestion(suggestion) .with_services(services); @@ -385,53 +437,32 @@ fn generate_port_suggestions(base_port: &str, count: usize) -> String { } } -/// Validate BUILD-ARGS against Dockerfile ARGs (intelligent validation) +/// Validate BUILD-ARGS against Dockerfile ARGs (intelligent validation). fn validate_dockerfile_build_args(athena_file: &AthenaFile) -> AthenaResult<()> { - // Check each service that has build_args for service in &athena_file.services.services { if let Some(build_args) = &service.build_args { - // Try to find and analyze the Dockerfile - let dockerfile_path = "Dockerfile"; // Default path - - match analyze_dockerfile(dockerfile_path) { - Ok(dockerfile_analysis) => { - // Validate BUILD-ARGS against Dockerfile ARGs - match validate_build_args_against_dockerfile(build_args, &dockerfile_analysis) { - Ok(warnings) => { - // For now, we treat warnings as validation errors - // In the future, we could make this configurable - if !warnings.is_empty() { - let combined_warning = warnings.join("\n\n"); - return Err(AthenaError::validation_error_enhanced( - EnhancedValidationError::new( - format!( - "BUILD-ARGS validation failed for service '{}':\n\n{}", - service.name, combined_warning - ), - ValidationErrorType::InvalidFormat, - ) - .with_suggestion( - "Ensure all BUILD-ARGS correspond to ARG declarations in your Dockerfile".to_string() - ) - .with_services(vec![service.name.clone()]) - )); - } - } - Err(e) => { - // Validation process failed, but we don't want to block builds - eprintln!( - "Warning: Could not validate BUILD-ARGS for service '{}': {}", - service.name, e - ); - } - } - } - Err(_) => { - // Dockerfile not found or not readable - // This is OK - just skip validation for this service - // We could add a warning here in verbose mode - continue; - } + let dockerfile_path = "Dockerfile"; + + let dockerfile_analysis = match analyze_dockerfile(dockerfile_path) { + 
Ok(analysis) => analysis, + Err(_) => continue, + }; + + let warnings = validate_build_args_against_dockerfile(build_args, &dockerfile_analysis); + + if !warnings.is_empty() { + let combined_warning = warnings.join("\n\n"); + return Err(AthenaError::validation_error_enhanced( + EnhancedValidationError::new(format!( + "BUILD-ARGS validation failed for service '{}':\n\n{combined_warning}", + service.name + )) + .with_suggestion( + "Ensure all BUILD-ARGS correspond to ARG declarations in your Dockerfile" + .to_string(), + ) + .with_services(vec![service.name.clone()]), + )); } } } @@ -481,37 +512,38 @@ fn improve_yaml_formatting(yaml: String) -> String { /// Add enhanced YAML comments with metadata and optimization notes fn add_enhanced_yaml_comments(yaml: String, athena_file: &AthenaFile) -> String { - let mut result = String::with_capacity(yaml.len() + 500); // Pre-allocate for better performance + let mut result = String::with_capacity(yaml.len() + 500); - // Enhanced header with metadata - result.push_str(&format!( - "# Generated by Athena v{} from {} deployment\n", + let _ = writeln!( + result, + "# Generated by Athena v{} from {} deployment", env!("CARGO_PKG_VERSION"), athena_file.get_project_name() - )); - result.push_str("# Developed by UNFAIR Team: https://github.com/Jeck0v/Athena \n"); + ); + let _ = writeln!(result, "# Developed by UNFAIR Team: https://github.com/Jeck0v/Athena"); if let Some(deployment) = &athena_file.deployment { if let Some(version) = &deployment.version_id { - result.push_str(&format!("# Project Version: {}\n", version)); + let _ = writeln!(result, "# Project Version: {version}"); } } - result.push_str(&format!( - "# Generated: {}\n", + let _ = writeln!( + result, + "# Generated: {}", chrono::Utc::now().format("%Y-%m-%d %H:%M:%S UTC") - )); + ); - result.push_str( - "# Features: Intelligent defaults, optimized networking, enhanced health checks\n\n", + let _ = writeln!( + result, + "# Features: Intelligent defaults, optimized networking, 
enhanced health checks\n" ); - // Add service count and optimization info let service_count = athena_file.services.services.len(); - result.push_str(&format!( - "# Services: {} configured with intelligent defaults\n\n", - service_count - )); + let _ = writeln!( + result, + "# Services: {service_count} configured with intelligent defaults\n" + ); result.push_str(&yaml); @@ -521,6 +553,7 @@ fn add_enhanced_yaml_comments(yaml: String, athena_file: &AthenaFile) -> String #[cfg(test)] mod tests { use super::*; + use crate::athena::parser::ast::{DeploymentSection, PortMapping, Protocol, Service}; #[test] fn test_enhanced_compose_generation() { @@ -549,7 +582,7 @@ mod tests { assert!(yaml.contains("image: python:3.11-slim")); assert!(yaml.contains("8000:8000")); assert!(yaml.contains("restart: unless-stopped")); - assert!(yaml.contains("container_name: test-project-backend")); + assert!(!yaml.contains("container_name:")); } #[test] diff --git a/src/athena/generator/defaults.rs b/src/athena/generator/defaults.rs index cf4875d..bbc7d86 100644 --- a/src/athena/generator/defaults.rs +++ b/src/athena/generator/defaults.rs @@ -1,41 +1,18 @@ -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use serde::{Deserialize, Serialize}; -use crate::athena::parser::ast::*; +use crate::athena::parser::ast::{ + EnvironmentVariable, FailureAction, PortMapping, Protocol, ResourceLimits, RestartPolicy, + Service, SwarmConfig, VolumeMapping, +}; /// Default Docker Compose configurations based on service patterns and Docker standards #[derive(Debug, Clone)] -#[allow(dead_code)] pub struct ServiceDefaults { pub restart_policy: RestartPolicy, pub health_check_interval: String, pub health_check_timeout: String, pub health_check_retries: u32, pub health_check_start_period: String, - #[allow(dead_code)] - pub network_mode: NetworkMode, - pub pull_policy: PullPolicy, -} - -#[derive(Debug, Clone)] -#[allow(dead_code)] -pub enum NetworkMode { - Bridge, - #[allow(dead_code)] - 
Host, - #[allow(dead_code)] - None, - #[allow(dead_code)] - Custom(String), -} - -#[derive(Debug, Clone)] -#[allow(dead_code)] -pub enum PullPolicy { - #[allow(dead_code)] - Always, - Missing, - #[allow(dead_code)] - Never, } impl Default for ServiceDefaults { @@ -46,8 +23,6 @@ impl Default for ServiceDefaults { health_check_timeout: "10s".to_string(), health_check_retries: 3, health_check_start_period: "40s".to_string(), - network_mode: NetworkMode::Bridge, - pull_policy: PullPolicy::Missing, } } } @@ -60,8 +35,6 @@ pub struct EnhancedDockerService { #[serde(skip_serializing_if = "Option::is_none")] pub build: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub container_name: Option, - #[serde(skip_serializing_if = "Option::is_none")] pub ports: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub environment: Option>, @@ -77,9 +50,8 @@ pub struct EnhancedDockerService { #[serde(skip_serializing_if = "Option::is_none")] pub deploy: Option, pub networks: Vec, - pub pull_policy: String, #[serde(skip_serializing_if = "Option::is_none")] - pub labels: Option>, + pub labels: Option>, } #[derive(Debug, Serialize, Deserialize)] @@ -111,7 +83,7 @@ pub struct EnhancedDeploy { #[serde(skip_serializing_if = "Option::is_none")] pub update_config: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub labels: Option>, + pub labels: Option>, } #[derive(Debug, Serialize, Deserialize)] @@ -207,8 +179,6 @@ impl DefaultsEngine { health_check_timeout: "5s".to_string(), health_check_retries: 5, health_check_start_period: "60s".to_string(), - network_mode: NetworkMode::Bridge, - pull_policy: PullPolicy::Missing, }, ServiceType::Cache => ServiceDefaults { restart_policy: RestartPolicy::Always, @@ -216,8 +186,6 @@ impl DefaultsEngine { health_check_timeout: "3s".to_string(), health_check_retries: 3, health_check_start_period: "20s".to_string(), - network_mode: NetworkMode::Bridge, - pull_policy: PullPolicy::Missing, }, ServiceType::Proxy => 
ServiceDefaults { restart_policy: RestartPolicy::Always, @@ -225,8 +193,6 @@ impl DefaultsEngine { health_check_timeout: "5s".to_string(), health_check_retries: 3, health_check_start_period: "30s".to_string(), - network_mode: NetworkMode::Bridge, - pull_policy: PullPolicy::Missing, }, ServiceType::WebApp => ServiceDefaults { restart_policy: RestartPolicy::UnlessStopped, @@ -234,8 +200,6 @@ impl DefaultsEngine { health_check_timeout: "10s".to_string(), health_check_retries: 3, health_check_start_period: "40s".to_string(), - network_mode: NetworkMode::Bridge, - pull_policy: PullPolicy::Missing, }, ServiceType::Generic => ServiceDefaults::default(), } @@ -251,7 +215,7 @@ impl DefaultsEngine { let defaults = Self::get_defaults_for_type(service_type); let build_config = Self::create_build_config(service, project_name); - let mut enhanced_service = EnhancedDockerService { + let enhanced_service = EnhancedDockerService { // If we have build config with args, don't use image (build takes precedence) image: if build_config.is_some() && service.build_args.is_some() { None @@ -259,7 +223,6 @@ impl DefaultsEngine { service.image.clone() }, build: build_config, - container_name: Some(format!("{}_{}", project_name, service.name)), ports: Self::convert_ports(&service.ports), environment: Self::convert_environment(&service.environment), command: service.command.clone(), @@ -269,19 +232,13 @@ impl DefaultsEngine { } else { Some(service.depends_on.clone()) }, - healthcheck: Self::convert_healthcheck(&service.health_check, &defaults), + healthcheck: Self::convert_healthcheck(&service.health_check, &defaults, service_type, &service.ports), restart: Self::convert_restart_policy(&service.restart, &defaults), deploy: Self::convert_deploy(&service.resources, &service.swarm_config), networks: vec![network_name.to_string()], - pull_policy: Self::convert_pull_policy(&defaults.pull_policy), labels: Some(Self::generate_labels(project_name, &service.name, service_type)), }; - - // Optimize 
container name for readability - enhanced_service.container_name = Some( - format!("{}-{}", project_name.to_lowercase().replace("_", "-"), service.name) - ); - + enhanced_service } @@ -324,7 +281,7 @@ impl DefaultsEngine { for env_var in env_vars { match env_var { EnvironmentVariable::Template(var_name) => { - env_list.push(format!("{}=${{{}}}", var_name, var_name)); + env_list.push(format!("{var_name}=${{{var_name}}}")); } EnvironmentVariable::Literal(value) => { // If it's already in KEY=VALUE format, use as-is @@ -332,7 +289,7 @@ impl DefaultsEngine { if value.contains('=') { env_list.push(value.clone()); } else { - env_list.push(format!("VALUE={}", value)); + env_list.push(format!("VALUE={value}")); } } } @@ -362,11 +319,48 @@ impl DefaultsEngine { } fn convert_healthcheck( - health_check: &Option, - defaults: &ServiceDefaults + health_check: &Option, + defaults: &ServiceDefaults, + service_type: ServiceType, + ports: &[PortMapping], ) -> Option { - health_check.as_ref().map(|cmd| EnhancedHealthCheck { - test: vec!["CMD-SHELL".to_string(), cmd.clone()], + // If the user specified a healthcheck, use it directly + if let Some(cmd) = health_check { + return Some(EnhancedHealthCheck { + test: vec!["CMD-SHELL".to_string(), cmd.clone()], + interval: defaults.health_check_interval.clone(), + timeout: defaults.health_check_timeout.clone(), + retries: defaults.health_check_retries, + start_period: defaults.health_check_start_period.clone(), + }); + } + + // Otherwise, generate an automatic healthcheck based on service type + let auto_cmd = match service_type { + ServiceType::Database => Some("pg_isready -U postgres || mysqladmin ping -h localhost || mongosh --eval 'db.runCommand(\"ping\")' --quiet".to_string()), + ServiceType::Cache => Some("redis-cli ping || echo 'STATS' | nc localhost 11211".to_string()), + ServiceType::Proxy => { + let port = ports.first().map_or(80, |p| p.container_port); + Some(format!("curl -f http://localhost:{port}/ || exit 1")) + } + 
ServiceType::WebApp => { + if let Some(first_port) = ports.first() { + Some(format!("curl -f http://localhost:{}/ || exit 1", first_port.container_port)) + } else { + None + } + } + ServiceType::Generic => { + if let Some(first_port) = ports.first() { + Some(format!("curl -f http://localhost:{}/ || exit 1", first_port.container_port)) + } else { + None + } + } + }; + + auto_cmd.map(|cmd| EnhancedHealthCheck { + test: vec!["CMD-SHELL".to_string(), cmd], interval: defaults.health_check_interval.clone(), timeout: defaults.health_check_timeout.clone(), retries: defaults.health_check_retries, @@ -402,13 +396,23 @@ impl DefaultsEngine { reservations: None, }); - let restart_policy = Some(EnhancedRestartPolicy { - condition: "on-failure".to_string(), - delay: "5s".to_string(), - max_attempts: 3, - window: "120s".to_string(), + // Only add deploy.restart_policy when Swarm features are active. + // In plain Compose mode, the top-level `restart:` field is sufficient. + let has_swarm = swarm_config.as_ref().is_some_and(|s| { + s.replicas.is_some() || s.update_config.is_some() || s.labels.is_some() }); + let restart_policy = if has_swarm { + Some(EnhancedRestartPolicy { + condition: "on-failure".to_string(), + delay: "5s".to_string(), + max_attempts: 3, + window: "120s".to_string(), + }) + } else { + None + }; + let mut enhanced_deploy = EnhancedDeploy { resources: enhanced_resources, restart_policy, @@ -420,7 +424,7 @@ impl DefaultsEngine { // Add Swarm-specific configurations if let Some(swarm) = swarm_config { enhanced_deploy.replicas = swarm.replicas; - enhanced_deploy.labels = swarm.labels.clone(); + enhanced_deploy.labels = swarm.labels.as_ref().map(|l| l.iter().map(|(k, v)| (k.clone(), v.clone())).collect()); if let Some(update_config) = &swarm.update_config { enhanced_deploy.update_config = Some(SwarmUpdateConfig { @@ -442,19 +446,12 @@ impl DefaultsEngine { Some(enhanced_deploy) } - fn convert_pull_policy(pull_policy: &PullPolicy) -> String { - match pull_policy { - 
PullPolicy::Always => "always".to_string(), - PullPolicy::Missing => "missing".to_string(), - PullPolicy::Never => "never".to_string(), - } - } - - fn generate_labels(project_name: &str, service_name: &str, service_type: ServiceType) -> HashMap { - let mut labels = HashMap::new(); + + fn generate_labels(project_name: &str, service_name: &str, service_type: ServiceType) -> BTreeMap { + let mut labels = BTreeMap::new(); labels.insert("athena.project".to_string(), project_name.to_string()); labels.insert("athena.service".to_string(), service_name.to_string()); - labels.insert("athena.type".to_string(), format!("{:?}", service_type).to_lowercase()); + labels.insert("athena.type".to_string(), format!("{service_type:?}").to_lowercase()); labels.insert("athena.generated".to_string(), chrono::Utc::now().format("%Y-%m-%d").to_string()); labels } diff --git a/src/athena/mod.rs b/src/athena/mod.rs index 52f517c..6ea6dc9 100644 --- a/src/athena/mod.rs +++ b/src/athena/mod.rs @@ -5,13 +5,4 @@ pub mod dockerfile; pub use error::{AthenaError, AthenaResult}; pub use parser::parse_athena_file; -pub use generator::generate_docker_compose; - -/// Configuration for Athena operations -#[derive(Debug, Clone, Default)] -#[allow(dead_code)] -pub struct AthenaConfig { - pub output_file: Option, - pub validate_only: bool, - pub verbose: bool, -} \ No newline at end of file +pub use generator::generate_docker_compose; \ No newline at end of file diff --git a/src/athena/parser/ast.rs b/src/athena/parser/ast.rs index f88daf7..e123cb1 100644 --- a/src/athena/parser/ast.rs +++ b/src/athena/parser/ast.rs @@ -162,13 +162,6 @@ impl AthenaFile { .unwrap_or_else(|| format!("{}_network", self.get_project_name().to_lowercase())) } - #[allow(dead_code)] - pub fn get_networks(&self) -> Vec<&NetworkDefinition> { - self.environment - .as_ref() - .map(|e| e.networks.iter().collect()) - .unwrap_or_default() - } } impl Service { @@ -224,15 +217,3 @@ impl UpdateConfig { } } -impl NetworkDefinition { - 
#[allow(dead_code)] - pub fn new(name: String) -> Self { - Self { - name, - driver: None, - attachable: None, - encrypted: None, - ingress: None, - } - } -} \ No newline at end of file diff --git a/src/athena/parser/mod.rs b/src/athena/parser/mod.rs index b7d2d1a..76766ba 100644 --- a/src/athena/parser/mod.rs +++ b/src/athena/parser/mod.rs @@ -1,6 +1,5 @@ pub mod ast; #[allow(clippy::module_inception)] pub mod parser; -pub mod optimized_parser; pub use parser::parse_athena_file; \ No newline at end of file diff --git a/src/athena/parser/optimized_parser.rs b/src/athena/parser/optimized_parser.rs deleted file mode 100644 index 71cb0ed..0000000 --- a/src/athena/parser/optimized_parser.rs +++ /dev/null @@ -1,326 +0,0 @@ -use std::collections::HashMap; -use crate::athena::error::{AthenaError, AthenaResult, EnhancedParseError, EnhancedValidationError, ValidationErrorType}; -use super::ast::*; - -/// Optimized parser with performance improvements and better error handling -#[allow(dead_code)] -pub struct OptimizedParser; - -impl OptimizedParser { - /// Parse Athena file with optimized performance and enhanced error reporting - #[allow(dead_code)] - #[allow(dead_code)] - pub fn parse_with_performance_optimizations(input: &str) -> AthenaResult { - // Pre-validate input for common issues - Self::pre_validate_input(input)?; - - // Use the original parser but with optimizations - let result = super::parser::parse_athena_file(input)?; - - // Post-process for optimization - Self::optimize_ast(result) - } - - /// Pre-validation for common syntax issues to fail fast - #[allow(dead_code)] - fn pre_validate_input(input: &str) -> AthenaResult<()> { - let trimmed = input.trim(); - - if trimmed.is_empty() { - return Err(AthenaError::ParseError(EnhancedParseError::new( - "Input file is empty. 
Please provide a valid .ath file with at least a SERVICES SECTION.".to_string() - ))); - } - - // Check for required SERVICES SECTION - if !trimmed.contains("SERVICES SECTION") { - return Err(AthenaError::ParseError(EnhancedParseError::new( - "Missing required 'SERVICES SECTION'. Every .ath file must contain at least one service definition.".to_string() - ))); - } - - // Check for balanced SERVICE/END SERVICE blocks - let service_count = trimmed.matches("SERVICE ").count(); - let end_service_count = trimmed.matches("END SERVICE").count(); - - if service_count != end_service_count { - return Err(AthenaError::ParseError(EnhancedParseError::new( - format!( - "Unbalanced SERVICE blocks: found {} 'SERVICE' declarations but {} 'END SERVICE' statements. \ - Each SERVICE block must be closed with 'END SERVICE'.", - service_count, end_service_count - ) - ))); - } - - // Check for empty service names - if trimmed.contains("SERVICE \n") || trimmed.contains("SERVICE\n") { - return Err(AthenaError::ParseError(EnhancedParseError::new( - "Empty service name detected. 
Each SERVICE declaration must be followed by a service name.".to_string() - ))); - } - - Ok(()) - } - - /// Optimize the AST after parsing for better runtime performance - #[allow(dead_code)] - fn optimize_ast(mut athena_file: AthenaFile) -> AthenaResult { - // Sort services by dependency order for faster dependency resolution - athena_file.services.services = Self::topological_sort_services(athena_file.services.services)?; - - // Optimize service configurations - for service in &mut athena_file.services.services { - Self::optimize_service(service); - } - - // Set intelligent defaults if missing - Self::apply_intelligent_defaults(&mut athena_file); - - Ok(athena_file) - } - - /// Topological sort services by dependencies for optimal processing order - #[allow(dead_code)] - fn topological_sort_services(services: Vec) -> AthenaResult> { - use std::collections::{HashMap, VecDeque}; - - // Build dependency graph - let mut service_map: HashMap = HashMap::new(); - let mut in_degree: HashMap = HashMap::new(); - let mut graph: HashMap> = HashMap::new(); - - // Initialize structures - for service in services { - service_map.insert(service.name.clone(), service); - } - - for (name, service) in &service_map { - in_degree.insert(name.clone(), 0); - graph.insert(name.clone(), Vec::new()); - - for dep in &service.depends_on { - if !service_map.contains_key(dep) { - return Err(AthenaError::ValidationError(EnhancedValidationError::new( - format!("Service '{}' depends on '{}' which doesn't exist", name, dep), - ValidationErrorType::ServiceReference - ))); - } - } - } - - // Build edges and calculate in-degrees - for (name, service) in &service_map { - for dep in &service.depends_on { - graph.get_mut(dep).unwrap().push(name.clone()); - *in_degree.get_mut(name).unwrap() += 1; - } - } - - // Kahn's algorithm for topological sorting - let mut queue: VecDeque = in_degree.iter() - .filter(|(_, °ree)| degree == 0) - .map(|(name, _)| name.clone()) - .collect(); - - let mut sorted_services 
= Vec::new(); - - while let Some(current) = queue.pop_front() { - sorted_services.push(service_map.remove(¤t).unwrap()); - - for neighbor in &graph[¤t] { - let degree = in_degree.get_mut(neighbor).unwrap(); - *degree -= 1; - if *degree == 0 { - queue.push_back(neighbor.clone()); - } - } - } - - if sorted_services.len() != service_map.len() + sorted_services.len() { - return Err(AthenaError::ValidationError(EnhancedValidationError::new( - "Circular dependency detected in services".to_string(), - ValidationErrorType::CircularDependency - ))); - } - - Ok(sorted_services) - } - - /// Optimize individual service configuration - #[allow(dead_code)] - fn optimize_service(service: &mut Service) { - // Remove duplicate environment variables - service.environment.dedup(); - - // Remove duplicate dependencies - service.depends_on.dedup(); - - // Optimize port mappings (remove duplicates, sort for consistency) - service.ports.dedup_by(|a, b| { - a.host_port == b.host_port && a.container_port == b.container_port - }); - service.ports.sort_by_key(|p| p.host_port); - - // Optimize volume mappings - service.volumes.dedup_by(|a, b| { - a.host_path == b.host_path && a.container_path == b.container_path - }); - } - - /// Apply intelligent defaults based on service patterns - #[allow(dead_code)] - fn apply_intelligent_defaults(athena_file: &mut AthenaFile) { - // Set default deployment if missing - if athena_file.deployment.is_none() { - athena_file.deployment = Some(DeploymentSection { - deployment_id: "athena-project".to_string(), - version_id: Some("1.0.0".to_string()), - }); - } - - // Set default environment if missing - if athena_file.environment.is_none() { - athena_file.environment = Some(EnvironmentSection { - networks: Vec::new(), // Will create default network - volumes: Vec::new(), - secrets: HashMap::new(), - }); - } - - // Apply service-specific defaults - for service in &mut athena_file.services.services { - // Set default restart policy if missing - if 
service.restart.is_none() { - service.restart = Some(match service.image.as_deref() { - Some(img) if img.contains("postgres") || img.contains("mysql") || img.contains("mongodb") => { - RestartPolicy::Always - }, - Some(img) if img.contains("redis") || img.contains("memcached") => { - RestartPolicy::Always - }, - _ => RestartPolicy::UnlessStopped - }); - } - } - } - - /// Parse with caching for repeated parsing operations - #[allow(dead_code)] - pub fn parse_with_cache( - input: &str, - cache: &mut HashMap<u64, AthenaFile> - ) -> AthenaResult<AthenaFile> { - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - - // Create hash of input for caching - let mut hasher = DefaultHasher::new(); - input.hash(&mut hasher); - let input_hash = hasher.finish(); - - // Check cache first - if let Some(cached_result) = cache.get(&input_hash) { - return Ok(cached_result.clone()); - } - - // Parse and cache result - let result = Self::parse_with_performance_optimizations(input)?; - cache.insert(input_hash, result.clone()); - - Ok(result) - } - - /// Validate syntax without full parsing for quick feedback - #[allow(dead_code)] - pub fn quick_syntax_check(input: &str) -> AthenaResult<()> { - Self::pre_validate_input(input)?; - - // Quick regex-based checks for common syntax issues - let lines: Vec<&str> = input.lines().collect(); - - for (line_num, line) in lines.iter().enumerate() { - let line_num = line_num + 1; - let trimmed = line.trim(); - - if trimmed.is_empty() || trimmed.starts_with('#') { - continue; - } - - // Check for malformed directives - if trimmed.starts_with("SERVICE") && !trimmed.contains(' ') && trimmed != "SERVICE" { - return Err(AthenaError::ParseError(EnhancedParseError::new( - format!("Line {}: SERVICE directive requires a service name", line_num) - ))); - } - - if trimmed.contains("PORT-MAPPING") && !trimmed.contains("TO") { - return Err(AthenaError::ParseError(EnhancedParseError::new( - format!("Line {}: PORT-MAPPING requires 'TO' keyword (e.g., 'PORT-MAPPING 
8080 TO 80')", line_num) - ))); - } - - if trimmed.contains("VOLUME-MAPPING") && !trimmed.contains("TO") { - return Err(AthenaError::ParseError(EnhancedParseError::new( - format!("Line {}: VOLUME-MAPPING requires 'TO' keyword", line_num) - ))); - } - } - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - #[allow(dead_code)] - fn test_pre_validation() { - assert!(OptimizedParser::pre_validate_input("").is_err()); - assert!(OptimizedParser::pre_validate_input("DEPLOYMENT-ID test").is_err()); - - let valid_input = r#" - SERVICES SECTION - SERVICE test - IMAGE-ID alpine:latest - END SERVICE - "#; - assert!(OptimizedParser::pre_validate_input(valid_input).is_ok()); - } - - #[test] - #[allow(dead_code)] - fn test_unbalanced_service_blocks() { - let invalid_input = r#" - SERVICES SECTION - SERVICE test1 - IMAGE-ID alpine:latest - SERVICE test2 - IMAGE-ID nginx:latest - END SERVICE - "#; - assert!(OptimizedParser::pre_validate_input(invalid_input).is_err()); - } - - #[test] - #[allow(dead_code)] - fn test_quick_syntax_check() { - let invalid_port = r#" - SERVICES SECTION - SERVICE test - PORT-MAPPING 8080 WRONG 80 - END SERVICE - "#; - assert!(OptimizedParser::quick_syntax_check(invalid_port).is_err()); - - let valid_port = r#" - SERVICES SECTION - SERVICE test - PORT-MAPPING 8080 TO 80 - END SERVICE - "#; - assert!(OptimizedParser::quick_syntax_check(valid_port).is_ok()); - } -} \ No newline at end of file diff --git a/src/athena/parser/parser.rs b/src/athena/parser/parser.rs index ebafa2b..dc30785 100644 --- a/src/athena/parser/parser.rs +++ b/src/athena/parser/parser.rs @@ -3,7 +3,11 @@ use pest_derive::Parser; use std::collections::HashMap; use crate::athena::error::{AthenaError, AthenaResult, EnhancedParseError}; -use super::ast::*; +use super::ast::{ + AthenaFile, DeploymentSection, EnvironmentSection, EnvironmentVariable, FailureAction, + NetworkDefinition, NetworkDriver, PortMapping, Protocol, ResourceLimits, RestartPolicy, + Service, 
ServicesSection, SwarmConfig, UpdateConfig, VolumeDefinition, VolumeMapping, +}; #[derive(Parser)] #[grammar = "athena/parser/grammar.pest"] @@ -311,7 +315,7 @@ fn parse_service_item(pair: pest::iterators::Pair, service: &mut Service) } else if replicas_str.len() > 10 { "Replicas number is too large. Use a reasonable value like: 1, 2, 3, 5, 10, etc.".to_string() } else { - format!("'{}' is not a valid number. Use a positive integer like: 1, 2, 3, 5, 10, etc.", replicas_str) + format!("'{replicas_str}' is not a valid number. Use a positive integer like: 1, 2, 3, 5, 10, etc.") }; AthenaError::ParseError( @@ -321,10 +325,8 @@ fn parse_service_item(pair: pest::iterators::Pair, service: &mut Service) ) })?; - if service.swarm_config.is_none() { - service.swarm_config = Some(SwarmConfig::new()); - } - service.swarm_config.as_mut().unwrap().replicas = Some(replicas); + service.swarm_config.get_or_insert_with(SwarmConfig::new) + .replicas = Some(replicas); } } Rule::swarm_update_config => { @@ -435,7 +437,7 @@ fn parse_restart_policy(pair: pest::iterators::Pair) -> AthenaResult Ok(RestartPolicy::UnlessStopped), "on-failure" => Ok(RestartPolicy::OnFailure), "no" => Ok(RestartPolicy::No), - _ => Err(AthenaError::ParseError(EnhancedParseError::new(format!("Invalid restart policy: {}", policy_str)))) + _ => Err(AthenaError::ParseError(EnhancedParseError::new(format!("Invalid restart policy: {policy_str}")))) } } @@ -501,7 +503,7 @@ fn parse_update_config(pair: pest::iterators::Pair) -> AthenaResult().is_ok() && parallelism_str.starts_with('-') { "Parallelism must be a positive number. Use a value like: 1, 2, 3, 4, etc.".to_string() } else { - format!("'{}' is not a valid number. Use a positive integer like: 1, 2, 3, 4, etc.", parallelism_str) + format!("'{parallelism_str}' is not a valid number. 
Use a positive integer like: 1, 2, 3, 4, etc.") }; AthenaError::ParseError( @@ -522,7 +524,7 @@ fn parse_update_config(pair: pest::iterators::Pair) -> AthenaResult FailureAction::Rollback, _ => { return Err(AthenaError::ParseError( - EnhancedParseError::new(format!("Invalid failure action: {}", action_str)) + EnhancedParseError::new(format!("Invalid failure action: {action_str}")) .with_location(line, column) .with_suggestion("Valid failure actions are: CONTINUE, PAUSE, ROLLBACK".to_string()) )); @@ -571,9 +573,9 @@ fn parse_swarm_labels(pair: pest::iterators::Pair) -> AthenaResult) -> AthenaResult String { - if input.starts_with('"') && input.ends_with('"') { - input[1..input.len()-1].to_string() - } else { - input.to_string() - } + input + .strip_prefix('"') + .and_then(|s| s.strip_suffix('"')) + .unwrap_or(input) + .to_string() } fn create_enhanced_parse_error( @@ -607,7 +609,7 @@ fn create_enhanced_parse_error( column: usize, file_content: &str, ) -> EnhancedParseError { - let base_message = format!("{}", pest_error); + let base_message = format!("{pest_error}"); // Extract meaningful error message from Pest error let (clean_message, suggestion) = match &pest_error.variant { diff --git a/src/cli/commands.rs b/src/cli/commands.rs index 63ba60c..03a09b2 100644 --- a/src/cli/commands.rs +++ b/src/cli/commands.rs @@ -11,7 +11,7 @@ pub fn execute_command(command: Option, verbose: bool) -> AthenaResult if verbose { println!("Magic mode: Auto-detecting and building..."); } - execute_build(None, None, false, true) // verbose=true by default in magic mode + execute_build(None, None, false, true) } Some(Commands::Build { input, @@ -19,7 +19,7 @@ pub fn execute_command(command: Option, verbose: bool) -> AthenaResult validate_only, quiet, }) => { - let verbose = should_be_verbose(verbose, quiet); + let verbose = should_be_verbose(quiet); execute_build(input, output, validate_only, verbose) } @@ -28,7 +28,10 @@ pub fn execute_command(command: Option, verbose: bool) -> 
AthenaResult Some(Commands::Info { examples, directives, - }) => execute_info(examples, directives), + }) => { + execute_info(examples, directives); + Ok(()) + } } } @@ -38,16 +41,13 @@ fn execute_build( validate_only: bool, verbose: bool, ) -> AthenaResult<()> { - // Auto-detection of the .ath file let input = auto_detect_ath_file(input)?; if verbose { println!("Reading Athena file: {}", input.display()); } - // Read and parse the input file let content = fs::read_to_string(&input).map_err(AthenaError::IoError)?; - // Automatic validation (always done) if verbose { println!("Validating syntax..."); } @@ -66,25 +66,22 @@ fn execute_build( return Ok(()); } - // Generate docker-compose.yml (includes validation) let compose_yaml = generate_docker_compose(&athena_file)?; - // Determine output file let output_path = output.unwrap_or_else(|| "docker-compose.yml".into()); - // Write output fs::write(&output_path, &compose_yaml).map_err(AthenaError::IoError)?; - println!( - "Generated docker-compose.yml at: {}", - output_path.display() - ); + println!("Generated docker-compose.yml at: {}", output_path.display()); if verbose { println!("Project details:"); - println!(" • Project name: {}", athena_file.get_project_name()); - println!(" • Network name: {}", athena_file.get_network_name()); - println!(" • Services: {}", athena_file.services.services.len()); + println!(" - Project name: {}", athena_file.get_project_name()); + println!(" - Network name: {}", athena_file.get_network_name()); + println!( + " - Services: {}", + athena_file.services.services.len() + ); for service in &athena_file.services.services { println!( @@ -98,9 +95,7 @@ fn execute_build( Ok(()) } - fn execute_validate(input: Option, verbose: bool) -> AthenaResult<()> { - // Auto-detection of the .ath file let input = auto_detect_ath_file(input)?; if verbose { println!("Validating Athena file: {}", input.display()); @@ -110,7 +105,7 @@ fn execute_validate(input: Option, verbose: bool) -> AthenaR let 
athena_file = parse_athena_file(&content)?; - println!("✓ Athena file is valid"); + println!("Athena file is valid"); if verbose { println!("Project name: {}", athena_file.get_project_name()); @@ -128,7 +123,7 @@ fn execute_validate(input: Option, verbose: bool) -> AthenaR Ok(()) } -fn execute_info(examples: bool, directives: bool) -> AthenaResult<()> { +fn execute_info(examples: bool, directives: bool) { if examples { show_examples(); } else if directives { @@ -136,8 +131,6 @@ fn execute_info(examples: bool, directives: bool) -> AthenaResult<()> { } else { show_general_info(); } - - Ok(()) } fn show_general_info() { @@ -267,4 +260,4 @@ fn show_directives() { println!(" ENV-VARIABLE {{DATABASE_URL}}"); println!(" VOLUME-MAPPING \"./data\" TO \"/app/data\" (rw)"); println!(" RESOURCE-LIMITS CPU \"0.5\" MEMORY \"1G\""); -} +} \ No newline at end of file diff --git a/src/cli/utils.rs b/src/cli/utils.rs index 7341e67..0242567 100644 --- a/src/cli/utils.rs +++ b/src/cli/utils.rs @@ -1,54 +1,56 @@ +use std::fmt::Write; use std::fs; use std::path::PathBuf; -use crate::athena::{AthenaResult, AthenaError}; +use crate::athena::{AthenaError, AthenaResult}; -/// Auto-detect a .ath file in the current directory +/// Auto-detect a .ath file in the current directory. +/// +/// If `input` is `Some`, returns it directly. Otherwise, scans the current +/// directory for `.ath` files and returns the single match, or an error if +/// zero or multiple files are found. pub fn auto_detect_ath_file(input: Option) -> AthenaResult { - match input { - Some(path) => Ok(path), - None => { - // Search for .ath files in the current directory - let ath_files: Vec<_> = fs::read_dir(".") - .map_err(AthenaError::IoError)? - .filter_map(|entry| { - let entry = entry.ok()?; - let path = entry.path(); - if path.extension()? 
== "ath" && path.is_file() { - Some(path) - } else { - None - } - }) - .collect(); + if let Some(path) = input { + return Ok(path); + } - match ath_files.len() { - 0 => Err(AthenaError::ConfigError( - "No .ath file found in current directory. Please specify a file or create one.".to_string() - )), - 1 => { - let file = &ath_files[0]; - println!("Auto-detected: {}", file.display()); - Ok(file.clone()) - }, - _ => { - let mut error_msg = "Multiple .ath files found. Please specify which one to use:\n".to_string(); - for file in &ath_files { - error_msg.push_str(&format!(" - {}\n", file.display())); - } - error_msg.push_str("\nUsage: athena build "); - Err(AthenaError::ConfigError(error_msg)) - } + let ath_files: Vec<_> = fs::read_dir(".") + .map_err(AthenaError::IoError)? + .filter_map(|entry| { + let entry = entry.ok()?; + let path = entry.path(); + if path.extension()? == "ath" && path.is_file() { + Some(path) + } else { + None } + }) + .collect(); + + match ath_files.len() { + 0 => Err(AthenaError::ConfigError( + "No .ath file found in current directory. Please specify a file or create one." + .to_string(), + )), + 1 => { + let file = &ath_files[0]; + println!("Auto-detected: {}", file.display()); + Ok(file.clone()) + } + _ => { + let mut error_msg = + "Multiple .ath files found. Please specify which one to use:\n".to_string(); + for file in &ath_files { + let _ = writeln!(error_msg, " - {}", file.display()); + } + error_msg.push_str("Usage: athena build "); + Err(AthenaError::ConfigError(error_msg)) } } } -/// Determine if we should be in verbose mode (default yes, unless --quiet) -pub fn should_be_verbose(_global_verbose: bool, quiet: bool) -> bool { - if quiet { - false - } else { - true // Verbose by default - } +/// Determine if we should be in verbose mode (default yes, unless --quiet). 
+#[must_use] +pub fn should_be_verbose(quiet: bool) -> bool { + !quiet } \ No newline at end of file diff --git a/src/lib.rs b/src/lib.rs index 0e48cd5..3d9d9c9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,5 +1,5 @@ pub mod cli; pub mod athena; -pub use athena::{AthenaConfig, AthenaError, AthenaResult}; +pub use athena::{AthenaError, AthenaResult}; pub use cli::Cli; \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 9a45bb9..2d3b51d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -8,38 +8,38 @@ use cli::{Cli, execute_command}; fn main() { let cli = Cli::parse(); - + if let Err(e) = execute_command(cli.command, cli.verbose) { - eprintln!("Error: {}", e); - + eprintln!("Error: {e}"); + // Print additional context for common errors - match e { - athena::AthenaError::IoError(ref io_err) => { + match &e { + athena::AthenaError::IoError(io_err) => { match io_err.kind() { std::io::ErrorKind::NotFound => { eprintln!("Make sure the file path is correct and the file exists."); - }, + } std::io::ErrorKind::PermissionDenied => { eprintln!("Check file permissions and try running with appropriate privileges."); - }, + } _ => {} } - }, - athena::AthenaError::ParseError(ref msg) => { + } + athena::AthenaError::ParseError(msg) => { eprintln!("Check the syntax of your .ath file. 
Use 'athena info --examples' for syntax examples."); if msg.message.contains("Parse error") { eprintln!("Common issues: missing END SERVICE, incorrect keywords, or malformed strings."); } - }, - athena::AthenaError::ValidationError(ref msg) => { + } + athena::AthenaError::ValidationError(msg) => { eprintln!("Fix the validation issues in your configuration."); if msg.message.contains("circular") { eprintln!("Review your service dependencies to avoid circular references."); } - }, - _ => {} + } + athena::AthenaError::YamlError(_) | athena::AthenaError::ConfigError(_) => {} } - + process::exit(1); } -} +} \ No newline at end of file diff --git a/tests/integration/build_args_cli_tests.rs b/tests/integration/build_args_cli_tests.rs index f877f80..bdf0520 100644 --- a/tests/integration/build_args_cli_tests.rs +++ b/tests/integration/build_args_cli_tests.rs @@ -250,7 +250,7 @@ fn test_build_args_complex_scenario() { let parsed = parse_yaml(&yaml_content); // Check project name and network - assert_eq!(parsed["name"], "BUILD_ARGS_COMPLEX"); + assert_eq!(parsed["name"], "build-args-complex"); let networks = parsed["networks"].as_mapping().unwrap(); assert!(networks.contains_key(&Value::String("custom_network".to_string())));