diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 81d6fb2..a459121 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,6 +20,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago - uses: dtolnay/rust-toolchain@stable with: components: rustfmt @@ -30,6 +34,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago - uses: dtolnay/rust-toolchain@stable with: components: clippy @@ -46,6 +54,10 @@ jobs: runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - run: cargo test --workspace @@ -56,6 +68,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago - uses: dtolnay/rust-toolchain@stable - uses: Swatinem/rust-cache@v2 - run: cargo build --workspace --release @@ -65,9 +81,13 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago - uses: dtolnay/rust-toolchain@master with: - toolchain: "1.80.0" + toolchain: "1.88.0" - uses: Swatinem/rust-cache@v2 - run: cargo check --workspace @@ -85,4 +105,10 @@ jobs: 
runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: EmbarkStudios/cargo-deny-action@v2 + - name: Checkout sibling dependencies + run: | + git clone --depth 1 https://github.com/broomva/aiOS.git ../aiOS + git clone --depth 1 https://github.com/broomva/lago.git ../lago + - uses: dtolnay/rust-toolchain@stable + - run: cargo install cargo-deny --locked + - run: cargo deny check diff --git a/Cargo.lock b/Cargo.lock index d0ff977..215952a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -189,6 +189,7 @@ dependencies = [ "axum", "chrono", "clap", + "dirs", "futures-util", "lago-aios-eventstore-adapter", "lago-core", @@ -201,6 +202,7 @@ dependencies = [ "serde", "serde_json", "tokio", + "toml 0.8.23", "tracing", "tracing-appender", "tracing-subscriber", @@ -807,6 +809,27 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.61.2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -1556,7 +1579,7 @@ dependencies = [ "serde", "serde_json", "thiserror 2.0.18", - "toml", + "toml 0.9.12+spec-1.1.0", "tracing", ] @@ -1585,6 +1608,15 @@ version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +[[package]] +name = "libredox" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1744e39d1d6a9948f4f388969627434e31128196de472883b39f148769bfe30a" +dependencies = [ + "libc", +] + [[package]] name = "linux-raw-sys" version = "0.11.0" @@ 
-1852,6 +1884,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + [[package]] name = "ordered-float" version = "5.1.0" @@ -2125,6 +2163,17 @@ dependencies = [ "bitflags", ] +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 2.0.18", +] + [[package]] name = "ref-cast" version = "1.0.25" @@ -2539,6 +2588,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + [[package]] name = "serde_spanned" version = "1.0.4" @@ -2942,6 +3000,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit", +] + [[package]] name = "toml" version = "0.9.12+spec-1.1.0" @@ -2950,13 +3020,22 @@ checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" dependencies = [ "indexmap", "serde_core", - "serde_spanned", - "toml_datetime", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", "toml_parser", "toml_writer", "winnow", ] +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + [[package]] name = "toml_datetime" version = 
"0.7.5+spec-1.1.0" @@ -2966,6 +3045,20 @@ dependencies = [ "serde_core", ] +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + [[package]] name = "toml_parser" version = "1.0.8+spec-1.1.0" @@ -2975,6 +3068,12 @@ dependencies = [ "winnow", ] +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + [[package]] name = "toml_writer" version = "1.0.6+spec-1.1.0" @@ -3752,6 +3851,9 @@ name = "winnow" version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] [[package]] name = "wiremock" diff --git a/Cargo.toml b/Cargo.toml index 0f5c156..d7b90a9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -81,6 +81,8 @@ serde_yaml = "0.9" wait-timeout = "0.2" walkdir = "2" clap = { version = "4", features = ["derive"] } +dirs = "6" +toml = "0.8" tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } diff --git a/crates/arcan-core/src/context.rs b/crates/arcan-core/src/context.rs index c2784a7..f4c12fe 100644 --- a/crates/arcan-core/src/context.rs +++ b/crates/arcan-core/src/context.rs @@ -372,7 +372,7 @@ mod tests { make_msg(Role::System, "sys"), make_msg(Role::User, "q1"), make_msg(Role::Assistant, "calling tool"), - ChatMessage::tool_result("call-1", &"x".repeat(500)), + ChatMessage::tool_result("call-1", "x".repeat(500)), make_msg(Role::User, "current"), ]; diff --git a/crates/arcan-core/src/context_compiler.rs b/crates/arcan-core/src/context_compiler.rs index 482bf36..20945b0 100644 --- 
a/crates/arcan-core/src/context_compiler.rs +++ b/crates/arcan-core/src/context_compiler.rs @@ -283,7 +283,7 @@ mod tests { let result = compile_context(&blocks, &config); // Should keep Persona (255) and Rules (200), drop Memory (50) and Retrieval (30) assert!(result.system_messages.len() <= 3); - assert!(result.dropped_blocks.len() >= 1); + assert!(!result.dropped_blocks.is_empty()); // Persona should always be there assert!( result diff --git a/crates/arcan-core/src/runtime.rs b/crates/arcan-core/src/runtime.rs index b7ee14a..d7ad74a 100644 --- a/crates/arcan-core/src/runtime.rs +++ b/crates/arcan-core/src/runtime.rs @@ -699,11 +699,7 @@ mod tests { } fn execute(&self, call: &ToolCall, _ctx: &ToolContext) -> Result { - let value = call - .input - .get("value") - .cloned() - .unwrap_or_else(|| json!(null)); + let value = call.input.get("value").cloned().unwrap_or(json!(null)); Ok(ToolResult { call_id: call.call_id.clone(), tool_name: call.tool_name.clone(), diff --git a/crates/arcan-provider/src/anthropic.rs b/crates/arcan-provider/src/anthropic.rs index dac8422..4fa0ca7 100644 --- a/crates/arcan-provider/src/anthropic.rs +++ b/crates/arcan-provider/src/anthropic.rs @@ -40,6 +40,45 @@ impl AnthropicConfig { base_url, }) } + + /// Create config from resolved CLI settings. + /// + /// API key is always read from env (never from config file). + /// Other settings use the provided overrides, falling back to env vars. 
+ pub fn from_resolved( + model_override: Option<&str>, + base_url_override: Option<&str>, + max_tokens_override: Option<u32>, + ) -> Result<Self, CoreError> { + let api_key = std::env::var("ANTHROPIC_API_KEY").map_err(|_| { + CoreError::Provider("ANTHROPIC_API_KEY environment variable not set".to_string()) + })?; + + let model = model_override + .map(String::from) + .or_else(|| std::env::var("ANTHROPIC_MODEL").ok()) + .unwrap_or_else(|| "claude-sonnet-4-5-20250929".to_string()); + + let max_tokens = max_tokens_override + .or_else(|| { + std::env::var("ANTHROPIC_MAX_TOKENS") + .ok() + .and_then(|s| s.parse().ok()) + }) + .unwrap_or(4096); + + let base_url = base_url_override + .map(String::from) + .or_else(|| std::env::var("ANTHROPIC_BASE_URL").ok()) + .unwrap_or_else(|| "https://api.anthropic.com".to_string()); + + Ok(Self { + api_key, + model, + max_tokens, + base_url, + }) + } } /// Anthropic Messages API provider implementing the `Provider` trait. diff --git a/crates/arcan-provider/src/openai.rs b/crates/arcan-provider/src/openai.rs index 7329661..323ba75 100644 --- a/crates/arcan-provider/src/openai.rs +++ b/crates/arcan-provider/src/openai.rs @@ -72,6 +72,79 @@ impl OpenAiConfig { enable_streaming: true, }) } + + /// Create OpenAI config from resolved CLI settings. 
+ pub fn openai_from_resolved( + model_override: Option<&str>, + base_url_override: Option<&str>, + max_tokens_override: Option<u32>, + ) -> Result<Self, CoreError> { + let api_key = std::env::var("OPENAI_API_KEY").map_err(|_| { + CoreError::Provider("OPENAI_API_KEY environment variable not set".to_string()) + })?; + + let model = model_override + .map(String::from) + .or_else(|| std::env::var("OPENAI_MODEL").ok()) + .unwrap_or_else(|| "gpt-4o".to_string()); + + let max_tokens = max_tokens_override + .or_else(|| { + std::env::var("OPENAI_MAX_TOKENS") + .ok() + .and_then(|s| s.parse().ok()) + }) + .unwrap_or(4096); + + let base_url = base_url_override + .map(String::from) + .or_else(|| std::env::var("OPENAI_BASE_URL").ok()) + .unwrap_or_else(|| "https://api.openai.com".to_string()); + + Ok(Self { + api_key, + model, + max_tokens, + base_url, + provider_name: "openai".to_string(), + enable_streaming: false, + }) + } + + /// Create Ollama config from resolved CLI settings. + pub fn ollama_from_resolved( + model_override: Option<&str>, + base_url_override: Option<&str>, + max_tokens_override: Option<u32>, + enable_streaming_override: Option<bool>, + ) -> Result<Self, CoreError> { + let model = model_override + .map(String::from) + .or_else(|| std::env::var("OLLAMA_MODEL").ok()) + .unwrap_or_else(|| "llama3.2".to_string()); + + let max_tokens = max_tokens_override + .or_else(|| { + std::env::var("OLLAMA_MAX_TOKENS") + .ok() + .and_then(|s| s.parse().ok()) + }) + .unwrap_or(4096); + + let base_url = base_url_override + .map(String::from) + .or_else(|| std::env::var("OLLAMA_BASE_URL").ok()) + .unwrap_or_else(|| "http://localhost:11434".to_string()); + + Ok(Self { + api_key: String::new(), + model, + max_tokens, + base_url, + provider_name: "ollama".to_string(), + enable_streaming: enable_streaming_override.unwrap_or(true), + }) + } } /// Provider implementation for any OpenAI-compatible chat completions API. 
diff --git a/crates/arcan-tui/Cargo.toml b/crates/arcan-tui/Cargo.toml index 6e61d4a..4b7a4f8 100644 --- a/crates/arcan-tui/Cargo.toml +++ b/crates/arcan-tui/Cargo.toml @@ -2,6 +2,7 @@ name = "arcan-tui" version = "0.2.0" edition = "2024" +license.workspace = true authors.workspace = true repository.workspace = true publish = false diff --git a/crates/arcan/Cargo.toml b/crates/arcan/Cargo.toml index 63733f9..47153cf 100644 --- a/crates/arcan/Cargo.toml +++ b/crates/arcan/Cargo.toml @@ -38,11 +38,13 @@ anyhow.workspace = true async-trait.workspace = true axum.workspace = true clap.workspace = true +dirs.workspace = true futures-util.workspace = true reqwest.workspace = true reqwest-eventsource.workspace = true serde.workspace = true serde_json.workspace = true +toml.workspace = true tokio.workspace = true tracing.workspace = true tracing-subscriber.workspace = true diff --git a/crates/arcan/src/cli_run.rs b/crates/arcan/src/cli_run.rs index 70bfe15..7f442e5 100644 --- a/crates/arcan/src/cli_run.rs +++ b/crates/arcan/src/cli_run.rs @@ -62,13 +62,20 @@ pub async fn run_cli( session_id: &str, message: &str, json_output: bool, + model: Option<&str>, ) -> anyhow::Result { let client = Client::new(); // Ensure session exists (creates if needed via POST /sessions). + let mut session_body = serde_json::json!({ "session_id": session_id }); + if let Some(m) = model { + session_body["model_routing"] = serde_json::json!({ + "primary_model": m, + }); + } client .post(format!("{base_url}/sessions")) - .json(&serde_json::json!({ "session_id": session_id })) + .json(&session_body) .send() .await?; diff --git a/crates/arcan/src/config.rs b/crates/arcan/src/config.rs new file mode 100644 index 0000000..66f472b --- /dev/null +++ b/crates/arcan/src/config.rs @@ -0,0 +1,497 @@ +//! Persistent CLI configuration with layered resolution. +//! +//! Resolution order: hardcoded defaults → global config → project-local config → env vars → CLI flags. 
+ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +/// Top-level TOML configuration file structure. +/// +/// All fields use `Option` to allow partial configs and clean merging. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct ArcanConfig { + pub defaults: DefaultsConfig, + #[serde(default, skip_serializing_if = "HashMap::is_empty")] + pub providers: HashMap<String, ProviderConfig>, + pub agent: AgentConfig, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct DefaultsConfig { + pub provider: Option<String>, + pub model: Option<String>, + pub port: Option<u16>, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct ProviderConfig { + pub model: Option<String>, + pub base_url: Option<String>, + pub max_tokens: Option<u32>, + pub enable_streaming: Option<bool>, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(default)] +pub struct AgentConfig { + pub max_iterations: Option<u32>, + pub approval_timeout: Option<u64>, +} + +/// Fully resolved configuration with concrete values (no Options). +#[derive(Debug, Clone)] +#[allow(dead_code)] +pub struct ResolvedConfig { + pub provider: String, + pub model: Option<String>, + pub port: u16, + pub max_iterations: u32, + pub approval_timeout: u64, + pub provider_config: Option<ProviderConfig>, +} + +impl ArcanConfig { + /// Merge `other` on top of `self`. Non-None values in `other` win. 
+ pub fn merge(&mut self, other: &ArcanConfig) { + if other.defaults.provider.is_some() { + self.defaults.provider.clone_from(&other.defaults.provider); + } + if other.defaults.model.is_some() { + self.defaults.model.clone_from(&other.defaults.model); + } + if other.defaults.port.is_some() { + self.defaults.port = other.defaults.port; + } + if other.agent.max_iterations.is_some() { + self.agent.max_iterations = other.agent.max_iterations; + } + if other.agent.approval_timeout.is_some() { + self.agent.approval_timeout = other.agent.approval_timeout; + } + for (name, pc) in &other.providers { + let entry = self.providers.entry(name.clone()).or_default(); + if pc.model.is_some() { + entry.model.clone_from(&pc.model); + } + if pc.base_url.is_some() { + entry.base_url.clone_from(&pc.base_url); + } + if pc.max_tokens.is_some() { + entry.max_tokens = pc.max_tokens; + } + if pc.enable_streaming.is_some() { + entry.enable_streaming = pc.enable_streaming; + } + } + } + + /// Set a key using dotted notation. Shortcut keys: + /// `provider` → `defaults.provider`, `model` → `defaults.model`, `port` → `defaults.port`. + pub fn set_key(&mut self, key: &str, value: &str) -> Result<(), String> { + match key { + "provider" | "defaults.provider" => { + self.defaults.provider = Some(value.to_owned()); + } + "model" | "defaults.model" => { + self.defaults.model = Some(value.to_owned()); + } + "port" | "defaults.port" => { + let port: u16 = value + .parse() + .map_err(|e| format!("invalid port value: {e}"))?; + self.defaults.port = Some(port); + } + "agent.max_iterations" | "max_iterations" => { + let v: u32 = value + .parse() + .map_err(|e| format!("invalid max_iterations: {e}"))?; + self.agent.max_iterations = Some(v); + } + "agent.approval_timeout" | "approval_timeout" => { + let v: u64 = value + .parse() + .map_err(|e| format!("invalid approval_timeout: {e}"))?; + self.agent.approval_timeout = Some(v); + } + _ if key.starts_with("providers.") => { + // e.g. 
providers.ollama.base_url + let rest = &key["providers.".len()..]; + let parts: Vec<&str> = rest.splitn(2, '.').collect(); + if parts.len() != 2 { + return Err(format!( + "invalid provider key: {key} (expected providers..)" + )); + } + let provider_name = parts[0]; + let field = parts[1]; + let entry = self.providers.entry(provider_name.to_owned()).or_default(); + match field { + "model" => entry.model = Some(value.to_owned()), + "base_url" => entry.base_url = Some(value.to_owned()), + "max_tokens" => { + let v: u32 = value + .parse() + .map_err(|e| format!("invalid max_tokens: {e}"))?; + entry.max_tokens = Some(v); + } + "enable_streaming" => { + let v: bool = value + .parse() + .map_err(|e| format!("invalid enable_streaming: {e}"))?; + entry.enable_streaming = Some(v); + } + _ => return Err(format!("unknown provider field: {field}")), + } + } + _ => return Err(format!("unknown config key: {key}")), + } + Ok(()) + } + + /// Get a value by key. Returns None if unset. + pub fn get_key(&self, key: &str) -> Option { + match key { + "provider" | "defaults.provider" => self.defaults.provider.clone(), + "model" | "defaults.model" => self.defaults.model.clone(), + "port" | "defaults.port" => self.defaults.port.map(|p| p.to_string()), + "agent.max_iterations" | "max_iterations" => { + self.agent.max_iterations.map(|v| v.to_string()) + } + "agent.approval_timeout" | "approval_timeout" => { + self.agent.approval_timeout.map(|v| v.to_string()) + } + _ if key.starts_with("providers.") => { + let rest = &key["providers.".len()..]; + let parts: Vec<&str> = rest.splitn(2, '.').collect(); + if parts.len() != 2 { + return None; + } + let pc = self.providers.get(parts[0])?; + match parts[1] { + "model" => pc.model.clone(), + "base_url" => pc.base_url.clone(), + "max_tokens" => pc.max_tokens.map(|v| v.to_string()), + "enable_streaming" => pc.enable_streaming.map(|v| v.to_string()), + _ => None, + } + } + _ => None, + } + } +} + +/// Global config path: `~/.config/arcan/config.toml` 
+pub fn global_config_path() -> Option<PathBuf> { + dirs::config_dir().map(|d| d.join("arcan").join("config.toml")) +} + +/// Project-local config path: `<data_dir>/config.toml` +pub fn local_config_path(data_dir: &Path) -> PathBuf { + data_dir.join("config.toml") +} + +/// Load and merge config from global + local files. +pub fn load_config(data_dir: &Path) -> ArcanConfig { + let mut config = ArcanConfig::default(); + + // Layer 1: global config + if let Some(global_path) = global_config_path() { + if let Some(global) = load_config_file(&global_path) { + config.merge(&global); + } + } + + // Layer 2: project-local config + let local_path = local_config_path(data_dir); + if let Some(local) = load_config_file(&local_path) { + config.merge(&local); + } + + config +} + +/// Load a single TOML config file. Returns None if file doesn't exist or is invalid. +fn load_config_file(path: &Path) -> Option<ArcanConfig> { + let content = std::fs::read_to_string(path).ok()?; + toml::from_str(&content).ok() +} + +/// Save config to the project-local config file. +pub fn save_config(data_dir: &Path, config: &ArcanConfig) -> anyhow::Result<()> { + let path = local_config_path(data_dir); + std::fs::create_dir_all(data_dir)?; + let content = toml::to_string_pretty(config) + .map_err(|e| anyhow::anyhow!("failed to serialize config: {e}"))?; + std::fs::write(&path, content)?; + Ok(()) +} + +/// Resolve the final config by applying env vars and CLI overrides on top. 
+pub fn resolve( + config: &ArcanConfig, + cli_provider: Option<&str>, + cli_model: Option<&str>, + cli_port: Option<u16>, + cli_max_iterations: Option<u32>, + cli_approval_timeout: Option<u64>, +) -> ResolvedConfig { + // Provider: CLI > env > config > "" + let provider = cli_provider + .map(String::from) + .or_else(|| std::env::var("ARCAN_PROVIDER").ok()) + .or_else(|| config.defaults.provider.clone()) + .unwrap_or_default(); + + // Model: CLI > env > provider-specific config > defaults config > None + let model = cli_model + .map(String::from) + .or_else(|| std::env::var("ARCAN_MODEL").ok()) + .or_else(|| { + config + .providers + .get(&provider) + .and_then(|pc| pc.model.clone()) + }) + .or_else(|| config.defaults.model.clone()); + + // Port: CLI > env > config > 3000 + let port = cli_port + .or_else(|| { + std::env::var("ARCAN_PORT") + .ok() + .and_then(|s| s.parse().ok()) + }) + .or(config.defaults.port) + .unwrap_or(3000); + + // Max iterations: CLI > config > 10 + let max_iterations = cli_max_iterations + .or(config.agent.max_iterations) + .unwrap_or(10); + + // Approval timeout: CLI > config > 300 + let approval_timeout = cli_approval_timeout + .or(config.agent.approval_timeout) + .unwrap_or(300); + + // Provider-specific config section + let provider_config = config.providers.get(&provider).cloned(); + + ResolvedConfig { + provider, + model, + port, + max_iterations, + approval_timeout, + provider_config, + } +} + +/// Generate default config TOML content. 
+pub fn default_config_content() -> String { + r#"# Arcan CLI Configuration +# Precedence: defaults < config file < env vars < CLI flags + +[defaults] +# provider = "anthropic" # anthropic, openai, ollama, mock +# model = "claude-sonnet-4-5-20250929" +# port = 3000 + +[agent] +# max_iterations = 10 +# approval_timeout = 300 + +# [providers.anthropic] +# model = "claude-sonnet-4-5-20250929" +# max_tokens = 4096 + +# [providers.ollama] +# model = "llama3.2" +# base_url = "http://localhost:11434" +# max_tokens = 4096 +# enable_streaming = true + +# [providers.openai] +# model = "gpt-4o" +# max_tokens = 4096 +"# + .to_owned() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn merge_overrides_non_none() { + let mut base = ArcanConfig::default(); + base.defaults.provider = Some("mock".into()); + base.defaults.port = Some(3000); + + let overlay = ArcanConfig { + defaults: DefaultsConfig { + provider: Some("ollama".into()), + model: Some("llama3.2".into()), + port: None, + }, + ..Default::default() + }; + + base.merge(&overlay); + assert_eq!(base.defaults.provider.as_deref(), Some("ollama")); + assert_eq!(base.defaults.model.as_deref(), Some("llama3.2")); + assert_eq!(base.defaults.port, Some(3000)); // preserved + } + + #[test] + fn set_and_get_shortcut_keys() { + let mut config = ArcanConfig::default(); + config.set_key("provider", "ollama").unwrap(); + config.set_key("model", "gpt-oss:20b").unwrap(); + config.set_key("port", "3001").unwrap(); + + assert_eq!(config.get_key("provider").as_deref(), Some("ollama")); + assert_eq!(config.get_key("model").as_deref(), Some("gpt-oss:20b")); + assert_eq!(config.get_key("port").as_deref(), Some("3001")); + } + + #[test] + fn set_and_get_dotted_provider_keys() { + let mut config = ArcanConfig::default(); + config + .set_key("providers.ollama.base_url", "http://localhost:11434") + .unwrap(); + config + .set_key("providers.ollama.max_tokens", "8192") + .unwrap(); + config + .set_key("providers.ollama.enable_streaming", 
"true") + .unwrap(); + + assert_eq!( + config.get_key("providers.ollama.base_url").as_deref(), + Some("http://localhost:11434") + ); + assert_eq!( + config.get_key("providers.ollama.max_tokens").as_deref(), + Some("8192") + ); + assert_eq!( + config + .get_key("providers.ollama.enable_streaming") + .as_deref(), + Some("true") + ); + } + + #[test] + fn set_key_rejects_invalid() { + let mut config = ArcanConfig::default(); + assert!(config.set_key("port", "not-a-number").is_err()); + assert!(config.set_key("unknown_key", "value").is_err()); + assert!(config.set_key("providers.ollama.unknown", "v").is_err()); + } + + #[test] + fn get_key_returns_none_for_unset() { + let config = ArcanConfig::default(); + assert!(config.get_key("provider").is_none()); + assert!(config.get_key("providers.ollama.model").is_none()); + } + + #[test] + fn resolve_defaults() { + let config = ArcanConfig::default(); + let resolved = resolve(&config, None, None, None, None, None); + assert_eq!(resolved.provider, ""); + assert!(resolved.model.is_none()); + assert_eq!(resolved.port, 3000); + assert_eq!(resolved.max_iterations, 10); + assert_eq!(resolved.approval_timeout, 300); + } + + #[test] + fn resolve_cli_overrides_config() { + let mut config = ArcanConfig::default(); + config.defaults.provider = Some("ollama".into()); + config.defaults.model = Some("llama3.2".into()); + config.defaults.port = Some(3001); + + let resolved = resolve( + &config, + Some("anthropic"), + Some("claude-3"), + Some(4000), + None, + None, + ); + assert_eq!(resolved.provider, "anthropic"); + assert_eq!(resolved.model.as_deref(), Some("claude-3")); + assert_eq!(resolved.port, 4000); + } + + #[test] + fn resolve_uses_provider_specific_model() { + let mut config = ArcanConfig::default(); + config.defaults.provider = Some("ollama".into()); + let pc = ProviderConfig { + model: Some("special-model".into()), + ..Default::default() + }; + config.providers.insert("ollama".into(), pc); + + let resolved = resolve(&config, None, 
None, None, None, None); + assert_eq!(resolved.model.as_deref(), Some("special-model")); + } + + #[test] + fn roundtrip_toml_serialization() { + let mut config = ArcanConfig::default(); + config.defaults.provider = Some("ollama".into()); + config.defaults.port = Some(3001); + let pc = ProviderConfig { + model: Some("llama3.2".into()), + ..Default::default() + }; + config.providers.insert("ollama".into(), pc); + + let toml_str = toml::to_string_pretty(&config).expect("serialize"); + let parsed: ArcanConfig = toml::from_str(&toml_str).expect("parse"); + assert_eq!(parsed.defaults.provider.as_deref(), Some("ollama")); + assert_eq!(parsed.defaults.port, Some(3001)); + assert_eq!( + parsed + .providers + .get("ollama") + .and_then(|p| p.model.as_deref()), + Some("llama3.2") + ); + } + + #[test] + fn save_and_load_config_file() { + let dir = std::env::temp_dir().join(format!( + "arcan-config-test-{}", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_nanos() + )); + std::fs::create_dir_all(&dir).unwrap(); + + let mut config = ArcanConfig::default(); + config.set_key("provider", "ollama").unwrap(); + config.set_key("model", "test-model").unwrap(); + save_config(&dir, &config).unwrap(); + + let loaded = load_config(&dir); + assert_eq!(loaded.defaults.provider.as_deref(), Some("ollama")); + assert_eq!(loaded.defaults.model.as_deref(), Some("test-model")); + + let _ = std::fs::remove_dir_all(dir); + } +} diff --git a/crates/arcan/src/daemon.rs b/crates/arcan/src/daemon.rs index b880893..37a7362 100644 --- a/crates/arcan/src/daemon.rs +++ b/crates/arcan/src/daemon.rs @@ -14,7 +14,7 @@ async fn is_daemon_healthy(base_url: &str) -> bool { /// Check if a PID file exists and the process is alive. /// Returns `Some(pid)` if alive, `None` if dead or no PID file. 
-fn check_existing_pid(data_dir: &Path) -> Option<u32> { +pub fn check_existing_pid(data_dir: &Path) -> Option<u32> { let pid_path = data_dir.join("daemon.pid"); let raw = fs::read_to_string(&pid_path).ok()?; let pid: u32 = raw.trim().parse().ok()?; @@ -30,7 +30,7 @@ fn check_existing_pid(data_dir: &Path) -> Option<u32> { /// Check if a process with the given PID is alive using `kill -0`. #[cfg(unix)] -fn is_process_alive(pid: u32) -> bool { +pub fn is_process_alive(pid: u32) -> bool { std::process::Command::new("kill") .args(["-0", &pid.to_string()]) .stdout(std::process::Stdio::null()) @@ -40,7 +40,7 @@ fn is_process_alive(pid: u32) -> bool { } #[cfg(not(unix))] -fn is_process_alive(_pid: u32) -> bool { +pub fn is_process_alive(_pid: u32) -> bool { // On non-Unix, we can't cheaply check — assume alive if PID file exists. true } @@ -54,6 +54,51 @@ pub fn remove_pid_file(data_dir: &Path) { } } +/// Stop a running daemon by sending SIGTERM and waiting for exit. +pub async fn stop_daemon(data_dir: &Path, port: u16) -> anyhow::Result<()> { + let base_url = format!("http://127.0.0.1:{port}"); + + // Check PID file first. + let Some(pid) = check_existing_pid(data_dir) else { + // No live process — check if the port is healthy anyway. + if is_daemon_healthy(&base_url).await { + anyhow::bail!( + "Daemon is healthy on {base_url} but no PID file found. Stop it manually." + ); + } + anyhow::bail!("No running daemon found."); + }; + + // Send SIGTERM. + #[cfg(unix)] + { + let status = std::process::Command::new("kill") + .args([&pid.to_string()]) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .status(); + if status.is_err() || !status.unwrap().success() { + anyhow::bail!("Failed to send SIGTERM to PID {pid}"); + } + } + + #[cfg(not(unix))] + { + anyhow::bail!("stop is only supported on Unix systems"); + } + + // Poll until process is dead (up to 10s). 
+ for _ in 0..50 { + tokio::time::sleep(Duration::from_millis(200)).await; + if !is_process_alive(pid) { + remove_pid_file(data_dir); + return Ok(()); + } + } + + anyhow::bail!("Daemon (PID {pid}) did not exit within 10 seconds"); +} + /// Ensure a daemon is running on the given port. /// /// 1. Probe `GET /health`. If healthy, return immediately. @@ -61,7 +106,12 @@ pub fn remove_pid_file(data_dir: &Path) { /// 3. If PID is stale, remove PID file and proceed. /// 4. Spawn `arcan serve` as a detached child process. /// 5. Poll `/health` until it succeeds (up to ~12 s). -pub async fn ensure_daemon(data_dir: &Path, port: u16) -> anyhow::Result { +pub async fn ensure_daemon( + data_dir: &Path, + port: u16, + provider: Option<&str>, + model: Option<&str>, +) -> anyhow::Result { let base_url = format!("http://127.0.0.1:{port}"); if is_daemon_healthy(&base_url).await { @@ -97,14 +147,18 @@ pub async fn ensure_daemon(data_dir: &Path, port: u16) -> anyhow::Result // Resolve the current executable so `arcan serve` uses the same binary. 
let exe = std::env::current_exe()?; + let port_str = port.to_string(); + let dir_str = data_dir.to_string_lossy().to_string(); + let mut args = vec!["serve", "--port", &port_str, "--data-dir", &dir_str]; + if let Some(p) = provider { + args.extend(["--provider", p]); + } + if let Some(m) = model { + args.extend(["--model", m]); + } + let child = std::process::Command::new(exe) - .args([ - "serve", - "--port", - &port.to_string(), - "--data-dir", - &data_dir.to_string_lossy(), - ]) + .args(&args) .stdout(log_file) .stderr(stderr_log) .stdin(std::process::Stdio::null()) diff --git a/crates/arcan/src/main.rs b/crates/arcan/src/main.rs index 9350d26..283247f 100644 --- a/crates/arcan/src/main.rs +++ b/crates/arcan/src/main.rs @@ -1,4 +1,5 @@ mod cli_run; +mod config; mod daemon; use aios_protocol::{ @@ -18,6 +19,7 @@ use arcan_lago::{MemoryCommitTool, MemoryProjection, MemoryProposeTool, MemoryQu use arcan_provider::anthropic::{AnthropicConfig, AnthropicProvider}; use arcand::{canonical::create_canonical_router, mock::MockProvider}; use clap::{Parser, Subcommand}; +use config::ResolvedConfig; use lago_aios_eventstore_adapter::LagoAiosEventStoreAdapter; use lago_core::{ BranchId, EventEnvelope, EventId, EventPayload, EventQuery, Journal, Projection, SessionId, @@ -136,8 +138,16 @@ struct Cli { data_dir: PathBuf, /// HTTP listen port - #[arg(long, default_value_t = 3000, global = true)] - port: u16, + #[arg(long, global = true)] + port: Option, + + /// LLM provider (anthropic, openai, ollama, mock) + #[arg(long, global = true)] + provider: Option, + + /// Model name override + #[arg(long, global = true)] + model: Option, } #[derive(Subcommand)] @@ -145,12 +155,12 @@ enum Command { /// Run the daemon in foreground Serve { /// Maximum orchestrator iterations per run - #[arg(long, default_value_t = 10)] - max_iterations: u32, + #[arg(long)] + max_iterations: Option, /// Approval timeout in seconds (default 300 = 5 minutes) - #[arg(long, default_value_t = 300)] - 
approval_timeout: u64, + #[arg(long)] + approval_timeout: Option, }, /// Launch the TUI client (auto-starts daemon if needed) Chat { @@ -179,6 +189,35 @@ enum Command { #[arg(long)] json: bool, }, + /// Manage persistent configuration + Config { + #[command(subcommand)] + action: ConfigAction, + }, + /// Show daemon status, provider, model, and session info + Status, + /// Stop the running daemon + Stop, +} + +#[derive(Subcommand)] +enum ConfigAction { + /// Set a config value (e.g., `arcan config set provider ollama`) + Set { + /// Config key (provider, model, port, or dotted path like providers.ollama.base_url) + key: String, + /// Value to set + value: String, + }, + /// Get a config value + Get { + /// Config key + key: String, + }, + /// List all configuration + List, + /// Initialize a default config file + Init, } async fn shutdown_signal() { @@ -253,12 +292,68 @@ async fn resolve_session( "default".to_owned() } -fn run_serve( - data_dir: &Path, - port: u16, - _max_iterations: u32, - _approval_timeout: u64, -) -> anyhow::Result<()> { +/// Build provider from resolved configuration. 
+fn build_provider(resolved: &ResolvedConfig) -> anyhow::Result> { + let pc = resolved.provider_config.as_ref(); + + match resolved.provider.as_str() { + "mock" => { + tracing::warn!("Provider: MockProvider (forced via config)"); + Ok(Arc::new(MockProvider)) + } + "openai" => { + let config = arcan_provider::openai::OpenAiConfig::openai_from_resolved( + resolved.model.as_deref(), + pc.and_then(|p| p.base_url.as_deref()), + pc.and_then(|p| p.max_tokens), + )?; + tracing::info!(model = %config.model, "Provider: OpenAI"); + Ok(Arc::new( + arcan_provider::openai::OpenAiCompatibleProvider::new(config), + )) + } + "ollama" => { + let config = arcan_provider::openai::OpenAiConfig::ollama_from_resolved( + resolved.model.as_deref(), + pc.and_then(|p| p.base_url.as_deref()), + pc.and_then(|p| p.max_tokens), + pc.and_then(|p| p.enable_streaming), + )?; + tracing::info!(model = %config.model, base_url = %config.base_url, "Provider: Ollama"); + Ok(Arc::new( + arcan_provider::openai::OpenAiCompatibleProvider::new(config), + )) + } + "anthropic" => { + let config = AnthropicConfig::from_resolved( + resolved.model.as_deref(), + pc.and_then(|p| p.base_url.as_deref()), + pc.and_then(|p| p.max_tokens), + )?; + tracing::info!(model = %config.model, "Provider: Anthropic"); + Ok(Arc::new(AnthropicProvider::new(config))) + } + // Auto-detect: try providers in order + _ => { + if let Ok(config) = AnthropicConfig::from_env() { + tracing::info!(model = %config.model, "Provider: Anthropic (auto-detected)"); + Ok(Arc::new(AnthropicProvider::new(config))) + } else if let Ok(config) = arcan_provider::openai::OpenAiConfig::openai_from_env() { + tracing::info!(model = %config.model, "Provider: OpenAI (auto-detected)"); + Ok(Arc::new( + arcan_provider::openai::OpenAiCompatibleProvider::new(config), + )) + } else { + tracing::warn!( + "Provider: MockProvider (set ARCAN_PROVIDER or API key env vars for real LLM)" + ); + Ok(Arc::new(MockProvider)) + } + } + } +} + +fn run_serve(data_dir: &Path, 
resolved: &ResolvedConfig) -> anyhow::Result<()> { let workspace_root = std::env::current_dir()?; // --- Lago persistence --- @@ -270,6 +365,9 @@ fn run_serve( workspace = %workspace_root.display(), journal = %journal_path.display(), blobs = %blobs_path.display(), + provider = %resolved.provider, + model = ?resolved.model, + port = resolved.port, "Starting arcan" ); @@ -315,65 +413,7 @@ fn run_serve( registry.register(MemoryCommitTool::new(journal.clone())); // --- Provider --- - // Selection order: ARCAN_PROVIDER env var > auto-detect from API keys > MockProvider - let provider_name = std::env::var("ARCAN_PROVIDER").unwrap_or_default(); - let provider: Arc = match provider_name.as_str() { - "mock" => { - tracing::warn!("Provider: MockProvider (forced via ARCAN_PROVIDER=mock)"); - Arc::new(MockProvider) - } - "openai" => match arcan_provider::openai::OpenAiConfig::openai_from_env() { - Ok(config) => { - tracing::info!(model = %config.model, "Provider: OpenAI"); - Arc::new(arcan_provider::openai::OpenAiCompatibleProvider::new( - config, - )) - } - Err(e) => { - tracing::error!("ARCAN_PROVIDER=openai but config failed: {e}"); - return Err(e.into()); - } - }, - "ollama" => match arcan_provider::openai::OpenAiConfig::ollama_from_env() { - Ok(config) => { - tracing::info!(model = %config.model, base_url = %config.base_url, "Provider: Ollama"); - Arc::new(arcan_provider::openai::OpenAiCompatibleProvider::new( - config, - )) - } - Err(e) => { - tracing::error!("ARCAN_PROVIDER=ollama but config failed: {e}"); - return Err(e.into()); - } - }, - "anthropic" => match AnthropicConfig::from_env() { - Ok(config) => { - tracing::info!(model = %config.model, "Provider: Anthropic"); - Arc::new(AnthropicProvider::new(config)) - } - Err(e) => { - tracing::error!("ARCAN_PROVIDER=anthropic but config failed: {e}"); - return Err(e.into()); - } - }, - // Auto-detect: try providers in order - _ => { - if let Ok(config) = AnthropicConfig::from_env() { - tracing::info!(model = 
%config.model, "Provider: Anthropic (auto-detected)"); - Arc::new(AnthropicProvider::new(config)) - } else if let Ok(config) = arcan_provider::openai::OpenAiConfig::openai_from_env() { - tracing::info!(model = %config.model, "Provider: OpenAI (auto-detected)"); - Arc::new(arcan_provider::openai::OpenAiCompatibleProvider::new( - config, - )) - } else { - tracing::warn!( - "Provider: MockProvider (set ARCAN_PROVIDER or API key env vars for real LLM)" - ); - Arc::new(MockProvider) - } - } - }; + let provider = build_provider(resolved)?; // --- Canonical aiOS runtime adapters --- let event_store: Arc = @@ -410,6 +450,7 @@ fn run_serve( // Build provider stack and blocking HTTP clients before entering Tokio runtime. let data_dir_owned = data_dir.to_path_buf(); + let port = resolved.port; let tokio_runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .build()?; @@ -435,14 +476,22 @@ fn run_serve( async fn run_chat( data_dir: PathBuf, - port: u16, + resolved: &ResolvedConfig, session: Option, url: Option, ) -> anyhow::Result<()> { // Ensure daemon is running first to avoid redb lock conflicts. let base_url = match url { Some(u) => u, - None => daemon::ensure_daemon(&data_dir, port).await?, + None => { + daemon::ensure_daemon( + &data_dir, + resolved.port, + Some(resolved.provider.as_str()).filter(|s| !s.is_empty()), + resolved.model.as_deref(), + ) + .await? + } }; // Resolve session via API (no direct journal access). @@ -458,7 +507,7 @@ async fn run_chat( async fn run_message( data_dir: PathBuf, - port: u16, + resolved: &ResolvedConfig, message: String, session: Option, url: Option, @@ -467,7 +516,15 @@ async fn run_message( // Ensure daemon is running first. let base_url = match url { Some(u) => u, - None => daemon::ensure_daemon(&data_dir, port).await?, + None => { + daemon::ensure_daemon( + &data_dir, + resolved.port, + Some(resolved.provider.as_str()).filter(|s| !s.is_empty()), + resolved.model.as_deref(), + ) + .await? 
+ } }; // Resolve session via API (no direct journal access). @@ -477,17 +534,149 @@ async fn run_message( tracing::warn!(%error, "failed to persist last_session hint"); } - let exit_code = cli_run::run_cli(&base_url, &session, &message, json_output).await?; + let exit_code = cli_run::run_cli( + &base_url, + &session, + &message, + json_output, + resolved.model.as_deref(), + ) + .await?; if exit_code != 0 { std::process::exit(exit_code); } Ok(()) } +#[allow(clippy::print_stdout)] +fn run_config(data_dir: &Path, action: ConfigAction) -> anyhow::Result<()> { + match action { + ConfigAction::Init => { + let path = config::local_config_path(data_dir); + if path.exists() { + println!("Config file already exists: {}", path.display()); + } else { + std::fs::create_dir_all(data_dir)?; + std::fs::write(&path, config::default_config_content())?; + println!("Created config: {}", path.display()); + } + } + ConfigAction::Set { key, value } => { + let mut cfg = config::load_config(data_dir); + cfg.set_key(&key, &value) + .map_err(|e| anyhow::anyhow!("{e}"))?; + config::save_config(data_dir, &cfg)?; + println!("{key} = {value}"); + } + ConfigAction::Get { key } => { + let cfg = config::load_config(data_dir); + match cfg.get_key(&key) { + Some(value) => println!("{value}"), + None => println!("(not set)"), + } + } + ConfigAction::List => { + let cfg = config::load_config(data_dir); + let content = toml::to_string_pretty(&cfg) + .map_err(|e| anyhow::anyhow!("failed to serialize config: {e}"))?; + if content.trim().is_empty() + || content.trim() == "[defaults]\n\n[agent]" + || content + .lines() + .all(|l| l.trim().is_empty() || l.starts_with('[')) + { + println!("(no config values set)"); + if let Some(path) = config::global_config_path() { + println!("Global config: {}", path.display()); + } + println!( + "Local config: {}", + config::local_config_path(data_dir).display() + ); + } else { + print!("{content}"); + } + } + } + Ok(()) +} + +#[allow(clippy::print_stdout)] +async fn 
run_status(data_dir: &Path, resolved: &ResolvedConfig) -> anyhow::Result<()> { + println!("Arcan Status"); + println!("============"); + + // Config + println!( + "Provider: {}", + if resolved.provider.is_empty() { + "(auto-detect)" + } else { + &resolved.provider + } + ); + println!( + "Model: {}", + resolved.model.as_deref().unwrap_or("(provider default)") + ); + println!("Port: {}", resolved.port); + println!("Data dir: {}", data_dir.display()); + + // Daemon status + let base_url = format!("http://127.0.0.1:{}", resolved.port); + match daemon::check_existing_pid(data_dir) { + Some(pid) => { + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(2)) + .build()?; + let healthy = matches!( + client.get(format!("{base_url}/health")).send().await, + Ok(resp) if resp.status().is_success() + ); + if healthy { + println!("Daemon: running (PID {pid}, healthy)"); + } else { + println!("Daemon: running (PID {pid}, NOT healthy)"); + } + } + None => { + println!("Daemon: not running"); + } + } + + // Last session + match read_last_session_hint(data_dir) { + Some(session) => println!("Session: {session}"), + None => println!("Session: (none)"), + } + + // Config file locations + if let Some(global_path) = config::global_config_path() { + let exists = global_path.exists(); + println!( + "Global cfg: {} {}", + global_path.display(), + if exists { "" } else { "(not found)" } + ); + } + let local_path = config::local_config_path(data_dir); + let exists = local_path.exists(); + println!( + "Local cfg: {} {}", + local_path.display(), + if exists { "" } else { "(not found)" } + ); + + Ok(()) +} + fn main() -> anyhow::Result<()> { let cli = Cli::parse(); let data_dir = resolve_data_dir(&cli.data_dir)?; + // Load layered config. 
+ let file_config = config::load_config(&data_dir); + match cli.command { Some(Command::Serve { max_iterations, @@ -498,7 +687,16 @@ fn main() -> anyhow::Result<()> { .with_env_filter(EnvFilter::from_default_env()) .init(); - run_serve(&data_dir, cli.port, max_iterations, approval_timeout) + let resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + max_iterations, + approval_timeout, + ); + + run_serve(&data_dir, &resolved) } Some(Command::Chat { session, url }) => { // File-based logging for TUI mode (don't clobber the terminal) @@ -510,10 +708,19 @@ fn main() -> anyhow::Result<()> { .with_env_filter(EnvFilter::from_default_env()) .init(); + let resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + None, + None, + ); + let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .build()?; - runtime.block_on(run_chat(data_dir, cli.port, session, url)) + runtime.block_on(run_chat(data_dir, &resolved, session, url)) } Some(Command::Run { message, @@ -530,10 +737,52 @@ fn main() -> anyhow::Result<()> { .with_env_filter(EnvFilter::from_default_env()) .init(); + let resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + None, + None, + ); + + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build()?; + runtime.block_on(run_message( + data_dir, &resolved, message, session, url, json, + )) + } + Some(Command::Config { action }) => run_config(&data_dir, action), + Some(Command::Status) => { + let resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + None, + None, + ); + let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .build()?; - runtime.block_on(run_message(data_dir, cli.port, message, session, url, json)) + runtime.block_on(run_status(&data_dir, &resolved)) + } + Some(Command::Stop) => { + let 
resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + None, + None, + ); + + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build()?; + runtime.block_on(daemon::stop_daemon(&data_dir, resolved.port)) } None => { // Default: launch TUI with auto-daemon (same as `arcan chat`) @@ -545,10 +794,19 @@ fn main() -> anyhow::Result<()> { .with_env_filter(EnvFilter::from_default_env()) .init(); + let resolved = config::resolve( + &file_config, + cli.provider.as_deref(), + cli.model.as_deref(), + cli.port, + None, + None, + ); + let runtime = tokio::runtime::Builder::new_multi_thread() .enable_all() .build()?; - runtime.block_on(run_chat(data_dir, cli.port, None, None)) + runtime.block_on(run_chat(data_dir, &resolved, None, None)) } } } diff --git a/deny.toml b/deny.toml index 596b01c..04f7bd6 100644 --- a/deny.toml +++ b/deny.toml @@ -1,15 +1,20 @@ -# cargo-deny configuration +# cargo-deny configuration (v2) # Run: cargo deny check # Install: cargo install cargo-deny +[graph] +targets = [] +all-features = false +no-default-features = false + [advisories] -vulnerability = "deny" -unmaintained = "warn" -yanked = "warn" -notice = "warn" +version = 2 +# paste is unmaintained but still widely used transitively (via ratatui). +# No action needed until ratatui migrates to pastey. +ignore = ["RUSTSEC-2024-0436"] [licenses] -unlicensed = "deny" +version = 2 allow = [ "MIT", "Apache-2.0",