diff --git a/CHANGELOG.md b/CHANGELOG.md index c1b0c3d..012ed08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,17 @@ All notable changes to rtk (Rust Token Killer) will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [Unreleased] + +### Features + +* **platform:** add multi-platform AI assistant support infrastructure + - Added `platform.ai_platform` configuration (claude, gemini, cursor, windsurf) + - Implemented SessionProvider trait for Gemini, Cursor, and Windsurf + - Added config.toml.example with platform configuration examples + - Discover command now respects configured AI platform + - Providers return helpful error messages for non-Claude platforms + ## [0.20.0](https://github.com/rtk-ai/rtk/compare/v0.19.0...v0.20.0) (2026-02-16) diff --git a/CLAUDE.md b/CLAUDE.md index fc8002e..21fb08a 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -2,6 +2,8 @@ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. +**Note**: RTK now supports multiple AI platforms (Claude, Gemini, Cursor, Windsurf) through the `platform.ai_platform` configuration. See config.rs for details. + ## Project Overview **rtk (Rust Token Killer)** is a high-performance CLI proxy that minimizes LLM token consumption by filtering and compressing command outputs. It achieves 60-90% token savings on common development operations through smart filtering, grouping, truncation, and deduplication. diff --git a/README.md b/README.md index 4960e85..df91846 100644 --- a/README.md +++ b/README.md @@ -419,6 +419,28 @@ database_path = "/path/to/custom.db" Priority: `RTK_DB_PATH` env var > `config.toml` > default location. +### AI Platform Configuration + +RTK now supports multiple AI coding assistants. 
Configure your platform in `~/.config/rtk/config.toml`: + +```toml +[platform] +# Supported values: claude, gemini, cursor, windsurf +ai_platform = "claude" # Default +``` + +**Supported Platforms:** +- **claude** (default) - Anthropic Claude Code (fully supported) +- **gemini** - Google Gemini Code Assist (planned) +- **cursor** - Cursor AI (planned) +- **windsurf** - Windsurf (planned) + +**Current Status:** +- ✅ **Claude Code**: Full integration with hooks, session discovery, and command tracking +- 🚧 **Gemini/Cursor/Windsurf**: Infrastructure in place, implementation in progress + +To use RTK with Claude Code (default), no configuration needed. For other platforms, set `ai_platform` in your config file. Note that non-Claude platforms are not yet fully implemented. + ### Tee: Full Output Recovery When RTK filters command output, LLM agents lose failure details (stack traces, assertion messages) and may re-run the same command 2-3 times. The **tee** feature saves raw output to a file so the agent can read it without re-executing. 
diff --git a/config.toml.example b/config.toml.example new file mode 100644 index 0000000..79bf503 --- /dev/null +++ b/config.toml.example @@ -0,0 +1,38 @@ +# RTK Configuration Example +# Copy to ~/.config/rtk/config.toml and customize + +# Tracking configuration +[tracking] +enabled = true +history_days = 90 +# Optional: custom database path +# database_path = "/path/to/custom.db" + +# Display configuration +[display] +colors = true +emoji = true +max_width = 120 + +# Filter configuration +[filters] +ignore_dirs = [".git", "node_modules", "target", "__pycache__", ".venv", "vendor"] +ignore_files = ["*.lock", "*.min.js", "*.min.css"] + +# Tee configuration (output recovery on failures) +[tee] +enabled = true +mode = "failures" # "failures", "always", or "never" +max_files = 20 +max_file_size = 1048576 # 1MB +# Optional: custom directory +# directory = "/custom/path" + +# Platform configuration (NEW) +[platform] +# Supported AI coding assistant platforms: +# - "claude" : Anthropic Claude Code (fully supported) +# - "gemini" : Google Gemini Code Assist (planned) +# - "cursor" : Cursor AI (planned) +# - "windsurf" : Windsurf (planned) +ai_platform = "claude" diff --git a/src/config.rs b/src/config.rs index 1015012..5fb6a08 100644 --- a/src/config.rs +++ b/src/config.rs @@ -2,6 +2,35 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use std::path::PathBuf; +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum AIPlatform { + Claude, + Gemini, + Cursor, + Windsurf, +} + +impl Default for AIPlatform { + fn default() -> Self { + AIPlatform::Claude + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct PlatformConfig { + /// Which AI coding assistant platform is being used + pub ai_platform: AIPlatform, +} + +impl Default for PlatformConfig { + fn default() -> Self { + Self { + ai_platform: AIPlatform::Claude, + } + } +} + #[derive(Debug, Serialize, Deserialize, Default)] pub struct Config { 
#[serde(default)] @@ -12,6 +41,8 @@ pub struct Config { pub filters: FilterConfig, #[serde(default)] pub tee: crate::tee::TeeConfig, + #[serde(default)] + pub platform: PlatformConfig, } #[derive(Debug, Serialize, Deserialize)] @@ -125,3 +156,57 @@ pub fn show_config() -> Result<()> { Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_default_platform_is_claude() { + let config = Config::default(); + assert_eq!(config.platform.ai_platform, AIPlatform::Claude); + } + + #[test] + fn test_platform_config_serialization() { + let config = Config { + platform: PlatformConfig { + ai_platform: AIPlatform::Gemini, + }, + ..Default::default() + }; + + let toml_str = toml::to_string(&config).unwrap(); + assert!(toml_str.contains("gemini")); + } + + #[test] + fn test_platform_config_deserialization() { + let toml_str = r#" + [platform] + ai_platform = "cursor" + "#; + + let config: Config = toml::from_str(toml_str).unwrap(); + assert_eq!(config.platform.ai_platform, AIPlatform::Cursor); + } + + #[test] + fn test_all_platforms_deserialize() { + let platforms = vec!["claude", "gemini", "cursor", "windsurf"]; + for platform in platforms { + let toml_str = format!( + r#" + [platform] + ai_platform = "{}" + "#, + platform + ); + let config: Config = toml::from_str(&toml_str).unwrap(); + assert!(matches!( + config.platform.ai_platform, + AIPlatform::Claude | AIPlatform::Gemini | AIPlatform::Cursor | AIPlatform::Windsurf + )); + } + } +} diff --git a/src/discover/mod.rs b/src/discover/mod.rs index a8cee12..f3dbda8 100644 --- a/src/discover/mod.rs +++ b/src/discover/mod.rs @@ -5,10 +5,12 @@ mod report; use anyhow::Result; use std::collections::HashMap; -use provider::{ClaudeProvider, SessionProvider}; +use provider::{ClaudeProvider, CursorProvider, GeminiProvider, SessionProvider, WindsurfProvider}; use registry::{category_avg_tokens, classify_command, split_command_chain, Classification}; use report::{DiscoverReport, SupportedEntry, UnsupportedEntry}; +use 
crate::config::{AIPlatform, Config}; + /// Aggregation bucket for supported commands. struct SupportedBucket { rtk_equivalent: &'static str, @@ -34,7 +36,20 @@ pub fn run( format: &str, verbose: u8, ) -> Result<()> { - let provider = ClaudeProvider; + // Load config to determine which AI platform to use + let config = Config::load().unwrap_or_else(|e| { + if verbose > 0 { + eprintln!("Warning: Failed to load config ({}), using defaults", e); + } + Config::default() + }); + + let provider: Box<dyn SessionProvider> = match config.platform.ai_platform { + AIPlatform::Claude => Box::new(ClaudeProvider), + AIPlatform::Gemini => Box::new(GeminiProvider), + AIPlatform::Cursor => Box::new(CursorProvider), + AIPlatform::Windsurf => Box::new(WindsurfProvider), + }; // Determine project filter let project_filter = if all { @@ -45,7 +60,7 @@ // Default: current working directory let cwd = std::env::current_dir()?; let cwd_str = cwd.to_string_lossy().to_string(); - let encoded = ClaudeProvider::encode_project_path(&cwd_str); + let encoded = provider.encode_project_path(&cwd_str); Some(encoded) }; diff --git a/src/discover/provider.rs b/src/discover/provider.rs index e9218b2..adbc4cb 100644 --- a/src/discover/provider.rs +++ b/src/discover/provider.rs @@ -21,7 +21,7 @@ pub struct ExtractedCommand { pub sequence_index: usize, } -/// Trait for session providers (Claude Code, future: Cursor, Windsurf). +/// Trait for session providers (Claude Code, Gemini, Cursor, Windsurf).
pub trait SessionProvider { fn discover_sessions( &self, @@ -29,9 +29,15 @@ since_days: Option<u32>, ) -> Result<Vec<PathBuf>>; fn extract_commands(&self, path: &Path) -> Result<Vec<ExtractedCommand>>; + fn name(&self) -> &'static str; + /// Encode a project path to the provider's directory naming format + fn encode_project_path(&self, path: &str) -> String; } pub struct ClaudeProvider; +pub struct GeminiProvider; +pub struct CursorProvider; +pub struct WindsurfProvider; impl ClaudeProvider { /// Get the base directory for Claude Code projects. @@ -232,6 +238,108 @@ impl SessionProvider for ClaudeProvider { Ok(commands) } + + fn name(&self) -> &'static str { + "Claude Code" + } + + fn encode_project_path(&self, path: &str) -> String { + Self::encode_project_path(path) + } +} + +// Gemini Code Assist Provider (Google Cloud) +impl SessionProvider for GeminiProvider { + fn discover_sessions( + &self, + _project_filter: Option<&str>, + _since_days: Option<u32>, + ) -> Result<Vec<PathBuf>> { + // TODO: Implement Gemini session discovery + // Gemini Code Assist stores sessions in a different location/format + // For now, return empty list + anyhow::bail!( + "Gemini Code Assist integration not yet implemented.\n\ + Set platform.ai_platform = \"claude\" in config.toml to use Claude Code."
+ ) + } + + fn extract_commands(&self, _path: &Path) -> Result<Vec<ExtractedCommand>> { + // TODO: Implement Gemini command extraction + anyhow::bail!("Gemini Code Assist integration not yet implemented") + } + + fn name(&self) -> &'static str { + "Gemini Code Assist" + } + + fn encode_project_path(&self, path: &str) -> String { + // TODO: Implement Gemini-specific path encoding when session format is known + // For now, return raw path as we don't know Gemini's storage structure + path.to_string() + } +} + +// Cursor AI Provider +impl SessionProvider for CursorProvider { + fn discover_sessions( + &self, + _project_filter: Option<&str>, + _since_days: Option<u32>, + ) -> Result<Vec<PathBuf>> { + // TODO: Implement Cursor session discovery + // Cursor likely stores sessions in ~/.cursor/ or similar + // For now, return empty list + anyhow::bail!( + "Cursor AI integration not yet implemented.\n\ + Set platform.ai_platform = \"claude\" in config.toml to use Claude Code." + ) + } + + fn extract_commands(&self, _path: &Path) -> Result<Vec<ExtractedCommand>> { + // TODO: Implement Cursor command extraction + anyhow::bail!("Cursor AI integration not yet implemented") + } + + fn name(&self) -> &'static str { + "Cursor AI" + } + + fn encode_project_path(&self, path: &str) -> String { + // TODO: Implement Cursor-specific path encoding when session format is known + // For now, return raw path as we don't know Cursor's storage structure + path.to_string() + } +} + +// Windsurf Provider +impl SessionProvider for WindsurfProvider { + fn discover_sessions( + &self, + _project_filter: Option<&str>, + _since_days: Option<u32>, + ) -> Result<Vec<PathBuf>> { + // TODO: Implement Windsurf session discovery + anyhow::bail!( + "Windsurf integration not yet implemented.\n\ + Set platform.ai_platform = \"claude\" in config.toml to use Claude Code."
+ ) + } + + fn extract_commands(&self, _path: &Path) -> Result<Vec<ExtractedCommand>> { + // TODO: Implement Windsurf command extraction + anyhow::bail!("Windsurf integration not yet implemented") + } + + fn name(&self) -> &'static str { + "Windsurf" + } + + fn encode_project_path(&self, path: &str) -> String { + // TODO: Implement Windsurf-specific path encoding when session format is known + // For now, return raw path as we don't know Windsurf's storage structure + path.to_string() + } } #[cfg(test)]