From 4ff8129eaf57f17f8a9f4be647c2044119179515 Mon Sep 17 00:00:00 2001 From: lbb Date: Sat, 28 Feb 2026 14:05:58 +0800 Subject: [PATCH 01/12] Add support for Windsurf and Cline AI (#24) * feat(adapter): add windsurf and cline support Co-authored-by: lbb * feat(adapter): add windsurf and cline skills support Co-authored-by: lbb --------- Co-authored-by: Cursor Agent --- KNOWLEDGE_BASE.md | 60 +++- README.md | 62 +++- README_ZH.md | 62 +++- src/__tests__/cline-rules.test.ts | 36 +++ src/__tests__/cline-skills.test.ts | 35 +++ src/__tests__/windsurf-rules.test.ts | 36 +++ src/__tests__/windsurf-skills.test.ts | 35 +++ src/adapters/cline-rules.ts | 20 ++ src/adapters/cline-skills.ts | 15 + src/adapters/index.ts | 20 ++ src/adapters/windsurf-rules.ts | 20 ++ src/adapters/windsurf-skills.ts | 15 + src/commands/helpers.ts | 95 ++++-- src/completion/scripts.ts | 246 ++++++++++++++- src/index.ts | 423 +++++++++++++++++++++++++- src/project-config.ts | 109 ++++++- 16 files changed, 1243 insertions(+), 46 deletions(-) create mode 100644 src/__tests__/cline-rules.test.ts create mode 100644 src/__tests__/cline-skills.test.ts create mode 100644 src/__tests__/windsurf-rules.test.ts create mode 100644 src/__tests__/windsurf-skills.test.ts create mode 100644 src/adapters/cline-rules.ts create mode 100644 src/adapters/cline-skills.ts create mode 100644 src/adapters/windsurf-rules.ts create mode 100644 src/adapters/windsurf-skills.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index 476998b..006a647 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -1,14 +1,14 @@ # Project Knowledge Base ## Project Overview -**AI Rules Sync (ais)** is a CLI tool designed to synchronize agent rules from a centralized Git repository to local projects using symbolic links. 
It supports **Cursor rules**, **Cursor commands**, **Cursor skills**, **Cursor subagents**, **Copilot instructions**, **Claude Code rules/skills/subagents/CLAUDE.md**, **Trae rules/skills**, **OpenCode agents/skills/commands/tools**, **Codex rules/skills**, **Gemini CLI commands/skills/subagents**, and **universal AGENTS.md support**, keeping projects up-to-date across teams. +**AI Rules Sync (ais)** is a CLI tool designed to synchronize agent rules from a centralized Git repository to local projects using symbolic links. It supports **Cursor rules**, **Cursor commands**, **Cursor skills**, **Cursor subagents**, **Copilot instructions**, **Claude Code rules/skills/subagents/CLAUDE.md**, **Trae rules/skills**, **OpenCode agents/skills/commands/tools**, **Codex rules/skills**, **Gemini CLI commands/skills/subagents**, **Windsurf rules/skills**, **Cline rules/skills**, and **universal AGENTS.md support**, keeping projects up-to-date across teams. A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to manage AI config files in `~/.claude/`, `~/.cursor/`, etc. Entries are tracked in `~/.config/ai-rules-sync/user.json` (or a user-configured custom path for dotfiles integration) and gitignore management is skipped automatically. ## Core Concepts -- **Rules Repository**: A Git repository containing rule definitions in official tool paths (`.cursor/rules/`, `.cursor/commands/`, `.cursor/skills/`, `.cursor/agents/`, `.github/instructions/`, `.claude/skills/`, `.claude/agents/`, `.claude/` (for CLAUDE.md), `.trae/rules/`, `.trae/skills/`, `.opencode/agents/`, `.opencode/skills/`, `.opencode/commands/`, `.opencode/tools/`, `.codex/rules/`, `.agents/skills/`, `.gemini/commands/`, `.gemini/skills/`, `.gemini/agents/`, `agents-md/`). 
+- **Rules Repository**: A Git repository containing rule definitions in official tool paths (`.cursor/rules/`, `.cursor/commands/`, `.cursor/skills/`, `.cursor/agents/`, `.github/instructions/`, `.claude/skills/`, `.claude/agents/`, `.claude/` (for CLAUDE.md), `.trae/rules/`, `.trae/skills/`, `.opencode/agents/`, `.opencode/skills/`, `.opencode/commands/`, `.opencode/tools/`, `.codex/rules/`, `.agents/skills/`, `.gemini/commands/`, `.gemini/skills/`, `.gemini/agents/`, `.windsurf/rules/`, `.windsurf/skills/`, `.clinerules/`, `.cline/skills/`, `agents-md/`). - **Symbolic Links**: Entries are linked from the local cache of the repo to project directories, avoiding file duplication and drift. -- **Dependency Tracking**: Uses `ai-rules-sync.json` to track project dependencies (Cursor rules/commands/skills/subagents, Copilot instructions, Claude Code rules/skills/subagents/CLAUDE.md, Trae rules/skills, OpenCode agents/skills/commands/tools, Codex rules/skills, Gemini CLI commands/skills/subagents, AGENTS.md). +- **Dependency Tracking**: Uses `ai-rules-sync.json` to track project dependencies (Cursor rules/commands/skills/subagents, Copilot instructions, Claude Code rules/skills/subagents/CLAUDE.md, Trae rules/skills, OpenCode agents/skills/commands/tools, Codex rules/skills, Gemini CLI commands/skills/subagents, Windsurf rules/skills, Cline rules/skills, AGENTS.md). - **Privacy**: Supports private/local entries via `ai-rules-sync.local.json` and `.git/info/exclude`. - **User Mode**: `--user` / `-u` flag on add/remove/install commands. Sets `projectPath = $HOME`, stores dependencies in `~/.config/ai-rules-sync/user.json`, skips gitignore management. Enables `ais user install` to restore all user-scope symlinks on a new machine. (`--global`/`-g` kept as deprecated aliases.) - **User Config Path**: Configurable via `ais config user set ` for dotfiles integration (e.g. `~/dotfiles/ai-rules-sync/user.json`). 
@@ -49,6 +49,10 @@ src/ │ ├── gemini-commands.ts # Gemini CLI commands adapter (file mode) │ ├── gemini-skills.ts # Gemini CLI skills adapter (directory mode) │ ├── gemini-agents.ts # Gemini CLI agents adapter (file mode) +│ ├── windsurf-rules.ts # Windsurf rules adapter (file mode) +│ ├── windsurf-skills.ts # Windsurf skills adapter (directory mode) +│ ├── cline-rules.ts # Cline rules adapter (file mode) +│ ├── cline-skills.ts # Cline skills adapter (directory mode) │ └── agents-md.ts # Universal AGENTS.md adapter (file mode) ├── cli/ # CLI registration layer │ └── register.ts # Declarative command registration (registerAdapterCommands) @@ -229,6 +233,17 @@ interface SourceDirConfig { skills?: string; // Default: ".gemini/skills" agents?: string; // Default: ".gemini/agents" }; + warp?: { + skills?: string; // Default: ".agents/skills" + }; + windsurf?: { + rules?: string; // Default: ".windsurf/rules" + skills?: string; // Default: ".windsurf/skills" + }; + cline?: { + rules?: string; // Default: ".clinerules" + skills?: string; // Default: ".cline/skills" + }; agentsMd?: { file?: string; // Default: "." (repository root) }; @@ -277,6 +292,17 @@ interface ProjectConfig { skills?: Record; agents?: Record; }; + warp?: { + skills?: Record; + }; + windsurf?: { + rules?: Record; + skills?: Record; + }; + cline?: { + rules?: Record; + skills?: Record; + }; // Universal AGENTS.md support (tool-agnostic) agentsMd?: Record; } @@ -431,6 +457,26 @@ Gemini CLI (https://geminicli.com/) is supported with three entry types: - File-based synchronization with `.md` suffix for Gemini CLI subagents (Markdown with YAML frontmatter). - **Purpose**: Specialized subagents with defined capabilities. +### 14.4. Windsurf Rule Synchronization +- **Syntax**: `ais windsurf add [alias]` +- Links `/.windsurf/rules/` to `.windsurf/rules/`. +- File-based synchronization with `.md` suffix for Windsurf workspace rules. + +### 14.5. 
Windsurf Skill Synchronization +- **Syntax**: `ais windsurf skills add [alias]` +- Links `/.windsurf/skills/` to `.windsurf/skills/`. +- Directory-based synchronization for Windsurf Cascade skills (`SKILL.md` inside each skill folder). + +### 14.6. Cline Rule Synchronization +- **Syntax**: `ais cline add [alias]` +- Links `/.clinerules/` to `.clinerules/`. +- File-based synchronization with `.md`/`.txt` suffixes for Cline rules. + +### 14.7. Cline Skill Synchronization +- **Syntax**: `ais cline skills add [alias]` +- Links `/.cline/skills/` to `.cline/skills/`. +- Directory-based synchronization for Cline skills (`SKILL.md` inside each skill folder). + ### 15. Import Command - **Syntax**: `ais import ` or `ais import ` - Copies entry from project to rules repository, commits, and creates symlink back. @@ -454,6 +500,8 @@ Gemini CLI (https://geminicli.com/) is supported with three entry types: - `ais opencode install` - Install all OpenCode agents, skills, commands, and tools. - `ais codex install` - Install all Codex rules and skills. - `ais gemini install` - Install all Gemini CLI commands, skills, and subagents. +- `ais windsurf install` - Install all Windsurf rules and skills. +- `ais cline install` - Install all Cline rules and skills. - `ais agents-md install` - Install AGENTS.md files. - `ais install` - Install everything (smart dispatch). - `ais install --user` / `ais user install` - Install all user-scope AI config files from `~/.config/ai-rules-sync/user.json`. (`--global` and `ais global install` kept as deprecated aliases.) 
@@ -875,6 +923,10 @@ ais user install | gemini-commands | gemini | commands | file | .gemini/commands | .toml | [Gemini Commands](https://geminicli.com/docs/cli/custom-commands/) | | gemini-skills | gemini | skills | directory | .gemini/skills | - | [Gemini Skills](https://geminicli.com/docs/cli/skills/) | | gemini-agents | gemini | subagents | file | .gemini/agents | .md | [Gemini Subagents](https://geminicli.com/docs/core/subagents/) | +| windsurf-rules | windsurf | rules | file | .windsurf/rules | .md | [Windsurf Memories & Rules](https://docs.windsurf.com/windsurf/cascade/memories) | +| windsurf-skills | windsurf | skills | directory | .windsurf/skills | - | [Windsurf Skills](https://docs.windsurf.com/windsurf/cascade/skills) | +| cline-rules | cline | rules | file | .clinerules | .md, .txt | [Cline Rules](https://docs.cline.bot/customization/cline-rules) | +| cline-skills | cline | skills | directory | .cline/skills | - | [Cline Skills](https://docs.cline.bot/customization/skills) | ## Development Guidelines - **TypeScript**: Strict mode enabled. @@ -895,6 +947,8 @@ ais user install ## Changelog ### 2026-02 +- Added **Windsurf support**: rules (`.windsurf/rules`, `.md`) and skills (`.windsurf/skills`) with full CLI/completion integration +- Added **Cline support**: rules (`.clinerules`, `.md`/`.txt`) and skills (`.cline/skills`) with full CLI/completion integration - Added **User Mode** (`--user` / `-u`): manage personal AI config files (`~/.claude/CLAUDE.md`, etc.) with version control; `ais user install` restores all symlinks on new machines - Added **claude-md adapter**: sync CLAUDE.md-style files; `ais claude md add CLAUDE --user` - Added **User Config Path**: `ais config user set ` for dotfiles integration diff --git a/README.md b/README.md index 8069915..e69d41a 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ **AI Rules Sync (AIS)** - Synchronize, manage, and share your AI agent rules across projects and teams. -Stop copying `.mdc` files around. 
Manage your rules in Git repositories and sync them via symbolic links. Supports 9 AI tools and **User Mode** for personal config files — see [Supported Tools](#supported-tools). +Stop copying `.mdc` files around. Manage your rules in Git repositories and sync them via symbolic links. Supports 11 AI tools and **User Mode** for personal config files — see [Supported Tools](#supported-tools). --- @@ -98,6 +98,10 @@ ais completion install | Gemini CLI | Subagents | file | `.gemini/agents/` | `.md` | [Docs](https://geminicli.com/docs/core/subagents/) | | Warp | Rules | file | `.` (root) | `.md` | [Docs](https://docs.warp.dev/agent-platform/capabilities/rules) — same as AGENTS.md, use `ais agents-md` | | Warp | Skills | directory | `.agents/skills/` | - | [Docs](https://docs.warp.dev/agent-platform/capabilities/skills) | +| Windsurf | Rules | file | `.windsurf/rules/` | `.md` | [Docs](https://docs.windsurf.com/windsurf/cascade/memories) | +| Windsurf | Skills | directory | `.windsurf/skills/` | - | [Docs](https://docs.windsurf.com/windsurf/cascade/skills) | +| Cline | Rules | file | `.clinerules/` | `.md`, `.txt` | [Docs](https://docs.cline.bot/customization/cline-rules) | +| Cline | Skills | directory | `.cline/skills/` | - | [Docs](https://docs.cline.bot/customization/skills) | | **Universal** | **AGENTS.md** | file | `.` (root) | `.md` | [Standard](https://agents.md/) | **Modes:** @@ -595,6 +599,40 @@ ais warp skills remove my-skill ais warp skills install ``` +### Windsurf + +```bash +# Add rule +ais windsurf add project-style + +# Add skill +ais windsurf skills add deploy-staging + +# Remove +ais windsurf remove project-style + +# Install all +ais windsurf install +``` + +> Note: Windsurf Memories are managed inside Cascade UI/runtime. AIS syncs file-based artifacts (`.windsurf/rules` and `.windsurf/skills`). 
+ +### Cline + +```bash +# Add rule +ais cline add coding + +# Add skill +ais cline skills add release-checklist + +# Remove +ais cline remove coding + +# Install all +ais cline install +``` + --- ## Advanced Features @@ -847,6 +885,26 @@ Create `ai-rules-sync.json` in your rules repository: "commands": ".opencode/commands", "tools": ".opencode/tools" }, + "codex": { + "rules": ".codex/rules", + "skills": ".agents/skills" + }, + "gemini": { + "commands": ".gemini/commands", + "skills": ".gemini/skills", + "agents": ".gemini/agents" + }, + "warp": { + "skills": ".agents/skills" + }, + "windsurf": { + "rules": ".windsurf/rules", + "skills": ".windsurf/skills" + }, + "cline": { + "rules": ".clinerules", + "skills": ".cline/skills" + }, "agentsMd": { "file": "." } @@ -923,7 +981,7 @@ ais copilot instructions add } ``` -All other tools (`copilot`, `trae`, `opencode`, `codex`, `gemini`) follow the same structure — see [Supported Tools](#supported-tools) for their key names. +All other tools (`copilot`, `trae`, `opencode`, `codex`, `gemini`, `warp`, `windsurf`, `cline`) follow the same structure — see [Supported Tools](#supported-tools) for their key names. 
**Format types:** diff --git a/README_ZH.md b/README_ZH.md index 44e9bca..d4b9dda 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -8,7 +8,7 @@ **AI Rules Sync (AIS)** - 跨项目和团队同步、管理和共享你的 AI 代理规则。 -不再复制粘贴 `.mdc` 文件。在 Git 仓库中管理规则,通过软链接同步。支持 9 款 AI 工具及 **User 模式**(管理个人配置文件)——详见[支持的工具](#支持的工具)。 +不再复制粘贴 `.mdc` 文件。在 Git 仓库中管理规则,通过软链接同步。支持 11 款 AI 工具及 **User 模式**(管理个人配置文件)——详见[支持的工具](#支持的工具)。 --- @@ -98,6 +98,10 @@ ais completion install | Gemini CLI | Subagents | file | `.gemini/agents/` | `.md` | [文档](https://geminicli.com/docs/core/subagents/) | | Warp | Rules | file | `.`(根目录) | `.md` | [文档](https://docs.warp.dev/agent-platform/capabilities/rules) — 与 AGENTS.md 相同,使用 `ais agents-md` | | Warp | Skills | directory | `.agents/skills/` | - | [文档](https://docs.warp.dev/agent-platform/capabilities/skills) | +| Windsurf | Rules | file | `.windsurf/rules/` | `.md` | [文档](https://docs.windsurf.com/windsurf/cascade/memories) | +| Windsurf | Skills | directory | `.windsurf/skills/` | - | [文档](https://docs.windsurf.com/windsurf/cascade/skills) | +| Cline | Rules | file | `.clinerules/` | `.md`, `.txt` | [文档](https://docs.cline.bot/customization/cline-rules) | +| Cline | Skills | directory | `.cline/skills/` | - | [文档](https://docs.cline.bot/customization/skills) | | **通用** | **AGENTS.md** | file | `.`(根目录) | `.md` | [标准](https://agents.md/) | **模式说明:** @@ -595,6 +599,40 @@ ais warp skills remove my-skill ais warp skills install ``` +### Windsurf + +```bash +# 添加规则 +ais windsurf add project-style + +# 添加技能 +ais windsurf skills add deploy-staging + +# 移除 +ais windsurf remove project-style + +# 安装全部 +ais windsurf install +``` + +> 说明:Windsurf Memories 由 Cascade 运行时/界面管理。AIS 仅同步可文件化内容(`.windsurf/rules` 与 `.windsurf/skills`)。 + +### Cline + +```bash +# 添加规则 +ais cline add coding + +# 添加技能 +ais cline skills add release-checklist + +# 移除 +ais cline remove coding + +# 安装全部 +ais cline install +``` + --- ## 高级功能 @@ -847,6 +885,26 @@ ais user install "commands": ".opencode/commands", 
"tools": ".opencode/tools" }, + "codex": { + "rules": ".codex/rules", + "skills": ".agents/skills" + }, + "gemini": { + "commands": ".gemini/commands", + "skills": ".gemini/skills", + "agents": ".gemini/agents" + }, + "warp": { + "skills": ".agents/skills" + }, + "windsurf": { + "rules": ".windsurf/rules", + "skills": ".windsurf/skills" + }, + "cline": { + "rules": ".clinerules", + "skills": ".cline/skills" + }, "agentsMd": { "file": "." } @@ -923,7 +981,7 @@ ais copilot instructions add } ``` -其他工具(`copilot`、`trae`、`opencode`、`codex`、`gemini`)的结构相同,键名参见[支持的工具](#支持的工具)。 +其他工具(`copilot`、`trae`、`opencode`、`codex`、`gemini`、`warp`、`windsurf`、`cline`)的结构相同,键名参见[支持的工具](#支持的工具)。 **格式类型:** diff --git a/src/__tests__/cline-rules.test.ts b/src/__tests__/cline-rules.test.ts new file mode 100644 index 0000000..039dbef --- /dev/null +++ b/src/__tests__/cline-rules.test.ts @@ -0,0 +1,36 @@ +import { describe, it, expect } from 'vitest'; +import { clineRulesAdapter } from '../adapters/cline-rules.js'; +import { adapterRegistry } from '../adapters/index.js'; + +describe('cline-rules adapter', () => { + it('should have correct basic properties', () => { + expect(clineRulesAdapter.name).toBe('cline-rules'); + expect(clineRulesAdapter.tool).toBe('cline'); + expect(clineRulesAdapter.subtype).toBe('rules'); + expect(clineRulesAdapter.defaultSourceDir).toBe('.clinerules'); + expect(clineRulesAdapter.targetDir).toBe('.clinerules'); + expect(clineRulesAdapter.mode).toBe('file'); + expect(clineRulesAdapter.fileSuffixes).toEqual(['.md', '.txt']); + }); + + it('should have correct config path', () => { + expect(clineRulesAdapter.configPath).toEqual(['cline', 'rules']); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('cline-rules'); + expect(retrieved).toBe(clineRulesAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('cline', 'rules'); + 
expect(retrieved).toBe(clineRulesAdapter); + }); + + it('should have required adapter methods', () => { + expect(clineRulesAdapter.addDependency).toBeDefined(); + expect(clineRulesAdapter.removeDependency).toBeDefined(); + expect(clineRulesAdapter.link).toBeDefined(); + expect(clineRulesAdapter.unlink).toBeDefined(); + }); +}); diff --git a/src/__tests__/cline-skills.test.ts b/src/__tests__/cline-skills.test.ts new file mode 100644 index 0000000..2fc0f9e --- /dev/null +++ b/src/__tests__/cline-skills.test.ts @@ -0,0 +1,35 @@ +import { describe, it, expect } from 'vitest'; +import { clineSkillsAdapter } from '../adapters/cline-skills.js'; +import { adapterRegistry } from '../adapters/index.js'; + +describe('cline-skills adapter', () => { + it('should have correct basic properties', () => { + expect(clineSkillsAdapter.name).toBe('cline-skills'); + expect(clineSkillsAdapter.tool).toBe('cline'); + expect(clineSkillsAdapter.subtype).toBe('skills'); + expect(clineSkillsAdapter.defaultSourceDir).toBe('.cline/skills'); + expect(clineSkillsAdapter.targetDir).toBe('.cline/skills'); + expect(clineSkillsAdapter.mode).toBe('directory'); + }); + + it('should have correct config path', () => { + expect(clineSkillsAdapter.configPath).toEqual(['cline', 'skills']); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('cline-skills'); + expect(retrieved).toBe(clineSkillsAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('cline', 'skills'); + expect(retrieved).toBe(clineSkillsAdapter); + }); + + it('should have required adapter methods', () => { + expect(clineSkillsAdapter.addDependency).toBeDefined(); + expect(clineSkillsAdapter.removeDependency).toBeDefined(); + expect(clineSkillsAdapter.link).toBeDefined(); + expect(clineSkillsAdapter.unlink).toBeDefined(); + }); +}); diff --git a/src/__tests__/windsurf-rules.test.ts b/src/__tests__/windsurf-rules.test.ts new 
file mode 100644 index 0000000..e1ff074 --- /dev/null +++ b/src/__tests__/windsurf-rules.test.ts @@ -0,0 +1,36 @@ +import { describe, it, expect } from 'vitest'; +import { windsurfRulesAdapter } from '../adapters/windsurf-rules.js'; +import { adapterRegistry } from '../adapters/index.js'; + +describe('windsurf-rules adapter', () => { + it('should have correct basic properties', () => { + expect(windsurfRulesAdapter.name).toBe('windsurf-rules'); + expect(windsurfRulesAdapter.tool).toBe('windsurf'); + expect(windsurfRulesAdapter.subtype).toBe('rules'); + expect(windsurfRulesAdapter.defaultSourceDir).toBe('.windsurf/rules'); + expect(windsurfRulesAdapter.targetDir).toBe('.windsurf/rules'); + expect(windsurfRulesAdapter.mode).toBe('file'); + expect(windsurfRulesAdapter.fileSuffixes).toEqual(['.md']); + }); + + it('should have correct config path', () => { + expect(windsurfRulesAdapter.configPath).toEqual(['windsurf', 'rules']); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('windsurf-rules'); + expect(retrieved).toBe(windsurfRulesAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('windsurf', 'rules'); + expect(retrieved).toBe(windsurfRulesAdapter); + }); + + it('should have required adapter methods', () => { + expect(windsurfRulesAdapter.addDependency).toBeDefined(); + expect(windsurfRulesAdapter.removeDependency).toBeDefined(); + expect(windsurfRulesAdapter.link).toBeDefined(); + expect(windsurfRulesAdapter.unlink).toBeDefined(); + }); +}); diff --git a/src/__tests__/windsurf-skills.test.ts b/src/__tests__/windsurf-skills.test.ts new file mode 100644 index 0000000..e54db78 --- /dev/null +++ b/src/__tests__/windsurf-skills.test.ts @@ -0,0 +1,35 @@ +import { describe, it, expect } from 'vitest'; +import { windsurfSkillsAdapter } from '../adapters/windsurf-skills.js'; +import { adapterRegistry } from '../adapters/index.js'; + 
+describe('windsurf-skills adapter', () => { + it('should have correct basic properties', () => { + expect(windsurfSkillsAdapter.name).toBe('windsurf-skills'); + expect(windsurfSkillsAdapter.tool).toBe('windsurf'); + expect(windsurfSkillsAdapter.subtype).toBe('skills'); + expect(windsurfSkillsAdapter.defaultSourceDir).toBe('.windsurf/skills'); + expect(windsurfSkillsAdapter.targetDir).toBe('.windsurf/skills'); + expect(windsurfSkillsAdapter.mode).toBe('directory'); + }); + + it('should have correct config path', () => { + expect(windsurfSkillsAdapter.configPath).toEqual(['windsurf', 'skills']); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('windsurf-skills'); + expect(retrieved).toBe(windsurfSkillsAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('windsurf', 'skills'); + expect(retrieved).toBe(windsurfSkillsAdapter); + }); + + it('should have required adapter methods', () => { + expect(windsurfSkillsAdapter.addDependency).toBeDefined(); + expect(windsurfSkillsAdapter.removeDependency).toBeDefined(); + expect(windsurfSkillsAdapter.link).toBeDefined(); + expect(windsurfSkillsAdapter.unlink).toBeDefined(); + }); +}); diff --git a/src/adapters/cline-rules.ts b/src/adapters/cline-rules.ts new file mode 100644 index 0000000..ca4bb26 --- /dev/null +++ b/src/adapters/cline-rules.ts @@ -0,0 +1,20 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter, createMultiSuffixResolver, createSuffixAwareTargetResolver } from './base.js'; + +const SUFFIXES = ['.md', '.txt']; + +/** + * Adapter for Cline rules (.md/.txt files in .clinerules/) + */ +export const clineRulesAdapter: SyncAdapter = createBaseAdapter({ + name: 'cline-rules', + tool: 'cline', + subtype: 'rules', + configPath: ['cline', 'rules'], + mode: 'file', + fileSuffixes: SUFFIXES, + defaultSourceDir: '.clinerules', + targetDir: '.clinerules', + resolveSource: 
createMultiSuffixResolver(SUFFIXES, 'Rule'), + resolveTargetName: createSuffixAwareTargetResolver(SUFFIXES) +}); diff --git a/src/adapters/cline-skills.ts b/src/adapters/cline-skills.ts new file mode 100644 index 0000000..4378f65 --- /dev/null +++ b/src/adapters/cline-skills.ts @@ -0,0 +1,15 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter } from './base.js'; + +/** + * Adapter for Cline skills (directories with SKILL.md in .cline/skills/) + */ +export const clineSkillsAdapter: SyncAdapter = createBaseAdapter({ + name: 'cline-skills', + tool: 'cline', + subtype: 'skills', + configPath: ['cline', 'skills'], + mode: 'directory', + defaultSourceDir: '.cline/skills', + targetDir: '.cline/skills' +}); diff --git a/src/adapters/index.ts b/src/adapters/index.ts index b7c8cdc..05bbec5 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -24,6 +24,10 @@ import { geminiCommandsAdapter } from './gemini-commands.js'; import { geminiSkillsAdapter } from './gemini-skills.js'; import { geminiAgentsAdapter } from './gemini-agents.js'; import { warpSkillsAdapter } from './warp-skills.js'; +import { windsurfRulesAdapter } from './windsurf-rules.js'; +import { clineRulesAdapter } from './cline-rules.js'; +import { windsurfSkillsAdapter } from './windsurf-skills.js'; +import { clineSkillsAdapter } from './cline-skills.js'; import { ProjectConfig } from '../project-config.js'; // Re-export types and utilities @@ -65,6 +69,10 @@ class DefaultAdapterRegistry implements AdapterRegistry { this.register(geminiSkillsAdapter); this.register(geminiAgentsAdapter); this.register(warpSkillsAdapter); + this.register(windsurfRulesAdapter); + this.register(clineRulesAdapter); + this.register(windsurfSkillsAdapter); + this.register(clineSkillsAdapter); } register(adapter: SyncAdapter): void { @@ -212,5 +220,17 @@ export function findAdapterForAlias( if (cfg.warp?.skills?.[alias]) { return { adapter: warpSkillsAdapter, section: 'warp.skills' }; } + if 
(cfg.windsurf?.rules?.[alias]) { + return { adapter: windsurfRulesAdapter, section: 'windsurf.rules' }; + } + if (cfg.windsurf?.skills?.[alias]) { + return { adapter: windsurfSkillsAdapter, section: 'windsurf.skills' }; + } + if (cfg.cline?.rules?.[alias]) { + return { adapter: clineRulesAdapter, section: 'cline.rules' }; + } + if (cfg.cline?.skills?.[alias]) { + return { adapter: clineSkillsAdapter, section: 'cline.skills' }; + } return null; } diff --git a/src/adapters/windsurf-rules.ts b/src/adapters/windsurf-rules.ts new file mode 100644 index 0000000..94ae311 --- /dev/null +++ b/src/adapters/windsurf-rules.ts @@ -0,0 +1,20 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter, createSingleSuffixResolver, createSuffixAwareTargetResolver } from './base.js'; + +const SUFFIX = '.md'; + +/** + * Adapter for Windsurf rules (.md files in .windsurf/rules/) + */ +export const windsurfRulesAdapter: SyncAdapter = createBaseAdapter({ + name: 'windsurf-rules', + tool: 'windsurf', + subtype: 'rules', + configPath: ['windsurf', 'rules'], + mode: 'file', + fileSuffixes: [SUFFIX], + defaultSourceDir: '.windsurf/rules', + targetDir: '.windsurf/rules', + resolveSource: createSingleSuffixResolver(SUFFIX, 'Rule'), + resolveTargetName: createSuffixAwareTargetResolver([SUFFIX]) +}); diff --git a/src/adapters/windsurf-skills.ts b/src/adapters/windsurf-skills.ts new file mode 100644 index 0000000..72d4265 --- /dev/null +++ b/src/adapters/windsurf-skills.ts @@ -0,0 +1,15 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter } from './base.js'; + +/** + * Adapter for Windsurf skills (directories with SKILL.md in .windsurf/skills/) + */ +export const windsurfSkillsAdapter: SyncAdapter = createBaseAdapter({ + name: 'windsurf-skills', + tool: 'windsurf', + subtype: 'skills', + configPath: ['windsurf', 'skills'], + mode: 'directory', + defaultSourceDir: '.windsurf/skills', + targetDir: '.windsurf/skills' +}); diff --git a/src/commands/helpers.ts 
b/src/commands/helpers.ts index c2bea6a..e891f97 100644 --- a/src/commands/helpers.ts +++ b/src/commands/helpers.ts @@ -70,41 +70,73 @@ export async function getTargetRepo(options: { target?: string }): Promise { const cfg = await getCombinedProjectConfig(projectPath); - const cursorCount = Object.keys(cfg.cursor?.rules || {}).length + - Object.keys(cfg.cursor?.commands || {}).length + - Object.keys(cfg.cursor?.skills || {}).length; - const copilotCount = Object.keys(cfg.copilot?.instructions || {}).length; - const claudeCount = Object.keys(cfg.claude?.skills || {}).length + - Object.keys(cfg.claude?.agents || {}).length; - const traeCount = Object.keys(cfg.trae?.rules || {}).length + - Object.keys(cfg.trae?.skills || {}).length; - const opencodeCount = Object.keys(cfg.opencode?.agents || {}).length + - Object.keys(cfg.opencode?.skills || {}).length + - Object.keys(cfg.opencode?.commands || {}).length + - Object.keys(cfg.opencode?.tools || {}).length; - const codexCount = Object.keys(cfg.codex?.rules || {}).length + - Object.keys(cfg.codex?.skills || {}).length; - const geminiCount = Object.keys(cfg.gemini?.commands || {}).length + - Object.keys(cfg.gemini?.skills || {}).length + - Object.keys(cfg.gemini?.agents || {}).length; - const agentsMdCount = Object.keys(cfg.agentsMd || {}).length; - - if (cursorCount > 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'cursor'; - if (copilotCount > 0 && cursorCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'copilot'; - if (claudeCount > 0 && cursorCount === 0 && copilotCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'claude'; - if (traeCount > 0 && cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && opencodeCount === 0 && 
codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'trae'; - if (opencodeCount > 0 && cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'opencode'; - if (codexCount > 0 && cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'codex'; - if (geminiCount > 0 && cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && agentsMdCount === 0) return 'gemini'; - if (agentsMdCount > 0 && cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && geminiCount === 0) return 'agents-md'; - if (cursorCount === 0 && copilotCount === 0 && claudeCount === 0 && traeCount === 0 && opencodeCount === 0 && codexCount === 0 && geminiCount === 0 && agentsMdCount === 0) return 'none'; + const counts: Record, number> = { + cursor: + Object.keys(cfg.cursor?.rules || {}).length + + Object.keys(cfg.cursor?.commands || {}).length + + Object.keys(cfg.cursor?.skills || {}).length + + Object.keys(cfg.cursor?.agents || {}).length, + copilot: + Object.keys(cfg.copilot?.instructions || {}).length + + Object.keys(cfg.copilot?.skills || {}).length + + Object.keys(cfg.copilot?.prompts || {}).length + + Object.keys(cfg.copilot?.agents || {}).length, + claude: + Object.keys(cfg.claude?.skills || {}).length + + Object.keys(cfg.claude?.agents || {}).length + + Object.keys(cfg.claude?.rules || {}).length + + Object.keys(cfg.claude?.md || {}).length, + trae: + Object.keys(cfg.trae?.rules || {}).length + + Object.keys(cfg.trae?.skills || {}).length, + opencode: + Object.keys(cfg.opencode?.agents || {}).length + + Object.keys(cfg.opencode?.skills || {}).length + + Object.keys(cfg.opencode?.commands || {}).length + + Object.keys(cfg.opencode?.tools || 
{}).length, + codex: + Object.keys(cfg.codex?.rules || {}).length + + Object.keys(cfg.codex?.skills || {}).length, + gemini: + Object.keys(cfg.gemini?.commands || {}).length + + Object.keys(cfg.gemini?.skills || {}).length + + Object.keys(cfg.gemini?.agents || {}).length, + warp: Object.keys(cfg.warp?.skills || {}).length, + windsurf: + Object.keys(cfg.windsurf?.rules || {}).length + + Object.keys(cfg.windsurf?.skills || {}).length, + cline: + Object.keys(cfg.cline?.rules || {}).length + + Object.keys(cfg.cline?.skills || {}).length, + 'agents-md': Object.keys(cfg.agentsMd || {}).length + }; + + const activeModes = (Object.entries(counts) as [Exclude, number][]) + .filter(([, count]) => count > 0) + .map(([mode]) => mode); + + if (activeModes.length === 0) return 'none'; + if (activeModes.length === 1) return activeModes[0]; return 'ambiguous'; } @@ -112,10 +144,11 @@ export async function inferDefaultMode(projectPath: string): Promise/dev/null)" -- "\$cur") ) + return 0 + fi + + # windsurf rules subcommand add + if [[ "\$ppprev" == "windsurf" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then + COMPREPLY=( $(compgen -W "$(ais _complete windsurf-rules 2>/dev/null)" -- "\$cur") ) + return 0 + fi + + # windsurf skills add + if [[ "\$ppprev" == "windsurf" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then + COMPREPLY=( $(compgen -W "$(ais _complete windsurf-skills 2>/dev/null)" -- "\$cur") ) + return 0 + fi + + # cline rules add + if [[ "\$pprev" == "cline" && "\$prev" == "add" ]]; then + COMPREPLY=( $(compgen -W "$(ais _complete cline-rules 2>/dev/null)" -- "\$cur") ) + return 0 + fi + + # cline rules subcommand add + if [[ "\$ppprev" == "cline" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then + COMPREPLY=( $(compgen -W "$(ais _complete cline-rules 2>/dev/null)" -- "\$cur") ) + return 0 + fi + + # cline skills add + if [[ "\$ppprev" == "cline" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then + COMPREPLY=( $(compgen -W "$(ais _complete cline-skills 
2>/dev/null)" -- "\$cur") ) + return 0 + fi + # agents-md if [[ "\$pprev" == "agents-md" && "\$prev" == "add" ]]; then COMPREPLY=( $(compgen -W "$(ais _complete agents-md 2>/dev/null)" -- "\$cur") ) @@ -274,6 +310,42 @@ _ais_complete() { return 0 fi + # windsurf + if [[ "\$prev" == "windsurf" ]]; then + COMPREPLY=( $(compgen -W "add remove install add-all import rules skills" -- "\$cur") ) + return 0 + fi + + # cline + if [[ "\$prev" == "cline" ]]; then + COMPREPLY=( $(compgen -W "add remove install add-all import rules skills" -- "\$cur") ) + return 0 + fi + + # windsurf rules + if [[ "\$pprev" == "windsurf" && "\$prev" == "rules" ]]; then + COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) + return 0 + fi + + # windsurf skills + if [[ "\$pprev" == "windsurf" && "\$prev" == "skills" ]]; then + COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) + return 0 + fi + + # cline rules + if [[ "\$pprev" == "cline" && "\$prev" == "rules" ]]; then + COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) + return 0 + fi + + # cline skills + if [[ "\$pprev" == "cline" && "\$prev" == "skills" ]]; then + COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) + return 0 + fi + # agents-md if [[ "\$prev" == "agents-md" ]]; then COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) @@ -345,7 +417,7 @@ _ais_complete() { fi if [[ "\$prev" == "ais" ]]; then - COMPREPLY=( $(compgen -W "cursor copilot claude trae opencode codex gemini warp agents-md use list git add remove install import completion" -- "\$cur") ) + COMPREPLY=( $(compgen -W "cursor copilot claude trae opencode codex gemini warp windsurf cline agents-md use list git add remove install import completion" -- "\$cur") ) return 0 fi } @@ -365,6 +437,8 @@ subcmds=( 'codex:Manage Codex rules and skills' 'gemini:Manage Gemini CLI commands, skills, and agents' 'warp:Manage Warp agent skills' + 'windsurf:Manage Windsurf rules and skills' + 'cline:Manage Cline 
rules and skills' 'agents-md:Manage AGENTS.md files (agents.md standard)' 'use:Configure rules repository' 'list:List configured repositories' @@ -376,7 +450,7 @@ subcmds=( 'completion:Output shell completion script' ) - local -a cursor_subcmds copilot_subcmds claude_subcmds trae_subcmds opencode_subcmds codex_subcmds gemini_subcmds warp_subcmds agents_md_subcmds cursor_rules_subcmds cursor_commands_subcmds cursor_skills_subcmds cursor_agents_subcmds copilot_instructions_subcmds copilot_skills_subcmds copilot_prompts_subcmds copilot_agents_subcmds claude_skills_subcmds claude_agents_subcmds trae_rules_subcmds trae_skills_subcmds opencode_agents_subcmds opencode_skills_subcmds opencode_commands_subcmds opencode_tools_subcmds codex_rules_subcmds codex_skills_subcmds gemini_commands_subcmds gemini_skills_subcmds gemini_agents_subcmds warp_skills_subcmds + local -a cursor_subcmds copilot_subcmds claude_subcmds trae_subcmds opencode_subcmds codex_subcmds gemini_subcmds warp_subcmds windsurf_subcmds cline_subcmds agents_md_subcmds cursor_rules_subcmds cursor_commands_subcmds cursor_skills_subcmds cursor_agents_subcmds copilot_instructions_subcmds copilot_skills_subcmds copilot_prompts_subcmds copilot_agents_subcmds claude_skills_subcmds claude_agents_subcmds trae_rules_subcmds trae_skills_subcmds opencode_agents_subcmds opencode_skills_subcmds opencode_commands_subcmds opencode_tools_subcmds codex_rules_subcmds codex_skills_subcmds gemini_commands_subcmds gemini_skills_subcmds gemini_agents_subcmds warp_skills_subcmds windsurf_rules_subcmds windsurf_skills_subcmds cline_rules_subcmds cline_skills_subcmds cursor_subcmds=('add:Add a Cursor rule' 'remove:Remove a Cursor rule' 'install:Install all Cursor entries' 'import:Import entry to repository' 'rules:Manage rules explicitly' 'commands:Manage commands' 'skills:Manage skills' 'agents:Manage agents') copilot_subcmds=('instructions:Manage GitHub Copilot instructions' 'prompts:Manage GitHub Copilot prompt files' 
'skills:Manage GitHub Copilot skills' 'agents:Manage GitHub Copilot custom agents' 'install:Install all GitHub Copilot entries') copilot_instructions_subcmds=('add:Add a GitHub Copilot instruction' 'remove:Remove a GitHub Copilot instruction' 'install:Install all GitHub Copilot instructions' 'import:Import instruction to repository') @@ -408,6 +482,12 @@ subcmds=( gemini_agents_subcmds=('add:Add a Gemini agent' 'remove:Remove a Gemini agent' 'install:Install all Gemini agents' 'import:Import agent to repository') warp_subcmds=('skills:Manage Warp skills' 'install:Install all Warp entries' 'import:Import entry to repository') warp_skills_subcmds=('add:Add a Warp skill' 'remove:Remove a Warp skill' 'install:Install all Warp skills' 'import:Import skill to repository') + windsurf_subcmds=('add:Add a Windsurf rule' 'remove:Remove a Windsurf rule' 'install:Install all Windsurf entries' 'add-all:Add all Windsurf entries' 'import:Import entry to repository' 'rules:Manage Windsurf rules' 'skills:Manage Windsurf skills') + windsurf_rules_subcmds=('add:Add a Windsurf rule' 'remove:Remove a Windsurf rule' 'install:Install all Windsurf rules' 'import:Import rule to repository') + windsurf_skills_subcmds=('add:Add a Windsurf skill' 'remove:Remove a Windsurf skill' 'install:Install all Windsurf skills' 'import:Import skill to repository') + cline_subcmds=('add:Add a Cline rule' 'remove:Remove a Cline rule' 'install:Install all Cline entries' 'add-all:Add all Cline entries' 'import:Import entry to repository' 'rules:Manage Cline rules' 'skills:Manage Cline skills') + cline_rules_subcmds=('add:Add a Cline rule' 'remove:Remove a Cline rule' 'install:Install all Cline rules' 'import:Import rule to repository') + cline_skills_subcmds=('add:Add a Cline skill' 'remove:Remove a Cline skill' 'install:Install all Cline skills' 'import:Import skill to repository') _arguments -C \\ '1:command:->command' \\ @@ -446,6 +526,12 @@ subcmds=( warp) _describe 'subcommand' warp_subcmds ;; + 
windsurf) + _describe 'subcommand' windsurf_subcmds + ;; + cline) + _describe 'subcommand' cline_subcmds + ;; agents-md) _describe 'subcommand' agents_md_subcmds ;; @@ -582,6 +668,46 @@ subcmds=( ;; esac ;; + windsurf) + case "\$words[3]" in + add) + local -a windsurf_rules + windsurf_rules=(\${(f)"\$(ais _complete windsurf-rules 2>/dev/null)"}) + if (( \$#windsurf_rules )); then + compadd "\$windsurf_rules[@]" + fi + ;; + rules) + _describe 'subsubcommand' windsurf_rules_subcmds + ;; + skills) + _describe 'subsubcommand' windsurf_skills_subcmds + ;; + *) + _describe 'subsubcommand' windsurf_subcmds + ;; + esac + ;; + cline) + case "\$words[3]" in + add) + local -a cline_rules + cline_rules=(\${(f)"\$(ais _complete cline-rules 2>/dev/null)"}) + if (( \$#cline_rules )); then + compadd "\$cline_rules[@]" + fi + ;; + rules) + _describe 'subsubcommand' cline_rules_subcmds + ;; + skills) + _describe 'subsubcommand' cline_skills_subcmds + ;; + *) + _describe 'subsubcommand' cline_subcmds + ;; + esac + ;; agents-md) case "\$words[3]" in add) @@ -892,6 +1018,72 @@ subcmds=( ;; esac ;; + windsurf) + case \"\$words[3]\" in + add) + local -a windsurf_rules + windsurf_rules=(\${(f)\"$(ais _complete windsurf-rules 2>/dev/null)\"}) + if (( \$#windsurf_rules )); then + compadd \"\$windsurf_rules[@]\" + fi + ;; + rules) + case \"\$words[4]\" in + add) + local -a windsurf_rules + windsurf_rules=(\${(f)\"$(ais _complete windsurf-rules 2>/dev/null)\"}) + if (( \$#windsurf_rules )); then + compadd \"\$windsurf_rules[@]\" + fi + ;; + esac + ;; + skills) + case \"\$words[4]\" in + add) + local -a windsurf_skills + windsurf_skills=(\${(f)\"$(ais _complete windsurf-skills 2>/dev/null)\"}) + if (( \$#windsurf_skills )); then + compadd \"\$windsurf_skills[@]\" + fi + ;; + esac + ;; + esac + ;; + cline) + case \"\$words[3]\" in + add) + local -a cline_rules + cline_rules=(\${(f)\"$(ais _complete cline-rules 2>/dev/null)\"}) + if (( \$#cline_rules )); then + compadd \"\$cline_rules[@]\" + fi 
+ ;; + rules) + case \"\$words[4]\" in + add) + local -a cline_rules + cline_rules=(\${(f)\"$(ais _complete cline-rules 2>/dev/null)\"}) + if (( \$#cline_rules )); then + compadd \"\$cline_rules[@]\" + fi + ;; + esac + ;; + skills) + case \"\$words[4]\" in + add) + local -a cline_skills + cline_skills=(\${(f)\"$(ais _complete cline-skills 2>/dev/null)\"}) + if (( \$#cline_skills )); then + compadd \"\$cline_skills[@]\" + fi + ;; + esac + ;; + esac + ;; agents-md) case \"\$words[3]\" in add) @@ -928,6 +1120,8 @@ complete -c ais -n "__fish_use_subcommand" -a "opencode" -d "Manage OpenCode age complete -c ais -n "__fish_use_subcommand" -a "codex" -d "Manage Codex rules and skills" complete -c ais -n "__fish_use_subcommand" -a "gemini" -d "Manage Gemini CLI commands, skills, and agents" complete -c ais -n "__fish_use_subcommand" -a "warp" -d "Manage Warp agent skills" +complete -c ais -n "__fish_use_subcommand" -a "windsurf" -d "Manage Windsurf rules and skills" +complete -c ais -n "__fish_use_subcommand" -a "cline" -d "Manage Cline rules and skills" complete -c ais -n "__fish_use_subcommand" -a "agents-md" -d "Manage AGENTS.md files (agents.md standard)" complete -c ais -n "__fish_use_subcommand" -a "use" -d "Configure rules repository" complete -c ais -n "__fish_use_subcommand" -a "list" -d "List configured repositories" @@ -1125,6 +1319,48 @@ complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Warp skills" complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" +# windsurf subcommands +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import 
rules skills" -a "add" -d "Add a Windsurf rule" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "remove" -d "Remove a Windsurf rule" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "install" -d "Install all Windsurf entries" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add-all" -d "Add all Windsurf entries" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "import" -d "Import entry to repository" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "rules" -d "Manage Windsurf rules" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "skills" -d "Manage Windsurf skills" + +# windsurf rules subcommands +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Windsurf rule" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Windsurf rule" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Windsurf rules" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" + 
+# windsurf skills subcommands +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Windsurf skill" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Windsurf skill" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Windsurf skills" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + +# cline subcommands +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add" -d "Add a Cline rule" +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "remove" -d "Remove a Cline rule" +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "install" -d "Install all Cline entries" +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add-all" -d "Add all Cline entries" +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "import" -d "Import entry to repository" +complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "rules" -d "Manage Cline rules" +complete -c ais -n "__fish_seen_subcommand_from cline; and 
not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "skills" -d "Manage Cline skills" + +# cline rules subcommands +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cline rule" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cline rule" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cline rules" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" + +# cline skills subcommands +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cline skill" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cline skill" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cline skills" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + # agents-md subcommands complete -c ais -n "__fish_seen_subcommand_from agents-md; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an AGENTS.md file" complete -c ais -n "__fish_seen_subcommand_from agents-md; 
and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an AGENTS.md file" @@ -1155,6 +1391,12 @@ complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcomma complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete gemini-skills 2>/dev/null)" complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete gemini-agents 2>/dev/null)" complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete warp-skills 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-rules 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-rules 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-skills 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from add" -a "(ais _complete cline-rules 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete cline-rules 2>/dev/null)" +complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete cline-skills 2>/dev/null)" complete -c ais -n "__fish_seen_subcommand_from agents-md; and __fish_seen_subcommand_from add" -a "(ais _complete agents-md 2>/dev/null)" `; diff --git a/src/index.ts b/src/index.ts index d8c2ec4..b2dbbf9 100644 --- a/src/index.ts +++ 
b/src/index.ts @@ -11,6 +11,7 @@ import { getCombinedProjectConfig, getRepoSourceConfig, getSourceDir } from './p import { checkAndPromptCompletion, forceInstallCompletion } from './completion.js'; import { getCompletionScript } from './completion/scripts.js'; import { adapterRegistry, getAdapter, findAdapterForAlias } from './adapters/index.js'; +import { SyncAdapter } from './adapters/types.js'; import { copilotInstructionsAdapter } from './adapters/copilot-instructions.js'; import { copilotSkillsAdapter } from './adapters/copilot-skills.js'; import { copilotPromptsAdapter } from './adapters/copilot-prompts.js'; @@ -138,7 +139,7 @@ program // ============ Top-level shortcuts ============ program .command('add') - .description('Add an entry (auto-detects cursor/copilot if unambiguous)') + .description('Add an entry (auto-detects cursor/copilot when unambiguous)') .argument('', 'Rule/Instruction name in the rules repo') .argument('[alias]', 'Alias in the project') .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') @@ -169,8 +170,22 @@ program throw new Error('For Trae components, please use "ais trae rules/skills add" explicitly.'); } else if (mode === 'opencode') { throw new Error('For OpenCode components, please use "ais opencode agents/skills/commands/tools add" explicitly.'); + } else if (mode === 'codex') { + throw new Error('For Codex components, please use "ais codex rules/skills add" explicitly.'); + } else if (mode === 'gemini') { + throw new Error('For Gemini components, please use "ais gemini commands/skills/agents add" explicitly.'); + } else if (mode === 'warp') { + throw new Error('For Warp components, please use "ais warp skills add" explicitly.'); + } else if (mode === 'windsurf') { + const adapter = getAdapter('windsurf', 'rules'); + await handleAdd(adapter, { projectPath, repo: currentRepo, isLocal: options.local || false }, name, alias, addOptions); + } else if (mode === 'cline') { + const adapter = getAdapter('cline', 'rules'); 
+ await handleAdd(adapter, { projectPath, repo: currentRepo, isLocal: options.local || false }, name, alias, addOptions); } else if (mode === 'agents-md') { throw new Error('For AGENTS.md files, please use "ais agents-md add" explicitly.'); + } else { + throw new Error(`Cannot determine which tool to use for mode "${mode}"`); } } catch (error: any) { console.error(chalk.red('Error adding entry:'), error.message); @@ -222,6 +237,35 @@ program await a.removeDependency(projectPath, alias); } return; + } else if (mode === 'opencode') { + const opencodeAdapters = adapterRegistry.getForTool('opencode'); + for (const a of opencodeAdapters) { + await a.unlink(projectPath, alias); + await a.removeDependency(projectPath, alias); + } + return; + } else if (mode === 'codex') { + const codexAdapters = adapterRegistry.getForTool('codex'); + for (const a of codexAdapters) { + await a.unlink(projectPath, alias); + await a.removeDependency(projectPath, alias); + } + return; + } else if (mode === 'gemini') { + const geminiAdapters = adapterRegistry.getForTool('gemini'); + for (const a of geminiAdapters) { + await a.unlink(projectPath, alias); + await a.removeDependency(projectPath, alias); + } + return; + } else if (mode === 'warp') { + adapter = getAdapter('warp', 'skills'); + } else if (mode === 'windsurf') { + adapter = getAdapter('windsurf', 'rules'); + } else if (mode === 'cline') { + adapter = getAdapter('cline', 'rules'); + } else if (mode === 'agents-md') { + adapter = getAdapter('agents-md', 'file'); } else { throw new Error(`Cannot determine which tool to use for alias "${alias}"`); } @@ -236,7 +280,7 @@ program program .command('install') - .description('Install all entries from config (cursor + copilot + claude + trae), or --user for user config') + .description('Install all entries from config, or --user for user config') .option('-u, --user', 'Install all user config entries (~/.config/ai-rules-sync/user.json)') .option('-g, --global', 'Install all user config entries 
(deprecated alias for --user)') .action(async (cmdOptions: { user?: boolean; global?: boolean }) => { @@ -275,6 +319,15 @@ program if (mode === 'gemini' || mode === 'ambiguous') { await installEntriesForTool(adapterRegistry.getForTool('gemini'), projectPath); } + if (mode === 'warp' || mode === 'ambiguous') { + await installEntriesForTool(adapterRegistry.getForTool('warp'), projectPath); + } + if (mode === 'windsurf' || mode === 'ambiguous') { + await installEntriesForTool(adapterRegistry.getForTool('windsurf'), projectPath); + } + if (mode === 'cline' || mode === 'ambiguous') { + await installEntriesForTool(adapterRegistry.getForTool('cline'), projectPath); + } if (mode === 'agents-md' || mode === 'ambiguous') { await installEntriesForTool(adapterRegistry.getForTool('agents-md'), projectPath); } @@ -288,7 +341,7 @@ program program .command('add-all') .description('Discover and install all configurations from rules repository') - .option('--tools ', 'Filter by tools (comma-separated): cursor,copilot,claude,trae,opencode,codex,gemini,agents-md') + .option('--tools ', 'Filter by tools (comma-separated): cursor,copilot,claude,trae,opencode,codex,gemini,warp,windsurf,cline,agents-md') .option('--adapters ', 'Filter by adapters (comma-separated)') .option('--dry-run', 'Preview without making changes') .option('-f, --force', 'Overwrite existing entries') @@ -1197,6 +1250,353 @@ registerAdapterCommands({ adapter: getAdapter('gemini', 'skills'), parentCommand const geminiAgents = gemini.command('agents').description('Manage Gemini agents'); registerAdapterCommands({ adapter: getAdapter('gemini', 'agents'), parentCommand: geminiAgents, programOpts: () => program.opts() }); +// ============ Warp command group ============ +const warp = program + .command('warp') + .description('Manage Warp skills in a project'); + +warp + .command('install') + .description('Install all Warp skills from config') + .action(async () => { + try { + await 
installEntriesForTool(adapterRegistry.getForTool('warp'), process.cwd()); + } catch (error: any) { + console.error(chalk.red('Error installing Warp entries:'), error.message); + process.exit(1); + } + }); + +warp + .command('import ') + .description('Import Warp skill from project to repository') + .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') + .option('-m, --message ', 'Custom git commit message') + .option('-f, --force', 'Overwrite if entry already exists in repository') + .option('-p, --push', 'Push to remote repository after commit') + .action(async (name, options) => { + try { + const repo = await getTargetRepo(program.opts()); + await handleImport(getAdapter('warp', 'skills'), { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, options); + } catch (error: any) { + console.error(chalk.red('Error importing Warp skill:'), error.message); + process.exit(1); + } + }); + +const warpSkills = warp.command('skills').description('Manage Warp skills'); +registerAdapterCommands({ adapter: getAdapter('warp', 'skills'), parentCommand: warpSkills, programOpts: () => program.opts() }); + +// ============ Windsurf command group ============ +const windsurf = program + .command('windsurf') + .description('Manage Windsurf rules and skills in a project'); + +// windsurf add (default to rules) +windsurf + .command('add [alias]') + .description('Sync Windsurf rules to project (.windsurf/rules/...)') + .option('-l, --local', 'Add to ai-rules-sync.local.json (private rule)') + .option('-d, --target-dir ', 'Custom target directory for this entry') + .action(async (name, alias, options) => { + try { + const repo = await getTargetRepo(program.opts()); + const adapter = getAdapter('windsurf', 'rules'); + await handleAdd(adapter, { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, alias, { + local: options.local, + targetDir: options.targetDir + }); + } catch (error: any) { + console.error(chalk.red('Error adding 
Windsurf rule:'), error.message); + process.exit(1); + } + }); + +// windsurf remove (default to rules) +windsurf + .command('remove ') + .description('Remove a Windsurf rule from project') + .action(async (alias) => { + try { + const adapter = getAdapter('windsurf', 'rules'); + await handleRemove(adapter, process.cwd(), alias); + } catch (error: any) { + console.error(chalk.red('Error removing Windsurf rule:'), error.message); + process.exit(1); + } + }); + +windsurf + .command('install') + .description('Install all Windsurf rules and skills from config') + .action(async () => { + try { + await installEntriesForTool(adapterRegistry.getForTool('windsurf'), process.cwd()); + } catch (error: any) { + console.error(chalk.red('Error installing Windsurf entries:'), error.message); + process.exit(1); + } + }); + +windsurf + .command('add-all') + .description('Add all Windsurf entries from repository') + .option('--dry-run', 'Preview without making changes') + .option('-f, --force', 'Overwrite existing entries') + .option('-i, --interactive', 'Prompt for each entry') + .option('-l, --local', 'Add to ai-rules-sync.local.json') + .option('--skip-existing', 'Skip entries already in config') + .option('--quiet', 'Minimal output') + .option('-s, --source-dir ', 'Custom source directory (can be repeated)', collect) + .action(async (options) => { + try { + const projectPath = process.cwd(); + const opts = program.opts(); + const currentRepo = await getTargetRepo(opts); + let sourceDirOverrides; + if (options.sourceDir && options.sourceDir.length > 0) { + try { + sourceDirOverrides = parseSourceDirParams(options.sourceDir, 'windsurf'); + } catch (error: any) { + console.error(chalk.red('Error parsing --source-dir:'), error.message); + process.exit(1); + } + } + + const result = await handleAddAll( + projectPath, + currentRepo, + adapterRegistry, + { + target: opts.target, + tools: ['windsurf'], + dryRun: options.dryRun, + force: options.force, + interactive: options.interactive, 
+ isLocal: options.local, + skipExisting: options.skipExisting, + quiet: options.quiet, + sourceDirOverrides + } + ); + + if (!options.quiet) { + console.log(chalk.bold('\nSummary:')); + console.log(chalk.green(` Installed: ${result.installed}`)); + if (result.skipped > 0) { + console.log(chalk.yellow(` Skipped: ${result.skipped}`)); + } + if (result.errors.length > 0) { + console.log(chalk.red(` Errors: ${result.errors.length}`)); + result.errors.forEach(e => { + console.log(chalk.red(` - ${e.entry}: ${e.error}`)); + }); + } + } + + if (result.errors.length > 0) { + process.exit(1); + } + } catch (error: any) { + console.error(chalk.red('Error in windsurf add-all:'), error.message); + process.exit(1); + } + }); + +windsurf + .command('import ') + .description('Import Windsurf rule/skill from project to repository (auto-detects subtype)') + .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') + .option('-m, --message ', 'Custom git commit message') + .option('-f, --force', 'Overwrite if entry already exists in repository') + .option('-p, --push', 'Push to remote repository after commit') + .action(async (name, options) => { + try { + const projectPath = process.cwd(); + const repo = await getTargetRepo(program.opts()); + const windsurfAdapters = adapterRegistry.getForTool('windsurf'); + let foundAdapter: SyncAdapter | null = null; + + for (const adapter of windsurfAdapters) { + const targetPath = path.join(projectPath, adapter.targetDir, name); + if (await fs.pathExists(targetPath)) { + foundAdapter = adapter; + break; + } + } + + if (!foundAdapter) { + throw new Error(`Entry "${name}" not found in .windsurf/rules or .windsurf/skills.`); + } + + console.log(chalk.gray(`Detected ${foundAdapter.subtype}: ${name}`)); + await handleImport(foundAdapter, { projectPath, repo, isLocal: options.local || false }, name, options); + } catch (error: any) { + console.error(chalk.red('Error importing Windsurf entry:'), error.message); + process.exit(1); + } + }); + 
+const windsurfRules = windsurf.command('rules').description('Manage Windsurf rules'); +registerAdapterCommands({ adapter: getAdapter('windsurf', 'rules'), parentCommand: windsurfRules, programOpts: () => program.opts() }); +const windsurfSkills = windsurf.command('skills').description('Manage Windsurf skills'); +registerAdapterCommands({ adapter: getAdapter('windsurf', 'skills'), parentCommand: windsurfSkills, programOpts: () => program.opts() }); + +// ============ Cline command group ============ +const cline = program + .command('cline') + .description('Manage Cline rules and skills in a project'); + +// cline add (default to rules) +cline + .command('add [alias]') + .description('Sync Cline rules to project (.clinerules/...)') + .option('-l, --local', 'Add to ai-rules-sync.local.json (private rule)') + .option('-d, --target-dir ', 'Custom target directory for this entry') + .action(async (name, alias, options) => { + try { + const repo = await getTargetRepo(program.opts()); + const adapter = getAdapter('cline', 'rules'); + await handleAdd(adapter, { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, alias, { + local: options.local, + targetDir: options.targetDir + }); + } catch (error: any) { + console.error(chalk.red('Error adding Cline rule:'), error.message); + process.exit(1); + } + }); + +// cline remove (default to rules) +cline + .command('remove ') + .description('Remove a Cline rule from project') + .action(async (alias) => { + try { + const adapter = getAdapter('cline', 'rules'); + await handleRemove(adapter, process.cwd(), alias); + } catch (error: any) { + console.error(chalk.red('Error removing Cline rule:'), error.message); + process.exit(1); + } + }); + +cline + .command('install') + .description('Install all Cline rules and skills from config') + .action(async () => { + try { + await installEntriesForTool(adapterRegistry.getForTool('cline'), process.cwd()); + } catch (error: any) { + console.error(chalk.red('Error 
installing Cline entries:'), error.message); + process.exit(1); + } + }); + +cline + .command('add-all') + .description('Add all Cline entries from repository') + .option('--dry-run', 'Preview without making changes') + .option('-f, --force', 'Overwrite existing entries') + .option('-i, --interactive', 'Prompt for each entry') + .option('-l, --local', 'Add to ai-rules-sync.local.json') + .option('--skip-existing', 'Skip entries already in config') + .option('--quiet', 'Minimal output') + .option('-s, --source-dir ', 'Custom source directory (can be repeated)', collect) + .action(async (options) => { + try { + const projectPath = process.cwd(); + const opts = program.opts(); + const currentRepo = await getTargetRepo(opts); + let sourceDirOverrides; + if (options.sourceDir && options.sourceDir.length > 0) { + try { + sourceDirOverrides = parseSourceDirParams(options.sourceDir, 'cline'); + } catch (error: any) { + console.error(chalk.red('Error parsing --source-dir:'), error.message); + process.exit(1); + } + } + + const result = await handleAddAll( + projectPath, + currentRepo, + adapterRegistry, + { + target: opts.target, + tools: ['cline'], + dryRun: options.dryRun, + force: options.force, + interactive: options.interactive, + isLocal: options.local, + skipExisting: options.skipExisting, + quiet: options.quiet, + sourceDirOverrides + } + ); + + if (!options.quiet) { + console.log(chalk.bold('\nSummary:')); + console.log(chalk.green(` Installed: ${result.installed}`)); + if (result.skipped > 0) { + console.log(chalk.yellow(` Skipped: ${result.skipped}`)); + } + if (result.errors.length > 0) { + console.log(chalk.red(` Errors: ${result.errors.length}`)); + result.errors.forEach(e => { + console.log(chalk.red(` - ${e.entry}: ${e.error}`)); + }); + } + } + + if (result.errors.length > 0) { + process.exit(1); + } + } catch (error: any) { + console.error(chalk.red('Error in cline add-all:'), error.message); + process.exit(1); + } + }); + +cline + .command('import ') + 
.description('Import Cline rule/skill from project to repository (auto-detects subtype)') + .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') + .option('-m, --message ', 'Custom git commit message') + .option('-f, --force', 'Overwrite if entry already exists in repository') + .option('-p, --push', 'Push to remote repository after commit') + .action(async (name, options) => { + try { + const projectPath = process.cwd(); + const repo = await getTargetRepo(program.opts()); + const clineAdapters = adapterRegistry.getForTool('cline'); + let foundAdapter: SyncAdapter | null = null; + + for (const adapter of clineAdapters) { + const targetPath = path.join(projectPath, adapter.targetDir, name); + if (await fs.pathExists(targetPath)) { + foundAdapter = adapter; + break; + } + } + + if (!foundAdapter) { + throw new Error(`Entry "${name}" not found in .clinerules or .cline/skills.`); + } + + console.log(chalk.gray(`Detected ${foundAdapter.subtype}: ${name}`)); + await handleImport(foundAdapter, { projectPath, repo, isLocal: options.local || false }, name, options); + } catch (error: any) { + console.error(chalk.red('Error importing Cline entry:'), error.message); + process.exit(1); + } + }); + +const clineRules = cline.command('rules').description('Manage Cline rules'); +registerAdapterCommands({ adapter: getAdapter('cline', 'rules'), parentCommand: clineRules, programOpts: () => program.opts() }); +const clineSkills = cline.command('skills').description('Manage Cline skills'); +registerAdapterCommands({ adapter: getAdapter('cline', 'skills'), parentCommand: clineSkills, programOpts: () => program.opts() }); + // ============ Git command ============ program .command('git') @@ -1216,7 +1616,7 @@ program // ============ Internal _complete command ============ program .command('_complete') - .argument('', 'Type of completion: cursor, cursor-commands, cursor-skills, cursor-agents, copilot, claude-skills, claude-agents, claude-rules, trae-rules, trae-skills, 
opencode-agents, opencode-skills, opencode-commands, opencode-tools, codex-rules, codex-skills, gemini-commands, gemini-skills, gemini-agents, agents-md') + .argument('', 'Type of completion: cursor, cursor-commands, cursor-skills, cursor-agents, copilot, claude-skills, claude-agents, claude-rules, trae-rules, trae-skills, opencode-agents, opencode-skills, opencode-commands, opencode-tools, codex-rules, codex-skills, gemini-commands, gemini-skills, gemini-agents, warp-skills, windsurf-rules, windsurf-skills, cline-rules, cline-skills, agents-md') .description('Internal command for shell completion') .action(async (type: string) => { try { @@ -1293,6 +1693,21 @@ program case 'gemini-agents': sourceDir = getSourceDir(repoConfig, 'gemini', 'agents', '.gemini/agents'); break; + case 'warp-skills': + sourceDir = getSourceDir(repoConfig, 'warp', 'skills', '.agents/skills'); + break; + case 'windsurf-rules': + sourceDir = getSourceDir(repoConfig, 'windsurf', 'rules', '.windsurf/rules'); + break; + case 'windsurf-skills': + sourceDir = getSourceDir(repoConfig, 'windsurf', 'skills', '.windsurf/skills'); + break; + case 'cline-rules': + sourceDir = getSourceDir(repoConfig, 'cline', 'rules', '.clinerules'); + break; + case 'cline-skills': + sourceDir = getSourceDir(repoConfig, 'cline', 'skills', '.cline/skills'); + break; case 'agents-md': sourceDir = getSourceDir(repoConfig, 'agents-md', 'file', 'agents-md'); break; diff --git a/src/project-config.ts b/src/project-config.ts index 436a2b6..7f4f53e 100644 --- a/src/project-config.ts +++ b/src/project-config.ts @@ -87,6 +87,18 @@ export interface SourceDirConfig { // Source directory for warp skills, default: ".agents/skills" skills?: string; }; + windsurf?: { + // Source directory for Windsurf rules, default: ".windsurf/rules" + rules?: string; + // Source directory for Windsurf skills, default: ".windsurf/skills" + skills?: string; + }; + cline?: { + // Source directory for Cline rules, default: ".clinerules" + rules?: 
string; + // Source directory for Cline skills, default: ".cline/skills" + skills?: string; + }; agentsMd?: { // Source directory for AGENTS.md files, default: "." (repository root) file?: string; @@ -155,6 +167,14 @@ export interface ProjectConfig { warp?: { skills?: Record; }; + windsurf?: { + rules?: Record; + skills?: Record; + }; + cline?: { + rules?: Record; + skills?: Record; + }; // Universal AGENTS.md support (tool-agnostic) agentsMd?: Record; } @@ -197,6 +217,22 @@ export interface RepoSourceConfig { rules?: string; skills?: string; }; + gemini?: { + commands?: string; + skills?: string; + agents?: string; + }; + warp?: { + skills?: string; + }; + windsurf?: { + rules?: string; + skills?: string; + }; + cline?: { + rules?: string; + skills?: string; + }; agentsMd?: { file?: string; }; @@ -271,9 +307,22 @@ function mergeCombined(main: ProjectConfig, local: ProjectConfig): ProjectConfig rules: { ...(main.codex?.rules || {}), ...(local.codex?.rules || {}) }, skills: { ...(main.codex?.skills || {}), ...(local.codex?.skills || {}) } }, + gemini: { + commands: { ...(main.gemini?.commands || {}), ...(local.gemini?.commands || {}) }, + skills: { ...(main.gemini?.skills || {}), ...(local.gemini?.skills || {}) }, + agents: { ...(main.gemini?.agents || {}), ...(local.gemini?.agents || {}) } + }, warp: { skills: { ...(main.warp?.skills || {}), ...(local.warp?.skills || {}) } }, + windsurf: { + rules: { ...(main.windsurf?.rules || {}), ...(local.windsurf?.rules || {}) }, + skills: { ...(main.windsurf?.skills || {}), ...(local.windsurf?.skills || {}) } + }, + cline: { + rules: { ...(main.cline?.rules || {}), ...(local.cline?.rules || {}) }, + skills: { ...(main.cline?.skills || {}), ...(local.cline?.skills || {}) } + }, agentsMd: { ...(main.agentsMd || {}), ...(local.agentsMd || {}) } }; } @@ -304,6 +353,10 @@ export async function getRepoSourceConfig(projectPath: string): Promise Date: Sat, 28 Feb 2026 14:16:00 +0800 Subject: [PATCH 02/12] feat: add proper user-level 
sync support for all tools (#25) - Add userTargetDir to SyncAdapter/AdapterConfig for tools whose user-level filesystem path differs from their project-level path - Fix OpenCode user mode: symlinks now land in ~/.config/opencode/ (XDG) instead of the incorrect ~/.opencode/ - Add gemini-md adapter for ~/.gemini/GEMINI.md (ais gemini md --user) - Add codex-md adapter for ~/.codex/AGENTS.md (ais codex md --user) - Fix getSourceDir() missing branches for gemini.* and codex.md - Fix mergeCombined() missing gemini section entirely - Add 63 new tests covering adapters, sync-engine userTargetDir behavior, project-config merge, and findAdapterForAlias - Update KNOWLEDGE_BASE.md, README.md, README_ZH.md Co-authored-by: Claude Sonnet 4.6 --- KNOWLEDGE_BASE.md | 649 +++++++++++++++++++++++++++--- README.md | 59 ++- README_ZH.md | 59 ++- src/__tests__/codex-md.test.ts | 51 +++ src/__tests__/gemini-md.test.ts | 51 +++ src/adapters/base.ts | 2 + src/adapters/codex-md.ts | 30 ++ src/adapters/gemini-md.ts | 30 ++ src/adapters/index.ts | 10 + src/adapters/opencode-agents.ts | 1 + src/adapters/opencode-commands.ts | 1 + src/adapters/opencode-skills.ts | 1 + src/adapters/opencode-tools.ts | 1 + src/adapters/types.ts | 3 + src/index.ts | 12 +- src/project-config.ts | 34 +- src/sync-engine.ts | 5 + tests/codex-adapters.test.ts | 29 +- tests/opencode-adapters.test.ts | 100 +++++ tests/project-config.test.ts | 140 +++++++ tests/sync-engine.test.ts | 76 ++++ 21 files changed, 1271 insertions(+), 73 deletions(-) create mode 100644 src/__tests__/codex-md.test.ts create mode 100644 src/__tests__/gemini-md.test.ts create mode 100644 src/adapters/codex-md.ts create mode 100644 src/adapters/gemini-md.ts create mode 100644 tests/opencode-adapters.test.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index 006a647..e9cffb0 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -1,14 +1,14 @@ # Project Knowledge Base ## Project Overview -**AI Rules Sync (ais)** is a CLI tool designed to 
synchronize agent rules from a centralized Git repository to local projects using symbolic links. It supports **Cursor rules**, **Cursor commands**, **Cursor skills**, **Cursor subagents**, **Copilot instructions**, **Claude Code rules/skills/subagents/CLAUDE.md**, **Trae rules/skills**, **OpenCode agents/skills/commands/tools**, **Codex rules/skills**, **Gemini CLI commands/skills/subagents**, **Windsurf rules/skills**, **Cline rules/skills**, and **universal AGENTS.md support**, keeping projects up-to-date across teams. +**AI Rules Sync (ais)** is a CLI tool designed to synchronize agent rules from a centralized Git repository to local projects using symbolic links. It supports **Cursor rules**, **Cursor commands**, **Cursor skills**, **Cursor subagents**, **Copilot instructions**, **Claude Code rules/skills/subagents/CLAUDE.md**, **Trae rules/skills**, **OpenCode agents/skills/commands/tools**, **Codex rules/skills/AGENTS.md**, **Gemini CLI commands/skills/agents/GEMINI.md**, **Windsurf rules/skills**, **Cline rules/skills**, and **universal AGENTS.md support**, keeping projects up-to-date across teams. -A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to manage AI config files in `~/.claude/`, `~/.cursor/`, etc. Entries are tracked in `~/.config/ai-rules-sync/user.json` (or a user-configured custom path for dotfiles integration) and gitignore management is skipped automatically. +A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to manage AI config files in `~/.claude/`, `~/.gemini/`, `~/.codex/`, `~/.config/opencode/`, etc. Entries are tracked in `~/.config/ai-rules-sync/user.json` (or a user-configured custom path for dotfiles integration) and gitignore management is skipped automatically. 
## Core Concepts -- **Rules Repository**: A Git repository containing rule definitions in official tool paths (`.cursor/rules/`, `.cursor/commands/`, `.cursor/skills/`, `.cursor/agents/`, `.github/instructions/`, `.claude/skills/`, `.claude/agents/`, `.claude/` (for CLAUDE.md), `.trae/rules/`, `.trae/skills/`, `.opencode/agents/`, `.opencode/skills/`, `.opencode/commands/`, `.opencode/tools/`, `.codex/rules/`, `.agents/skills/`, `.gemini/commands/`, `.gemini/skills/`, `.gemini/agents/`, `.windsurf/rules/`, `.windsurf/skills/`, `.clinerules/`, `.cline/skills/`, `agents-md/`). +- **Rules Repository**: A Git repository containing rule definitions in official tool paths (`.cursor/rules/`, `.cursor/commands/`, `.cursor/skills/`, `.cursor/agents/`, `.github/instructions/`, `.claude/skills/`, `.claude/agents/`, `.claude/` (for CLAUDE.md), `.trae/rules/`, `.trae/skills/`, `.opencode/agents/`, `.opencode/skills/`, `.opencode/commands/`, `.opencode/tools/`, `.codex/rules/`, `.codex/` (for AGENTS.md), `.agents/skills/`, `.gemini/commands/`, `.gemini/skills/`, `.gemini/agents/`, `.gemini/` (for GEMINI.md), `.windsurf/rules/`, `.windsurf/skills/`, `.clinerules/`, `.cline/skills/`, `agents-md/`). - **Symbolic Links**: Entries are linked from the local cache of the repo to project directories, avoiding file duplication and drift. -- **Dependency Tracking**: Uses `ai-rules-sync.json` to track project dependencies (Cursor rules/commands/skills/subagents, Copilot instructions, Claude Code rules/skills/subagents/CLAUDE.md, Trae rules/skills, OpenCode agents/skills/commands/tools, Codex rules/skills, Gemini CLI commands/skills/subagents, Windsurf rules/skills, Cline rules/skills, AGENTS.md). 
+- **Dependency Tracking**: Uses `ai-rules-sync.json` to track project dependencies (Cursor rules/commands/skills/subagents, Copilot instructions, Claude Code rules/skills/subagents/CLAUDE.md, Trae rules/skills, OpenCode agents/skills/commands/tools, Codex rules/skills/AGENTS.md, Gemini CLI commands/skills/agents/GEMINI.md, Windsurf rules/skills, Cline rules/skills, universal AGENTS.md). - **Privacy**: Supports private/local entries via `ai-rules-sync.local.json` and `.git/info/exclude`. - **User Mode**: `--user` / `-u` flag on add/remove/install commands. Sets `projectPath = $HOME`, stores dependencies in `~/.config/ai-rules-sync/user.json`, skips gitignore management. Enables `ais user install` to restore all user-scope symlinks on a new machine. (`--global`/`-g` kept as deprecated aliases.) - **User Config Path**: Configurable via `ais config user set ` for dotfiles integration (e.g. `~/dotfiles/ai-rules-sync/user.json`). @@ -909,34 +909,48 @@ ais user install | copilot-instructions | copilot | instructions | file | .github/instructions | .instructions.md, .md | [Copilot Instructions](https://docs.github.com/en/copilot/how-tos/configure-custom-instructions/add-repository-instructions) | | claude-skills | claude | skills | directory | .claude/skills | - | [Claude Code Skills](https://code.claude.com/docs/en/skills) | | claude-agents | claude | subagents | directory | .claude/agents | - | [Claude Code Subagents](https://code.claude.com/docs/en/sub-agents) | -| claude-rules | claude | rules | file | .claude/rules | .md | [Claude Code Memory](https://code.claude.com/docs/en/memory) | -| claude-md | claude | md | file | .claude | .md | [Claude Code CLAUDE.md](https://docs.anthropic.com/en/docs/claude-code/memory) | -| trae-rules | trae | rules | file | .trae/rules | .md | [Trae Rules](https://docs.trae.ai/ide/rules) | -| trae-skills | trae | skills | directory | .trae/skills | - | [Trae Skills](https://docs.trae.ai/ide/skills) | -| **agents-md** | **agents-md** | 
**file** | **file** | **.** (root) | **.md** | **[agents.md standard](https://agents.md/)** | -| opencode-agents | opencode | agents | file | .opencode/agents | .md | [OpenCode Agents](https://opencode.ai/docs/agents/) | -| opencode-skills | opencode | skills | directory | .opencode/skills | - | [OpenCode Skills](https://opencode.ai/docs/skills/) | -| opencode-commands | opencode | commands | file | .opencode/commands | .md | [OpenCode Commands](https://opencode.ai/docs/commands/) | -| opencode-tools | opencode | tools | file | .opencode/tools | .ts, .js | [OpenCode Tools](https://opencode.ai/docs/tools/) | -| codex-rules | codex | rules | file | .codex/rules | .rules | [OpenAI Codex Rules](https://developers.openai.com/codex/rules) | -| codex-skills | codex | skills | directory | .agents/skills | - | [OpenAI Codex Skills](https://developers.openai.com/codex/skills) | -| gemini-commands | gemini | commands | file | .gemini/commands | .toml | [Gemini Commands](https://geminicli.com/docs/cli/custom-commands/) | -| gemini-skills | gemini | skills | directory | .gemini/skills | - | [Gemini Skills](https://geminicli.com/docs/cli/skills/) | -| gemini-agents | gemini | subagents | file | .gemini/agents | .md | [Gemini Subagents](https://geminicli.com/docs/core/subagents/) | -| windsurf-rules | windsurf | rules | file | .windsurf/rules | .md | [Windsurf Memories & Rules](https://docs.windsurf.com/windsurf/cascade/memories) | -| windsurf-skills | windsurf | skills | directory | .windsurf/skills | - | [Windsurf Skills](https://docs.windsurf.com/windsurf/cascade/skills) | -| cline-rules | cline | rules | file | .clinerules | .md, .txt | [Cline Rules](https://docs.cline.bot/customization/cline-rules) | -| cline-skills | cline | skills | directory | .cline/skills | - | [Cline Skills](https://docs.cline.bot/customization/skills) | +| Adapter | Tool | Subtype | Mode | Source Dir | Target Dir (project) | User-level Target | File Suffixes | Reference | 
+|---------|------|---------|------|------------|----------------------|-------------------|---------------|-----------| +| cursor-rules | cursor | rules | hybrid | .cursor/rules | .cursor/rules | - | .mdc, .md | [Cursor Rules](https://cursor.com/docs/context/rules) | +| cursor-commands | cursor | commands | file | .cursor/commands | .cursor/commands | - | .md | [Cursor Commands](https://cursor.com/docs/context/commands) | +| cursor-skills | cursor | skills | directory | .cursor/skills | .cursor/skills | - | - | [Cursor Skills](https://cursor.com/docs/context/skills) | +| cursor-agents | cursor | subagents | directory | .cursor/agents | .cursor/agents | - | - | [Cursor subagents](https://cursor.com/docs/context/subagents) | +| copilot-instructions | copilot | instructions | file | .github/instructions | .github/instructions | - | .instructions.md, .md | [Copilot Instructions](https://docs.github.com/en/copilot/customizing-copilot/adding-custom-instructions-for-github-copilot) | +| claude-skills | claude | skills | directory | .claude/skills | .claude/skills | - | - | [Claude Code Skills](https://code.claude.com/docs/en/skills) | +| claude-agents | claude | subagents | directory | .claude/agents | .claude/agents | - | - | [Claude Code Subagents](https://code.claude.com/docs/en/sub-agents) | +| claude-rules | claude | rules | file | .claude/rules | .claude/rules | - | .md | [Claude Code](https://claude.ai/code) | +| claude-md | claude | md | file | .claude | .claude | - | .md | [Claude Code CLAUDE.md](https://claude.ai/code) | +| trae-rules | trae | rules | file | .trae/rules | .trae/rules | - | .md | [Trae AI](https://trae.ai/) | +| trae-skills | trae | skills | directory | .trae/skills | .trae/skills | - | - | [Trae AI](https://trae.ai/) | +| **agents-md** | **agents-md** | **file** | **file** | **.** (root) | **.** | **-** | **.md** | **[agents.md standard](https://agents.md/)** | +| opencode-agents | opencode | agents | file | .opencode/agents | .opencode/agents 
| .config/opencode/agents | .md | [OpenCode](https://opencode.ai/) | +| opencode-skills | opencode | skills | directory | .opencode/skills | .opencode/skills | .config/opencode/skills | - | [OpenCode](https://opencode.ai/) | +| opencode-commands | opencode | commands | file | .opencode/commands | .opencode/commands | .config/opencode/commands | .md | [OpenCode](https://opencode.ai/) | +| opencode-tools | opencode | tools | file | .opencode/tools | .opencode/tools | .config/opencode/tools | .ts, .js | [OpenCode](https://opencode.ai/) | +| codex-rules | codex | rules | file | .codex/rules | .codex/rules | - | .rules | [OpenAI Codex Rules](https://developers.openai.com/codex/rules) | +| codex-skills | codex | skills | directory | .agents/skills | .agents/skills | - | - | [OpenAI Codex Skills](https://developers.openai.com/codex/skills) | +| **codex-md** | **codex** | **md** | **file** | **.codex** | **.codex** | **-** | **.md** | **[Codex AGENTS.md](https://developers.openai.com/codex)** | +| gemini-commands | gemini | commands | file | .gemini/commands | .gemini/commands | - | .toml | [Gemini CLI](https://geminicli.com/) | +| gemini-skills | gemini | skills | directory | .gemini/skills | .gemini/skills | - | - | [Gemini CLI](https://geminicli.com/) | +| gemini-agents | gemini | agents | file | .gemini/agents | .gemini/agents | - | .md | [Gemini CLI](https://geminicli.com/) | +| **gemini-md** | **gemini** | **md** | **file** | **.gemini** | **.gemini** | **-** | **.md** | **[Gemini CLI GEMINI.md](https://geminicli.com/)** | +| windsurf-rules | windsurf | rules | file | .windsurf/rules | .windsurf/rules | - | .md | [Windsurf Memories & Rules](https://docs.windsurf.com/windsurf/cascade/memories) | +| windsurf-skills | windsurf | skills | directory | .windsurf/skills | .windsurf/skills | - | - | [Windsurf Skills](https://docs.windsurf.com/windsurf/cascade/skills) | +| cline-rules | cline | rules | file | .clinerules | .clinerules | - | .md, .txt | [Cline
Rules](https://docs.cline.bot/customization/cline-rules) | +| cline-skills | cline | skills | directory | .cline/skills | .cline/skills | - | - | [Cline Skills](https://docs.cline.bot/customization/skills) | + +> **User-level Target** column: When `--user` is used, symlinks land at `~/<userTargetDir>` instead of `~/<targetDir>`. A `-` means the tool has no dedicated XDG/user path — user mode just uses `~/<targetDir>` (which for claude-md means `~/.claude/CLAUDE.md`, for gemini-md means `~/.gemini/GEMINI.md`, etc.). ## Development Guidelines - **TypeScript**: Strict mode enabled. - **Testing**: Vitest for unit tests. - **Style**: Functional programming style preferred. - **Adding New AI Tools**: - 1. Create adapter in `src/adapters/<tool>-<subtype>.ts` - 2. Register in `src/adapters/index.ts` + 1. Create adapter in `src/adapters/<tool>-<subtype>.ts` (use `createBaseAdapter`; add `userTargetDir` if tool has a different XDG user-level path) + 2. Register in `src/adapters/index.ts` (constructor + `findAdapterForAlias`) 3. Add CLI commands in `src/index.ts` using `registerAdapterCommands()` - 4. Update `ProjectConfig` interface in `src/project-config.ts` + 4. Update `ProjectConfig`, `SourceDirConfig`, `RepoSourceConfig`, `mergeCombined`, and `getSourceDir` in `src/project-config.ts` +- **`userTargetDir`**: Set this on an adapter when the tool's official user-level filesystem path differs from its project-level path (e.g., OpenCode uses `~/.config/opencode/` not `~/.opencode/`). Leave unset when `~/<targetDir>` is already the correct user path. 
### Choosing Adapter Mode @@ -944,30 +958,567 @@ ais user install - Use **file** mode for tools with single files and consistent suffix (commands with `.md`) - Use **hybrid** mode when entries can be either files or directories (cursor-rules) -## Changelog +## Recent Changes + +### Windsurf & Cline Support (2026-02) -### 2026-02 - Added **Windsurf support**: rules (`.windsurf/rules`, `.md`) and skills (`.windsurf/skills`) with full CLI/completion integration - Added **Cline support**: rules (`.clinerules`, `.md`/`.txt`) and skills (`.cline/skills`) with full CLI/completion integration -- Added **User Mode** (`--user` / `-u`): manage personal AI config files (`~/.claude/CLAUDE.md`, etc.) with version control; `ais user install` restores all symlinks on new machines -- Added **claude-md adapter**: sync CLAUDE.md-style files; `ais claude md add CLAUDE --user` -- Added **User Config Path**: `ais config user set ` for dotfiles integration -- Added **Gemini CLI support**: commands (`.toml`), skills (directory), subagents (`.md`) -- Added **OpenAI Codex support**: rules (`.rules`, Starlark), skills (`.agents/skills/`) -- Renamed deprecated `--global` / `-g` flags to `--user` / `-u` - -### 2026-01 -- Added **Custom Source Directories**: 4-layer priority system (CLI > global config > repo config > adapter defaults); `ais config repo set-source` -- Added **Custom Target Directories**: `-d` option on `add` commands; monorepo support -- Added **Universal AGENTS.md support**: `ais agents-md` for [agents.md standard](https://agents.md/) -- Fixed OpenCode adapters to match official docs (removed `rules`, fixed modes for agents/commands/tools) -- Migrated config directory from `~/.ai-rules-sync/` to `~/.config/ai-rules-sync/` (XDG spec) - -### Previously -- Added Warp skills support -- Added Copilot prompt files and custom agents -- Added Claude Code rules/skills/subagents/CLAUDE.md support -- Added Trae rules and skills support -- Added OpenCode agents/skills/commands/tools 
support -- Added `add-all` bulk discovery and installation -- Added shell tab completion + +### Proper User-Level Sync for All Tools + gemini-md / codex-md Adapters (2026-02) + +**Added full user-level support for Gemini CLI and Codex, fixed OpenCode XDG paths.** + +**Problem Solved:** +- `--user` mode set `projectPath = $HOME` but used the same `targetDir` for all adapters — causing OpenCode user-level files to land in `~/.opencode/...` instead of the official XDG path `~/.config/opencode/...` +- No adapter existed for `~/.gemini/GEMINI.md` (Gemini CLI's user-level context file) +- No adapter existed for `~/.codex/AGENTS.md` (Codex CLI's user-level instructions file) +- `getSourceDir()` had no branch for `gemini` or `codex.md`, returning the adapter default instead of any repo-configured path + +**Features Implemented:** + +1. **`userTargetDir` on Adapter Interface**: + - New optional field `userTargetDir?: string` on `SyncAdapter` and `AdapterConfig` + - When `options.skipIgnore === true` (user mode) AND adapter has `userTargetDir`, `sync-engine.ts` uses it as the symlink destination instead of `targetDir` + - OpenCode adapters now carry `userTargetDir` pointing to XDG paths + +2. **OpenCode XDG Path Fix**: + - `opencode-commands`: `userTargetDir = '.config/opencode/commands'` → `~/.config/opencode/commands/` + - `opencode-agents`: `userTargetDir = '.config/opencode/agents'` → `~/.config/opencode/agents/` + - `opencode-skills`: `userTargetDir = '.config/opencode/skills'` → `~/.config/opencode/skills/` + - `opencode-tools`: `userTargetDir = '.config/opencode/tools'` → `~/.config/opencode/tools/` + +3. **gemini-md Adapter** (new): + - Manages `~/.gemini/GEMINI.md` (or project-level `.gemini/GEMINI.md`) + - File mode, `.md` suffix; resolves `GEMINI` → `GEMINI.md` + - CLI: `ais gemini md [add|remove|install|import]` + - User mode: `ais gemini md add GEMINI --user` → symlink at `~/.gemini/GEMINI.md` + +4. 
**codex-md Adapter** (new): + - Manages `~/.codex/AGENTS.md` (or project-level `.codex/AGENTS.md`) + - File mode, `.md` suffix; resolves `AGENTS` → `AGENTS.md` + - CLI: `ais codex md [add|remove|install|import]` + - User mode: `ais codex md add AGENTS --user` → symlink at `~/.codex/AGENTS.md` + +5. **Configuration Updates**: + - `ProjectConfig.gemini` now includes `md?: Record` + - `ProjectConfig.codex` now includes `md?: Record` + - Same additions to `SourceDirConfig` and `RepoSourceConfig` + - `mergeCombined()` now merges `gemini.md`, `codex.md`, and the previously missing `gemini.*` fields + - `getSourceDir()` now handles `gemini.*` (all subtypes) and `codex.md` + - `findAdapterForAlias()` updated for `gemini.md` and `codex.md` + +6. **`_complete` / `userCmd` Updates**: + - `gemini-md` and `codex-md` added to shell completion type list + - `ais user` description now mentions `~/.gemini/GEMINI.md` and `~/.codex/AGENTS.md` + +**Implementation:** +- `src/adapters/types.ts` — Added `userTargetDir?: string` to `SyncAdapter` +- `src/adapters/base.ts` — Added `userTargetDir?: string` to `AdapterConfig`, pass-through in `createBaseAdapter` +- `src/sync-engine.ts` — Apply `adapter.userTargetDir` when `options.skipIgnore === true` +- `src/adapters/opencode-{commands,agents,skills,tools}.ts` — Added `userTargetDir` +- `src/adapters/gemini-md.ts` — New adapter +- `src/adapters/codex-md.ts` — New adapter +- `src/adapters/index.ts` — Registered new adapters, updated `findAdapterForAlias` +- `src/project-config.ts` — Extended types, `mergeCombined`, `getSourceDir` +- `src/index.ts` — Added `ais gemini md` and `ais codex md` subcommands + +**Files Changed:** 9 modified + 2 new adapters, all tests passing (269/269) + +--- + +### User Mode & claude-md Adapter (2026-02) + +**Added User Mode for managing personal AI config files (`~/.claude/CLAUDE.md`, etc.):** + +**Problem Solved:** +- Personal AI config files like `~/.claude/CLAUDE.md` had no version control or cross-machine sync 
+- Each machine required manual setup of global AI tool configurations + +**Features Implemented:** + +1. **User Mode (`--user` / `-u` flag)**: + - All add/remove/install commands accept `--user` flag + - Sets `projectPath = $HOME` automatically + - Stores dependencies in `~/.config/ai-rules-sync/user.json` + - Skips gitignore management (home dir isn't a git repo) + - `--global` / `-g` kept as deprecated backward-compatible aliases + +2. **claude-md Adapter**: + - New adapter for CLAUDE.md-style files (`.claude/.md`) + - File mode with `.md` suffix; resolves `CLAUDE` → `CLAUDE.md` + - CLI: `ais claude md [add|remove|install|import]` + - User mode usage: `ais claude md add CLAUDE --user` + +3. **One-click User Install**: + - `ais user install` / `ais install --user` + - Reads all entries from `user.json` and recreates symlinks (perfect for new machine setup) + - `ais global install` and `ais install --global` kept as deprecated aliases + +4. **User Config Path Management**: + - `ais config user show` - View current user.json path + - `ais config user set ` - Set custom path (for dotfiles integration) + - `ais config user reset` - Reset to default path + - Stored as `userConfigPath` in `~/.config/ai-rules-sync/config.json` + - `ais config global show|set|reset` kept as deprecated aliases + +5. **claude-rules Adapter** (formalized): + - Adapter for `.claude/rules/` files (`.md` suffix) + - CLI: `ais claude rules [add|remove|install|import]` + +6. **`skipIgnore` in SyncOptions**: + - New optional field prevents gitignore management in user mode + - Set automatically when `--user` is used + +7. 
**Automatic Migration**: + - On first use, auto-renames `global.json` → `user.json` in the config directory + - Auto-renames `globalConfigPath` → `userConfigPath` in `config.json` + +**Implementation:** +- `src/adapters/claude-md.ts` - New claude-md adapter +- `src/config.ts` - Added `userConfigPath`, `getUserConfigPath()`, `getUserProjectConfig()`, `saveUserProjectConfig()` (replacing `global*` equivalents) +- `src/project-config.ts` - Added `claude.md` and `claude.rules` to config interfaces; added `addUserDependency()`, `removeUserDependency()` +- `src/adapters/types.ts` - Added `skipIgnore?: boolean` to `SyncOptions` +- `src/sync-engine.ts` - Respect `skipIgnore` in `linkEntry()` +- `src/adapters/index.ts` - Registered `claudeMdAdapter` +- `src/commands/handlers.ts` - Added `user?` and `skipIgnore?` to `CommandContext`; user path for `handleAdd`/`handleRemove` +- `src/commands/install.ts` - Added `installUserEntriesForAdapter()`, `installAllUserEntries()` +- `src/commands/config.ts` - Added `handleUserConfigShow/Set/Reset()` +- `src/cli/register.ts` - Added `-u, --user` flag to add/remove/install commands (with `-g, --global` as deprecated aliases) +- `src/index.ts` - Added `ais claude md` subgroup, `ais user install`, `ais config user` commands + +**Files Changed:** 11 modified/new, all tests passing (206/206) + +### OpenAI Codex Support (2026-02) + +**Added complete support for OpenAI Codex project-level rules and skills:** + +**Problem Solved:** +- Teams using Codex needed to share rules and skills across projects +- No centralized way to distribute Codex configurations +- Manual copying led to drift and inconsistency + +**Features Implemented:** + +1. **Codex Rules Adapter**: + - File mode with `.rules` suffix + - Source: `.codex/rules/` + - Syntax: `ais codex rules add [alias]` + - Purpose: Control which commands can run outside sandbox using Starlark syntax + +2. 
**Codex Skills Adapter**: + - Directory mode (SKILL.md + optional scripts/assets) + - Source: `.agents/skills/` (non-standard location per Codex docs) + - Syntax: `ais codex skills add [alias]` + - Purpose: Task-specific capabilities that extend Codex + +3. **CLI Commands**: + - `ais codex install` - Install all Codex rules and skills + - `ais codex add-all` - Discover and add all entries from repository + - `ais codex import ` - Import from project to repository (auto-detects subtype) + - `ais codex rules [add|remove|install|import]` - Rules management + - `ais codex skills [add|remove|install|import]` - Skills management + +4. **Configuration Support**: + - Extended `SourceDirConfig` with `codex.rules` and `codex.skills` + - Extended `ProjectConfig` with `codex.rules` and `codex.skills` records + - Full support for custom source directories via `-s` option + - Backward compatible with existing configs + +5. **Shell Completion**: + - Added Codex to bash, zsh, and fish completion scripts + - Dynamic completion for rule/skill names via `ais _complete codex-rules|codex-skills` + - Context-aware completions for all subcommands + +6. 
**Mode Detection**: + - Added 'codex' to `DefaultMode` type + - `ais install` smart dispatch includes Codex + - Auto-detect Codex-only projects + +**Implementation:** +- `src/adapters/codex-rules.ts` - Rules adapter (file mode, `.rules` suffix) +- `src/adapters/codex-skills.ts` - Skills adapter (directory mode, `.agents/skills`) +- `src/adapters/index.ts` - Registered adapters and added to `findAdapterForAlias` +- `src/project-config.ts` - Extended configuration interfaces and helpers +- `src/commands/helpers.ts` - Added 'codex' mode type and inference +- `src/index.ts` - Full CLI command hierarchy with install/add-all/import +- `src/completion/scripts.ts` - Shell completion for all three shells +- `tests/codex-adapters.test.ts` - Complete test coverage (9 tests) + +**Files Changed:** 9 new/modified, all tests passing (126/126) + +**Benefits:** +- Centralized Codex configuration management +- Team-wide consistency for sandbox rules +- Easy sharing of skills across projects +- Follows same pattern as other supported tools + +### Custom Source Directories for Third-Party Repositories (2026-01) + +**Added 4-layer priority system for custom source directory configuration:** + +**Problem Solved:** +- Third-party repositories without `ai-rules-sync.json` were unusable +- Repositories with non-standard directory structures (e.g., `rules/cursor/` instead of `.cursor/rules`) required forking +- No way to override source directories without modifying the repository + +**Solution - 4-Layer Priority:** +``` +CLI Parameters > Global Config > Repository Config > Adapter Defaults +``` + +**Features Implemented:** + +1. **CLI Parameters (`-s/--source-dir`)**: + - Simple format: `ais cursor rules add-all -s custom/rules` + - Dot notation: `ais add-all -s cursor.rules=custom/rules` + - Supports multiple `-s` flags + - Context-aware parsing (infers tool/subtype from command) + +2. 
**Global Configuration Commands**: + - `ais config repo set-source ` - Persist custom sourceDir + - `ais config repo show ` - View repository configuration + - `ais config repo clear-source [tool.subtype]` - Remove custom sourceDir + - `ais config repo list` - List all repositories with sourceDir + +3. **Enhanced Architecture**: + - Extended `RepoConfig` interface with `sourceDir?: SourceDirConfig` + - Enhanced `getSourceDir()` with `globalOverride` parameter (4-layer priority logic) + - All `add-all` commands support `-s` option (top-level, tool-level, subtype-level) + - Custom parser with simple and dot notation format support + +4. **Configuration Persistence**: + - Global config stored in `~/.config/ai-rules-sync/config.json` + - Per-repository `sourceDir` configuration + - CLI overrides have highest priority (temporary) + - Global config persists across sessions + +**Use Cases:** +- **Exploration**: `ais cursor rules add-all -s custom/rules --dry-run` +- **Persistent**: `ais config repo set-source my-repo cursor.rules custom/rules` +- **Override**: CLI parameter overrides saved configuration + +**Files Changed:** +- `src/config.ts` - Added `sourceDir` to `RepoConfig` interface +- `src/project-config.ts` - Enhanced `getSourceDir()` with priority logic +- `src/commands/add-all.ts` - Added `sourceDirOverrides` parameter throughout +- `src/cli/source-dir-parser.ts` - New parameter parsing module +- `src/commands/config.ts` - New config management commands +- `src/cli/register.ts` - Added `-s` option to all adapter commands +- `src/index.ts` - Added `-s` to all add-all commands + config command group +- `README.md` - Added comprehensive documentation section + +**Benefits:** +- Works with any repository (no ai-rules-sync.json required) +- Flexible (CLI for quick tests, config for persistent use) +- Non-destructive (doesn't modify third-party repositories) +- User-friendly (smart context detection in simple format) + +### Configuration Directory Migration (2026-01) 
+ +**Changed global configuration location to follow XDG Base Directory specification:** +- **Old**: `~/.ai-rules-sync/` +- **New**: `~/.config/ai-rules-sync/` + +**Impact:** +- Global config file: `~/.config/ai-rules-sync/config.json` +- Repository cache: `~/.config/ai-rules-sync/repos/` +- No automatic migration provided - users must manually move files if needed +- Aligns with Linux/macOS standards for configuration file placement + +**Files Changed:** +- `src/config.ts` - Updated `CONFIG_DIR` constant +- `tests/config.test.ts` - Updated test fixtures +- Documentation updated to reflect new paths + +### OpenCode AI Support (2026-01) + +**Added complete support for OpenCode AI (https://opencode.ai) with 5 component types:** +- **Rules** (`.opencode/rules/`) - File mode with `.md` suffix +- **Agents** (`.opencode/agents/`) - Directory mode +- **Skills** (`.opencode/skills/`) - Directory mode +- **Commands** (`.opencode/commands/`) - Directory mode +- **Custom-tools** (`.opencode/custom-tools/`) - Directory mode + +**Implementation:** +- Created 5 new adapters following existing patterns +- Extended `ProjectConfig` and `SourceDirConfig` interfaces +- Added CLI commands: `ais opencode [rules|agents|skills|commands|custom-tools] [add|remove|install|import]` +- Updated shell completion scripts (bash, zsh, fish) +- Added `_complete` command support for all OpenCode types +- Updated mode inference to recognize OpenCode projects + +**Files Changed:** +- `src/adapters/opencode-rules.ts` - New adapter (file mode, .md) +- `src/adapters/opencode-agents.ts` - New adapter (directory mode) +- `src/adapters/opencode-skills.ts` - New adapter (directory mode) +- `src/adapters/opencode-commands.ts` - New adapter (directory mode) +- `src/adapters/opencode-custom-tools.ts` - New adapter (directory mode) +- `src/adapters/index.ts` - Registered all 5 OpenCode adapters +- `src/project-config.ts` - Extended configuration interfaces +- `src/commands/helpers.ts` - Added 'opencode' mode 
type +- `src/index.ts` - Added OpenCode CLI commands and completion +- `src/completion/scripts.ts` - Added OpenCode to all shell completions +- `README.md` - Documented OpenCode support +- `KNOWLEDGE_BASE.md` - Updated architecture and feature documentation + +### OpenCode Adapters Fix & Universal AGENTS.md Support (2026-01) + +**Fixed OpenCode adapter modes to match official OpenCode documentation:** +- **Removed** `opencode-rules` adapter (OpenCode doesn't have a rules type) +- **Fixed** `opencode-agents` - Changed from directory to **file mode** with `.md` suffix +- **Fixed** `opencode-commands` - Changed from directory to **file mode** with `.md` suffix +- **Renamed** `opencode-custom-tools` → `opencode-tools` - Changed to **file mode** with `.ts`/`.js` suffixes +- **Kept** `opencode-skills` - Remains **directory mode** (contains SKILL.md inside) + +**Added Universal AGENTS.md Support:** +- **New adapter**: `agents-md` - Tool-agnostic support for the [agents.md standard](https://agents.md/) +- Syncs AGENTS.md files from repository to project root +- Makes agent definitions available to any AI coding tool supporting the agents.md format +- **Mode**: File mode with `.md` suffix +- **Target**: Project root (`.`) + +**Configuration Changes:** +- Removed `opencode.rules` from all config interfaces +- Renamed `opencode['custom-tools']` → `opencode.tools` +- Added new top-level `agentsMd` configuration section +- Updated `SourceDirConfig` and `ProjectConfig` interfaces + +**CLI Changes:** +- Added new top-level command group: `ais agents-md [add|remove|install|import]` +- Removed `ais opencode rules` subcommand +- Changed `ais opencode custom-tools` → `ais opencode tools` +- Updated all error messages and help text + +**Implementation:** +- `src/adapters/agents-md.ts` - New universal adapter +- `src/adapters/opencode-rules.ts` - Deleted +- `src/adapters/opencode-custom-tools.ts` - Renamed to `opencode-tools.ts` +- `src/adapters/opencode-agents.ts` - Fixed to file mode 
+- `src/adapters/opencode-commands.ts` - Fixed to file mode +- `src/adapters/opencode-tools.ts` - Updated to file mode with .ts/.js +- `src/adapters/index.ts` - Updated registry +- `src/project-config.ts` - Updated all config interfaces +- `src/commands/helpers.ts` - Added 'agents-md' mode +- `src/index.ts` - Updated CLI commands and completion +- `src/completion/scripts.ts` - Updated all shell completions +- `README.md` & `README_ZH.md` - Documented changes with reference links +- `KNOWLEDGE_BASE.md` - Updated architecture documentation + +**Reference Links Added:** +All documentation now includes links to official tool documentation for easy reference. + +### AGENTS.md Adapter Redesign (2026-01) + +**Redesigned agents-md adapter for flexible AGENTS.md location:** + +**Problem Solved:** +- Old adapter required AGENTS.md files in fixed `agents-md/` directory +- Didn't align with agents.md standard allowing files anywhere in repo +- Limited name resolution didn't support path-based lookup + +**New Design:** +- **Changed default source directory**: `agents-md` → `.` (repository root) +- **Flexible path resolution** supports 4 patterns: + 1. **Explicit file path**: `frontend/AGENTS.md` → `repo/frontend/AGENTS.md` + 2. **Directory path**: `docs/team` → `repo/docs/team/AGENTS.md` (auto-appends /AGENTS.md) + 3. **Simple name**: `frontend` → tries `repo/frontend/AGENTS.md`, then `repo/AGENTS.md` + 4. **Root level**: `.` or `AGENTS` → `repo/AGENTS.md` +- **Case insensitive**: Supports both `AGENTS.md` and `agents.md` variants +- **Custom config management**: Uses flat `agentsMd` structure instead of nested like other tools + +**Configuration Changes:** +```json +// Old (rules repo) +{ "sourceDir": { "agentsMd": { "file": "agents-md" } } } + +// New (rules repo) - supports any directory +{ "sourceDir": { "agentsMd": { "file": "." 
} } } + +// Project config - flat structure with flexible paths +{ + "agentsMd": { + "root": { "url": "...", "rule": "AGENTS.md" }, + "frontend": { "url": "...", "rule": "frontend/AGENTS.md" }, + "platform": { "url": "...", "rule": "docs/teams/platform/AGENTS.md" } + } +} +``` + +**Implementation Details:** +- Custom `resolveSource` with multi-pattern path resolution +- Custom `addDependency`/`removeDependency` for flat config structure +- Fixed alias handling in `handleAdd` to prioritize user-provided aliases +- Validates that only AGENTS.md files are supported (rejects other .md files) + +**Migration:** +- **No breaking changes**: Old configs continue to work +- Existing `agents-md/AGENTS.md` will still be found +- Users can gradually migrate to new flexible structure +- New projects can organize AGENTS.md files by directory (frontend/, backend/, etc.) + +**Files Changed:** +- `src/adapters/agents-md.ts` - Complete rewrite with flexible resolution +- `src/project-config.ts` - Updated default comment and config handling +- `src/commands/handlers.ts` - Fixed alias handling for all adapters +- `KNOWLEDGE_BASE.md`, `README.md`, `README_ZH.md` - Documentation updates + +### Custom Target Directories (2026-01) + +**Added support for custom target directories per entry:** + +**Feature Overview:** +- Users can now specify custom target directories for each synced entry +- Allows flexible organization beyond default tool directories +- Perfect for documentation projects, monorepos, and custom team structures +- Supports aliasing same rule to multiple locations + +**Implementation:** +- **Entry-level configuration**: Each config entry can specify `targetDir` field +- **CLI option**: Added `-d, --target-dir ` to all `add` commands +- **Priority resolution**: options.targetDir > config entry targetDir > adapter default +- **Conflict detection**: Prevents overwriting when adding same rule to different locations without alias +- **Suffix handling**: Properly resolves file 
suffixes when removing aliased entries + +**Configuration Examples:** + +```json +{ + "cursor": { + "rules": { + // Default target directory (.cursor/rules/) + "standard-rule": "https://github.com/company/rules", + + // Custom target directory + "docs-rule": { + "url": "https://github.com/company/rules", + "targetDir": "docs/ai/rules" + }, + + // Same rule to multiple locations (requires alias) + "frontend-auth": { + "url": "https://github.com/company/rules", + "rule": "auth-rules", + "targetDir": "packages/frontend/.cursor/rules" + }, + "backend-auth": { + "url": "https://github.com/company/rules", + "rule": "auth-rules", + "targetDir": "packages/backend/.cursor/rules" + } + } + } +} +``` + +**CLI Usage:** + +```bash +# Add rule to custom directory +ais cursor add my-rule -d docs/ai/rules + +# Add same rule to multiple locations (requires alias) +ais cursor add auth-rules frontend-auth -d packages/frontend/.cursor/rules +ais cursor add auth-rules backend-auth -d packages/backend/.cursor/rules + +# Remove specific location (uses alias as config key) +ais cursor remove frontend-auth + +# Install respects custom targetDir from config +ais cursor install +``` + +**Key Behaviors:** +- **No alias needed** for first-time adds or when targeting different source files +- **Alias required** when adding same source file to different location (prevents config key conflicts) +- **Backward compatible**: Entries without `targetDir` use adapter default +- **Config format**: Uses simple string when no custom targetDir; object format when specified +- **Install support**: `install` command reads and respects `targetDir` from config + +**Files Changed:** +- `src/project-config.ts` - Extended `RuleEntry` type, added `getTargetDir()` and `getEntryConfig()` +- `src/sync-engine.ts` - Dynamic target directory resolution in `linkEntry()`, `unlinkEntry()`, `importEntry()` +- `src/adapters/types.ts` - Extended `SyncOptions` and `addDependency` signature +- `src/adapters/base.ts` - Pass 
`targetDir` through to config functions +- `src/adapters/agents-md.ts` - Updated custom `addDependency()` for `targetDir` support +- `src/commands/handlers.ts` - Added `AddOptions` with conflict detection +- `src/commands/install.ts` - Extract and pass `targetDir` during installation +- `src/cli/register.ts` - Added `-d, --target-dir` option to adapter commands +- `src/index.ts` - Added `-d, --target-dir` to hardcoded cursor/copilot commands +- All tests passing (105/105) + +### Gemini CLI Support (2026-02) + +**Added complete support for Gemini CLI (https://geminicli.com/) with three entry types:** + +**Problem Solved:** +- Teams using Gemini CLI needed to share commands, skills, and agents across projects +- No centralized way to distribute Gemini CLI configurations +- Manual copying led to drift and inconsistency + +**Features Implemented:** + +1. **Gemini Commands Adapter**: + - File mode with `.toml` suffix (native Gemini CLI format) + - Source: `.gemini/commands/` + - Syntax: `ais gemini commands add [alias]` + - Purpose: Reusable prompts with argument substitution + +2. **Gemini Skills Adapter**: + - Directory mode (SKILL.md + optional assets) + - Source: `.gemini/skills/` + - Syntax: `ais gemini skills add [alias]` + - Purpose: Specialized expertise for specific tasks + +3. **Gemini Agents Adapter**: + - File mode with `.md` suffix (Markdown with YAML frontmatter) + - Source: `.gemini/agents/` + - Syntax: `ais gemini agents add [alias]` + - Purpose: Specialized agents with defined capabilities + +4. 
**CLI Commands**: + - `ais gemini install` - Install all Gemini commands, skills, and agents + - `ais gemini add-all` - Discover and add all entries from repository + - `ais gemini import ` - Import from project to repository (auto-detects subtype) + - `ais gemini commands [add|remove|install|import]` - Commands management + - `ais gemini skills [add|remove|install|import]` - Skills management + - `ais gemini agents [add|remove|install|import]` - Agents management + +5. **Configuration Support**: + - Extended `SourceDirConfig` with `gemini.commands`, `gemini.skills`, and `gemini.agents` + - Extended `ProjectConfig` with `gemini.commands`, `gemini.skills`, and `gemini.agents` records + - Full support for custom source directories via `-s` option + - Backward compatible with existing configs + +6. **Shell Completion**: + - Added Gemini CLI to bash, zsh, and fish completion scripts + - Dynamic completion for command/skill/agent names via `ais _complete gemini-commands|gemini-skills|gemini-agents` + - Context-aware completions for all subcommands + +7. 
**Mode Detection**: + - Added 'gemini' to `DefaultMode` type + - `ais install` smart dispatch includes Gemini CLI + - Auto-detect Gemini-only projects + +**Implementation:** +- `src/adapters/gemini-commands.ts` - Commands adapter (file mode, `.toml` suffix) +- `src/adapters/gemini-skills.ts` - Skills adapter (directory mode) +- `src/adapters/gemini-agents.ts` - Agents adapter (file mode, `.md` suffix) +- `src/adapters/index.ts` - Registered adapters and added to `findAdapterForAlias` +- `src/project-config.ts` - Extended configuration interfaces and helpers +- `src/commands/helpers.ts` - Added 'gemini' mode type and inference +- `src/index.ts` - Full CLI command hierarchy with install/add-all/import +- `src/completion/scripts.ts` - Shell completion for all three shells +- `src/__tests__/gemini-commands.test.ts` - Commands adapter tests (5 tests) +- `src/__tests__/gemini-skills.test.ts` - Skills adapter tests (5 tests) +- `src/__tests__/gemini-agents.test.ts` - Agents adapter tests (5 tests) +- `README.md`, `README_ZH.md` - Documentation updates + +**Files Changed:** 15 new/modified, all tests passing (166/166) + +**Benefits:** +- Centralized Gemini CLI configuration management +- Team-wide consistency for commands, skills, and agents +- Easy sharing of configurations across projects +- Native TOML support for commands +- Follows same pattern as other supported tools (Cursor, OpenCode, Codex) diff --git a/README.md b/README.md index e69d41a..7b2e51b 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,9 @@ **AI Rules Sync (AIS)** - Synchronize, manage, and share your AI agent rules across projects and teams. -Stop copying `.mdc` files around. Manage your rules in Git repositories and sync them via symbolic links. Supports 11 AI tools and **User Mode** for personal config files — see [Supported Tools](#supported-tools). +Stop copying `.mdc` files around. Manage your rules in Git repositories and sync them via symbolic links. 
+ +**Supports:** Cursor (rules, commands, skills, subagents), GitHub Copilot (instructions, prompts, skills, agents), Claude Code (rules, skills, subagents, CLAUDE.md), Trae (rules, skills), OpenCode (commands, skills, agents, tools), Codex (rules, skills, AGENTS.md), Gemini CLI (commands, skills, agents, GEMINI.md), Windsurf (rules, skills), Cline (rules, skills), Warp (rules via AGENTS.md, skills), and universal AGENTS.md. Also supports **User Mode** for personal AI config files (`~/.claude/CLAUDE.md`, `~/.gemini/GEMINI.md`, `~/.codex/AGENTS.md`, `~/.config/opencode/`, etc.). --- @@ -93,9 +95,11 @@ ais completion install | OpenCode | Tools | file | `.opencode/tools/` | `.ts`, `.js` | [Docs](https://opencode.ai/docs/tools/) | | Codex | Rules | file | `.codex/rules/` | `.rules` | [Docs](https://developers.openai.com/codex/rules) | | Codex | Skills | directory | `.agents/skills/` | - | [Docs](https://developers.openai.com/codex/skills) | +| Codex | AGENTS.md | file | `.codex/` | `.md` | [Docs](https://developers.openai.com/codex) | | Gemini CLI | Commands | file | `.gemini/commands/` | `.toml` | [Docs](https://geminicli.com/docs/cli/custom-commands/) | | Gemini CLI | Skills | directory | `.gemini/skills/` | - | [Docs](https://geminicli.com/docs/cli/skills/) | -| Gemini CLI | Subagents | file | `.gemini/agents/` | `.md` | [Docs](https://geminicli.com/docs/core/subagents/) | +| Gemini CLI | Agents | file | `.gemini/agents/` | `.md` | [Docs](https://geminicli.com/docs/core/subagents/) | +| Gemini CLI | GEMINI.md | file | `.gemini/` | `.md` | [Website](https://geminicli.com/) | | Warp | Rules | file | `.` (root) | `.md` | [Docs](https://docs.warp.dev/agent-platform/capabilities/rules) — same as AGENTS.md, use `ais agents-md` | | Warp | Skills | directory | `.agents/skills/` | - | [Docs](https://docs.warp.dev/agent-platform/capabilities/skills) | | Windsurf | Rules | file | `.windsurf/rules/` | `.md` | [Docs](https://docs.windsurf.com/windsurf/cascade/memories) | @@ 
-526,6 +530,9 @@ ais codex rules add default # Add skill ais codex skills add code-assistant +# Add AGENTS.md (project-level context file) +ais codex md add AGENTS + # Install all ais codex install @@ -539,6 +546,13 @@ ais codex rules remove default **Note:** Codex skills use `.agents/skills/` (not `.codex/skills/`) per OpenAI documentation. +**User mode** — manage `~/.codex/AGENTS.md`: + +```bash +ais codex md add AGENTS --user +# → symlink created at ~/.codex/AGENTS.md +``` + ### Gemini CLI ```bash @@ -548,13 +562,21 @@ ais gemini commands add deploy-docs # Add skill (directory) ais gemini skills add code-review -# Add subagent (.md) +# Add agent (.md) ais gemini agents add code-analyzer -# Remove -ais gemini commands remove deploy-docs -ais gemini skills remove code-review -ais gemini agents remove code-analyzer +# Add GEMINI.md (project-level context file) +ais gemini md add GEMINI + +# Install all +ais gemini install +``` + +**User mode** — manage `~/.gemini/GEMINI.md`: + +```bash +ais gemini md add GEMINI --user +# → symlink created at ~/.gemini/GEMINI.md ``` ### AGENTS.md (Universal) @@ -792,13 +814,22 @@ ais cursor add auth-rules backend-auth -d packages/backend/.cursor/rules ### User Mode (Personal AI Config Files) -**Manage personal AI config files (`~/.claude/CLAUDE.md`, `~/.cursor/rules/`, etc.) 
with version control:** +**Manage personal AI config files with version control:** ```bash -# Add personal CLAUDE.md to user config +# Claude Code: ~/.claude/CLAUDE.md ais claude md add CLAUDE --user -# Add personal Cursor rules +# Gemini CLI: ~/.gemini/GEMINI.md +ais gemini md add GEMINI --user + +# Codex: ~/.codex/AGENTS.md +ais codex md add AGENTS --user + +# OpenCode (XDG path): ~/.config/opencode/commands/ +ais opencode commands add my-cmd --user + +# Cursor rules: ~/.cursor/rules/ ais cursor rules add my-style --user # Install all user entries on a new machine @@ -807,6 +838,8 @@ ais user install ais install --user ``` +> **OpenCode note:** User-level files land in `~/.config/opencode/` (XDG), not `~/.opencode/`. + **Manage user config path** (for dotfiles integration): ```bash @@ -844,6 +877,12 @@ ais user install "md": { "CLAUDE": "https://github.com/me/my-rules.git" }, "rules": { "general": "https://github.com/me/my-rules.git" } }, + "gemini": { + "md": { "GEMINI": "https://github.com/me/my-rules.git" } + }, + "codex": { + "md": { "AGENTS": "https://github.com/me/my-rules.git" } + }, "cursor": { "rules": { "my-style": "https://github.com/me/my-rules.git" } } diff --git a/README_ZH.md b/README_ZH.md index d4b9dda..6a9ef11 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -8,7 +8,9 @@ **AI Rules Sync (AIS)** - 跨项目和团队同步、管理和共享你的 AI 代理规则。 -不再复制粘贴 `.mdc` 文件。在 Git 仓库中管理规则,通过软链接同步。支持 11 款 AI 工具及 **User 模式**(管理个人配置文件)——详见[支持的工具](#支持的工具)。 +不再复制粘贴 `.mdc` 文件。在 Git 仓库中管理规则,通过软链接同步。 + +**支持:** Cursor(规则、命令、技能、subagents)、GitHub Copilot(指令、提示词、技能、代理)、Claude Code(规则、技能、subagents、CLAUDE.md)、Trae(规则、技能)、OpenCode(命令、技能、代理、工具)、Codex(规则、技能、AGENTS.md)、Gemini CLI(命令、技能、代理、GEMINI.md)、Windsurf(规则、技能)、Cline(规则、技能)、Warp(规则 via AGENTS.md、技能)以及通用的 AGENTS.md。另支持 **User 模式**,用于管理个人 AI 配置文件(如 `~/.claude/CLAUDE.md`、`~/.gemini/GEMINI.md`、`~/.codex/AGENTS.md`、`~/.config/opencode/` 等)。 --- @@ -93,9 +95,11 @@ ais completion install | OpenCode | Tools | file | `.opencode/tools/` | `.ts`, `.js` | 
[文档](https://opencode.ai/docs/tools/) | | Codex | Rules | file | `.codex/rules/` | `.rules` | [文档](https://developers.openai.com/codex/rules) | | Codex | Skills | directory | `.agents/skills/` | - | [文档](https://developers.openai.com/codex/skills) | +| Codex | AGENTS.md | file | `.codex/` | `.md` | [文档](https://developers.openai.com/codex) | | Gemini CLI | Commands | file | `.gemini/commands/` | `.toml` | [文档](https://geminicli.com/docs/cli/custom-commands/) | | Gemini CLI | Skills | directory | `.gemini/skills/` | - | [文档](https://geminicli.com/docs/cli/skills/) | -| Gemini CLI | Subagents | file | `.gemini/agents/` | `.md` | [文档](https://geminicli.com/docs/core/subagents/) | +| Gemini CLI | Agents | file | `.gemini/agents/` | `.md` | [文档](https://geminicli.com/docs/core/subagents/) | +| Gemini CLI | GEMINI.md | file | `.gemini/` | `.md` | [网站](https://geminicli.com/) | | Warp | Rules | file | `.`(根目录) | `.md` | [文档](https://docs.warp.dev/agent-platform/capabilities/rules) — 与 AGENTS.md 相同,使用 `ais agents-md` | | Warp | Skills | directory | `.agents/skills/` | - | [文档](https://docs.warp.dev/agent-platform/capabilities/skills) | | Windsurf | Rules | file | `.windsurf/rules/` | `.md` | [文档](https://docs.windsurf.com/windsurf/cascade/memories) | @@ -526,6 +530,9 @@ ais codex rules add default # 添加技能 ais codex skills add code-assistant +# 添加 AGENTS.md(项目级上下文文件) +ais codex md add AGENTS + # 安装所有 ais codex install @@ -539,6 +546,13 @@ ais codex rules remove default **注意:** Codex 技能使用 `.agents/skills/` 目录(而非 `.codex/skills/`),这是按照 OpenAI 文档的规定。 +**User 模式** — 管理 `~/.codex/AGENTS.md`: + +```bash +ais codex md add AGENTS --user +# → 符号链接创建于 ~/.codex/AGENTS.md +``` + ### Gemini CLI ```bash @@ -548,13 +562,21 @@ ais gemini commands add deploy-docs # 添加技能(目录) ais gemini skills add code-review -# 添加 subagent(.md) +# 添加代理(.md) ais gemini agents add code-analyzer -# 移除 -ais gemini commands remove deploy-docs -ais gemini skills remove code-review -ais gemini agents remove 
code-analyzer +# 添加 GEMINI.md(项目级上下文文件) +ais gemini md add GEMINI + +# 安装所有 +ais gemini install +``` + +**User 模式** — 管理 `~/.gemini/GEMINI.md`: + +```bash +ais gemini md add GEMINI --user +# → 符号链接创建于 ~/.gemini/GEMINI.md ``` ### AGENTS.md(通用) @@ -792,13 +814,22 @@ ais cursor add auth-rules backend-auth -d packages/backend/.cursor/rules ### User 模式(个人 AI 配置文件) -**使用版本控制管理个人 AI 配置文件(`~/.claude/CLAUDE.md`、`~/.cursor/rules/` 等):** +**使用版本控制管理个人 AI 配置文件:** ```bash -# 将个人 CLAUDE.md 加入 user 配置 +# Claude Code:~/.claude/CLAUDE.md ais claude md add CLAUDE --user -# 添加个人 Cursor 规则 +# Gemini CLI:~/.gemini/GEMINI.md +ais gemini md add GEMINI --user + +# Codex:~/.codex/AGENTS.md +ais codex md add AGENTS --user + +# OpenCode(XDG 路径):~/.config/opencode/commands/ +ais opencode commands add my-cmd --user + +# Cursor 规则:~/.cursor/rules/ ais cursor rules add my-style --user # 在新机器上一键恢复所有 user 配置 @@ -807,6 +838,8 @@ ais user install ais install --user ``` +> **OpenCode 注意:** User 级别文件存放在 `~/.config/opencode/`(XDG 规范),而非 `~/.opencode/`。 + **管理 user 配置路径**(用于 dotfiles 集成): ```bash @@ -844,6 +877,12 @@ ais user install "md": { "CLAUDE": "https://github.com/me/my-rules.git" }, "rules": { "general": "https://github.com/me/my-rules.git" } }, + "gemini": { + "md": { "GEMINI": "https://github.com/me/my-rules.git" } + }, + "codex": { + "md": { "AGENTS": "https://github.com/me/my-rules.git" } + }, "cursor": { "rules": { "my-style": "https://github.com/me/my-rules.git" } } diff --git a/src/__tests__/codex-md.test.ts b/src/__tests__/codex-md.test.ts new file mode 100644 index 0000000..b6108d4 --- /dev/null +++ b/src/__tests__/codex-md.test.ts @@ -0,0 +1,51 @@ +import { describe, it, expect } from 'vitest'; +import { codexMdAdapter } from '../adapters/codex-md.js'; +import { adapterRegistry } from '../adapters/index.js'; + +describe('codex-md adapter', () => { + it('should have correct basic properties', () => { + expect(codexMdAdapter.name).toBe('codex-md'); + 
expect(codexMdAdapter.tool).toBe('codex'); + expect(codexMdAdapter.subtype).toBe('md'); + expect(codexMdAdapter.defaultSourceDir).toBe('.codex'); + expect(codexMdAdapter.targetDir).toBe('.codex'); + expect(codexMdAdapter.mode).toBe('file'); + expect(codexMdAdapter.fileSuffixes).toEqual(['.md']); + }); + + it('should have correct config path', () => { + expect(codexMdAdapter.configPath).toEqual(['codex', 'md']); + }); + + it('should not have userTargetDir (user path is same as project path base)', () => { + // codex-md uses ~/.codex/AGENTS.md in user mode — the home dir is the project path + expect(codexMdAdapter.userTargetDir).toBeUndefined(); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('codex-md'); + expect(retrieved).toBe(codexMdAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('codex', 'md'); + expect(retrieved).toBe(codexMdAdapter); + }); + + it('should have required adapter methods', () => { + expect(codexMdAdapter.addDependency).toBeDefined(); + expect(codexMdAdapter.removeDependency).toBeDefined(); + expect(codexMdAdapter.link).toBeDefined(); + expect(codexMdAdapter.unlink).toBeDefined(); + }); + + it('should have resolveSource and resolveTargetName hooks', () => { + expect(codexMdAdapter.resolveSource).toBeDefined(); + expect(codexMdAdapter.resolveTargetName).toBeDefined(); + }); + + it('should include codex-md in codex tool adapters', () => { + const codexAdapters = adapterRegistry.getForTool('codex'); + expect(codexAdapters.map(a => a.name)).toContain('codex-md'); + }); +}); diff --git a/src/__tests__/gemini-md.test.ts b/src/__tests__/gemini-md.test.ts new file mode 100644 index 0000000..0eca9e9 --- /dev/null +++ b/src/__tests__/gemini-md.test.ts @@ -0,0 +1,51 @@ +import { describe, it, expect } from 'vitest'; +import { geminiMdAdapter } from '../adapters/gemini-md.js'; +import { adapterRegistry } from '../adapters/index.js'; 
+ +describe('gemini-md adapter', () => { + it('should have correct basic properties', () => { + expect(geminiMdAdapter.name).toBe('gemini-md'); + expect(geminiMdAdapter.tool).toBe('gemini'); + expect(geminiMdAdapter.subtype).toBe('md'); + expect(geminiMdAdapter.defaultSourceDir).toBe('.gemini'); + expect(geminiMdAdapter.targetDir).toBe('.gemini'); + expect(geminiMdAdapter.mode).toBe('file'); + expect(geminiMdAdapter.fileSuffixes).toEqual(['.md']); + }); + + it('should have correct config path', () => { + expect(geminiMdAdapter.configPath).toEqual(['gemini', 'md']); + }); + + it('should not have userTargetDir (user path is same as project path base)', () => { + // gemini-md uses ~/.gemini/GEMINI.md in user mode — the home dir is the project path + expect(geminiMdAdapter.userTargetDir).toBeUndefined(); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = adapterRegistry.getByName('gemini-md'); + expect(retrieved).toBe(geminiMdAdapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = adapterRegistry.get('gemini', 'md'); + expect(retrieved).toBe(geminiMdAdapter); + }); + + it('should have required adapter methods', () => { + expect(geminiMdAdapter.addDependency).toBeDefined(); + expect(geminiMdAdapter.removeDependency).toBeDefined(); + expect(geminiMdAdapter.link).toBeDefined(); + expect(geminiMdAdapter.unlink).toBeDefined(); + }); + + it('should have resolveSource and resolveTargetName hooks', () => { + expect(geminiMdAdapter.resolveSource).toBeDefined(); + expect(geminiMdAdapter.resolveTargetName).toBeDefined(); + }); + + it('should include gemini-md in gemini tool adapters', () => { + const geminiAdapters = adapterRegistry.getForTool('gemini'); + expect(geminiAdapters.map(a => a.name)).toContain('gemini-md'); + }); +}); diff --git a/src/adapters/base.ts b/src/adapters/base.ts index b42795a..60a7c15 100644 --- a/src/adapters/base.ts +++ b/src/adapters/base.ts @@ -14,6 +14,7 @@ export interface 
AdapterConfig { configPath: [string, string]; defaultSourceDir: string; targetDir: string; + userTargetDir?: string; mode: 'directory' | 'file' | 'hybrid'; fileSuffixes?: string[]; hybridFileSuffixes?: string[]; @@ -33,6 +34,7 @@ export function createBaseAdapter(config: AdapterConfig): SyncAdapter { configPath: config.configPath, defaultSourceDir: config.defaultSourceDir, targetDir: config.targetDir, + userTargetDir: config.userTargetDir, mode: config.mode, fileSuffixes: config.fileSuffixes, hybridFileSuffixes: config.hybridFileSuffixes, diff --git a/src/adapters/codex-md.ts b/src/adapters/codex-md.ts new file mode 100644 index 0000000..28e4f29 --- /dev/null +++ b/src/adapters/codex-md.ts @@ -0,0 +1,30 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter, createSingleSuffixResolver, createSuffixAwareTargetResolver } from './base.js'; + +const SUFFIX = '.md'; + +/** + * Adapter for Codex AGENTS.md file (.codex/AGENTS.md) + * Mode: file - links individual .md files from .codex/ directory + * + * Global mode usage: + * ais codex md add AGENTS --global + * → creates symlink at ~/.codex/AGENTS.md + * + * Project mode usage: + * ais codex md add AGENTS + * → creates symlink at ./.codex/AGENTS.md + */ +export const codexMdAdapter: SyncAdapter = createBaseAdapter({ + name: 'codex-md', + tool: 'codex', + subtype: 'md', + configPath: ['codex', 'md'], + defaultSourceDir: '.codex', + targetDir: '.codex', + mode: 'file', + fileSuffixes: [SUFFIX], + + resolveSource: createSingleSuffixResolver(SUFFIX, 'AGENTS.md'), + resolveTargetName: createSuffixAwareTargetResolver([SUFFIX]) +}); diff --git a/src/adapters/gemini-md.ts b/src/adapters/gemini-md.ts new file mode 100644 index 0000000..13473cf --- /dev/null +++ b/src/adapters/gemini-md.ts @@ -0,0 +1,30 @@ +import { SyncAdapter } from './types.js'; +import { createBaseAdapter, createSingleSuffixResolver, createSuffixAwareTargetResolver } from './base.js'; + +const SUFFIX = '.md'; + +/** + * Adapter for Gemini 
CLI GEMINI.md file (.gemini/GEMINI.md) + * Mode: file - links individual .md files from .gemini/ directory + * + * Global mode usage: + * ais gemini md add GEMINI --global + * → creates symlink at ~/.gemini/GEMINI.md + * + * Project mode usage: + * ais gemini md add GEMINI + * → creates symlink at ./.gemini/GEMINI.md + */ +export const geminiMdAdapter: SyncAdapter = createBaseAdapter({ + name: 'gemini-md', + tool: 'gemini', + subtype: 'md', + configPath: ['gemini', 'md'], + defaultSourceDir: '.gemini', + targetDir: '.gemini', + mode: 'file', + fileSuffixes: [SUFFIX], + + resolveSource: createSingleSuffixResolver(SUFFIX, 'GEMINI.md'), + resolveTargetName: createSuffixAwareTargetResolver([SUFFIX]) +}); diff --git a/src/adapters/index.ts b/src/adapters/index.ts index 05bbec5..d8f437c 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -23,6 +23,8 @@ import { codexSkillsAdapter } from './codex-skills.js'; import { geminiCommandsAdapter } from './gemini-commands.js'; import { geminiSkillsAdapter } from './gemini-skills.js'; import { geminiAgentsAdapter } from './gemini-agents.js'; +import { geminiMdAdapter } from './gemini-md.js'; +import { codexMdAdapter } from './codex-md.js'; import { warpSkillsAdapter } from './warp-skills.js'; import { windsurfRulesAdapter } from './windsurf-rules.js'; import { clineRulesAdapter } from './cline-rules.js'; @@ -68,6 +70,8 @@ class DefaultAdapterRegistry implements AdapterRegistry { this.register(geminiCommandsAdapter); this.register(geminiSkillsAdapter); this.register(geminiAgentsAdapter); + this.register(geminiMdAdapter); + this.register(codexMdAdapter); this.register(warpSkillsAdapter); this.register(windsurfRulesAdapter); this.register(clineRulesAdapter); @@ -208,6 +212,9 @@ export function findAdapterForAlias( if (cfg.codex?.skills?.[alias]) { return { adapter: codexSkillsAdapter, section: 'codex.skills' }; } + if (cfg.codex?.md?.[alias]) { + return { adapter: codexMdAdapter, section: 'codex.md' }; + } if 
(cfg.gemini?.commands?.[alias]) { return { adapter: geminiCommandsAdapter, section: 'gemini.commands' }; } @@ -217,6 +224,9 @@ export function findAdapterForAlias( if (cfg.gemini?.agents?.[alias]) { return { adapter: geminiAgentsAdapter, section: 'gemini.agents' }; } + if (cfg.gemini?.md?.[alias]) { + return { adapter: geminiMdAdapter, section: 'gemini.md' }; + } if (cfg.warp?.skills?.[alias]) { return { adapter: warpSkillsAdapter, section: 'warp.skills' }; } diff --git a/src/adapters/opencode-agents.ts b/src/adapters/opencode-agents.ts index cde748c..4662933 100644 --- a/src/adapters/opencode-agents.ts +++ b/src/adapters/opencode-agents.ts @@ -14,6 +14,7 @@ export const opencodeAgentsAdapter: SyncAdapter = createBaseAdapter({ configPath: ['opencode', 'agents'], defaultSourceDir: '.opencode/agents', targetDir: '.opencode/agents', + userTargetDir: '.config/opencode/agents', mode: 'file', fileSuffixes: [SUFFIX], diff --git a/src/adapters/opencode-commands.ts b/src/adapters/opencode-commands.ts index fdc55b9..41613df 100644 --- a/src/adapters/opencode-commands.ts +++ b/src/adapters/opencode-commands.ts @@ -14,6 +14,7 @@ export const opencodeCommandsAdapter: SyncAdapter = createBaseAdapter({ configPath: ['opencode', 'commands'], defaultSourceDir: '.opencode/commands', targetDir: '.opencode/commands', + userTargetDir: '.config/opencode/commands', mode: 'file', fileSuffixes: [SUFFIX], diff --git a/src/adapters/opencode-skills.ts b/src/adapters/opencode-skills.ts index bf5896e..f7fe419 100644 --- a/src/adapters/opencode-skills.ts +++ b/src/adapters/opencode-skills.ts @@ -7,5 +7,6 @@ export const opencodeSkillsAdapter = createBaseAdapter({ configPath: ['opencode', 'skills'], defaultSourceDir: '.opencode/skills', targetDir: '.opencode/skills', + userTargetDir: '.config/opencode/skills', mode: 'directory', }); diff --git a/src/adapters/opencode-tools.ts b/src/adapters/opencode-tools.ts index 4dd404f..f7ae5d3 100644 --- a/src/adapters/opencode-tools.ts +++ 
b/src/adapters/opencode-tools.ts @@ -14,6 +14,7 @@ export const opencodeToolsAdapter: SyncAdapter = createBaseAdapter({ configPath: ['opencode', 'tools'], defaultSourceDir: '.opencode/tools', targetDir: '.opencode/tools', + userTargetDir: '.config/opencode/tools', mode: 'file', fileSuffixes: SUFFIXES, diff --git a/src/adapters/types.ts b/src/adapters/types.ts index 8a74faf..fe3fd09 100644 --- a/src/adapters/types.ts +++ b/src/adapters/types.ts @@ -23,6 +23,9 @@ export interface SyncAdapter { /** Target directory in project (relative to project root), e.g. ".cursor/rules" */ targetDir: string; + /** Optional override target directory for user-level (global) mode, relative to home dir */ + userTargetDir?: string; + /** Sync mode: 'directory' for linking folders, 'file' for linking individual files, 'hybrid' for both */ mode: 'directory' | 'file' | 'hybrid'; diff --git a/src/index.ts b/src/index.ts index b2dbbf9..692f415 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1124,6 +1124,10 @@ registerAdapterCommands({ adapter: getAdapter('codex', 'rules'), parentCommand: const codexSkills = codex.command('skills').description('Manage Codex skills'); registerAdapterCommands({ adapter: getAdapter('codex', 'skills'), parentCommand: codexSkills, programOpts: () => program.opts() }); +// codex md subgroup (for AGENTS.md files, supports --global) +const codexMd = codex.command('md').description('Manage Codex AGENTS.md files (.codex/AGENTS.md)'); +registerAdapterCommands({ adapter: getAdapter('codex', 'md'), parentCommand: codexMd, programOpts: () => program.opts() }); + // ============ Gemini CLI command group ============ const gemini = program .command('gemini') @@ -1250,6 +1254,10 @@ registerAdapterCommands({ adapter: getAdapter('gemini', 'skills'), parentCommand const geminiAgents = gemini.command('agents').description('Manage Gemini agents'); registerAdapterCommands({ adapter: getAdapter('gemini', 'agents'), parentCommand: geminiAgents, programOpts: () => program.opts() }); 
+// gemini md subgroup (for GEMINI.md files, supports --global) +const geminiMd = gemini.command('md').description('Manage Gemini GEMINI.md files (.gemini/GEMINI.md)'); +registerAdapterCommands({ adapter: getAdapter('gemini', 'md'), parentCommand: geminiMd, programOpts: () => program.opts() }); + // ============ Warp command group ============ const warp = program .command('warp') @@ -1616,7 +1624,7 @@ program // ============ Internal _complete command ============ program .command('_complete') - .argument('', 'Type of completion: cursor, cursor-commands, cursor-skills, cursor-agents, copilot, claude-skills, claude-agents, claude-rules, trae-rules, trae-skills, opencode-agents, opencode-skills, opencode-commands, opencode-tools, codex-rules, codex-skills, gemini-commands, gemini-skills, gemini-agents, warp-skills, windsurf-rules, windsurf-skills, cline-rules, cline-skills, agents-md') + .argument('', 'Type of completion: cursor, cursor-commands, cursor-skills, cursor-agents, copilot, claude-skills, claude-agents, claude-rules, trae-rules, trae-skills, opencode-agents, opencode-skills, opencode-commands, opencode-tools, codex-rules, codex-skills, codex-md, gemini-commands, gemini-skills, gemini-agents, gemini-md, warp-skills, windsurf-rules, windsurf-skills, cline-rules, cline-skills, agents-md') .description('Internal command for shell completion') .action(async (type: string) => { try { @@ -1906,7 +1914,7 @@ configGlobal // ============ User command group ============ const userCmd = program .command('user') - .description('Manage user-level AI config files (~/.claude/CLAUDE.md, etc.)'); + .description('Manage user-level AI config files (~/.claude/CLAUDE.md, ~/.gemini/GEMINI.md, ~/.codex/AGENTS.md, etc.)'); userCmd .command('install') diff --git a/src/project-config.ts b/src/project-config.ts index 7f4f53e..6a117eb 100644 --- a/src/project-config.ts +++ b/src/project-config.ts @@ -74,6 +74,8 @@ export interface SourceDirConfig { rules?: string; // Source directory 
for codex skills, default: ".agents/skills" skills?: string; + // Source directory for codex md files (AGENTS.md), default: ".codex" + md?: string; }; gemini?: { // Source directory for gemini commands, default: ".gemini/commands" @@ -82,6 +84,8 @@ export interface SourceDirConfig { skills?: string; // Source directory for gemini agents, default: ".gemini/agents" agents?: string; + // Source directory for gemini md files (GEMINI.md), default: ".gemini" + md?: string; }; warp?: { // Source directory for warp skills, default: ".agents/skills" @@ -158,11 +162,13 @@ export interface ProjectConfig { codex?: { rules?: Record; skills?: Record; + md?: Record; }; gemini?: { commands?: Record; skills?: Record; agents?: Record; + md?: Record; }; warp?: { skills?: Record; @@ -216,6 +222,13 @@ export interface RepoSourceConfig { codex?: { rules?: string; skills?: string; + md?: string; + }; + gemini?: { + commands?: string; + skills?: string; + agents?: string; + md?: string; }; gemini?: { commands?: string; @@ -305,7 +318,14 @@ function mergeCombined(main: ProjectConfig, local: ProjectConfig): ProjectConfig }, codex: { rules: { ...(main.codex?.rules || {}), ...(local.codex?.rules || {}) }, - skills: { ...(main.codex?.skills || {}), ...(local.codex?.skills || {}) } + skills: { ...(main.codex?.skills || {}), ...(local.codex?.skills || {}) }, + md: { ...(main.codex?.md || {}), ...(local.codex?.md || {}) } + }, + gemini: { + commands: { ...(main.gemini?.commands || {}), ...(local.gemini?.commands || {}) }, + skills: { ...(main.gemini?.skills || {}), ...(local.gemini?.skills || {}) }, + agents: { ...(main.gemini?.agents || {}), ...(local.gemini?.agents || {}) }, + md: { ...(main.gemini?.md || {}), ...(local.gemini?.md || {}) } }, gemini: { commands: { ...(main.gemini?.commands || {}), ...(local.gemini?.commands || {}) }, @@ -567,6 +587,18 @@ export function getSourceDir( toolDir = repoConfig.codex?.rules; } else if (subtype === 'skills') { toolDir = repoConfig.codex?.skills; + } 
else if (subtype === 'md') { + toolDir = repoConfig.codex?.md; + } + } else if (tool === 'gemini') { + if (subtype === 'commands') { + toolDir = repoConfig.gemini?.commands; + } else if (subtype === 'skills') { + toolDir = repoConfig.gemini?.skills; + } else if (subtype === 'agents') { + toolDir = repoConfig.gemini?.agents; + } else if (subtype === 'md') { + toolDir = repoConfig.gemini?.md; } } else if (tool === 'gemini') { if (subtype === 'commands') { diff --git a/src/sync-engine.ts b/src/sync-engine.ts index 631e55b..7171cff 100644 --- a/src/sync-engine.ts +++ b/src/sync-engine.ts @@ -82,6 +82,11 @@ export async function linkEntry( targetDirPath = adapter.targetDir; } + // In user/global mode, use userTargetDir if the adapter defines one + if (options.skipIgnore && adapter.userTargetDir) { + targetDirPath = adapter.userTargetDir; + } + const targetDir = path.join(absoluteProjectPath, targetDirPath); const targetPath = path.join(targetDir, targetName); diff --git a/tests/codex-adapters.test.ts b/tests/codex-adapters.test.ts index c03e54f..9a0f945 100644 --- a/tests/codex-adapters.test.ts +++ b/tests/codex-adapters.test.ts @@ -1,6 +1,7 @@ import { describe, it, expect } from 'vitest'; import { codexRulesAdapter } from '../src/adapters/codex-rules.js'; import { codexSkillsAdapter } from '../src/adapters/codex-skills.js'; +import { codexMdAdapter } from '../src/adapters/codex-md.js'; import { adapterRegistry } from '../src/adapters/index.js'; describe('Codex Adapters', () => { @@ -44,6 +45,25 @@ describe('Codex Adapters', () => { }); }); + describe('codexMdAdapter', () => { + it('should have correct properties', () => { + expect(codexMdAdapter.name).toBe('codex-md'); + expect(codexMdAdapter.tool).toBe('codex'); + expect(codexMdAdapter.subtype).toBe('md'); + expect(codexMdAdapter.defaultSourceDir).toBe('.codex'); + expect(codexMdAdapter.targetDir).toBe('.codex'); + expect(codexMdAdapter.mode).toBe('file'); + }); + + it('should have .md file suffix', () => { + 
expect(codexMdAdapter.fileSuffixes).toEqual(['.md']); + }); + + it('should have correct config path', () => { + expect(codexMdAdapter.configPath).toEqual(['codex', 'md']); + }); + }); + describe('Adapter Registry Integration', () => { it('should register codex rules adapter', () => { const adapter = adapterRegistry.get('codex', 'rules'); @@ -57,11 +77,18 @@ describe('Codex Adapters', () => { expect(adapter?.name).toBe('codex-skills'); }); + it('should register codex md adapter', () => { + const adapter = adapterRegistry.get('codex', 'md'); + expect(adapter).toBeDefined(); + expect(adapter?.name).toBe('codex-md'); + }); + it('should return codex adapters for tool', () => { const adapters = adapterRegistry.getForTool('codex'); - expect(adapters).toHaveLength(2); + expect(adapters).toHaveLength(3); expect(adapters.map(a => a.name)).toContain('codex-rules'); expect(adapters.map(a => a.name)).toContain('codex-skills'); + expect(adapters.map(a => a.name)).toContain('codex-md'); }); }); }); diff --git a/tests/opencode-adapters.test.ts b/tests/opencode-adapters.test.ts new file mode 100644 index 0000000..e8303af --- /dev/null +++ b/tests/opencode-adapters.test.ts @@ -0,0 +1,100 @@ +import { describe, it, expect } from 'vitest'; +import { opencodeCommandsAdapter } from '../src/adapters/opencode-commands.js'; +import { opencodeAgentsAdapter } from '../src/adapters/opencode-agents.js'; +import { opencodeSkillsAdapter } from '../src/adapters/opencode-skills.js'; +import { opencodeToolsAdapter } from '../src/adapters/opencode-tools.js'; +import { adapterRegistry } from '../src/adapters/index.js'; + +describe('OpenCode Adapters', () => { + describe('opencodeCommandsAdapter', () => { + it('should have correct properties', () => { + expect(opencodeCommandsAdapter.name).toBe('opencode-commands'); + expect(opencodeCommandsAdapter.tool).toBe('opencode'); + expect(opencodeCommandsAdapter.subtype).toBe('commands'); + 
expect(opencodeCommandsAdapter.defaultSourceDir).toBe('.opencode/commands'); + expect(opencodeCommandsAdapter.targetDir).toBe('.opencode/commands'); + expect(opencodeCommandsAdapter.mode).toBe('file'); + }); + + it('should have correct userTargetDir for XDG path', () => { + expect(opencodeCommandsAdapter.userTargetDir).toBe('.config/opencode/commands'); + }); + + it('should have correct config path', () => { + expect(opencodeCommandsAdapter.configPath).toEqual(['opencode', 'commands']); + }); + }); + + describe('opencodeAgentsAdapter', () => { + it('should have correct properties', () => { + expect(opencodeAgentsAdapter.name).toBe('opencode-agents'); + expect(opencodeAgentsAdapter.tool).toBe('opencode'); + expect(opencodeAgentsAdapter.subtype).toBe('agents'); + expect(opencodeAgentsAdapter.defaultSourceDir).toBe('.opencode/agents'); + expect(opencodeAgentsAdapter.targetDir).toBe('.opencode/agents'); + expect(opencodeAgentsAdapter.mode).toBe('file'); + }); + + it('should have correct userTargetDir for XDG path', () => { + expect(opencodeAgentsAdapter.userTargetDir).toBe('.config/opencode/agents'); + }); + + it('should have correct config path', () => { + expect(opencodeAgentsAdapter.configPath).toEqual(['opencode', 'agents']); + }); + }); + + describe('opencodeSkillsAdapter', () => { + it('should have correct properties', () => { + expect(opencodeSkillsAdapter.name).toBe('opencode-skills'); + expect(opencodeSkillsAdapter.tool).toBe('opencode'); + expect(opencodeSkillsAdapter.subtype).toBe('skills'); + expect(opencodeSkillsAdapter.defaultSourceDir).toBe('.opencode/skills'); + expect(opencodeSkillsAdapter.targetDir).toBe('.opencode/skills'); + expect(opencodeSkillsAdapter.mode).toBe('directory'); + }); + + it('should have correct userTargetDir for XDG path', () => { + expect(opencodeSkillsAdapter.userTargetDir).toBe('.config/opencode/skills'); + }); + + it('should have correct config path', () => { + expect(opencodeSkillsAdapter.configPath).toEqual(['opencode', 
'skills']); + }); + }); + + describe('opencodeToolsAdapter', () => { + it('should have correct properties', () => { + expect(opencodeToolsAdapter.name).toBe('opencode-tools'); + expect(opencodeToolsAdapter.tool).toBe('opencode'); + expect(opencodeToolsAdapter.subtype).toBe('tools'); + expect(opencodeToolsAdapter.defaultSourceDir).toBe('.opencode/tools'); + expect(opencodeToolsAdapter.targetDir).toBe('.opencode/tools'); + expect(opencodeToolsAdapter.mode).toBe('file'); + }); + + it('should have correct userTargetDir for XDG path', () => { + expect(opencodeToolsAdapter.userTargetDir).toBe('.config/opencode/tools'); + }); + + it('should have correct config path', () => { + expect(opencodeToolsAdapter.configPath).toEqual(['opencode', 'tools']); + }); + }); + + describe('Adapter Registry Integration', () => { + it('should register all opencode adapters', () => { + expect(adapterRegistry.get('opencode', 'commands')).toBeDefined(); + expect(adapterRegistry.get('opencode', 'agents')).toBeDefined(); + expect(adapterRegistry.get('opencode', 'skills')).toBeDefined(); + expect(adapterRegistry.get('opencode', 'tools')).toBeDefined(); + }); + + it('all opencode adapters should have userTargetDir pointing to XDG config path', () => { + const adapters = adapterRegistry.getForTool('opencode'); + for (const adapter of adapters) { + expect(adapter.userTargetDir).toMatch(/^\.config\/opencode\//); + } + }); + }); +}); diff --git a/tests/project-config.test.ts b/tests/project-config.test.ts index 3c314e8..1aa043b 100644 --- a/tests/project-config.test.ts +++ b/tests/project-config.test.ts @@ -2,6 +2,7 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; import path from 'path'; import fs from 'fs-extra'; import { getConfigSource, getCombinedProjectConfig, migrateLegacyToNew, getRepoSourceConfig, getSourceDir } from '../src/project-config.js'; +import { findAdapterForAlias } from '../src/adapters/index.js'; vi.mock('fs-extra'); @@ -198,4 +199,143 @@ describe('getSourceDir', 
() => { const dir = getSourceDir(config, 'cursor', 'commands', '.cursor/commands'); expect(dir).toBe('my-commands'); }); + + it('handles gemini md', () => { + const config = { + gemini: { md: 'custom-gemini' } + }; + + const dir = getSourceDir(config, 'gemini', 'md', '.gemini'); + expect(dir).toBe('custom-gemini'); + }); + + it('handles codex md', () => { + const config = { + codex: { md: 'custom-codex' } + }; + + const dir = getSourceDir(config, 'codex', 'md', '.codex'); + expect(dir).toBe('custom-codex'); + }); +}); + +describe('getCombinedProjectConfig - gemini and codex md sections', () => { + const projectPath = '/mock/project'; + + beforeEach(() => { + vi.resetAllMocks(); + vi.mocked(fs.writeJson).mockResolvedValue(undefined as any); + }); + + it('merges gemini.md entries from main and local configs', async () => { + vi.mocked(fs.pathExists).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) return true; + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) return true; + return false; + }); + + vi.mocked(fs.readJson).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) { + return { gemini: { md: { GEMINI: 'https://example.com/repo.git' } } }; + } + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) { + return { gemini: { md: { 'GEMINI-local': 'https://local.com/repo.git' } } }; + } + return {}; + }); + + const config = await getCombinedProjectConfig(projectPath); + expect(config.gemini?.md?.['GEMINI']).toBe('https://example.com/repo.git'); + expect(config.gemini?.md?.['GEMINI-local']).toBe('https://local.com/repo.git'); + }); + + it('merges codex.md entries from main and local configs', async () => { + vi.mocked(fs.pathExists).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) return true; + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) return true; + return false; + }); + + 
vi.mocked(fs.readJson).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) { + return { codex: { md: { AGENTS: 'https://example.com/repo.git' } } }; + } + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) { + return { codex: { md: { 'AGENTS-local': 'https://local.com/repo.git' } } }; + } + return {}; + }); + + const config = await getCombinedProjectConfig(projectPath); + expect(config.codex?.md?.['AGENTS']).toBe('https://example.com/repo.git'); + expect(config.codex?.md?.['AGENTS-local']).toBe('https://local.com/repo.git'); + }); + + it('merges all gemini subtypes together', async () => { + vi.mocked(fs.pathExists).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) return true; + return false; + }); + + vi.mocked(fs.readJson).mockImplementation(async (p) => { + if (p === path.join(projectPath, 'ai-rules-sync.json')) { + return { + gemini: { + commands: { 'my-cmd': 'https://example.com/repo.git' }, + skills: { 'my-skill': 'https://example.com/repo.git' }, + agents: { 'my-agent': 'https://example.com/repo.git' }, + md: { 'GEMINI': 'https://example.com/repo.git' }, + } + }; + } + return {}; + }); + + const config = await getCombinedProjectConfig(projectPath); + expect(config.gemini?.commands?.['my-cmd']).toBeDefined(); + expect(config.gemini?.skills?.['my-skill']).toBeDefined(); + expect(config.gemini?.agents?.['my-agent']).toBeDefined(); + expect(config.gemini?.md?.['GEMINI']).toBeDefined(); + }); +}); + +describe('findAdapterForAlias - gemini.md and codex.md', () => { + it('should find gemini-md adapter for alias in gemini.md', () => { + const cfg = { + gemini: { md: { 'GEMINI': 'https://example.com/repo.git' } } + }; + const result = findAdapterForAlias(cfg, 'GEMINI'); + expect(result).not.toBeNull(); + expect(result?.adapter.name).toBe('gemini-md'); + expect(result?.section).toBe('gemini.md'); + }); + + it('should find codex-md adapter for alias in codex.md', () => { + 
const cfg = { + codex: { md: { 'AGENTS': 'https://example.com/repo.git' } } + }; + const result = findAdapterForAlias(cfg, 'AGENTS'); + expect(result).not.toBeNull(); + expect(result?.adapter.name).toBe('codex-md'); + expect(result?.section).toBe('codex.md'); + }); + + it('should return null for unknown alias', () => { + const cfg = {}; + const result = findAdapterForAlias(cfg, 'unknown-alias'); + expect(result).toBeNull(); + }); + + it('should prefer gemini.agents over gemini.md for same alias when both present', () => { + // In practice they should not have the same alias, but test priority order + const cfg = { + gemini: { + agents: { 'GEMINI': 'url-agents' }, + md: { 'GEMINI': 'url-md' }, + } + }; + // agents is checked before md in findAdapterForAlias + const result = findAdapterForAlias(cfg, 'GEMINI'); + expect(result?.section).toBe('gemini.agents'); + }); }); diff --git a/tests/sync-engine.test.ts b/tests/sync-engine.test.ts index 6eef71f..a8eeb38 100644 --- a/tests/sync-engine.test.ts +++ b/tests/sync-engine.test.ts @@ -209,3 +209,79 @@ describe('Sync Engine - new sourceDir format', () => { expect(fs.ensureSymlink).toHaveBeenCalledWith(expectedSourcePath, expectedTargetPath); }); }); + +describe('Sync Engine - userTargetDir (global/user mode)', () => { + const mockProjectPath = '/mock/home'; + const mockRepo = { + name: 'test-repo', + url: 'http://test.git', + path: '/mock/repos/test-repo' + }; + + beforeEach(() => { + vi.resetAllMocks(); + vi.mocked(fs.pathExists).mockResolvedValue(true); + vi.mocked(fs.ensureDir).mockResolvedValue(undefined); + vi.mocked(fs.ensureSymlink).mockResolvedValue(undefined); + vi.mocked(fs.lstat).mockResolvedValue({ isSymbolicLink: () => true } as any); + vi.mocked(fs.stat).mockResolvedValue({ isDirectory: () => false } as any); + vi.mocked(fs.remove).mockResolvedValue(undefined); + vi.mocked(utilsModule.addIgnoreEntry).mockResolvedValue(true); + vi.mocked(projectConfigModule.getRepoSourceConfig).mockResolvedValue({}); + 
vi.mocked(projectConfigModule.getSourceDir).mockReturnValue('.opencode/commands'); + }); + + it('should use userTargetDir instead of targetDir when skipIgnore is true', async () => { + // opencode-commands has targetDir='.opencode/commands', userTargetDir='.config/opencode/commands' + await linkEntry(getAdapter('opencode', 'commands'), { + projectPath: mockProjectPath, + name: 'my-cmd.md', + repo: mockRepo, + skipIgnore: true, + }); + + const expectedTargetPath = path.join(path.resolve(mockProjectPath), '.config', 'opencode', 'commands', 'my-cmd.md'); + expect(fs.ensureSymlink).toHaveBeenCalledWith(expect.any(String), expectedTargetPath); + }); + + it('should use targetDir (not userTargetDir) when skipIgnore is false', async () => { + await linkEntry(getAdapter('opencode', 'commands'), { + projectPath: mockProjectPath, + name: 'my-cmd.md', + repo: mockRepo, + skipIgnore: false, + }); + + const expectedTargetPath = path.join(path.resolve(mockProjectPath), '.opencode', 'commands', 'my-cmd.md'); + expect(fs.ensureSymlink).toHaveBeenCalledWith(expect.any(String), expectedTargetPath); + }); + + it('should use targetDir when adapter has no userTargetDir even if skipIgnore is true', async () => { + // cursor-rules has no userTargetDir + vi.mocked(projectConfigModule.getSourceDir).mockReturnValue('.cursor/rules'); + + await linkEntry(getAdapter('cursor', 'rules'), { + projectPath: mockProjectPath, + name: 'my-rule', + repo: mockRepo, + skipIgnore: true, + }); + + const expectedTargetPath = path.join(path.resolve(mockProjectPath), '.cursor', 'rules', 'my-rule'); + expect(fs.ensureSymlink).toHaveBeenCalledWith(expect.any(String), expectedTargetPath); + }); + + it('should use userTargetDir for opencode-agents in user mode', async () => { + vi.mocked(projectConfigModule.getSourceDir).mockReturnValue('.opencode/agents'); + + await linkEntry(getAdapter('opencode', 'agents'), { + projectPath: mockProjectPath, + name: 'my-agent.md', + repo: mockRepo, + skipIgnore: true, + }); + + 
const expectedTargetPath = path.join(path.resolve(mockProjectPath), '.config', 'opencode', 'agents', 'my-agent.md'); + expect(fs.ensureSymlink).toHaveBeenCalledWith(expect.any(String), expectedTargetPath); + }); +}); From d19725731ab361e08623bdc101a88b6a803ac108 Mon Sep 17 00:00:00 2001 From: lbb Date: Sat, 28 Feb 2026 16:44:29 +0800 Subject: [PATCH 03/12] Cursor/ai 9384 (#26) * refactor(cli): abstract rules-skills tool groups Co-authored-by: lbb * refactor(config): table-drive source dir resolution Co-authored-by: lbb * refactor(completion): generate scripts from metadata Co-authored-by: lbb * refactor(test): extract shared adapter contract tests Co-authored-by: lbb * refactor(docs): generate supported tools table from manifest Co-authored-by: lbb * docs: record refactor architecture updates Co-authored-by: lbb * fix(config): remove duplicate gemini declarations Co-authored-by: lbb --------- Co-authored-by: Cursor Agent --- KNOWLEDGE_BASE.md | 21 +- README.md | 4 + README_ZH.md | 6 +- docs/supported-tools.json | 394 ++++ package.json | 3 +- scripts/sync-supported-tools.mjs | 106 + src/__tests__/claude-rules.test.ts | 45 +- src/__tests__/cline-rules.test.ts | 45 +- src/__tests__/cline-skills.test.ts | 43 +- src/__tests__/completion-scripts.test.ts | 31 + src/__tests__/copilot-agents.test.ts | 43 +- src/__tests__/copilot-prompts.test.ts | 43 +- src/__tests__/copilot-skills.test.ts | 43 +- src/__tests__/find-adapter-for-alias.test.ts | 58 + src/__tests__/gemini-agents.test.ts | 45 +- src/__tests__/gemini-commands.test.ts | 45 +- src/__tests__/gemini-skills.test.ts | 43 +- src/__tests__/helpers/adapter-contract.ts | 59 + .../project-config-source-dir.test.ts | 111 + src/__tests__/warp-skills.test.ts | 43 +- src/__tests__/windsurf-rules.test.ts | 45 +- src/__tests__/windsurf-skills.test.ts | 43 +- src/adapters/index.ts | 121 +- src/completion/scripts.ts | 2010 ++++++----------- src/index.ts | 454 ++-- src/project-config.ts | 323 +-- 26 files changed, 1883 
insertions(+), 2344 deletions(-) create mode 100644 docs/supported-tools.json create mode 100644 scripts/sync-supported-tools.mjs create mode 100644 src/__tests__/completion-scripts.test.ts create mode 100644 src/__tests__/find-adapter-for-alias.test.ts create mode 100644 src/__tests__/helpers/adapter-contract.ts create mode 100644 src/__tests__/project-config-source-dir.test.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index e9cffb0..094544c 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -73,6 +73,10 @@ src/ └── utils.ts # Utility functions ``` +Additional root-level tooling: +- `docs/supported-tools.json` - Single source of truth for Supported Tools table rows +- `scripts/sync-supported-tools.mjs` - Regenerates README/README_ZH tool matrix from manifest + ### Adapter System The sync engine uses a plugin-based architecture with unified operations: @@ -896,7 +900,12 @@ ais user install - **Manual Install**: `ais completion install` - Installs completion to shell config file. - **Script Output**: `ais completion [bash|zsh|fish]` - Outputs raw completion script. - **Detection**: Automatically detects shell type from `$SHELL` environment variable. -- **Shell scripts** stored in `src/completion/scripts.ts`. +- **Generation Model**: Completion scripts are generated from shared metadata in `src/completion/scripts.ts` (bash/zsh/fish stay in sync). + +### 22. Supported Tools Documentation Generation +- **Single Source**: Supported tools matrix is defined in `docs/supported-tools.json`. +- **Sync Command**: `npm run docs:sync-tools` regenerates table blocks in both `README.md` and `README_ZH.md`. +- **Update Boundaries**: Table content is replaced between `` and ``. 
## Adapter Reference Table @@ -964,6 +973,16 @@ ais user install - Added **Windsurf support**: rules (`.windsurf/rules`, `.md`) and skills (`.windsurf/skills`) with full CLI/completion integration - Added **Cline support**: rules (`.clinerules`, `.md`/`.txt`) and skills (`.cline/skills`) with full CLI/completion integration +- Refactored `project-config` source directory resolution to **table-driven** logic for both legacy-string detection and `getSourceDir()` +- Refactored shell completion generation to **metadata-driven** script builders for bash/zsh/fish +- Added reusable **adapter contract test helper** (`src/__tests__/helpers/adapter-contract.ts`) to reduce duplicated adapter tests +- Added **single-source Supported Tools manifest** (`docs/supported-tools.json`) and sync script (`npm run docs:sync-tools`) for README parity +- Added **User Mode** (`--user` / `-u`): manage personal AI config files (`~/.claude/CLAUDE.md`, etc.) with version control; `ais user install` restores all symlinks on new machines +- Added **claude-md adapter**: sync CLAUDE.md-style files; `ais claude md add CLAUDE --user` +- Added **User Config Path**: `ais config user set ` for dotfiles integration +- Added **Gemini CLI support**: commands (`.toml`), skills (directory), subagents (`.md`) +- Added **OpenAI Codex support**: rules (`.rules`, Starlark), skills (`.agents/skills/`) +- Renamed deprecated `--global` / `-g` flags to `--user` / `-u` ### Proper User-Level Sync for All Tools + gemini-md / codex-md Adapters (2026-02) diff --git a/README.md b/README.md index 7b2e51b..9bc0de6 100644 --- a/README.md +++ b/README.md @@ -73,6 +73,9 @@ ais completion install ## Supported Tools +_This table is generated from `docs/supported-tools.json` via `npm run docs:sync-tools`._ + + | Tool | Type | Mode | Default Source Directory | File Suffixes | Documentation | |------|------|------|--------------------------|---------------|---------------| | Cursor | Rules | hybrid | `.cursor/rules/` | `.mdc`, 
`.md` | [Docs](https://cursor.com/docs/context/rules) | @@ -107,6 +110,7 @@ ais completion install | Cline | Rules | file | `.clinerules/` | `.md`, `.txt` | [Docs](https://docs.cline.bot/customization/cline-rules) | | Cline | Skills | directory | `.cline/skills/` | - | [Docs](https://docs.cline.bot/customization/skills) | | **Universal** | **AGENTS.md** | file | `.` (root) | `.md` | [Standard](https://agents.md/) | + **Modes:** - **directory**: Links entire directories (skills, agents) diff --git a/README_ZH.md b/README_ZH.md index 6a9ef11..3ce5d89 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -73,12 +73,15 @@ ais completion install ## 支持的工具 +_此表由 `docs/supported-tools.json` 通过 `npm run docs:sync-tools` 自动生成。_ + + | 工具 | 类型 | 模式 | 默认源目录 | 文件后缀 | 文档 | |------|------|------|------------|----------|------| | Cursor | Rules | hybrid | `.cursor/rules/` | `.mdc`, `.md` | [文档](https://cursor.com/docs/context/rules) | | Cursor | Commands | file | `.cursor/commands/` | `.md` | [文档](https://cursor.com/docs/context/commands) | | Cursor | Skills | directory | `.cursor/skills/` | - | [文档](https://cursor.com/docs/context/skills) | -| Cursor | subagents | directory | `.cursor/agents/` | - | [文档](https://cursor.com/docs/context/subagents) | +| Cursor | Subagents | directory | `.cursor/agents/` | - | [文档](https://cursor.com/docs/context/subagents) | | GitHub Copilot | Instructions | file | `.github/instructions/` | `.instructions.md`, `.md` | [文档](https://docs.github.com/en/copilot/how-tos/configure-custom-instructions/add-repository-instructions) | | GitHub Copilot | Prompts | file | `.github/prompts/` | `.prompt.md`, `.md` | [文档](https://docs.github.com/en/copilot/tutorials/customization-library/prompt-files/your-first-prompt-file) | | GitHub Copilot | Skills | directory | `.github/skills/` | - | [文档](https://docs.github.com/en/copilot/using-github-copilot/using-extensions-to-integrate-external-tools-with-copilot-chat) | @@ -107,6 +110,7 @@ ais completion install | Cline | 
Rules | file | `.clinerules/` | `.md`, `.txt` | [文档](https://docs.cline.bot/customization/cline-rules) | | Cline | Skills | directory | `.cline/skills/` | - | [文档](https://docs.cline.bot/customization/skills) | | **通用** | **AGENTS.md** | file | `.`(根目录) | `.md` | [标准](https://agents.md/) | + **模式说明:** - **directory**:链接整个目录(技能、代理) diff --git a/docs/supported-tools.json b/docs/supported-tools.json new file mode 100644 index 0000000..fa055a6 --- /dev/null +++ b/docs/supported-tools.json @@ -0,0 +1,394 @@ +[ + { + "toolEn": "Cursor", + "toolZh": "Cursor", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "hybrid", + "pathEn": ".cursor/rules/", + "pathZh": ".cursor/rules/", + "suffix": ".mdc, .md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://cursor.com/docs/context/rules" + }, + { + "toolEn": "Cursor", + "toolZh": "Cursor", + "typeEn": "Commands", + "typeZh": "Commands", + "mode": "file", + "pathEn": ".cursor/commands/", + "pathZh": ".cursor/commands/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://cursor.com/docs/context/commands" + }, + { + "toolEn": "Cursor", + "toolZh": "Cursor", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".cursor/skills/", + "pathZh": ".cursor/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://cursor.com/docs/context/skills" + }, + { + "toolEn": "Cursor", + "toolZh": "Cursor", + "typeEn": "Subagents", + "typeZh": "Subagents", + "mode": "directory", + "pathEn": ".cursor/agents/", + "pathZh": ".cursor/agents/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://cursor.com/docs/context/subagents" + }, + { + "toolEn": "GitHub Copilot", + "toolZh": "GitHub Copilot", + "typeEn": "Instructions", + "typeZh": "Instructions", + "mode": "file", + "pathEn": ".github/instructions/", + "pathZh": ".github/instructions/", + "suffix": ".instructions.md, .md", + "docLabelEn": "Docs", + 
"docLabelZh": "文档", + "docUrl": "https://docs.github.com/en/copilot/how-tos/configure-custom-instructions/add-repository-instructions" + }, + { + "toolEn": "GitHub Copilot", + "toolZh": "GitHub Copilot", + "typeEn": "Prompts", + "typeZh": "Prompts", + "mode": "file", + "pathEn": ".github/prompts/", + "pathZh": ".github/prompts/", + "suffix": ".prompt.md, .md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.github.com/en/copilot/tutorials/customization-library/prompt-files/your-first-prompt-file" + }, + { + "toolEn": "GitHub Copilot", + "toolZh": "GitHub Copilot", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".github/skills/", + "pathZh": ".github/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.github.com/en/copilot/using-github-copilot/using-extensions-to-integrate-external-tools-with-copilot-chat" + }, + { + "toolEn": "GitHub Copilot", + "toolZh": "GitHub Copilot", + "typeEn": "Agents", + "typeZh": "Agents", + "mode": "file", + "pathEn": ".github/agents/", + "pathZh": ".github/agents/", + "suffix": ".agent.md, .md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.github.com/en/copilot/how-tos/use-copilot-agents/coding-agent/create-custom-agents" + }, + { + "toolEn": "Claude Code", + "toolZh": "Claude Code", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ".claude/rules/", + "pathZh": ".claude/rules/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://code.claude.com/docs/en/memory" + }, + { + "toolEn": "Claude Code", + "toolZh": "Claude Code", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".claude/skills/", + "pathZh": ".claude/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://code.claude.com/docs/en/skills" + }, + { + "toolEn": "Claude Code", + "toolZh": "Claude Code", + "typeEn": "Subagents", + 
"typeZh": "Subagents", + "mode": "directory", + "pathEn": ".claude/agents/", + "pathZh": ".claude/agents/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://code.claude.com/docs/en/sub-agents" + }, + { + "toolEn": "Claude Code", + "toolZh": "Claude Code", + "typeEn": "CLAUDE.md", + "typeZh": "CLAUDE.md", + "mode": "file", + "pathEn": ".claude/", + "pathZh": ".claude/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.anthropic.com/en/docs/claude-code/memory" + }, + { + "toolEn": "Trae", + "toolZh": "Trae", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ".trae/rules/", + "pathZh": ".trae/rules/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.trae.ai/ide/rules" + }, + { + "toolEn": "Trae", + "toolZh": "Trae", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".trae/skills/", + "pathZh": ".trae/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.trae.ai/ide/skills" + }, + { + "toolEn": "OpenCode", + "toolZh": "OpenCode", + "typeEn": "Commands", + "typeZh": "Commands", + "mode": "file", + "pathEn": ".opencode/commands/", + "pathZh": ".opencode/commands/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://opencode.ai/docs/commands/" + }, + { + "toolEn": "OpenCode", + "toolZh": "OpenCode", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".opencode/skills/", + "pathZh": ".opencode/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://opencode.ai/docs/skills/" + }, + { + "toolEn": "OpenCode", + "toolZh": "OpenCode", + "typeEn": "Agents", + "typeZh": "Agents", + "mode": "file", + "pathEn": ".opencode/agents/", + "pathZh": ".opencode/agents/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": 
"https://opencode.ai/docs/agents/" + }, + { + "toolEn": "OpenCode", + "toolZh": "OpenCode", + "typeEn": "Tools", + "typeZh": "Tools", + "mode": "file", + "pathEn": ".opencode/tools/", + "pathZh": ".opencode/tools/", + "suffix": ".ts, .js", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://opencode.ai/docs/tools/" + }, + { + "toolEn": "Codex", + "toolZh": "Codex", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ".codex/rules/", + "pathZh": ".codex/rules/", + "suffix": ".rules", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://developers.openai.com/codex/rules" + }, + { + "toolEn": "Codex", + "toolZh": "Codex", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".agents/skills/", + "pathZh": ".agents/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://developers.openai.com/codex/skills" + }, + { + "toolEn": "Gemini CLI", + "toolZh": "Gemini CLI", + "typeEn": "Commands", + "typeZh": "Commands", + "mode": "file", + "pathEn": ".gemini/commands/", + "pathZh": ".gemini/commands/", + "suffix": ".toml", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://geminicli.com/docs/cli/custom-commands/" + }, + { + "toolEn": "Gemini CLI", + "toolZh": "Gemini CLI", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".gemini/skills/", + "pathZh": ".gemini/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://geminicli.com/docs/cli/skills/" + }, + { + "toolEn": "Gemini CLI", + "toolZh": "Gemini CLI", + "typeEn": "Subagents", + "typeZh": "Subagents", + "mode": "file", + "pathEn": ".gemini/agents/", + "pathZh": ".gemini/agents/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://geminicli.com/docs/core/subagents/" + }, + { + "toolEn": "Warp", + "toolZh": "Warp", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ". 
(root)", + "pathZh": ".(根目录)", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.warp.dev/agent-platform/capabilities/rules", + "noteEn": "same as AGENTS.md, use `ais agents-md`", + "noteZh": "与 AGENTS.md 相同,使用 `ais agents-md`" + }, + { + "toolEn": "Warp", + "toolZh": "Warp", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".agents/skills/", + "pathZh": ".agents/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.warp.dev/agent-platform/capabilities/skills" + }, + { + "toolEn": "Windsurf", + "toolZh": "Windsurf", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ".windsurf/rules/", + "pathZh": ".windsurf/rules/", + "suffix": ".md", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.windsurf.com/windsurf/cascade/memories" + }, + { + "toolEn": "Windsurf", + "toolZh": "Windsurf", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".windsurf/skills/", + "pathZh": ".windsurf/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.windsurf.com/windsurf/cascade/skills" + }, + { + "toolEn": "Cline", + "toolZh": "Cline", + "typeEn": "Rules", + "typeZh": "Rules", + "mode": "file", + "pathEn": ".clinerules/", + "pathZh": ".clinerules/", + "suffix": ".md, .txt", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.cline.bot/customization/cline-rules" + }, + { + "toolEn": "Cline", + "toolZh": "Cline", + "typeEn": "Skills", + "typeZh": "Skills", + "mode": "directory", + "pathEn": ".cline/skills/", + "pathZh": ".cline/skills/", + "suffix": "-", + "docLabelEn": "Docs", + "docLabelZh": "文档", + "docUrl": "https://docs.cline.bot/customization/skills" + }, + { + "toolEn": "**Universal**", + "toolZh": "**通用**", + "typeEn": "**AGENTS.md**", + "typeZh": "**AGENTS.md**", + "mode": "file", + "pathEn": ". 
(root)", + "pathZh": ".(根目录)", + "suffix": ".md", + "docLabelEn": "Standard", + "docLabelZh": "标准", + "docUrl": "https://agents.md/" + } +] diff --git a/package.json b/package.json index 28b927b..1322add 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,8 @@ }, "scripts": { "build": "tsc", - "test": "vitest" + "test": "vitest", + "docs:sync-tools": "node scripts/sync-supported-tools.mjs" }, "files": [ "dist" diff --git a/scripts/sync-supported-tools.mjs b/scripts/sync-supported-tools.mjs new file mode 100644 index 0000000..e23426f --- /dev/null +++ b/scripts/sync-supported-tools.mjs @@ -0,0 +1,106 @@ +#!/usr/bin/env node +import { readFile, writeFile } from 'node:fs/promises'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const START_MARKER = '<!-- SUPPORTED_TOOLS_TABLE:START -->'; +const END_MARKER = '<!-- SUPPORTED_TOOLS_TABLE:END -->'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); +const rootDir = path.resolve(__dirname, '..'); +const dataPath = path.join(rootDir, 'docs', 'supported-tools.json'); + +const targets = [ + { + filePath: path.join(rootDir, 'README.md'), + lang: 'en', + header: '| Tool | Type | Mode | Default Source Directory | File Suffixes | Documentation |', + separator: '|------|------|------|--------------------------|---------------|---------------|' + }, + { + filePath: path.join(rootDir, 'README_ZH.md'), + lang: 'zh', + header: '| 工具 | 类型 | 模式 | 默认源目录 | 文件后缀 | 文档 |', + separator: '|------|------|------|------------|----------|------|' + } +]; + +function formatPathCell(pathValue) { + if (pathValue.includes(' (root)')) { + return `\`${pathValue.replace(' (root)', '')}\` (root)`; + } + if (pathValue.includes('(根目录)')) { + return `\`${pathValue.replace('(根目录)', '')}\`(根目录)`; + } + return `\`${pathValue}\``; + } + +function formatSuffixCell(suffixValue) { + if (suffixValue === '-') { + return '-'; + } + const parts = suffixValue.split(',').map(part => part.trim()).filter(Boolean); + if (parts.length === 0) { + return '-'; + } + 
return parts.map(part => `\`${part}\``).join(', '); +} + +function formatDocumentationCell(entry, lang) { + const label = lang === 'en' ? entry.docLabelEn : entry.docLabelZh; + const note = lang === 'en' ? entry.noteEn : entry.noteZh; + let cell = `[${label}](${entry.docUrl})`; + if (note) { + cell += ` — ${note}`; + } + return cell; +} + +function buildTableRows(entries, lang) { + return entries.map((entry) => { + const tool = lang === 'en' ? entry.toolEn : entry.toolZh; + const type = lang === 'en' ? entry.typeEn : entry.typeZh; + const pathValue = lang === 'en' ? entry.pathEn : entry.pathZh; + return `| ${tool} | ${type} | ${entry.mode} | ${formatPathCell(pathValue)} | ${formatSuffixCell(entry.suffix)} | ${formatDocumentationCell(entry, lang)} |`; + }); +} + +function updateMarkedBlock(content, replacement) { + const startIndex = content.indexOf(START_MARKER); + const endIndex = content.indexOf(END_MARKER); + if (startIndex === -1 || endIndex === -1 || endIndex < startIndex) { + throw new Error(`Missing or invalid markers: ${START_MARKER} / ${END_MARKER}`); + } + + const before = content.slice(0, startIndex + START_MARKER.length); + const after = content.slice(endIndex); + return `${before}\n${replacement}\n${after}`; +} + +async function main() { + const raw = await readFile(dataPath, 'utf8'); + const entries = JSON.parse(raw); + + for (const target of targets) { + const tableLines = [ + target.header, + target.separator, + ...buildTableRows(entries, target.lang) + ].join('\n'); + + const original = await readFile(target.filePath, 'utf8'); + const updated = updateMarkedBlock(original, tableLines); + if (updated !== original) { + await writeFile(target.filePath, updated, 'utf8'); + console.log(`Updated: ${path.relative(rootDir, target.filePath)}`); + } else { + console.log(`Unchanged: ${path.relative(rootDir, target.filePath)}`); + } + } +} + +main().catch((error) => { + console.error(error.message); + process.exit(1); +}); diff --git 
a/src/__tests__/claude-rules.test.ts b/src/__tests__/claude-rules.test.ts index 439ae2d..8a26048 100644 --- a/src/__tests__/claude-rules.test.ts +++ b/src/__tests__/claude-rules.test.ts @@ -1,36 +1,21 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { claudeRulesAdapter } from '../adapters/claude-rules.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('claude-rules adapter', () => { - it('should have correct basic properties', () => { - expect(claudeRulesAdapter.name).toBe('claude-rules'); - expect(claudeRulesAdapter.tool).toBe('claude'); - expect(claudeRulesAdapter.subtype).toBe('rules'); - expect(claudeRulesAdapter.defaultSourceDir).toBe('.claude/rules'); - expect(claudeRulesAdapter.targetDir).toBe('.claude/rules'); - expect(claudeRulesAdapter.mode).toBe('file'); - expect(claudeRulesAdapter.fileSuffixes).toEqual(['.md']); - }); - - it('should have correct config path', () => { - expect(claudeRulesAdapter.configPath).toEqual(['claude', 'rules']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('claude-rules'); - expect(retrieved).toBe(claudeRulesAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('claude', 'rules'); - expect(retrieved).toBe(claudeRulesAdapter); - }); - - it('should have required adapter methods', () => { - expect(claudeRulesAdapter.addDependency).toBeDefined(); - expect(claudeRulesAdapter.removeDependency).toBeDefined(); - expect(claudeRulesAdapter.link).toBeDefined(); - expect(claudeRulesAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: claudeRulesAdapter, + registry: adapterRegistry, + expected: { + name: 'claude-rules', + tool: 'claude', + subtype: 'rules', + defaultSourceDir: '.claude/rules', + targetDir: '.claude/rules', + mode: 'file', + configPath: 
['claude', 'rules'], + fileSuffixes: ['.md'] + } }); }); diff --git a/src/__tests__/cline-rules.test.ts b/src/__tests__/cline-rules.test.ts index 039dbef..b3b4263 100644 --- a/src/__tests__/cline-rules.test.ts +++ b/src/__tests__/cline-rules.test.ts @@ -1,36 +1,21 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { clineRulesAdapter } from '../adapters/cline-rules.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('cline-rules adapter', () => { - it('should have correct basic properties', () => { - expect(clineRulesAdapter.name).toBe('cline-rules'); - expect(clineRulesAdapter.tool).toBe('cline'); - expect(clineRulesAdapter.subtype).toBe('rules'); - expect(clineRulesAdapter.defaultSourceDir).toBe('.clinerules'); - expect(clineRulesAdapter.targetDir).toBe('.clinerules'); - expect(clineRulesAdapter.mode).toBe('file'); - expect(clineRulesAdapter.fileSuffixes).toEqual(['.md', '.txt']); - }); - - it('should have correct config path', () => { - expect(clineRulesAdapter.configPath).toEqual(['cline', 'rules']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('cline-rules'); - expect(retrieved).toBe(clineRulesAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('cline', 'rules'); - expect(retrieved).toBe(clineRulesAdapter); - }); - - it('should have required adapter methods', () => { - expect(clineRulesAdapter.addDependency).toBeDefined(); - expect(clineRulesAdapter.removeDependency).toBeDefined(); - expect(clineRulesAdapter.link).toBeDefined(); - expect(clineRulesAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: clineRulesAdapter, + registry: adapterRegistry, + expected: { + name: 'cline-rules', + tool: 'cline', + subtype: 'rules', + defaultSourceDir: '.clinerules', + targetDir: 
'.clinerules', + mode: 'file', + configPath: ['cline', 'rules'], + fileSuffixes: ['.md', '.txt'] + } }); }); diff --git a/src/__tests__/cline-skills.test.ts b/src/__tests__/cline-skills.test.ts index 2fc0f9e..bf88f76 100644 --- a/src/__tests__/cline-skills.test.ts +++ b/src/__tests__/cline-skills.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { clineSkillsAdapter } from '../adapters/cline-skills.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('cline-skills adapter', () => { - it('should have correct basic properties', () => { - expect(clineSkillsAdapter.name).toBe('cline-skills'); - expect(clineSkillsAdapter.tool).toBe('cline'); - expect(clineSkillsAdapter.subtype).toBe('skills'); - expect(clineSkillsAdapter.defaultSourceDir).toBe('.cline/skills'); - expect(clineSkillsAdapter.targetDir).toBe('.cline/skills'); - expect(clineSkillsAdapter.mode).toBe('directory'); - }); - - it('should have correct config path', () => { - expect(clineSkillsAdapter.configPath).toEqual(['cline', 'skills']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('cline-skills'); - expect(retrieved).toBe(clineSkillsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('cline', 'skills'); - expect(retrieved).toBe(clineSkillsAdapter); - }); - - it('should have required adapter methods', () => { - expect(clineSkillsAdapter.addDependency).toBeDefined(); - expect(clineSkillsAdapter.removeDependency).toBeDefined(); - expect(clineSkillsAdapter.link).toBeDefined(); - expect(clineSkillsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: clineSkillsAdapter, + registry: adapterRegistry, + expected: { + name: 'cline-skills', + tool: 'cline', + subtype: 'skills', + defaultSourceDir: '.cline/skills', + 
targetDir: '.cline/skills', + mode: 'directory', + configPath: ['cline', 'skills'] + } }); }); diff --git a/src/__tests__/completion-scripts.test.ts b/src/__tests__/completion-scripts.test.ts new file mode 100644 index 0000000..c6a7a51 --- /dev/null +++ b/src/__tests__/completion-scripts.test.ts @@ -0,0 +1,31 @@ +import { describe, expect, it } from 'vitest'; +import { bashScript, fishScript, getCompletionScript, zshScript } from '../completion/scripts.js'; + +describe('completion scripts metadata generation', () => { + it('should include windsurf and cline skills completion in bash script', () => { + expect(bashScript).toContain('ais _complete windsurf-skills'); + expect(bashScript).toContain('ais _complete cline-skills'); + expect(bashScript).toContain('if [[ "$prev" == "windsurf" ]]'); + expect(bashScript).toContain('if [[ "$prev" == "cline" ]]'); + }); + + it('should include nested subcommand arrays and dynamic add completion in zsh script', () => { + expect(zshScript).toContain('windsurf_skills_subcmds'); + expect(zshScript).toContain('cline_skills_subcmds'); + expect(zshScript).toContain('ais _complete cursor-commands'); + expect(zshScript).toContain('ais _complete windsurf-rules'); + }); + + it('should include metadata-driven fish completion entries', () => { + expect(fishScript).toContain('__fish_use_subcommand" -a "windsurf"'); + expect(fishScript).toContain('__fish_use_subcommand" -a "cline"'); + expect(fishScript).toContain('ais _complete windsurf-skills'); + expect(fishScript).toContain('ais _complete cline-skills'); + }); + + it('should return trimmed completion scripts for each shell', () => { + expect(getCompletionScript('bash')).toBe(bashScript.trim()); + expect(getCompletionScript('zsh')).toBe(zshScript.trim()); + expect(getCompletionScript('fish')).toBe(fishScript.trim()); + }); +}); diff --git a/src/__tests__/copilot-agents.test.ts b/src/__tests__/copilot-agents.test.ts index 046fce7..dde7efe 100644 --- a/src/__tests__/copilot-agents.test.ts +++ 
b/src/__tests__/copilot-agents.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { copilotAgentsAdapter } from '../adapters/copilot-agents.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('copilot-agents adapter', () => { - it('should have correct basic properties', () => { - expect(copilotAgentsAdapter.name).toBe('copilot-agents'); - expect(copilotAgentsAdapter.tool).toBe('copilot'); - expect(copilotAgentsAdapter.subtype).toBe('agents'); - expect(copilotAgentsAdapter.defaultSourceDir).toBe('.github/agents'); - expect(copilotAgentsAdapter.targetDir).toBe('.github/agents'); - expect(copilotAgentsAdapter.mode).toBe('file'); - }); - - it('should have correct config path', () => { - expect(copilotAgentsAdapter.configPath).toEqual(['copilot', 'agents']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('copilot-agents'); - expect(retrieved).toBe(copilotAgentsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('copilot', 'agents'); - expect(retrieved).toBe(copilotAgentsAdapter); - }); - - it('should have required adapter methods', () => { - expect(copilotAgentsAdapter.addDependency).toBeDefined(); - expect(copilotAgentsAdapter.removeDependency).toBeDefined(); - expect(copilotAgentsAdapter.link).toBeDefined(); - expect(copilotAgentsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: copilotAgentsAdapter, + registry: adapterRegistry, + expected: { + name: 'copilot-agents', + tool: 'copilot', + subtype: 'agents', + defaultSourceDir: '.github/agents', + targetDir: '.github/agents', + mode: 'file', + configPath: ['copilot', 'agents'] + } }); }); diff --git a/src/__tests__/copilot-prompts.test.ts b/src/__tests__/copilot-prompts.test.ts index df90f81..4043b38 100644 --- 
a/src/__tests__/copilot-prompts.test.ts +++ b/src/__tests__/copilot-prompts.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { copilotPromptsAdapter } from '../adapters/copilot-prompts.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('copilot-prompts adapter', () => { - it('should have correct basic properties', () => { - expect(copilotPromptsAdapter.name).toBe('copilot-prompts'); - expect(copilotPromptsAdapter.tool).toBe('copilot'); - expect(copilotPromptsAdapter.subtype).toBe('prompts'); - expect(copilotPromptsAdapter.defaultSourceDir).toBe('.github/prompts'); - expect(copilotPromptsAdapter.targetDir).toBe('.github/prompts'); - expect(copilotPromptsAdapter.mode).toBe('file'); - }); - - it('should have correct config path', () => { - expect(copilotPromptsAdapter.configPath).toEqual(['copilot', 'prompts']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('copilot-prompts'); - expect(retrieved).toBe(copilotPromptsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('copilot', 'prompts'); - expect(retrieved).toBe(copilotPromptsAdapter); - }); - - it('should have required adapter methods', () => { - expect(copilotPromptsAdapter.addDependency).toBeDefined(); - expect(copilotPromptsAdapter.removeDependency).toBeDefined(); - expect(copilotPromptsAdapter.link).toBeDefined(); - expect(copilotPromptsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: copilotPromptsAdapter, + registry: adapterRegistry, + expected: { + name: 'copilot-prompts', + tool: 'copilot', + subtype: 'prompts', + defaultSourceDir: '.github/prompts', + targetDir: '.github/prompts', + mode: 'file', + configPath: ['copilot', 'prompts'] + } }); }); diff --git a/src/__tests__/copilot-skills.test.ts 
b/src/__tests__/copilot-skills.test.ts index c9de55d..4315fdd 100644 --- a/src/__tests__/copilot-skills.test.ts +++ b/src/__tests__/copilot-skills.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { copilotSkillsAdapter } from '../adapters/copilot-skills.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('copilot-skills adapter', () => { - it('should have correct basic properties', () => { - expect(copilotSkillsAdapter.name).toBe('copilot-skills'); - expect(copilotSkillsAdapter.tool).toBe('copilot'); - expect(copilotSkillsAdapter.subtype).toBe('skills'); - expect(copilotSkillsAdapter.defaultSourceDir).toBe('.github/skills'); - expect(copilotSkillsAdapter.targetDir).toBe('.github/skills'); - expect(copilotSkillsAdapter.mode).toBe('directory'); - }); - - it('should have correct config path', () => { - expect(copilotSkillsAdapter.configPath).toEqual(['copilot', 'skills']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('copilot-skills'); - expect(retrieved).toBe(copilotSkillsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('copilot', 'skills'); - expect(retrieved).toBe(copilotSkillsAdapter); - }); - - it('should have required adapter methods', () => { - expect(copilotSkillsAdapter.addDependency).toBeDefined(); - expect(copilotSkillsAdapter.removeDependency).toBeDefined(); - expect(copilotSkillsAdapter.link).toBeDefined(); - expect(copilotSkillsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: copilotSkillsAdapter, + registry: adapterRegistry, + expected: { + name: 'copilot-skills', + tool: 'copilot', + subtype: 'skills', + defaultSourceDir: '.github/skills', + targetDir: '.github/skills', + mode: 'directory', + configPath: ['copilot', 'skills'] + } }); }); 
diff --git a/src/__tests__/find-adapter-for-alias.test.ts b/src/__tests__/find-adapter-for-alias.test.ts new file mode 100644 index 0000000..aa719bf --- /dev/null +++ b/src/__tests__/find-adapter-for-alias.test.ts @@ -0,0 +1,58 @@ +import { describe, it, expect } from 'vitest'; +import { findAdapterForAlias } from '../adapters/index.js'; +import type { ProjectConfig } from '../project-config.js'; + +describe('findAdapterForAlias', () => { + it('should resolve windsurf skill alias', () => { + const config: ProjectConfig = { + windsurf: { + skills: { + deploy: 'https://example.com/repo.git' + } + } + }; + + const found = findAdapterForAlias(config, 'deploy'); + expect(found?.adapter.name).toBe('windsurf-skills'); + expect(found?.section).toBe('windsurf.skills'); + }); + + it('should resolve cline rule alias', () => { + const config: ProjectConfig = { + cline: { + rules: { + coding: 'https://example.com/repo.git' + } + } + }; + + const found = findAdapterForAlias(config, 'coding'); + expect(found?.adapter.name).toBe('cline-rules'); + expect(found?.section).toBe('cline.rules'); + }); + + it('should resolve flat agentsMd alias', () => { + const config: ProjectConfig = { + agentsMd: { + AGENTS: 'https://example.com/repo.git' + } + }; + + const found = findAdapterForAlias(config, 'AGENTS'); + expect(found?.adapter.name).toBe('agents-md-file'); + expect(found?.section).toBe('agentsMd'); + }); + + it('should return null when alias does not exist', () => { + const config: ProjectConfig = { + windsurf: { + rules: { + style: 'https://example.com/repo.git' + } + } + }; + + const found = findAdapterForAlias(config, 'unknown'); + expect(found).toBeNull(); + }); +}); diff --git a/src/__tests__/gemini-agents.test.ts b/src/__tests__/gemini-agents.test.ts index c244d34..7cbcefc 100644 --- a/src/__tests__/gemini-agents.test.ts +++ b/src/__tests__/gemini-agents.test.ts @@ -1,36 +1,21 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { 
geminiAgentsAdapter } from '../adapters/gemini-agents.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('gemini-agents adapter', () => { - it('should have correct basic properties', () => { - expect(geminiAgentsAdapter.name).toBe('gemini-agents'); - expect(geminiAgentsAdapter.tool).toBe('gemini'); - expect(geminiAgentsAdapter.subtype).toBe('agents'); - expect(geminiAgentsAdapter.defaultSourceDir).toBe('.gemini/agents'); - expect(geminiAgentsAdapter.targetDir).toBe('.gemini/agents'); - expect(geminiAgentsAdapter.mode).toBe('file'); - expect(geminiAgentsAdapter.fileSuffixes).toEqual(['.md']); - }); - - it('should have correct config path', () => { - expect(geminiAgentsAdapter.configPath).toEqual(['gemini', 'agents']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('gemini-agents'); - expect(retrieved).toBe(geminiAgentsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('gemini', 'agents'); - expect(retrieved).toBe(geminiAgentsAdapter); - }); - - it('should have required adapter methods', () => { - expect(geminiAgentsAdapter.addDependency).toBeDefined(); - expect(geminiAgentsAdapter.removeDependency).toBeDefined(); - expect(geminiAgentsAdapter.link).toBeDefined(); - expect(geminiAgentsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: geminiAgentsAdapter, + registry: adapterRegistry, + expected: { + name: 'gemini-agents', + tool: 'gemini', + subtype: 'agents', + defaultSourceDir: '.gemini/agents', + targetDir: '.gemini/agents', + mode: 'file', + configPath: ['gemini', 'agents'], + fileSuffixes: ['.md'] + } }); }); diff --git a/src/__tests__/gemini-commands.test.ts b/src/__tests__/gemini-commands.test.ts index 669478a..d7c653b 100644 --- a/src/__tests__/gemini-commands.test.ts +++ b/src/__tests__/gemini-commands.test.ts @@ 
-1,36 +1,21 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { geminiCommandsAdapter } from '../adapters/gemini-commands.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('gemini-commands adapter', () => { - it('should have correct basic properties', () => { - expect(geminiCommandsAdapter.name).toBe('gemini-commands'); - expect(geminiCommandsAdapter.tool).toBe('gemini'); - expect(geminiCommandsAdapter.subtype).toBe('commands'); - expect(geminiCommandsAdapter.defaultSourceDir).toBe('.gemini/commands'); - expect(geminiCommandsAdapter.targetDir).toBe('.gemini/commands'); - expect(geminiCommandsAdapter.mode).toBe('file'); - expect(geminiCommandsAdapter.fileSuffixes).toEqual(['.toml']); - }); - - it('should have correct config path', () => { - expect(geminiCommandsAdapter.configPath).toEqual(['gemini', 'commands']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('gemini-commands'); - expect(retrieved).toBe(geminiCommandsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('gemini', 'commands'); - expect(retrieved).toBe(geminiCommandsAdapter); - }); - - it('should have required adapter methods', () => { - expect(geminiCommandsAdapter.addDependency).toBeDefined(); - expect(geminiCommandsAdapter.removeDependency).toBeDefined(); - expect(geminiCommandsAdapter.link).toBeDefined(); - expect(geminiCommandsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: geminiCommandsAdapter, + registry: adapterRegistry, + expected: { + name: 'gemini-commands', + tool: 'gemini', + subtype: 'commands', + defaultSourceDir: '.gemini/commands', + targetDir: '.gemini/commands', + mode: 'file', + configPath: ['gemini', 'commands'], + fileSuffixes: ['.toml'] + } }); }); diff --git 
a/src/__tests__/gemini-skills.test.ts b/src/__tests__/gemini-skills.test.ts index d362551..95d09d8 100644 --- a/src/__tests__/gemini-skills.test.ts +++ b/src/__tests__/gemini-skills.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { geminiSkillsAdapter } from '../adapters/gemini-skills.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('gemini-skills adapter', () => { - it('should have correct basic properties', () => { - expect(geminiSkillsAdapter.name).toBe('gemini-skills'); - expect(geminiSkillsAdapter.tool).toBe('gemini'); - expect(geminiSkillsAdapter.subtype).toBe('skills'); - expect(geminiSkillsAdapter.defaultSourceDir).toBe('.gemini/skills'); - expect(geminiSkillsAdapter.targetDir).toBe('.gemini/skills'); - expect(geminiSkillsAdapter.mode).toBe('directory'); - }); - - it('should have correct config path', () => { - expect(geminiSkillsAdapter.configPath).toEqual(['gemini', 'skills']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('gemini-skills'); - expect(retrieved).toBe(geminiSkillsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('gemini', 'skills'); - expect(retrieved).toBe(geminiSkillsAdapter); - }); - - it('should have required adapter methods', () => { - expect(geminiSkillsAdapter.addDependency).toBeDefined(); - expect(geminiSkillsAdapter.removeDependency).toBeDefined(); - expect(geminiSkillsAdapter.link).toBeDefined(); - expect(geminiSkillsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: geminiSkillsAdapter, + registry: adapterRegistry, + expected: { + name: 'gemini-skills', + tool: 'gemini', + subtype: 'skills', + defaultSourceDir: '.gemini/skills', + targetDir: '.gemini/skills', + mode: 'directory', + configPath: ['gemini', 'skills'] + } 
}); }); diff --git a/src/__tests__/helpers/adapter-contract.ts b/src/__tests__/helpers/adapter-contract.ts new file mode 100644 index 0000000..742abd8 --- /dev/null +++ b/src/__tests__/helpers/adapter-contract.ts @@ -0,0 +1,59 @@ +import { expect, it } from 'vitest'; +import type { AdapterRegistry, SyncAdapter } from '../../adapters/types.js'; + +interface StandardAdapterExpectation { + name: string; + tool: string; + subtype: string; + defaultSourceDir: string; + targetDir: string; + mode: 'directory' | 'file' | 'hybrid'; + configPath: [string, string]; + fileSuffixes?: string[]; +} + +interface StandardAdapterContractOptions { + adapter: SyncAdapter; + registry: AdapterRegistry; + expected: StandardAdapterExpectation; +} + +/** + * Shared contract tests for standard adapters created via createBaseAdapter. + */ +export function runStandardAdapterContract(options: StandardAdapterContractOptions): void { + const { adapter, registry, expected } = options; + + it('should have correct basic properties', () => { + expect(adapter.name).toBe(expected.name); + expect(adapter.tool).toBe(expected.tool); + expect(adapter.subtype).toBe(expected.subtype); + expect(adapter.defaultSourceDir).toBe(expected.defaultSourceDir); + expect(adapter.targetDir).toBe(expected.targetDir); + expect(adapter.mode).toBe(expected.mode); + if (expected.fileSuffixes) { + expect(adapter.fileSuffixes).toEqual(expected.fileSuffixes); + } + }); + + it('should have correct config path', () => { + expect(adapter.configPath).toEqual(expected.configPath); + }); + + it('should be registered in adapterRegistry', () => { + const retrieved = registry.getByName(expected.name); + expect(retrieved).toBe(adapter); + }); + + it('should be retrievable by tool and subtype', () => { + const retrieved = registry.get(expected.tool, expected.subtype); + expect(retrieved).toBe(adapter); + }); + + it('should have required adapter methods', () => { + expect(adapter.addDependency).toBeDefined(); + 
expect(adapter.removeDependency).toBeDefined(); + expect(adapter.link).toBeDefined(); + expect(adapter.unlink).toBeDefined(); + }); +} diff --git a/src/__tests__/project-config-source-dir.test.ts b/src/__tests__/project-config-source-dir.test.ts new file mode 100644 index 0000000..d45e9be --- /dev/null +++ b/src/__tests__/project-config-source-dir.test.ts @@ -0,0 +1,111 @@ +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import fs from 'fs-extra'; +import os from 'os'; +import path from 'path'; +import { getRepoSourceConfig, getSourceDir } from '../project-config.js'; +import type { ProjectConfig, RepoSourceConfig, SourceDirConfig } from '../project-config.js'; + +describe('project-config source directory resolution', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-project-config-')); + }); + + afterEach(async () => { + await fs.remove(tempDir); + }); + + it('should prioritize global override over repo config in getSourceDir', () => { + const repoConfig: RepoSourceConfig = { + rootPath: 'repo-root', + windsurf: { + skills: '.windsurf/repo-skills' + } + }; + const override: SourceDirConfig = { + windsurf: { + skills: '.windsurf/override-skills' + } + }; + + expect(getSourceDir(repoConfig, 'windsurf', 'skills', '.windsurf/skills', override)).toBe('.windsurf/override-skills'); + expect(getSourceDir(repoConfig, 'windsurf', 'skills', '.windsurf/skills')).toBe(path.join('repo-root', '.windsurf/repo-skills')); + expect(getSourceDir({ rootPath: 'repo-root' }, 'windsurf', 'skills', '.windsurf/skills')).toBe(path.join('repo-root', '.windsurf/skills')); + }); + + it('should parse new sourceDir format from ai-rules-sync.json', async () => { + const config: ProjectConfig = { + rootPath: 'rules-root', + sourceDir: { + windsurf: { + rules: '.windsurf/rules', + skills: '.windsurf/skills' + }, + cline: { + rules: '.clinerules', + skills: '.cline/skills' + } + } + }; + + await 
fs.writeJson(path.join(tempDir, 'ai-rules-sync.json'), config, { spaces: 2 }); + + const repoConfig = await getRepoSourceConfig(tempDir); + expect(repoConfig.rootPath).toBe('rules-root'); + expect(repoConfig.windsurf?.rules).toBe('.windsurf/rules'); + expect(repoConfig.windsurf?.skills).toBe('.windsurf/skills'); + expect(repoConfig.cline?.rules).toBe('.clinerules'); + expect(repoConfig.cline?.skills).toBe('.cline/skills'); + }); + + it('should parse legacy string-based source paths and ignore dependency objects', async () => { + const config = { + rootPath: 'legacy-root', + cursor: { + rules: { + localAlias: 'https://example.com/repo.git' + } + }, + windsurf: { + rules: '.windsurf/rules' + }, + cline: { + skills: '.cline/skills' + }, + agentsMd: { + file: 'agents-md' + } + }; + + await fs.writeJson(path.join(tempDir, 'ai-rules-sync.json'), config, { spaces: 2 }); + + const repoConfig = await getRepoSourceConfig(tempDir); + expect(repoConfig.rootPath).toBe('legacy-root'); + expect(repoConfig.windsurf?.rules).toBe('.windsurf/rules'); + expect(repoConfig.cline?.skills).toBe('.cline/skills'); + expect(repoConfig.agentsMd?.file).toBe('agents-md'); + expect(repoConfig.cursor?.rules).toBeUndefined(); + }); + + it('should return only rootPath when config only contains dependency objects', async () => { + const config: ProjectConfig = { + rootPath: 'project-root', + cursor: { + rules: { + localAlias: 'https://example.com/repo.git' + } + }, + windsurf: { + skills: { + deploy: 'https://example.com/repo.git' + } + } + }; + + await fs.writeJson(path.join(tempDir, 'ai-rules-sync.json'), config, { spaces: 2 }); + + const repoConfig = await getRepoSourceConfig(tempDir); + expect(repoConfig).toEqual({ rootPath: 'project-root' }); + }); +}); diff --git a/src/__tests__/warp-skills.test.ts b/src/__tests__/warp-skills.test.ts index e889323..a9b53dd 100644 --- a/src/__tests__/warp-skills.test.ts +++ b/src/__tests__/warp-skills.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } 
from 'vitest'; +import { describe } from 'vitest'; import { warpSkillsAdapter } from '../adapters/warp-skills.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('warp-skills adapter', () => { - it('should have correct basic properties', () => { - expect(warpSkillsAdapter.name).toBe('warp-skills'); - expect(warpSkillsAdapter.tool).toBe('warp'); - expect(warpSkillsAdapter.subtype).toBe('skills'); - expect(warpSkillsAdapter.defaultSourceDir).toBe('.agents/skills'); - expect(warpSkillsAdapter.targetDir).toBe('.agents/skills'); - expect(warpSkillsAdapter.mode).toBe('directory'); - }); - - it('should have correct config path', () => { - expect(warpSkillsAdapter.configPath).toEqual(['warp', 'skills']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('warp-skills'); - expect(retrieved).toBe(warpSkillsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('warp', 'skills'); - expect(retrieved).toBe(warpSkillsAdapter); - }); - - it('should have required adapter methods', () => { - expect(warpSkillsAdapter.addDependency).toBeDefined(); - expect(warpSkillsAdapter.removeDependency).toBeDefined(); - expect(warpSkillsAdapter.link).toBeDefined(); - expect(warpSkillsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: warpSkillsAdapter, + registry: adapterRegistry, + expected: { + name: 'warp-skills', + tool: 'warp', + subtype: 'skills', + defaultSourceDir: '.agents/skills', + targetDir: '.agents/skills', + mode: 'directory', + configPath: ['warp', 'skills'] + } }); }); diff --git a/src/__tests__/windsurf-rules.test.ts b/src/__tests__/windsurf-rules.test.ts index e1ff074..8f42f10 100644 --- a/src/__tests__/windsurf-rules.test.ts +++ b/src/__tests__/windsurf-rules.test.ts @@ -1,36 +1,21 @@ -import { describe, it, expect } from 'vitest'; +import { 
describe } from 'vitest'; import { windsurfRulesAdapter } from '../adapters/windsurf-rules.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('windsurf-rules adapter', () => { - it('should have correct basic properties', () => { - expect(windsurfRulesAdapter.name).toBe('windsurf-rules'); - expect(windsurfRulesAdapter.tool).toBe('windsurf'); - expect(windsurfRulesAdapter.subtype).toBe('rules'); - expect(windsurfRulesAdapter.defaultSourceDir).toBe('.windsurf/rules'); - expect(windsurfRulesAdapter.targetDir).toBe('.windsurf/rules'); - expect(windsurfRulesAdapter.mode).toBe('file'); - expect(windsurfRulesAdapter.fileSuffixes).toEqual(['.md']); - }); - - it('should have correct config path', () => { - expect(windsurfRulesAdapter.configPath).toEqual(['windsurf', 'rules']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('windsurf-rules'); - expect(retrieved).toBe(windsurfRulesAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('windsurf', 'rules'); - expect(retrieved).toBe(windsurfRulesAdapter); - }); - - it('should have required adapter methods', () => { - expect(windsurfRulesAdapter.addDependency).toBeDefined(); - expect(windsurfRulesAdapter.removeDependency).toBeDefined(); - expect(windsurfRulesAdapter.link).toBeDefined(); - expect(windsurfRulesAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: windsurfRulesAdapter, + registry: adapterRegistry, + expected: { + name: 'windsurf-rules', + tool: 'windsurf', + subtype: 'rules', + defaultSourceDir: '.windsurf/rules', + targetDir: '.windsurf/rules', + mode: 'file', + configPath: ['windsurf', 'rules'], + fileSuffixes: ['.md'] + } }); }); diff --git a/src/__tests__/windsurf-skills.test.ts b/src/__tests__/windsurf-skills.test.ts index e54db78..bf862d4 100644 --- 
a/src/__tests__/windsurf-skills.test.ts +++ b/src/__tests__/windsurf-skills.test.ts @@ -1,35 +1,20 @@ -import { describe, it, expect } from 'vitest'; +import { describe } from 'vitest'; import { windsurfSkillsAdapter } from '../adapters/windsurf-skills.js'; import { adapterRegistry } from '../adapters/index.js'; +import { runStandardAdapterContract } from './helpers/adapter-contract.js'; describe('windsurf-skills adapter', () => { - it('should have correct basic properties', () => { - expect(windsurfSkillsAdapter.name).toBe('windsurf-skills'); - expect(windsurfSkillsAdapter.tool).toBe('windsurf'); - expect(windsurfSkillsAdapter.subtype).toBe('skills'); - expect(windsurfSkillsAdapter.defaultSourceDir).toBe('.windsurf/skills'); - expect(windsurfSkillsAdapter.targetDir).toBe('.windsurf/skills'); - expect(windsurfSkillsAdapter.mode).toBe('directory'); - }); - - it('should have correct config path', () => { - expect(windsurfSkillsAdapter.configPath).toEqual(['windsurf', 'skills']); - }); - - it('should be registered in adapterRegistry', () => { - const retrieved = adapterRegistry.getByName('windsurf-skills'); - expect(retrieved).toBe(windsurfSkillsAdapter); - }); - - it('should be retrievable by tool and subtype', () => { - const retrieved = adapterRegistry.get('windsurf', 'skills'); - expect(retrieved).toBe(windsurfSkillsAdapter); - }); - - it('should have required adapter methods', () => { - expect(windsurfSkillsAdapter.addDependency).toBeDefined(); - expect(windsurfSkillsAdapter.removeDependency).toBeDefined(); - expect(windsurfSkillsAdapter.link).toBeDefined(); - expect(windsurfSkillsAdapter.unlink).toBeDefined(); + runStandardAdapterContract({ + adapter: windsurfSkillsAdapter, + registry: adapterRegistry, + expected: { + name: 'windsurf-skills', + tool: 'windsurf', + subtype: 'skills', + defaultSourceDir: '.windsurf/skills', + targetDir: '.windsurf/skills', + mode: 'directory', + configPath: ['windsurf', 'skills'] + } }); }); diff --git a/src/adapters/index.ts 
b/src/adapters/index.ts index d8f437c..ab30846 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -74,8 +74,8 @@ class DefaultAdapterRegistry implements AdapterRegistry { this.register(codexMdAdapter); this.register(warpSkillsAdapter); this.register(windsurfRulesAdapter); - this.register(clineRulesAdapter); this.register(windsurfSkillsAdapter); + this.register(clineRulesAdapter); this.register(clineSkillsAdapter); } @@ -149,98 +149,39 @@ export function findAdapterForAlias( cfg: ProjectConfig, alias: string ): { adapter: SyncAdapter; section: string } | null { - if (cfg.cursor?.rules?.[alias]) { - return { adapter: cursorRulesAdapter, section: 'cursor.rules' }; - } - if (cfg.cursor?.commands?.[alias]) { - return { adapter: cursorCommandsAdapter, section: 'cursor.commands' }; - } - if (cfg.cursor?.skills?.[alias]) { - return { adapter: cursorSkillsAdapter, section: 'cursor.skills' }; - } - if (cfg.cursor?.agents?.[alias]) { - return { adapter: cursorAgentsAdapter, section: 'cursor.agents' }; - } - if (cfg.copilot?.instructions?.[alias]) { - return { adapter: copilotInstructionsAdapter, section: 'copilot.instructions' }; - } - if (cfg.copilot?.skills?.[alias]) { - return { adapter: copilotSkillsAdapter, section: 'copilot.skills' }; - } - if (cfg.copilot?.prompts?.[alias]) { - return { adapter: copilotPromptsAdapter, section: 'copilot.prompts' }; - } - if (cfg.copilot?.agents?.[alias]) { - return { adapter: copilotAgentsAdapter, section: 'copilot.agents' }; - } - if (cfg.claude?.skills?.[alias]) { - return { adapter: claudeSkillsAdapter, section: 'claude.skills' }; - } - if (cfg.claude?.agents?.[alias]) { - return { adapter: claudeAgentsAdapter, section: 'claude.agents' }; - } - if (cfg.claude?.rules?.[alias]) { - return { adapter: claudeRulesAdapter, section: 'claude.rules' }; - } - if (cfg.claude?.md?.[alias]) { - return { adapter: claudeMdAdapter, section: 'claude.md' }; - } - if (cfg.trae?.rules?.[alias]) { - return { adapter: traeRulesAdapter, 
section: 'trae.rules' }; - } - if (cfg.trae?.skills?.[alias]) { - return { adapter: traeSkillsAdapter, section: 'trae.skills' }; - } - if (cfg.opencode?.agents?.[alias]) { - return { adapter: opencodeAgentsAdapter, section: 'opencode.agents' }; - } - if (cfg.opencode?.skills?.[alias]) { - return { adapter: opencodeSkillsAdapter, section: 'opencode.skills' }; - } - if (cfg.opencode?.commands?.[alias]) { - return { adapter: opencodeCommandsAdapter, section: 'opencode.commands' }; - } - if (cfg.opencode?.tools?.[alias]) { - return { adapter: opencodeToolsAdapter, section: 'opencode.tools' }; + for (const adapter of adapterRegistry.all()) { + const sectionConfig = getAliasSectionConfig(cfg, adapter); + if (sectionConfig && Object.prototype.hasOwnProperty.call(sectionConfig, alias)) { + return { adapter, section: getSectionName(adapter) }; + } } - if (cfg.agentsMd?.[alias]) { - return { adapter: agentsMdAdapter, section: 'agentsMd' }; - } - if (cfg.codex?.rules?.[alias]) { - return { adapter: codexRulesAdapter, section: 'codex.rules' }; - } - if (cfg.codex?.skills?.[alias]) { - return { adapter: codexSkillsAdapter, section: 'codex.skills' }; - } - if (cfg.codex?.md?.[alias]) { - return { adapter: codexMdAdapter, section: 'codex.md' }; - } - if (cfg.gemini?.commands?.[alias]) { - return { adapter: geminiCommandsAdapter, section: 'gemini.commands' }; - } - if (cfg.gemini?.skills?.[alias]) { - return { adapter: geminiSkillsAdapter, section: 'gemini.skills' }; - } - if (cfg.gemini?.agents?.[alias]) { - return { adapter: geminiAgentsAdapter, section: 'gemini.agents' }; - } - if (cfg.gemini?.md?.[alias]) { - return { adapter: geminiMdAdapter, section: 'gemini.md' }; - } - if (cfg.warp?.skills?.[alias]) { - return { adapter: warpSkillsAdapter, section: 'warp.skills' }; - } - if (cfg.windsurf?.rules?.[alias]) { - return { adapter: windsurfRulesAdapter, section: 'windsurf.rules' }; + return null; +} + +function getAliasSectionConfig(cfg: ProjectConfig, adapter: SyncAdapter): 
Record | undefined { + const [topLevel, subLevel] = adapter.configPath; + const top = (cfg as Record)[topLevel]; + if (!top || typeof top !== 'object') { + return undefined; } - if (cfg.windsurf?.skills?.[alias]) { - return { adapter: windsurfSkillsAdapter, section: 'windsurf.skills' }; + + // AGENTS.md dependencies are stored in flat `agentsMd` object for backward compatibility. + if (topLevel === 'agentsMd') { + return top as Record; } - if (cfg.cline?.rules?.[alias]) { - return { adapter: clineRulesAdapter, section: 'cline.rules' }; + + const nested = (top as Record)[subLevel]; + if (!nested || typeof nested !== 'object') { + return undefined; } - if (cfg.cline?.skills?.[alias]) { - return { adapter: clineSkillsAdapter, section: 'cline.skills' }; + + return nested as Record; +} + +function getSectionName(adapter: SyncAdapter): string { + const [topLevel, subLevel] = adapter.configPath; + if (topLevel === 'agentsMd') { + return 'agentsMd'; } - return null; + return `${topLevel}.${subLevel}`; } diff --git a/src/completion/scripts.ts b/src/completion/scripts.ts index ff0e0c1..01f7233 100644 --- a/src/completion/scripts.ts +++ b/src/completion/scripts.ts @@ -1,1404 +1,670 @@ /** * Shell completion scripts for bash, zsh, and fish + * Generated from shared completion metadata to reduce drift. 
*/ -export const bashScript = ` -# ais bash completion -_ais_complete() { - local cur="\${COMP_WORDS[COMP_CWORD]}" - local prev="\${COMP_WORDS[COMP_CWORD-1]}" - local pprev="\${COMP_WORDS[COMP_CWORD-2]}" - local ppprev="\${COMP_WORDS[COMP_CWORD-3]}" - - # cursor commands add - if [[ "\$ppprev" == "cursor" && "\$pprev" == "commands" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cursor-commands 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cursor add - if [[ "\$pprev" == "cursor" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cursor 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # copilot instructions add - if [[ "\$ppprev" == "copilot" && "\$pprev" == "instructions" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete copilot-instructions 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # copilot skills add - if [[ "\$ppprev" == "copilot" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete copilot-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # copilot prompts add - if [[ "\$ppprev" == "copilot" && "\$pprev" == "prompts" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete copilot-prompts 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # copilot agents add - if [[ "\$ppprev" == "copilot" && "\$pprev" == "agents" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete copilot-agents 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # claude skills add - if [[ "\$ppprev" == "claude" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete claude-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # claude agents add - if [[ "\$ppprev" == "claude" && "\$pprev" == "agents" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete claude-agents 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # claude rules add - if [[ "\$ppprev" == "claude" 
&& "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete claude-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # trae rules add - if [[ "\$ppprev" == "trae" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete trae-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # trae skills add - if [[ "\$ppprev" == "trae" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete trae-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cursor skills add - if [[ "\$ppprev" == "cursor" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cursor-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cursor agents add - if [[ "\$ppprev" == "cursor" && "\$pprev" == "agents" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cursor-agents 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cursor rules add - if [[ "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cursor 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cursor commands - if [[ "\$pprev" == "cursor" && "\$prev" == "commands" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # cursor skills - if [[ "\$pprev" == "cursor" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # cursor agents - if [[ "\$pprev" == "cursor" && "\$prev" == "agents" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # cursor rules - if [[ "\$pprev" == "cursor" && "\$prev" == "rules" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # claude skills - if [[ "\$pprev" == "claude" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 
0 - fi - - # claude agents - if [[ "\$pprev" == "claude" && "\$prev" == "agents" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # trae rules - if [[ "\$pprev" == "trae" && "\$prev" == "rules" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # trae skills - if [[ "\$pprev" == "trae" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # agents-md add - if [[ "\$pprev" == "agents-md" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete agents-md 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # opencode agents - if [[ "\$ppprev" == "opencode" && "\$pprev" == "agents" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete opencode-agents 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # opencode skills - if [[ "\$ppprev" == "opencode" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete opencode-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # opencode commands - if [[ "\$ppprev" == "opencode" && "\$pprev" == "commands" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete opencode-commands 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # opencode tools - if [[ "\$ppprev" == "opencode" && "\$pprev" == "tools" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete opencode-tools 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # codex rules add - if [[ "\$ppprev" == "codex" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete codex-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # codex skills add - if [[ "\$ppprev" == "codex" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete codex-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # gemini commands add - if [[ 
"\$ppprev" == "gemini" && "\$pprev" == "commands" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete gemini-commands 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # gemini skills add - if [[ "\$ppprev" == "gemini" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete gemini-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # gemini agents add - if [[ "\$ppprev" == "gemini" && "\$pprev" == "agents" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete gemini-agents 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # warp skills add - if [[ "\$ppprev" == "warp" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete warp-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # windsurf rules add - if [[ "\$pprev" == "windsurf" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete windsurf-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # windsurf rules subcommand add - if [[ "\$ppprev" == "windsurf" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete windsurf-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # windsurf skills add - if [[ "\$ppprev" == "windsurf" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete windsurf-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cline rules add - if [[ "\$pprev" == "cline" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cline-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cline rules subcommand add - if [[ "\$ppprev" == "cline" && "\$pprev" == "rules" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete cline-rules 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # cline skills add - if [[ "\$ppprev" == "cline" && "\$pprev" == "skills" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete 
cline-skills 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # agents-md - if [[ "\$pprev" == "agents-md" && "\$prev" == "add" ]]; then - COMPREPLY=( $(compgen -W "$(ais _complete agents-md 2>/dev/null)" -- "\$cur") ) - return 0 - fi - - # opencode agents - if [[ "\$pprev" == "opencode" && "\$prev" == "agents" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # opencode skills - if [[ "\$pprev" == "opencode" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # opencode commands - if [[ "\$pprev" == "opencode" && "\$prev" == "commands" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # opencode tools - if [[ "\$pprev" == "opencode" && "\$prev" == "tools" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # codex rules - if [[ "\$pprev" == "codex" && "\$prev" == "rules" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # codex skills - if [[ "\$pprev" == "codex" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # gemini commands - if [[ "\$pprev" == "gemini" && "\$prev" == "commands" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # gemini skills - if [[ "\$pprev" == "gemini" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # gemini agents - if [[ "\$pprev" == "gemini" && "\$prev" == "agents" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # warp skills - if [[ "\$pprev" == "warp" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # windsurf - if [[ "\$prev" == "windsurf" ]]; then - COMPREPLY=( 
$(compgen -W "add remove install add-all import rules skills" -- "\$cur") ) - return 0 - fi - - # cline - if [[ "\$prev" == "cline" ]]; then - COMPREPLY=( $(compgen -W "add remove install add-all import rules skills" -- "\$cur") ) - return 0 - fi - - # windsurf rules - if [[ "\$pprev" == "windsurf" && "\$prev" == "rules" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # windsurf skills - if [[ "\$pprev" == "windsurf" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # cline rules - if [[ "\$pprev" == "cline" && "\$prev" == "rules" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # cline skills - if [[ "\$pprev" == "cline" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # agents-md - if [[ "\$prev" == "agents-md" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "cursor" ]]; then - COMPREPLY=( $(compgen -W "add remove install import rules commands skills agents" -- "\$cur") ) - return 0 - fi - - # copilot instructions - if [[ "\$pprev" == "copilot" && "\$prev" == "instructions" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # copilot skills - if [[ "\$pprev" == "copilot" && "\$prev" == "skills" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # copilot prompts - if [[ "\$pprev" == "copilot" && "\$prev" == "prompts" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - # copilot agents - if [[ "\$pprev" == "copilot" && "\$prev" == "agents" ]]; then - COMPREPLY=( $(compgen -W "add remove install import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "copilot" ]]; then - COMPREPLY=( $(compgen -W "instructions prompts 
skills agents install" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "claude" ]]; then - COMPREPLY=( $(compgen -W "rules skills agents install" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "trae" ]]; then - COMPREPLY=( $(compgen -W "rules skills install" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "opencode" ]]; then - COMPREPLY=( $(compgen -W "commands skills agents tools install import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "codex" ]]; then - COMPREPLY=( $(compgen -W "rules skills install import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "gemini" ]]; then - COMPREPLY=( $(compgen -W "commands skills agents install add-all import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "warp" ]]; then - COMPREPLY=( $(compgen -W "skills install import" -- "\$cur") ) - return 0 - fi - - if [[ "\$prev" == "ais" ]]; then - COMPREPLY=( $(compgen -W "cursor copilot claude trae opencode codex gemini warp windsurf cline agents-md use list git add remove install import completion" -- "\$cur") ) - return 0 - fi +interface CompletionEntry { + name: string; + description: string; } -complete -F _ais_complete ais -`; - -export const zshScript = ` -# ais zsh completion -_ais() { - local -a subcmds -subcmds=( - 'cursor:Manage Cursor rules, commands, and skills' - 'copilot:Manage GitHub Copilot instructions' - 'claude:Manage Claude skills, agents, and plugins' - 'trae:Manage Trae rules and skills' - 'opencode:Manage OpenCode agents, skills, commands, and tools' - 'codex:Manage Codex rules and skills' - 'gemini:Manage Gemini CLI commands, skills, and agents' - 'warp:Manage Warp agent skills' - 'windsurf:Manage Windsurf rules and skills' - 'cline:Manage Cline rules and skills' - 'agents-md:Manage AGENTS.md files (agents.md standard)' - 'use:Configure rules repository' - 'list:List configured repositories' - 'git:Run git commands in rules repository' - 'add:Add a rule (smart dispatch)' - 'remove:Remove a rule (smart dispatch)' - 
'install:Install all rules (smart dispatch)' - 'import:Import entry to rules repository' - 'completion:Output shell completion script' - ) - - local -a cursor_subcmds copilot_subcmds claude_subcmds trae_subcmds opencode_subcmds codex_subcmds gemini_subcmds warp_subcmds windsurf_subcmds cline_subcmds agents_md_subcmds cursor_rules_subcmds cursor_commands_subcmds cursor_skills_subcmds cursor_agents_subcmds copilot_instructions_subcmds copilot_skills_subcmds copilot_prompts_subcmds copilot_agents_subcmds claude_skills_subcmds claude_agents_subcmds trae_rules_subcmds trae_skills_subcmds opencode_agents_subcmds opencode_skills_subcmds opencode_commands_subcmds opencode_tools_subcmds codex_rules_subcmds codex_skills_subcmds gemini_commands_subcmds gemini_skills_subcmds gemini_agents_subcmds warp_skills_subcmds windsurf_rules_subcmds windsurf_skills_subcmds cline_rules_subcmds cline_skills_subcmds - cursor_subcmds=('add:Add a Cursor rule' 'remove:Remove a Cursor rule' 'install:Install all Cursor entries' 'import:Import entry to repository' 'rules:Manage rules explicitly' 'commands:Manage commands' 'skills:Manage skills' 'agents:Manage agents') - copilot_subcmds=('instructions:Manage GitHub Copilot instructions' 'prompts:Manage GitHub Copilot prompt files' 'skills:Manage GitHub Copilot skills' 'agents:Manage GitHub Copilot custom agents' 'install:Install all GitHub Copilot entries') - copilot_instructions_subcmds=('add:Add a GitHub Copilot instruction' 'remove:Remove a GitHub Copilot instruction' 'install:Install all GitHub Copilot instructions' 'import:Import instruction to repository') - copilot_prompts_subcmds=('add:Add a GitHub Copilot prompt' 'remove:Remove a GitHub Copilot prompt' 'install:Install all GitHub Copilot prompts' 'import:Import prompt to repository') - copilot_skills_subcmds=('add:Add a GitHub Copilot skill' 'remove:Remove a GitHub Copilot skill' 'install:Install all GitHub Copilot skills' 'import:Import skill to repository') - 
copilot_agents_subcmds=('add:Add a GitHub Copilot agent' 'remove:Remove a GitHub Copilot agent' 'install:Install all GitHub Copilot agents' 'import:Import agent to repository') - claude_subcmds=('rules:Manage Claude rules' 'skills:Manage Claude skills' 'agents:Manage Claude agents' 'install:Install all Claude components') - trae_subcmds=('rules:Manage Trae rules' 'skills:Manage Trae skills' 'install:Install all Trae entries') - opencode_subcmds=('commands:Manage OpenCode commands' 'skills:Manage OpenCode skills' 'agents:Manage OpenCode agents' 'tools:Manage OpenCode tools' 'install:Install all OpenCode entries' 'import:Import entry to repository') - agents_md_subcmds=('add:Add an AGENTS.md file' 'remove:Remove an AGENTS.md file' 'install:Install AGENTS.md' 'import:Import AGENTS.md to repository') - cursor_rules_subcmds=('add:Add a Cursor rule' 'remove:Remove a Cursor rule' 'install:Install all Cursor rules' 'import:Import rule to repository') - cursor_commands_subcmds=('add:Add a Cursor command' 'remove:Remove a Cursor command' 'install:Install all Cursor commands' 'import:Import command to repository') - cursor_skills_subcmds=('add:Add a Cursor skill' 'remove:Remove a Cursor skill' 'install:Install all Cursor skills' 'import:Import skill to repository') - cursor_agents_subcmds=('add:Add a Cursor agent' 'remove:Remove a Cursor agent' 'install:Install all Cursor agents' 'import:Import agent to repository') - claude_skills_subcmds=('add:Add a Claude skill' 'remove:Remove a Claude skill' 'install:Install all Claude skills' 'import:Import skill to repository') - claude_agents_subcmds=('add:Add a Claude agent' 'remove:Remove a Claude agent' 'install:Install all Claude agents' 'import:Import agent to repository') - trae_rules_subcmds=('add:Add a Trae rule' 'remove:Remove a Trae rule' 'install:Install all Trae rules' 'import:Import rule to repository') - trae_skills_subcmds=('add:Add a Trae skill' 'remove:Remove a Trae skill' 'install:Install all Trae skills' 
'import:Import skill to repository') - opencode_agents_subcmds=('add:Add an OpenCode agent' 'remove:Remove an OpenCode agent' 'install:Install all OpenCode agents' 'import:Import agent to repository') - opencode_skills_subcmds=('add:Add an OpenCode skill' 'remove:Remove an OpenCode skill' 'install:Install all OpenCode skills' 'import:Import skill to repository') - opencode_commands_subcmds=('add:Add an OpenCode command' 'remove:Remove an OpenCode command' 'install:Install all OpenCode commands' 'import:Import command to repository') - opencode_tools_subcmds=('add:Add an OpenCode tool' 'remove:Remove an OpenCode tool' 'install:Install all OpenCode tools' 'import:Import tool to repository') - codex_subcmds=('rules:Manage Codex rules' 'skills:Manage Codex skills' 'install:Install all Codex entries' 'import:Import entry to repository') - codex_rules_subcmds=('add:Add a Codex rule' 'remove:Remove a Codex rule' 'install:Install all Codex rules' 'import:Import rule to repository') - codex_skills_subcmds=('add:Add a Codex skill' 'remove:Remove a Codex skill' 'install:Install all Codex skills' 'import:Import skill to repository') - gemini_subcmds=('commands:Manage Gemini commands' 'skills:Manage Gemini skills' 'agents:Manage Gemini agents' 'install:Install all Gemini entries' 'add-all:Add all Gemini entries' 'import:Import entry to repository') - gemini_commands_subcmds=('add:Add a Gemini command' 'remove:Remove a Gemini command' 'install:Install all Gemini commands' 'import:Import command to repository') - gemini_skills_subcmds=('add:Add a Gemini skill' 'remove:Remove a Gemini skill' 'install:Install all Gemini skills' 'import:Import skill to repository') - gemini_agents_subcmds=('add:Add a Gemini agent' 'remove:Remove a Gemini agent' 'install:Install all Gemini agents' 'import:Import agent to repository') - warp_subcmds=('skills:Manage Warp skills' 'install:Install all Warp entries' 'import:Import entry to repository') - warp_skills_subcmds=('add:Add a Warp skill' 
'remove:Remove a Warp skill' 'install:Install all Warp skills' 'import:Import skill to repository') - windsurf_subcmds=('add:Add a Windsurf rule' 'remove:Remove a Windsurf rule' 'install:Install all Windsurf entries' 'add-all:Add all Windsurf entries' 'import:Import entry to repository' 'rules:Manage Windsurf rules' 'skills:Manage Windsurf skills') - windsurf_rules_subcmds=('add:Add a Windsurf rule' 'remove:Remove a Windsurf rule' 'install:Install all Windsurf rules' 'import:Import rule to repository') - windsurf_skills_subcmds=('add:Add a Windsurf skill' 'remove:Remove a Windsurf skill' 'install:Install all Windsurf skills' 'import:Import skill to repository') - cline_subcmds=('add:Add a Cline rule' 'remove:Remove a Cline rule' 'install:Install all Cline entries' 'add-all:Add all Cline entries' 'import:Import entry to repository' 'rules:Manage Cline rules' 'skills:Manage Cline skills') - cline_rules_subcmds=('add:Add a Cline rule' 'remove:Remove a Cline rule' 'install:Install all Cline rules' 'import:Import rule to repository') - cline_skills_subcmds=('add:Add a Cline skill' 'remove:Remove a Cline skill' 'install:Install all Cline skills' 'import:Import skill to repository') - - _arguments -C \\ - '1:command:->command' \\ - '2:subcommand:->subcommand' \\ - '3:subsubcommand:->subsubcommand' \\ - '4:name:->name' \\ - '*::arg:->args' - case "\$state" in - command) - _describe 'command' subcmds - ;; - subcommand) - case "\$words[2]" in - cursor) - _describe 'subcommand' cursor_subcmds - ;; - copilot) - _describe 'subcommand' copilot_subcmds - ;; - claude) - _describe 'subcommand' claude_subcmds - ;; - trae) - _describe 'subcommand' trae_subcmds - ;; - opencode) - _describe 'subcommand' opencode_subcmds - ;; - codex) - _describe 'subcommand' codex_subcmds - ;; - gemini) - _describe 'subcommand' gemini_subcmds - ;; - warp) - _describe 'subcommand' warp_subcmds - ;; - windsurf) - _describe 'subcommand' windsurf_subcmds - ;; - cline) - _describe 'subcommand' cline_subcmds 
- ;; - agents-md) - _describe 'subcommand' agents_md_subcmds - ;; - esac - ;; - subsubcommand) - case "\$words[2]" in - cursor) - case "\$words[3]" in - add) - local -a rules - rules=(\${(f)"\$(ais _complete cursor 2>/dev/null)"}) - if (( \$#rules )); then - compadd "\$rules[@]" - fi - ;; - rules) - _describe 'subsubcommand' cursor_rules_subcmds - ;; - commands) - _describe 'subsubcommand' cursor_commands_subcmds - ;; - skills) - _describe 'subsubcommand' cursor_skills_subcmds - ;; - agents) - _describe 'subsubcommand' cursor_agents_subcmds - ;; - *) - _describe 'subsubcommand' cursor_subcmds - ;; - esac - ;; - copilot) - case "\$words[3]" in - instructions) - _describe 'subsubcommand' copilot_instructions_subcmds - ;; - prompts) - _describe 'subsubcommand' copilot_prompts_subcmds - ;; - skills) - _describe 'subsubcommand' copilot_skills_subcmds - ;; - agents) - _describe 'subsubcommand' copilot_agents_subcmds - ;; - *) - _describe 'subsubcommand' copilot_subcmds - ;; - esac - ;; - claude) - case "\$words[3]" in - skills) - _describe 'subsubcommand' claude_skills_subcmds - ;; - agents) - _describe 'subsubcommand' claude_agents_subcmds - ;; - *) - _describe 'subsubcommand' claude_subcmds - ;; - esac - ;; - trae) - case "\$words[3]" in - rules) - _describe 'subsubcommand' trae_rules_subcmds - ;; - skills) - _describe 'subsubcommand' trae_skills_subcmds - ;; - *) - _describe 'subsubcommand' trae_subcmds - ;; - esac - ;; - opencode) - case "\$words[3]" in - commands) - _describe 'subsubcommand' opencode_commands_subcmds - ;; - skills) - _describe 'subsubcommand' opencode_skills_subcmds - ;; - agents) - _describe 'subsubcommand' opencode_agents_subcmds - ;; - tools) - _describe 'subsubcommand' opencode_tools_subcmds - ;; - *) - _describe 'subsubcommand' opencode_subcmds - ;; - esac - ;; - codex) - case "\$words[3]" in - rules) - _describe 'subsubcommand' codex_rules_subcmds - ;; - skills) - _describe 'subsubcommand' codex_skills_subcmds - ;; - *) - _describe 
'subsubcommand' codex_subcmds - ;; - esac - ;; - gemini) - case "\$words[3]" in - commands) - _describe 'subsubcommand' gemini_commands_subcmds - ;; - skills) - _describe 'subsubcommand' gemini_skills_subcmds - ;; - agents) - _describe 'subsubcommand' gemini_agents_subcmds - ;; - *) - _describe 'subsubcommand' gemini_subcmds - ;; - esac - ;; - warp) - case "\$words[3]" in - skills) - _describe 'subsubcommand' warp_skills_subcmds - ;; - *) - _describe 'subsubcommand' warp_subcmds - ;; - esac - ;; - windsurf) - case "\$words[3]" in - add) - local -a windsurf_rules - windsurf_rules=(\${(f)"\$(ais _complete windsurf-rules 2>/dev/null)"}) - if (( \$#windsurf_rules )); then - compadd "\$windsurf_rules[@]" - fi - ;; - rules) - _describe 'subsubcommand' windsurf_rules_subcmds - ;; - skills) - _describe 'subsubcommand' windsurf_skills_subcmds - ;; - *) - _describe 'subsubcommand' windsurf_subcmds - ;; - esac - ;; - cline) - case "\$words[3]" in - add) - local -a cline_rules - cline_rules=(\${(f)"\$(ais _complete cline-rules 2>/dev/null)"}) - if (( \$#cline_rules )); then - compadd "\$cline_rules[@]" - fi - ;; - rules) - _describe 'subsubcommand' cline_rules_subcmds - ;; - skills) - _describe 'subsubcommand' cline_skills_subcmds - ;; - *) - _describe 'subsubcommand' cline_subcmds - ;; - esac - ;; - agents-md) - case "\$words[3]" in - add) - local -a agents_md - agents_md=(\${(f)"\$(ais _complete agents-md 2>/dev/null)"}) - if (( \$#agents_md )); then - compadd "\$agents_md[@]" - fi - ;; - *) - _describe 'subsubcommand' agents_md_subcmds - ;; - esac - ;; - esac - ;; - name) - case \"\$words[2]\" in - cursor) - case \"\$words[3]\" in - add) - local -a rules - rules=(\${(f)\"$(ais _complete cursor 2>/dev/null)\"}) - if (( \$#rules )); then - compadd \"\$rules[@]\" - fi - ;; - rules) - case \"\$words[4]\" in - add) - local -a rules - rules=(\${(f)\"$(ais _complete cursor 2>/dev/null)\"}) - if (( \$#rules )); then - compadd \"\$rules[@]\" - fi - ;; - esac - ;; - commands) - case 
"\$words[4]" in - add) - local -a commands - commands=(\${(f)"$(ais _complete cursor-commands 2>/dev/null)"}) - if (( \$#commands )); then - compadd "\$commands[@]" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete cursor-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - agents) - case \"\$words[4]\" in - add) - local -a agents - agents=(\${(f)\"$(ais _complete cursor-agents 2>/dev/null)\"}) - if (( \$#agents )); then - compadd \"\$agents[@]\" - fi - ;; - esac - ;; - esac - ;; - copilot) - case \"\$words[3]\" in - instructions) - case \"\$words[4]\" in - add) - local -a instructions - instructions=(\${(f)\"$(ais _complete copilot-instructions 2>/dev/null)\"}) - if (( \$#instructions )); then - compadd \"\$instructions[@]\" - fi - ;; - esac - ;; - prompts) - case \"\$words[4]\" in - add) - local -a prompts - prompts=(\${(f)\"$(ais _complete copilot-prompts 2>/dev/null)\"}) - if (( \$#prompts )); then - compadd \"\$prompts[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete copilot-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - agents) - case \"\$words[4]\" in - add) - local -a agents - agents=(\${(f)\"$(ais _complete copilot-agents 2>/dev/null)\"}) - if (( \$#agents )); then - compadd \"\$agents[@]\" - fi - ;; - esac - ;; - esac - ;; - claude) - case \"\$words[3]\" in - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete claude-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - agents) - case \"\$words[4]\" in - add) - local -a agents - agents=(\${(f)\"$(ais _complete claude-agents 2>/dev/null)\"}) - if (( \$#agents )); then - compadd \"\$agents[@]\" - fi - ;; - esac - ;; - rules) - case \"\$words[4]\" in - add) - local -a rules - 
rules=(\${(f)\"$(ais _complete claude-rules 2>/dev/null)\"}) - if (( \$#rules )); then - compadd \"\$rules[@]\" - fi - ;; - esac - ;; - esac - ;; - trae) - case \"\$words[3]\" in - rules) - case \"\$words[4]\" in - add) - local -a rules - rules=(\${(f)\"$(ais _complete trae-rules 2>/dev/null)\"}) - if (( \$#rules )); then - compadd \"\$rules[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete trae-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - esac - ;; - opencode) - case \"\$words[3]\" in - commands) - case \"\$words[4]\" in - add) - local -a commands - commands=(\${(f)\"$(ais _complete opencode-commands 2>/dev/null)\"}) - if (( \$#commands )); then - compadd \"\$commands[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete opencode-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - agents) - case \"\$words[4]\" in - add) - local -a agents - agents=(\${(f)\"$(ais _complete opencode-agents 2>/dev/null)\"}) - if (( \$#agents )); then - compadd \"\$agents[@]\" - fi - ;; - esac - ;; - tools) - case \"\$words[4]\" in - add) - local -a tools - tools=(\${(f)\"$(ais _complete opencode-tools 2>/dev/null)\"}) - if (( \$#tools )); then - compadd \"\$tools[@]\" - fi - ;; - esac - ;; - esac - ;; - codex) - case \"\$words[3]\" in - rules) - case \"\$words[4]\" in - add) - local -a rules - rules=(\${(f)\"$(ais _complete codex-rules 2>/dev/null)\"}) - if (( \$#rules )); then - compadd \"\$rules[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete codex-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - esac - ;; - gemini) - case \"\$words[3]\" in - commands) - case \"\$words[4]\" in - add) - local -a commands - 
commands=(\${(f)\"$(ais _complete gemini-commands 2>/dev/null)\"}) - if (( \$#commands )); then - compadd \"\$commands[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete gemini-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - agents) - case \"\$words[4]\" in - add) - local -a agents - agents=(\${(f)\"$(ais _complete gemini-agents 2>/dev/null)\"}) - if (( \$#agents )); then - compadd \"\$agents[@]\" - fi - ;; - esac - ;; - esac - ;; - warp) - case \"\$words[3]\" in - skills) - case \"\$words[4]\" in - add) - local -a skills - skills=(\${(f)\"$(ais _complete warp-skills 2>/dev/null)\"}) - if (( \$#skills )); then - compadd \"\$skills[@]\" - fi - ;; - esac - ;; - esac - ;; - windsurf) - case \"\$words[3]\" in - add) - local -a windsurf_rules - windsurf_rules=(\${(f)\"$(ais _complete windsurf-rules 2>/dev/null)\"}) - if (( \$#windsurf_rules )); then - compadd \"\$windsurf_rules[@]\" - fi - ;; - rules) - case \"\$words[4]\" in - add) - local -a windsurf_rules - windsurf_rules=(\${(f)\"$(ais _complete windsurf-rules 2>/dev/null)\"}) - if (( \$#windsurf_rules )); then - compadd \"\$windsurf_rules[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a windsurf_skills - windsurf_skills=(\${(f)\"$(ais _complete windsurf-skills 2>/dev/null)\"}) - if (( \$#windsurf_skills )); then - compadd \"\$windsurf_skills[@]\" - fi - ;; - esac - ;; - esac - ;; - cline) - case \"\$words[3]\" in - add) - local -a cline_rules - cline_rules=(\${(f)\"$(ais _complete cline-rules 2>/dev/null)\"}) - if (( \$#cline_rules )); then - compadd \"\$cline_rules[@]\" - fi - ;; - rules) - case \"\$words[4]\" in - add) - local -a cline_rules - cline_rules=(\${(f)\"$(ais _complete cline-rules 2>/dev/null)\"}) - if (( \$#cline_rules )); then - compadd \"\$cline_rules[@]\" - fi - ;; - esac - ;; - skills) - case \"\$words[4]\" in - add) - local -a cline_skills - 
cline_skills=(\${(f)\"$(ais _complete cline-skills 2>/dev/null)\"}) - if (( \$#cline_skills )); then - compadd \"\$cline_skills[@]\" - fi - ;; - esac - ;; - esac - ;; - agents-md) - case \"\$words[3]\" in - add) - local -a agents_md - agents_md=(\${(f)\"$(ais _complete agents-md 2>/dev/null)\"}) - if (( \$#agents_md )); then - compadd \"\$agents_md[@]\" - fi - ;; - esac - ;; - esac - ;; - args) - # Handle additional arguments - ;; - esac +interface ToolCompletionSpec { + tool: string; + description: string; + rootSubcommands: CompletionEntry[]; + nestedSubcommands: Record; + rootAddCompletionType?: string; + nestedAddCompletionTypes?: Record; } -# Only define completion if compdef is available (zsh completion initialized) -command -v compdef >/dev/null 2>&1 && compdef _ais ais -`; - -export const fishScript = ` -# ais fish completion -complete -c ais -f - -# Top-level commands -complete -c ais -n "__fish_use_subcommand" -a "cursor" -d "Manage Cursor rules, commands, and skills" -complete -c ais -n "__fish_use_subcommand" -a "copilot" -d "Manage GitHub Copilot instructions" -complete -c ais -n "__fish_use_subcommand" -a "claude" -d "Manage Claude skills, agents, and plugins" -complete -c ais -n "__fish_use_subcommand" -a "trae" -d "Manage Trae rules and skills" -complete -c ais -n "__fish_use_subcommand" -a "opencode" -d "Manage OpenCode agents, skills, commands, and tools" -complete -c ais -n "__fish_use_subcommand" -a "codex" -d "Manage Codex rules and skills" -complete -c ais -n "__fish_use_subcommand" -a "gemini" -d "Manage Gemini CLI commands, skills, and agents" -complete -c ais -n "__fish_use_subcommand" -a "warp" -d "Manage Warp agent skills" -complete -c ais -n "__fish_use_subcommand" -a "windsurf" -d "Manage Windsurf rules and skills" -complete -c ais -n "__fish_use_subcommand" -a "cline" -d "Manage Cline rules and skills" -complete -c ais -n "__fish_use_subcommand" -a "agents-md" -d "Manage AGENTS.md files (agents.md standard)" -complete -c ais -n 
"__fish_use_subcommand" -a "use" -d "Configure rules repository" -complete -c ais -n "__fish_use_subcommand" -a "list" -d "List configured repositories" -complete -c ais -n "__fish_use_subcommand" -a "git" -d "Run git commands in rules repository" -complete -c ais -n "__fish_use_subcommand" -a "add" -d "Add a rule (smart dispatch)" -complete -c ais -n "__fish_use_subcommand" -a "remove" -d "Remove a rule (smart dispatch)" -complete -c ais -n "__fish_use_subcommand" -a "install" -d "Install all rules (smart dispatch)" -complete -c ais -n "__fish_use_subcommand" -a "import" -d "Import entry to rules repository" -complete -c ais -n "__fish_use_subcommand" -a "completion" -d "Output shell completion script" - -# cursor subcommands -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "add" -d "Add a Cursor rule" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "remove" -d "Remove a Cursor rule" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "install" -d "Install all Cursor entries" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "import" -d "Import entry to repository" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "rules" -d "Manage rules explicitly" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "commands" -d "Manage commands" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands 
skills agents" -a "skills" -d "Manage skills" -complete -c ais -n "__fish_seen_subcommand_from cursor; and not __fish_seen_subcommand_from add remove install import rules commands skills agents" -a "agents" -d "Manage agents" - -# cursor rules subcommands -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cursor rule" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cursor rule" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cursor rules" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" - -# cursor commands subcommands -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cursor command" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cursor command" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cursor commands" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import command to repository" - -# cursor skills subcommands -complete -c ais -n "__fish_seen_subcommand_from cursor; and 
__fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cursor skill" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cursor skill" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cursor skills" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" - -# cursor agents subcommands -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cursor agent" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cursor agent" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cursor agents" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import agent to repository" - -# copilot subcommands -complete -c ais -n "__fish_seen_subcommand_from copilot; and not __fish_seen_subcommand_from instructions prompts skills agents install" -a "instructions" -d "Manage GitHub Copilot instructions" -complete -c ais -n "__fish_seen_subcommand_from copilot; and not __fish_seen_subcommand_from instructions prompts skills agents install" -a "prompts" -d "Manage GitHub Copilot prompt files" -complete -c ais 
-n "__fish_seen_subcommand_from copilot; and not __fish_seen_subcommand_from instructions prompts skills agents install" -a "skills" -d "Manage GitHub Copilot skills" -complete -c ais -n "__fish_seen_subcommand_from copilot; and not __fish_seen_subcommand_from instructions prompts skills agents install" -a "agents" -d "Manage GitHub Copilot custom agents" -complete -c ais -n "__fish_seen_subcommand_from copilot; and not __fish_seen_subcommand_from instructions prompts skills agents install" -a "install" -d "Install all GitHub Copilot entries" - -# copilot instructions subcommands -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from instructions; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a GitHub Copilot instruction" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from instructions; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a GitHub Copilot instruction" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from instructions; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all GitHub Copilot instructions" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from instructions; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import instruction to repository" - -# copilot prompts subcommands -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from prompts; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a GitHub Copilot prompt" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from prompts; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a GitHub Copilot prompt" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from 
prompts; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all GitHub Copilot prompts" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from prompts; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import prompt to repository" - -# copilot skills subcommands -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a GitHub Copilot skill" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a GitHub Copilot skill" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all GitHub Copilot skills" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" - -# copilot agents subcommands -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a GitHub Copilot agent" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a GitHub Copilot agent" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all GitHub Copilot agents" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install 
import" -a "import" -d "Import agent to repository" - -# claude subcommands -complete -c ais -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from rules skills agents install" -a "rules" -d "Manage Claude rules" -complete -c ais -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from rules skills agents install" -a "skills" -d "Manage Claude skills" -complete -c ais -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from rules skills agents install" -a "agents" -d "Manage Claude agents" -complete -c ais -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from rules skills agents install" -a "install" -d "Install all Claude components" - -# trae subcommands -complete -c ais -n "__fish_seen_subcommand_from trae; and not __fish_seen_subcommand_from rules skills install" -a "rules" -d "Manage Trae rules" -complete -c ais -n "__fish_seen_subcommand_from trae; and not __fish_seen_subcommand_from rules skills install" -a "skills" -d "Manage Trae skills" -complete -c ais -n "__fish_seen_subcommand_from trae; and not __fish_seen_subcommand_from rules skills install" -a "install" -d "Install all Trae entries" - -# claude skills subcommands -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Claude skill" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Claude skill" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Claude skills" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d 
"Import skill to repository" - -# claude agents subcommands -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Claude agent" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Claude agent" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Claude agents" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import agent to repository" - -# trae rules subcommands -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Trae rule" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Trae rule" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Trae rules" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" - -# trae skills subcommands -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Trae skill" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from skills; and not 
__fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Trae skill" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Trae skills" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" - -# opencode subcommands -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "commands" -d "Manage OpenCode commands" -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "skills" -d "Manage OpenCode skills" -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "agents" -d "Manage OpenCode agents" -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "tools" -d "Manage OpenCode tools" -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "install" -d "Install all OpenCode entries" -complete -c ais -n "__fish_seen_subcommand_from opencode; and not __fish_seen_subcommand_from commands skills agents tools install import" -a "import" -d "Import entry to repository" +const TOOL_SPECS: ToolCompletionSpec[] = [ + { + tool: 'cursor', + description: 'Manage Cursor rules, commands, and skills', + rootSubcommands: [ + { name: 'add', description: 'Add a Cursor rule' }, + { name: 'remove', description: 'Remove a Cursor rule' }, + { name: 'install', description: 'Install all Cursor entries' }, + { name: 'import', description: 'Import entry to repository' 
}, + { name: 'rules', description: 'Manage rules explicitly' }, + { name: 'commands', description: 'Manage commands' }, + { name: 'skills', description: 'Manage skills' }, + { name: 'agents', description: 'Manage agents' } + ], + nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Cursor rule' }, + { name: 'remove', description: 'Remove a Cursor rule' }, + { name: 'install', description: 'Install all Cursor rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + commands: [ + { name: 'add', description: 'Add a Cursor command' }, + { name: 'remove', description: 'Remove a Cursor command' }, + { name: 'install', description: 'Install all Cursor commands' }, + { name: 'import', description: 'Import command to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Cursor skill' }, + { name: 'remove', description: 'Remove a Cursor skill' }, + { name: 'install', description: 'Install all Cursor skills' }, + { name: 'import', description: 'Import skill to repository' } + ], + agents: [ + { name: 'add', description: 'Add a Cursor agent' }, + { name: 'remove', description: 'Remove a Cursor agent' }, + { name: 'install', description: 'Install all Cursor agents' }, + { name: 'import', description: 'Import agent to repository' } + ] + }, + rootAddCompletionType: 'cursor', + nestedAddCompletionTypes: { + rules: 'cursor', + commands: 'cursor-commands', + skills: 'cursor-skills', + agents: 'cursor-agents' + } + }, + { + tool: 'copilot', + description: 'Manage GitHub Copilot instructions', + rootSubcommands: [ + { name: 'instructions', description: 'Manage GitHub Copilot instructions' }, + { name: 'prompts', description: 'Manage GitHub Copilot prompt files' }, + { name: 'skills', description: 'Manage GitHub Copilot skills' }, + { name: 'agents', description: 'Manage GitHub Copilot custom agents' }, + { name: 'install', description: 'Install all GitHub Copilot entries' } + ], + nestedSubcommands: { + instructions: [ + { name: 
'add', description: 'Add a GitHub Copilot instruction' }, + { name: 'remove', description: 'Remove a GitHub Copilot instruction' }, + { name: 'install', description: 'Install all GitHub Copilot instructions' }, + { name: 'import', description: 'Import instruction to repository' } + ], + prompts: [ + { name: 'add', description: 'Add a GitHub Copilot prompt' }, + { name: 'remove', description: 'Remove a GitHub Copilot prompt' }, + { name: 'install', description: 'Install all GitHub Copilot prompts' }, + { name: 'import', description: 'Import prompt to repository' } + ], + skills: [ + { name: 'add', description: 'Add a GitHub Copilot skill' }, + { name: 'remove', description: 'Remove a GitHub Copilot skill' }, + { name: 'install', description: 'Install all GitHub Copilot skills' }, + { name: 'import', description: 'Import skill to repository' } + ], + agents: [ + { name: 'add', description: 'Add a GitHub Copilot agent' }, + { name: 'remove', description: 'Remove a GitHub Copilot agent' }, + { name: 'install', description: 'Install all GitHub Copilot agents' }, + { name: 'import', description: 'Import agent to repository' } + ] + }, + nestedAddCompletionTypes: { + instructions: 'copilot-instructions', + prompts: 'copilot-prompts', + skills: 'copilot-skills', + agents: 'copilot-agents' + } + }, + { + tool: 'claude', + description: 'Manage Claude skills, agents, and plugins', + rootSubcommands: [ + { name: 'rules', description: 'Manage Claude rules' }, + { name: 'skills', description: 'Manage Claude skills' }, + { name: 'agents', description: 'Manage Claude agents' }, + { name: 'install', description: 'Install all Claude components' } + ], + nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Claude rule' }, + { name: 'remove', description: 'Remove a Claude rule' }, + { name: 'install', description: 'Install all Claude rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Claude 
skill' }, + { name: 'remove', description: 'Remove a Claude skill' }, + { name: 'install', description: 'Install all Claude skills' }, + { name: 'import', description: 'Import skill to repository' } + ], + agents: [ + { name: 'add', description: 'Add a Claude agent' }, + { name: 'remove', description: 'Remove a Claude agent' }, + { name: 'install', description: 'Install all Claude agents' }, + { name: 'import', description: 'Import agent to repository' } + ] + }, + nestedAddCompletionTypes: { + rules: 'claude-rules', + skills: 'claude-skills', + agents: 'claude-agents' + } + }, + { + tool: 'trae', + description: 'Manage Trae rules and skills', + rootSubcommands: [ + { name: 'rules', description: 'Manage Trae rules' }, + { name: 'skills', description: 'Manage Trae skills' }, + { name: 'install', description: 'Install all Trae entries' } + ], + nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Trae rule' }, + { name: 'remove', description: 'Remove a Trae rule' }, + { name: 'install', description: 'Install all Trae rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Trae skill' }, + { name: 'remove', description: 'Remove a Trae skill' }, + { name: 'install', description: 'Install all Trae skills' }, + { name: 'import', description: 'Import skill to repository' } + ] + }, + nestedAddCompletionTypes: { + rules: 'trae-rules', + skills: 'trae-skills' + } + }, + { + tool: 'opencode', + description: 'Manage OpenCode agents, skills, commands, and tools', + rootSubcommands: [ + { name: 'commands', description: 'Manage OpenCode commands' }, + { name: 'skills', description: 'Manage OpenCode skills' }, + { name: 'agents', description: 'Manage OpenCode agents' }, + { name: 'tools', description: 'Manage OpenCode tools' }, + { name: 'install', description: 'Install all OpenCode entries' }, + { name: 'import', description: 'Import entry to repository' } + ], + nestedSubcommands: { + 
commands: [ + { name: 'add', description: 'Add an OpenCode command' }, + { name: 'remove', description: 'Remove an OpenCode command' }, + { name: 'install', description: 'Install all OpenCode commands' }, + { name: 'import', description: 'Import command to repository' } + ], + skills: [ + { name: 'add', description: 'Add an OpenCode skill' }, + { name: 'remove', description: 'Remove an OpenCode skill' }, + { name: 'install', description: 'Install all OpenCode skills' }, + { name: 'import', description: 'Import skill to repository' } + ], + agents: [ + { name: 'add', description: 'Add an OpenCode agent' }, + { name: 'remove', description: 'Remove an OpenCode agent' }, + { name: 'install', description: 'Install all OpenCode agents' }, + { name: 'import', description: 'Import agent to repository' } + ], + tools: [ + { name: 'add', description: 'Add an OpenCode tool' }, + { name: 'remove', description: 'Remove an OpenCode tool' }, + { name: 'install', description: 'Install all OpenCode tools' }, + { name: 'import', description: 'Import tool to repository' } + ] + }, + nestedAddCompletionTypes: { + commands: 'opencode-commands', + skills: 'opencode-skills', + agents: 'opencode-agents', + tools: 'opencode-tools' + } + }, + { + tool: 'codex', + description: 'Manage Codex rules and skills', + rootSubcommands: [ + { name: 'rules', description: 'Manage Codex rules' }, + { name: 'skills', description: 'Manage Codex skills' }, + { name: 'install', description: 'Install all Codex entries' }, + { name: 'import', description: 'Import entry to repository' } + ], + nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Codex rule' }, + { name: 'remove', description: 'Remove a Codex rule' }, + { name: 'install', description: 'Install all Codex rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Codex skill' }, + { name: 'remove', description: 'Remove a Codex skill' }, + { name: 'install', 
description: 'Install all Codex skills' }, + { name: 'import', description: 'Import skill to repository' } + ] + }, + nestedAddCompletionTypes: { + rules: 'codex-rules', + skills: 'codex-skills' + } + }, + { + tool: 'gemini', + description: 'Manage Gemini CLI commands, skills, and agents', + rootSubcommands: [ + { name: 'commands', description: 'Manage Gemini commands' }, + { name: 'skills', description: 'Manage Gemini skills' }, + { name: 'agents', description: 'Manage Gemini agents' }, + { name: 'install', description: 'Install all Gemini entries' }, + { name: 'add-all', description: 'Add all Gemini entries' }, + { name: 'import', description: 'Import entry to repository' } + ], + nestedSubcommands: { + commands: [ + { name: 'add', description: 'Add a Gemini command' }, + { name: 'remove', description: 'Remove a Gemini command' }, + { name: 'install', description: 'Install all Gemini commands' }, + { name: 'import', description: 'Import command to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Gemini skill' }, + { name: 'remove', description: 'Remove a Gemini skill' }, + { name: 'install', description: 'Install all Gemini skills' }, + { name: 'import', description: 'Import skill to repository' } + ], + agents: [ + { name: 'add', description: 'Add a Gemini agent' }, + { name: 'remove', description: 'Remove a Gemini agent' }, + { name: 'install', description: 'Install all Gemini agents' }, + { name: 'import', description: 'Import agent to repository' } + ] + }, + nestedAddCompletionTypes: { + commands: 'gemini-commands', + skills: 'gemini-skills', + agents: 'gemini-agents' + } + }, + { + tool: 'warp', + description: 'Manage Warp agent skills', + rootSubcommands: [ + { name: 'skills', description: 'Manage Warp skills' }, + { name: 'install', description: 'Install all Warp entries' }, + { name: 'import', description: 'Import entry to repository' } + ], + nestedSubcommands: { + skills: [ + { name: 'add', description: 'Add a Warp skill' }, + { 
name: 'remove', description: 'Remove a Warp skill' }, + { name: 'install', description: 'Install all Warp skills' }, + { name: 'import', description: 'Import skill to repository' } + ] + }, + nestedAddCompletionTypes: { + skills: 'warp-skills' + } + }, + { + tool: 'windsurf', + description: 'Manage Windsurf rules and skills', + rootSubcommands: [ + { name: 'add', description: 'Add a Windsurf rule' }, + { name: 'remove', description: 'Remove a Windsurf rule' }, + { name: 'install', description: 'Install all Windsurf entries' }, + { name: 'add-all', description: 'Add all Windsurf entries' }, + { name: 'import', description: 'Import entry to repository' }, + { name: 'rules', description: 'Manage Windsurf rules' }, + { name: 'skills', description: 'Manage Windsurf skills' } + ], + nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Windsurf rule' }, + { name: 'remove', description: 'Remove a Windsurf rule' }, + { name: 'install', description: 'Install all Windsurf rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Windsurf skill' }, + { name: 'remove', description: 'Remove a Windsurf skill' }, + { name: 'install', description: 'Install all Windsurf skills' }, + { name: 'import', description: 'Import skill to repository' } + ] + }, + rootAddCompletionType: 'windsurf-rules', + nestedAddCompletionTypes: { + rules: 'windsurf-rules', + skills: 'windsurf-skills' + } + }, + { + tool: 'cline', + description: 'Manage Cline rules and skills', + rootSubcommands: [ + { name: 'add', description: 'Add a Cline rule' }, + { name: 'remove', description: 'Remove a Cline rule' }, + { name: 'install', description: 'Install all Cline entries' }, + { name: 'add-all', description: 'Add all Cline entries' }, + { name: 'import', description: 'Import entry to repository' }, + { name: 'rules', description: 'Manage Cline rules' }, + { name: 'skills', description: 'Manage Cline skills' } + ], + 
nestedSubcommands: { + rules: [ + { name: 'add', description: 'Add a Cline rule' }, + { name: 'remove', description: 'Remove a Cline rule' }, + { name: 'install', description: 'Install all Cline rules' }, + { name: 'import', description: 'Import rule to repository' } + ], + skills: [ + { name: 'add', description: 'Add a Cline skill' }, + { name: 'remove', description: 'Remove a Cline skill' }, + { name: 'install', description: 'Install all Cline skills' }, + { name: 'import', description: 'Import skill to repository' } + ] + }, + rootAddCompletionType: 'cline-rules', + nestedAddCompletionTypes: { + rules: 'cline-rules', + skills: 'cline-skills' + } + }, + { + tool: 'agents-md', + description: 'Manage AGENTS.md files (agents.md standard)', + rootSubcommands: [ + { name: 'add', description: 'Add an AGENTS.md file' }, + { name: 'remove', description: 'Remove an AGENTS.md file' }, + { name: 'install', description: 'Install AGENTS.md' }, + { name: 'import', description: 'Import AGENTS.md to repository' } + ], + nestedSubcommands: {}, + rootAddCompletionType: 'agents-md' + } +]; + +const EXTRA_TOP_LEVEL_COMMANDS: CompletionEntry[] = [ + { name: 'use', description: 'Configure rules repository' }, + { name: 'list', description: 'List configured repositories' }, + { name: 'git', description: 'Run git commands in rules repository' }, + { name: 'add', description: 'Add a rule (smart dispatch)' }, + { name: 'remove', description: 'Remove a rule (smart dispatch)' }, + { name: 'install', description: 'Install all rules (smart dispatch)' }, + { name: 'import', description: 'Import entry to rules repository' }, + { name: 'completion', description: 'Output shell completion script' } +]; + +const TOP_LEVEL_COMMANDS: CompletionEntry[] = [ + ...TOOL_SPECS.map(spec => ({ name: spec.tool, description: spec.description })), + ...EXTRA_TOP_LEVEL_COMMANDS +]; + +function toVarName(name: string): string { + return name.replace(/-/g, '_'); +} -# opencode commands subcommands -complete -c ais 
-n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an OpenCode command" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an OpenCode command" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all OpenCode commands" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import command to repository" +function quotedNames(entries: CompletionEntry[]): string { + return entries.map(entry => entry.name).join(' '); +} -# opencode skills subcommands -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an OpenCode skill" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an OpenCode skill" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all OpenCode skills" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" +function escapeSingleQuotes(value: string): string { + return value.replace(/'/g, `'\\''`); +} -# opencode agents subcommands -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from 
agents; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an OpenCode agent" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an OpenCode agent" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all OpenCode agents" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import agent to repository" +function buildZshDescribeItems(entries: CompletionEntry[]): string { + return entries.map(entry => `'${escapeSingleQuotes(entry.name)}:${escapeSingleQuotes(entry.description)}'`).join(' '); +} -# opencode tools subcommands -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from tools; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an OpenCode tool" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from tools; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an OpenCode tool" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from tools; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all OpenCode tools" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from tools; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import tool to repository" +function buildZshCompleteTypeBlock(completeType: string, indent: string): string[] { + return [ + `${indent}local -a items`, + `${indent}items=(\${(f)"\$(ais _complete ${completeType} 2>/dev/null)"})`, + `${indent}if (( \$#items )); then`, + 
`${indent} compadd "\$items[@]"`, + `${indent}fi` + ]; +} -# codex subcommands -complete -c ais -n "__fish_seen_subcommand_from codex; and not __fish_seen_subcommand_from install import rules skills" -a "install" -d "Install all Codex entries" -complete -c ais -n "__fish_seen_subcommand_from codex; and not __fish_seen_subcommand_from install import rules skills" -a "import" -d "Import entry to repository" -complete -c ais -n "__fish_seen_subcommand_from codex; and not __fish_seen_subcommand_from install import rules skills" -a "rules" -d "Manage Codex rules" -complete -c ais -n "__fish_seen_subcommand_from codex; and not __fish_seen_subcommand_from install import rules skills" -a "skills" -d "Manage Codex skills" +function buildBashScript(): string { + const lines: string[] = [ + '# ais bash completion', + '_ais_complete() {', + ' local cur="${COMP_WORDS[COMP_CWORD]}"', + ' local prev="${COMP_WORDS[COMP_CWORD-1]}"', + ' local pprev="${COMP_WORDS[COMP_CWORD-2]}"', + ' local ppprev="${COMP_WORDS[COMP_CWORD-3]}"', + '' + ]; + + for (const spec of TOOL_SPECS) { + if (spec.rootAddCompletionType) { + lines.push(` # ${spec.tool} add`); + lines.push(` if [[ "$pprev" == "${spec.tool}" && "$prev" == "add" ]]; then`); + lines.push(` COMPREPLY=( $(compgen -W "$(ais _complete ${spec.rootAddCompletionType} 2>/dev/null)" -- "$cur") )`); + lines.push(' return 0'); + lines.push(' fi'); + lines.push(''); + } + } -# codex rules subcommands -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Codex rule" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Codex rule" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" 
-d "Install all Codex rules" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" + for (const spec of TOOL_SPECS) { + if (!spec.nestedAddCompletionTypes) continue; + for (const [subcommand, completeType] of Object.entries(spec.nestedAddCompletionTypes)) { + lines.push(` # ${spec.tool} ${subcommand} add`); + lines.push(` if [[ "$ppprev" == "${spec.tool}" && "$pprev" == "${subcommand}" && "$prev" == "add" ]]; then`); + lines.push(` COMPREPLY=( $(compgen -W "$(ais _complete ${completeType} 2>/dev/null)" -- "$cur") )`); + lines.push(' return 0'); + lines.push(' fi'); + lines.push(''); + } + } -# codex skills subcommands -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Codex skill" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Codex skill" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Codex skills" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + for (const spec of TOOL_SPECS) { + for (const [nested, nestedSubcommands] of Object.entries(spec.nestedSubcommands)) { + lines.push(` # ${spec.tool} ${nested}`); + lines.push(` if [[ "$pprev" == "${spec.tool}" && "$prev" == "${nested}" ]]; then`); + lines.push(` COMPREPLY=( $(compgen -W "${quotedNames(nestedSubcommands)}" -- "$cur") )`); + lines.push(' return 0'); + lines.push(' fi'); + lines.push(''); + } + } -# gemini 
subcommands -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "install" -d "Install all Gemini entries" -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "add-all" -d "Add all Gemini entries" -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "import" -d "Import entry to repository" -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "commands" -d "Manage Gemini commands" -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "skills" -d "Manage Gemini skills" -complete -c ais -n "__fish_seen_subcommand_from gemini; and not __fish_seen_subcommand_from install add-all import commands skills agents" -a "agents" -d "Manage Gemini agents" + for (const spec of TOOL_SPECS) { + lines.push(` # ${spec.tool}`); + lines.push(` if [[ "$prev" == "${spec.tool}" ]]; then`); + lines.push(` COMPREPLY=( $(compgen -W "${quotedNames(spec.rootSubcommands)}" -- "$cur") )`); + lines.push(' return 0'); + lines.push(' fi'); + lines.push(''); + } -# gemini commands subcommands -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Gemini command" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Gemini command" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "install" 
-d "Install all Gemini commands" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from commands; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import command to repository" + lines.push(' if [[ ${COMP_CWORD} -eq 1 ]]; then'); + lines.push(` COMPREPLY=( $(compgen -W "${quotedNames(TOP_LEVEL_COMMANDS)}" -- "$cur") )`); + lines.push(' return 0'); + lines.push(' fi'); + lines.push('}'); + lines.push('complete -F _ais_complete ais'); -# gemini skills subcommands -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Gemini skill" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Gemini skill" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Gemini skills" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + return lines.join('\n'); +} -# gemini agents subcommands -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Gemini agent" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Gemini agent" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Gemini agents" -complete -c 
ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from agents; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import agent to repository" +function buildZshScript(): string { + const lines: string[] = [ + '# ais zsh completion', + '_ais() {', + ' local -a subcmds', + ' subcmds=(' + ]; -# warp subcommands -complete -c ais -n "__fish_seen_subcommand_from warp; and not __fish_seen_subcommand_from install import skills" -a "skills" -d "Manage Warp skills" -complete -c ais -n "__fish_seen_subcommand_from warp; and not __fish_seen_subcommand_from install import skills" -a "install" -d "Install all Warp entries" -complete -c ais -n "__fish_seen_subcommand_from warp; and not __fish_seen_subcommand_from install import skills" -a "import" -d "Import entry to repository" + for (const cmd of TOP_LEVEL_COMMANDS) { + lines.push(` '${escapeSingleQuotes(cmd.name)}:${escapeSingleQuotes(cmd.description)}'`); + } + lines.push(' )'); + lines.push(''); + + const variableNames: string[] = []; + for (const spec of TOOL_SPECS) { + const toolVar = `${toVarName(spec.tool)}_subcmds`; + variableNames.push(toolVar); + for (const nested of Object.keys(spec.nestedSubcommands)) { + variableNames.push(`${toVarName(spec.tool)}_${toVarName(nested)}_subcmds`); + } + } -# warp skills subcommands -complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Warp skill" -complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Warp skill" -complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Warp skills" -complete -c ais -n "__fish_seen_subcommand_from warp; and 
__fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + lines.push(` local -a ${variableNames.join(' ')}`); + for (const spec of TOOL_SPECS) { + const toolVar = `${toVarName(spec.tool)}_subcmds`; + lines.push(` ${toolVar}=(${buildZshDescribeItems(spec.rootSubcommands)})`); + for (const [nested, nestedEntries] of Object.entries(spec.nestedSubcommands)) { + const nestedVar = `${toVarName(spec.tool)}_${toVarName(nested)}_subcmds`; + lines.push(` ${nestedVar}=(${buildZshDescribeItems(nestedEntries)})`); + } + } + lines.push(''); + + lines.push(' _arguments -C \\'); + lines.push(" '1:command:->command' \\"); + lines.push(" '2:subcommand:->subcommand' \\"); + lines.push(" '3:subsubcommand:->subsubcommand' \\"); + lines.push(" '4:name:->name' \\"); + lines.push(" '*::arg:->args'"); + lines.push(''); + lines.push(' case "$state" in'); + lines.push(' command)'); + lines.push(" _describe 'command' subcmds"); + lines.push(' ;;'); + lines.push(' subcommand)'); + lines.push(' case "$words[2]" in'); + for (const spec of TOOL_SPECS) { + const toolVar = `${toVarName(spec.tool)}_subcmds`; + lines.push(` ${spec.tool})`); + lines.push(` _describe 'subcommand' ${toolVar}`); + lines.push(' ;;'); + } + lines.push(' esac'); + lines.push(' ;;'); + lines.push(' subsubcommand)'); + lines.push(' case "$words[2]" in'); + + for (const spec of TOOL_SPECS) { + const toolVar = `${toVarName(spec.tool)}_subcmds`; + lines.push(` ${spec.tool})`); + lines.push(' case "$words[3]" in'); + if (spec.rootAddCompletionType) { + lines.push(' add)'); + lines.push(...buildZshCompleteTypeBlock(spec.rootAddCompletionType, ' ')); + lines.push(' ;;'); + } + + for (const nested of Object.keys(spec.nestedSubcommands)) { + const nestedVar = `${toVarName(spec.tool)}_${toVarName(nested)}_subcmds`; + lines.push(` ${nested})`); + lines.push(` _describe 'subsubcommand' ${nestedVar}`); + lines.push(' ;;'); + } + + lines.push(' *)'); + 
lines.push(` _describe 'subsubcommand' ${toolVar}`); + lines.push(' ;;'); + lines.push(' esac'); + lines.push(' ;;'); + } -# windsurf subcommands -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add" -d "Add a Windsurf rule" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "remove" -d "Remove a Windsurf rule" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "install" -d "Install all Windsurf entries" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add-all" -d "Add all Windsurf entries" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "import" -d "Import entry to repository" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "rules" -d "Manage Windsurf rules" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "skills" -d "Manage Windsurf skills" + lines.push(' esac'); + lines.push(' ;;'); + lines.push(' name)'); + lines.push(' case "$words[2]" in'); + + for (const spec of TOOL_SPECS) { + const hasNestedCompletions = !!spec.nestedAddCompletionTypes && Object.keys(spec.nestedAddCompletionTypes).length > 0; + if (!spec.rootAddCompletionType && !hasNestedCompletions) { + continue; + } + + lines.push(` ${spec.tool})`); + lines.push(' case "$words[3]" in'); + + if (spec.rootAddCompletionType) { + lines.push(' add)'); + lines.push(...buildZshCompleteTypeBlock(spec.rootAddCompletionType, ' ')); + lines.push(' 
;;'); + } + + if (spec.nestedAddCompletionTypes) { + for (const [nested, completeType] of Object.entries(spec.nestedAddCompletionTypes)) { + lines.push(` ${nested})`); + lines.push(' case "$words[4]" in'); + lines.push(' add)'); + lines.push(...buildZshCompleteTypeBlock(completeType, ' ')); + lines.push(' ;;'); + lines.push(' esac'); + lines.push(' ;;'); + } + } + + lines.push(' esac'); + lines.push(' ;;'); + } -# windsurf rules subcommands -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Windsurf rule" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Windsurf rule" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Windsurf rules" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" + lines.push(' esac'); + lines.push(' ;;'); + lines.push(' args)'); + lines.push(' # Handle additional arguments'); + lines.push(' ;;'); + lines.push(' esac'); + lines.push('}'); + lines.push(''); + lines.push('# Only define completion if compdef is available (zsh completion initialized)'); + lines.push('command -v compdef >/dev/null 2>&1 && compdef _ais ais'); + + return lines.join('\n'); +} -# windsurf skills subcommands -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Windsurf skill" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not 
__fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Windsurf skill" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Windsurf skills" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" +function buildFishScript(): string { + const lines: string[] = [ + '# ais fish completion', + 'complete -c ais -f', + '', + '# Top-level commands' + ]; -# cline subcommands -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add" -d "Add a Cline rule" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "remove" -d "Remove a Cline rule" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "install" -d "Install all Cline entries" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "add-all" -d "Add all Cline entries" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "import" -d "Import entry to repository" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "rules" -d "Manage Cline rules" -complete -c ais -n "__fish_seen_subcommand_from cline; and not __fish_seen_subcommand_from add remove install add-all import rules skills" -a "skills" -d "Manage Cline skills" + for (const cmd of TOP_LEVEL_COMMANDS) { + lines.push(`complete -c 
ais -n "__fish_use_subcommand" -a "${cmd.name}" -d "${cmd.description}"`); + } + lines.push(''); + + for (const spec of TOOL_SPECS) { + const allRoot = quotedNames(spec.rootSubcommands); + lines.push(`# ${spec.tool} subcommands`); + for (const subcommand of spec.rootSubcommands) { + lines.push(`complete -c ais -n "__fish_seen_subcommand_from ${spec.tool}; and not __fish_seen_subcommand_from ${allRoot}" -a "${subcommand.name}" -d "${subcommand.description}"`); + } + lines.push(''); + } -# cline rules subcommands -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add a Cline rule" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cline rule" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cline rules" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import rule to repository" + for (const spec of TOOL_SPECS) { + for (const [nested, nestedEntries] of Object.entries(spec.nestedSubcommands)) { + const nestedNames = quotedNames(nestedEntries); + lines.push(`# ${spec.tool} ${nested} subcommands`); + for (const subcommand of nestedEntries) { + lines.push(`complete -c ais -n "__fish_seen_subcommand_from ${spec.tool}; and __fish_seen_subcommand_from ${nested}; and not __fish_seen_subcommand_from ${nestedNames}" -a "${subcommand.name}" -d "${subcommand.description}"`); + } + lines.push(''); + } + } -# cline skills subcommands -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove 
install import" -a "add" -d "Add a Cline skill" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove a Cline skill" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install all Cline skills" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import skill to repository" + for (const spec of TOOL_SPECS) { + if (spec.rootAddCompletionType) { + lines.push(`complete -c ais -n "__fish_seen_subcommand_from ${spec.tool}; and __fish_seen_subcommand_from add" -a "(ais _complete ${spec.rootAddCompletionType} 2>/dev/null)"`); + } + if (spec.nestedAddCompletionTypes) { + for (const [nested, completeType] of Object.entries(spec.nestedAddCompletionTypes)) { + lines.push(`complete -c ais -n "__fish_seen_subcommand_from ${spec.tool}; and __fish_seen_subcommand_from ${nested}; and __fish_seen_subcommand_from add" -a "(ais _complete ${completeType} 2>/dev/null)"`); + } + } + } -# agents-md subcommands -complete -c ais -n "__fish_seen_subcommand_from agents-md; and not __fish_seen_subcommand_from add remove install import" -a "add" -d "Add an AGENTS.md file" -complete -c ais -n "__fish_seen_subcommand_from agents-md; and not __fish_seen_subcommand_from add remove install import" -a "remove" -d "Remove an AGENTS.md file" -complete -c ais -n "__fish_seen_subcommand_from agents-md; and not __fish_seen_subcommand_from add remove install import" -a "install" -d "Install AGENTS.md" -complete -c ais -n "__fish_seen_subcommand_from agents-md; and not __fish_seen_subcommand_from add remove install import" -a "import" -d "Import AGENTS.md to repository" + return lines.join('\n'); +} -complete -c ais -n 
"__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from add" -a "(ais _complete cursor 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete cursor 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from commands; and __fish_seen_subcommand_from add" -a "(ais _complete cursor-commands 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete cursor-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cursor; and __fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete cursor-agents 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from instructions; and __fish_seen_subcommand_from add" -a "(ais _complete copilot-instructions 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete copilot-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from prompts; and __fish_seen_subcommand_from add" -a "(ais _complete copilot-prompts 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from copilot; and __fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete copilot-agents 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete claude-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete claude-agents 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from claude; 
and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete claude-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete trae-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from trae; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete trae-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete opencode-agents 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete opencode-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from commands; and __fish_seen_subcommand_from add" -a "(ais _complete opencode-commands 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from opencode; and __fish_seen_subcommand_from tools; and __fish_seen_subcommand_from add" -a "(ais _complete opencode-tools 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete codex-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from codex; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete codex-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from commands; and __fish_seen_subcommand_from add" -a "(ais _complete gemini-commands 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from gemini; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete gemini-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from gemini; and 
__fish_seen_subcommand_from agents; and __fish_seen_subcommand_from add" -a "(ais _complete gemini-agents 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from warp; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete warp-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from windsurf; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete windsurf-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from add" -a "(ais _complete cline-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from rules; and __fish_seen_subcommand_from add" -a "(ais _complete cline-rules 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from cline; and __fish_seen_subcommand_from skills; and __fish_seen_subcommand_from add" -a "(ais _complete cline-skills 2>/dev/null)" -complete -c ais -n "__fish_seen_subcommand_from agents-md; and __fish_seen_subcommand_from add" -a "(ais _complete agents-md 2>/dev/null)" -`; +export const bashScript = buildBashScript(); +export const zshScript = buildZshScript(); +export const fishScript = buildFishScript(); /** * Get the completion script for a shell diff --git a/src/index.ts b/src/index.ts index 692f415..a366c4e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1295,315 +1295,197 @@ warp const warpSkills = warp.command('skills').description('Manage Warp skills'); registerAdapterCommands({ adapter: getAdapter('warp', 'skills'), parentCommand: warpSkills, programOpts: () => program.opts() }); -// ============ Windsurf 
command group ============ -const windsurf = program - .command('windsurf') - .description('Manage Windsurf rules and skills in a project'); - -// windsurf add (default to rules) -windsurf - .command('add [alias]') - .description('Sync Windsurf rules to project (.windsurf/rules/...)') - .option('-l, --local', 'Add to ai-rules-sync.local.json (private rule)') - .option('-d, --target-dir ', 'Custom target directory for this entry') - .action(async (name, alias, options) => { - try { - const repo = await getTargetRepo(program.opts()); - const adapter = getAdapter('windsurf', 'rules'); - await handleAdd(adapter, { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, alias, { - local: options.local, - targetDir: options.targetDir - }); - } catch (error: any) { - console.error(chalk.red('Error adding Windsurf rule:'), error.message); - process.exit(1); - } - }); +interface RulesAndSkillsToolGroupOptions { + tool: 'windsurf' | 'cline'; + displayName: string; + rulesPathHint: string; + importSearchHint: string; +} -// windsurf remove (default to rules) -windsurf - .command('remove ') - .description('Remove a Windsurf rule from project') - .action(async (alias) => { - try { - const adapter = getAdapter('windsurf', 'rules'); - await handleRemove(adapter, process.cwd(), alias); - } catch (error: any) { - console.error(chalk.red('Error removing Windsurf rule:'), error.message); - process.exit(1); - } - }); +function printAddAllSummary( + result: { installed: number; skipped: number; errors: Array<{ entry: string; error: string }> }, + quiet: boolean | undefined +): void { + if (quiet) { + return; + } + + console.log(chalk.bold('\nSummary:')); + console.log(chalk.green(` Installed: ${result.installed}`)); + if (result.skipped > 0) { + console.log(chalk.yellow(` Skipped: ${result.skipped}`)); + } + if (result.errors.length > 0) { + console.log(chalk.red(` Errors: ${result.errors.length}`)); + result.errors.forEach(e => { + console.log(chalk.red(` - 
${e.entry}: ${e.error}`)); + }); + } +} -windsurf - .command('install') - .description('Install all Windsurf rules and skills from config') - .action(async () => { - try { - await installEntriesForTool(adapterRegistry.getForTool('windsurf'), process.cwd()); - } catch (error: any) { - console.error(chalk.red('Error installing Windsurf entries:'), error.message); - process.exit(1); +async function findImportAdapterForTool( + tool: 'windsurf' | 'cline', + projectPath: string, + name: string +): Promise { + const adapters = adapterRegistry.getForTool(tool); + for (const adapter of adapters) { + const targetPath = path.join(projectPath, adapter.targetDir, name); + if (await fs.pathExists(targetPath)) { + return adapter; } - }); + } + return null; +} -windsurf - .command('add-all') - .description('Add all Windsurf entries from repository') - .option('--dry-run', 'Preview without making changes') - .option('-f, --force', 'Overwrite existing entries') - .option('-i, --interactive', 'Prompt for each entry') - .option('-l, --local', 'Add to ai-rules-sync.local.json') - .option('--skip-existing', 'Skip entries already in config') - .option('--quiet', 'Minimal output') - .option('-s, --source-dir ', 'Custom source directory (can be repeated)', collect) - .action(async (options) => { - try { - const projectPath = process.cwd(); - const opts = program.opts(); - const currentRepo = await getTargetRepo(opts); - let sourceDirOverrides; - if (options.sourceDir && options.sourceDir.length > 0) { - try { - sourceDirOverrides = parseSourceDirParams(options.sourceDir, 'windsurf'); - } catch (error: any) { - console.error(chalk.red('Error parsing --source-dir:'), error.message); - process.exit(1); - } +function registerRulesAndSkillsToolGroup(config: RulesAndSkillsToolGroupOptions): void { + const { tool, displayName, rulesPathHint, importSearchHint } = config; + const group = program + .command(tool) + .description(`Manage ${displayName} rules and skills in a project`); + + group + 
.command('add [alias]') + .description(`Sync ${displayName} rules to project (${rulesPathHint}/...)`) + .option('-l, --local', 'Add to ai-rules-sync.local.json (private rule)') + .option('-d, --target-dir ', 'Custom target directory for this entry') + .action(async (name, alias, options) => { + try { + const repo = await getTargetRepo(program.opts()); + const adapter = getAdapter(tool, 'rules'); + await handleAdd(adapter, { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, alias, { + local: options.local, + targetDir: options.targetDir + }); + } catch (error: any) { + console.error(chalk.red(`Error adding ${displayName} rule:`), error.message); + process.exit(1); } - - const result = await handleAddAll( - projectPath, - currentRepo, - adapterRegistry, - { - target: opts.target, - tools: ['windsurf'], - dryRun: options.dryRun, - force: options.force, - interactive: options.interactive, - isLocal: options.local, - skipExisting: options.skipExisting, - quiet: options.quiet, - sourceDirOverrides - } - ); - - if (!options.quiet) { - console.log(chalk.bold('\nSummary:')); - console.log(chalk.green(` Installed: ${result.installed}`)); - if (result.skipped > 0) { - console.log(chalk.yellow(` Skipped: ${result.skipped}`)); - } - if (result.errors.length > 0) { - console.log(chalk.red(` Errors: ${result.errors.length}`)); - result.errors.forEach(e => { - console.log(chalk.red(` - ${e.entry}: ${e.error}`)); - }); - } + }); + + group + .command('remove ') + .description(`Remove a ${displayName} rule from project`) + .action(async (alias) => { + try { + const adapter = getAdapter(tool, 'rules'); + await handleRemove(adapter, process.cwd(), alias); + } catch (error: any) { + console.error(chalk.red(`Error removing ${displayName} rule:`), error.message); + process.exit(1); } + }); - if (result.errors.length > 0) { + group + .command('install') + .description(`Install all ${displayName} rules and skills from config`) + .action(async () => { + try { + await 
installEntriesForTool(adapterRegistry.getForTool(tool), process.cwd()); + } catch (error: any) { + console.error(chalk.red(`Error installing ${displayName} entries:`), error.message); process.exit(1); } - } catch (error: any) { - console.error(chalk.red('Error in windsurf add-all:'), error.message); - process.exit(1); - } - }); - -windsurf - .command('import ') - .description('Import Windsurf rule/skill from project to repository (auto-detects subtype)') - .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') - .option('-m, --message ', 'Custom git commit message') - .option('-f, --force', 'Overwrite if entry already exists in repository') - .option('-p, --push', 'Push to remote repository after commit') - .action(async (name, options) => { - try { - const projectPath = process.cwd(); - const repo = await getTargetRepo(program.opts()); - const windsurfAdapters = adapterRegistry.getForTool('windsurf'); - let foundAdapter: SyncAdapter | null = null; - - for (const adapter of windsurfAdapters) { - const targetPath = path.join(projectPath, adapter.targetDir, name); - if (await fs.pathExists(targetPath)) { - foundAdapter = adapter; - break; + }); + + group + .command('add-all') + .description(`Add all ${displayName} entries from repository`) + .option('--dry-run', 'Preview without making changes') + .option('-f, --force', 'Overwrite existing entries') + .option('-i, --interactive', 'Prompt for each entry') + .option('-l, --local', 'Add to ai-rules-sync.local.json') + .option('--skip-existing', 'Skip entries already in config') + .option('--quiet', 'Minimal output') + .option('-s, --source-dir ', 'Custom source directory (can be repeated)', collect) + .action(async (options) => { + try { + const projectPath = process.cwd(); + const opts = program.opts(); + const currentRepo = await getTargetRepo(opts); + let sourceDirOverrides; + + if (options.sourceDir && options.sourceDir.length > 0) { + try { + sourceDirOverrides = parseSourceDirParams(options.sourceDir, 
tool); + } catch (error: any) { + console.error(chalk.red('Error parsing --source-dir:'), error.message); + process.exit(1); + } } - } - - if (!foundAdapter) { - throw new Error(`Entry "${name}" not found in .windsurf/rules or .windsurf/skills.`); - } - - console.log(chalk.gray(`Detected ${foundAdapter.subtype}: ${name}`)); - await handleImport(foundAdapter, { projectPath, repo, isLocal: options.local || false }, name, options); - } catch (error: any) { - console.error(chalk.red('Error importing Windsurf entry:'), error.message); - process.exit(1); - } - }); -const windsurfRules = windsurf.command('rules').description('Manage Windsurf rules'); -registerAdapterCommands({ adapter: getAdapter('windsurf', 'rules'), parentCommand: windsurfRules, programOpts: () => program.opts() }); -const windsurfSkills = windsurf.command('skills').description('Manage Windsurf skills'); -registerAdapterCommands({ adapter: getAdapter('windsurf', 'skills'), parentCommand: windsurfSkills, programOpts: () => program.opts() }); - -// ============ Cline command group ============ -const cline = program - .command('cline') - .description('Manage Cline rules and skills in a project'); - -// cline add (default to rules) -cline - .command('add [alias]') - .description('Sync Cline rules to project (.clinerules/...)') - .option('-l, --local', 'Add to ai-rules-sync.local.json (private rule)') - .option('-d, --target-dir ', 'Custom target directory for this entry') - .action(async (name, alias, options) => { - try { - const repo = await getTargetRepo(program.opts()); - const adapter = getAdapter('cline', 'rules'); - await handleAdd(adapter, { projectPath: process.cwd(), repo, isLocal: options.local || false }, name, alias, { - local: options.local, - targetDir: options.targetDir - }); - } catch (error: any) { - console.error(chalk.red('Error adding Cline rule:'), error.message); - process.exit(1); - } - }); - -// cline remove (default to rules) -cline - .command('remove ') - .description('Remove a 
Cline rule from project') - .action(async (alias) => { - try { - const adapter = getAdapter('cline', 'rules'); - await handleRemove(adapter, process.cwd(), alias); - } catch (error: any) { - console.error(chalk.red('Error removing Cline rule:'), error.message); - process.exit(1); - } - }); - -cline - .command('install') - .description('Install all Cline rules and skills from config') - .action(async () => { - try { - await installEntriesForTool(adapterRegistry.getForTool('cline'), process.cwd()); - } catch (error: any) { - console.error(chalk.red('Error installing Cline entries:'), error.message); - process.exit(1); - } - }); + const result = await handleAddAll( + projectPath, + currentRepo, + adapterRegistry, + { + target: opts.target, + tools: [tool], + dryRun: options.dryRun, + force: options.force, + interactive: options.interactive, + isLocal: options.local, + skipExisting: options.skipExisting, + quiet: options.quiet, + sourceDirOverrides + } + ); -cline - .command('add-all') - .description('Add all Cline entries from repository') - .option('--dry-run', 'Preview without making changes') - .option('-f, --force', 'Overwrite existing entries') - .option('-i, --interactive', 'Prompt for each entry') - .option('-l, --local', 'Add to ai-rules-sync.local.json') - .option('--skip-existing', 'Skip entries already in config') - .option('--quiet', 'Minimal output') - .option('-s, --source-dir ', 'Custom source directory (can be repeated)', collect) - .action(async (options) => { - try { - const projectPath = process.cwd(); - const opts = program.opts(); - const currentRepo = await getTargetRepo(opts); - let sourceDirOverrides; - if (options.sourceDir && options.sourceDir.length > 0) { - try { - sourceDirOverrides = parseSourceDirParams(options.sourceDir, 'cline'); - } catch (error: any) { - console.error(chalk.red('Error parsing --source-dir:'), error.message); + printAddAllSummary(result, options.quiet); + if (result.errors.length > 0) { process.exit(1); } + } catch 
(error: any) { + console.error(chalk.red(`Error in ${tool} add-all:`), error.message); + process.exit(1); } - - const result = await handleAddAll( - projectPath, - currentRepo, - adapterRegistry, - { - target: opts.target, - tools: ['cline'], - dryRun: options.dryRun, - force: options.force, - interactive: options.interactive, - isLocal: options.local, - skipExisting: options.skipExisting, - quiet: options.quiet, - sourceDirOverrides + }); + + group + .command('import ') + .description(`Import ${displayName} rule/skill from project to repository (auto-detects subtype)`) + .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') + .option('-m, --message ', 'Custom git commit message') + .option('-f, --force', 'Overwrite if entry already exists in repository') + .option('-p, --push', 'Push to remote repository after commit') + .action(async (name, options) => { + try { + const projectPath = process.cwd(); + const repo = await getTargetRepo(program.opts()); + const foundAdapter = await findImportAdapterForTool(tool, projectPath, name); + if (!foundAdapter) { + throw new Error(`Entry "${name}" not found in ${importSearchHint}.`); } - ); - if (!options.quiet) { - console.log(chalk.bold('\nSummary:')); - console.log(chalk.green(` Installed: ${result.installed}`)); - if (result.skipped > 0) { - console.log(chalk.yellow(` Skipped: ${result.skipped}`)); - } - if (result.errors.length > 0) { - console.log(chalk.red(` Errors: ${result.errors.length}`)); - result.errors.forEach(e => { - console.log(chalk.red(` - ${e.entry}: ${e.error}`)); - }); - } - } - - if (result.errors.length > 0) { + console.log(chalk.gray(`Detected ${foundAdapter.subtype}: ${name}`)); + await handleImport(foundAdapter, { projectPath, repo, isLocal: options.local || false }, name, options); + } catch (error: any) { + console.error(chalk.red(`Error importing ${displayName} entry:`), error.message); process.exit(1); } - } catch (error: any) { - console.error(chalk.red('Error in cline add-all:'), 
error.message); - process.exit(1); - } - }); - -cline - .command('import ') - .description('Import Cline rule/skill from project to repository (auto-detects subtype)') - .option('-l, --local', 'Add to ai-rules-sync.local.json (private)') - .option('-m, --message ', 'Custom git commit message') - .option('-f, --force', 'Overwrite if entry already exists in repository') - .option('-p, --push', 'Push to remote repository after commit') - .action(async (name, options) => { - try { - const projectPath = process.cwd(); - const repo = await getTargetRepo(program.opts()); - const clineAdapters = adapterRegistry.getForTool('cline'); - let foundAdapter: SyncAdapter | null = null; + }); - for (const adapter of clineAdapters) { - const targetPath = path.join(projectPath, adapter.targetDir, name); - if (await fs.pathExists(targetPath)) { - foundAdapter = adapter; - break; - } - } + const rules = group.command('rules').description(`Manage ${displayName} rules`); + registerAdapterCommands({ adapter: getAdapter(tool, 'rules'), parentCommand: rules, programOpts: () => program.opts() }); - if (!foundAdapter) { - throw new Error(`Entry "${name}" not found in .clinerules or .cline/skills.`); - } + const skills = group.command('skills').description(`Manage ${displayName} skills`); + registerAdapterCommands({ adapter: getAdapter(tool, 'skills'), parentCommand: skills, programOpts: () => program.opts() }); +} - console.log(chalk.gray(`Detected ${foundAdapter.subtype}: ${name}`)); - await handleImport(foundAdapter, { projectPath, repo, isLocal: options.local || false }, name, options); - } catch (error: any) { - console.error(chalk.red('Error importing Cline entry:'), error.message); - process.exit(1); - } - }); +// ============ Windsurf / Cline command groups ============ +registerRulesAndSkillsToolGroup({ + tool: 'windsurf', + displayName: 'Windsurf', + rulesPathHint: '.windsurf/rules', + importSearchHint: '.windsurf/rules or .windsurf/skills' +}); -const clineRules = 
cline.command('rules').description('Manage Cline rules'); -registerAdapterCommands({ adapter: getAdapter('cline', 'rules'), parentCommand: clineRules, programOpts: () => program.opts() }); -const clineSkills = cline.command('skills').description('Manage Cline skills'); -registerAdapterCommands({ adapter: getAdapter('cline', 'skills'), parentCommand: clineSkills, programOpts: () => program.opts() }); +registerRulesAndSkillsToolGroup({ + tool: 'cline', + displayName: 'Cline', + rulesPathHint: '.clinerules', + importSearchHint: '.clinerules or .cline/skills' +}); // ============ Git command ============ program diff --git a/src/project-config.ts b/src/project-config.ts index 6a117eb..600b389 100644 --- a/src/project-config.ts +++ b/src/project-config.ts @@ -9,6 +9,80 @@ const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; const LEGACY_CONFIG_FILENAME = 'cursor-rules.json'; const LEGACY_LOCAL_CONFIG_FILENAME = 'cursor-rules.local.json'; +const REPO_SOURCE_PATHS = [ + ['cursor', 'rules'], + ['cursor', 'commands'], + ['cursor', 'skills'], + ['cursor', 'agents'], + ['copilot', 'instructions'], + ['copilot', 'skills'], + ['copilot', 'prompts'], + ['copilot', 'agents'], + ['claude', 'skills'], + ['claude', 'agents'], + ['claude', 'rules'], + ['claude', 'md'], + ['trae', 'rules'], + ['trae', 'skills'], + ['opencode', 'agents'], + ['opencode', 'skills'], + ['opencode', 'commands'], + ['opencode', 'tools'], + ['codex', 'rules'], + ['codex', 'skills'], + ['codex', 'md'], + ['gemini', 'commands'], + ['gemini', 'skills'], + ['gemini', 'agents'], + ['gemini', 'md'], + ['warp', 'skills'], + ['windsurf', 'rules'], + ['windsurf', 'skills'], + ['cline', 'rules'], + ['cline', 'skills'], + ['agentsMd', 'file'] +] as const; + +function readNestedStringValue(source: unknown, tool: string, subtype: string): string | undefined { + if (!source || typeof source !== 'object') { + return undefined; + } + + const toolConfig = (source as Record)[tool]; + if (!toolConfig || typeof toolConfig 
!== 'object') { + return undefined; + } + + const value = (toolConfig as Record)[subtype]; + return typeof value === 'string' ? value : undefined; +} + +function writeNestedStringValue(target: RepoSourceConfig, tool: string, subtype: string, value: string): void { + const mutable = target as Record; + const existingToolConfig = mutable[tool]; + const toolConfig = + existingToolConfig && typeof existingToolConfig === 'object' + ? existingToolConfig as Record + : {}; + toolConfig[subtype] = value; + mutable[tool] = toolConfig; +} + +function buildRepoSourceFromNestedStrings(source: unknown, rootPath?: string): { hasAny: boolean; config: RepoSourceConfig } { + const config: RepoSourceConfig = { rootPath }; + let hasAny = false; + + for (const [tool, subtype] of REPO_SOURCE_PATHS) { + const value = readNestedStringValue(source, tool, subtype); + if (value !== undefined) { + hasAny = true; + writeNestedStringValue(config, tool, subtype, value); + } + } + + return { hasAny, config }; +} + /** * Extended rule entry with optional targetDir */ @@ -230,11 +304,6 @@ export interface RepoSourceConfig { agents?: string; md?: string; }; - gemini?: { - commands?: string; - skills?: string; - agents?: string; - }; warp?: { skills?: string; }; @@ -327,11 +396,6 @@ function mergeCombined(main: ProjectConfig, local: ProjectConfig): ProjectConfig agents: { ...(main.gemini?.agents || {}), ...(local.gemini?.agents || {}) }, md: { ...(main.gemini?.md || {}), ...(local.gemini?.md || {}) } }, - gemini: { - commands: { ...(main.gemini?.commands || {}), ...(local.gemini?.commands || {}) }, - skills: { ...(main.gemini?.skills || {}), ...(local.gemini?.skills || {}) }, - agents: { ...(main.gemini?.agents || {}), ...(local.gemini?.agents || {}) } - }, warp: { skills: { ...(main.warp?.skills || {}), ...(local.warp?.skills || {}) } }, @@ -365,141 +429,13 @@ export async function getRepoSourceConfig(projectPath: string): Promise. values are strings (source dirs). 
+ const { hasAny, config: legacyRepoConfig } = buildRepoSourceFromNestedStrings(config, config.rootPath); + if (hasAny) { + return legacyRepoConfig; } // Not a rules repo config (no sourceDir, no string values) @@ -524,111 +460,18 @@ export function getSourceDir( globalOverride?: SourceDirConfig ): string { const rootPath = repoConfig.rootPath || ''; - let toolDir: string | undefined; // 1. Check globalOverride first (CLI or global config - highest priority) if (globalOverride) { - const toolConfig = (globalOverride as any)[tool]; - if (toolConfig && toolConfig[subtype]) { + const overrideDir = readNestedStringValue(globalOverride, tool, subtype); + if (overrideDir !== undefined) { // globalOverride paths are relative to repo root, so no rootPath prefix - return toolConfig[subtype]; + return overrideDir; } } // 2. Check repoConfig (from repo's ai-rules-sync.json) - if (tool === 'cursor') { - if (subtype === 'rules') { - toolDir = repoConfig.cursor?.rules; - } else if (subtype === 'commands') { - toolDir = repoConfig.cursor?.commands; - } else if (subtype === 'skills') { - toolDir = repoConfig.cursor?.skills; - } else if (subtype === 'agents') { - toolDir = repoConfig.cursor?.agents; - } - } else if (tool === 'copilot') { - if (subtype === 'instructions') { - toolDir = repoConfig.copilot?.instructions; - } else if (subtype === 'skills') { - toolDir = repoConfig.copilot?.skills; - } else if (subtype === 'prompts') { - toolDir = (repoConfig.copilot as any)?.prompts; - } else if (subtype === 'agents') { - toolDir = (repoConfig.copilot as any)?.agents; - } - } else if (tool === 'claude') { - if (subtype === 'skills') { - toolDir = repoConfig.claude?.skills; - } else if (subtype === 'agents') { - toolDir = repoConfig.claude?.agents; - } else if (subtype === 'rules') { - toolDir = repoConfig.claude?.rules; - } else if (subtype === 'md') { - toolDir = (repoConfig.claude as any)?.md; - } - } else if (tool === 'trae') { - if (subtype === 'rules') { - toolDir = 
repoConfig.trae?.rules; - } else if (subtype === 'skills') { - toolDir = repoConfig.trae?.skills; - } - } else if (tool === 'opencode') { - if (subtype === 'agents') { - toolDir = repoConfig.opencode?.agents; - } else if (subtype === 'skills') { - toolDir = repoConfig.opencode?.skills; - } else if (subtype === 'commands') { - toolDir = repoConfig.opencode?.commands; - } else if (subtype === 'tools') { - toolDir = repoConfig.opencode?.tools; - } - } else if (tool === 'codex') { - if (subtype === 'rules') { - toolDir = repoConfig.codex?.rules; - } else if (subtype === 'skills') { - toolDir = repoConfig.codex?.skills; - } else if (subtype === 'md') { - toolDir = repoConfig.codex?.md; - } - } else if (tool === 'gemini') { - if (subtype === 'commands') { - toolDir = repoConfig.gemini?.commands; - } else if (subtype === 'skills') { - toolDir = repoConfig.gemini?.skills; - } else if (subtype === 'agents') { - toolDir = repoConfig.gemini?.agents; - } else if (subtype === 'md') { - toolDir = repoConfig.gemini?.md; - } - } else if (tool === 'gemini') { - if (subtype === 'commands') { - toolDir = repoConfig.gemini?.commands; - } else if (subtype === 'skills') { - toolDir = repoConfig.gemini?.skills; - } else if (subtype === 'agents') { - toolDir = repoConfig.gemini?.agents; - } - } else if (tool === 'warp') { - if (subtype === 'skills') { - toolDir = repoConfig.warp?.skills; - } - } else if (tool === 'windsurf') { - if (subtype === 'rules') { - toolDir = repoConfig.windsurf?.rules; - } else if (subtype === 'skills') { - toolDir = repoConfig.windsurf?.skills; - } - } else if (tool === 'cline') { - if (subtype === 'rules') { - toolDir = repoConfig.cline?.rules; - } else if (subtype === 'skills') { - toolDir = repoConfig.cline?.skills; - } - } else if (tool === 'agents-md') { - if (subtype === 'file') { - toolDir = repoConfig.agentsMd?.file; - } - } + const toolDir = readNestedStringValue(repoConfig, tool, subtype); // 3. Apply rootPath and default const dir = toolDir ?? 
defaultDir; From 5221d669a9aa914060e909fbd62113ffad7e0291 Mon Sep 17 00:00:00 2001 From: lbb00 Date: Sat, 28 Feb 2026 16:51:41 +0800 Subject: [PATCH 04/12] chore: release v0.6.0 Co-Authored-By: Claude Sonnet 4.6 --- CHANGELOG.md | 12 ++++++++++++ package.json | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d05a3c5..27afd47 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # ai-rules-sync +## 0.6.0 + +### Minor Changes + +- Add Windsurf, Cline, user-level sync, and Cursor AI 9384 support + + - Add Windsurf and Cline AI adapter support (rules and skills) + - Add user-level sync support (`--user` flag) for all tools, syncing to home directory + - Add user config management via `~/.config/ai-rules-sync/user.json` + - Add Cursor AI 9384 support + - Add Homebrew tap support for `brew install ais` + ## 0.5.0 ### Minor Changes diff --git a/package.json b/package.json index 1322add..6a99f36 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ai-rules-sync", - "version": "0.5.0", + "version": "0.6.0", "description": "Synchronize, manage, and share your agent rules (Cursor rules and commands, Copilot instructions) with ease.", "type": "module", "bin": { From e70c455fc4fd49ca27f8f62a6d3d12b34b88fc0b Mon Sep 17 00:00:00 2001 From: lbb Date: Mon, 2 Mar 2026 18:55:07 +0800 Subject: [PATCH 05/12] Feat/cli command ux optimization (#27) * feat(cli): add ls and rm command aliases Co-authored-by: lbb * feat(cli): add status/search and dry-run query enhancements Co-authored-by: lbb * feat(completion): expose aliases and new query commands Co-authored-by: lbb * docs: document command aliases and query enhancements Co-authored-by: lbb * test(cli): cover aliases, json output, and dry-run flows Co-authored-by: lbb * docs: promote rm as default command style Co-authored-by: lbb --------- Co-authored-by: Cursor Agent --- KNOWLEDGE_BASE.md | 28 +- README.md | 88 +++++-- README_ZH.md | 88 +++++-- 
src/__tests__/completion-scripts.test.ts | 17 ++ src/__tests__/config-commands.test.ts | 90 +++++++ src/__tests__/handlers-dry-run.test.ts | 34 +++ src/cli/register.ts | 9 +- src/commands/config.ts | 36 ++- src/commands/handlers.ts | 214 ++++++++++++++- src/completion/scripts.ts | 48 +++- src/index.ts | 321 +++++++++++++++++++++-- 11 files changed, 876 insertions(+), 97 deletions(-) create mode 100644 src/__tests__/config-commands.test.ts create mode 100644 src/__tests__/handlers-dry-run.test.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index 094544c..a844263 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -167,7 +167,7 @@ function registerAdapterCommands(options: RegisterCommandsOptions): void { ```typescript // All handlers work with any adapter async function handleAdd(adapter, ctx, name, alias?): Promise -async function handleRemove(adapter, projectPath, alias): Promise +async function handleRemove(adapter, projectPath, alias, isUser?, options?): Promise async function handleImport(adapter, ctx, name, options): Promise ``` @@ -325,8 +325,12 @@ interface ProjectConfig { ### 1. Repository Management - **Use**: `ais use ` - Configure or switch the active rules repository. -- **List**: `ais list` - Show configured repositories and the active one. +- **List**: `ais ls` (alias: `ais list`) - Show configured repositories and the active one. +- **Status**: `ais status` - Show repository availability and project config summary. +- **Search**: `ais search [query]` - Search available entries in the current repository. - **Git Proxy**: `ais git ` - Run git commands directly in the active rules repository context. +- **Script-friendly output**: Query commands support `--json` (`list/ls`, `status`, `search`, `config repo list/ls`, `config repo show`). +- **Safe previews**: `--dry-run` supported for destructive commands (`remove/rm`, `import`). ### 2. 
Cursor Rule Synchronization - **Syntax**: `ais cursor add [alias]` or `ais cursor rules add [alias]` @@ -969,6 +973,26 @@ ais user install ## Recent Changes +### Command UX Enhancements (2026-03) + +- Added Linux-style aliases while keeping backward compatibility: + - `ais list` → `ais ls` + - `ais remove` → `ais rm` + - Adapter subcommands also support `rm` (for example `ais cursor rules rm react`). +- Added query commands: + - `ais status` for repository/project snapshot output + - `ais search [query]` to discover available entries in the active repository +- Added machine-readable query output with `--json`: + - `ais list/ls --json` + - `ais status --json` + - `ais search --json` + - `ais config repo list/ls --json` + - `ais config repo show --json` +- Added `--dry-run` support for destructive operations: + - remove/rm commands + - import commands +- Updated shell completion metadata to include aliases and new query commands across bash/zsh/fish. + ### Windsurf & Cline Support (2026-02) - Added **Windsurf support**: rules (`.windsurf/rules`, `.md`) and skills (`.windsurf/skills`) with full CLI/completion integration diff --git a/README.md b/README.md index 9bc0de6..9f1e13e 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,7 @@ Stop copying `.mdc` files around. Manage your rules in Git repositories and sync - [Supported Tools](#supported-tools) - [Quick Start](#quick-start) - [Core Concepts](#core-concepts) +- [Recommended Command Style](#recommended-command-style) - [Basic Usage](#basic-usage) - [Tool-Specific Guides](#tool-specific-guides) - [Advanced Features](#advanced-features) @@ -213,7 +214,9 @@ my-rules-repo/ # Set current repository ais use https://github.com/your-org/rules-repo.git -# List all repositories +# List all repositories (Linux-style alias) +ais ls +# Alias kept for compatibility: ais list # Switch between repositories @@ -262,6 +265,37 @@ Read `ai-rules-sync.json` and recreate all symlinks. 
Use this after cloning a pr } ``` +## Recommended Command Style + +Use Linux-style commands as the default workflow (`remove` / `list` remain fully compatible): + +```bash +# Recommended +ais ls +ais rm old-rule +ais cursor rules rm react + +# Legacy-compatible forms (still supported) +ais list +ais remove old-rule +ais cursor rules remove react + +# Query commands +ais status +ais search react + +# Script/CI JSON output +ais ls --json +ais status --json +ais search react --json +ais config repo ls --json +ais config repo show company-rules --json + +# Safe preview before destructive operations +ais cursor rules rm react --dry-run +ais cursor rules import my-rule --dry-run +``` + --- ## Basic Usage @@ -360,11 +394,11 @@ ais cursor rules import my-rule --force ```bash # Remove a rule (deletes symlink and config entry) -ais cursor remove react +ais cursor rm react # Remove from specific tool -ais cursor commands remove deploy -ais cursor skills remove code-review +ais cursor commands rm deploy +ais cursor skills rm code-review ``` ### Install from Configuration @@ -407,7 +441,7 @@ ais cursor add readme.md ais cursor add my-rule-dir # Remove -ais cursor remove react +ais cursor rm react ``` #### Commands @@ -417,7 +451,7 @@ ais cursor remove react ais cursor commands add deploy-docs # Remove command -ais cursor commands remove deploy-docs +ais cursor commands rm deploy-docs ``` #### Skills @@ -427,7 +461,7 @@ ais cursor commands remove deploy-docs ais cursor skills add code-review # Remove skill -ais cursor skills remove code-review +ais cursor skills rm code-review ``` #### Subagents @@ -437,7 +471,7 @@ ais cursor skills remove code-review ais cursor agents add code-analyzer # Remove subagent -ais cursor agents remove code-analyzer +ais cursor agents rm code-analyzer ``` ### GitHub Copilot @@ -460,10 +494,10 @@ ais copilot skills add web-scraping ais copilot agents add code-reviewer # Remove -ais copilot instructions remove coding-style -ais copilot prompts remove 
generate-tests -ais copilot skills remove web-scraping -ais copilot agents remove code-reviewer +ais copilot instructions rm coding-style +ais copilot prompts rm generate-tests +ais copilot skills rm web-scraping +ais copilot agents rm code-reviewer ``` ### Claude Code @@ -486,10 +520,10 @@ ais claude md add CLAUDE # → .claude/CLAUDE.md (project) ais claude install # Remove -ais claude rules remove general -ais claude skills remove code-review -ais claude agents remove debugger -ais claude md remove CLAUDE --user +ais claude rules rm general +ais claude skills rm code-review +ais claude agents rm debugger +ais claude md rm CLAUDE --user ``` ### Trae @@ -502,8 +536,8 @@ ais trae rules add project-rules ais trae skills add adapter-builder # Remove -ais trae rules remove project-rules -ais trae skills remove adapter-builder +ais trae rules rm project-rules +ais trae skills rm adapter-builder ``` ### OpenCode @@ -522,7 +556,7 @@ ais opencode commands add build-optimizer ais opencode tools add project-analyzer # Remove -ais opencode agents remove code-reviewer +ais opencode agents rm code-reviewer ``` ### Codex @@ -545,7 +579,7 @@ ais codex rules import my-sandbox-rules ais codex skills import my-helper-skill # Remove -ais codex rules remove default +ais codex rules rm default ``` **Note:** Codex skills use `.agents/skills/` (not `.codex/skills/`) per OpenAI documentation. @@ -597,7 +631,7 @@ ais agents-md add frontend fe-agents ais agents-md add backend be-agents # Remove -ais agents-md remove fe-agents +ais agents-md rm fe-agents ``` ### Warp @@ -614,14 +648,14 @@ ais agents-md add . ais agents-md add src # Remove -ais agents-md remove . +ais agents-md rm . 
``` #### Skills ```bash ais warp skills add my-skill -ais warp skills remove my-skill +ais warp skills rm my-skill ais warp skills install ``` @@ -635,7 +669,7 @@ ais windsurf add project-style ais windsurf skills add deploy-staging # Remove -ais windsurf remove project-style +ais windsurf rm project-style # Install all ais windsurf install @@ -653,7 +687,7 @@ ais cline add coding ais cline skills add release-checklist # Remove -ais cline remove coding +ais cline rm coding # Install all ais cline install @@ -680,7 +714,7 @@ ais cursor add my-utils -t personal-rules **View current repository:** ```bash -ais list +ais ls # * company-rules (current) # personal-rules # community-rules @@ -1130,7 +1164,7 @@ ais cursor install ```bash # List repositories -ais list +ais ls # Set repository ais use diff --git a/README_ZH.md b/README_ZH.md index 3ce5d89..a325d0e 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -21,6 +21,7 @@ - [支持的工具](#支持的工具) - [快速开始](#快速开始) - [核心概念](#核心概念) +- [推荐命令风格](#推荐命令风格) - [基础使用](#基础使用) - [各工具使用指南](#各工具使用指南) - [高级功能](#高级功能) @@ -213,7 +214,9 @@ my-rules-repo/ # 设置当前仓库 ais use https://github.com/your-org/rules-repo.git -# 列出所有仓库 +# 列出所有仓库(Linux 风格别名) +ais ls +# 兼容保留: ais list # 在仓库之间切换 @@ -262,6 +265,37 @@ ais use personal-rules } ``` +## 推荐命令风格 + +日常使用请优先采用 Linux 风格命令(`remove` / `list` 仍完整兼容): + +```bash +# 推荐写法 +ais ls +ais rm old-rule +ais cursor rules rm react + +# 兼容写法(仍可用) +ais list +ais remove old-rule +ais cursor rules remove react + +# 查询命令 +ais status +ais search react + +# 脚本/CI 的 JSON 输出 +ais ls --json +ais status --json +ais search react --json +ais config repo ls --json +ais config repo show company-rules --json + +# 破坏性操作前先预览 +ais cursor rules rm react --dry-run +ais cursor rules import my-rule --dry-run +``` + --- ## 基础使用 @@ -360,11 +394,11 @@ ais cursor rules import my-rule --force ```bash # 移除规则(删除软链接和配置条目) -ais cursor remove react +ais cursor rm react # 从特定工具移除 -ais cursor commands remove deploy -ais cursor skills remove code-review 
+ais cursor commands rm deploy +ais cursor skills rm code-review ``` ### 从配置安装 @@ -407,7 +441,7 @@ ais cursor add readme.md ais cursor add my-rule-dir # 移除 -ais cursor remove react +ais cursor rm react ``` #### 命令 @@ -417,7 +451,7 @@ ais cursor remove react ais cursor commands add deploy-docs # 移除命令 -ais cursor commands remove deploy-docs +ais cursor commands rm deploy-docs ``` #### 技能 @@ -427,7 +461,7 @@ ais cursor commands remove deploy-docs ais cursor skills add code-review # 移除技能 -ais cursor skills remove code-review +ais cursor skills rm code-review ``` #### Subagents @@ -437,7 +471,7 @@ ais cursor skills remove code-review ais cursor agents add code-analyzer # 移除 subagent -ais cursor agents remove code-analyzer +ais cursor agents rm code-analyzer ``` ### GitHub Copilot @@ -460,10 +494,10 @@ ais copilot skills add web-scraping ais copilot agents add code-reviewer # 移除 -ais copilot instructions remove coding-style -ais copilot prompts remove generate-tests -ais copilot skills remove web-scraping -ais copilot agents remove code-reviewer +ais copilot instructions rm coding-style +ais copilot prompts rm generate-tests +ais copilot skills rm web-scraping +ais copilot agents rm code-reviewer ``` ### Claude Code @@ -486,10 +520,10 @@ ais claude md add CLAUDE # → .claude/CLAUDE.md(项目级) ais claude install # 移除 -ais claude rules remove general -ais claude skills remove code-review -ais claude agents remove debugger -ais claude md remove CLAUDE --user +ais claude rules rm general +ais claude skills rm code-review +ais claude agents rm debugger +ais claude md rm CLAUDE --user ``` ### Trae @@ -502,8 +536,8 @@ ais trae rules add project-rules ais trae skills add adapter-builder # 移除 -ais trae rules remove project-rules -ais trae skills remove adapter-builder +ais trae rules rm project-rules +ais trae skills rm adapter-builder ``` ### OpenCode @@ -522,7 +556,7 @@ ais opencode commands add build-optimizer ais opencode tools add project-analyzer # 移除 -ais opencode agents 
remove code-reviewer +ais opencode agents rm code-reviewer ``` ### Codex @@ -545,7 +579,7 @@ ais codex rules import my-sandbox-rules ais codex skills import my-helper-skill # 移除 -ais codex rules remove default +ais codex rules rm default ``` **注意:** Codex 技能使用 `.agents/skills/` 目录(而非 `.codex/skills/`),这是按照 OpenAI 文档的规定。 @@ -597,7 +631,7 @@ ais agents-md add frontend fe-agents ais agents-md add backend be-agents # 移除 -ais agents-md remove fe-agents +ais agents-md rm fe-agents ``` ### Warp @@ -614,14 +648,14 @@ ais agents-md add . ais agents-md add src # 移除 -ais agents-md remove . +ais agents-md rm . ``` #### 技能(Skills) ```bash ais warp skills add my-skill -ais warp skills remove my-skill +ais warp skills rm my-skill ais warp skills install ``` @@ -635,7 +669,7 @@ ais windsurf add project-style ais windsurf skills add deploy-staging # 移除 -ais windsurf remove project-style +ais windsurf rm project-style # 安装全部 ais windsurf install @@ -653,7 +687,7 @@ ais cline add coding ais cline skills add release-checklist # 移除 -ais cline remove coding +ais cline rm coding # 安装全部 ais cline install @@ -680,7 +714,7 @@ ais cursor add my-utils -t personal-rules **查看当前仓库:** ```bash -ais list +ais ls # * company-rules (current) # personal-rules # community-rules @@ -1130,7 +1164,7 @@ ais cursor install ```bash # 列出仓库 -ais list +ais ls # 设置仓库 ais use diff --git a/src/__tests__/completion-scripts.test.ts b/src/__tests__/completion-scripts.test.ts index c6a7a51..e28e2ea 100644 --- a/src/__tests__/completion-scripts.test.ts +++ b/src/__tests__/completion-scripts.test.ts @@ -23,6 +23,23 @@ describe('completion scripts metadata generation', () => { expect(fishScript).toContain('ais _complete cline-skills'); }); + it('should include linux-style aliases and new query commands', () => { + expect(bashScript).toContain('list ls'); + expect(bashScript).toContain('remove rm'); + expect(bashScript).toContain('status'); + expect(bashScript).toContain('search'); + + 
expect(zshScript).toContain("'ls:Alias for list'"); + expect(zshScript).toContain("'rm:Alias for remove'"); + expect(zshScript).toContain("'status:Show repository and config status'"); + expect(zshScript).toContain("'search:Search entries in repository'"); + + expect(fishScript).toContain('-a "ls" -d "Alias for list"'); + expect(fishScript).toContain('-a "rm" -d "Alias for remove"'); + expect(fishScript).toContain('-a "status" -d "Show repository and config status"'); + expect(fishScript).toContain('-a "search" -d "Search entries in repository"'); + }); + it('should return trimmed completion scripts for each shell', () => { expect(getCompletionScript('bash')).toBe(bashScript.trim()); expect(getCompletionScript('zsh')).toBe(zshScript.trim()); diff --git a/src/__tests__/config-commands.test.ts b/src/__tests__/config-commands.test.ts new file mode 100644 index 0000000..731ecfc --- /dev/null +++ b/src/__tests__/config-commands.test.ts @@ -0,0 +1,90 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { listRepos, showRepoConfig } from '../commands/config.js'; +import { getConfig } from '../config.js'; + +vi.mock('../config.js', () => ({ + getConfig: vi.fn(), + setConfig: vi.fn(), + getUserConfigPath: vi.fn() +})); + +describe('config command query output', () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + it('should output JSON for listRepos with current repo marker', async () => { + vi.mocked(getConfig).mockResolvedValue({ + currentRepo: 'company-rules', + repos: { + 'company-rules': { + name: 'company-rules', + url: 'https://example.com/company.git', + path: '/tmp/company-rules' + }, + 'personal-rules': { + name: 'personal-rules', + url: 'https://example.com/personal.git', + path: '/tmp/personal-rules' + } + } + }); + + const logSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + await listRepos({ json: true }); + + expect(logSpy).toHaveBeenCalledTimes(1); + const payload = JSON.parse(logSpy.mock.calls[0][0] as string); + 
expect(payload.currentRepo).toBe('company-rules'); + expect(payload.repositories).toHaveLength(2); + expect(payload.repositories[0]).toMatchObject({ + name: 'company-rules', + isCurrent: true + }); + expect(payload.repositories[1]).toMatchObject({ + name: 'personal-rules', + isCurrent: false + }); + }); + + it('should output JSON for showRepoConfig', async () => { + vi.mocked(getConfig).mockResolvedValue({ + currentRepo: 'company-rules', + repos: { + 'company-rules': { + name: 'company-rules', + url: 'https://example.com/company.git', + path: '/tmp/company-rules', + sourceDir: { + cursor: { rules: '.cursor/rules' } + } + } + } + }); + + const logSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + await showRepoConfig('company-rules', { json: true }); + + expect(logSpy).toHaveBeenCalledTimes(1); + const payload = JSON.parse(logSpy.mock.calls[0][0] as string); + expect(payload).toMatchObject({ + name: 'company-rules', + url: 'https://example.com/company.git', + path: '/tmp/company-rules', + sourceDir: { + cursor: { rules: '.cursor/rules' } + } + }); + }); + + it('should throw for unknown repo in showRepoConfig', async () => { + vi.mocked(getConfig).mockResolvedValue({ + currentRepo: undefined, + repos: {} + }); + + await expect(showRepoConfig('missing', { json: true })).rejects.toThrow( + 'Repository "missing" not found' + ); + }); +}); diff --git a/src/__tests__/handlers-dry-run.test.ts b/src/__tests__/handlers-dry-run.test.ts new file mode 100644 index 0000000..c6faafe --- /dev/null +++ b/src/__tests__/handlers-dry-run.test.ts @@ -0,0 +1,34 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs-extra'; +import { describe, expect, it, vi } from 'vitest'; +import { handleRemove } from '../commands/handlers.js'; +import { SyncAdapter } from '../adapters/types.js'; + +describe('handleRemove dry-run', () => { + it('should preview without unlinking or mutating config', async () => { + const projectPath = await fs.mkdtemp(path.join(os.tmpdir(), 
'ais-remove-dry-run-')); + await fs.ensureDir(path.join(projectPath, '.cursor', 'rules')); + + const adapter: SyncAdapter = { + name: 'cursor-rules', + tool: 'cursor', + subtype: 'rules', + configPath: ['cursor', 'rules'], + defaultSourceDir: '.cursor/rules', + targetDir: '.cursor/rules', + mode: 'hybrid', + hybridFileSuffixes: ['.md', '.mdc'], + addDependency: vi.fn(async () => ({ migrated: false })), + removeDependency: vi.fn(async () => ({ removedFrom: [], migrated: false })), + link: vi.fn(async () => ({ sourceName: 'rule', targetName: 'rule', linked: true })), + unlink: vi.fn(async () => {}) + }; + + const result = await handleRemove(adapter, projectPath, 'react', false, { dryRun: true }); + + expect(result).toEqual({ removedFrom: [], migrated: false }); + expect(adapter.unlink).not.toHaveBeenCalled(); + expect(adapter.removeDependency).not.toHaveBeenCalled(); + }); +}); diff --git a/src/cli/register.ts b/src/cli/register.ts index f3c637d..cbe7967 100644 --- a/src/cli/register.ts +++ b/src/cli/register.ts @@ -69,13 +69,15 @@ export function registerAdapterCommands(options: RegisterCommandsOptions): void // Remove command parentCommand .command('remove ') + .alias('rm') .description(`Remove a ${adapter.tool} ${entityName} from project`) .option('-u, --user', 'Remove from user config') - .action(async (alias: string, cmdOptions: { user?: boolean }) => { + .option('--dry-run', 'Preview changes without applying') + .action(async (alias: string, cmdOptions: { user?: boolean; dryRun?: boolean }) => { try { const isUser = cmdOptions.user || false; const projectPath = isUser ? 
os.homedir() : process.cwd(); - await handleRemove(adapter, projectPath, alias, isUser); + await handleRemove(adapter, projectPath, alias, isUser, { dryRun: cmdOptions.dryRun }); } catch (error: any) { console.error(chalk.red(`Error removing ${adapter.tool} ${entityName}:`), error.message); process.exit(1); @@ -174,7 +176,8 @@ export function registerAdapterCommands(options: RegisterCommandsOptions): void .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') - .action(async (name: string, cmdOptions: ImportCommandOptions & { local?: boolean }) => { + .option('--dry-run', 'Preview changes without applying') + .action(async (name: string, cmdOptions: ImportCommandOptions & { local?: boolean; dryRun?: boolean }) => { try { const repo = await getTargetRepo(programOpts()); await handleImport(adapter, { diff --git a/src/commands/config.ts b/src/commands/config.ts index 21a49b4..c93277b 100644 --- a/src/commands/config.ts +++ b/src/commands/config.ts @@ -9,6 +9,10 @@ import chalk from 'chalk'; import { getConfig, setConfig, getUserConfigPath } from '../config.js'; import { SourceDirConfig } from '../project-config.js'; +export interface QueryOutputOptions { + json?: boolean; +} + /** * Set sourceDir for a repository * @param repoName - Repository name @@ -84,7 +88,7 @@ export async function clearRepoSourceDir( * Show repository configuration * @param repoName - Repository name */ -export async function showRepoConfig(repoName: string): Promise { +export async function showRepoConfig(repoName: string, options?: QueryOutputOptions): Promise { const config = await getConfig(); const repo = config.repos[repoName]; @@ -92,6 +96,16 @@ export async function showRepoConfig(repoName: string): Promise { throw new Error(`Repository "${repoName}" not found`); } + if (options?.json) { + console.log(JSON.stringify({ + name: repoName, + url: 
repo.url, + path: repo.path, + sourceDir: repo.sourceDir + }, null, 2)); + return; + } + console.log(chalk.bold(`\nRepository: ${repoName}`)); console.log(chalk.gray('─'.repeat(50))); console.log(JSON.stringify(repo, null, 2)); @@ -100,11 +114,29 @@ export async function showRepoConfig(repoName: string): Promise { /** * List all repositories */ -export async function listRepos(): Promise { +export async function listRepos(options?: QueryOutputOptions): Promise { const config = await getConfig(); const repos = config.repos || {}; const names = Object.keys(repos); + if (options?.json) { + const repositories = names.map(name => { + const repo = repos[name]; + return { + name, + url: repo.url, + path: repo.path, + sourceDir: repo.sourceDir, + isCurrent: name === config.currentRepo + }; + }); + console.log(JSON.stringify({ + currentRepo: config.currentRepo || null, + repositories + }, null, 2)); + return; + } + if (names.length === 0) { console.log(chalk.yellow('No repositories configured.')); return; diff --git a/src/commands/handlers.ts b/src/commands/handlers.ts index 459bfc9..b2c0973 100644 --- a/src/commands/handlers.ts +++ b/src/commands/handlers.ts @@ -5,11 +5,11 @@ import path from 'path'; import chalk from 'chalk'; import fs from 'fs-extra'; -import { RepoConfig } from '../config.js'; +import { RepoConfig, getUserConfigPath, getUserProjectConfig } from '../config.js'; import { SyncAdapter } from '../adapters/types.js'; import { linkEntry, unlinkEntry, importEntry, ImportOptions } from '../sync-engine.js'; import { addIgnoreEntry } from '../utils.js'; -import { addUserDependency, removeUserDependency } from '../project-config.js'; +import { addUserDependency, removeUserDependency, getCombinedProjectConfig, getRepoSourceConfig, getSourceDir, getTargetDir } from '../project-config.js'; /** * Context for command execution @@ -164,6 +164,122 @@ export interface RemoveResult { migrated: boolean; } +export interface RemoveCommandOptions { + dryRun?: boolean; +} + 
+export interface ImportPreviewResult { + sourcePath: string; + sourceExists: boolean; + sourceIsSymlink: boolean; + destinationPath: string; + destinationExists: boolean; + configFileName: string; + commitMessage: string; +} + +async function getConfigHitsForAlias( + adapter: SyncAdapter, + projectPath: string, + alias: string, + isUser: boolean +): Promise { + const [topLevel, subLevel] = adapter.configPath; + const hits: string[] = []; + + if (isUser) { + const userConfig = await getUserProjectConfig(); + const userPath = await getUserConfigPath(); + if ((userConfig as any)[topLevel]?.[subLevel]?.[alias]) { + hits.push(path.basename(userPath)); + } + return hits; + } + + const mainPath = path.join(projectPath, 'ai-rules-sync.json'); + const localPath = path.join(projectPath, 'ai-rules-sync.local.json'); + + if (await fs.pathExists(mainPath)) { + const mainConfig = await fs.readJson(mainPath); + if (mainConfig?.[topLevel]?.[subLevel]?.[alias]) { + hits.push('ai-rules-sync.json'); + } + } + + if (await fs.pathExists(localPath)) { + const localConfig = await fs.readJson(localPath); + if (localConfig?.[topLevel]?.[subLevel]?.[alias]) { + hits.push('ai-rules-sync.local.json'); + } + } + + // Legacy support for Cursor rules. 
+ if (adapter.tool === 'cursor' && adapter.subtype === 'rules') { + const legacyMainPath = path.join(projectPath, 'cursor-rules.json'); + const legacyLocalPath = path.join(projectPath, 'cursor-rules.local.json'); + + if (await fs.pathExists(legacyMainPath)) { + const legacyMain = await fs.readJson(legacyMainPath); + if (legacyMain?.rules?.[alias]) { + hits.push('cursor-rules.json'); + } + } + + if (await fs.pathExists(legacyLocalPath)) { + const legacyLocal = await fs.readJson(legacyLocalPath); + if (legacyLocal?.rules?.[alias]) { + hits.push('cursor-rules.local.json'); + } + } + } + + return hits; +} + +async function resolveRemoveTargetPath( + adapter: SyncAdapter, + projectPath: string, + alias: string, + isUser: boolean +): Promise<{ targetPath: string; exists: boolean; isSymlink: boolean }> { + let targetDirPath: string; + if (isUser) { + targetDirPath = adapter.userTargetDir || adapter.targetDir; + } else { + const config = await getCombinedProjectConfig(projectPath); + targetDirPath = getTargetDir(config, adapter.tool, adapter.subtype, alias, adapter.targetDir); + } + + const targetDir = path.join(path.resolve(projectPath), targetDirPath); + const candidates = [path.join(targetDir, alias)]; + + const suffixes = adapter.fileSuffixes || adapter.hybridFileSuffixes; + if (suffixes && suffixes.length > 0) { + for (const suffix of suffixes) { + if (!alias.endsWith(suffix)) { + candidates.push(path.join(targetDir, `${alias}${suffix}`)); + } + } + } + + for (const candidate of candidates) { + if (await fs.pathExists(candidate)) { + const stats = await fs.lstat(candidate); + return { + targetPath: candidate, + exists: true, + isSymlink: stats.isSymbolicLink() + }; + } + } + + return { + targetPath: candidates[0], + exists: false, + isSymlink: false + }; +} + /** * Generic remove command handler - works with any adapter */ @@ -171,8 +287,32 @@ export async function handleRemove( adapter: SyncAdapter, projectPath: string, alias: string, - isUser: boolean = false + 
isUser: boolean = false, + options?: RemoveCommandOptions ): Promise { + if (options?.dryRun) { + const hits = await getConfigHitsForAlias(adapter, projectPath, alias, isUser); + const target = await resolveRemoveTargetPath(adapter, projectPath, alias, isUser); + console.log(chalk.bold(`[DRY RUN] Remove ${adapter.tool} ${adapter.subtype} "${alias}"`)); + if (hits.length > 0) { + console.log(chalk.gray(` Config entries: ${hits.join(', ')}`)); + } else { + console.log(chalk.gray(' Config entries: none')); + } + + if (target.exists) { + const kind = target.isSymlink ? 'symlink' : 'file/directory'; + console.log(chalk.gray(` Filesystem: remove ${target.targetPath} (${kind})`)); + } else { + console.log(chalk.gray(` Filesystem: no matching path found (checked around ${target.targetPath})`)); + } + + return { + removedFrom: hits, + migrated: false + }; + } + await adapter.unlink(projectPath, alias); if (isUser) { @@ -210,6 +350,49 @@ export interface ImportCommandOptions { message?: string; force?: boolean; push?: boolean; + dryRun?: boolean; +} + +export async function previewImport( + adapter: SyncAdapter, + ctx: CommandContext, + name: string, + options: ImportCommandOptions +): Promise { + const absoluteProjectPath = path.resolve(ctx.projectPath); + const projectConfig = await getCombinedProjectConfig(ctx.projectPath); + const targetDirPath = getTargetDir( + projectConfig, + adapter.tool, + adapter.subtype, + name, + adapter.targetDir + ); + + const sourcePath = path.join(absoluteProjectPath, targetDirPath, name); + let sourceExists = false; + let sourceIsSymlink = false; + + if (await fs.pathExists(sourcePath)) { + sourceExists = true; + const stats = await fs.lstat(sourcePath); + sourceIsSymlink = stats.isSymbolicLink(); + } + + const repoConfig = await getRepoSourceConfig(ctx.repo.path); + const sourceDir = getSourceDir(repoConfig, adapter.tool, adapter.subtype, adapter.defaultSourceDir); + const destinationPath = path.join(ctx.repo.path, sourceDir, name); + const 
destinationExists = await fs.pathExists(destinationPath); + + return { + sourcePath, + sourceExists, + sourceIsSymlink, + destinationPath, + destinationExists, + configFileName: ctx.isLocal ? 'ai-rules-sync.local.json' : 'ai-rules-sync.json', + commitMessage: options.message || `Import ${adapter.tool} ${adapter.subtype}: ${name}` + }; } /** @@ -223,6 +406,31 @@ export async function handleImport( ): Promise { console.log(chalk.gray(`Using repository: ${chalk.cyan(ctx.repo.name)} (${ctx.repo.url})`)); + if (options.dryRun) { + const preview = await previewImport(adapter, ctx, name, options); + console.log(chalk.bold(`[DRY RUN] Import ${adapter.tool} ${adapter.subtype} "${name}"`)); + if (!preview.sourceExists) { + throw new Error(`Entry "${name}" not found in project at ${preview.sourcePath}`); + } + if (preview.sourceIsSymlink) { + throw new Error(`Entry "${name}" is already a symlink (already managed by ai-rules-sync)`); + } + + console.log(chalk.gray(` Copy: ${preview.sourcePath} -> ${preview.destinationPath}`)); + if (preview.destinationExists && !options.force) { + throw new Error(`Entry "${name}" already exists in rules repository at ${preview.destinationPath}. 
Use --force to overwrite.`); + } + if (preview.destinationExists && options.force) { + console.log(chalk.gray(' Destination exists and would be overwritten (--force).')); + } + console.log(chalk.gray(` Git commit message: ${preview.commitMessage}`)); + if (options.push) { + console.log(chalk.gray(' Git push: enabled')); + } + console.log(chalk.gray(` Config update: ${preview.configFileName}`)); + return; + } + const importOpts: ImportOptions = { projectPath: ctx.projectPath, name, diff --git a/src/completion/scripts.ts b/src/completion/scripts.ts index 01f7233..befb037 100644 --- a/src/completion/scripts.ts +++ b/src/completion/scripts.ts @@ -17,6 +17,11 @@ interface ToolCompletionSpec { nestedAddCompletionTypes?: Record; } +const COMMAND_ALIASES: Record = { + list: ['ls'], + remove: ['rm'] +}; + const TOOL_SPECS: ToolCompletionSpec[] = [ { tool: 'cursor', @@ -384,7 +389,12 @@ const EXTRA_TOP_LEVEL_COMMANDS: CompletionEntry[] = [ { name: 'add', description: 'Add a rule (smart dispatch)' }, { name: 'remove', description: 'Remove a rule (smart dispatch)' }, { name: 'install', description: 'Install all rules (smart dispatch)' }, + { name: 'add-all', description: 'Install all entries from repository' }, { name: 'import', description: 'Import entry to rules repository' }, + { name: 'status', description: 'Show repository and config status' }, + { name: 'search', description: 'Search entries in repository' }, + { name: 'config', description: 'Manage repository configuration' }, + { name: 'user', description: 'Manage user-level AI config entries' }, { name: 'completion', description: 'Output shell completion script' } ]; @@ -397,8 +407,23 @@ function toVarName(name: string): string { return name.replace(/-/g, '_'); } +function expandEntriesWithAliases(entries: CompletionEntry[]): CompletionEntry[] { + const expanded: CompletionEntry[] = []; + for (const entry of entries) { + expanded.push(entry); + const aliases = COMMAND_ALIASES[entry.name] || []; + for (const alias of 
aliases) { + expanded.push({ + name: alias, + description: `Alias for ${entry.name}` + }); + } + } + return expanded; +} + function quotedNames(entries: CompletionEntry[]): string { - return entries.map(entry => entry.name).join(' '); + return expandEntriesWithAliases(entries).map(entry => entry.name).join(' '); } function escapeSingleQuotes(value: string): string { @@ -406,7 +431,9 @@ function escapeSingleQuotes(value: string): string { } function buildZshDescribeItems(entries: CompletionEntry[]): string { - return entries.map(entry => `'${escapeSingleQuotes(entry.name)}:${escapeSingleQuotes(entry.description)}'`).join(' '); + return expandEntriesWithAliases(entries) + .map(entry => `'${escapeSingleQuotes(entry.name)}:${escapeSingleQuotes(entry.description)}'`) + .join(' '); } function buildZshCompleteTypeBlock(completeType: string, indent: string): string[] { @@ -491,7 +518,7 @@ function buildZshScript(): string { ' subcmds=(' ]; - for (const cmd of TOP_LEVEL_COMMANDS) { + for (const cmd of expandEntriesWithAliases(TOP_LEVEL_COMMANDS)) { lines.push(` '${escapeSingleQuotes(cmd.name)}:${escapeSingleQuotes(cmd.description)}'`); } lines.push(' )'); @@ -624,14 +651,18 @@ function buildFishScript(): string { ]; for (const cmd of TOP_LEVEL_COMMANDS) { - lines.push(`complete -c ais -n "__fish_use_subcommand" -a "${cmd.name}" -d "${cmd.description}"`); + const entries = expandEntriesWithAliases([cmd]); + for (const entry of entries) { + lines.push(`complete -c ais -n "__fish_use_subcommand" -a "${entry.name}" -d "${entry.description}"`); + } } lines.push(''); for (const spec of TOOL_SPECS) { - const allRoot = quotedNames(spec.rootSubcommands); + const rootEntries = expandEntriesWithAliases(spec.rootSubcommands); + const allRoot = rootEntries.map(entry => entry.name).join(' '); lines.push(`# ${spec.tool} subcommands`); - for (const subcommand of spec.rootSubcommands) { + for (const subcommand of rootEntries) { lines.push(`complete -c ais -n "__fish_seen_subcommand_from 
${spec.tool}; and not __fish_seen_subcommand_from ${allRoot}" -a "${subcommand.name}" -d "${subcommand.description}"`); } lines.push(''); @@ -639,9 +670,10 @@ function buildFishScript(): string { for (const spec of TOOL_SPECS) { for (const [nested, nestedEntries] of Object.entries(spec.nestedSubcommands)) { - const nestedNames = quotedNames(nestedEntries); + const expandedNestedEntries = expandEntriesWithAliases(nestedEntries); + const nestedNames = expandedNestedEntries.map(entry => entry.name).join(' '); lines.push(`# ${spec.tool} ${nested} subcommands`); - for (const subcommand of nestedEntries) { + for (const subcommand of expandedNestedEntries) { lines.push(`complete -c ais -n "__fish_seen_subcommand_from ${spec.tool}; and __fish_seen_subcommand_from ${nested}; and not __fish_seen_subcommand_from ${nestedNames}" -a "${subcommand.name}" -d "${subcommand.description}"`); } lines.push(''); diff --git a/src/index.ts b/src/index.ts index a366c4e..be0ed42 100644 --- a/src/index.ts +++ b/src/index.ts @@ -4,10 +4,10 @@ import chalk from 'chalk'; import path from 'path'; import os from 'os'; import fs from 'fs-extra'; -import { getConfig, setConfig, getReposBaseDir, getCurrentRepo, RepoConfig } from './config.js'; +import { getConfig, setConfig, getReposBaseDir, getCurrentRepo, getUserConfigPath, getUserProjectConfig, RepoConfig } from './config.js'; import { cloneOrUpdateRepo, runGitCommand } from './git.js'; import { addIgnoreEntry } from './utils.js'; -import { getCombinedProjectConfig, getRepoSourceConfig, getSourceDir } from './project-config.js'; +import { getCombinedProjectConfig, getConfigSource, getRepoSourceConfig, getSourceDir, ProjectConfig } from './project-config.js'; import { checkAndPromptCompletion, forceInstallCompletion } from './completion.js'; import { getCompletionScript } from './completion/scripts.js'; import { adapterRegistry, getAdapter, findAdapterForAlias } from './adapters/index.js'; @@ -27,7 +27,7 @@ import { } from 
'./commands/helpers.js'; import { handleAdd, handleRemove, handleImport } from './commands/handlers.js'; import { installEntriesForAdapter, installEntriesForTool, installAllUserEntries, installAllGlobalEntries } from './commands/install.js'; -import { handleAddAll } from './commands/add-all.js'; +import { discoverAllEntries, handleAddAll } from './commands/add-all.js'; import { parseSourceDirParams } from './cli/source-dir-parser.js'; import { setRepoSourceDir, clearRepoSourceDir, showRepoConfig, listRepos, handleUserConfigShow, handleUserConfigSet, handleUserConfigReset, handleGlobalConfigShow, handleGlobalConfigSet, handleGlobalConfigReset } from './commands/config.js'; import { getFormattedVersion } from './commands/version.js'; @@ -48,6 +48,52 @@ function collect(value: string, previous: string[]): string[] { return previous ? previous.concat([value]) : [value]; } +function getAdapterEntryCount(config: ProjectConfig, adapter: SyncAdapter): number { + const [topLevel, subLevel] = adapter.configPath; + if (topLevel === 'agentsMd') { + return Object.keys(config.agentsMd || {}).length; + } + + const topConfig = (config as any)[topLevel]; + if (!topConfig || typeof topConfig !== 'object') { + return 0; + } + + const section = topConfig[subLevel]; + if (!section || typeof section !== 'object') { + return 0; + } + + return Object.keys(section).length; +} + +function collectToolCounts(config: ProjectConfig): { perTool: Record; total: number } { + const perTool: Record = {}; + let total = 0; + + for (const adapter of adapterRegistry.all()) { + const count = getAdapterEntryCount(config, adapter); + if (count === 0) { + continue; + } + perTool[adapter.tool] = (perTool[adapter.tool] || 0) + count; + total += count; + } + + return { perTool, total }; +} + +function parseCsvOption(input?: string): string[] | undefined { + if (!input) { + return undefined; + } + const values = input + .split(',') + .map((item: string) => item.trim()) + .filter(Boolean); + return values.length 
> 0 ? values : undefined; +} + program .name('ais') .description('AI Rules Sync - Sync agent rules from git repository') @@ -113,12 +159,28 @@ program // ============ List command ============ program .command('list') + .alias('ls') .description('List all cursor rules git repositories') - .action(async () => { + .option('--json', 'Output repositories as JSON') + .action(async (cmdOptions: { json?: boolean }) => { const config = await getConfig(); const repos = config.repos || {}; const names = Object.keys(repos); + if (cmdOptions.json) { + const repositories = names.map(name => ({ + name, + url: repos[name].url, + path: repos[name].path, + isCurrent: name === config.currentRepo + })); + console.log(JSON.stringify({ + currentRepo: config.currentRepo || null, + repositories + }, null, 2)); + return; + } + if (names.length === 0) { console.log(chalk.yellow('No repositories configured. Use "ais use [url]" to configure.')); return; @@ -136,6 +198,204 @@ program } }); +program + .command('status') + .description('Show repository and configuration status') + .option('-u, --user', 'Include user config status') + .option('--json', 'Output status as JSON') + .action(async (cmdOptions: { user?: boolean; json?: boolean }) => { + try { + const globalConfig = await getConfig(); + const currentRepo = globalConfig.currentRepo ? globalConfig.repos?.[globalConfig.currentRepo] : undefined; + const repoExists = currentRepo ? 
await fs.pathExists(currentRepo.path) : false; + + const projectPath = process.cwd(); + const projectConfigSource = await getConfigSource(projectPath); + const projectConfig = await getCombinedProjectConfig(projectPath); + const projectMode = await inferDefaultMode(projectPath); + const projectCounts = collectToolCounts(projectConfig); + + let userStatus: + | { + path: string; + exists: boolean; + totalEntries: number; + perTool: Record; + } + | undefined; + if (cmdOptions.user) { + const userConfigPath = await getUserConfigPath(); + const userConfigExists = await fs.pathExists(userConfigPath); + const userConfig = await getUserProjectConfig(); + const userCounts = collectToolCounts(userConfig); + userStatus = { + path: userConfigPath, + exists: userConfigExists, + totalEntries: userCounts.total, + perTool: userCounts.perTool + }; + } + + const statusPayload = { + repository: currentRepo + ? { + name: currentRepo.name, + url: currentRepo.url, + path: currentRepo.path, + exists: repoExists + } + : null, + project: { + path: projectPath, + configSource: projectConfigSource, + inferredMode: projectMode, + totalEntries: projectCounts.total, + perTool: projectCounts.perTool + }, + user: userStatus + }; + + if (cmdOptions.json) { + console.log(JSON.stringify(statusPayload, null, 2)); + return; + } + + console.log(chalk.bold('Repository:')); + if (!currentRepo) { + console.log(chalk.yellow(' No repository configured. Use "ais use " first.')); + } else { + console.log(` Name: ${chalk.cyan(currentRepo.name)}`); + console.log(` URL: ${chalk.gray(currentRepo.url)}`); + console.log(` Path: ${currentRepo.path}`); + console.log(` Available locally: ${repoExists ? 
chalk.green('yes') : chalk.red('no')}`); + } + + console.log(chalk.bold('\nProject:')); + console.log(` Path: ${projectPath}`); + console.log(` Config source: ${projectConfigSource}`); + console.log(` Inferred mode: ${projectMode}`); + console.log(` Configured entries: ${projectCounts.total}`); + if (projectCounts.total > 0) { + for (const [tool, count] of Object.entries(projectCounts.perTool)) { + console.log(` - ${tool}: ${count}`); + } + } + + if (userStatus) { + console.log(chalk.bold('\nUser config:')); + console.log(` Path: ${userStatus.path}`); + console.log(` Exists: ${userStatus.exists ? chalk.green('yes') : chalk.red('no')}`); + console.log(` Configured entries: ${userStatus.totalEntries}`); + if (userStatus.totalEntries > 0) { + for (const [tool, count] of Object.entries(userStatus.perTool)) { + console.log(` - ${tool}: ${count}`); + } + } + } + } catch (error: any) { + console.error(chalk.red('Error getting status:'), error.message); + process.exit(1); + } + }); + +program + .command('search [query]') + .description('Search entries available in the rules repository') + .option('--tools ', 'Filter by tools (comma-separated)') + .option('--adapters ', 'Filter by adapter names (comma-separated)') + .option('--configured', 'Show only entries already in project config') + .option('--unconfigured', 'Show only entries not in project config') + .option('--json', 'Output search results as JSON') + .action(async (query: string | undefined, cmdOptions: { tools?: string; adapters?: string; configured?: boolean; unconfigured?: boolean; json?: boolean }) => { + try { + if (cmdOptions.configured && cmdOptions.unconfigured) { + throw new Error('Cannot use both --configured and --unconfigured together.'); + } + + const opts = program.opts(); + const repo = await getTargetRepo(opts); + const tools = parseCsvOption(cmdOptions.tools); + const adapters = parseCsvOption(cmdOptions.adapters); + const normalizedQuery = (query || '').trim().toLowerCase(); + + let entries = 
await discoverAllEntries(process.cwd(), repo, adapterRegistry, { + tools, + adapters + }); + + if (normalizedQuery) { + entries = entries.filter(entry => { + const haystacks = [ + entry.entryName, + entry.sourceName, + entry.adapter.name, + entry.adapter.tool, + entry.adapter.subtype + ]; + return haystacks.some(value => value.toLowerCase().includes(normalizedQuery)); + }); + } + + if (cmdOptions.configured) { + entries = entries.filter(entry => entry.alreadyInConfig); + } + if (cmdOptions.unconfigured) { + entries = entries.filter(entry => !entry.alreadyInConfig); + } + + const serialized = entries.map(entry => ({ + adapter: entry.adapter.name, + tool: entry.adapter.tool, + subtype: entry.adapter.subtype, + entryName: entry.entryName, + sourceName: entry.sourceName, + isDirectory: entry.isDirectory, + configured: entry.alreadyInConfig + })); + + if (cmdOptions.json) { + console.log(JSON.stringify({ + repository: { + name: repo.name, + url: repo.url + }, + query: query || null, + total: serialized.length, + entries: serialized + }, null, 2)); + return; + } + + if (serialized.length === 0) { + console.log(chalk.yellow('No matching entries found.')); + return; + } + + const grouped = new Map(); + for (const item of serialized) { + const key = item.adapter; + const list = grouped.get(key) || []; + list.push(item); + grouped.set(key, list); + } + + console.log(chalk.bold(`Found ${serialized.length} entries:`)); + for (const [adapterName, items] of grouped) { + console.log(chalk.cyan(`\n${adapterName} (${items.length})`)); + for (const item of items) { + const flags: string[] = []; + if (item.configured) flags.push('configured'); + if (item.isDirectory) flags.push('dir'); + const suffix = flags.length > 0 ? 
` ${chalk.gray(`[${flags.join(', ')}]`)}` : ''; + console.log(` - ${item.entryName}${suffix}`); + } + } + } catch (error: any) { + console.error(chalk.red('Error searching entries:'), error.message); + process.exit(1); + } + }); + // ============ Top-level shortcuts ============ program .command('add') @@ -195,9 +455,11 @@ program program .command('remove') + .alias('rm') .description('Remove an entry (auto-detects cursor/copilot if unambiguous)') .argument('', 'Alias/name in the project to remove') - .action(async (alias) => { + .option('--dry-run', 'Preview changes without applying') + .action(async (alias, cmdOptions: { dryRun?: boolean }) => { try { const projectPath = process.cwd(); const cfg = await getCombinedProjectConfig(projectPath); @@ -206,7 +468,7 @@ program const found = findAdapterForAlias(cfg, alias); if (found) { - await handleRemove(found.adapter, projectPath, alias); + await handleRemove(found.adapter, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } else { // Alias not found in config, try to infer mode const mode = await inferDefaultMode(projectPath); @@ -226,36 +488,31 @@ program // Try all Claude adapters const claudeAdapters = adapterRegistry.getForTool('claude'); for (const a of claudeAdapters) { - await a.unlink(projectPath, alias); - await a.removeDependency(projectPath, alias); + await handleRemove(a, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } return; } else if (mode === 'trae') { const traeAdapters = adapterRegistry.getForTool('trae'); for (const a of traeAdapters) { - await a.unlink(projectPath, alias); - await a.removeDependency(projectPath, alias); + await handleRemove(a, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } return; } else if (mode === 'opencode') { const opencodeAdapters = adapterRegistry.getForTool('opencode'); for (const a of opencodeAdapters) { - await a.unlink(projectPath, alias); - await a.removeDependency(projectPath, alias); + await handleRemove(a, projectPath, alias, 
false, { dryRun: cmdOptions.dryRun }); } return; } else if (mode === 'codex') { const codexAdapters = adapterRegistry.getForTool('codex'); for (const a of codexAdapters) { - await a.unlink(projectPath, alias); - await a.removeDependency(projectPath, alias); + await handleRemove(a, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } return; } else if (mode === 'gemini') { const geminiAdapters = adapterRegistry.getForTool('gemini'); for (const a of geminiAdapters) { - await a.unlink(projectPath, alias); - await a.removeDependency(projectPath, alias); + await handleRemove(a, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } return; } else if (mode === 'warp') { @@ -270,7 +527,7 @@ program throw new Error(`Cannot determine which tool to use for alias "${alias}"`); } - await handleRemove(adapter, projectPath, alias); + await handleRemove(adapter, projectPath, alias, false, { dryRun: cmdOptions.dryRun }); } } catch (error: any) { console.error(chalk.red('Error removing entry:'), error.message); @@ -417,6 +674,7 @@ program .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -475,11 +733,13 @@ cursor // cursor remove (default to rules) cursor .command('remove ') + .alias('rm') .description('Remove a Cursor rule from project') - .action(async (alias) => { + .option('--dry-run', 'Preview changes without applying') + .action(async (alias, options: { dryRun?: boolean }) => { try { const adapter = getAdapter('cursor', 'rules'); - await handleRemove(adapter, process.cwd(), alias); + await handleRemove(adapter, process.cwd(), alias, false, { dryRun: options.dryRun }); } catch (error: any) { console.error(chalk.red('Error removing Cursor rule:'), error.message); process.exit(1); @@ 
-575,6 +835,7 @@ cursor .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -959,6 +1220,7 @@ opencode .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -1091,6 +1353,7 @@ codex .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -1218,6 +1481,7 @@ gemini .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -1282,6 +1546,7 @@ warp .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const repo = await getTargetRepo(program.opts()); @@ -1365,11 +1630,13 @@ function registerRulesAndSkillsToolGroup(config: RulesAndSkillsToolGroupOptions) group .command('remove ') + .alias('rm') .description(`Remove a ${displayName} rule from project`) - 
.action(async (alias) => { + .option('--dry-run', 'Preview changes without applying') + .action(async (alias, options: { dryRun?: boolean }) => { try { const adapter = getAdapter(tool, 'rules'); - await handleRemove(adapter, process.cwd(), alias); + await handleRemove(adapter, process.cwd(), alias, false, { dryRun: options.dryRun }); } catch (error: any) { console.error(chalk.red(`Error removing ${displayName} rule:`), error.message); process.exit(1); @@ -1448,6 +1715,7 @@ function registerRulesAndSkillsToolGroup(config: RulesAndSkillsToolGroupOptions) .option('-m, --message ', 'Custom git commit message') .option('-f, --force', 'Overwrite if entry already exists in repository') .option('-p, --push', 'Push to remote repository after commit') + .option('--dry-run', 'Preview changes without applying') .action(async (name, options) => { try { const projectPath = process.cwd(); @@ -1690,9 +1958,10 @@ configRepo configRepo .command('show ') .description('Show repository configuration') - .action(async (repoName: string) => { + .option('--json', 'Output configuration as JSON') + .action(async (repoName: string, options: { json?: boolean }) => { try { - await showRepoConfig(repoName); + await showRepoConfig(repoName, { json: options.json }); } catch (error: any) { console.error(chalk.red('Error showing repository config:'), error.message); process.exit(1); @@ -1701,10 +1970,12 @@ configRepo configRepo .command('list') + .alias('ls') .description('List all repositories') - .action(async () => { + .option('--json', 'Output repositories as JSON') + .action(async (options: { json?: boolean }) => { try { - await listRepos(); + await listRepos({ json: options.json }); } catch (error: any) { console.error(chalk.red('Error listing repositories:'), error.message); process.exit(1); From 1255cf9d888a50996e693a440195a77db18dd6b7 Mon Sep 17 00:00:00 2001 From: lbb00 Date: Wed, 4 Mar 2026 11:56:43 +0800 Subject: [PATCH 06/12] fix: brew install --- README.md | 7 +++++-- README_ZH.md | 7 
+++++-- package.json | 3 ++- scripts/setup-homebrew-tap.sh | 37 +++++++++++++++++++++++++++++++++++ 4 files changed, 49 insertions(+), 5 deletions(-) create mode 100755 scripts/setup-homebrew-tap.sh diff --git a/README.md b/README.md index 9f1e13e..b6f69c2 100644 --- a/README.md +++ b/README.md @@ -48,12 +48,15 @@ Stop copying `.mdc` files around. Manage your rules in Git repositories and sync ### Via Homebrew (macOS/Linux) ```bash -brew install lbb00/ai-rules-sync/ais -# or tap first for shorter subsequent commands: brew tap lbb00/ai-rules-sync brew install ais + +# one-off install without tap: +brew install --formula https://raw.githubusercontent.com/lbb00/ai-rules-sync/main/Formula/ais.rb ``` +> `brew tap lbb00/ai-rules-sync` follows Homebrew's naming convention and resolves to the tap repository `lbb00/homebrew-ai-rules-sync`. + ### Via npm ```bash diff --git a/README_ZH.md b/README_ZH.md index a325d0e..803dffe 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -48,12 +48,15 @@ ### 通过 Homebrew(macOS/Linux) ```bash -brew install lbb00/ai-rules-sync/ais -# 或先 tap,之后命令更简洁: brew tap lbb00/ai-rules-sync brew install ais + +# 不使用 tap 的一次性安装: +brew install --formula https://raw.githubusercontent.com/lbb00/ai-rules-sync/main/Formula/ais.rb ``` +> `brew tap lbb00/ai-rules-sync` 会按 Homebrew 约定映射到 tap 仓库 `lbb00/homebrew-ai-rules-sync`。 + ### 通过 npm ```bash diff --git a/package.json b/package.json index 6a99f36..951c02f 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,8 @@ "scripts": { "build": "tsc", "test": "vitest", - "docs:sync-tools": "node scripts/sync-supported-tools.mjs" + "docs:sync-tools": "node scripts/sync-supported-tools.mjs", + "tap:setup": "bash scripts/setup-homebrew-tap.sh" }, "files": [ "dist" diff --git a/scripts/setup-homebrew-tap.sh b/scripts/setup-homebrew-tap.sh new file mode 100755 index 0000000..aa0a2af --- /dev/null +++ b/scripts/setup-homebrew-tap.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +set -euo pipefail + +SOURCE_REPO_ROOT="$(cd "$(dirname 
"$0")/.." && pwd)" +FORMULA_SRC="$SOURCE_REPO_ROOT/Formula/ais.rb" +TAP_REPO="${1:-$HOME/code/homebrew-ai-rules-sync}" + +if [[ ! -f "$FORMULA_SRC" ]]; then + echo "Formula source not found: $FORMULA_SRC" >&2 + exit 1 +fi + +mkdir -p "$TAP_REPO/Formula" + +if [[ ! -d "$TAP_REPO/.git" ]]; then + git init "$TAP_REPO" +fi + +cp "$FORMULA_SRC" "$TAP_REPO/Formula/ais.rb" + +if [[ ! -f "$TAP_REPO/README.md" ]]; then + cat > "$TAP_REPO/README.md" <<'README' +# homebrew-ai-rules-sync + +Homebrew tap for [ai-rules-sync](https://github.com/lbb00/ai-rules-sync). + +## Install + +```bash +brew tap lbb00/ai-rules-sync +brew install ais +``` +README +fi + +echo "Tap repository prepared at: $TAP_REPO" +echo "Formula synced to: $TAP_REPO/Formula/ais.rb" From 21c354f3335ca18f1ecc43408e0a86e8b267b35e Mon Sep 17 00:00:00 2001 From: lbb Date: Wed, 4 Mar 2026 19:24:27 +0800 Subject: [PATCH 07/12] chore(release): automate npm+homebrew pipeline and drop aur (#28) --- .github/workflows/release.yml | 71 +++++++++++++++++++++++++++ .github/workflows/update-homebrew.yml | 63 +++++++++++------------- KNOWLEDGE_BASE.md | 20 ++++++++ README.md | 38 ++++++++++---- README_ZH.md | 38 ++++++++++---- package.json | 3 +- scripts/setup-homebrew-tap.sh | 37 -------------- 7 files changed, 180 insertions(+), 90 deletions(-) create mode 100644 .github/workflows/release.yml delete mode 100755 scripts/setup-homebrew-tap.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..b4a0ca9 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,71 @@ +name: Release + +on: + push: + branches: + - main + paths-ignore: + - Formula/ais.rb + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + release: + runs-on: ubuntu-latest + outputs: + published: ${{ steps.changesets.outputs.published }} + published_version: ${{ steps.version.outputs.version }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup 
pnpm + uses: pnpm/action-setup@v4 + with: + version: 9.15.3 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + registry-url: https://registry.npmjs.org + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Run tests + run: pnpm test -- --run + + - name: Build + run: pnpm build + + - name: Create release PR or publish + id: changesets + uses: changesets/action@v1 + with: + version: pnpm changeset version + publish: pnpm changeset publish + title: "chore: release versions" + commit: "chore: release versions" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + - name: Resolve published version + id: version + if: steps.changesets.outputs.published == 'true' + run: | + VERSION=$(node -p "require('./package.json').version") + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + + update-homebrew: + needs: release + if: needs.release.outputs.published == 'true' + uses: ./.github/workflows/update-homebrew.yml + with: + version: ${{ needs.release.outputs.published_version }} diff --git a/.github/workflows/update-homebrew.yml b/.github/workflows/update-homebrew.yml index c9d3796..dbf08ca 100644 --- a/.github/workflows/update-homebrew.yml +++ b/.github/workflows/update-homebrew.yml @@ -1,53 +1,48 @@ name: Update Homebrew Tap on: - workflow_run: - workflows: ["Release"] - types: - - completed + workflow_call: + inputs: + version: + description: Release version to publish (without v prefix) + required: true + type: string + workflow_dispatch: + inputs: + version: + description: Release version to publish (without v prefix) + required: true + type: string + +permissions: + contents: write jobs: update-tap: - if: ${{ github.event.workflow_run.conclusion == 'success' }} runs-on: ubuntu-latest steps: - - name: Get new version - id: version - run: | - VERSION=$(npm view ai-rules-sync version) - echo "version=$VERSION" >> 
"$GITHUB_OUTPUT" + - name: Checkout repository + uses: actions/checkout@v4 - name: Compute tarball SHA256 id: sha256 + env: + VERSION: ${{ inputs.version }} run: | - URL="https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-${{ steps.version.outputs.version }}.tgz" + URL="https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-${VERSION}.tgz" SHA=$(curl -fsSL "$URL" | sha256sum | awk '{print $1}') echo "sha256=$SHA" >> "$GITHUB_OUTPUT" - - name: Checkout Tap repository - uses: actions/checkout@v4 - with: - repository: lbb00/homebrew-ai-rules-sync - token: ${{ secrets.HOMEBREW_TAP_TOKEN }} - path: tap - - name: Update Formula + env: + VERSION: ${{ inputs.version }} + SHA: ${{ steps.sha256.outputs.sha256 }} run: | - VERSION="${{ steps.version.outputs.version }}" - SHA="${{ steps.sha256.outputs.sha256 }}" - sed -i "s|url \"https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-.*\.tgz\"|url \"https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-${VERSION}.tgz\"|" tap/Formula/ais.rb - sed -i "s|sha256 \".*\"|sha256 \"${SHA}\"|" tap/Formula/ais.rb + sed -i "s|url \"https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-.*\\.tgz\"|url \"https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-${VERSION}.tgz\"|" Formula/ais.rb + sed -i "s|sha256 \".*\"|sha256 \"${SHA}\"|" Formula/ais.rb - - name: Create Pull Request - uses: peter-evans/create-pull-request@v7 + - name: Commit formula update + uses: stefanzweifel/git-auto-commit-action@v5 with: - token: ${{ secrets.HOMEBREW_TAP_TOKEN }} - path: tap - commit-message: "chore: update ais to v${{ steps.version.outputs.version }}" - title: "chore: update ais to v${{ steps.version.outputs.version }}" - body: | - Automated update for ai-rules-sync v${{ steps.version.outputs.version }}. 
- - - url: `https://registry.npmjs.org/ai-rules-sync/-/ai-rules-sync-${{ steps.version.outputs.version }}.tgz` - - sha256: `${{ steps.sha256.outputs.sha256 }}` - branch: "update-ais-${{ steps.version.outputs.version }}" + commit_message: "chore(homebrew): update formula to v${{ inputs.version }}" + file_pattern: Formula/ais.rb diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index a844263..4ee8906 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -1274,6 +1274,26 @@ CLI Parameters > Global Config > Repository Config > Adapter Defaults - `tests/config.test.ts` - Updated test fixtures - Documentation updated to reflect new paths +### Automated Release Pipeline (npm + Homebrew) (2026-03) + +**Added end-to-end release automation via GitHub Actions:** + +1. **`release.yml`**: + - Triggered on push to `main` (and `workflow_dispatch`) + - Uses `changesets/action` to create/update release PRs and publish npm automatically + - Publishes downstream outputs (`published`, `published_version`) for dependent jobs + +2. **`update-homebrew.yml`** (refactored): + - Converted to reusable workflow (`workflow_call`) and manual rerun (`workflow_dispatch`) + - Accepts explicit `version` input (no registry race on version lookup) + - Computes npm tarball SHA256 and updates `Formula/ais.rb` in this repository + +**Distribution Impact:** +- Homebrew tap uses explicit repository URL: + - `brew tap lbb00/ai-rules-sync https://github.com/lbb00/ai-rules-sync` + - `brew install ais` +- npm remains the source artifact for Homebrew formula updates + ### OpenCode AI Support (2026-01) **Added complete support for OpenCode AI (https://opencode.ai) with 5 component types:** diff --git a/README.md b/README.md index b6f69c2..0a5f06a 100644 --- a/README.md +++ b/README.md @@ -45,23 +45,23 @@ Stop copying `.mdc` files around. 
Manage your rules in Git repositories and sync ## Installation -### Via Homebrew (macOS/Linux) +### Via npm (Recommended) ```bash -brew tap lbb00/ai-rules-sync +npm install -g ai-rules-sync +``` + +### Via Homebrew (macOS only) + +```bash +brew tap lbb00/ai-rules-sync https://github.com/lbb00/ai-rules-sync brew install ais # one-off install without tap: brew install --formula https://raw.githubusercontent.com/lbb00/ai-rules-sync/main/Formula/ais.rb ``` -> `brew tap lbb00/ai-rules-sync` follows Homebrew's naming convention and resolves to the tap repository `lbb00/homebrew-ai-rules-sync`. - -### Via npm - -```bash -npm install -g ai-rules-sync -``` +> This repository is the tap source. Use the explicit tap URL above to avoid Homebrew's default `homebrew-` mapping. **Verify installation:** ```bash @@ -1183,6 +1183,26 @@ autoload -Uz compinit && compinit --- +## Maintainer Release Automation + +Releases are automated with GitHub Actions: + +1. Merge the Changesets release PR into `main`. +2. `release.yml` publishes to npm. +3. `update-homebrew.yml` updates `Formula/ais.rb` in this repository. + +### Required GitHub Secrets + +- `NPM_TOKEN` + +### Recovery / Rollback + +- If npm publish succeeds but Homebrew update fails, rerun: + - `Update Homebrew Tap` (`workflow_dispatch`, with `version`) +- If a bad npm version is published, publish a fixed patch version and let automation sync downstream package channels. 
+ +--- + ## Links - **Documentation**: [https://github.com/lbb00/ai-rules-sync](https://github.com/lbb00/ai-rules-sync) diff --git a/README_ZH.md b/README_ZH.md index 803dffe..792da6e 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -45,23 +45,23 @@ ## 安装 -### 通过 Homebrew(macOS/Linux) +### 通过 npm(推荐) ```bash -brew tap lbb00/ai-rules-sync +npm install -g ai-rules-sync +``` + +### 通过 Homebrew(仅 macOS) + +```bash +brew tap lbb00/ai-rules-sync https://github.com/lbb00/ai-rules-sync brew install ais # 不使用 tap 的一次性安装: brew install --formula https://raw.githubusercontent.com/lbb00/ai-rules-sync/main/Formula/ais.rb ``` -> `brew tap lbb00/ai-rules-sync` 会按 Homebrew 约定映射到 tap 仓库 `lbb00/homebrew-ai-rules-sync`。 - -### 通过 npm - -```bash -npm install -g ai-rules-sync -``` +> 当前仓库本身就是 tap 源。使用上面的显式 URL 可避免 Homebrew 默认的 `homebrew-` 映射规则。 **验证安装:** ```bash @@ -1183,6 +1183,26 @@ autoload -Uz compinit && compinit --- +## 维护者发布自动化 + +发布流程已由 GitHub Actions 自动化: + +1. 将 Changesets 生成的 release PR 合并到 `main`。 +2. `release.yml` 自动发布 npm。 +3. 
`update-homebrew.yml` 自动更新当前仓库中的 `Formula/ais.rb`。 + +### 必需的 GitHub Secrets + +- `NPM_TOKEN` + +### 故障恢复 / 回滚 + +- 如果 npm 发布成功但 Homebrew 更新失败,可手动重跑: + - `Update Homebrew Tap`(`workflow_dispatch`,传 `version`) +- 如果发布了错误的 npm 版本,发布一个修复后的 patch 版本,并由自动化流程同步到下游渠道。 + +--- + ## 链接 - **文档**:[https://github.com/lbb00/ai-rules-sync](https://github.com/lbb00/ai-rules-sync) diff --git a/package.json b/package.json index 951c02f..3a03b44 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,8 @@ "build": "tsc", "test": "vitest", "docs:sync-tools": "node scripts/sync-supported-tools.mjs", - "tap:setup": "bash scripts/setup-homebrew-tap.sh" + "release:version": "changeset version", + "release:publish": "changeset publish" }, "files": [ "dist" diff --git a/scripts/setup-homebrew-tap.sh b/scripts/setup-homebrew-tap.sh deleted file mode 100755 index aa0a2af..0000000 --- a/scripts/setup-homebrew-tap.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SOURCE_REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)" -FORMULA_SRC="$SOURCE_REPO_ROOT/Formula/ais.rb" -TAP_REPO="${1:-$HOME/code/homebrew-ai-rules-sync}" - -if [[ ! -f "$FORMULA_SRC" ]]; then - echo "Formula source not found: $FORMULA_SRC" >&2 - exit 1 -fi - -mkdir -p "$TAP_REPO/Formula" - -if [[ ! -d "$TAP_REPO/.git" ]]; then - git init "$TAP_REPO" -fi - -cp "$FORMULA_SRC" "$TAP_REPO/Formula/ais.rb" - -if [[ ! -f "$TAP_REPO/README.md" ]]; then - cat > "$TAP_REPO/README.md" <<'README' -# homebrew-ai-rules-sync - -Homebrew tap for [ai-rules-sync](https://github.com/lbb00/ai-rules-sync). 
- -## Install - -```bash -brew tap lbb00/ai-rules-sync -brew install ais -``` -README -fi - -echo "Tap repository prepared at: $TAP_REPO" -echo "Formula synced to: $TAP_REPO/Formula/ais.rb" From 95f2a693f9bfe511a3267503602f9e131aaf240a Mon Sep 17 00:00:00 2001 From: lbb Date: Wed, 4 Mar 2026 19:30:25 +0800 Subject: [PATCH 08/12] feat: introduce dotany abstraction layer with linkany-backed symlinks (#29) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add src/dotany/ — generic dotfile management library (DotfileManager, DotfileComposer, FileSystemSource, JsonManifest, dotfile.create/compose) - Add src/plugin/ — ai-rules-sync plugin implementations (GitRepoSource, AiRulesSyncManifest) - Route all symlink ops through linkany (doLink/doUnlink helpers) for atomic operations; remove all bare fs.ensureSymlink calls from manager - Refactor adapters/base.ts: forProject() delegates to dotany API - Refactor commands/handlers.ts, install.ts: use forProject().add/remove/apply - Refactor project-config.ts: registry-driven, no hardcoded tool lists - Refactor sync-engine.ts: importEntry delegates fs to manager.import() - Refactor commands/helpers.ts: inferDefaultMode registry-driven - Update KNOWLEDGE_BASE.md and src/dotany/DESIGN.md to reflect new structure Co-authored-by: Claude Sonnet 4.6 --- KNOWLEDGE_BASE.md | 11 + src/adapters/base.ts | 44 ++- src/adapters/types.ts | 9 + src/commands/handlers.ts | 171 +++++++-- src/commands/helpers.ts | 97 ++--- src/commands/install.ts | 29 +- src/dotany/DESIGN.md | 505 +++++++++++++++++++++++++++ src/dotany/composer.ts | 26 ++ src/dotany/index.ts | 19 + src/dotany/manager.ts | 420 ++++++++++++++++++++++ src/dotany/manifest/json.ts | 55 +++ src/dotany/sources/filesystem.ts | 30 ++ src/dotany/sources/git.ts | 50 +++ src/dotany/types.ts | 109 ++++++ src/plugin/ai-rules-sync-manifest.ts | 69 ++++ src/plugin/git-repo-source.ts | 87 +++++ src/project-config.ts | 388 ++++---------------- 
src/sync-engine.ts | 81 +++-- 18 files changed, 1742 insertions(+), 458 deletions(-) create mode 100644 src/dotany/DESIGN.md create mode 100644 src/dotany/composer.ts create mode 100644 src/dotany/index.ts create mode 100644 src/dotany/manager.ts create mode 100644 src/dotany/manifest/json.ts create mode 100644 src/dotany/sources/filesystem.ts create mode 100644 src/dotany/sources/git.ts create mode 100644 src/dotany/types.ts create mode 100644 src/plugin/ai-rules-sync-manifest.ts create mode 100644 src/plugin/git-repo-source.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index 4ee8906..afb4b2f 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -18,6 +18,7 @@ A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to - **CLI Framework**: Commander.js. - **Config**: Stored in `~/.config/ai-rules-sync/config.json` (global) and project roots. - **Git Operations**: Uses `execa` to run git commands; stores repos in `~/.config/ai-rules-sync/repos/`. +- **Symlink Layer**: All symlink creation/deletion goes through `linkany` (via `DotfileManager.doLink` / `doUnlink`), providing atomic operations. No bare `fs.ensureSymlink` calls in dotany. - **Plugin Architecture**: Modular adapter system for different AI tools. - **Modular CLI**: Declarative command registration using adapters. @@ -25,6 +26,16 @@ A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to ``` src/ +├── dotfile/ # Generic dotfile abstraction library (tool-agnostic) +│ ├── types.ts # All interfaces: SourceResolver, ManifestStore, etc. 
+│ ├── manager.ts # DotfileManager — all symlink ops via linkany (atomic) +│ ├── composer.ts # DotfileComposer — multi-manager apply/status +│ ├── sources/filesystem.ts # FileSystemSource (local dir, stow basis) +│ ├── manifest/json.ts # JsonManifest (generic JSON, optional namespace) +│ └── index.ts # dotfile.create() / dotfile.compose() entry point +├── plugin/ # ai-rules-sync plugin implementations +│ ├── git-repo-source.ts # GitRepoSource (RepoConfig | RepoResolverFn | null) +│ └── ai-rules-sync-manifest.ts # AiRulesSyncManifest (over ai-rules-sync.json) ├── adapters/ # Plugin architecture for different AI tools │ ├── types.ts # SyncAdapter interface │ ├── base.ts # createBaseAdapter factory function diff --git a/src/adapters/base.ts b/src/adapters/base.ts index 60a7c15..7420332 100644 --- a/src/adapters/base.ts +++ b/src/adapters/base.ts @@ -1,8 +1,14 @@ import fs from 'fs-extra'; import path from 'path'; import { SyncAdapter, SyncOptions, LinkResult, ResolvedSource } from './types.js'; -import { linkEntry as engineLinkEntry, unlinkEntry as engineUnlinkEntry } from '../sync-engine.js'; +import { unlinkEntry as engineUnlinkEntry } from '../sync-engine.js'; import { addDependencyGeneric, removeDependencyGeneric } from '../project-config.js'; +import { dotfile } from '../dotany/index.js'; +import { GitRepoSource } from '../plugin/git-repo-source.js'; +import { AiRulesSyncManifest } from '../plugin/ai-rules-sync-manifest.js'; +import type { DotfileManager } from '../dotany/manager.js'; +import { RepoConfig } from '../config.js'; +import type { RepoResolverFn } from '../dotany/types.js'; /** * Configuration for creating a base adapter @@ -23,8 +29,8 @@ export interface AdapterConfig { } /** - * Create a base adapter with common functionality - * This factory function handles add/remove/link/unlink operations generically + * Create a base adapter with common functionality. + * Delegates link/unlink to the dotfile abstraction layer. 
*/ export function createBaseAdapter(config: AdapterConfig): SyncAdapter { return { @@ -41,6 +47,17 @@ export function createBaseAdapter(config: AdapterConfig): SyncAdapter { resolveSource: config.resolveSource, resolveTargetName: config.resolveTargetName, + forProject(projectPath: string, repoOrResolver: RepoConfig | RepoResolverFn | null, isLocal?: boolean): DotfileManager { + return dotfile.create({ + name: config.name, + source: new GitRepoSource(repoOrResolver, config), + targetDir: config.targetDir, + targetRoot: projectPath, + manifest: new AiRulesSyncManifest(projectPath, config.configPath, isLocal), + resolveTargetName: config.resolveTargetName, + }); + }, + async addDependency(projectPath, name, repoUrl, alias, isLocal = false, targetDir) { return addDependencyGeneric(projectPath, config.configPath, name, repoUrl, alias, isLocal, targetDir); }, @@ -50,12 +67,29 @@ export function createBaseAdapter(config: AdapterConfig): SyncAdapter { }, async link(options: SyncOptions): Promise { - return engineLinkEntry(this, options); + // Delegate to DotfileManager.add() (symlink only — manifest is written separately by addDependency) + // In user/global mode (skipIgnore=true), use userTargetDir if defined + const effectiveTargetDir = options.skipIgnore && config.userTargetDir + ? 
config.userTargetDir + : config.targetDir; + const manager = dotfile.create({ + name: config.name, + source: new GitRepoSource(options.repo, config), + targetDir: effectiveTargetDir, + targetRoot: options.projectPath, + resolveTargetName: config.resolveTargetName, + // No manifest here: addDependency handles manifest writes separately + }); + return manager.add(options.name, { + alias: options.alias, + targetDir: options.targetDir, + repoUrl: options.repo.url, + }); }, async unlink(projectPath: string, alias: string): Promise { return engineUnlinkEntry(this, projectPath, alias); - } + }, }; } diff --git a/src/adapters/types.ts b/src/adapters/types.ts index fe3fd09..48be305 100644 --- a/src/adapters/types.ts +++ b/src/adapters/types.ts @@ -1,4 +1,6 @@ import { RepoConfig } from '../config.js'; +import type { DotfileManager } from '../dotany/manager.js'; +import type { RepoResolverFn } from '../dotany/types.js'; /** * SyncAdapter defines how a specific type of AI tool configuration @@ -52,6 +54,13 @@ export interface SyncAdapter { */ resolveTargetName?(name: string, alias?: string, sourceSuffix?: string): string; + /** + * Create a DotfileManager bound to a specific project and repo. + * Provides the full dotfile API (add, remove, apply, diff, status, import, readManifest). + * Pass null as repo for remove-only operations (no source resolution needed). 
+ */ + forProject?(projectPath: string, repo: RepoConfig | RepoResolverFn | null, isLocal?: boolean): DotfileManager; + /** Add a dependency to project config */ addDependency( projectPath: string, diff --git a/src/commands/handlers.ts b/src/commands/handlers.ts index b2c0973..5cfafeb 100644 --- a/src/commands/handlers.ts +++ b/src/commands/handlers.ts @@ -8,7 +8,7 @@ import fs from 'fs-extra'; import { RepoConfig, getUserConfigPath, getUserProjectConfig } from '../config.js'; import { SyncAdapter } from '../adapters/types.js'; import { linkEntry, unlinkEntry, importEntry, ImportOptions } from '../sync-engine.js'; -import { addIgnoreEntry } from '../utils.js'; +import { addIgnoreEntry, removeIgnoreEntry } from '../utils.js'; import { addUserDependency, removeUserDependency, getCombinedProjectConfig, getRepoSourceConfig, getSourceDir, getTargetDir } from '../project-config.js'; /** @@ -96,22 +96,19 @@ export async function handleAdd( } } - const result = await adapter.link({ - projectPath: ctx.projectPath, - name, - repo: ctx.repo, - alias, - isLocal: ctx.isLocal, - targetDir: options?.targetDir, - skipIgnore: ctx.skipIgnore - }); - - // Use the provided alias if given, otherwise use targetName if different from sourceName - const depAlias = alias || (result.targetName === result.sourceName ? undefined : result.targetName); - let migrated = false; if (ctx.user || ctx.global) { - // User mode: write to user.json + // User mode: link only (no manifest for user mode via forProject) + const result = await adapter.link({ + projectPath: ctx.projectPath, + name, + repo: ctx.repo, + alias, + isLocal: ctx.isLocal, + targetDir: options?.targetDir, + skipIgnore: ctx.skipIgnore + }); + const depAlias = alias || (result.targetName === result.sourceName ? 
undefined : result.targetName); await addUserDependency( adapter.configPath, result.sourceName, @@ -120,25 +117,52 @@ export async function handleAdd( options?.targetDir ); console.log(chalk.green(`Updated user config dependency.`)); - } else { - // Project mode: write to project's ai-rules-sync.json - const migration = await adapter.addDependency( - ctx.projectPath, - result.sourceName, - ctx.repo.url, - depAlias, - ctx.isLocal, - options?.targetDir - ); - migrated = migration.migrated; + return { + sourceName: result.sourceName, + targetName: result.targetName, + linked: result.linked, + migrated: false + }; + } + + // Project mode: use forProject().add() to do symlink + manifest in one step + if (adapter.forProject) { + const manager = adapter.forProject(ctx.projectPath, ctx.repo, ctx.isLocal); + const result = await manager.add(name, { + alias, + targetDir: options?.targetDir, + repoUrl: ctx.repo.url, + }); + + // Ignore file management (ai-rules-sync specific, not dotfile layer responsibility) + if (result.linked) { + const relEntry = path.relative(path.resolve(ctx.projectPath), result.targetPath); + if (ctx.isLocal) { + const gitInfoExclude = path.join(ctx.projectPath, '.git', 'info', 'exclude'); + if (await fs.pathExists(path.dirname(gitInfoExclude))) { + await fs.ensureFile(gitInfoExclude); + if (await addIgnoreEntry(gitInfoExclude, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .git/info/exclude.`)); + } else { + console.log(chalk.gray(`"${relEntry}" already in .git/info/exclude.`)); + } + } else { + console.log(chalk.yellow(`Warning: Could not find .git/info/exclude. 
Skipping automatic ignore for private entry.`)); + console.log(chalk.yellow(`Please manually add "${relEntry}" to your private ignore file.`)); + } + } else { + const gitignorePath = path.join(ctx.projectPath, '.gitignore'); + if (await addIgnoreEntry(gitignorePath, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .gitignore.`)); + } else { + console.log(chalk.gray(`"${relEntry}" already in .gitignore.`)); + } + } + } const configFileName = ctx.isLocal ? 'ai-rules-sync.local.json' : 'ai-rules-sync.json'; console.log(chalk.green(`Updated ${configFileName} dependency.`)); - if (migrated) { - console.log(chalk.yellow('Detected legacy "cursor-rules*.json". Migrated to "ai-rules-sync*.json". Consider deleting the legacy files to avoid ambiguity.')); - } - if (ctx.isLocal) { const gitignorePath = path.join(ctx.projectPath, '.gitignore'); const added = await addIgnoreEntry(gitignorePath, 'ai-rules-sync.local.json', '# Local AI Rules Sync Config'); @@ -146,6 +170,49 @@ export async function handleAdd( console.log(chalk.green(`Added "ai-rules-sync.local.json" to .gitignore.`)); } } + + return { + sourceName: result.sourceName, + targetName: result.targetName, + linked: result.linked, + migrated: false + }; + } + + // Legacy fallback: separate link + addDependency calls + const result = await adapter.link({ + projectPath: ctx.projectPath, + name, + repo: ctx.repo, + alias, + isLocal: ctx.isLocal, + targetDir: options?.targetDir, + skipIgnore: ctx.skipIgnore + }); + const depAlias = alias || (result.targetName === result.sourceName ? undefined : result.targetName); + const migration = await adapter.addDependency( + ctx.projectPath, + result.sourceName, + ctx.repo.url, + depAlias, + ctx.isLocal, + options?.targetDir + ); + migrated = migration.migrated; + + const configFileName = ctx.isLocal ? 
'ai-rules-sync.local.json' : 'ai-rules-sync.json'; + console.log(chalk.green(`Updated ${configFileName} dependency.`)); + + if (migrated) { + console.log(chalk.yellow('Detected legacy "cursor-rules*.json". Migrated to "ai-rules-sync*.json". Consider deleting the legacy files to avoid ambiguity.')); + } + + if (ctx.isLocal) { + const gitignorePath = path.join(ctx.projectPath, '.gitignore'); + const added = await addIgnoreEntry(gitignorePath, 'ai-rules-sync.local.json', '# Local AI Rules Sync Config'); + if (added) { + console.log(chalk.green(`Added "ai-rules-sync.local.json" to .gitignore.`)); + } } return { @@ -313,9 +380,8 @@ export async function handleRemove( }; } - await adapter.unlink(projectPath, alias); - if (isUser) { + await adapter.unlink(projectPath, alias); const { removedFrom } = await removeUserDependency(adapter.configPath, alias); if (removedFrom.length > 0) { @@ -327,6 +393,30 @@ export async function handleRemove( return { removedFrom, migrated: false }; } + // Project mode: use forProject().remove() to do symlink deletion + manifest update in one step + if (adapter.forProject) { + await adapter.forProject(projectPath, null, false).remove(alias); + + // Ignore cleanup — try both gitignore and git/info/exclude since we don't know + // which was used when the entry was originally added + const ignoreEntries = [`${adapter.targetDir}/${alias}`]; + const gitignorePath = path.join(projectPath, '.gitignore'); + const gitInfoExclude = path.join(projectPath, '.git', 'info', 'exclude'); + for (const entry of ignoreEntries) { + if (await removeIgnoreEntry(gitignorePath, entry)) { + console.log(chalk.green(`Removed "${entry}" from .gitignore.`)); + } + if (await removeIgnoreEntry(gitInfoExclude, entry)) { + console.log(chalk.green(`Removed "${entry}" from .git/info/exclude.`)); + } + } + + const configFileName = 'ai-rules-sync.json'; + return { removedFrom: [configFileName], migrated: false }; + } + + // Legacy fallback + await adapter.unlink(projectPath, 
alias); const { removedFrom, migrated } = await adapter.removeDependency(projectPath, alias); if (removedFrom.length > 0) { @@ -443,6 +533,23 @@ export async function handleImport( const result = await importEntry(adapter, importOpts); + // Ignore file management for the imported entry + const relEntry = `${adapter.targetDir}/${result.targetName}`; + if (ctx.isLocal) { + const gitInfoExclude = path.join(ctx.projectPath, '.git', 'info', 'exclude'); + if (await fs.pathExists(path.dirname(gitInfoExclude))) { + await fs.ensureFile(gitInfoExclude); + if (await addIgnoreEntry(gitInfoExclude, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .git/info/exclude.`)); + } + } + } else { + const gitignorePath = path.join(ctx.projectPath, '.gitignore'); + if (await addIgnoreEntry(gitignorePath, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .gitignore.`)); + } + } + // Add to config await adapter.addDependency(ctx.projectPath, name, ctx.repo.url, undefined, ctx.isLocal); const configFileName = ctx.isLocal ? 
'ai-rules-sync.local.json' : 'ai-rules-sync.json'; diff --git a/src/commands/helpers.ts b/src/commands/helpers.ts index e891f97..b0154d4 100644 --- a/src/commands/helpers.ts +++ b/src/commands/helpers.ts @@ -6,8 +6,8 @@ import path from 'path'; import chalk from 'chalk'; import { getConfig, setConfig, getReposBaseDir, getCurrentRepo, RepoConfig } from '../config.js'; import { cloneOrUpdateRepo } from '../git.js'; -import { getCombinedProjectConfig, ProjectConfig } from '../project-config.js'; -import { stripCopilotSuffix } from '../adapters/index.js'; +import { getCombinedProjectConfig } from '../project-config.js'; +import { stripCopilotSuffix, adapterRegistry } from '../adapters/index.js'; /** * Get the target repository based on CLI options @@ -70,68 +70,36 @@ export async function getTargetRepo(options: { target?: string }): Promise { const cfg = await getCombinedProjectConfig(projectPath); - const counts: Record, number> = { - cursor: - Object.keys(cfg.cursor?.rules || {}).length + - Object.keys(cfg.cursor?.commands || {}).length + - Object.keys(cfg.cursor?.skills || {}).length + - Object.keys(cfg.cursor?.agents || {}).length, - copilot: - Object.keys(cfg.copilot?.instructions || {}).length + - Object.keys(cfg.copilot?.skills || {}).length + - Object.keys(cfg.copilot?.prompts || {}).length + - Object.keys(cfg.copilot?.agents || {}).length, - claude: - Object.keys(cfg.claude?.skills || {}).length + - Object.keys(cfg.claude?.agents || {}).length + - Object.keys(cfg.claude?.rules || {}).length + - Object.keys(cfg.claude?.md || {}).length, - trae: - Object.keys(cfg.trae?.rules || {}).length + - Object.keys(cfg.trae?.skills || {}).length, - opencode: - Object.keys(cfg.opencode?.agents || {}).length + - Object.keys(cfg.opencode?.skills || {}).length + - Object.keys(cfg.opencode?.commands || {}).length + - Object.keys(cfg.opencode?.tools || {}).length, - codex: - Object.keys(cfg.codex?.rules || {}).length + - Object.keys(cfg.codex?.skills || {}).length, - gemini: - 
Object.keys(cfg.gemini?.commands || {}).length + - Object.keys(cfg.gemini?.skills || {}).length + - Object.keys(cfg.gemini?.agents || {}).length, - warp: Object.keys(cfg.warp?.skills || {}).length, - windsurf: - Object.keys(cfg.windsurf?.rules || {}).length + - Object.keys(cfg.windsurf?.skills || {}).length, - cline: - Object.keys(cfg.cline?.rules || {}).length + - Object.keys(cfg.cline?.skills || {}).length, - 'agents-md': Object.keys(cfg.agentsMd || {}).length - }; - - const activeModes = (Object.entries(counts) as [Exclude, number][]) + + // Count total entries per tool using the adapter registry + const toolCounts: Record = {}; + + for (const adapter of adapterRegistry.all()) { + const [topLevel, subLevel] = adapter.configPath; + // agentsMd is flat (no subLevel nesting) + const count = topLevel === 'agentsMd' + ? Object.keys((cfg as any).agentsMd || {}).length + : Object.keys((cfg as any)[topLevel]?.[subLevel] || {}).length; + + // Map adapter tool name to CLI mode name (agentsMd → agents-md) + const modeName = topLevel === 'agentsMd' ? 'agents-md' : topLevel; + toolCounts[modeName] = (toolCounts[modeName] || 0) + count; + } + + const activeModes = Object.entries(toolCounts) .filter(([, count]) => count > 0) .map(([mode]) => mode); @@ -141,10 +109,17 @@ export async function inferDefaultMode(projectPath: string): Promise a.tool === 'agentsMd' ? 'agents-md' : a.tool) + )]; + const toolCommands = tools.map(t => `"ais ${t} ..."`).join(', '); + const explicitTools = toolCommands || '"ais ..."'; + if (mode === 'ambiguous') { throw new Error(`Multiple tool configs exist in this project. 
Please use ${explicitTools} explicitly.`); } diff --git a/src/commands/install.ts b/src/commands/install.ts index 14cf0f5..b5c9fbc 100644 --- a/src/commands/install.ts +++ b/src/commands/install.ts @@ -11,6 +11,7 @@ import { getCombinedProjectConfig, getConfigSource, RuleEntry, ProjectConfig } f import { getConfig, setConfig, getReposBaseDir, getUserProjectConfig, getUserConfigPath, RepoConfig } from '../config.js'; import { cloneOrUpdateRepo } from '../git.js'; import { parseConfigEntry } from './helpers.js'; +import type { RepoResolverFn } from '../dotany/types.js'; const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; @@ -94,12 +95,34 @@ function getEntriesFromConfig( } /** - * Generic install function - works with any adapter + * Generic install function - works with any adapter. + * Uses manager.apply() when the adapter supports forProject() (dotfile API). + * Falls back to manual loop for adapters without forProject(). */ export async function installEntriesForAdapter( adapter: SyncAdapter, projectPath: string ): Promise { + if (adapter.forProject) { + // Modern path: use manager.apply() which reads manifest and re-links all entries + const globalConfig = await getConfig(); + const repos = globalConfig.repos || {}; + + const repoResolver: RepoResolverFn = (repoUrl: string, entryName: string) => + findOrCreateRepo(repos, repoUrl, entryName); + + const manager = adapter.forProject(projectPath, repoResolver); + const result = await manager.apply(); + + if (result.linked.length === 0 && result.skipped.length === 0) { + console.log(chalk.yellow(`No ${adapter.tool} ${adapter.subtype} found in ai-rules-sync*.json.`)); + return; + } + console.log(chalk.green(`All ${adapter.tool} ${adapter.subtype} installed successfully.`)); + return; + } + + // Legacy fallback: manual loop for adapters without forProject() const config = await getCombinedProjectConfig(projectPath); const entries = getEntriesFromConfig(config, adapter); @@ -110,8 +133,6 @@ export async function 
installEntriesForAdapter( const globalConfig = await getConfig(); const repos = globalConfig.repos || {}; - - // Get local entries to determine isLocal flag const localEntries = await getLocalEntries(projectPath, adapter); for (const [key, value] of Object.entries(entries)) { @@ -121,8 +142,6 @@ export async function installEntriesForAdapter( const repoConfig = await findOrCreateRepo(repos, repoUrl, entryName); const isLocal = Object.prototype.hasOwnProperty.call(localEntries, key); - - // Extract targetDir from config entry if it exists const targetDir = typeof value === 'object' && value.targetDir ? value.targetDir : undefined; await adapter.link({ diff --git a/src/dotany/DESIGN.md b/src/dotany/DESIGN.md new file mode 100644 index 0000000..1847cdb --- /dev/null +++ b/src/dotany/DESIGN.md @@ -0,0 +1,505 @@ +# Dotfile 抽象层设计文档 + +## 一、背景与动机 + +### 问题:硬编码蔓延 + +在引入这一层之前,ai-rules-sync 的核心逻辑分散在几个文件中,并且对具体工具(cursor、copilot、claude 等)存在大量硬编码: + +``` +project-config.ts — 44 行 REPO_SOURCE_PATHS 常量列出所有 tool/subtype 组合 + — mergeCombined() 每个工具各写一遍展开逻辑(约 50 行) + — ProjectConfig / SourceDirConfig / RepoSourceConfig 接口各写一遍 +commands/helpers.ts — DefaultMode 联合类型硬编码所有工具名 + — inferDefaultMode() 对每个工具手动累加 count + — requireExplicitMode() 手写工具列表字符串 +``` + +每新增一个工具(如 windsurf、cline),需要在 **8 个不同位置** 同步修改,漏掉任何一处都会导致 bug。 + +### 目标 + +1. **提取通用能力**:将 symlink 创建、manifest 读写、ignore 文件管理提炼为与工具无关的库。 +2. **消除硬编码**:所有工具列表改为从 adapter registry 动态生成。 +3. **统一抽象**:新增工具只需「创建 adapter 文件 + 注册」两步,其他文件零修改。 +4. 
**为提取 npm 包铺路**:`src/dotany/` 在结构稳定后可直接提取为独立包。 + +--- + +## 二、对标分析 + +| 特性 | GNU Stow | chezmoi | yadm | 本库 | +|------|----------|---------|------|------| +| 链接方式 | symlink | 文件拷贝 | git 直接管理 | **symlink-only** | +| 状态追踪 | 无 | SHA256 manifest | git history | **可插拔 ManifestStore** | +| 多 source | ✗ | ✗ | ✗ | **✓(SourceResolver 接口)** | +| 扩展性 | ✗ | ✗ | ✗ | **Plugin 架构** | +| Stow 模式 | ✓ | ✗ | ✗ | **✓(不传 manifest)** | +| Manifest 模式 | ✗ | ✓ | 间接 | **✓(传入 manifest)** | +| 多 repo 支持 | ✗ | ✗ | ✗ | **✓(RepoResolverFn)** | +| Library-first | ✗ | ✗ | ✗ | **✓** | + +**核心差异化**:同一套 API 同时支持 Stow 模式(目录即真相)和 Manifest 模式(声明式追踪),通过 `manifest` 参数是否传入来区分,调用方无需感知底层差异。 + +--- + +## 三、模块结构 + +``` +src/ +├── dotfile/ ← 通用 dotfile 库(与 ai-rules-sync 业务无关) +│ ├── types.ts ← 所有核心接口与类型 +│ ├── manager.ts ← DotfileManager 类(唯一实现,通过 linkany 执行 symlink) +│ ├── composer.ts ← DotfileComposer(多 manager 组合) +│ ├── sources/ +│ │ ├── filesystem.ts ← FileSystemSource(本地目录 SourceResolver) +│ │ └── git.ts ← GitSource(git 仓库 SourceResolver,自动 clone/pull) +│ ├── manifest/ +│ │ └── json.ts ← JsonManifest(通用 JSON ManifestStore) +│ └── index.ts ← 主入口:dotfile.create() / dotfile.compose() +│ +├── plugin/ ← ai-rules-sync 的 dotfile 插件实现 +│ ├── git-repo-source.ts ← GitRepoSource(SourceResolver 的 git 仓库实现) +│ └── ai-rules-sync-manifest.ts ← AiRulesSyncManifest(ManifestStore 的 json 实现) +│ +└── adapters/ + ├── base.ts ← createBaseAdapter() 使用 dotfile.create() + └── types.ts ← SyncAdapter 接口含 forProject() 方法 +``` + +--- + +## 四、核心接口设计 + +### 4.1 两个可插拔接口 + +设计的核心是两个接口,让 `DotfileManager` 与具体存储/来源解耦: + +```typescript +// 来源解析:「这个名字对应的文件在哪里?」 +interface SourceResolver { + resolve(name: string, config: ResolveConfig): Promise; + resolveFromManifest?(entry: ManifestEntry): Promise; // apply() 专用 + list?(config: ResolveConfig): Promise; // stow() 专用 + destinationPath?(name: string): Promise; // import() 专用 +} + +// Manifest 持久化:「哪些文件已经被纳管?」 +interface ManifestStore { + readAll(): Promise>; + write(key: string, value: 
ManifestEntry): Promise; + delete(key: string): Promise; +} +``` + +**为什么是两个接口而不是一个?** + +来源解析(从哪来)和状态追踪(管了哪些)是正交的关注点: +- GNU Stow:有来源解析(目录扫描),无 manifest +- chezmoi:有 manifest,来源是固定目录 +- 本库:两者都可以有,也可以只有其中一个 + +### 4.2 ManifestEntry 的 meta 字段 + +```typescript +interface ManifestEntry { + sourceName: string; // 在源仓库中的原始名称 + meta?: Record; // 插件自定义元数据 +} +``` + +`meta` 是开放的 `Record`,不在通用库层面规定具体字段。ai-rules-sync 插件使用它存储: +- `repoUrl`:源仓库 URL(用于 apply() 时按条目重新定位仓库) +- `targetDir`:覆盖默认目标目录 +- `alias`:目标文件名与源名不同时的别名 + +### 4.3 RepoResolverFn:多 repo 动态解析 + +```typescript +// 定义在 dotfile/types.ts — 返回 any 避免与 ai-rules-sync 的 RepoConfig 耦合 +type RepoResolverFn = (repoUrl: string, entryName: string) => Promise; +``` + +`GitRepoSource` 构造函数接受 `RepoConfig | RepoResolverFn | null`: + +| 传入值 | 适用场景 | +|--------|---------| +| `RepoConfig` | 静态单仓库(add/import 时已知 repo) | +| `RepoResolverFn` | 动态多仓库(install 时按 manifest 条目动态 find/clone) | +| `null` | 仅删除操作(remove 不需要解析 source) | + +### 4.4 DotfileCreateOptions 中的 resolveTargetName + +```typescript +interface DotfileCreateOptions { + // ... 
+ resolveTargetName?: (name: string, alias?: string, sourceSuffix?: string) => string; +} +``` + +这个可选 hook 解决了「suffix 感知重命名」问题: + +**场景**:copilot instructions 文件在仓库中叫 `my-rule.instructions.md`,用户用别名 `my-rule` 引用它。如果直接用 `alias || resolved.name`,目标文件名会是 `my-rule`(丢失 suffix)。 + +**解决**:`createSuffixAwareTargetResolver` 检查 alias 是否已有 suffix,没有则自动追加 source suffix: + +```typescript +// base.ts 中 +const resolver = createSuffixAwareTargetResolver(['.instructions.md', '.md']); +// 调用:resolver('my-rule', undefined, '.instructions.md') → 'my-rule.instructions.md' +``` + +--- + +## 五、DotfileManager 完整 API + +```typescript +class DotfileManager { + // ── 单文件操作 ────────────────────────────────────────────────── + add(name, options?): Promise + // 创建 symlink;若有 manifest,同时写入 manifest。等价于 chezmoi add。 + + remove(alias): Promise + // 删除 symlink;若有 manifest,同时从 manifest 删除。 + + import(targetFilePath, name, options?): Promise + // 将项目中已有的文件纳管:copy → remove original → symlink。 + // 需要 source.destinationPath();纯 fs 操作,不含 git 逻辑。 + + // ── 批量操作 ──────────────────────────────────────────────────── + apply(): Promise + // 幂等地重建所有 manifest 条目对应的 symlink。需要 manifest。 + // 等价于 chezmoi apply。 + + stow(): Promise + // 将 source 目录下所有文件 symlink 到 targetDir。需要 source.list()。 + // 等价于 GNU Stow。 + + unstow(): Promise + // 移除所有 stowed symlink。有 manifest 则按 manifest,否则按 source.list()。 + + restow(): Promise + // unstow() + stow(),用于更新 source 目录后刷新所有 symlink。 + + // ── 只读查询 ──────────────────────────────────────────────────── + diff(): Promise + // 预览 apply() 会做什么,不执行任何写操作。需要 manifest。 + + status(): Promise + // 返回每个 manifest 条目当前的 symlink 状态(linked/missing/conflict)。 + + readManifest(): Promise> + // 读取所有 manifest 条目(无 manifest 时返回 {})。 +} +``` + +### 操作矩阵 + +| 操作 | 需要 source | 需要 manifest | 需要 source.list() | 需要 source.destinationPath() | +|------|------------|--------------|-------------------|------------------------------| +| add | ✓ | 可选(有则写) | ✗ | ✗ | +| remove | ✗ | 可选(有则删) | ✗ | ✗ | +| import 
| ✓ | 可选(有则写) | ✗ | ✓ | +| apply | ✓ | ✓ | ✗ | ✗ | +| stow | ✓ | 可选(有则写) | ✓ | ✗ | +| unstow | ✓/manifest 二选一 | 可选 | 可选 | ✗ | +| diff | ✓ | ✓ | ✗ | ✗ | +| status | ✗ | ✓ | ✗ | ✗ | + +--- + +## 六、两种工作模式 + +### 6.1 Stow 模式(不传 manifest) + +```typescript +const manager = dotfile.create({ + name: 'shell', + source: new FileSystemSource('~/dotfiles/shell'), + targetDir: '~/', +}); + +await manager.stow(); +// → 将 ~/dotfiles/shell/ 下所有文件 symlink 到 ~/ +// → ~/.zshrc → ~/dotfiles/shell/.zshrc + +await manager.add('.vimrc'); +// → 单独 add 一个文件(不写 manifest) +``` + +**用途**:简单的目录镜像,目录本身即真相,无需追踪状态。 + +### 6.2 Manifest 模式(传入 manifest) + +```typescript +// 项目级:forProject() 自动组装 source + manifest +const manager = adapter.forProject(projectPath, repo, isLocal); + +await manager.add('my-rule', { repoUrl: 'https://...' }); +// → 创建 symlink + 写入 ai-rules-sync.json + +await manager.apply(); +// → 读取 ai-rules-sync.json → 幂等地重建所有 symlink + +await manager.status(); +// → [{ alias: 'my-rule', status: 'linked' }, ...] +``` + +**用途**:声明式管理,支持跨机器同步(`apply()` 基于 manifest 幂等重建)。 + +### 6.3 多 manager 组合(DotfileComposer) + +```typescript +const composer = dotfile.compose([ + adapter.forProject(projectPath, repo1), + adapter.forProject(projectPath, repo2), +]); + +await composer.apply(); // 依次 apply,后者可覆盖前者 +await composer.status(); // 汇总所有 manager 的状态 +``` + +--- + +## 七、内置实现 + +### 7.1 FileSystemSource + +本地目录 SourceResolver,GNU Stow 模式的基础: + +```typescript +const source = new FileSystemSource('/home/user/dotfiles/shell'); +// resolve('zshrc') → { name: 'zshrc', path: '/home/user/dotfiles/shell/zshrc' } +// list() → ['zshrc', 'bashrc', 'vimrc', ...] 
+// destinationPath('zshrc') → '/home/user/dotfiles/shell/zshrc' +``` + +无任何 git 依赖,可独立于 ai-rules-sync 使用。 + +### 7.2 GitSource + +git 仓库 SourceResolver,自动处理 clone/pull: + +```typescript +const source = new GitSource( + 'https://github.com/user/dotfiles.git', + '/home/user/.cache/my-tool/user-dotfiles', // clone 到这里 + 'nvim' // 使用仓库内 nvim/ 子目录 +); +// resolve('init.lua') → 先 clone/pull,再返回 /...clone.../nvim/init.lua +// list() → 列出 /...clone.../nvim/ 下所有文件 +``` + +无 ai-rules-sync 依赖,可独立使用。 + +### 7.3 JsonManifest + +通用 JSON ManifestStore,支持可选的命名空间: + +```typescript +// 扁平结构(无 namespace) +const m1 = new JsonManifest('/path/to/manifest.json'); + +// 命名空间结构(多工具共用一个文件) +const m2 = new JsonManifest('/path/to/manifest.json', 'cursor-rules'); +// 文件结构:{ "cursor-rules": { "my-rule": { sourceName: "...", meta: {} } } } +``` + +不绑定任何 ai-rules-sync 业务格式,可独立使用。 + +--- + +## 八、plugin 实现说明(ai-rules-sync 专用) + +### GitRepoSource + +负责「从 git 仓库解析出源文件路径」,支持三种构造方式: + +```typescript +// 1. 静态单仓库(add/import 时) +new GitRepoSource(repoConfig, adapterConfig) + +// 2. 动态多仓库(install 的 apply() 时) +new GitRepoSource( + (repoUrl, entryName) => findOrCreateRepo(repos, repoUrl, entryName), + adapterConfig +) + +// 3. 仅删除模式(remove 时不需要 source) +new GitRepoSource(null, adapterConfig) +``` + +关键方法: +- `resolve(name, config)` — `add()` 时调用,使用 `config.repoUrl` 或静态 repo +- `resolveFromManifest(entry)` — `apply()` 时调用,从 `entry.meta.repoUrl` 动态找仓库 +- `destinationPath(name)` — `import()` 时调用,返回仓库中的目标路径 + +### AiRulesSyncManifest + +负责「将 ai-rules-sync.json 适配为通用 ManifestStore 接口」: + +**readAll()** 将 JSON 格式翻译为 `ManifestEntry`: +```json +// ai-rules-sync.json 中 +{ "cursor": { "rules": { "my-rule": "https://..." } } } + +// 翻译为 ManifestEntry +{ "my-rule": { "sourceName": "my-rule", "meta": { "repoUrl": "https://..." 
} } } +``` + +**write() / delete()** 委托给 `addDependencyGeneric` / `removeDependencyGeneric`,复用现有的 JSON 读写和迁移逻辑。 + +--- + +## 九、关键设计决策 + +### 决策 A:link() 用于 user 模式,project 模式用 forProject().add() + +Phase 2 中命令流统一到 `forProject()` API: + +``` +# project 模式 +ais cursor add my-rule + → adapter.forProject(projectPath, repo, isLocal).add(name, opts) + // 一步完成:symlink + 写 ai-rules-sync.json + +# user 模式(user.json 不走 forProject 的 manifest) +ais cursor add my-rule --user + → adapter.link(...) // 只创建 symlink + → addUserDependency(...) // 单独写 user.json + +# install(重建 symlink) +ais cursor install + → adapter.forProject(projectPath, repoResolver).apply() + // 读 manifest → 按条目动态 find/clone repo → 幂等重建 symlink + +# remove +ais cursor remove my-rule + → adapter.forProject(projectPath, null).remove(alias) + // null 表示无需 source(remove 不需要解析来源) + // 一步完成:删 symlink + 从 manifest 删除 + // ignore 条目清理由 handlers.ts 负责(ai-rules-sync 专有逻辑) +``` + +### 决策 B:import 中 fs 与 git 分离 + +`manager.import()` 只做文件系统操作,git 操作留在 `sync-engine.ts`: + +```typescript +// sync-engine.ts importEntry() +const manager = adapter.forProject(projectPath, repo, isLocal); +await manager.import(targetPath, name, { force, repoUrl: repo.url }); +// manager.import() 完成:copy → remove original → symlink + +// git 操作保留在 sync-engine(ai-rules-sync 专有逻辑) +await execa('git', ['add', relativePath], { cwd: repoDir }); +await execa('git', ['commit', '-m', message], { cwd: repoDir }); +``` + +**权衡**:相比原来「copy → git commit → remove → symlink」的顺序,新顺序是「copy → remove → symlink → git commit」。若 git 失败,symlink 已建立,但 commit 未完成。这简化了实现,且 dotfile 库本身不应感知 git 概念。 + +### 决策 C:避免循环依赖 + +**问题**:如果 `project-config.ts` 的 `mergeCombined()` 从 `adapters/index.ts` 读取 registry,会形成循环: + +``` +project-config.ts → adapters/index.ts → adapters/base.ts → project-config.ts +``` + +**解决**:`mergeCombined()` 不依赖 registry,改为对两个 config 对象的键做动态迭代: + +```typescript +// 不需要知道有哪些 tool,直接合并两个对象中实际存在的键 +function mergeCombined(main, local) { + const allKeys = new 
Set([...Object.keys(main), ...Object.keys(local)]); + for (const key of allKeys) { + // agentsMd:flat merge + // 其他 tool:按 subtype 两层 merge + } +} +``` + +`helpers.ts` 可以安全地导入 `adapterRegistry`(单向依赖:commands → adapters)。 + +### 决策 D:RepoSourceConfig 使用 any 索引 + +```typescript +interface RepoSourceConfig { + rootPath?: string; + [tool: string]: any; // 而非 Record | string | undefined +} +``` + +严格类型(`Record | string | undefined`)会让 `repoConfig.windsurf?.rules` 这样的访问报 TS 错误,因为 TypeScript 无法静态判断 `windsurf` 是 Record 还是 string。 + +使用 `any` 牺牲了部分类型安全,但保留了直观的点语法访问,且测试文件中已有具体的值断言来保证正确性。 + +### 决策 F:所有 symlink 操作通过 linkany 执行 + +`manager.ts` 不再直接调用 `fs.ensureSymlink` / `fs.remove`,而是通过两个私有 helper 委托给 `linkany`: + +```typescript +// doLink(source, target) → 'linked' | 'noop' | 'conflict' +// 内部调用 linkany.add({ version: 1, installs: [] }, { source, target, atomic: true }) +// 传入 in-memory manifest 对象(非文件路径)→ 获得原子 symlink,无需持久化 + +// doUnlink(target) → boolean +// 只删除 symlink,不删真实文件 +await fs.unlink(target); // 而非 fs.remove(后者会删真实文件) +``` + +**为什么用 in-memory manifest?** + +`linkany.add()` 的第一个参数既接受文件路径字符串,也接受 manifest 对象。传入对象时,linkany 跳过文件读写但仍执行真实的 symlink 操作,获得原子性保证而无需引入额外的持久化文件。 + +**re-link 处理**:linkany 遇到 target 已是指向不同 source 的 symlink 时,会拒绝操作(返回错误 "Refusing to migrate")。`doLink` 在调用 linkany 前检测这种情况并先 `fs.unlink`,由此实现安全的 re-link。 + +**结果映射**: +| `result.changes` 包含 `symlink` 或 `move` action | → `'linked'`(实际创建/更新) | +|---|---| +| changes 为空(target 已正确指向 source) | → `'noop'` | +| target 存在且不是 symlink | → `'conflict'`(提前返回,不调用 linkany) | + +### 决策 E:sync-engine.ts 保留为薄包装层 + +`linkEntry()` 和 `unlinkEntry()` 继续作为导出函数存在(向后兼容): +- `unlinkEntry()` 仍被 `adapter.unlink()` 使用(从 project config 读取 targetDir) +- `linkEntry()` 保留为兼容导出,不再被 `adapter.link()` 调用 +- `importEntry()` 现在按 `adapter.forProject` 是否存在走不同路径,优先使用新 API + +--- + +## 十、新增工具的流程对比 + +### 之前(8 步) + +1. 创建 `src/adapters/-.ts` +2. 在 `src/adapters/index.ts` 注册 +3. 在 `project-config.ts` 的 `REPO_SOURCE_PATHS` 追加条目 +4. 
在 `SourceDirConfig` 接口追加字段 +5. 在 `ProjectConfig` 接口追加字段 +6. 在 `RepoSourceConfig` 接口追加字段 +7. 在 `mergeCombined()` 追加合并逻辑 +8. 在 `commands/helpers.ts` 的 `DefaultMode` 联合类型和 `inferDefaultMode()` 追加 + +### 之后(2 步) + +1. 创建 `src/adapters/-.ts` +2. 在 `src/adapters/index.ts` 注册 + +其他所有文件自动感知新工具。 + +--- + +## 十一、未来演进方向 + +### 提取为 npm 包 + +`src/dotany/` 核心模块(`types.ts`、`manager.ts`、`composer.ts`、`sources/`、`manifest/`、`index.ts`)仅依赖 `fs-extra`、`chalk`、`path`、`linkany`,完全不依赖 ai-rules-sync 业务逻辑。结构稳定后可直接发布为 `@ai-rules-sync/dotfile` 或独立的 `dotfile-manager`(需将 `linkany` 作为 peer/direct dependency 一并发布)。 + +### diff/status CLI + +```bash +ais cursor diff # 预览 install 会做什么,不执行 +ais cursor status # 查看所有 cursor rules 的 symlink 状态 +``` diff --git a/src/dotany/composer.ts b/src/dotany/composer.ts new file mode 100644 index 0000000..9505ace --- /dev/null +++ b/src/dotany/composer.ts @@ -0,0 +1,26 @@ +import { ApplyResult, StatusResult } from './types.js'; +import { DotfileManager } from './manager.js'; + +/** + * Compose multiple DotfileManagers, applying them in order. + * Later managers override earlier ones for the same alias. 
+ */ +export class DotfileComposer { + constructor(private managers: DotfileManager[]) {} + + async apply(): Promise { + const results: ApplyResult[] = []; + for (const manager of this.managers) { + results.push(await manager.apply()); + } + return results; + } + + async status(): Promise { + const results: StatusResult[] = []; + for (const manager of this.managers) { + results.push(await manager.status()); + } + return results; + } +} diff --git a/src/dotany/index.ts b/src/dotany/index.ts new file mode 100644 index 0000000..0bb7173 --- /dev/null +++ b/src/dotany/index.ts @@ -0,0 +1,19 @@ +import { DotfileCreateOptions } from './types.js'; +import { DotfileManager } from './manager.js'; +import { DotfileComposer } from './composer.js'; + +export const dotfile = { + create(options: DotfileCreateOptions): DotfileManager { + return new DotfileManager(options); + }, + compose(managers: DotfileManager[]): DotfileComposer { + return new DotfileComposer(managers); + }, +}; + +export * from './types.js'; +export { DotfileManager } from './manager.js'; +export { DotfileComposer } from './composer.js'; +export { FileSystemSource } from './sources/filesystem.js'; +export { GitSource } from './sources/git.js'; +export { JsonManifest } from './manifest/json.js'; diff --git a/src/dotany/manager.ts b/src/dotany/manager.ts new file mode 100644 index 0000000..577ddee --- /dev/null +++ b/src/dotany/manager.ts @@ -0,0 +1,420 @@ +import fs from 'fs-extra'; +import path from 'path'; +import chalk from 'chalk'; +import { add as linkanyAdd } from 'linkany'; +import { DotfileCreateOptions, LinkResult, AddOptions, ApplyResult, ManifestEntry, StowResult, DiffResult, StatusEntry, StatusResult, ManagerImportOptions } from './types.js'; + +/** + * Core dotfile manager: handles symlink creation and manifest reads/writes. 
+ * Works in two modes: + * - Stow mode: no manifest, directory is the source of truth + * - Manifest mode: manifest tracks all linked entries + */ +export class DotfileManager { + constructor(private opts: DotfileCreateOptions) {} + + get targetRoot(): string { + return this.opts.targetRoot ?? process.cwd(); + } + + get targetDir(): string { + return this.opts.targetDir; + } + + /** + * Link a single file and optionally write to manifest (manifest mode). + * Equivalent to `chezmoi add`. + */ + async add(name: string, options: AddOptions = {}): Promise { + const { alias, targetDir, repoUrl } = options; + + // Resolve source + const resolved = await this.opts.source.resolve(name, { repoUrl, targetDir }); + + // Resolve target name (suffix-aware if resolver provided) + const targetName = this.opts.resolveTargetName + ? this.opts.resolveTargetName(name, alias, resolved.suffix) + : (alias || resolved.name); + + // Determine target directory + const targetDirPath = targetDir ? path.normalize(targetDir) : this.opts.targetDir; + + const absoluteRoot = path.resolve(this.targetRoot); + const targetDirAbsolute = path.join(absoluteRoot, targetDirPath); + const targetPath = path.join(targetDirAbsolute, targetName); + + // Ensure target directory exists + await fs.ensureDir(targetDirAbsolute); + + // Create symlink + const linkResult = await this.doLink(resolved.path, targetPath); + if (linkResult === 'conflict') { + console.log(chalk.yellow(`Warning: "${targetPath}" exists and is not a symlink. 
Skipping to avoid data loss.`)); + return { sourceName: resolved.name, targetName, linked: false, targetPath }; + } + console.log(chalk.green(`Linked "${resolved.name}" to project as "${targetName}".`)); + + // Write to manifest if one is provided + if (this.opts.manifest) { + const manifestEntry: ManifestEntry = { + sourceName: name, + meta: { repoUrl, targetDir, alias }, + }; + await this.opts.manifest.write(targetName, manifestEntry); + } + + return { sourceName: resolved.name, targetName, linked: true, targetPath }; + } + + /** + * Remove a symlink and optionally delete from manifest. + * Equivalent to `chezmoi remove`. + */ + async remove(alias: string): Promise { + const absoluteRoot = path.resolve(this.targetRoot); + + // Get target directory (from manifest entry if available) + let targetDirPath = this.opts.targetDir; + if (this.opts.manifest) { + const entries = await this.opts.manifest.readAll(); + const entry = entries[alias]; + if (entry?.meta?.targetDir) { + targetDirPath = path.normalize(entry.meta.targetDir as string); + } + } + + const targetDir = path.join(absoluteRoot, targetDirPath); + let actualFileName = alias; + let targetPath = path.join(targetDir, alias); + + if (!await fs.pathExists(targetPath)) { + const found = await this.findWithCommonSuffix(targetDir, alias); + if (found) { + actualFileName = path.basename(found); + targetPath = found; + } + } + + const removed = await this.doUnlink(targetPath); + if (removed) { + console.log(chalk.green(`Removed "${alias}" from project.`)); + } else { + console.log(chalk.yellow(`Entry "${alias}" not found in project.`)); + } + + // Remove from manifest + if (this.opts.manifest) { + await this.opts.manifest.delete(alias); + } + } + + /** + * Idempotently apply all manifest entries (chezmoi apply semantics). + * Requires manifest and a source that supports resolveFromManifest(). 
+ */ + async apply(): Promise { + if (!this.opts.manifest) { + throw new Error('apply() requires a manifest store'); + } + + const entries = await this.opts.manifest.readAll(); + const linked: LinkResult[] = []; + const skipped: string[] = []; + + for (const [alias, entry] of Object.entries(entries)) { + try { + let result: LinkResult; + if (this.opts.source.resolveFromManifest) { + // Use manifest-aware resolution (supports multi-repo) + const resolved = await this.opts.source.resolveFromManifest(entry); + const targetName = this.opts.resolveTargetName + ? this.opts.resolveTargetName(entry.sourceName, alias !== entry.sourceName ? alias : undefined, resolved.suffix) + : (alias !== entry.sourceName ? alias : resolved.name); + const targetDirPath = entry.meta?.targetDir + ? path.normalize(entry.meta.targetDir as string) + : this.opts.targetDir; + const absoluteRoot = path.resolve(this.targetRoot); + const targetDirAbsolute = path.join(absoluteRoot, targetDirPath); + const targetPath = path.join(targetDirAbsolute, targetName); + + await fs.ensureDir(targetDirAbsolute); + + const applyLinkResult = await this.doLink(resolved.path, targetPath); + if (applyLinkResult === 'conflict') { + linked.push({ sourceName: resolved.name, targetName, linked: false, targetPath }); + continue; + } + console.log(chalk.green(`Linked "${resolved.name}" to project as "${targetName}".`)); + result = { sourceName: resolved.name, targetName, linked: true, targetPath }; + } else { + // Fall back to add() with manifest entry data + result = await this.add(entry.sourceName, { + alias: alias !== entry.sourceName ? 
alias : undefined, + targetDir: entry.meta?.targetDir as string | undefined, + repoUrl: entry.meta?.repoUrl as string | undefined, + }); + } + linked.push(result); + } catch (e) { + console.log(chalk.yellow(`Skipped "${alias}": ${(e as Error).message}`)); + skipped.push(alias); + } + } + + return { linked, skipped }; + } + + /** + * Stow all files from source into the target directory (stow-style, no manifest). + * Requires source to implement list(). + */ + async stow(): Promise { + if (!this.opts.source.list) { + throw new Error('stow() requires source to implement list()'); + } + const names = await this.opts.source.list({}); + const results: StowResult = []; + for (const name of names) { + const result = await this.add(name); + results.push(result); + } + return results; + } + + /** + * Remove all stowed symlinks. + * If manifest is present, uses manifest entries. Otherwise uses source.list(). + */ + async unstow(): Promise { + const absoluteRoot = path.resolve(this.targetRoot); + if (this.opts.manifest) { + const entries = await this.opts.manifest.readAll(); + for (const alias of Object.keys(entries)) { + await this.remove(alias); + } + } else if (this.opts.source.list) { + const names = await this.opts.source.list({}); + for (const name of names) { + const targetPath = path.join(absoluteRoot, this.opts.targetDir, name); + await this.doUnlink(targetPath); + } + } else { + throw new Error('unstow() requires either manifest or source.list()'); + } + } + + /** + * Re-stow: unstow then stow. + */ + async restow(): Promise { + await this.unstow(); + return this.stow(); + } + + /** + * Preview what apply() would do without executing. + * Requires manifest. 
+ */ + async diff(): Promise { + if (!this.opts.manifest) { + throw new Error('diff() requires a manifest store'); + } + const entries = await this.opts.manifest.readAll(); + const absoluteRoot = path.resolve(this.targetRoot); + const toCreate: string[] = []; + const toUpdate: string[] = []; + const toDelete: string[] = []; + + for (const [alias, entry] of Object.entries(entries)) { + const targetDirPath = entry.meta?.targetDir + ? path.normalize(entry.meta.targetDir as string) + : this.opts.targetDir; + const targetPath = path.join(absoluteRoot, targetDirPath, alias); + + const lstats = await fs.lstat(targetPath).catch(() => null); + if (!lstats) { + toCreate.push(alias); + continue; + } + if (!lstats.isSymbolicLink()) { + toUpdate.push(alias); // conflict + continue; + } + // Check if pointing to the expected source + try { + let expectedSource: string; + if (this.opts.source.resolveFromManifest) { + const resolved = await this.opts.source.resolveFromManifest(entry); + expectedSource = resolved.path; + } else { + const resolved = await this.opts.source.resolve(entry.sourceName, { + repoUrl: entry.meta?.repoUrl as string | undefined, + targetDir: entry.meta?.targetDir as string | undefined, + }); + expectedSource = resolved.path; + } + const currentTarget = await fs.readlink(targetPath); + if (currentTarget !== expectedSource) { + toUpdate.push(alias); + } + } catch { + toUpdate.push(alias); + } + } + + return { toCreate, toUpdate, toDelete }; + } + + /** + * Return symlink status for each manifest entry. + * Requires manifest. + */ + async status(): Promise { + if (!this.opts.manifest) { + throw new Error('status() requires a manifest store'); + } + const entries = await this.opts.manifest.readAll(); + const absoluteRoot = path.resolve(this.targetRoot); + const statusEntries: StatusEntry[] = []; + + for (const [alias, entry] of Object.entries(entries)) { + const targetDirPath = entry.meta?.targetDir + ? 
path.normalize(entry.meta.targetDir as string) + : this.opts.targetDir; + const targetPath = path.join(absoluteRoot, targetDirPath, alias); + + const lstats = await fs.lstat(targetPath).catch(() => null); + let status: 'linked' | 'missing' | 'conflict'; + if (!lstats) { + status = 'missing'; + } else if (lstats.isSymbolicLink()) { + status = 'linked'; + } else { + status = 'conflict'; + } + + statusEntries.push({ + alias, + sourceName: entry.sourceName, + targetPath, + status, + }); + } + + return { entries: statusEntries }; + } + + /** + * Import a file into the source and replace it with a symlink. + * Pure filesystem operation — no git logic. + * Requires source to implement destinationPath(). + */ + async import(targetFilePath: string, name: string, options: ManagerImportOptions = {}): Promise { + const { force = false, alias, repoUrl } = options; + + // Validate: file must exist and not be a symlink + if (!await fs.pathExists(targetFilePath)) { + throw new Error(`File "${targetFilePath}" does not exist.`); + } + const stats = await fs.lstat(targetFilePath); + if (stats.isSymbolicLink()) { + throw new Error(`"${targetFilePath}" is already a symlink (already managed).`); + } + + if (!this.opts.source.destinationPath) { + throw new Error('import() requires source to implement destinationPath()'); + } + const sourcePath = await this.opts.source.destinationPath(name); + + // Handle existing destination + if (await fs.pathExists(sourcePath)) { + if (!force) { + throw new Error(`"${name}" already exists at ${sourcePath}. 
Use force: true to overwrite.`); + } + await fs.remove(sourcePath); + } + + // Ensure destination directory exists + await fs.ensureDir(path.dirname(sourcePath)); + + // Copy to source location + await fs.copy(targetFilePath, sourcePath); + console.log(chalk.green(`Copied "${name}" to source.`)); + + // Remove original + await fs.remove(targetFilePath); + + // Create symlink + await this.doLink(sourcePath, targetFilePath); + console.log(chalk.green(`Linked "${name}" back to "${targetFilePath}".`)); + + const targetName = alias || path.basename(targetFilePath); + + // Write to manifest if one is provided + if (this.opts.manifest) { + const manifestEntry: ManifestEntry = { + sourceName: name, + meta: { alias, repoUrl }, + }; + await this.opts.manifest.write(targetName, manifestEntry); + } + + return { sourceName: name, targetName, linked: true, targetPath: targetFilePath }; + } + + /** + * Read all manifest entries. + */ + async readManifest(): Promise> { + if (!this.opts.manifest) return {}; + return this.opts.manifest.readAll(); + } + + /** + * Create source → target symlink via linkany in-memory mode (atomic operation). + * Returns 'linked' (created/updated) | 'noop' (already correct) | 'conflict' (real file in the way). 
+ */ + private async doLink(source: string, target: string): Promise<'linked' | 'noop' | 'conflict'> { + if (await fs.pathExists(target)) { + const st = await fs.lstat(target); + if (!st.isSymbolicLink()) return 'conflict'; + const current = await fs.readlink(target); + if (current !== source) { + // linkany rejects migrating an existing symlink; unlink first + await fs.unlink(target); + } + // current === source: linkany will detect isSymlinkTo → noop + } + const { result } = await linkanyAdd( + { version: 1 as const, installs: [] }, + { source, target, atomic: true } + ); + if (!result.ok) throw new Error(result.errors.join('; ') || 'Failed to link'); + const actuallyLinked = result.changes.some(c => c.action === 'symlink' || c.action === 'move'); + return actuallyLinked ? 'linked' : 'noop'; + } + + /** + * Safely remove target symlink (only symlinks, never real files). + * Returns true if removed. + */ + private async doUnlink(target: string): Promise { + if (await fs.pathExists(target)) { + const st = await fs.lstat(target); + if (st.isSymbolicLink()) { await fs.unlink(target); return true; } + } + return false; + } + + private async findWithCommonSuffix(targetDir: string, alias: string): Promise { + const suffixes = ['.md', '.instructions.md', '.mdc']; + for (const suffix of suffixes) { + if (!alias.endsWith(suffix)) { + const candidate = path.join(targetDir, `${alias}${suffix}`); + if (await fs.pathExists(candidate)) return candidate; + } + } + return undefined; + } + +} diff --git a/src/dotany/manifest/json.ts b/src/dotany/manifest/json.ts new file mode 100644 index 0000000..e543a32 --- /dev/null +++ b/src/dotany/manifest/json.ts @@ -0,0 +1,55 @@ +import fs from 'fs-extra'; +import path from 'path'; +import { ManifestStore, ManifestEntry } from '../types.js'; + +/** + * Generic JSON-based ManifestStore. + * Supports optional namespace for multi-tool manifests. 
+ * + * File structure with namespace: { [namespace]: { [alias]: ManifestEntry } } + * File structure without namespace: { [alias]: ManifestEntry } + */ +export class JsonManifest implements ManifestStore { + constructor( + private filePath: string, + private namespace?: string + ) {} + + async readAll(): Promise> { + if (!await fs.pathExists(this.filePath)) return {}; + const raw = await fs.readJson(this.filePath); + const section = this.namespace ? raw[this.namespace] : raw; + if (!section || typeof section !== 'object') return {}; + return section as Record; + } + + async write(key: string, value: ManifestEntry): Promise { + let raw: Record = {}; + if (await fs.pathExists(this.filePath)) { + raw = await fs.readJson(this.filePath); + } + if (this.namespace) { + if (!raw[this.namespace] || typeof raw[this.namespace] !== 'object') { + raw[this.namespace] = {}; + } + (raw[this.namespace] as Record)[key] = value; + } else { + raw[key] = value; + } + await fs.ensureDir(path.dirname(this.filePath)); + await fs.writeJson(this.filePath, raw, { spaces: 2 }); + } + + async delete(key: string): Promise { + if (!await fs.pathExists(this.filePath)) return; + const raw = await fs.readJson(this.filePath); + if (this.namespace) { + if (raw[this.namespace] && typeof raw[this.namespace] === 'object') { + delete (raw[this.namespace] as Record)[key]; + } + } else { + delete raw[key]; + } + await fs.writeJson(this.filePath, raw, { spaces: 2 }); + } +} diff --git a/src/dotany/sources/filesystem.ts b/src/dotany/sources/filesystem.ts new file mode 100644 index 0000000..15821f2 --- /dev/null +++ b/src/dotany/sources/filesystem.ts @@ -0,0 +1,30 @@ +import fs from 'fs-extra'; +import path from 'path'; +import { SourceResolver, ResolvedSource, ResolveConfig } from '../types.js'; + +/** + * SourceResolver backed by a local directory (foundation for Stow-style management). + * Unlike GitRepoSource, this has no git dependency. 
+ */ +export class FileSystemSource implements SourceResolver { + constructor(private sourceDir: string) {} + + async resolve(name: string, _config: ResolveConfig = {}): Promise { + const filePath = path.join(this.sourceDir, name); + if (!await fs.pathExists(filePath)) { + throw new Error(`"${name}" not found in ${this.sourceDir}`); + } + return { name, path: filePath }; + } + + async list(_config: ResolveConfig = {}): Promise { + if (!await fs.pathExists(this.sourceDir)) { + return []; + } + return fs.readdir(this.sourceDir); + } + + async destinationPath(name: string): Promise { + return path.join(this.sourceDir, name); + } +} diff --git a/src/dotany/sources/git.ts b/src/dotany/sources/git.ts new file mode 100644 index 0000000..fd44880 --- /dev/null +++ b/src/dotany/sources/git.ts @@ -0,0 +1,50 @@ +import { execa } from 'execa'; +import fs from 'fs-extra'; +import path from 'path'; +import { SourceResolver, ResolveConfig, ResolvedSource } from '../types.js'; + +/** + * SourceResolver backed by a git repository. + * Automatically handles clone/pull; callers only need to specify where to cache the clone. + */ +export class GitSource implements SourceResolver { + private cloned = false; + + constructor( + private repoUrl: string, // git remote URL, e.g. https://github.com/user/repo.git + private cloneDir: string, // local directory to clone into (caller decides location) + private sourceSubDir: string = '' // sub-directory inside the repo; defaults to repo root + ) {} + + /** Ensure the repo is cloned/pulled. Idempotent. 
*/ + private async ensureCloned(): Promise { + if (this.cloned) return; + if (await fs.pathExists(path.join(this.cloneDir, '.git'))) { + await execa('git', ['pull'], { cwd: this.cloneDir }); + } else { + await execa('git', ['clone', this.repoUrl, this.cloneDir]); + } + this.cloned = true; + } + + async resolve(name: string, _config: ResolveConfig = {}): Promise { + await this.ensureCloned(); + const filePath = path.join(this.cloneDir, this.sourceSubDir, name); + if (!await fs.pathExists(filePath)) { + const location = this.sourceSubDir ? this.sourceSubDir : '/'; + throw new Error(`"${name}" not found in ${this.repoUrl} (${location})`); + } + return { name, path: filePath }; + } + + async list(_config: ResolveConfig = {}): Promise { + await this.ensureCloned(); + const dir = path.join(this.cloneDir, this.sourceSubDir); + if (!await fs.pathExists(dir)) return []; + return fs.readdir(dir); + } + + async destinationPath(name: string): Promise { + return path.join(this.cloneDir, this.sourceSubDir, name); + } +} diff --git a/src/dotany/types.ts b/src/dotany/types.ts new file mode 100644 index 0000000..a0bae7d --- /dev/null +++ b/src/dotany/types.ts @@ -0,0 +1,109 @@ +/** + * Core interfaces for the dotfile abstraction library. + * This module provides a generic, pluggable foundation for dotfile management. 
+ */ + +/** Config passed to SourceResolver.resolve() */ +export interface ResolveConfig { + repoUrl?: string; + targetDir?: string; + [key: string]: unknown; +} + +/** Pluggable source resolver interface */ +export interface SourceResolver { + resolve(name: string, config: ResolveConfig): Promise; + resolveFromManifest?(entry: ManifestEntry): Promise; + list?(config: ResolveConfig): Promise; + destinationPath?(name: string): Promise; +} + +export interface ResolvedSource { + /** Final filename (may include suffix) */ + name: string; + /** Absolute path to the source */ + path: string; + /** Detected suffix, if any */ + suffix?: string; +} + +/** Pluggable manifest persistence interface */ +export interface ManifestStore { + readAll(): Promise>; + write(key: string, value: ManifestEntry): Promise; + delete(key: string): Promise; +} + +export interface ManifestEntry { + sourceName: string; + /** Plugin-specific metadata (e.g. repoUrl, targetDir) */ + meta?: Record; +} + +/** Result of a link operation */ +export interface LinkResult { + sourceName: string; + targetName: string; + linked: boolean; + /** Absolute path to the symlink; callers can use this to compute ignore entries */ + targetPath: string; +} + +/** Options for add() */ +export interface AddOptions { + alias?: string; + targetDir?: string; + repoUrl?: string; +} + +export interface ApplyResult { + linked: LinkResult[]; + skipped: string[]; +} + +/** Result of a stow operation (batch link) */ +export type StowResult = LinkResult[]; + +/** diff() returns: differences between manifest and filesystem */ +export interface DiffResult { + toCreate: string[]; // in manifest but symlink missing + toUpdate: string[]; // symlink exists but points to different source + toDelete: string[]; // symlink exists but not in manifest (orphans) +} + +/** status() returns: per-entry symlink status */ +export interface StatusEntry { + alias: string; + sourceName: string; + targetPath: string; + status: 'linked' | 'missing' | 
'conflict'; // conflict = exists but not a symlink +} +export interface StatusResult { entries: StatusEntry[]; } + +/** Options for import() */ +export interface ManagerImportOptions { + alias?: string; + force?: boolean; + repoUrl?: string; +} + +/** + * Dynamic repo resolver function for multi-repo support. + * Returns Promise to avoid coupling with app-specific RepoConfig type. + */ +export type RepoResolverFn = (repoUrl: string, entryName: string) => Promise; + +/** DotfileManager creation options */ +export interface DotfileCreateOptions { + name: string; + source: SourceResolver; + /** Target directory relative to targetRoot */ + targetDir: string; + /** Defaults to process.cwd() */ + targetRoot?: string; + mode?: 'file' | 'directory' | 'hybrid'; + /** If not provided, operates in stow mode (no manifest) */ + manifest?: ManifestStore; + /** Optional target name resolver (handles suffix-aware renaming) */ + resolveTargetName?: (name: string, alias?: string, sourceSuffix?: string) => string; +} diff --git a/src/plugin/ai-rules-sync-manifest.ts b/src/plugin/ai-rules-sync-manifest.ts new file mode 100644 index 0000000..2c854ef --- /dev/null +++ b/src/plugin/ai-rules-sync-manifest.ts @@ -0,0 +1,69 @@ +import { ManifestStore, ManifestEntry } from '../dotany/types.js'; +import { getCombinedProjectConfig, addDependencyGeneric, removeDependencyGeneric } from '../project-config.js'; +import type { RuleEntry } from '../project-config.js'; + +/** + * ManifestStore implementation that reads/writes ai-rules-sync.json. + * Migrated from project-config.ts dependency management logic. 
+ */ +export class AiRulesSyncManifest implements ManifestStore { + constructor( + private projectPath: string, + private configPath: [string, string], + private isLocal: boolean = false + ) {} + + async readAll(): Promise> { + const config = await getCombinedProjectConfig(this.projectPath); + const [topLevel, subLevel] = this.configPath; + + // agentsMd is flat (no subLevel nesting) + const section: Record | undefined = topLevel === 'agentsMd' + ? (config as any)[topLevel] + : (config as any)[topLevel]?.[subLevel]; + + if (!section) return {}; + + const result: Record = {}; + for (const [key, value] of Object.entries(section)) { + if (typeof value === 'string') { + result[key] = { + sourceName: key, + meta: { repoUrl: value }, + }; + } else if (value && typeof value === 'object') { + const entry = value as { url: string; rule?: string; targetDir?: string }; + result[key] = { + sourceName: entry.rule || key, + meta: { + repoUrl: entry.url, + ...(entry.targetDir ? { targetDir: entry.targetDir } : {}), + ...(entry.rule && entry.rule !== key ? { alias: key } : {}), + }, + }; + } + } + return result; + } + + async write(key: string, value: ManifestEntry): Promise { + const repoUrl = value.meta?.repoUrl as string; + const originalName = value.sourceName; + const alias = value.meta?.alias as string | undefined; + const targetDir = value.meta?.targetDir as string | undefined; + + await addDependencyGeneric( + this.projectPath, + this.configPath, + originalName, + repoUrl, + key !== originalName ? 
key : alias, + this.isLocal, + targetDir + ); + } + + async delete(key: string): Promise { + await removeDependencyGeneric(this.projectPath, this.configPath, key); + } +} diff --git a/src/plugin/git-repo-source.ts b/src/plugin/git-repo-source.ts new file mode 100644 index 0000000..7a0afd5 --- /dev/null +++ b/src/plugin/git-repo-source.ts @@ -0,0 +1,87 @@ +import { SourceResolver, ResolvedSource, ResolveConfig, ManifestEntry, RepoResolverFn } from '../dotany/types.js'; +import { GitSource } from '../dotany/sources/git.js'; +import { getRepoSourceConfig, getSourceDir } from '../project-config.js'; +import { RepoConfig } from '../config.js'; + +/** + * Config subset needed for GitRepoSource (mirrors AdapterConfig fields used in source resolution) + */ +export interface GitRepoSourceConfig { + tool: string; + subtype: string; + defaultSourceDir: string; + resolveSource?: (repoDir: string, rootPath: string, name: string) => Promise<{ + sourceName: string; + sourcePath: string; + suffix?: string; + }>; + resolveTargetName?: (name: string, alias?: string, sourceSuffix?: string) => string; +} + +/** + * SourceResolver that resolves files from a git repository. + * Supports static RepoConfig, dynamic RepoResolverFn (multi-repo), or null (remove-only mode). + */ +export class GitRepoSource implements SourceResolver { + constructor( + private repoOrResolver: RepoConfig | RepoResolverFn | null, + private config: GitRepoSourceConfig + ) {} + + async resolve(name: string, resolveConfig: ResolveConfig = {}): Promise { + const repo = await this.getRepo(resolveConfig.repoUrl as string | undefined, name); + return this.resolveFromRepo(repo, name); + } + + /** + * Resolve source using a manifest entry (supports multi-repo apply() flows). + * Uses entry.meta.repoUrl to find the correct repository. 
+ */ + async resolveFromManifest(entry: ManifestEntry): Promise { + const repoUrl = entry.meta?.repoUrl as string | undefined; + const repo = await this.getRepo(repoUrl, entry.sourceName); + return this.resolveFromRepo(repo, entry.sourceName); + } + + /** + * Return the destination path in the repo for import() operations. + */ + async destinationPath(name: string): Promise { + const repo = await this.getRepo(undefined, name); + const repoConfig = await getRepoSourceConfig(repo.path); + const sourceDir = getSourceDir(repoConfig, this.config.tool, this.config.subtype, this.config.defaultSourceDir); + return new GitSource(repo.url, repo.path, sourceDir).destinationPath(name); + } + + private async getRepo(repoUrl: string | undefined, name: string): Promise { + if (typeof this.repoOrResolver === 'function') { + if (!repoUrl) { + throw new Error(`repoUrl is required when using a repo resolver (entry: "${name}")`); + } + return this.repoOrResolver(repoUrl, name); + } + if (!this.repoOrResolver) { + throw new Error(`No repo configured for GitRepoSource (entry: "${name}")`); + } + return this.repoOrResolver; + } + + private async resolveFromRepo(repo: RepoConfig, name: string): Promise { + const repoConfig = await getRepoSourceConfig(repo.path); + const sourceDir = getSourceDir(repoConfig, this.config.tool, this.config.subtype, this.config.defaultSourceDir); + + if (this.config.resolveSource) { + // ai-rules-sync specific: suffix-aware resolution (hybrid/file mode) + const resolved = await this.config.resolveSource(repo.path, sourceDir, name); + return { + name: resolved.sourceName, + path: resolved.sourcePath, + suffix: resolved.suffix, + }; + } + + // Generic path: delegate to GitSource + // (repo is already cloned by the time we get here; ensureCloned() will run git pull) + return new GitSource(repo.url, repo.path, sourceDir).resolve(name); + } +} diff --git a/src/project-config.ts b/src/project-config.ts index 600b389..d884bf5 100644 --- a/src/project-config.ts +++ 
b/src/project-config.ts @@ -9,40 +9,6 @@ const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; const LEGACY_CONFIG_FILENAME = 'cursor-rules.json'; const LEGACY_LOCAL_CONFIG_FILENAME = 'cursor-rules.local.json'; -const REPO_SOURCE_PATHS = [ - ['cursor', 'rules'], - ['cursor', 'commands'], - ['cursor', 'skills'], - ['cursor', 'agents'], - ['copilot', 'instructions'], - ['copilot', 'skills'], - ['copilot', 'prompts'], - ['copilot', 'agents'], - ['claude', 'skills'], - ['claude', 'agents'], - ['claude', 'rules'], - ['claude', 'md'], - ['trae', 'rules'], - ['trae', 'skills'], - ['opencode', 'agents'], - ['opencode', 'skills'], - ['opencode', 'commands'], - ['opencode', 'tools'], - ['codex', 'rules'], - ['codex', 'skills'], - ['codex', 'md'], - ['gemini', 'commands'], - ['gemini', 'skills'], - ['gemini', 'agents'], - ['gemini', 'md'], - ['warp', 'skills'], - ['windsurf', 'rules'], - ['windsurf', 'skills'], - ['cline', 'rules'], - ['cline', 'skills'], - ['agentsMd', 'file'] -] as const; - function readNestedStringValue(source: unknown, tool: string, subtype: string): string | undefined { if (!source || typeof source !== 'object') { return undefined; @@ -68,15 +34,29 @@ function writeNestedStringValue(target: RepoSourceConfig, tool: string, subtype: mutable[tool] = toolConfig; } +/** + * Dynamically iterate over all tool/subtype pairs present in a source object, + * extracting entries where the value is a string (source directory paths). + * Replaces the previous hardcoded REPO_SOURCE_PATHS constant. 
+ */ function buildRepoSourceFromNestedStrings(source: unknown, rootPath?: string): { hasAny: boolean; config: RepoSourceConfig } { const config: RepoSourceConfig = { rootPath }; let hasAny = false; - for (const [tool, subtype] of REPO_SOURCE_PATHS) { - const value = readNestedStringValue(source, tool, subtype); - if (value !== undefined) { - hasAny = true; - writeNestedStringValue(config, tool, subtype, value); + if (!source || typeof source !== 'object') { + return { hasAny, config }; + } + + const src = source as Record; + for (const [tool, toolConfig] of Object.entries(src)) { + if (tool === 'rootPath' || tool === 'sourceDir') continue; + if (!toolConfig || typeof toolConfig !== 'object') continue; + + for (const [subtype, value] of Object.entries(toolConfig as Record)) { + if (typeof value === 'string') { + hasAny = true; + writeNestedStringValue(config, tool, subtype, value); + } } } @@ -93,231 +73,37 @@ export type RuleEntry = string | { }; /** - * Source directory configuration (for rules repositories) - * Defines where source files are located in a rules repo + * Source directory configuration (for rules repositories). + * Defines where source files are located in a rules repo. + * Uses dynamic index signature to support any tool/subtype combination. 
*/ export interface SourceDirConfig { - cursor?: { - // Source directory for cursor rules, default: ".cursor/rules" - rules?: string; - // Source directory for cursor commands, default: ".cursor/commands" - commands?: string; - // Source directory for cursor skills, default: ".cursor/skills" - skills?: string; - // Source directory for cursor agents, default: ".cursor/agents" - agents?: string; - }; - copilot?: { - // Source directory for copilot instructions, default: ".github/instructions" - instructions?: string; - // Source directory for copilot skills, default: ".github/skills" - skills?: string; - // Source directory for copilot prompts, default: ".github/prompts" - prompts?: string; - // Source directory for copilot agents, default: ".github/agents" - agents?: string; - }; - claude?: { - // Source directory for claude skills, default: ".claude/skills" - skills?: string; - // Source directory for claude agents, default: ".claude/agents" - agents?: string; - // Source directory for claude rules, default: ".claude/rules" - rules?: string; - // Source directory for claude md files (CLAUDE.md), default: ".claude" - md?: string; - }; - trae?: { - // Source directory for trae rules, default: ".trae/rules" - rules?: string; - // Source directory for trae skills, default: ".trae/skills" - skills?: string; - }; - opencode?: { - // Source directory for opencode agents, default: ".opencode/agents" - agents?: string; - // Source directory for opencode skills, default: ".opencode/skills" - skills?: string; - // Source directory for opencode commands, default: ".opencode/commands" - commands?: string; - // Source directory for opencode tools, default: ".opencode/tools" - tools?: string; - }; - codex?: { - // Source directory for codex rules, default: ".codex/rules" - rules?: string; - // Source directory for codex skills, default: ".agents/skills" - skills?: string; - // Source directory for codex md files (AGENTS.md), default: ".codex" - md?: string; - }; - gemini?: { - 
// Source directory for gemini commands, default: ".gemini/commands" - commands?: string; - // Source directory for gemini skills, default: ".gemini/skills" - skills?: string; - // Source directory for gemini agents, default: ".gemini/agents" - agents?: string; - // Source directory for gemini md files (GEMINI.md), default: ".gemini" - md?: string; - }; - warp?: { - // Source directory for warp skills, default: ".agents/skills" - skills?: string; - }; - windsurf?: { - // Source directory for Windsurf rules, default: ".windsurf/rules" - rules?: string; - // Source directory for Windsurf skills, default: ".windsurf/skills" - skills?: string; - }; - cline?: { - // Source directory for Cline rules, default: ".clinerules" - rules?: string; - // Source directory for Cline skills, default: ".cline/skills" - skills?: string; - }; - agentsMd?: { - // Source directory for AGENTS.md files, default: "." (repository root) - file?: string; - }; + [tool: string]: Record | undefined; } /** - * Unified configuration for ai-rules-sync.json - * Used both in rules repos (sourceDir) and in projects (cursor/copilot dependencies) - * - * In rules repos: - * - rootPath: global path prefix - * - sourceDir: where source files are located - * - * In projects: - * - cursor/copilot: dependency records + * Unified configuration for ai-rules-sync.json. + * Used both in rules repos (sourceDir) and in projects (dependency records). + * Dynamic index signature supports any registered tool without code changes. 
*/ export interface ProjectConfig { - // Global path prefix for source directories, default: "" (root directory) - // Only used in rules repos + // Global path prefix for source directories (only used in rules repos) rootPath?: string; // Source directory configuration (only used in rules repos) sourceDir?: SourceDirConfig; - // Dependency records (used in projects) - cursor?: { - // key is the local alias (target name), value is repo url OR object with url and original rule name - rules?: Record; - commands?: Record; - skills?: Record; - agents?: Record; - }; - copilot?: { - // key is the local alias (target name), value is repo url OR object with url and original rule name - instructions?: Record; - skills?: Record; - prompts?: Record; - agents?: Record; - }; - claude?: { - // key is the local alias (target name), value is repo url OR object with url and original rule name - skills?: Record; - agents?: Record; - rules?: Record; - md?: Record; - }; - trae?: { - rules?: Record; - skills?: Record; - }; - opencode?: { - // key is the local alias (target name), value is repo url OR object with url and original rule name - agents?: Record; - skills?: Record; - commands?: Record; - tools?: Record; - }; - codex?: { - rules?: Record; - skills?: Record; - md?: Record; - }; - gemini?: { - commands?: Record; - skills?: Record; - agents?: Record; - md?: Record; - }; - warp?: { - skills?: Record; - }; - windsurf?: { - rules?: Record; - skills?: Record; - }; - cline?: { - rules?: Record; - skills?: Record; - }; - // Universal AGENTS.md support (tool-agnostic) - agentsMd?: Record; + // Dependency records — indexed by tool name, then subtype + [tool: string]: any; } /** - * @deprecated Use ProjectConfig with sourceDir instead - * Kept for backward compatibility during transition + * Repository source configuration. + * Uses dynamic index signature so new tools require zero changes here. + * The `any` index type allows accessing tool-specific sub-properties (e.g. 
repoConfig.windsurf?.rules) + * without requiring explicit per-tool type declarations. */ export interface RepoSourceConfig { rootPath?: string; - cursor?: { - rules?: string; - skills?: string; - commands?: string; - agents?: string; - }; - copilot?: { - instructions?: string; - skills?: string; - prompts?: string; - agents?: string; - }; - claude?: { - skills?: string; - agents?: string; - rules?: string; - md?: string; - }; - trae?: { - rules?: string; - skills?: string; - }; - opencode?: { - agents?: string; - skills?: string; - commands?: string; - tools?: string; - }; - codex?: { - rules?: string; - skills?: string; - md?: string; - }; - gemini?: { - commands?: string; - skills?: string; - agents?: string; - md?: string; - }; - warp?: { - skills?: string; - }; - windsurf?: { - rules?: string; - skills?: string; - }; - cline?: { - rules?: string; - skills?: string; - }; - agentsMd?: { - file?: string; - }; + [tool: string]: any; } export type ConfigSource = 'new' | 'legacy' | 'none'; @@ -355,60 +141,45 @@ function legacyToNew(legacy: { rules?: Record }): ProjectConf }; } +/** + * Merge two ProjectConfig objects dynamically. + * Registry-driven: works with any tool/subtype combination present in the configs + * without requiring a hardcoded tool list. 
+ * + * Special cases: + * - rootPath / sourceDir: take from main + * - agentsMd: flat object (no subLevel nesting) + * - all others: tool → subtype → Record + */ function mergeCombined(main: ProjectConfig, local: ProjectConfig): ProjectConfig { - return { - cursor: { - rules: { ...(main.cursor?.rules || {}), ...(local.cursor?.rules || {}) }, - commands: { ...(main.cursor?.commands || {}), ...(local.cursor?.commands || {}) }, - skills: { ...(main.cursor?.skills || {}), ...(local.cursor?.skills || {}) }, - agents: { ...(main.cursor?.agents || {}), ...(local.cursor?.agents || {}) } - }, - copilot: { - instructions: { ...(main.copilot?.instructions || {}), ...(local.copilot?.instructions || {}) }, - skills: { ...(main.copilot?.skills || {}), ...(local.copilot?.skills || {}) }, - prompts: { ...(main.copilot?.prompts || {}), ...(local.copilot?.prompts || {}) }, - agents: { ...(main.copilot?.agents || {}), ...(local.copilot?.agents || {}) } - }, - claude: { - skills: { ...(main.claude?.skills || {}), ...(local.claude?.skills || {}) }, - agents: { ...(main.claude?.agents || {}), ...(local.claude?.agents || {}) }, - rules: { ...(main.claude?.rules || {}), ...(local.claude?.rules || {}) }, - md: { ...(main.claude?.md || {}), ...(local.claude?.md || {}) } - }, - trae: { - rules: { ...(main.trae?.rules || {}), ...(local.trae?.rules || {}) }, - skills: { ...(main.trae?.skills || {}), ...(local.trae?.skills || {}) } - }, - opencode: { - agents: { ...(main.opencode?.agents || {}), ...(local.opencode?.agents || {}) }, - skills: { ...(main.opencode?.skills || {}), ...(local.opencode?.skills || {}) }, - commands: { ...(main.opencode?.commands || {}), ...(local.opencode?.commands || {}) }, - tools: { ...(main.opencode?.tools || {}), ...(local.opencode?.tools || {}) } - }, - codex: { - rules: { ...(main.codex?.rules || {}), ...(local.codex?.rules || {}) }, - skills: { ...(main.codex?.skills || {}), ...(local.codex?.skills || {}) }, - md: { ...(main.codex?.md || {}), 
...(local.codex?.md || {}) } - }, - gemini: { - commands: { ...(main.gemini?.commands || {}), ...(local.gemini?.commands || {}) }, - skills: { ...(main.gemini?.skills || {}), ...(local.gemini?.skills || {}) }, - agents: { ...(main.gemini?.agents || {}), ...(local.gemini?.agents || {}) }, - md: { ...(main.gemini?.md || {}), ...(local.gemini?.md || {}) } - }, - warp: { - skills: { ...(main.warp?.skills || {}), ...(local.warp?.skills || {}) } - }, - windsurf: { - rules: { ...(main.windsurf?.rules || {}), ...(local.windsurf?.rules || {}) }, - skills: { ...(main.windsurf?.skills || {}), ...(local.windsurf?.skills || {}) } - }, - cline: { - rules: { ...(main.cline?.rules || {}), ...(local.cline?.rules || {}) }, - skills: { ...(main.cline?.skills || {}), ...(local.cline?.skills || {}) } - }, - agentsMd: { ...(main.agentsMd || {}), ...(local.agentsMd || {}) } - }; + const result: Record = {}; + + // Collect all top-level keys from both configs + const allKeys = new Set([...Object.keys(main), ...Object.keys(local)]); + + for (const key of allKeys) { + if (key === 'rootPath' || key === 'sourceDir') { + // Scalar / nested config: prefer main + result[key] = main[key] ?? local[key]; + } else if (key === 'agentsMd') { + // agentsMd is a flat record (no subtype level) + result[key] = { ...(main[key] || {}), ...(local[key] || {}) }; + } else { + // Tool section with subtypes: merge each subtype independently + const mainTool = (main[key] && typeof main[key] === 'object') ? main[key] : {}; + const localTool = (local[key] && typeof local[key] === 'object') ? 
local[key] : {}; + const subtypes = new Set([...Object.keys(mainTool), ...Object.keys(localTool)]); + result[key] = {}; + for (const subtype of subtypes) { + result[key][subtype] = { + ...(mainTool[subtype] || {}), + ...(localTool[subtype] || {}), + }; + } + } + } + + return result as ProjectConfig; } export async function getConfigSource(projectPath: string): Promise { @@ -447,8 +218,8 @@ export async function getRepoSourceConfig(projectPath: string): Promise { const { projectPath, name, repo, force = false, push = false, commitMessage } = options; - // 1. Check if entry exists in project const absoluteProjectPath = path.resolve(projectPath); - // Get dynamic target directory from project config + // Determine target path in project const projectConfig = await getCombinedProjectConfig(projectPath); const targetDirPath = getTargetDir( projectConfig, @@ -263,63 +262,63 @@ export async function importEntry( name, adapter.targetDir ); + const targetPath = path.join(absoluteProjectPath, targetDirPath, name); - const targetDir = path.join(absoluteProjectPath, targetDirPath); - const targetPath = path.join(targetDir, name); - - if (!await fs.pathExists(targetPath)) { - throw new Error(`Entry "${name}" not found in project at ${targetPath}`); - } - - // 2. Check if it's already a symlink (already managed) - const stats = await fs.lstat(targetPath); - if (stats.isSymbolicLink()) { - throw new Error(`Entry "${name}" is already a symlink (already managed by ai-rules-sync)`); - } - - // 3. Determine destination in rules repository + // Determine destination path in repo (needed for git operations) const repoDir = repo.path; const repoConfig = await getRepoSourceConfig(repoDir); const sourceDir = getSourceDir(repoConfig, adapter.tool, adapter.subtype, adapter.defaultSourceDir); const destPath = path.join(repoDir, sourceDir, name); + const relativePath = path.relative(repoDir, destPath); + + let sourceName: string; + let targetName: string; - // 4. 
Check if destination already exists - if (await fs.pathExists(destPath)) { - if (!force) { - throw new Error(`Entry "${name}" already exists in rules repository at ${destPath}. Use --force to overwrite.`); + if (adapter.forProject) { + // Modern path: delegate all fs operations (copy, remove, symlink) to manager.import() + const manager = adapter.forProject(projectPath, repo, options.isLocal); + const linkResult = await manager.import(targetPath, name, { + force, + repoUrl: repo.url, + }); + sourceName = linkResult.sourceName; + targetName = linkResult.targetName; + } else { + // Legacy path: manual fs operations + if (!await fs.pathExists(targetPath)) { + throw new Error(`Entry "${name}" not found in project at ${targetPath}`); + } + const stats = await fs.lstat(targetPath); + if (stats.isSymbolicLink()) { + throw new Error(`Entry "${name}" is already a symlink (already managed by ai-rules-sync)`); + } + if (await fs.pathExists(destPath)) { + if (!force) { + throw new Error(`Entry "${name}" already exists in rules repository at ${destPath}. Use --force to overwrite.`); + } + console.log(chalk.yellow(`Entry "${name}" already exists in repository. Overwriting (--force)...`)); + await fs.remove(destPath); } - console.log(chalk.yellow(`Entry "${name}" already exists in repository. Overwriting (--force)...`)); - await fs.remove(destPath); + await fs.copy(targetPath, destPath); + console.log(chalk.green(`Copied "${name}" to rules repository.`)); + await fs.remove(targetPath); + console.log(chalk.green(`Removed original from project.`)); + const linkResult = await adapter.link(options); + sourceName = linkResult.sourceName; + targetName = linkResult.targetName; } - // 5. Copy to rules repository - await fs.copy(targetPath, destPath); - console.log(chalk.green(`Copied "${name}" to rules repository.`)); - - // 6. 
Git add and commit - const relativePath = path.relative(repoDir, destPath); + // Git add and commit (ai-rules-sync specific, stays in sync-engine) await execa('git', ['add', relativePath], { cwd: repoDir }); const message = commitMessage || `Import ${adapter.tool} ${adapter.subtype}: ${name}`; await execa('git', ['commit', '-m', message], { cwd: repoDir, stdio: 'inherit' }); console.log(chalk.green(`Committed to rules repository.`)); - // 7. Push to remote if --push flag is set if (push) { console.log(chalk.gray('Pushing to remote repository...')); await execa('git', ['push'], { cwd: repoDir, stdio: 'inherit' }); console.log(chalk.green(`Pushed to remote repository.`)); } - // 8. Remove original from project - await fs.remove(targetPath); - console.log(chalk.green(`Removed original from project.`)); - - // 9. Create symlink using existing link functionality - const linkResult = await linkEntry(adapter, options); - - return { - imported: true, - sourceName: linkResult.sourceName, - targetName: linkResult.targetName - }; + return { imported: true, sourceName, targetName }; } From 02aa141a0c5c94355550bf3f1a627eed56edacaa Mon Sep 17 00:00:00 2001 From: lbb Date: Wed, 4 Mar 2026 20:28:24 +0800 Subject: [PATCH 09/12] Codex/fix release action test (#30) * fix(ci): resolve pnpm version conflict in release workflow * ci: add manual workflow to publish npm test prerelease * ci(release): support manual npm test-tag publish via workflow_dispatch * fix(ci): correct test prerelease version generation * refactor(ci): remove redundant publish-test workflow --- .github/workflows/release.yml | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b4a0ca9..0e09fb1 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,6 +7,12 @@ on: paths-ignore: - Formula/ais.rb workflow_dispatch: + inputs: + publish_test: + description: Publish a prerelease 
package to npm "test" tag + required: false + default: true + type: boolean permissions: contents: write @@ -24,8 +30,6 @@ jobs: - name: Setup pnpm uses: pnpm/action-setup@v4 - with: - version: 9.15.3 - name: Setup Node.js uses: actions/setup-node@v4 @@ -43,8 +47,23 @@ jobs: - name: Build run: pnpm build + - name: Set unique prerelease version for test publish + id: test_version + if: github.event_name == 'workflow_dispatch' && inputs.publish_test + run: | + VERSION=$(node -e 'const fs=require("fs");const pkg=JSON.parse(fs.readFileSync("package.json","utf8"));const [major,minor,patch]=pkg.version.split("-")[0].split(".").map(Number);console.log([major,minor,patch+1].join(".")+"-test."+process.env.GITHUB_RUN_NUMBER+"."+process.env.GITHUB_RUN_ATTEMPT);') + npm version "$VERSION" --no-git-tag-version + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + + - name: Publish npm test tag + if: github.event_name == 'workflow_dispatch' && inputs.publish_test + run: pnpm publish --tag test --no-git-checks + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + - name: Create release PR or publish id: changesets + if: github.event_name != 'workflow_dispatch' || inputs.publish_test != true uses: changesets/action@v1 with: version: pnpm changeset version @@ -63,6 +82,10 @@ jobs: VERSION=$(node -p "require('./package.json').version") echo "version=$VERSION" >> "$GITHUB_OUTPUT" + - name: Summary for test publish + if: github.event_name == 'workflow_dispatch' && inputs.publish_test + run: echo "Published ai-rules-sync@${{ steps.test_version.outputs.version }} to npm tag 'test'." 
+ update-homebrew: needs: release if: needs.release.outputs.published == 'true' From adf96949f6cedaa24f8c2325956926cea4a5ffc2 Mon Sep 17 00:00:00 2001 From: lbb00 Date: Thu, 5 Mar 2026 01:24:53 +0800 Subject: [PATCH 10/12] fix(remove): harden symlink cleanup and upgrade linkany --- package.json | 2 +- pnpm-lock.yaml | 10 +-- src/__tests__/dotany-manager.test.ts | 34 ++++++++ .../handle-remove-forproject.test.ts | 79 +++++++++++++++++++ src/commands/handlers.ts | 40 ++++++++-- src/dotany/manager.ts | 51 +++++++++--- 6 files changed, 191 insertions(+), 25 deletions(-) create mode 100644 src/__tests__/dotany-manager.test.ts create mode 100644 src/__tests__/handle-remove-forproject.test.ts diff --git a/package.json b/package.json index 3a03b44..cc07988 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,7 @@ "commander": "^14.0.2", "execa": "^9.6.1", "fs-extra": "^11.3.3", - "linkany": "^0.0.3" + "linkany": "^0.0.4" }, "devDependencies": { "@changesets/cli": "^2.29.8", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8a966ea..1e531cc 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -21,8 +21,8 @@ importers: specifier: ^11.3.3 version: 11.3.3 linkany: - specifier: ^0.0.3 - version: 0.0.3 + specifier: ^0.0.4 + version: 0.0.4 devDependencies: '@changesets/cli': specifier: ^2.29.8 @@ -674,8 +674,8 @@ packages: jsonfile@6.2.0: resolution: {integrity: sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} - linkany@0.0.3: - resolution: {integrity: sha512-MIKiWi2DksOV2ID621c5qmsaXyu2ZIWd6IScwBKw+KGmRJwta48Ozb8Nc+dQ3/3BD51phlvcl9CyGDjRQ/Wkdg==} + linkany@0.0.4: + resolution: {integrity: sha512-Rp+iC0M9SbYab9u+qx/5PaLFWh2llszBFTiw6onoint//dhS9miOYFUB6A4egt79W/ld+EzS6mq+uX/sjeBQkw==} hasBin: true locate-path@5.0.0: @@ -1625,7 +1625,7 @@ snapshots: optionalDependencies: graceful-fs: 4.2.11 - linkany@0.0.3: + linkany@0.0.4: dependencies: fs-extra: 11.3.3 diff --git a/src/__tests__/dotany-manager.test.ts 
b/src/__tests__/dotany-manager.test.ts new file mode 100644 index 0000000..674a0e2 --- /dev/null +++ b/src/__tests__/dotany-manager.test.ts @@ -0,0 +1,34 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs-extra'; +import { promises as nodeFs } from 'fs'; +import { describe, expect, it } from 'vitest'; +import { DotfileManager } from '../dotany/manager.js'; + +describe('DotfileManager', () => { + it('removes dangling symlink targets during remove()', async () => { + const projectPath = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-dotany-remove-')); + const targetDir = '.cursor/rules'; + const alias = 'dangling'; + const targetPath = path.join(projectPath, targetDir, alias); + + await fs.ensureDir(path.dirname(targetPath)); + await nodeFs.symlink(path.join(projectPath, 'missing-source'), targetPath); + + const manager = new DotfileManager({ + name: 'test', + source: { + async resolve() { + throw new Error('resolve() should not be called in remove() test'); + } + }, + targetRoot: projectPath, + targetDir + }); + + await manager.remove(alias); + + const statAfter = await fs.lstat(targetPath).catch(() => null); + expect(statAfter).toBeNull(); + }); +}); diff --git a/src/__tests__/handle-remove-forproject.test.ts b/src/__tests__/handle-remove-forproject.test.ts new file mode 100644 index 0000000..77337c6 --- /dev/null +++ b/src/__tests__/handle-remove-forproject.test.ts @@ -0,0 +1,79 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs-extra'; +import { describe, expect, it } from 'vitest'; +import { handleRemove } from '../commands/handlers.js'; +import { createBaseAdapter, createSuffixAwareTargetResolver } from '../adapters/base.js'; + +const CONFIG_FILENAME = 'ai-rules-sync.json'; +const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; + +function createCopilotInstructionsAdapter() { + return createBaseAdapter({ + name: 'copilot-instructions', + tool: 'copilot', + subtype: 'instructions', + configPath: ['copilot', 
'instructions'], + defaultSourceDir: '.github/instructions', + targetDir: '.github/instructions', + mode: 'file', + fileSuffixes: ['.instructions.md'], + resolveTargetName: createSuffixAwareTargetResolver(['.instructions.md']) + }); +} + +describe('handleRemove forProject mode', () => { + it('removes ignore entries using actual targetDir and suffix-aware filename', async () => { + const projectPath = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-remove-forproject-')); + const adapter = createCopilotInstructionsAdapter(); + const alias = 'security'; + const targetDir = '.custom/instructions'; + + const mainConfigPath = path.join(projectPath, CONFIG_FILENAME); + await fs.writeJson(mainConfigPath, { + copilot: { + instructions: { + [alias]: { + url: 'https://example.com/repo.git', + rule: 'security', + targetDir + } + } + } + }, { spaces: 2 }); + + const targetPath = path.join(projectPath, targetDir, `${alias}.instructions.md`); + const sourceFilePath = path.join(projectPath, 'tmp-source.md'); + await fs.ensureDir(path.dirname(targetPath)); + await fs.writeFile(sourceFilePath, '# source'); + await fs.symlink(sourceFilePath, targetPath); + + const relEntry = path.relative(path.resolve(projectPath), targetPath); + await fs.writeFile(path.join(projectPath, '.gitignore'), `${relEntry}\n`); + + const result = await handleRemove(adapter, projectPath, alias, false); + + expect(result.removedFrom).toEqual([CONFIG_FILENAME]); + const gitignore = await fs.readFile(path.join(projectPath, '.gitignore'), 'utf-8'); + expect(gitignore).not.toContain(relEntry); + }); + + it('reports removedFrom based on the actual config file (local config)', async () => { + const projectPath = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-remove-local-config-')); + const adapter = createCopilotInstructionsAdapter(); + const alias = 'local-only'; + + const localConfigPath = path.join(projectPath, LOCAL_CONFIG_FILENAME); + await fs.writeJson(localConfigPath, { + copilot: { + instructions: { + [alias]: 
'https://example.com/repo.git' + } + } + }, { spaces: 2 }); + + const result = await handleRemove(adapter, projectPath, alias, false); + + expect(result.removedFrom).toEqual([LOCAL_CONFIG_FILENAME]); + }); +}); diff --git a/src/commands/handlers.ts b/src/commands/handlers.ts index 5cfafeb..e48e8b0 100644 --- a/src/commands/handlers.ts +++ b/src/commands/handlers.ts @@ -249,7 +249,8 @@ async function getConfigHitsForAlias( adapter: SyncAdapter, projectPath: string, alias: string, - isUser: boolean + isUser: boolean, + includeLegacy: boolean = true ): Promise { const [topLevel, subLevel] = adapter.configPath; const hits: string[] = []; @@ -281,7 +282,7 @@ async function getConfigHitsForAlias( } // Legacy support for Cursor rules. - if (adapter.tool === 'cursor' && adapter.subtype === 'rules') { + if (includeLegacy && adapter.tool === 'cursor' && adapter.subtype === 'rules') { const legacyMainPath = path.join(projectPath, 'cursor-rules.json'); const legacyLocalPath = path.join(projectPath, 'cursor-rules.local.json'); @@ -330,8 +331,8 @@ async function resolveRemoveTargetPath( } for (const candidate of candidates) { - if (await fs.pathExists(candidate)) { - const stats = await fs.lstat(candidate); + const stats = await fs.lstat(candidate).catch(() => null); + if (stats) { return { targetPath: candidate, exists: true, @@ -395,11 +396,37 @@ export async function handleRemove( // Project mode: use forProject().remove() to do symlink deletion + manifest update in one step if (adapter.forProject) { + const removedFrom = await getConfigHitsForAlias(adapter, projectPath, alias, false, false); + const target = await resolveRemoveTargetPath(adapter, projectPath, alias, false); + const projectRoot = path.resolve(projectPath); + const targetDirAbsolute = path.dirname(target.targetPath); + const ignoreEntries = new Set(); + + const addIgnoreCandidate = (baseName: string): void => { + const relativePath = path.relative(projectRoot, path.join(targetDirAbsolute, baseName)); + if 
(relativePath && relativePath !== '.') { + ignoreEntries.add(relativePath); + } + }; + + addIgnoreCandidate(alias); + if (target.exists) { + addIgnoreCandidate(path.basename(target.targetPath)); + } + + const suffixes = adapter.fileSuffixes || adapter.hybridFileSuffixes; + if (suffixes && suffixes.length > 0) { + for (const suffix of suffixes) { + if (!alias.endsWith(suffix)) { + addIgnoreCandidate(`${alias}${suffix}`); + } + } + } + await adapter.forProject(projectPath, null, false).remove(alias); // Ignore cleanup — try both gitignore and git/info/exclude since we don't know // which was used when the entry was originally added - const ignoreEntries = [`${adapter.targetDir}/${alias}`]; const gitignorePath = path.join(projectPath, '.gitignore'); const gitInfoExclude = path.join(projectPath, '.git', 'info', 'exclude'); for (const entry of ignoreEntries) { @@ -411,8 +438,7 @@ export async function handleRemove( } } - const configFileName = 'ai-rules-sync.json'; - return { removedFrom: [configFileName], migrated: false }; + return { removedFrom, migrated: false }; } // Legacy fallback diff --git a/src/dotany/manager.ts b/src/dotany/manager.ts index 577ddee..a9e228d 100644 --- a/src/dotany/manager.ts +++ b/src/dotany/manager.ts @@ -1,7 +1,8 @@ import fs from 'fs-extra'; import path from 'path'; import chalk from 'chalk'; -import { add as linkanyAdd } from 'linkany'; +import type { Stats } from 'fs'; +import { add as linkanyAdd, uninstall as linkanyUninstall } from 'linkany'; import { DotfileCreateOptions, LinkResult, AddOptions, ApplyResult, ManifestEntry, StowResult, DiffResult, StatusEntry, StatusResult, ManagerImportOptions } from './types.js'; /** @@ -87,7 +88,7 @@ export class DotfileManager { let actualFileName = alias; let targetPath = path.join(targetDir, alias); - if (!await fs.pathExists(targetPath)) { + if (!await this.pathExistsNoFollow(targetPath)) { const found = await this.findWithCommonSuffix(targetDir, alias); if (found) { actualFileName = 
path.basename(found); @@ -375,13 +376,13 @@ export class DotfileManager { * Returns 'linked' (created/updated) | 'noop' (already correct) | 'conflict' (real file in the way). */ private async doLink(source: string, target: string): Promise<'linked' | 'noop' | 'conflict'> { - if (await fs.pathExists(target)) { - const st = await fs.lstat(target); + const st = await this.lstatIfExists(target); + if (st) { if (!st.isSymbolicLink()) return 'conflict'; const current = await fs.readlink(target); if (current !== source) { - // linkany rejects migrating an existing symlink; unlink first - await fs.unlink(target); + // linkany rejects migrating an existing symlink; remove first via linkany. + await this.unlinkSymlinkWithLinkany(target); } // current === source: linkany will detect isSymlinkTo → noop } @@ -399,11 +400,9 @@ export class DotfileManager { * Returns true if removed. */ private async doUnlink(target: string): Promise { - if (await fs.pathExists(target)) { - const st = await fs.lstat(target); - if (st.isSymbolicLink()) { await fs.unlink(target); return true; } - } - return false; + const st = await this.lstatIfExists(target); + if (!st || !st.isSymbolicLink()) return false; + return this.unlinkSymlinkWithLinkany(target); } private async findWithCommonSuffix(targetDir: string, alias: string): Promise { @@ -411,10 +410,38 @@ export class DotfileManager { for (const suffix of suffixes) { if (!alias.endsWith(suffix)) { const candidate = path.join(targetDir, `${alias}${suffix}`); - if (await fs.pathExists(candidate)) return candidate; + if (await this.pathExistsNoFollow(candidate)) return candidate; } } return undefined; } + private async lstatIfExists(target: string): Promise { + try { + return await fs.lstat(target); + } catch { + return null; + } + } + + private async pathExistsNoFollow(target: string): Promise { + return (await this.lstatIfExists(target)) !== null; + } + + private async unlinkSymlinkWithLinkany(target: string): Promise { + const { result } = await 
linkanyUninstall( + { + version: 1 as const, + installs: [{ source: target, target }], + }, + { audit: false } + ); + + if (!result.ok) { + throw new Error(result.errors.join('; ') || `Failed to unlink symlink: ${target}`); + } + + return result.changes.some(c => c.action === 'unlink'); + } + } From 0c7358aa5a71dadc567f371e076fc01a881efdbc Mon Sep 17 00:00:00 2001 From: lbb00 Date: Thu, 5 Mar 2026 02:19:33 +0800 Subject: [PATCH 11/12] feat(cli): add check update init lifecycle commands --- KNOWLEDGE_BASE.md | 6 + README.md | 32 +- README_ZH.md | 32 +- src/__tests__/lifecycle-check-update.test.ts | 150 ++++++ src/__tests__/lifecycle-init.test.ts | 34 ++ src/commands/index.ts | 1 + src/commands/lifecycle.ts | 504 +++++++++++++++++++ src/completion/scripts.ts | 3 + src/index.ts | 166 ++++++ 9 files changed, 924 insertions(+), 4 deletions(-) create mode 100644 src/__tests__/lifecycle-check-update.test.ts create mode 100644 src/__tests__/lifecycle-init.test.ts create mode 100644 src/commands/lifecycle.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index afb4b2f..d42dfac 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -72,6 +72,7 @@ src/ │ ├── helpers.ts # Helper functions (getTargetRepo, parseConfigEntry, etc.) │ ├── install.ts # Generic install function │ ├── add-all.ts # Discover and install all entries from repository +│ ├── lifecycle.ts # check/update/init lifecycle commands │ └── index.ts # Module exports ├── completion/ # Shell completion │ └── scripts.ts # Shell completion scripts (bash, zsh, fish) @@ -189,6 +190,11 @@ async function installEntriesForAdapter(adapter, projectPath): Promise async function installEntriesForTool(adapters[], projectPath): Promise ``` +**Repository Lifecycle Commands (`src/commands/lifecycle.ts`):** +- `checkRepositories(options)`: collects repo URLs from project/user config and compares local HEAD vs upstream (`ahead/behind`) using `git fetch` + `git rev-list`. 
+- `updateRepositories(options)`: updates repos (`git pull` via `cloneOrUpdateRepo`) and reapplies links from config. +- `initRulesRepository(options)`: scaffolds `ai-rules-sync.json` with adapter-driven `sourceDir` defaults and creates default source directories. + **Helper Functions (`src/commands/helpers.ts`):** - `getTargetRepo(options)`: Resolve target repository from options or config - `inferDefaultMode(projectPath)`: Auto-detect cursor/copilot mode from config diff --git a/README.md b/README.md index 0a5f06a..98e3b70 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ Stop copying `.mdc` files around. Manage your rules in Git repositories and sync - [Core Concepts](#core-concepts) - [Recommended Command Style](#recommended-command-style) - [Basic Usage](#basic-usage) +- [Repository Lifecycle](#repository-lifecycle) - [Tool-Specific Guides](#tool-specific-guides) - [Advanced Features](#advanced-features) - [User Mode](#user-mode-personal-ai-config-files) @@ -286,17 +287,20 @@ ais cursor rules remove react # Query commands ais status ais search react +ais check # Script/CI JSON output ais ls --json ais status --json ais search react --json +ais check --json ais config repo ls --json ais config repo show company-rules --json # Safe preview before destructive operations ais cursor rules rm react --dry-run ais cursor rules import my-rule --dry-run +ais update --dry-run ``` --- @@ -317,11 +321,13 @@ mkdir ~/my-rules-repo cd ~/my-rules-repo git init +# Scaffold default ai-rules-sync.json + source directories +ais init + # Set as current repository ais use ~/my-rules-repo -# Create rules structure -mkdir -p .cursor/rules +# Add your first rule echo "# React Rules" > .cursor/rules/react.mdc git add . 
git commit -m "Initial commit" @@ -424,6 +430,28 @@ ais copilot install # All copilot entries (instructions + skills) ais install # All tools ``` +## Repository Lifecycle + +```bash +# Check whether configured repositories are behind upstream +ais check + +# Check user config repositories +ais check --user + +# Preview updates without pulling repositories +ais update --dry-run + +# Pull updates and reinstall entries from config +ais update + +# Initialize a rules repository template in current directory +ais init + +# Initialize template in a subdirectory +ais init my-rules-repo +``` + --- ## Tool-Specific Guides diff --git a/README_ZH.md b/README_ZH.md index 792da6e..e262239 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -23,6 +23,7 @@ - [核心概念](#核心概念) - [推荐命令风格](#推荐命令风格) - [基础使用](#基础使用) +- [仓库生命周期](#仓库生命周期) - [各工具使用指南](#各工具使用指南) - [高级功能](#高级功能) - [User 模式](#user-模式个人-ai-配置文件) @@ -286,17 +287,20 @@ ais cursor rules remove react # 查询命令 ais status ais search react +ais check # 脚本/CI 的 JSON 输出 ais ls --json ais status --json ais search react --json +ais check --json ais config repo ls --json ais config repo show company-rules --json # 破坏性操作前先预览 ais cursor rules rm react --dry-run ais cursor rules import my-rule --dry-run +ais update --dry-run ``` --- @@ -317,11 +321,13 @@ mkdir ~/my-rules-repo cd ~/my-rules-repo git init +# 生成默认 ai-rules-sync.json 和源目录结构 +ais init + # 设置为当前仓库 ais use ~/my-rules-repo -# 创建规则结构 -mkdir -p .cursor/rules +# 添加第一条规则 echo "# React Rules" > .cursor/rules/react.mdc git add . 
git commit -m "Initial commit" @@ -424,6 +430,28 @@ ais copilot install # 所有 copilot 条目(指令 + 技能 + 提示词 + 代 ais install # 所有工具 ``` +## 仓库生命周期 + +```bash +# 检查配置中依赖仓库是否落后于远端 +ais check + +# 检查 user 配置中的仓库 +ais check --user + +# 只预览更新,不执行 pull +ais update --dry-run + +# 拉取仓库更新并根据配置重装链接 +ais update + +# 在当前目录初始化规则仓库模板 +ais init + +# 在子目录初始化模板 +ais init my-rules-repo +``` + --- ## 各工具使用指南 diff --git a/src/__tests__/lifecycle-check-update.test.ts b/src/__tests__/lifecycle-check-update.test.ts new file mode 100644 index 0000000..3768760 --- /dev/null +++ b/src/__tests__/lifecycle-check-update.test.ts @@ -0,0 +1,150 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import fs from 'fs-extra'; +import { execa } from 'execa'; +import { checkRepositories, updateRepositories } from '../commands/lifecycle.js'; +import { getConfig, setConfig, getUserProjectConfig } from '../config.js'; +import { getCombinedProjectConfig } from '../project-config.js'; +import { cloneOrUpdateRepo } from '../git.js'; +import { installAllUserEntries, installEntriesForAdapter } from '../commands/install.js'; + +vi.mock('execa', () => ({ + execa: vi.fn() +})); + +vi.mock('../config.js', () => ({ + getConfig: vi.fn(), + setConfig: vi.fn(), + getReposBaseDir: vi.fn(() => '/tmp/repos'), + getUserProjectConfig: vi.fn() +})); + +vi.mock('../project-config.js', () => ({ + getCombinedProjectConfig: vi.fn() +})); + +vi.mock('../git.js', () => ({ + cloneOrUpdateRepo: vi.fn() +})); + +vi.mock('../commands/install.js', () => ({ + installAllUserEntries: vi.fn(), + installEntriesForAdapter: vi.fn() +})); + +vi.mock('../adapters/index.js', () => ({ + adapterRegistry: { + all: vi.fn(() => [{ + name: 'cursor-rules', + tool: 'cursor', + subtype: 'rules', + configPath: ['cursor', 'rules'], + defaultSourceDir: '.cursor/rules' + }]) + } +})); + +describe('lifecycle check/update', () => { + beforeEach(() => { + vi.resetAllMocks(); + }); + + it('should detect update-available repositories', async () => { + 
vi.mocked(getCombinedProjectConfig).mockResolvedValue({ + cursor: { + rules: { + react: 'https://example.com/rules.git' + } + } + } as any); + vi.mocked(getConfig).mockResolvedValue({ + currentRepo: 'rules', + repos: { + rules: { + name: 'rules', + url: 'https://example.com/rules.git', + path: '/tmp/rules' + } + } + } as any); + + vi.spyOn(fs, 'pathExists').mockImplementation(async (target: fs.PathLike) => { + const value = String(target); + return value === '/tmp/rules' || value === '/tmp/rules/.git'; + }); + + const execaMock = vi.mocked(execa as any); + execaMock.mockImplementation(async (_cmd: string, args: string[]) => { + const command = args.join(' '); + if (command === 'fetch --quiet') return { stdout: '' }; + if (command === 'rev-parse --short HEAD') return { stdout: 'abc1234' }; + if (command === 'rev-parse --abbrev-ref --symbolic-full-name @{u}') return { stdout: 'origin/main' }; + if (command === 'rev-list --left-right --count HEAD...@{u}') return { stdout: '0\t2' }; + throw new Error(`Unexpected git command: ${command}`); + }); + + const result = await checkRepositories({ + projectPath: '/tmp/project' + }); + + expect(result.total).toBe(1); + expect(result.updateAvailable).toBe(1); + expect(result.entries[0]).toMatchObject({ + repoUrl: 'https://example.com/rules.git', + status: 'update-available', + ahead: 0, + behind: 2 + }); + }); + + it('should preview updates in dry-run mode without pulling', async () => { + vi.mocked(getCombinedProjectConfig).mockResolvedValue({ + cursor: { + rules: { + react: 'https://example.com/rules.git' + } + } + } as any); + vi.mocked(getUserProjectConfig).mockResolvedValue({} as any); + vi.mocked(getConfig).mockResolvedValue({ + currentRepo: 'rules', + repos: { + rules: { + name: 'rules', + url: 'https://example.com/rules.git', + path: '/tmp/rules' + } + } + } as any); + + vi.spyOn(fs, 'pathExists').mockImplementation(async (target: fs.PathLike) => { + const value = String(target); + return value === '/tmp/rules' || value 
=== '/tmp/rules/.git'; + }); + + const execaMock = vi.mocked(execa as any); + execaMock.mockImplementation(async (_cmd: string, args: string[]) => { + const command = args.join(' '); + if (command === 'fetch --quiet') return { stdout: '' }; + if (command === 'rev-parse --short HEAD') return { stdout: 'abc1234' }; + if (command === 'rev-parse --abbrev-ref --symbolic-full-name @{u}') return { stdout: 'origin/main' }; + if (command === 'rev-list --left-right --count HEAD...@{u}') return { stdout: '0\t1' }; + throw new Error(`Unexpected git command: ${command}`); + }); + + const result = await updateRepositories({ + projectPath: '/tmp/project', + dryRun: true + }); + + expect(result.dryRun).toBe(true); + expect(result.total).toBe(1); + expect(result.entries[0]).toMatchObject({ + repoUrl: 'https://example.com/rules.git', + action: 'would-update' + }); + expect(cloneOrUpdateRepo).not.toHaveBeenCalled(); + expect(setConfig).not.toHaveBeenCalled(); + expect(installAllUserEntries).not.toHaveBeenCalled(); + expect(installEntriesForAdapter).not.toHaveBeenCalled(); + }); +}); diff --git a/src/__tests__/lifecycle-init.test.ts b/src/__tests__/lifecycle-init.test.ts new file mode 100644 index 0000000..284cf60 --- /dev/null +++ b/src/__tests__/lifecycle-init.test.ts @@ -0,0 +1,34 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs-extra'; +import { describe, expect, it } from 'vitest'; +import { initRulesRepository } from '../commands/lifecycle.js'; + +describe('initRulesRepository', () => { + it('should create a repository template with sourceDir config', async () => { + const cwd = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-init-')); + const result = await initRulesRepository({ + cwd, + name: 'rules-template' + }); + + const configExists = await fs.pathExists(result.configPath); + expect(configExists).toBe(true); + + const config = await fs.readJson(result.configPath); + expect(config.sourceDir?.cursor?.rules).toBe('.cursor/rules'); + 
expect(config.sourceDir?.copilot?.instructions).toBe('.github/instructions'); + + const cursorDir = path.join(result.projectPath, '.cursor', 'rules'); + expect(await fs.pathExists(cursorDir)).toBe(true); + expect(result.createdDirectories.length).toBeGreaterThan(0); + }); + + it('should throw if ai-rules-sync.json exists and force is not enabled', async () => { + const projectPath = await fs.mkdtemp(path.join(os.tmpdir(), 'ais-init-force-')); + const configPath = path.join(projectPath, 'ai-rules-sync.json'); + await fs.writeJson(configPath, { sourceDir: {} }, { spaces: 2 }); + + await expect(initRulesRepository({ cwd: projectPath })).rejects.toThrow('already exists'); + }); +}); diff --git a/src/commands/index.ts b/src/commands/index.ts index e15f1fb..eddbe36 100644 --- a/src/commands/index.ts +++ b/src/commands/index.ts @@ -5,3 +5,4 @@ export * from './helpers.js'; export * from './handlers.js'; export * from './install.js'; +export * from './lifecycle.js'; diff --git a/src/commands/lifecycle.ts b/src/commands/lifecycle.ts new file mode 100644 index 0000000..88b6650 --- /dev/null +++ b/src/commands/lifecycle.ts @@ -0,0 +1,504 @@ +import path from 'path'; +import fs from 'fs-extra'; +import { execa } from 'execa'; +import { adapterRegistry } from '../adapters/index.js'; +import { RepoConfig, getConfig, setConfig, getReposBaseDir, getUserProjectConfig } from '../config.js'; +import { cloneOrUpdateRepo } from '../git.js'; +import { ProjectConfig, RuleEntry, SourceDirConfig, getCombinedProjectConfig } from '../project-config.js'; +import { installAllUserEntries, installEntriesForAdapter } from './install.js'; + +type CheckStatus = + | 'up-to-date' + | 'update-available' + | 'ahead' + | 'diverged' + | 'no-upstream' + | 'missing-local' + | 'not-configured' + | 'error'; + +export interface RepoCheckEntry { + repoUrl: string; + repoName?: string; + localPath?: string; + status: CheckStatus; + ahead: number; + behind: number; + currentCommit?: string; + upstreamRef?: 
string; + message?: string; +} + +export interface CheckResult { + scope: 'project' | 'user'; + total: number; + updateAvailable: number; + entries: RepoCheckEntry[]; +} + +export interface CheckOptions { + projectPath: string; + user?: boolean; + fetch?: boolean; + target?: string; +} + +export type UpdateAction = 'updated' | 'unchanged' | 'would-update' | 'would-clone' | 'error'; + +export interface UpdateEntry { + repoUrl: string; + repoName: string; + localPath: string; + action: UpdateAction; + beforeCommit?: string; + afterCommit?: string; + message?: string; +} + +export interface UpdateResult { + scope: 'project' | 'user'; + dryRun: boolean; + total: number; + updated: number; + unchanged: number; + failed: number; + reinstalled: boolean; + entries: UpdateEntry[]; +} + +export interface InitOptions { + cwd: string; + name?: string; + force?: boolean; + createDirs?: boolean; +} + +export interface InitResult { + projectPath: string; + configPath: string; + createdDirectories: string[]; +} + +function isUrlLike(target: string): boolean { + return target.includes('://') || target.includes('git@') || target.endsWith('.git'); +} + +function getConfigSection(config: ProjectConfig, configPath: [string, string]): Record { + const [topLevel, subLevel] = configPath; + if (topLevel === 'agentsMd') { + return ((config as any).agentsMd || {}) as Record; + } + return (((config as any)[topLevel] || {})[subLevel] || {}) as Record; +} + +function collectRepoUrls(config: ProjectConfig): string[] { + const repoUrls = new Set(); + + for (const adapter of adapterRegistry.all()) { + const section = getConfigSection(config, adapter.configPath); + for (const value of Object.values(section)) { + if (typeof value === 'string') { + repoUrls.add(value); + continue; + } + if (value && typeof value === 'object' && typeof value.url === 'string') { + repoUrls.add(value.url); + } + } + } + + return Array.from(repoUrls); +} + +function filterRepoUrls(repoUrls: string[], repos: Record, 
target?: string): string[] { + if (!target) return repoUrls; + + const matchByName = repos[target]; + if (matchByName) { + return repoUrls.filter(url => url === matchByName.url); + } + + if (isUrlLike(target)) { + return repoUrls.filter(url => url === target); + } + + throw new Error(`Repository "${target}" not found in configuration.`); +} + +function findRepoByUrl(repos: Record, repoUrl: string): RepoConfig | undefined { + for (const repo of Object.values(repos)) { + if (repo.url === repoUrl) { + return repo; + } + } + return undefined; +} + +async function ensureRepoByUrl(repos: Record, repoUrl: string): Promise { + const existing = findRepoByUrl(repos, repoUrl); + if (existing) return existing; + + let name = path.basename(repoUrl, '.git'); + if (!name) { + name = `repo-${Date.now()}`; + } + + if (repos[name] && repos[name].url !== repoUrl) { + name = `${name}-${Date.now()}`; + } + + const repoConfig: RepoConfig = { + name, + url: repoUrl, + path: path.join(getReposBaseDir(), name) + }; + + const newRepos = { ...repos, [name]: repoConfig }; + await setConfig({ repos: newRepos }); + repos[name] = repoConfig; + return repoConfig; +} + +async function runGit(repoPath: string, args: string[]): Promise { + const result = await execa('git', args, { cwd: repoPath }); + return result.stdout.trim(); +} + +async function safeRunGit(repoPath: string, args: string[]): Promise { + try { + return await runGit(repoPath, args); + } catch { + return undefined; + } +} + +async function inspectRepo(repoUrl: string, repo: RepoConfig, shouldFetch: boolean): Promise { + const gitPath = path.join(repo.path, '.git'); + const hasRepoDir = await fs.pathExists(repo.path); + const hasGitDir = hasRepoDir && await fs.pathExists(gitPath); + + if (!hasRepoDir || !hasGitDir) { + return { + repoUrl, + repoName: repo.name, + localPath: repo.path, + status: 'missing-local', + ahead: 0, + behind: 0, + message: 'Repository is not cloned locally.' 
+ }; + } + + if (shouldFetch) { + try { + await runGit(repo.path, ['fetch', '--quiet']); + } catch (error: any) { + return { + repoUrl, + repoName: repo.name, + localPath: repo.path, + status: 'error', + ahead: 0, + behind: 0, + message: `git fetch failed: ${error.message}` + }; + } + } + + const currentCommit = await safeRunGit(repo.path, ['rev-parse', '--short', 'HEAD']); + const upstreamRef = await safeRunGit(repo.path, ['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}']); + + if (!upstreamRef) { + return { + repoUrl, + repoName: repo.name, + localPath: repo.path, + status: 'no-upstream', + ahead: 0, + behind: 0, + currentCommit, + message: 'No upstream branch configured.' + }; + } + + const counts = await safeRunGit(repo.path, ['rev-list', '--left-right', '--count', 'HEAD...@{u}']); + if (!counts) { + return { + repoUrl, + repoName: repo.name, + localPath: repo.path, + status: 'error', + ahead: 0, + behind: 0, + currentCommit, + upstreamRef, + message: 'Failed to compare local and upstream commits.' + }; + } + + const [aheadRaw, behindRaw] = counts.split(/\s+/); + const ahead = Number.parseInt(aheadRaw || '0', 10); + const behind = Number.parseInt(behindRaw || '0', 10); + + let status: CheckStatus = 'up-to-date'; + if (ahead > 0 && behind > 0) { + status = 'diverged'; + } else if (behind > 0) { + status = 'update-available'; + } else if (ahead > 0) { + status = 'ahead'; + } + + return { + repoUrl, + repoName: repo.name, + localPath: repo.path, + status, + ahead, + behind, + currentCommit, + upstreamRef + }; +} + +async function getScopeRepoUrls(options: CheckOptions): Promise<{ scope: 'project' | 'user'; repoUrls: string[]; repos: Record }> { + const scope = options.user ? 'user' : 'project'; + const projectConfig = options.user + ? 
await getUserProjectConfig() + : await getCombinedProjectConfig(options.projectPath); + const config = await getConfig(); + const repos = config.repos || {}; + + const repoUrls = filterRepoUrls(collectRepoUrls(projectConfig), repos, options.target); + return { scope, repoUrls, repos }; +} + +async function reinstallScopeEntries(scope: 'project' | 'user', projectPath: string): Promise { + if (scope === 'user') { + await installAllUserEntries(adapterRegistry.all()); + return; + } + + const config = await getCombinedProjectConfig(projectPath); + for (const adapter of adapterRegistry.all()) { + const section = getConfigSection(config, adapter.configPath); + if (Object.keys(section).length === 0) { + continue; + } + await installEntriesForAdapter(adapter, projectPath); + } +} + +export async function checkRepositories(options: CheckOptions): Promise { + const { scope, repoUrls, repos } = await getScopeRepoUrls(options); + const entries: RepoCheckEntry[] = []; + const shouldFetch = options.fetch !== false; + + for (const repoUrl of repoUrls) { + const repo = findRepoByUrl(repos, repoUrl); + if (!repo) { + entries.push({ + repoUrl, + status: 'not-configured', + ahead: 0, + behind: 0, + message: 'Repository URL exists in config but is not configured locally.' 
+ }); + continue; + } + + const inspected = await inspectRepo(repoUrl, repo, shouldFetch); + entries.push(inspected); + } + + const updateAvailable = entries.filter(entry => entry.status === 'update-available' || entry.status === 'diverged').length; + return { + scope, + total: entries.length, + updateAvailable, + entries + }; +} + +function shortHead(repoPath: string): Promise { + return safeRunGit(repoPath, ['rev-parse', '--short', 'HEAD']); +} + +function shouldConsiderUpdate(checkEntry?: RepoCheckEntry): boolean { + if (!checkEntry) return true; + return checkEntry.status === 'update-available' || checkEntry.status === 'diverged' || checkEntry.status === 'missing-local' || checkEntry.status === 'not-configured'; +} + +export async function updateRepositories(options: CheckOptions & { dryRun?: boolean }): Promise { + const dryRun = options.dryRun === true; + const checked = await checkRepositories({ ...options, fetch: true }); + const config = await getConfig(); + const repos = config.repos || {}; + const checkByUrl = new Map(checked.entries.map(entry => [entry.repoUrl, entry])); + + const entries: UpdateEntry[] = []; + let updated = 0; + let unchanged = 0; + let failed = 0; + + for (const entry of checked.entries) { + const checkEntry = checkByUrl.get(entry.repoUrl); + if (checkEntry?.status === 'error') { + failed++; + entries.push({ + repoUrl: entry.repoUrl, + repoName: entry.repoName || '(unknown)', + localPath: entry.localPath || '', + action: 'error', + message: entry.message || 'Failed to inspect repository state.' 
+ }); + continue; + } + + const isCandidate = shouldConsiderUpdate(checkEntry); + if (!isCandidate) { + entries.push({ + repoUrl: entry.repoUrl, + repoName: entry.repoName || '(unknown)', + localPath: entry.localPath || '', + action: 'unchanged', + beforeCommit: entry.currentCommit + }); + unchanged++; + continue; + } + + const existingRepo = findRepoByUrl(repos, entry.repoUrl); + if (!existingRepo && dryRun) { + entries.push({ + repoUrl: entry.repoUrl, + repoName: '(new)', + localPath: path.join(getReposBaseDir(), path.basename(entry.repoUrl, '.git') || 'repo'), + action: 'would-clone', + message: 'Repository would be configured and cloned.' + }); + continue; + } + + try { + const repo = existingRepo || await ensureRepoByUrl(repos, entry.repoUrl); + const beforeCommit = await shortHead(repo.path); + + if (dryRun) { + entries.push({ + repoUrl: repo.url, + repoName: repo.name, + localPath: repo.path, + action: 'would-update', + beforeCommit + }); + continue; + } + + await cloneOrUpdateRepo(repo); + const afterCommit = await shortHead(repo.path); + + if (!beforeCommit || beforeCommit !== afterCommit) { + updated++; + entries.push({ + repoUrl: repo.url, + repoName: repo.name, + localPath: repo.path, + action: 'updated', + beforeCommit, + afterCommit + }); + } else { + unchanged++; + entries.push({ + repoUrl: repo.url, + repoName: repo.name, + localPath: repo.path, + action: 'unchanged', + beforeCommit, + afterCommit + }); + } + } catch (error: any) { + failed++; + entries.push({ + repoUrl: entry.repoUrl, + repoName: existingRepo?.name || '(unknown)', + localPath: existingRepo?.path || '', + action: 'error', + message: error.message + }); + } + } + + let reinstalled = false; + if (!dryRun && failed === 0 && checked.total > 0) { + await reinstallScopeEntries(checked.scope, options.projectPath); + reinstalled = true; + } + + return { + scope: checked.scope, + dryRun, + total: checked.total, + updated, + unchanged, + failed, + reinstalled, + entries + }; +} + +function 
buildTemplateSourceDirConfig(): SourceDirConfig { + const sourceDir: SourceDirConfig = {}; + + for (const adapter of adapterRegistry.all()) { + const toolSection = sourceDir[adapter.tool] || {}; + if (!toolSection[adapter.subtype]) { + toolSection[adapter.subtype] = adapter.defaultSourceDir; + } + sourceDir[adapter.tool] = toolSection; + } + + return sourceDir; +} + +export async function initRulesRepository(options: InitOptions): Promise { + const projectPath = options.name + ? path.resolve(options.cwd, options.name) + : path.resolve(options.cwd); + + await fs.ensureDir(projectPath); + + const configPath = path.join(projectPath, 'ai-rules-sync.json'); + const configExists = await fs.pathExists(configPath); + if (configExists && !options.force) { + throw new Error(`"${configPath}" already exists. Use --force to overwrite.`); + } + + const sourceDir = buildTemplateSourceDirConfig(); + await fs.writeJson(configPath, { sourceDir }, { spaces: 2 }); + + const createdDirectories: string[] = []; + if (options.createDirs !== false) { + const dirs = new Set(); + for (const adapter of adapterRegistry.all()) { + if (!adapter.defaultSourceDir || adapter.defaultSourceDir === '.') { + continue; + } + dirs.add(adapter.defaultSourceDir); + } + + for (const relativeDir of Array.from(dirs).sort()) { + const fullDir = path.join(projectPath, relativeDir); + await fs.ensureDir(fullDir); + createdDirectories.push(fullDir); + } + } + + return { + projectPath, + configPath, + createdDirectories + }; +} diff --git a/src/completion/scripts.ts b/src/completion/scripts.ts index befb037..a73dac2 100644 --- a/src/completion/scripts.ts +++ b/src/completion/scripts.ts @@ -393,6 +393,9 @@ const EXTRA_TOP_LEVEL_COMMANDS: CompletionEntry[] = [ { name: 'import', description: 'Import entry to rules repository' }, { name: 'status', description: 'Show repository and config status' }, { name: 'search', description: 'Search entries in repository' }, + { name: 'check', description: 'Check repository update 
status' }, + { name: 'update', description: 'Update repositories and reinstall entries' }, + { name: 'init', description: 'Initialize a rules repository template' }, { name: 'config', description: 'Manage repository configuration' }, { name: 'user', description: 'Manage user-level AI config entries' }, { name: 'completion', description: 'Output shell completion script' } diff --git a/src/index.ts b/src/index.ts index be0ed42..9719b8d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -31,6 +31,7 @@ import { discoverAllEntries, handleAddAll } from './commands/add-all.js'; import { parseSourceDirParams } from './cli/source-dir-parser.js'; import { setRepoSourceDir, clearRepoSourceDir, showRepoConfig, listRepos, handleUserConfigShow, handleUserConfigSet, handleUserConfigReset, handleGlobalConfigShow, handleGlobalConfigSet, handleGlobalConfigReset } from './commands/config.js'; import { getFormattedVersion } from './commands/version.js'; +import { checkRepositories, updateRepositories, initRulesRepository } from './commands/lifecycle.js'; // Intercept version flags to show detailed version info before Commander processes them if (process.argv.includes('-v') || process.argv.includes('--version')) { @@ -94,6 +95,27 @@ function parseCsvOption(input?: string): string[] | undefined { return values.length > 0 ? 
values : undefined; } +function formatCheckStatus(status: string): string { + switch (status) { + case 'up-to-date': + return chalk.green('up-to-date'); + case 'update-available': + return chalk.yellow('update-available'); + case 'diverged': + return chalk.yellow('diverged'); + case 'ahead': + return chalk.blue('ahead'); + case 'no-upstream': + return chalk.gray('no-upstream'); + case 'missing-local': + return chalk.yellow('missing-local'); + case 'not-configured': + return chalk.yellow('not-configured'); + default: + return chalk.red(status); + } +} + program .name('ais') .description('AI Rules Sync - Sync agent rules from git repository') @@ -396,6 +418,150 @@ program } }); +program + .command('check') + .description('Check for repository updates used by current config') + .option('-u, --user', 'Check repositories from user config') + .option('-g, --global', 'Check repositories from user config (deprecated alias for --user)') + .option('--no-fetch', 'Skip git fetch before checking') + .option('--json', 'Output results as JSON') + .action(async (cmdOptions: { user?: boolean; global?: boolean; fetch?: boolean; json?: boolean }) => { + try { + const opts = program.opts(); + const result = await checkRepositories({ + projectPath: process.cwd(), + user: cmdOptions.user || cmdOptions.global, + fetch: cmdOptions.fetch, + target: opts.target + }); + + if (cmdOptions.json) { + console.log(JSON.stringify(result, null, 2)); + return; + } + + if (result.total === 0) { + console.log(chalk.yellow('No repository references found in the selected config.')); + return; + } + + console.log(chalk.bold(`Repository check (${result.scope} scope):`)); + for (const entry of result.entries) { + const name = entry.repoName || entry.repoUrl; + const counts = + entry.status === 'update-available' || entry.status === 'ahead' || entry.status === 'diverged' + ? chalk.gray(` (ahead ${entry.ahead}, behind ${entry.behind})`) + : ''; + const detail = entry.message ? 
chalk.gray(` - ${entry.message}`) : ''; + console.log(` - ${chalk.cyan(name)}: ${formatCheckStatus(entry.status)}${counts}${detail}`); + } + + if (result.updateAvailable > 0) { + console.log(chalk.yellow(`\n${result.updateAvailable} repositories have updates available.`)); + } else { + console.log(chalk.green('\nAll repositories are up-to-date.')); + } + } catch (error: any) { + console.error(chalk.red('Error checking repositories:'), error.message); + process.exit(1); + } + }); + +program + .command('update') + .description('Update repositories used by current config and reinstall entries') + .option('-u, --user', 'Update repositories from user config') + .option('-g, --global', 'Update repositories from user config (deprecated alias for --user)') + .option('--dry-run', 'Preview updates without pulling repositories') + .option('--json', 'Output results as JSON') + .action(async (cmdOptions: { user?: boolean; global?: boolean; dryRun?: boolean; json?: boolean }) => { + try { + const opts = program.opts(); + const result = await updateRepositories({ + projectPath: process.cwd(), + user: cmdOptions.user || cmdOptions.global, + dryRun: cmdOptions.dryRun, + target: opts.target + }); + + if (cmdOptions.json) { + console.log(JSON.stringify(result, null, 2)); + return; + } + + if (result.total === 0) { + console.log(chalk.yellow('No repository references found in the selected config.')); + return; + } + + console.log(chalk.bold(`${result.dryRun ? '[DRY RUN] ' : ''}Repository update (${result.scope} scope):`)); + for (const entry of result.entries) { + const name = entry.repoName || entry.repoUrl; + const detail = entry.message ? chalk.gray(` - ${entry.message}`) : ''; + const commitChange = entry.beforeCommit || entry.afterCommit + ? chalk.gray(` (${entry.beforeCommit || '-'} -> ${entry.afterCommit || '-'})`) + : ''; + const actionColor = + entry.action === 'updated' + ? chalk.green(entry.action) + : entry.action === 'error' + ? 
chalk.red(entry.action) + : entry.action === 'would-update' || entry.action === 'would-clone' + ? chalk.yellow(entry.action) + : chalk.gray(entry.action); + console.log(` - ${chalk.cyan(name)}: ${actionColor}${commitChange}${detail}`); + } + + console.log(chalk.bold('\nSummary:')); + console.log(` Updated: ${chalk.green(String(result.updated))}`); + console.log(` Unchanged: ${chalk.gray(String(result.unchanged))}`); + console.log(` Failed: ${result.failed > 0 ? chalk.red(String(result.failed)) : chalk.gray('0')}`); + if (!result.dryRun) { + console.log(` Reinstalled entries: ${result.reinstalled ? chalk.green('yes') : chalk.yellow('no')}`); + } + + if (result.failed > 0) { + process.exit(1); + } + } catch (error: any) { + console.error(chalk.red('Error updating repositories:'), error.message); + process.exit(1); + } + }); + +program + .command('init [name]') + .description('Initialize an ai-rules-sync repository template') + .option('-f, --force', 'Overwrite existing ai-rules-sync.json') + .option('--no-dirs', 'Do not create default source directories') + .option('--json', 'Output results as JSON') + .action(async (name: string | undefined, cmdOptions: { force?: boolean; dirs?: boolean; json?: boolean }) => { + try { + const result = await initRulesRepository({ + cwd: process.cwd(), + name, + force: cmdOptions.force, + createDirs: cmdOptions.dirs + }); + + if (cmdOptions.json) { + console.log(JSON.stringify(result, null, 2)); + return; + } + + console.log(chalk.green(`Initialized repository template at ${result.projectPath}`)); + console.log(chalk.gray(`Config: ${result.configPath}`)); + if (result.createdDirectories.length > 0) { + console.log(chalk.gray(`Created ${result.createdDirectories.length} source directories.`)); + } else { + console.log(chalk.gray('No source directories created.')); + } + } catch (error: any) { + console.error(chalk.red('Error initializing repository template:'), error.message); + process.exit(1); + } + }); + // ============ Top-level 
shortcuts ============ program .command('add') From aa53caac6d4c06062d1388a750128a67bc6f074b Mon Sep 17 00:00:00 2001 From: lbb00 Date: Thu, 5 Mar 2026 03:03:37 +0800 Subject: [PATCH 12/12] refactor!: remove legacy compatibility code paths --- KNOWLEDGE_BASE.md | 19 +- README.md | 9 +- README_ZH.md | 9 +- src/__tests__/handlers-dry-run.test.ts | 9 +- .../project-config-source-dir.test.ts | 29 --- src/adapters/agents-md.ts | 10 +- src/adapters/claude-md.ts | 2 +- src/adapters/codex-md.ts | 2 +- src/adapters/gemini-md.ts | 2 +- src/adapters/index.ts | 2 +- src/adapters/types.ts | 6 +- src/commands/add-all.ts | 1 - src/commands/config.ts | 15 -- src/commands/handlers.ts | 220 +++++------------- src/commands/install.ts | 116 +-------- src/completion.ts | 38 +-- src/config.ts | 68 +----- src/index.ts | 66 +----- src/link.ts | 8 - src/project-config.ts | 107 +-------- src/sync-engine.ts | 44 +--- tests/completion.test.ts | 59 +---- tests/config.test.ts | 26 --- tests/project-config.test.ts | 75 ++---- 24 files changed, 158 insertions(+), 784 deletions(-) delete mode 100644 src/link.ts diff --git a/KNOWLEDGE_BASE.md b/KNOWLEDGE_BASE.md index d42dfac..7ce8bf3 100644 --- a/KNOWLEDGE_BASE.md +++ b/KNOWLEDGE_BASE.md @@ -10,7 +10,7 @@ A key feature is **User Mode** (`--user` / `-u`): use `$HOME` as project root to - **Symbolic Links**: Entries are linked from the local cache of the repo to project directories, avoiding file duplication and drift. - **Dependency Tracking**: Uses `ai-rules-sync.json` to track project dependencies (Cursor rules/commands/skills/subagents, Copilot instructions, Claude Code rules/skills/subagents/CLAUDE.md, Trae rules/skills, OpenCode agents/skills/commands/tools, Codex rules/skills/AGENTS.md, Gemini CLI commands/skills/agents/GEMINI.md, Windsurf rules/skills, Cline rules/skills, universal AGENTS.md). - **Privacy**: Supports private/local entries via `ai-rules-sync.local.json` and `.git/info/exclude`. 
-- **User Mode**: `--user` / `-u` flag on add/remove/install commands. Sets `projectPath = $HOME`, stores dependencies in `~/.config/ai-rules-sync/user.json`, skips gitignore management. Enables `ais user install` to restore all user-scope symlinks on a new machine. (`--global`/`-g` kept as deprecated aliases.) +- **User Mode**: `--user` / `-u` flag on add/remove/install commands. Sets `projectPath = $HOME`, stores dependencies in `~/.config/ai-rules-sync/user.json`, skips gitignore management. Enables `ais user install` to restore all user-scope symlinks on a new machine. - **User Config Path**: Configurable via `ais config user set ` for dotfiles integration (e.g. `~/dotfiles/ai-rules-sync/user.json`). ## Architecture @@ -112,8 +112,8 @@ interface SyncAdapter { resolveTargetName?(...): string; // Unified operations (provided by createBaseAdapter) - addDependency(projectPath, name, repoUrl, alias?, isLocal?): Promise<{migrated}>; - removeDependency(projectPath, alias): Promise<{removedFrom, migrated}>; + addDependency(projectPath, name, repoUrl, alias?, isLocal?): Promise; + removeDependency(projectPath, alias): Promise<{removedFrom}>; link(options): Promise; unlink(projectPath, alias): Promise; } @@ -529,7 +529,7 @@ Gemini CLI (https://geminicli.com/) is supported with three entry types: - `ais cline install` - Install all Cline rules and skills. - `ais agents-md install` - Install AGENTS.md files. - `ais install` - Install everything (smart dispatch). -- `ais install --user` / `ais user install` - Install all user-scope AI config files from `~/.config/ai-rules-sync/user.json`. (`--global` and `ais global install` kept as deprecated aliases.) +- `ais install --user` / `ais user install` - Install all user-scope AI config files from `~/.config/ai-rules-sync/user.json`. ### 17. 
Bulk Discovery and Installation (add-all) @@ -741,7 +741,6 @@ ais cursor rules add-all -s experimental/rules - `--user` / `-u` flag sets `projectPath = $HOME` and stores dependencies in `~/.config/ai-rules-sync/user.json` instead of a project's `ai-rules-sync.json` - Gitignore management is skipped automatically (home dir is not a git repo) - Symlinks are created at absolute paths (e.g., `~/.claude/CLAUDE.md`) -- `--global` / `-g` are kept as deprecated backward-compatible aliases **Commands:** ```bash @@ -913,8 +912,7 @@ ais user install ``` - **`ai-rules-sync.local.json`**: Private dependencies (merged, takes precedence). -- **Legacy format**: Old configs with `cursor.rules` as string are still supported. -- **Legacy files**: `cursor-rules*.json` are read-only compatible; write operations migrate to new format. +- **Config files**: Only `ai-rules-sync.json` and `ai-rules-sync.local.json` are supported. ### 21. Shell Completion - **Auto-Install**: On first run, AIS prompts to install shell completion automatically. @@ -1023,7 +1021,7 @@ ais user install - Added **User Config Path**: `ais config user set ` for dotfiles integration - Added **Gemini CLI support**: commands (`.toml`), skills (directory), subagents (`.md`) - Added **OpenAI Codex support**: rules (`.rules`, Starlark), skills (`.agents/skills/`) -- Renamed deprecated `--global` / `-g` flags to `--user` / `-u` +- Standardized user-scope flags on `--user` / `-u` (legacy `--global` / `-g` aliases removed) ### Proper User-Level Sync for All Tools + gemini-md / codex-md Adapters (2026-02) @@ -1102,7 +1100,6 @@ ais user install - Sets `projectPath = $HOME` automatically - Stores dependencies in `~/.config/ai-rules-sync/user.json` - Skips gitignore management (home dir isn't a git repo) - - `--global` / `-g` kept as deprecated backward-compatible aliases 2. **claude-md Adapter**: - New adapter for CLAUDE.md-style files (`.claude/.md`) @@ -1113,14 +1110,12 @@ ais user install 3. 
**One-click User Install**: - `ais user install` / `ais install --user` - Reads all entries from `user.json` and recreates symlinks (perfect for new machine setup) - - `ais global install` and `ais install --global` kept as deprecated aliases 4. **User Config Path Management**: - `ais config user show` - View current user.json path - `ais config user set ` - Set custom path (for dotfiles integration) - `ais config user reset` - Reset to default path - Stored as `userConfigPath` in `~/.config/ai-rules-sync/config.json` - - `ais config global show|set|reset` kept as deprecated aliases 5. **claude-rules Adapter** (formalized): - Adapter for `.claude/rules/` files (`.md` suffix) @@ -1144,7 +1139,7 @@ ais user install - `src/commands/handlers.ts` - Added `user?` and `skipIgnore?` to `CommandContext`; user path for `handleAdd`/`handleRemove` - `src/commands/install.ts` - Added `installUserEntriesForAdapter()`, `installAllUserEntries()` - `src/commands/config.ts` - Added `handleUserConfigShow/Set/Reset()` -- `src/cli/register.ts` - Added `-u, --user` flag to add/remove/install commands (with `-g, --global` as deprecated aliases) +- `src/cli/register.ts` - Added `-u, --user` flag to add/remove/install commands - `src/index.ts` - Added `ais claude md` subgroup, `ais user install`, `ais config user` commands **Files Changed:** 11 modified/new, all tests passing (206/206) diff --git a/README.md b/README.md index 98e3b70..c433367 100644 --- a/README.md +++ b/README.md @@ -1128,13 +1128,12 @@ ais cursor add company-secrets --local - Should be in `.gitignore` (AIS adds it automatically) - Merges with main config (local takes precedence) -### Legacy Compatibility +### Config Compatibility -**Old `cursor-rules.json` format is still supported:** +AIS only reads and writes: -- If `ai-rules-sync.json` doesn't exist but `cursor-rules.json` does, AIS will read it -- Running any write command (add/remove) will migrate to new format -- Only Cursor rules are supported in legacy format +- 
`ai-rules-sync.json` +- `ai-rules-sync.local.json` --- diff --git a/README_ZH.md b/README_ZH.md index e262239..ca9e884 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -1128,13 +1128,12 @@ ais cursor add company-secrets --local - 应在 `.gitignore` 中(AIS 自动添加) - 与主配置合并(本地优先) -### Legacy 兼容性 +### 配置兼容性 -**旧的 `cursor-rules.json` 格式仍然支持:** +AIS 仅读取和写入: -- 如果 `ai-rules-sync.json` 不存在但 `cursor-rules.json` 存在,AIS 会读取它 -- 运行任何写命令(add/remove)会迁移到新格式 -- Legacy 格式仅支持 Cursor 规则 +- `ai-rules-sync.json` +- `ai-rules-sync.local.json` --- diff --git a/src/__tests__/handlers-dry-run.test.ts b/src/__tests__/handlers-dry-run.test.ts index c6faafe..4bb8925 100644 --- a/src/__tests__/handlers-dry-run.test.ts +++ b/src/__tests__/handlers-dry-run.test.ts @@ -19,15 +19,18 @@ describe('handleRemove dry-run', () => { targetDir: '.cursor/rules', mode: 'hybrid', hybridFileSuffixes: ['.md', '.mdc'], - addDependency: vi.fn(async () => ({ migrated: false })), - removeDependency: vi.fn(async () => ({ removedFrom: [], migrated: false })), + forProject: vi.fn(() => { + throw new Error('forProject should not be called in dry-run'); + }) as any, + addDependency: vi.fn(async () => {}), + removeDependency: vi.fn(async () => ({ removedFrom: [] })), link: vi.fn(async () => ({ sourceName: 'rule', targetName: 'rule', linked: true })), unlink: vi.fn(async () => {}) }; const result = await handleRemove(adapter, projectPath, 'react', false, { dryRun: true }); - expect(result).toEqual({ removedFrom: [], migrated: false }); + expect(result).toEqual({ removedFrom: [] }); expect(adapter.unlink).not.toHaveBeenCalled(); expect(adapter.removeDependency).not.toHaveBeenCalled(); }); diff --git a/src/__tests__/project-config-source-dir.test.ts b/src/__tests__/project-config-source-dir.test.ts index d45e9be..150eb4d 100644 --- a/src/__tests__/project-config-source-dir.test.ts +++ b/src/__tests__/project-config-source-dir.test.ts @@ -59,35 +59,6 @@ describe('project-config source directory resolution', () => { 
expect(repoConfig.cline?.skills).toBe('.cline/skills'); }); - it('should parse legacy string-based source paths and ignore dependency objects', async () => { - const config = { - rootPath: 'legacy-root', - cursor: { - rules: { - localAlias: 'https://example.com/repo.git' - } - }, - windsurf: { - rules: '.windsurf/rules' - }, - cline: { - skills: '.cline/skills' - }, - agentsMd: { - file: 'agents-md' - } - }; - - await fs.writeJson(path.join(tempDir, 'ai-rules-sync.json'), config, { spaces: 2 }); - - const repoConfig = await getRepoSourceConfig(tempDir); - expect(repoConfig.rootPath).toBe('legacy-root'); - expect(repoConfig.windsurf?.rules).toBe('.windsurf/rules'); - expect(repoConfig.cline?.skills).toBe('.cline/skills'); - expect(repoConfig.agentsMd?.file).toBe('agents-md'); - expect(repoConfig.cursor?.rules).toBeUndefined(); - }); - it('should return only rootPath when config only contains dependency objects', async () => { const config: ProjectConfig = { rootPath: 'project-root', diff --git a/src/adapters/agents-md.ts b/src/adapters/agents-md.ts index 299525b..aac05ec 100644 --- a/src/adapters/agents-md.ts +++ b/src/adapters/agents-md.ts @@ -1,8 +1,7 @@ import path from 'path'; import fs from 'fs-extra'; -import { SyncAdapter, ResolvedSource, SyncOptions, LinkResult } from './types.js'; +import { SyncAdapter, ResolvedSource } from './types.js'; import { createBaseAdapter } from './base.js'; -import { linkEntry as engineLinkEntry, unlinkEntry as engineUnlinkEntry } from '../sync-engine.js'; const SUFFIX = '.md'; const CONFIG_FILENAME = 'ai-rules-sync.json'; @@ -144,7 +143,7 @@ export const agentsMdAdapter: SyncAdapter = { ...baseAdapter, // Custom addDependency that writes to flat agentsMd structure - async addDependency(projectPath: string, name: string, repoUrl: string, alias?: string, isLocal: boolean = false, targetDir?: string): Promise<{ migrated: boolean }> { + async addDependency(projectPath: string, name: string, repoUrl: string, alias?: string, isLocal: 
boolean = false, targetDir?: string): Promise { const configPath = path.join(projectPath, isLocal ? LOCAL_CONFIG_FILENAME : CONFIG_FILENAME); const config = await readConfigFile(configPath); @@ -167,11 +166,10 @@ export const agentsMdAdapter: SyncAdapter = { config.agentsMd[targetName] = entryValue; await writeConfigFile(configPath, config); - return { migrated: false }; }, // Custom removeDependency that removes from flat agentsMd structure - async removeDependency(projectPath: string, alias: string): Promise<{ removedFrom: string[]; migrated: boolean }> { + async removeDependency(projectPath: string, alias: string): Promise<{ removedFrom: string[] }> { const removedFrom: string[] = []; const mainPath = path.join(projectPath, CONFIG_FILENAME); @@ -190,6 +188,6 @@ export const agentsMdAdapter: SyncAdapter = { removedFrom.push(LOCAL_CONFIG_FILENAME); } - return { removedFrom, migrated: false }; + return { removedFrom }; } }; diff --git a/src/adapters/claude-md.ts b/src/adapters/claude-md.ts index 1c365cf..8df7c53 100644 --- a/src/adapters/claude-md.ts +++ b/src/adapters/claude-md.ts @@ -8,7 +8,7 @@ const SUFFIX = '.md'; * Mode: file - links individual .md files from .claude/ directory * * Global mode usage: - * ais claude md add CLAUDE --global + * ais claude md add CLAUDE --user * → creates symlink at ~/.claude/CLAUDE.md * * Project mode usage: diff --git a/src/adapters/codex-md.ts b/src/adapters/codex-md.ts index 28e4f29..d541b3d 100644 --- a/src/adapters/codex-md.ts +++ b/src/adapters/codex-md.ts @@ -8,7 +8,7 @@ const SUFFIX = '.md'; * Mode: file - links individual .md files from .codex/ directory * * Global mode usage: - * ais codex md add AGENTS --global + * ais codex md add AGENTS --user * → creates symlink at ~/.codex/AGENTS.md * * Project mode usage: diff --git a/src/adapters/gemini-md.ts b/src/adapters/gemini-md.ts index 13473cf..c69200d 100644 --- a/src/adapters/gemini-md.ts +++ b/src/adapters/gemini-md.ts @@ -8,7 +8,7 @@ const SUFFIX = '.md'; * Mode: file 
- links individual .md files from .gemini/ directory * * Global mode usage: - * ais gemini md add GEMINI --global + * ais gemini md add GEMINI --user * → creates symlink at ~/.gemini/GEMINI.md * * Project mode usage: diff --git a/src/adapters/index.ts b/src/adapters/index.ts index ab30846..a5e7948 100644 --- a/src/adapters/index.ts +++ b/src/adapters/index.ts @@ -165,7 +165,7 @@ function getAliasSectionConfig(cfg: ProjectConfig, adapter: SyncAdapter): Record return undefined; } - // AGENTS.md dependencies are stored in flat `agentsMd` object for backward compatibility. + // AGENTS.md dependencies use a flat `agentsMd` object. if (topLevel === 'agentsMd') { return top as Record; } diff --git a/src/adapters/types.ts b/src/adapters/types.ts index 48be305..908410a 100644 --- a/src/adapters/types.ts +++ b/src/adapters/types.ts @@ -59,7 +59,7 @@ export interface SyncAdapter { * Provides the full dotfile API (add, remove, apply, diff, status, import, readManifest). * Pass null as repo for remove-only operations (no source resolution needed). 
*/ - forProject?(projectPath: string, repo: RepoConfig | RepoResolverFn | null, isLocal?: boolean): DotfileManager; + forProject(projectPath: string, repo: RepoConfig | RepoResolverFn | null, isLocal?: boolean): DotfileManager; /** Add a dependency to project config */ addDependency( @@ -69,13 +69,13 @@ export interface SyncAdapter { alias?: string, isLocal?: boolean, targetDir?: string - ): Promise<{ migrated: boolean }>; + ): Promise; /** Remove a dependency from project config */ removeDependency( projectPath: string, alias: string - ): Promise<{ removedFrom: string[]; migrated: boolean }>; + ): Promise<{ removedFrom: string[] }>; /** Link entry from repo to project (filesystem) */ link(options: SyncOptions): Promise; diff --git a/src/commands/add-all.ts b/src/commands/add-all.ts index 9e70e27..fac3384 100644 --- a/src/commands/add-all.ts +++ b/src/commands/add-all.ts @@ -5,7 +5,6 @@ import readline from 'readline'; import { RepoConfig } from '../config.js'; import { SyncAdapter, AdapterRegistry } from '../adapters/types.js'; import { getRepoSourceConfig, getSourceDir, getCombinedProjectConfig } from '../project-config.js'; -import { linkEntry } from '../sync-engine.js'; /** * Simple yes/no prompt using readline diff --git a/src/commands/config.ts b/src/commands/config.ts index c93277b..5be0edd 100644 --- a/src/commands/config.ts +++ b/src/commands/config.ts @@ -175,11 +175,6 @@ export async function handleUserConfigShow(): Promise { console.log(`User config path: ${chalk.cyan(displayPath)} ${label}`); } -/** @deprecated Use handleUserConfigShow() instead */ -export async function handleGlobalConfigShow(): Promise { - return handleUserConfigShow(); -} - /** * Set a custom user config path */ @@ -194,11 +189,6 @@ export async function handleUserConfigSet(customPath: string): Promise { console.log(chalk.green(`✓ User config path set to: ${chalk.cyan(stored)}`)); } -/** @deprecated Use handleUserConfigSet() instead */ -export async function 
handleGlobalConfigSet(customPath: string): Promise { - return handleUserConfigSet(customPath); -} - /** * Reset user config path to default */ @@ -212,8 +202,3 @@ export async function handleUserConfigReset(): Promise { console.log(chalk.gray('User config path is already at default.')); } } - -/** @deprecated Use handleUserConfigReset() instead */ -export async function handleGlobalConfigReset(): Promise { - return handleUserConfigReset(); -} diff --git a/src/commands/handlers.ts b/src/commands/handlers.ts index e48e8b0..10d6240 100644 --- a/src/commands/handlers.ts +++ b/src/commands/handlers.ts @@ -7,7 +7,7 @@ import chalk from 'chalk'; import fs from 'fs-extra'; import { RepoConfig, getUserConfigPath, getUserProjectConfig } from '../config.js'; import { SyncAdapter } from '../adapters/types.js'; -import { linkEntry, unlinkEntry, importEntry, ImportOptions } from '../sync-engine.js'; +import { importEntry, ImportOptions } from '../sync-engine.js'; import { addIgnoreEntry, removeIgnoreEntry } from '../utils.js'; import { addUserDependency, removeUserDependency, getCombinedProjectConfig, getRepoSourceConfig, getSourceDir, getTargetDir } from '../project-config.js'; @@ -20,8 +20,6 @@ export interface CommandContext { isLocal: boolean; /** When true, uses user config (user.json) instead of project config */ user?: boolean; - /** @deprecated Use user instead */ - global?: boolean; /** When true, skip gitignore management (used for user mode) */ skipIgnore?: boolean; } @@ -34,8 +32,6 @@ export interface AddOptions { targetDir?: string; /** When true, uses user config (user.json) instead of project config */ user?: boolean; - /** @deprecated Use user instead */ - global?: boolean; } /** @@ -45,7 +41,6 @@ export interface AddResult { sourceName: string; targetName: string; linked: boolean; - migrated: boolean; } /** @@ -96,8 +91,7 @@ export async function handleAdd( } } - let migrated = false; - if (ctx.user || ctx.global) { + if (ctx.user) { // User mode: link only (no 
manifest for user mode via forProject) const result = await adapter.link({ projectPath: ctx.projectPath, @@ -120,93 +114,47 @@ export async function handleAdd( return { sourceName: result.sourceName, targetName: result.targetName, - linked: result.linked, - migrated: false + linked: result.linked }; } // Project mode: use forProject().add() to do symlink + manifest in one step - if (adapter.forProject) { - const manager = adapter.forProject(ctx.projectPath, ctx.repo, ctx.isLocal); - const result = await manager.add(name, { - alias, - targetDir: options?.targetDir, - repoUrl: ctx.repo.url, - }); + const manager = adapter.forProject(ctx.projectPath, ctx.repo, ctx.isLocal); + const result = await manager.add(name, { + alias, + targetDir: options?.targetDir, + repoUrl: ctx.repo.url, + }); - // Ignore file management (ai-rules-sync specific, not dotfile layer responsibility) - if (result.linked) { - const relEntry = path.relative(path.resolve(ctx.projectPath), result.targetPath); - if (ctx.isLocal) { - const gitInfoExclude = path.join(ctx.projectPath, '.git', 'info', 'exclude'); - if (await fs.pathExists(path.dirname(gitInfoExclude))) { - await fs.ensureFile(gitInfoExclude); - if (await addIgnoreEntry(gitInfoExclude, relEntry, '# AI Rules Sync')) { - console.log(chalk.green(`Added "${relEntry}" to .git/info/exclude.`)); - } else { - console.log(chalk.gray(`"${relEntry}" already in .git/info/exclude.`)); - } + // Ignore file management (ai-rules-sync specific, not dotfile layer responsibility) + if (result.linked) { + const relEntry = path.relative(path.resolve(ctx.projectPath), result.targetPath); + if (ctx.isLocal) { + const gitInfoExclude = path.join(ctx.projectPath, '.git', 'info', 'exclude'); + if (await fs.pathExists(path.dirname(gitInfoExclude))) { + await fs.ensureFile(gitInfoExclude); + if (await addIgnoreEntry(gitInfoExclude, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .git/info/exclude.`)); } else { - 
console.log(chalk.yellow(`Warning: Could not find .git/info/exclude. Skipping automatic ignore for private entry.`)); - console.log(chalk.yellow(`Please manually add "${relEntry}" to your private ignore file.`)); + console.log(chalk.gray(`"${relEntry}" already in .git/info/exclude.`)); } } else { - const gitignorePath = path.join(ctx.projectPath, '.gitignore'); - if (await addIgnoreEntry(gitignorePath, relEntry, '# AI Rules Sync')) { - console.log(chalk.green(`Added "${relEntry}" to .gitignore.`)); - } else { - console.log(chalk.gray(`"${relEntry}" already in .gitignore.`)); - } + console.log(chalk.yellow(`Warning: Could not find .git/info/exclude. Skipping automatic ignore for private entry.`)); + console.log(chalk.yellow(`Please manually add "${relEntry}" to your private ignore file.`)); } - } - - const configFileName = ctx.isLocal ? 'ai-rules-sync.local.json' : 'ai-rules-sync.json'; - console.log(chalk.green(`Updated ${configFileName} dependency.`)); - - if (ctx.isLocal) { + } else { const gitignorePath = path.join(ctx.projectPath, '.gitignore'); - const added = await addIgnoreEntry(gitignorePath, 'ai-rules-sync.local.json', '# Local AI Rules Sync Config'); - if (added) { - console.log(chalk.green(`Added "ai-rules-sync.local.json" to .gitignore.`)); + if (await addIgnoreEntry(gitignorePath, relEntry, '# AI Rules Sync')) { + console.log(chalk.green(`Added "${relEntry}" to .gitignore.`)); + } else { + console.log(chalk.gray(`"${relEntry}" already in .gitignore.`)); } } - - return { - sourceName: result.sourceName, - targetName: result.targetName, - linked: result.linked, - migrated: false - }; } - // Legacy fallback: separate link + addDependency calls - const result = await adapter.link({ - projectPath: ctx.projectPath, - name, - repo: ctx.repo, - alias, - isLocal: ctx.isLocal, - targetDir: options?.targetDir, - skipIgnore: ctx.skipIgnore - }); - const depAlias = alias || (result.targetName === result.sourceName ? 
undefined : result.targetName); - const migration = await adapter.addDependency( - ctx.projectPath, - result.sourceName, - ctx.repo.url, - depAlias, - ctx.isLocal, - options?.targetDir - ); - migrated = migration.migrated; - const configFileName = ctx.isLocal ? 'ai-rules-sync.local.json' : 'ai-rules-sync.json'; console.log(chalk.green(`Updated ${configFileName} dependency.`)); - if (migrated) { - console.log(chalk.yellow('Detected legacy "cursor-rules*.json". Migrated to "ai-rules-sync*.json". Consider deleting the legacy files to avoid ambiguity.')); - } - if (ctx.isLocal) { const gitignorePath = path.join(ctx.projectPath, '.gitignore'); const added = await addIgnoreEntry(gitignorePath, 'ai-rules-sync.local.json', '# Local AI Rules Sync Config'); @@ -218,8 +166,7 @@ export async function handleAdd( return { sourceName: result.sourceName, targetName: result.targetName, - linked: result.linked, - migrated + linked: result.linked }; } @@ -228,7 +175,6 @@ export async function handleAdd( */ export interface RemoveResult { removedFrom: string[]; - migrated: boolean; } export interface RemoveCommandOptions { @@ -249,8 +195,7 @@ async function getConfigHitsForAlias( adapter: SyncAdapter, projectPath: string, alias: string, - isUser: boolean, - includeLegacy: boolean = true + isUser: boolean ): Promise { const [topLevel, subLevel] = adapter.configPath; const hits: string[] = []; @@ -281,26 +226,6 @@ async function getConfigHitsForAlias( } } - // Legacy support for Cursor rules. 
- if (includeLegacy && adapter.tool === 'cursor' && adapter.subtype === 'rules') { - const legacyMainPath = path.join(projectPath, 'cursor-rules.json'); - const legacyLocalPath = path.join(projectPath, 'cursor-rules.local.json'); - - if (await fs.pathExists(legacyMainPath)) { - const legacyMain = await fs.readJson(legacyMainPath); - if (legacyMain?.rules?.[alias]) { - hits.push('cursor-rules.json'); - } - } - - if (await fs.pathExists(legacyLocalPath)) { - const legacyLocal = await fs.readJson(legacyLocalPath); - if (legacyLocal?.rules?.[alias]) { - hits.push('cursor-rules.local.json'); - } - } - } - return hits; } @@ -376,8 +301,7 @@ export async function handleRemove( } return { - removedFrom: hits, - migrated: false + removedFrom: hits }; } @@ -391,71 +315,53 @@ export async function handleRemove( console.log(chalk.yellow(`"${alias}" was not found in user config.`)); } - return { removedFrom, migrated: false }; + return { removedFrom }; } // Project mode: use forProject().remove() to do symlink deletion + manifest update in one step - if (adapter.forProject) { - const removedFrom = await getConfigHitsForAlias(adapter, projectPath, alias, false, false); - const target = await resolveRemoveTargetPath(adapter, projectPath, alias, false); - const projectRoot = path.resolve(projectPath); - const targetDirAbsolute = path.dirname(target.targetPath); - const ignoreEntries = new Set(); - - const addIgnoreCandidate = (baseName: string): void => { - const relativePath = path.relative(projectRoot, path.join(targetDirAbsolute, baseName)); - if (relativePath && relativePath !== '.') { - ignoreEntries.add(relativePath); - } - }; - - addIgnoreCandidate(alias); - if (target.exists) { - addIgnoreCandidate(path.basename(target.targetPath)); - } - - const suffixes = adapter.fileSuffixes || adapter.hybridFileSuffixes; - if (suffixes && suffixes.length > 0) { - for (const suffix of suffixes) { - if (!alias.endsWith(suffix)) { - addIgnoreCandidate(`${alias}${suffix}`); - } - } + const 
removedFrom = await getConfigHitsForAlias(adapter, projectPath, alias, false); + const target = await resolveRemoveTargetPath(adapter, projectPath, alias, false); + const projectRoot = path.resolve(projectPath); + const targetDirAbsolute = path.dirname(target.targetPath); + const ignoreEntries = new Set(); + + const addIgnoreCandidate = (baseName: string): void => { + const relativePath = path.relative(projectRoot, path.join(targetDirAbsolute, baseName)); + if (relativePath && relativePath !== '.') { + ignoreEntries.add(relativePath); } + }; - await adapter.forProject(projectPath, null, false).remove(alias); + addIgnoreCandidate(alias); + if (target.exists) { + addIgnoreCandidate(path.basename(target.targetPath)); + } - // Ignore cleanup — try both gitignore and git/info/exclude since we don't know - // which was used when the entry was originally added - const gitignorePath = path.join(projectPath, '.gitignore'); - const gitInfoExclude = path.join(projectPath, '.git', 'info', 'exclude'); - for (const entry of ignoreEntries) { - if (await removeIgnoreEntry(gitignorePath, entry)) { - console.log(chalk.green(`Removed "${entry}" from .gitignore.`)); - } - if (await removeIgnoreEntry(gitInfoExclude, entry)) { - console.log(chalk.green(`Removed "${entry}" from .git/info/exclude.`)); + const suffixes = adapter.fileSuffixes || adapter.hybridFileSuffixes; + if (suffixes && suffixes.length > 0) { + for (const suffix of suffixes) { + if (!alias.endsWith(suffix)) { + addIgnoreCandidate(`${alias}${suffix}`); } } - - return { removedFrom, migrated: false }; } - // Legacy fallback - await adapter.unlink(projectPath, alias); - const { removedFrom, migrated } = await adapter.removeDependency(projectPath, alias); + await adapter.forProject(projectPath, null, false).remove(alias); - if (removedFrom.length > 0) { - console.log(chalk.green(`Removed "${alias}" from configuration: ${removedFrom.join(', ')}`)); - } else { - console.log(chalk.yellow(`"${alias}" was not found in any 
configuration file.`)); - } - - if (migrated) { - console.log(chalk.yellow('Detected legacy "cursor-rules*.json". Migrated to "ai-rules-sync*.json". Consider deleting the legacy files to avoid ambiguity.')); + // Ignore cleanup — try both gitignore and git/info/exclude since we don't know + // which was used when the entry was originally added + const gitignorePath = path.join(projectPath, '.gitignore'); + const gitInfoExclude = path.join(projectPath, '.git', 'info', 'exclude'); + for (const entry of ignoreEntries) { + if (await removeIgnoreEntry(gitignorePath, entry)) { + console.log(chalk.green(`Removed "${entry}" from .gitignore.`)); + } + if (await removeIgnoreEntry(gitInfoExclude, entry)) { + console.log(chalk.green(`Removed "${entry}" from .git/info/exclude.`)); + } } - return { removedFrom, migrated }; + return { removedFrom }; } /** diff --git a/src/commands/install.ts b/src/commands/install.ts index b5c9fbc..4e5f7bc 100644 --- a/src/commands/install.ts +++ b/src/commands/install.ts @@ -7,46 +7,12 @@ import os from 'os'; import chalk from 'chalk'; import fs from 'fs-extra'; import { SyncAdapter } from '../adapters/types.js'; -import { getCombinedProjectConfig, getConfigSource, RuleEntry, ProjectConfig } from '../project-config.js'; +import { RuleEntry } from '../project-config.js'; import { getConfig, setConfig, getReposBaseDir, getUserProjectConfig, getUserConfigPath, RepoConfig } from '../config.js'; import { cloneOrUpdateRepo } from '../git.js'; import { parseConfigEntry } from './helpers.js'; import type { RepoResolverFn } from '../dotany/types.js'; -const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; - -/** - * Read local config entries for a specific adapter - */ -async function getLocalEntries( - projectPath: string, - adapter: SyncAdapter -): Promise> { - const source = await getConfigSource(projectPath); - const localFileName = source === 'new' ? 
'ai-rules-sync.local.json' : 'cursor-rules.local.json'; - const localPath = path.join(projectPath, localFileName); - - if (!await fs.pathExists(localPath)) { - return {}; - } - - try { - const raw = await fs.readJson(localPath); - const [topLevel, subLevel] = adapter.configPath; - - if (source === 'new') { - return (raw as any)?.[topLevel]?.[subLevel] || {}; - } else { - // Legacy format only has cursor.rules - return adapter.configPath[0] === 'cursor' && adapter.configPath[1] === 'rules' - ? (raw?.rules || {}) - : {}; - } - } catch { - return {}; - } -} - /** * Find or create a repo configuration */ @@ -83,75 +49,26 @@ async function findOrCreateRepo( return repoConfig; } -/** - * Get entries from project config for a specific adapter - */ -function getEntriesFromConfig( - config: ProjectConfig, - adapter: SyncAdapter -): Record | undefined { - const [topLevel, subLevel] = adapter.configPath; - return (config as any)?.[topLevel]?.[subLevel]; -} - /** * Generic install function - works with any adapter. - * Uses manager.apply() when the adapter supports forProject() (dotfile API). - * Falls back to manual loop for adapters without forProject(). + * Uses manager.apply() from the dotfile API. 
*/ export async function installEntriesForAdapter( adapter: SyncAdapter, projectPath: string ): Promise { - if (adapter.forProject) { - // Modern path: use manager.apply() which reads manifest and re-links all entries - const globalConfig = await getConfig(); - const repos = globalConfig.repos || {}; - - const repoResolver: RepoResolverFn = (repoUrl: string, entryName: string) => - findOrCreateRepo(repos, repoUrl, entryName); - - const manager = adapter.forProject(projectPath, repoResolver); - const result = await manager.apply(); - - if (result.linked.length === 0 && result.skipped.length === 0) { - console.log(chalk.yellow(`No ${adapter.tool} ${adapter.subtype} found in ai-rules-sync*.json.`)); - return; - } - console.log(chalk.green(`All ${adapter.tool} ${adapter.subtype} installed successfully.`)); - return; - } - - // Legacy fallback: manual loop for adapters without forProject() - const config = await getCombinedProjectConfig(projectPath); - const entries = getEntriesFromConfig(config, adapter); - - if (!entries || Object.keys(entries).length === 0) { - console.log(chalk.yellow(`No ${adapter.tool} ${adapter.subtype} found in ai-rules-sync*.json.`)); - return; - } - const globalConfig = await getConfig(); const repos = globalConfig.repos || {}; - const localEntries = await getLocalEntries(projectPath, adapter); - - for (const [key, value] of Object.entries(entries)) { - const { repoUrl, entryName, alias } = parseConfigEntry(key, value); - console.log(chalk.blue(`Installing ${adapter.tool} ${adapter.subtype} "${entryName}" (as "${key}") from ${repoUrl}...`)); + const repoResolver: RepoResolverFn = (repoUrl: string, entryName: string) => + findOrCreateRepo(repos, repoUrl, entryName); - const repoConfig = await findOrCreateRepo(repos, repoUrl, entryName); - const isLocal = Object.prototype.hasOwnProperty.call(localEntries, key); - const targetDir = typeof value === 'object' && value.targetDir ? 
value.targetDir : undefined; + const manager = adapter.forProject(projectPath, repoResolver); + const result = await manager.apply(); - await adapter.link({ - projectPath, - name: entryName, - repo: repoConfig, - alias, - isLocal, - targetDir - }); + if (result.linked.length === 0 && result.skipped.length === 0) { + console.log(chalk.yellow(`No ${adapter.tool} ${adapter.subtype} found in ai-rules-sync*.json.`)); + return; } console.log(chalk.green(`All ${adapter.tool} ${adapter.subtype} installed successfully.`)); @@ -211,13 +128,6 @@ export async function installUserEntriesForAdapter( console.log(chalk.green(`All user ${adapter.tool} ${adapter.subtype} installed successfully.`)); } -/** @deprecated Use installUserEntriesForAdapter() instead */ -export async function installGlobalEntriesForAdapter( - adapter: SyncAdapter -): Promise { - return installUserEntriesForAdapter(adapter); -} - /** * Install all user entries for all adapters */ @@ -248,11 +158,3 @@ export async function installAllUserEntries( return { total }; } - -/** @deprecated Use installAllUserEntries() instead */ -export async function installAllGlobalEntries( - adapters: SyncAdapter[] -): Promise<{ total: number }> { - return installAllUserEntries(adapters); -} - diff --git a/src/completion.ts b/src/completion.ts index 85a66ec..0792935 100644 --- a/src/completion.ts +++ b/src/completion.ts @@ -11,16 +11,6 @@ export type ShellType = 'bash' | 'zsh' | 'fish' | 'unknown'; export const COMPLETION_START_MARKER = '# >>> ais shell completion >>>'; export const COMPLETION_END_MARKER = '# <<< ais shell completion <<<'; -// Legacy patterns for backward compatibility (removing old format) -const LEGACY_PATTERNS = [ - '# ais shell completion', - '# Save and source AIS completion script', - 'ais completion fish | source', - 'eval "$(ais completion)"', - 'ais completion > ~/.zsh/ais_completion.zsh', - 'source ~/.zsh/ais_completion.zsh', -]; - /** * Detect the current shell type from environment variable */ @@ 
-82,7 +72,7 @@ export function getCompletionSnippet(shell: ShellType): string { /** * Check if completion is already installed in the config file - * Detects both new block format and legacy format + * Detects marker-based block format. */ export async function isCompletionInstalled(configPath: string): Promise { if (!await fs.pathExists(configPath)) { @@ -90,14 +80,7 @@ export async function isCompletionInstalled(configPath: string): Promise { } /** - * Remove all ais completion code from content (both new block format and legacy format) + * Remove marker-based ais completion block from content. */ export function removeCompletionCode(content: string): string { - // First, remove new block format using regex + // Remove block format using regex. // Match: optional newline, start marker, any content, end marker, optional newline const blockRegex = new RegExp( `\\n?${escapeRegex(COMPLETION_START_MARKER)}[\\s\\S]*?${escapeRegex(COMPLETION_END_MARKER)}\\n?`, 'g' ); content = content.replace(blockRegex, '\n'); - - // Then, remove any legacy patterns (line by line) - const lines = content.split('\n'); - const filteredLines = lines.filter(line => { - return !LEGACY_PATTERNS.some(pattern => line.includes(pattern)); - }); - - // Clean up multiple consecutive empty lines - let result = filteredLines.join('\n'); - result = result.replace(/\n{3,}/g, '\n\n'); - - return result; + return content.replace(/\n{3,}/g, '\n\n'); } /** diff --git a/src/config.ts b/src/config.ts index dd59315..d9c29a8 100644 --- a/src/config.ts +++ b/src/config.ts @@ -18,10 +18,6 @@ export interface Config { currentRepo?: string; // name of the current repo repos: Record; completionInstalled?: boolean; // whether shell completion setup has been handled - // Deprecated field for migration - repoUrl?: string; - // Deprecated: renamed to userConfigPath - globalConfigPath?: string; // Optional custom path for user.json (supports dotfiles integration) userConfigPath?: string; } @@ -30,33 +26,7 @@ export 
async function getConfig(): Promise { try { if (await fs.pathExists(CONFIG_FILE)) { const config = await fs.readJson(CONFIG_FILE); - - // Migration logic for old config format - if (config.repoUrl && !config.repos) { - const defaultName = 'default'; - const defaultPath = path.join(REPOS_BASE_DIR, defaultName); - - // Move old repo dir if exists - const oldRepoDir = path.join(CONFIG_DIR, 'repo'); - if (await fs.pathExists(oldRepoDir)) { - await fs.ensureDir(REPOS_BASE_DIR); - if (!await fs.pathExists(defaultPath)) { - await fs.move(oldRepoDir, defaultPath); - } - } - - return { - currentRepo: defaultName, - repos: { - [defaultName]: { - name: defaultName, - url: config.repoUrl, - path: defaultPath - } - } - }; - } - return config; + return { repos: {}, ...config }; } } catch (error) { // ignore error @@ -74,11 +44,6 @@ export async function setConfig(config: Partial) { newConfig.repos = {}; } - // Clean up deprecated field - if ('repoUrl' in newConfig) { - delete (newConfig as any).repoUrl; - } - await fs.writeJson(CONFIG_FILE, newConfig, { spaces: 2 }); } @@ -97,39 +62,18 @@ export async function getCurrentRepo(): Promise { /** * Get the path to the user project config file. * Uses custom path from config if set, otherwise defaults to ~/.config/ai-rules-sync/user.json. - * Handles migration from old globalConfigPath and global.json. 
*/ export async function getUserConfigPath(): Promise { const config = await getConfig(); - // Migration: rename globalConfigPath → userConfigPath - if (config.globalConfigPath && !config.userConfigPath) { - const migratedPath = config.globalConfigPath; - delete (config as any).globalConfigPath; - config.userConfigPath = migratedPath; - await fs.ensureDir(CONFIG_DIR); - await fs.writeJson(CONFIG_FILE, config, { spaces: 2 }); - } - if (config.userConfigPath) { // Support ~ expansion return config.userConfigPath.replace(/^~/, os.homedir()); } - // Migration: rename global.json → user.json if user.json doesn't exist - const oldFile = path.join(CONFIG_DIR, 'global.json'); - if (!await fs.pathExists(DEFAULT_USER_CONFIG_FILE) && await fs.pathExists(oldFile)) { - await fs.move(oldFile, DEFAULT_USER_CONFIG_FILE); - } - return DEFAULT_USER_CONFIG_FILE; } -/** @deprecated Use getUserConfigPath() instead */ -export async function getGlobalConfigPath(): Promise { - return getUserConfigPath(); -} - /** * Get the user project config (stored in user.json). */ @@ -145,11 +89,6 @@ export async function getUserProjectConfig(): Promise { - return getUserProjectConfig(); -} - /** * Save the user project config to user.json. 
*/ @@ -158,8 +97,3 @@ export async function saveUserProjectConfig(projectConfig: import('./project-con await fs.ensureDir(path.dirname(userPath)); await fs.writeJson(userPath, projectConfig, { spaces: 2 }); } - -/** @deprecated Use saveUserProjectConfig() instead */ -export async function saveGlobalProjectConfig(projectConfig: import('./project-config.js').ProjectConfig): Promise { - return saveUserProjectConfig(projectConfig); -} diff --git a/src/index.ts b/src/index.ts index 9719b8d..c4b2234 100644 --- a/src/index.ts +++ b/src/index.ts @@ -26,10 +26,10 @@ import { DefaultMode } from './commands/helpers.js'; import { handleAdd, handleRemove, handleImport } from './commands/handlers.js'; -import { installEntriesForAdapter, installEntriesForTool, installAllUserEntries, installAllGlobalEntries } from './commands/install.js'; +import { installEntriesForAdapter, installEntriesForTool, installAllUserEntries } from './commands/install.js'; import { discoverAllEntries, handleAddAll } from './commands/add-all.js'; import { parseSourceDirParams } from './cli/source-dir-parser.js'; -import { setRepoSourceDir, clearRepoSourceDir, showRepoConfig, listRepos, handleUserConfigShow, handleUserConfigSet, handleUserConfigReset, handleGlobalConfigShow, handleGlobalConfigSet, handleGlobalConfigReset } from './commands/config.js'; +import { setRepoSourceDir, clearRepoSourceDir, showRepoConfig, listRepos, handleUserConfigShow, handleUserConfigSet, handleUserConfigReset } from './commands/config.js'; import { getFormattedVersion } from './commands/version.js'; import { checkRepositories, updateRepositories, initRulesRepository } from './commands/lifecycle.js'; @@ -422,15 +422,14 @@ program .command('check') .description('Check for repository updates used by current config') .option('-u, --user', 'Check repositories from user config') - .option('-g, --global', 'Check repositories from user config (deprecated alias for --user)') .option('--no-fetch', 'Skip git fetch before checking') 
.option('--json', 'Output results as JSON') - .action(async (cmdOptions: { user?: boolean; global?: boolean; fetch?: boolean; json?: boolean }) => { + .action(async (cmdOptions: { user?: boolean; fetch?: boolean; json?: boolean }) => { try { const opts = program.opts(); const result = await checkRepositories({ projectPath: process.cwd(), - user: cmdOptions.user || cmdOptions.global, + user: cmdOptions.user, fetch: cmdOptions.fetch, target: opts.target }); @@ -471,15 +470,14 @@ program .command('update') .description('Update repositories used by current config and reinstall entries') .option('-u, --user', 'Update repositories from user config') - .option('-g, --global', 'Update repositories from user config (deprecated alias for --user)') .option('--dry-run', 'Preview updates without pulling repositories') .option('--json', 'Output results as JSON') - .action(async (cmdOptions: { user?: boolean; global?: boolean; dryRun?: boolean; json?: boolean }) => { + .action(async (cmdOptions: { user?: boolean; dryRun?: boolean; json?: boolean }) => { try { const opts = program.opts(); const result = await updateRepositories({ projectPath: process.cwd(), - user: cmdOptions.user || cmdOptions.global, + user: cmdOptions.user, dryRun: cmdOptions.dryRun, target: opts.target }); @@ -705,10 +703,9 @@ program .command('install') .description('Install all entries from config, or --user for user config') .option('-u, --user', 'Install all user config entries (~/.config/ai-rules-sync/user.json)') - .option('-g, --global', 'Install all user config entries (deprecated alias for --user)') - .action(async (cmdOptions: { user?: boolean; global?: boolean }) => { + .action(async (cmdOptions: { user?: boolean }) => { try { - if (cmdOptions.user || cmdOptions.global) { + if (cmdOptions.user) { await installAllUserEntries(adapterRegistry.all()); return; } @@ -1191,7 +1188,7 @@ registerAdapterCommands({ adapter: getAdapter('claude', 'skills'), parentCommand const claudeAgents = 
claude.command('agents').description('Manage Claude agents'); registerAdapterCommands({ adapter: getAdapter('claude', 'agents'), parentCommand: claudeAgents, programOpts: () => program.opts() }); -// claude md subgroup (for CLAUDE.md files, supports --global) +// claude md subgroup (for CLAUDE.md files) const claudeMd = claude.command('md').description('Manage Claude CLAUDE.md files (.claude/CLAUDE.md)'); registerAdapterCommands({ adapter: getAdapter('claude', 'md'), parentCommand: claudeMd, programOpts: () => program.opts() }); @@ -1553,7 +1550,7 @@ registerAdapterCommands({ adapter: getAdapter('codex', 'rules'), parentCommand: const codexSkills = codex.command('skills').description('Manage Codex skills'); registerAdapterCommands({ adapter: getAdapter('codex', 'skills'), parentCommand: codexSkills, programOpts: () => program.opts() }); -// codex md subgroup (for AGENTS.md files, supports --global) +// codex md subgroup (for AGENTS.md files) const codexMd = codex.command('md').description('Manage Codex AGENTS.md files (.codex/AGENTS.md)'); registerAdapterCommands({ adapter: getAdapter('codex', 'md'), parentCommand: codexMd, programOpts: () => program.opts() }); @@ -1684,7 +1681,7 @@ registerAdapterCommands({ adapter: getAdapter('gemini', 'skills'), parentCommand const geminiAgents = gemini.command('agents').description('Manage Gemini agents'); registerAdapterCommands({ adapter: getAdapter('gemini', 'agents'), parentCommand: geminiAgents, programOpts: () => program.opts() }); -// gemini md subgroup (for GEMINI.md files, supports --global) +// gemini md subgroup (for GEMINI.md files) const geminiMd = gemini.command('md').description('Manage Gemini GEMINI.md files (.gemini/GEMINI.md)'); registerAdapterCommands({ adapter: getAdapter('gemini', 'md'), parentCommand: geminiMd, programOpts: () => program.opts() }); @@ -2189,47 +2186,6 @@ configUser } }); -// config global subgroup (deprecated alias for config user) -const configGlobal = configCmd - .command('global') - 
.description('Deprecated: use "ais config user" instead'); - -configGlobal - .command('show') - .description('Show current user config path (deprecated: use "ais config user show")') - .action(async () => { - try { - await handleUserConfigShow(); - } catch (error: any) { - console.error(chalk.red('Error showing user config path:'), error.message); - process.exit(1); - } - }); - -configGlobal - .command('set ') - .description('Set custom user config path (deprecated: use "ais config user set")') - .action(async (customPath: string) => { - try { - await handleUserConfigSet(customPath); - } catch (error: any) { - console.error(chalk.red('Error setting user config path:'), error.message); - process.exit(1); - } - }); - -configGlobal - .command('reset') - .description('Reset user config path to default (deprecated: use "ais config user reset")') - .action(async () => { - try { - await handleUserConfigReset(); - } catch (error: any) { - console.error(chalk.red('Error resetting user config path:'), error.message); - process.exit(1); - } - }); - // ============ User command group ============ const userCmd = program .command('user') diff --git a/src/link.ts b/src/link.ts deleted file mode 100644 index c01863a..0000000 --- a/src/link.ts +++ /dev/null @@ -1,8 +0,0 @@ -/** - * Link module - re-exports from sync engine - * - * This file exists for backward compatibility. - * New code should import directly from './sync-engine.js' or use adapters. - */ - -export { linkEntry, unlinkEntry, importEntry, ImportOptions } from './sync-engine.js'; diff --git a/src/project-config.ts b/src/project-config.ts index d884bf5..be1b574 100644 --- a/src/project-config.ts +++ b/src/project-config.ts @@ -5,10 +5,6 @@ import { getUserConfigPath, getUserProjectConfig, saveUserProjectConfig } from ' const CONFIG_FILENAME = 'ai-rules-sync.json'; const LOCAL_CONFIG_FILENAME = 'ai-rules-sync.local.json'; -// Legacy (temporary) compatibility. 
Intentionally centralized so it can be removed in a future version. -const LEGACY_CONFIG_FILENAME = 'cursor-rules.json'; -const LEGACY_LOCAL_CONFIG_FILENAME = 'cursor-rules.local.json'; - function readNestedStringValue(source: unknown, tool: string, subtype: string): string | undefined { if (!source || typeof source !== 'object') { return undefined; @@ -106,7 +102,7 @@ export interface RepoSourceConfig { [tool: string]: any; } -export type ConfigSource = 'new' | 'legacy' | 'none'; +export type ConfigSource = 'new' | 'none'; async function readConfigFile(filePath: string): Promise { if (await fs.pathExists(filePath)) { @@ -126,21 +122,6 @@ async function hasAnyNewConfig(projectPath: string): Promise { ); } -async function hasAnyLegacyConfig(projectPath: string): Promise { - return ( - await fs.pathExists(path.join(projectPath, LEGACY_CONFIG_FILENAME)) || - await fs.pathExists(path.join(projectPath, LEGACY_LOCAL_CONFIG_FILENAME)) - ); -} - -function legacyToNew(legacy: { rules?: Record }): ProjectConfig { - return { - cursor: { - rules: legacy.rules || {} - } - }; -} - /** * Merge two ProjectConfig objects dynamically. * Registry-driven: works with any tool/subtype combination present in the configs @@ -184,14 +165,13 @@ function mergeCombined(main: ProjectConfig, local: ProjectConfig): ProjectConfig export async function getConfigSource(projectPath: string): Promise { if (await hasAnyNewConfig(projectPath)) return 'new'; - if (await hasAnyLegacyConfig(projectPath)) return 'legacy'; return 'none'; } /** * Read repository-side source configuration (used when `projectPath` is a rules repo). * Returns the full config which may contain sourceDir. - * Supports both new (sourceDir) and legacy (flat cursor/copilot with string values) formats. + * Supports sourceDir-based repository configuration. 
*/ export async function getRepoSourceConfig(projectPath: string): Promise { const newPath = path.join(projectPath, CONFIG_FILENAME); @@ -203,13 +183,7 @@ export async function getRepoSourceConfig(projectPath: string): Promise. values are strings (source dirs). - const { hasAny, config: legacyRepoConfig } = buildRepoSourceFromNestedStrings(config, config.rootPath); - if (hasAny) { - return legacyRepoConfig; - } - - // Not a rules repo config (no sourceDir, no string values) + // Not a rules repo config (no sourceDir) return { rootPath: config.rootPath }; } return {}; @@ -289,50 +263,9 @@ export function getTargetDir( } export async function getCombinedProjectConfig(projectPath: string): Promise { - const source = await getConfigSource(projectPath); - - if (source === 'new') { - const main = await readConfigFile(path.join(projectPath, CONFIG_FILENAME)); - const local = await readConfigFile(path.join(projectPath, LOCAL_CONFIG_FILENAME)); - return mergeCombined(main, local); - } - - if (source === 'legacy') { - const legacyMain = await readConfigFile<{ rootPath?: string; rules?: Record }>( - path.join(projectPath, LEGACY_CONFIG_FILENAME) - ); - const legacyLocal = await readConfigFile<{ rootPath?: string; rules?: Record }>( - path.join(projectPath, LEGACY_LOCAL_CONFIG_FILENAME) - ); - return mergeCombined(legacyToNew(legacyMain), legacyToNew(legacyLocal)); - } - - return mergeCombined({}, {}); -} - -/** - * Migrate legacy cursor-rules*.json into new ai-rules-sync*.json files. - * This is ONLY called by write paths (add/remove) to keep legacy-compat removable. 
- */ -export async function migrateLegacyToNew(projectPath: string): Promise<{ migrated: boolean }> { - const source = await getConfigSource(projectPath); - if (source !== 'legacy') return { migrated: false }; - - const legacyMainPath = path.join(projectPath, LEGACY_CONFIG_FILENAME); - const legacyLocalPath = path.join(projectPath, LEGACY_LOCAL_CONFIG_FILENAME); - - const legacyMain = await readConfigFile<{ rules?: Record }>(legacyMainPath); - const legacyLocal = await readConfigFile<{ rules?: Record }>(legacyLocalPath); - - const newMain = legacyToNew(legacyMain); - const newLocal = legacyToNew(legacyLocal); - - await fs.writeJson(path.join(projectPath, CONFIG_FILENAME), newMain, { spaces: 2 }); - if (Object.keys(newLocal.cursor?.rules || {}).length > 0) { - await fs.writeJson(path.join(projectPath, LOCAL_CONFIG_FILENAME), newLocal, { spaces: 2 }); - } - - return { migrated: true }; + const main = await readConfigFile(path.join(projectPath, CONFIG_FILENAME)); + const local = await readConfigFile(path.join(projectPath, LOCAL_CONFIG_FILENAME)); + return mergeCombined(main, local); } async function readNewConfigForWrite(projectPath: string, isLocal: boolean): Promise { @@ -357,8 +290,7 @@ export async function addDependencyGeneric( alias?: string, isLocal: boolean = false, targetDir?: string -): Promise<{ migrated: boolean }> { - const migration = await migrateLegacyToNew(projectPath); +): Promise { const config = await readNewConfigForWrite(projectPath, isLocal); const [topLevel, subLevel] = configPath; @@ -386,7 +318,6 @@ export async function addDependencyGeneric( (config as any)[topLevel][subLevel][targetName] = entryValue; await writeNewConfig(projectPath, isLocal, config); - return migration; } /** @@ -396,8 +327,7 @@ export async function removeDependencyGeneric( projectPath: string, configPath: [string, string], alias: string -): Promise<{ removedFrom: string[]; migrated: boolean }> { - const migration = await migrateLegacyToNew(projectPath); +): Promise<{ 
removedFrom: string[] }> { const removedFrom: string[] = []; const [topLevel, subLevel] = configPath; @@ -418,7 +348,7 @@ export async function removeDependencyGeneric( removedFrom.push(LOCAL_CONFIG_FILENAME); } - return { removedFrom, migrated: migration.migrated }; + return { removedFrom }; } /** @@ -455,17 +385,6 @@ export async function addUserDependency( await saveUserProjectConfig(config); } -/** @deprecated Use addUserDependency() instead */ -export async function addGlobalDependency( - configPath: [string, string], - name: string, - repoUrl: string, - alias?: string, - targetDir?: string -): Promise { - return addUserDependency(configPath, name, repoUrl, alias, targetDir); -} - /** * Remove a dependency from the user project config (user.json). * Used when --user flag is set. @@ -487,11 +406,3 @@ export async function removeUserDependency( return { removedFrom }; } - -/** @deprecated Use removeUserDependency() instead */ -export async function removeGlobalDependency( - configPath: [string, string], - alias: string -): Promise<{ removedFrom: string[] }> { - return removeUserDependency(configPath, alias); -} diff --git a/src/sync-engine.ts b/src/sync-engine.ts index 489ca6a..5c1e26d 100644 --- a/src/sync-engine.ts +++ b/src/sync-engine.ts @@ -271,42 +271,14 @@ export async function importEntry( const destPath = path.join(repoDir, sourceDir, name); const relativePath = path.relative(repoDir, destPath); - let sourceName: string; - let targetName: string; - - if (adapter.forProject) { - // Modern path: delegate all fs operations (copy, remove, symlink) to manager.import() - const manager = adapter.forProject(projectPath, repo, options.isLocal); - const linkResult = await manager.import(targetPath, name, { - force, - repoUrl: repo.url, - }); - sourceName = linkResult.sourceName; - targetName = linkResult.targetName; - } else { - // Legacy path: manual fs operations - if (!await fs.pathExists(targetPath)) { - throw new Error(`Entry "${name}" not found in project 
at ${targetPath}`); - } - const stats = await fs.lstat(targetPath); - if (stats.isSymbolicLink()) { - throw new Error(`Entry "${name}" is already a symlink (already managed by ai-rules-sync)`); - } - if (await fs.pathExists(destPath)) { - if (!force) { - throw new Error(`Entry "${name}" already exists in rules repository at ${destPath}. Use --force to overwrite.`); - } - console.log(chalk.yellow(`Entry "${name}" already exists in repository. Overwriting (--force)...`)); - await fs.remove(destPath); - } - await fs.copy(targetPath, destPath); - console.log(chalk.green(`Copied "${name}" to rules repository.`)); - await fs.remove(targetPath); - console.log(chalk.green(`Removed original from project.`)); - const linkResult = await adapter.link(options); - sourceName = linkResult.sourceName; - targetName = linkResult.targetName; - } + // Delegate all fs operations (copy, remove, symlink) to manager.import() + const manager = adapter.forProject(projectPath, repo, options.isLocal); + const linkResult = await manager.import(targetPath, name, { + force, + repoUrl: repo.url, + }); + const sourceName = linkResult.sourceName; + const targetName = linkResult.targetName; // Git add and commit (ai-rules-sync specific, stays in sync-engine) await execa('git', ['add', relativePath], { cwd: repoDir }); diff --git a/tests/completion.test.ts b/tests/completion.test.ts index 3678315..3cad30a 100644 --- a/tests/completion.test.ts +++ b/tests/completion.test.ts @@ -146,20 +146,6 @@ describe('Completion Module', () => { expect(result).toBe(true); }); - it('should return true if config contains legacy ais completion marker', async () => { - vi.mocked(fs.pathExists).mockResolvedValue(true); - vi.mocked(fs.readFile).mockResolvedValue('# some config\n# ais shell completion\neval "$(ais completion)"'); - const result = await completionModule.isCompletionInstalled('/mock/path/.zshrc'); - expect(result).toBe(true); - }); - - it('should return true if config contains ais completion command', async 
() => { - vi.mocked(fs.pathExists).mockResolvedValue(true); - vi.mocked(fs.readFile).mockResolvedValue('# some config\neval "$(ais completion)"'); - const result = await completionModule.isCompletionInstalled('/mock/path/.zshrc'); - expect(result).toBe(true); - }); - it('should return false if config does not contain ais completion', async () => { vi.mocked(fs.pathExists).mockResolvedValue(true); vi.mocked(fs.readFile).mockResolvedValue('# some other config\nexport PATH=$PATH:/usr/local/bin'); @@ -179,45 +165,17 @@ describe('Completion Module', () => { expect(result).toContain('# more config'); }); - it('should remove legacy bash format', () => { - const content = '# some config\n# ais shell completion\neval "$(ais completion)"\n# more config'; - const result = completionModule.removeCompletionCode(content); - expect(result).not.toContain('# ais shell completion'); - expect(result).not.toContain('eval "$(ais completion)"'); - expect(result).toContain('# some config'); - expect(result).toContain('# more config'); - }); - - it('should remove legacy zsh format with all lines', () => { - const content = '# some config\n# ais shell completion\n# Save and source AIS completion script\nais completion > ~/.zsh/ais_completion.zsh 2>/dev/null && source ~/.zsh/ais_completion.zsh\n# more config'; - const result = completionModule.removeCompletionCode(content); - expect(result).not.toContain('# ais shell completion'); - expect(result).not.toContain('# Save and source AIS completion script'); - expect(result).not.toContain('ais completion > ~/.zsh/ais_completion.zsh'); - expect(result).toContain('# some config'); - expect(result).toContain('# more config'); - }); - - it('should remove legacy fish format', () => { - const content = '# some config\n# ais shell completion\nais completion fish | source\n# more config'; - const result = completionModule.removeCompletionCode(content); - expect(result).not.toContain('# ais shell completion'); - expect(result).not.toContain('ais 
completion fish | source'); - expect(result).toContain('# some config'); - expect(result).toContain('# more config'); - }); - - it('should handle mixed old and new formats', () => { - const content = `# config\n# ais shell completion\neval "$(ais completion)"\n${completionModule.COMPLETION_START_MARKER}\nsome content\n${completionModule.COMPLETION_END_MARKER}\n# end`; + it('should remove completion block while preserving unrelated lines', () => { + const content = `# config\nexport PATH=/usr/bin:$PATH\n${completionModule.COMPLETION_START_MARKER}\nsome content\n${completionModule.COMPLETION_END_MARKER}\n# end`; const result = completionModule.removeCompletionCode(content); - expect(result).not.toContain('# ais shell completion'); expect(result).not.toContain(completionModule.COMPLETION_START_MARKER); + expect(result).toContain('export PATH=/usr/bin:$PATH'); expect(result).toContain('# config'); expect(result).toContain('# end'); }); it('should clean up multiple consecutive empty lines', () => { - const content = '# config\n\n\n\n# ais shell completion\neval "$(ais completion)"\n\n\n# end'; + const content = '# config\n\n\n\nline\n\n\n# end'; const result = completionModule.removeCompletionCode(content); // Should not have more than 2 consecutive newlines expect(result).not.toMatch(/\n{3,}/); @@ -239,15 +197,6 @@ describe('Completion Module', () => { expect(result.success).toBe(true); }); - it('should return alreadyInstalled true if completion exists (legacy format)', async () => { - vi.mocked(fs.pathExists).mockResolvedValue(true); - vi.mocked(fs.readFile).mockResolvedValue('# ais shell completion\neval "$(ais completion)"'); - - const result = await completionModule.installCompletionToFile('zsh'); - expect(result.alreadyInstalled).toBe(true); - expect(result.success).toBe(true); - }); - it('should append completion snippet if not installed', async () => { vi.mocked(fs.pathExists).mockResolvedValue(true); vi.mocked(fs.readFile).mockResolvedValue('# existing 
config'); diff --git a/tests/config.test.ts b/tests/config.test.ts index 7f2dc46..5c99706 100644 --- a/tests/config.test.ts +++ b/tests/config.test.ts @@ -45,20 +45,6 @@ describe('Config Module', () => { expect(config).toEqual(mockConfig); }); - it('should migrate legacy config correctly', async () => { - const legacyConfig = { repoUrl: 'http://old.git' }; - vi.mocked(fs.pathExists).mockResolvedValueOnce(true); // config exists - vi.mocked(fs.readJson).mockResolvedValue(legacyConfig); - // For migration check - vi.mocked(fs.pathExists).mockResolvedValue(false); // old repo dir check or default path check - - const config = await configModule.getConfig(); - - expect(config.currentRepo).toBe('default'); - expect(config.repos.default.url).toBe('http://old.git'); - expect(config.repos.default.name).toBe('default'); - }); - it('should save config correctly', async () => { vi.mocked(fs.pathExists).mockResolvedValue(false); // no existing config @@ -79,16 +65,4 @@ describe('Config Module', () => { ); }); - it('should remove legacy repoUrl field when saving', async () => { - vi.mocked(fs.pathExists).mockResolvedValue(false); - - await configModule.setConfig({ repoUrl: 'legacy' } as any); - - expect(fs.writeJson).toHaveBeenCalledWith( - mockConfigFile, - expect.not.objectContaining({ repoUrl: 'legacy' }), - { spaces: 2 } - ); - }); }); - diff --git a/tests/project-config.test.ts b/tests/project-config.test.ts index 1aa043b..4ffd51e 100644 --- a/tests/project-config.test.ts +++ b/tests/project-config.test.ts @@ -1,12 +1,12 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; import path from 'path'; import fs from 'fs-extra'; -import { getConfigSource, getCombinedProjectConfig, migrateLegacyToNew, getRepoSourceConfig, getSourceDir } from '../src/project-config.js'; +import { getConfigSource, getCombinedProjectConfig, getRepoSourceConfig, getSourceDir } from '../src/project-config.js'; import { findAdapterForAlias } from '../src/adapters/index.js'; 
vi.mock('fs-extra'); -describe('project-config (ai-rules-sync + legacy compat)', () => { +describe('project-config (ai-rules-sync)', () => { const projectPath = '/mock/project'; beforeEach(() => { @@ -18,7 +18,6 @@ describe('project-config (ai-rules-sync + legacy compat)', () => { it('prefers new config when ai-rules-sync.json exists', async () => { vi.mocked(fs.pathExists).mockImplementation(async (p) => { if (p === path.join(projectPath, 'ai-rules-sync.json')) return true; - if (p === path.join(projectPath, 'cursor-rules.json')) return true; // should be ignored return false; }); @@ -26,64 +25,39 @@ describe('project-config (ai-rules-sync + legacy compat)', () => { expect(source).toBe('new'); }); - it('falls back to legacy cursor-rules.json when no new config exists', async () => { + it('returns none when no ai-rules-sync config exists', async () => { vi.mocked(fs.pathExists).mockImplementation(async (p) => { if (p === path.join(projectPath, 'ai-rules-sync.json')) return false; if (p === path.join(projectPath, 'ai-rules-sync.local.json')) return false; - if (p === path.join(projectPath, 'cursor-rules.json')) return true; - if (p === path.join(projectPath, 'cursor-rules.local.json')) return false; return false; }); - vi.mocked(fs.readJson).mockImplementation(async (p) => { - if (p === path.join(projectPath, 'cursor-rules.json')) { - return { rules: { react: 'https://example.com/repo.git' } }; - } - return {}; - }); - + const source = await getConfigSource(projectPath); + expect(source).toBe('none'); const combined = await getCombinedProjectConfig(projectPath); - expect(combined.cursor?.rules?.react).toBe('https://example.com/repo.git'); - expect(Object.keys(combined.copilot?.instructions || {})).toHaveLength(0); + expect(combined).toEqual({}); }); - it('migrates legacy cursor-rules*.json into ai-rules-sync*.json on write paths', async () => { + it('merges main/local ai-rules-sync configs', async () => { vi.mocked(fs.pathExists).mockImplementation(async (p) => { - 
if (p === path.join(projectPath, 'ai-rules-sync.json')) return false; - if (p === path.join(projectPath, 'ai-rules-sync.local.json')) return false; - if (p === path.join(projectPath, 'cursor-rules.json')) return true; - if (p === path.join(projectPath, 'cursor-rules.local.json')) return true; + if (p === path.join(projectPath, 'ai-rules-sync.json')) return true; + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) return true; return false; }); vi.mocked(fs.readJson).mockImplementation(async (p) => { - if (p === path.join(projectPath, 'cursor-rules.json')) { - return { rules: { a: 'url-a' } }; + if (p === path.join(projectPath, 'ai-rules-sync.json')) { + return { cursor: { rules: { a: 'url-a' } } }; } - if (p === path.join(projectPath, 'cursor-rules.local.json')) { - return { rules: { b: { url: 'url-b', rule: 'bb' } } }; + if (p === path.join(projectPath, 'ai-rules-sync.local.json')) { + return { cursor: { rules: { b: { url: 'url-b', rule: 'bb' } } } }; } return {}; }); - const res = await migrateLegacyToNew(projectPath); - expect(res.migrated).toBe(true); - - expect(fs.writeJson).toHaveBeenCalledWith( - path.join(projectPath, 'ai-rules-sync.json'), - expect.objectContaining({ - cursor: { rules: { a: 'url-a' } }, - }), - { spaces: 2 } - ); - - expect(fs.writeJson).toHaveBeenCalledWith( - path.join(projectPath, 'ai-rules-sync.local.json'), - expect.objectContaining({ - cursor: { rules: { b: { url: 'url-b', rule: 'bb' } } }, - }), - { spaces: 2 } - ); + const config = await getCombinedProjectConfig(projectPath); + expect(config.cursor?.rules?.a).toBe('url-a'); + expect(config.cursor?.rules?.b).toEqual({ url: 'url-b', rule: 'bb' }); }); }); @@ -117,23 +91,6 @@ describe('getRepoSourceConfig - sourceDir format', () => { expect(config.copilot?.instructions).toBe('.github/instructions'); }); - it('parses legacy flat format (string values) correctly', async () => { - vi.mocked(fs.pathExists).mockResolvedValue(true); - vi.mocked(fs.readJson).mockResolvedValue({ - 
rootPath: 'config', - cursor: { - rules: 'custom-rules', - commands: 'custom-commands' - } - }); - - const config = await getRepoSourceConfig(repoPath); - - expect(config.rootPath).toBe('config'); - expect(config.cursor?.rules).toBe('custom-rules'); - expect(config.cursor?.commands).toBe('custom-commands'); - }); - it('returns empty config when cursor/copilot are dependency records (not source dirs)', async () => { vi.mocked(fs.pathExists).mockResolvedValue(true); vi.mocked(fs.readJson).mockResolvedValue({