diff --git a/README.md b/README.md
index 73f7cef..6d2adcf 100644
--- a/README.md
+++ b/README.md
@@ -135,29 +135,55 @@ Each task gets its own implement→review loop:
### 4. Session Continuity
-Maintain context across long sessions and context clears with the ledger system:
+Maintain context across long sessions and context clears with structured compaction:
#### Ledger System
-The **continuity ledger** captures essential session state:
+The **continuity ledger** serves as both session state and compaction summary. The design is based on [Factory.ai's structured compaction research](https://factory.ai/blog/context-compression), which found that structured summarization with deterministic file tracking retains more useful context.
```
/ledger
```
Creates/updates `thoughts/ledgers/CONTINUITY_{session-name}.md` with:
-- Goal and constraints
-- Key decisions with rationale
-- Current state (Done/Now/Next)
-- Working set (branch, key files)
-**Auto-injection:** When starting a session, the most recent ledger is automatically injected into the system prompt.
+```markdown
+# Session: {name}
+Updated: {timestamp}
+
+## Goal
+## Constraints
+## Progress
+### Done
+- [x] {Completed items}
+### In Progress
+- [ ] {Current work}
+### Blocked
+- {Issues, if any}
+## Key Decisions
+- **{Decision}**: {Rationale}
+## Next Steps
+1. {Ordered list}
+## File Operations
+### Read
+- `{paths read since last compaction}`
+### Modified
+- `{paths written/edited since last compaction}`
+## Critical Context
+- {Data, examples, references needed to continue}
+```
+
+**Key features:**
+
+- **Iterative merging** - Updates preserve existing information, adding new progress rather than regenerating from scratch
+- **Deterministic file tracking** - Read/write/edit operations tracked automatically via tool call interception, not LLM extraction
+- **Auto-injection** - Most recent ledger injected into system prompt on session start
**Auto-clear:** At 80% context usage, the system automatically:
-1. Updates the ledger
-2. Creates a handoff document
+1. Captures file operations tracked since last clear
+2. Updates ledger with current state (iterative merge with previous)
3. Clears the session
-4. Injects the ledger into the fresh context
+4. Injects the updated ledger into fresh context
#### Artifact Search
@@ -170,26 +196,17 @@ Search past work to find relevant precedent:
Searches across:
- Ledgers (`thoughts/ledgers/`)
-- Handoffs (`thoughts/shared/handoffs/`)
- Plans (`thoughts/shared/plans/`)
**Auto-indexing:** Artifacts are automatically indexed when created.
-#### Handoff
-
-Save/resume session state for continuity:
-
-- `handoff-creator`: Save current session (reads ledger for context)
-- `handoff-resumer`: Resume from handoff
-- Output: `thoughts/shared/handoffs/`
-
## Commands
| Command | Description |
|---------|-------------|
| `/init` | Initialize project with ARCHITECTURE.md and CODE_STYLE.md |
| `/ledger` | Create or update continuity ledger for session state |
-| `/search` | Search past handoffs, plans, and ledgers |
+| `/search` | Search past plans and ledgers |
## Agents
@@ -207,8 +224,6 @@ Save/resume session state for continuity:
| reviewer | subagent | claude-opus-4-5 | Review correctness and style |
| ledger-creator | subagent | claude-sonnet | Create/update continuity ledgers |
| artifact-searcher | subagent | claude-sonnet | Search past work for precedent |
-| handoff-creator | subagent | claude-opus-4-5 | Save session state |
-| handoff-resumer | subagent | claude-opus-4-5 | Resume from handoff |
## Tools
@@ -217,7 +232,7 @@ Save/resume session state for continuity:
| `ast_grep_search` | AST-aware code pattern search |
| `ast_grep_replace` | AST-aware code pattern replacement |
| `look_at` | Extract file structure for large files |
-| `artifact_search` | Search past handoffs, plans, and ledgers |
+| `artifact_search` | Search past plans and ledgers |
| `background_task` | Run long-running tasks in background |
| `background_output` | Check background task status/output |
| `background_cancel` | Cancel background tasks |
@@ -229,7 +244,8 @@ Save/resume session state for continuity:
|------|-------------|
| Think Mode | Keywords like "think hard" enable 32k token thinking budget |
| Ledger Loader | Injects continuity ledger into system prompt |
-| Auto-Clear Ledger | At 80% context, saves ledger + handoff and clears session |
+| Auto-Clear Ledger | At 80% context, saves ledger with file ops and clears session |
+| File Ops Tracker | Tracks read/write/edit tool calls for deterministic file operation logging |
| Artifact Auto-Index | Indexes artifacts when written to thoughts/ directories |
| Auto-Compact | Summarizes session when hitting token limits |
| Context Injector | Injects ARCHITECTURE.md, CODE_STYLE.md, .cursorrules |
@@ -276,8 +292,7 @@ micode/
├── ledgers/ # Continuity ledgers
└── shared/
├── designs/ # Brainstorm outputs
- ├── plans/ # Implementation plans
- └── handoffs/ # Session handoffs
+ └── plans/ # Implementation plans
```
## Development
@@ -341,3 +356,4 @@ Built on techniques from:
- **[oh-my-opencode](https://github.com/code-yeongyu/oh-my-opencode)** - OpenCode plugin architecture, agent orchestration patterns, and trusted publishing setup
- **[HumanLayer ACE-FCA](https://github.com/humanlayer/12-factor-agents)** - Advanced Context Engineering for Coding Agents, structured workflows, and the research → plan → implement methodology
+- **[Factory.ai Context Compression](https://factory.ai/blog/context-compression)** - Structured compaction research showing that anchored iterative summarization with deterministic file tracking outperforms generic compression
diff --git a/src/agents/commander.ts b/src/agents/commander.ts
index 8b9a4c9..a2c7171 100644
--- a/src/agents/commander.ts
+++ b/src/agents/commander.ts
@@ -76,9 +76,9 @@ Just do it - including obvious follow-up actions.
Reference plan file in commit body
-
-Save session state
-Resume from handoff
+
+System auto-updates ledger at 80% context usage
+
@@ -89,8 +89,7 @@ Just do it - including obvious follow-up actions.
-
-
+
locator, analyzer, pattern-finder
planner then executor
diff --git a/src/agents/handoff-creator.ts b/src/agents/handoff-creator.ts
deleted file mode 100644
index 3e1a8b4..0000000
--- a/src/agents/handoff-creator.ts
+++ /dev/null
@@ -1,73 +0,0 @@
-import type { AgentConfig } from "@opencode-ai/sdk";
-
-export const handoffCreatorAgent: AgentConfig = {
- description: "Creates handoff documents for session continuity",
- mode: "subagent",
- model: "anthropic/claude-opus-4-5",
- temperature: 0.2,
- tools: {
- edit: false,
- task: false,
- },
- prompt: `
-Create handoff document to transfer context to future session.
-
-
-
-Hitting context limits
-Ending work session
-Switching to different task
-
-
-
-FIRST check for existing ledger at thoughts/ledgers/CONTINUITY_*.md
-If ledger exists, use its session name for handoff directory
-Capture ALL in-progress work
-Include exact file:line references for changes
-Document learnings and gotchas
-Prioritize next steps clearly
-Include git state (branch, commit)
-Reference all artifacts created
-
-
-
-Check for ledger at thoughts/ledgers/CONTINUITY_*.md
-If ledger exists, extract session name and state
-Review what was worked on
-Check git status for uncommitted changes
-Gather learnings and decisions made
-Identify next steps in priority order
-Write handoff document
-Commit handoff document
-
-
-
-If ledger exists: thoughts/shared/handoffs/{session-name}/YYYY-MM-DD_HH-MM-SS.md
-Otherwise: thoughts/shared/handoffs/YYYY-MM-DD_HH-MM-SS_description.md
-
-
-
-
-date: [ISO datetime]
-branch: [branch name]
-commit: [hash]
-session: [session name from ledger, if available]
-
-
-Table with Task | Status (completed/in-progress/blocked)
-Working on, Blocked by, Plan location
-
-Discoveries, gotchas, decisions made and why
-
-Anything else for next session
-
-
-
-
-
-Handoff: [path]
-Tasks: [X done, Y in-progress]
-Next: [top priority]
-
-`,
-};
diff --git a/src/agents/handoff-resumer.ts b/src/agents/handoff-resumer.ts
deleted file mode 100644
index 5dc46fa..0000000
--- a/src/agents/handoff-resumer.ts
+++ /dev/null
@@ -1,80 +0,0 @@
-import type { AgentConfig } from "@opencode-ai/sdk";
-
-export const handoffResumerAgent: AgentConfig = {
- description: "Resumes work from a handoff document",
- mode: "subagent",
- model: "anthropic/claude-opus-4-5",
- temperature: 0.2,
- tools: {
- write: false,
- edit: false,
- task: false,
- },
- prompt: `
-Resume work from a handoff document. Verify state before proceeding.
-
-
-
-Read handoff document COMPLETELY
-Load ALL referenced artifacts
-Verify git state matches
-Check for changes since handoff
-Report discrepancies before proceeding
-Don't assume - verify
-
-
-
-Find handoff (use provided path or list available)
-Read handoff completely
-Load referenced plans, research, files
-Verify current state matches
-Report analysis
-Wait for confirmation
-
-
-
-Current branch
-Commit history (ahead/behind)
-Files mentioned still exist
-Changes mentioned are present
-No conflicting changes made
-
-
-
-
-## Resuming: [handoff path]
-
-**Created**: [date]
-**Branch**: [expected] → [actual]
-**Commit**: [expected] → [actual]
-
-### State
-- Branch: [matches/differs]
-- Commit: [matches/X ahead/X behind]
-- Files: [verified/issues]
-
-### Tasks
-| Task | Status | Verified |
-|------|--------|----------|
-| [Task] | [status] | [yes/no] |
-
-### Learnings
-- [From handoff]
-
-### Next Action
-[Top priority from handoff]
-
-### Loaded
-- [x] [artifact]
-- [x] [artifact]
-
-
-
-
-Report discrepancy and wait for guidance
-Branch different
-Unexpected commits
-Files changed/missing
-Conflicting work detected
-`,
-};
diff --git a/src/agents/index.ts b/src/agents/index.ts
index 0b1c578..f67877e 100644
--- a/src/agents/index.ts
+++ b/src/agents/index.ts
@@ -7,8 +7,6 @@ import { plannerAgent } from "./planner";
import { implementerAgent } from "./implementer";
import { reviewerAgent } from "./reviewer";
import { executorAgent } from "./executor";
-import { handoffCreatorAgent } from "./handoff-creator";
-import { handoffResumerAgent } from "./handoff-resumer";
import { primaryAgent, PRIMARY_AGENT_NAME } from "./commander";
import { projectInitializerAgent } from "./project-initializer";
import { ledgerCreatorAgent } from "./ledger-creator";
@@ -24,8 +22,6 @@ export const agents: Record<string, AgentConfig> = {
implementer: implementerAgent,
reviewer: reviewerAgent,
executor: executorAgent,
- "handoff-creator": handoffCreatorAgent,
- "handoff-resumer": handoffResumerAgent,
"project-initializer": projectInitializerAgent,
"ledger-creator": ledgerCreatorAgent,
"artifact-searcher": artifactSearcherAgent,
@@ -42,8 +38,6 @@ export {
implementerAgent,
reviewerAgent,
executorAgent,
- handoffCreatorAgent,
- handoffResumerAgent,
projectInitializerAgent,
ledgerCreatorAgent,
artifactSearcherAgent,
diff --git a/src/agents/ledger-creator.ts b/src/agents/ledger-creator.ts
index cd65000..42e70e0 100644
--- a/src/agents/ledger-creator.ts
+++ b/src/agents/ledger-creator.ts
@@ -15,19 +15,50 @@ Create or update a continuity ledger to preserve session state across context cl
The ledger captures the essential context needed to resume work seamlessly.
+
+Create new ledger when none exists
+Update existing ledger with new information
+
+
Keep the ledger CONCISE - only essential information
Focus on WHAT and WHY, not HOW
-State should have exactly ONE item in "Now"
Mark uncertain information as UNCONFIRMED
Include git branch and key file paths
+
+PRESERVE all existing information from previous ledger
+ADD new progress, decisions, context from new messages
+UPDATE Progress: move In Progress items to Done when completed
+UPDATE Next Steps based on current state
+MERGE file operations: combine previous + new (passed deterministically)
+Never lose information - only add or update
+
+
+
+When updating an existing ledger, you will receive:
+
+
+{content of existing ledger}
+
+
+
+Read: path1, path2, path3
+Modified: path4, path5
+
+
+
+Update the ledger with the current session state. Merge the file operations above with any existing ones in the previous ledger.
+
+
+
-Check for existing ledger at thoughts/ledgers/CONTINUITY_*.md
-If exists, read and update it
-If not, create new ledger with session name from current task
+Check if previous-ledger is provided in input
+If provided: parse existing content and merge with new state
+If not: create new ledger with session name from current task
Gather current state: goal, decisions, progress, blockers
+Merge file operations (previous + new from input)
Write ledger in the exact format below
@@ -38,21 +69,37 @@ The ledger captures the essential context needed to resume work seamlessly.
Updated: {ISO timestamp}
## Goal
-{One sentence describing success criteria}
+{What we're trying to accomplish - one sentence describing success criteria}
## Constraints
{Technical requirements, patterns to follow, things to avoid}
+## Progress
+### Done
+- [x] {Completed items}
+
+### In Progress
+- [ ] {Current work - what's actively being worked on}
+
+### Blocked
+- {Issues preventing progress, if any}
+
## Key Decisions
-- {Decision}: {Rationale}
+- **{Decision}**: {Rationale}
+
+## Next Steps
+1. {Ordered list of what to do next}
+
+## File Operations
+### Read
+- \`{paths that were read}\`
-## State
-- Done: {Completed items as comma-separated list}
-- Now: {Current focus - exactly ONE thing}
-- Next: {Queued items in priority order}
+### Modified
+- \`{paths that were written or edited}\`
-## Open Questions
-- UNCONFIRMED: {Things needing verification}
+## Critical Context
+- {Data, examples, references needed to continue work}
+- {Important findings or discoveries}
## Working Set
- Branch: \`{branch-name}\`
@@ -61,6 +108,6 @@ Updated: {ISO timestamp}
Ledger updated: thoughts/ledgers/CONTINUITY_{session-name}.md
-State: {Now item}
+State: {Current In Progress item}
`,
};
diff --git a/src/hooks/artifact-auto-index.ts b/src/hooks/artifact-auto-index.ts
index 310f600..1c39a55 100644
--- a/src/hooks/artifact-auto-index.ts
+++ b/src/hooks/artifact-auto-index.ts
@@ -6,54 +6,43 @@ import { readFileSync } from "node:fs";
import { getArtifactIndex } from "../tools/artifact-index";
const LEDGER_PATH_PATTERN = /thoughts\/ledgers\/CONTINUITY_(.+)\.md$/;
-const HANDOFF_PATH_PATTERN = /thoughts\/shared\/handoffs\/(.+)\.md$/;
const PLAN_PATH_PATTERN = /thoughts\/shared\/plans\/(.+)\.md$/;
-function parseLedger(content: string, filePath: string, sessionName: string) {
+export function parseLedger(content: string, filePath: string, sessionName: string) {
const goalMatch = content.match(/## Goal\n([^\n]+)/);
- const stateMatch = content.match(/- Now: ([^\n]+)/);
+ const stateMatch = content.match(/### In Progress\n- \[ \] ([^\n]+)/);
const decisionsMatch = content.match(/## Key Decisions\n([\s\S]*?)(?=\n## |$)/);
- return {
- id: `ledger-${sessionName}`,
- sessionName,
- filePath,
- goal: goalMatch?.[1] || "",
- stateNow: stateMatch?.[1] || "",
- keyDecisions: decisionsMatch?.[1]?.trim() || "",
- };
-}
-
-function parseHandoff(content: string, filePath: string, fileName: string) {
- // Extract session from frontmatter if present
- const sessionMatch = content.match(/^session:\s*(.+)$/m);
- const sessionName = sessionMatch?.[1] || fileName;
+ // Parse file operations from new ledger format
+ const fileOpsSection = content.match(/## File Operations\n([\s\S]*?)(?=\n## |$)/);
+ let filesRead = "";
+ let filesModified = "";
- // Extract task summary
- const taskMatch = content.match(/\*\*Working on:\*\*\s*([^\n]+)/);
- const taskSummary = taskMatch?.[1] || "";
+ if (fileOpsSection) {
+ const readMatch = fileOpsSection[1].match(/### Read\n([\s\S]*?)(?=\n### |$)/);
+ const modifiedMatch = fileOpsSection[1].match(/### Modified\n([\s\S]*?)(?=\n### |$)/);
- // Extract learnings
- const learningsMatch = content.match(/## Learnings\n\n([\s\S]*?)(?=\n## |$)/);
- const learnings = learningsMatch?.[1]?.trim() || "";
+ if (readMatch) {
+ // Extract paths from markdown list items like "- `path`"
+ const paths = readMatch[1].match(/`([^`]+)`/g);
+ filesRead = paths ? paths.map((p) => p.replace(/`/g, "")).join(",") : "";
+ }
- // Extract what worked
- const workedMatch = content.match(/## What Worked\n\n([\s\S]*?)(?=\n## |$)/);
- const whatWorked = workedMatch?.[1]?.trim() || learnings;
-
- // Extract what failed
- const failedMatch = content.match(/## What Failed\n\n([\s\S]*?)(?=\n## |$)/);
- const whatFailed = failedMatch?.[1]?.trim() || "";
+ if (modifiedMatch) {
+ const paths = modifiedMatch[1].match(/`([^`]+)`/g);
+ filesModified = paths ? paths.map((p) => p.replace(/`/g, "")).join(",") : "";
+ }
+ }
return {
- id: `handoff-${fileName}`,
+ id: `ledger-${sessionName}`,
sessionName,
filePath,
- taskSummary,
- whatWorked,
- whatFailed,
- learnings,
- outcome: "UNKNOWN" as const,
+ goal: goalMatch?.[1] || "",
+ stateNow: stateMatch?.[1] || "",
+ keyDecisions: decisionsMatch?.[1]?.trim() || "",
+ filesRead,
+ filesModified,
};
}
@@ -103,17 +92,6 @@ export function createArtifactAutoIndexHook(_ctx: PluginInput) {
return;
}
- // Check if it's a handoff
- const handoffMatch = filePath.match(HANDOFF_PATH_PATTERN);
- if (handoffMatch) {
- const content = readFileSync(filePath, "utf-8");
- const index = await getArtifactIndex();
- const record = parseHandoff(content, filePath, handoffMatch[1]);
- await index.indexHandoff(record);
- console.log(`[artifact-auto-index] Indexed handoff: ${filePath}`);
- return;
- }
-
// Check if it's a plan
const planMatch = filePath.match(PLAN_PATH_PATTERN);
if (planMatch) {
diff --git a/src/hooks/auto-clear-ledger.ts b/src/hooks/auto-clear-ledger.ts
index 334db22..5d9b39f 100644
--- a/src/hooks/auto-clear-ledger.ts
+++ b/src/hooks/auto-clear-ledger.ts
@@ -1,6 +1,7 @@
// src/hooks/auto-clear-ledger.ts
import type { PluginInput } from "@opencode-ai/plugin";
import { findCurrentLedger, formatLedgerInjection } from "./ledger-loader";
+import { getFileOps, clearFileOps, formatFileOpsForPrompt } from "./file-ops-tracker";
// Model context limits (tokens)
const MODEL_CONTEXT_LIMITS: Record<string, number> = {
@@ -103,7 +104,11 @@ export function createAutoClearLedgerHook(ctx: PluginInput) {
})
.catch(() => {});
- // Step 1: Spawn ledger-creator agent to update ledger
+ // Step 1: Get file operations and existing ledger (don't clear yet)
+ const fileOps = getFileOps(sessionID);
+ const existingLedger = await findCurrentLedger(ctx.directory);
+
+ // Step 2: Spawn ledger-creator agent to update ledger
const ledgerSessionResp = await ctx.client.session.create({
body: {},
query: { directory: ctx.directory },
@@ -111,12 +116,24 @@ export function createAutoClearLedgerHook(ctx: PluginInput) {
const ledgerSessionID = (ledgerSessionResp as { data?: { id?: string } }).data?.id;
if (ledgerSessionID) {
+ // Build prompt with previous ledger and file ops
+ let promptText = "";
+
+ if (existingLedger) {
+ promptText += `<previous-ledger>\n${existingLedger.content}\n</previous-ledger>\n\n\n`;
+ }
+
+ promptText += formatFileOpsForPrompt(fileOps);
+ promptText += "\n\n\n";
+ promptText += existingLedger
+ ? "Update the ledger with the current session state. Merge the file operations above with any existing ones in the previous ledger."
+ : "Create a new continuity ledger for this session.";
+ promptText += "\n";
+
await ctx.client.session.prompt({
path: { id: ledgerSessionID },
body: {
- parts: [
- { type: "text", text: "Update the continuity ledger with current session state before context clear." },
- ],
+ parts: [{ type: "text", text: promptText }],
agent: "ledger-creator",
},
query: { directory: ctx.directory },
@@ -124,6 +141,7 @@ export function createAutoClearLedgerHook(ctx: PluginInput) {
// Wait for ledger completion (poll for idle)
let attempts = 0;
+ let ledgerCompleted = false;
while (attempts < 30) {
await new Promise((resolve) => setTimeout(resolve, 2000));
const statusResp = await ctx.client.session.get({
@@ -131,46 +149,15 @@ export function createAutoClearLedgerHook(ctx: PluginInput) {
query: { directory: ctx.directory },
});
if ((statusResp as { data?: { status?: string } }).data?.status === "idle") {
+ ledgerCompleted = true;
break;
}
attempts++;
}
- }
-
- // Step 2: Spawn handoff-creator agent
- const handoffSessionResp = await ctx.client.session.create({
- body: {},
- query: { directory: ctx.directory },
- });
- const handoffSessionID = (handoffSessionResp as { data?: { id?: string } }).data?.id;
-
- if (handoffSessionID) {
- await ctx.client.session.prompt({
- path: { id: handoffSessionID },
- body: {
- parts: [
- {
- type: "text",
- text: "Create a handoff document. Read the current ledger at thoughts/ledgers/ for context.",
- },
- ],
- agent: "handoff-creator",
- },
- query: { directory: ctx.directory },
- });
- // Wait for handoff completion
- let attempts = 0;
- while (attempts < 30) {
- await new Promise((resolve) => setTimeout(resolve, 2000));
- const statusResp = await ctx.client.session.get({
- path: { id: handoffSessionID },
- query: { directory: ctx.directory },
- });
- if ((statusResp as { data?: { status?: string } }).data?.status === "idle") {
- break;
- }
- attempts++;
+ // Only clear file ops after ledger-creator successfully completed
+ if (ledgerCompleted) {
+ clearFileOps(sessionID);
}
}
@@ -207,7 +194,7 @@ export function createAutoClearLedgerHook(ctx: PluginInput) {
.showToast({
body: {
title: "Context Cleared",
- message: "Ledger + handoff saved. Session ready to continue.",
+ message: "Ledger saved. Session ready to continue.",
variant: "success",
duration: 5000,
},
diff --git a/src/hooks/file-ops-tracker.ts b/src/hooks/file-ops-tracker.ts
new file mode 100644
index 0000000..3bafbcb
--- /dev/null
+++ b/src/hooks/file-ops-tracker.ts
@@ -0,0 +1,96 @@
+// src/hooks/file-ops-tracker.ts
+import type { PluginInput } from "@opencode-ai/plugin";
+
+interface FileOps {
+ read: Set<string>;
+ modified: Set<string>;
+}
+
+// Per-session file operation tracking
+const sessionFileOps = new Map<string, FileOps>();
+
+function getOrCreateOps(sessionID: string): FileOps {
+ let ops = sessionFileOps.get(sessionID);
+ if (!ops) {
+ ops = { read: new Set(), modified: new Set() };
+ sessionFileOps.set(sessionID, ops);
+ }
+ return ops;
+}
+
+export function trackFileOp(sessionID: string, operation: "read" | "write" | "edit", filePath: string): void {
+ const ops = getOrCreateOps(sessionID);
+ if (operation === "read") {
+ ops.read.add(filePath);
+ } else {
+ // write and edit both modify files
+ ops.modified.add(filePath);
+ }
+}
+
+export function getFileOps(sessionID: string): FileOps {
+ const ops = sessionFileOps.get(sessionID);
+ if (!ops) {
+ return { read: new Set(), modified: new Set() };
+ }
+ return ops;
+}
+
+export function clearFileOps(sessionID: string): void {
+ sessionFileOps.delete(sessionID);
+}
+
+export function getAndClearFileOps(sessionID: string): FileOps {
+ const ops = getFileOps(sessionID);
+ // Return copies of the sets before clearing
+ const result = {
+ read: new Set(ops.read),
+ modified: new Set(ops.modified),
+ };
+ clearFileOps(sessionID);
+ return result;
+}
+
+export function formatFileOpsForPrompt(ops: FileOps): string {
+ const readPaths = Array.from(ops.read).sort();
+ const modifiedPaths = Array.from(ops.modified).sort();
+
+ let result = "<file-operations>\n";
+ result += `Read: ${readPaths.length > 0 ? readPaths.join(", ") : "(none)"}\n`;
+ result += `Modified: ${modifiedPaths.length > 0 ? modifiedPaths.join(", ") : "(none)"}\n`;
+ result += "</file-operations>";
+
+ return result;
+}
+
+export function createFileOpsTrackerHook(_ctx: PluginInput) {
+ return {
+ "tool.execute.after": async (
+ input: { tool: string; sessionID: string; args?: Record<string, unknown> },
+ _output: { output?: string },
+ ) => {
+ const toolName = input.tool.toLowerCase();
+
+ // Only track read, write, edit tools
+ if (!["read", "write", "edit"].includes(toolName)) {
+ return;
+ }
+
+ // Extract file path from args
+ const filePath = input.args?.filePath as string | undefined;
+ if (!filePath) return;
+
+ trackFileOp(input.sessionID, toolName as "read" | "write" | "edit", filePath);
+ },
+
+ event: async ({ event }: { event: { type: string; properties?: unknown } }) => {
+ // Clean up on session delete
+ if (event.type === "session.deleted") {
+ const props = event.properties as { info?: { id?: string } } | undefined;
+ if (props?.info?.id) {
+ clearFileOps(props.info.id);
+ }
+ }
+ },
+ };
+}
diff --git a/src/index.ts b/src/index.ts
index 8e5409c..74ad5ff 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -19,6 +19,7 @@ import { createCommentCheckerHook } from "./hooks/comment-checker";
import { createAutoClearLedgerHook } from "./hooks/auto-clear-ledger";
import { createLedgerLoaderHook } from "./hooks/ledger-loader";
import { createArtifactAutoIndexHook } from "./hooks/artifact-auto-index";
+import { createFileOpsTrackerHook } from "./hooks/file-ops-tracker";
// Background Task System
import { BackgroundTaskManager, createBackgroundTaskTools } from "./tools/background-task";
@@ -87,6 +88,7 @@ const OpenCodeConfigPlugin: Plugin = async (ctx) => {
const contextWindowMonitorHook = createContextWindowMonitorHook(ctx);
const commentCheckerHook = createCommentCheckerHook(ctx);
const artifactAutoIndexHook = createArtifactAutoIndexHook(ctx);
+ const fileOpsTrackerHook = createFileOpsTrackerHook(ctx);
// Background Task System
const backgroundTaskManager = new BackgroundTaskManager(ctx);
@@ -201,6 +203,12 @@ const OpenCodeConfigPlugin: Plugin = async (ctx) => {
// Auto-index artifacts when written to thoughts/ directories
await artifactAutoIndexHook["tool.execute.after"]({ tool: input.tool, args: input.args }, output);
+
+ // Track file operations for ledger
+ await fileOpsTrackerHook["tool.execute.after"](
+ { tool: input.tool, sessionID: input.sessionID, args: input.args },
+ output,
+ );
},
event: async ({ event }) => {
@@ -221,6 +229,9 @@ const OpenCodeConfigPlugin: Plugin = async (ctx) => {
// Background task manager event handling
backgroundTaskManager.handleEvent(event);
+
+ // File ops tracker cleanup
+ await fileOpsTrackerHook.event({ event });
},
};
};
diff --git a/src/tools/artifact-index/index.ts b/src/tools/artifact-index/index.ts
index 89ca19d..94de30f 100644
--- a/src/tools/artifact-index/index.ts
+++ b/src/tools/artifact-index/index.ts
@@ -8,17 +8,6 @@ import { homedir } from "node:os";
const DEFAULT_DB_DIR = join(homedir(), ".config", "opencode", "artifact-index");
const DB_NAME = "context.db";
-export interface HandoffRecord {
- id: string;
- sessionName?: string;
- filePath: string;
- taskSummary?: string;
- whatWorked?: string;
- whatFailed?: string;
- learnings?: string;
- outcome?: "SUCCEEDED" | "PARTIAL_PLUS" | "PARTIAL_MINUS" | "FAILED" | "UNKNOWN";
-}
-
export interface PlanRecord {
id: string;
title?: string;
@@ -34,10 +23,12 @@ export interface LedgerRecord {
goal?: string;
stateNow?: string;
keyDecisions?: string;
+ filesRead?: string;
+ filesModified?: string;
}
export interface SearchResult {
- type: "handoff" | "plan" | "ledger";
+ type: "plan" | "ledger";
id: string;
filePath: string;
title?: string;
@@ -79,18 +70,6 @@ export class ArtifactIndex {
private getInlineSchema(): string {
return `
- CREATE TABLE IF NOT EXISTS handoffs (
- id TEXT PRIMARY KEY,
- session_name TEXT,
- file_path TEXT UNIQUE NOT NULL,
- task_summary TEXT,
- what_worked TEXT,
- what_failed TEXT,
- learnings TEXT,
- outcome TEXT,
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
- );
CREATE TABLE IF NOT EXISTS plans (
id TEXT PRIMARY KEY,
title TEXT,
@@ -107,70 +86,16 @@ export class ArtifactIndex {
goal TEXT,
state_now TEXT,
key_decisions TEXT,
+ files_read TEXT,
+ files_modified TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
- CREATE VIRTUAL TABLE IF NOT EXISTS handoffs_fts USING fts5(id, session_name, task_summary, what_worked, what_failed, learnings);
CREATE VIRTUAL TABLE IF NOT EXISTS plans_fts USING fts5(id, title, overview, approach);
CREATE VIRTUAL TABLE IF NOT EXISTS ledgers_fts USING fts5(id, session_name, goal, state_now, key_decisions);
`;
}
- async indexHandoff(record: HandoffRecord): Promise<void> {
- if (!this.db) throw new Error("Database not initialized");
-
- // Check for existing record by file_path to clean up old FTS entry
- const existing = this.db
- .query<{ id: string }, [string]>(`SELECT id FROM handoffs WHERE file_path = ?`)
- .get(record.filePath);
- if (existing) {
- this.db.run(`DELETE FROM handoffs_fts WHERE id = ?`, [existing.id]);
- }
-
- // Upsert handoff
- this.db.run(
- `
- INSERT INTO handoffs (id, session_name, file_path, task_summary, what_worked, what_failed, learnings, outcome, indexed_at)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
- ON CONFLICT(file_path) DO UPDATE SET
- id = excluded.id,
- session_name = excluded.session_name,
- task_summary = excluded.task_summary,
- what_worked = excluded.what_worked,
- what_failed = excluded.what_failed,
- learnings = excluded.learnings,
- outcome = excluded.outcome,
- indexed_at = CURRENT_TIMESTAMP
- `,
- [
- record.id,
- record.sessionName ?? null,
- record.filePath,
- record.taskSummary ?? null,
- record.whatWorked ?? null,
- record.whatFailed ?? null,
- record.learnings ?? null,
- record.outcome ?? null,
- ],
- );
-
- // Insert new FTS entry
- this.db.run(
- `
- INSERT INTO handoffs_fts (id, session_name, task_summary, what_worked, what_failed, learnings)
- VALUES (?, ?, ?, ?, ?, ?)
- `,
- [
- record.id,
- record.sessionName ?? null,
- record.taskSummary ?? null,
- record.whatWorked ?? null,
- record.whatFailed ?? null,
- record.learnings ?? null,
- ],
- );
- }
-
async indexPlan(record: PlanRecord): Promise<void> {
if (!this.db) throw new Error("Database not initialized");
@@ -218,14 +143,16 @@ export class ArtifactIndex {
this.db.run(
`
- INSERT INTO ledgers (id, session_name, file_path, goal, state_now, key_decisions, indexed_at)
- VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
+ INSERT INTO ledgers (id, session_name, file_path, goal, state_now, key_decisions, files_read, files_modified, indexed_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
ON CONFLICT(file_path) DO UPDATE SET
id = excluded.id,
session_name = excluded.session_name,
goal = excluded.goal,
state_now = excluded.state_now,
key_decisions = excluded.key_decisions,
+ files_read = excluded.files_read,
+ files_modified = excluded.files_modified,
indexed_at = CURRENT_TIMESTAMP
`,
[
@@ -235,6 +162,8 @@ export class ArtifactIndex {
record.goal ?? null,
record.stateNow ?? null,
record.keyDecisions ?? null,
+ record.filesRead ?? null,
+ record.filesModified ?? null,
],
);
@@ -259,28 +188,6 @@ export class ArtifactIndex {
const results: SearchResult[] = [];
const escapedQuery = this.escapeFtsQuery(query);
- // Search handoffs
- const handoffs = this.db
- .query<{ id: string; file_path: string; task_summary: string; rank: number }, [string, number]>(`
- SELECT h.id, h.file_path, h.task_summary, rank
- FROM handoffs_fts
- JOIN handoffs h ON handoffs_fts.id = h.id
- WHERE handoffs_fts MATCH ?
- ORDER BY rank
- LIMIT ?
- `)
- .all(escapedQuery, limit);
-
- for (const row of handoffs) {
- results.push({
- type: "handoff",
- id: row.id,
- filePath: row.file_path,
- summary: row.task_summary,
- score: -row.rank, // FTS5 rank is negative, lower is better
- });
- }
-
// Search plans
const plans = this.db
.query<{ id: string; file_path: string; title: string; rank: number }, [string, number]>(`
diff --git a/src/tools/artifact-index/schema.sql b/src/tools/artifact-index/schema.sql
index bf7c399..3e01735 100644
--- a/src/tools/artifact-index/schema.sql
+++ b/src/tools/artifact-index/schema.sql
@@ -2,20 +2,6 @@
-- Artifact Index Schema for SQLite + FTS5
-- NOTE: FTS tables are standalone (not content-linked) and manually synced by code
--- Handoffs table
-CREATE TABLE IF NOT EXISTS handoffs (
- id TEXT PRIMARY KEY,
- session_name TEXT,
- file_path TEXT UNIQUE NOT NULL,
- task_summary TEXT,
- what_worked TEXT,
- what_failed TEXT,
- learnings TEXT,
- outcome TEXT CHECK(outcome IN ('SUCCEEDED', 'PARTIAL_PLUS', 'PARTIAL_MINUS', 'FAILED', 'UNKNOWN')),
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
- indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-- Plans table
CREATE TABLE IF NOT EXISTS plans (
id TEXT PRIMARY KEY,
@@ -27,7 +13,7 @@ CREATE TABLE IF NOT EXISTS plans (
indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
--- Ledgers table
+-- Ledgers table - with file operation tracking
CREATE TABLE IF NOT EXISTS ledgers (
id TEXT PRIMARY KEY,
session_name TEXT,
@@ -35,20 +21,13 @@ CREATE TABLE IF NOT EXISTS ledgers (
goal TEXT,
state_now TEXT,
key_decisions TEXT,
+ files_read TEXT,
+ files_modified TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
indexed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- FTS5 virtual tables for full-text search (standalone, manually synced)
-CREATE VIRTUAL TABLE IF NOT EXISTS handoffs_fts USING fts5(
- id,
- session_name,
- task_summary,
- what_worked,
- what_failed,
- learnings
-);
-
CREATE VIRTUAL TABLE IF NOT EXISTS plans_fts USING fts5(
id,
title,
diff --git a/src/tools/artifact-search.ts b/src/tools/artifact-search.ts
index cda7150..52acb20 100644
--- a/src/tools/artifact-search.ts
+++ b/src/tools/artifact-search.ts
@@ -3,7 +3,7 @@ import { tool } from "@opencode-ai/plugin/tool";
import { getArtifactIndex } from "./artifact-index";
export const artifact_search = tool({
- description: `Search past handoffs, plans, and ledgers for relevant precedent.
+ description: `Search past plans and ledgers for relevant precedent.
Use this to find:
- Similar problems you've solved before
- Patterns and approaches that worked
@@ -12,10 +12,7 @@ Returns ranked results with file paths for further reading.`,
args: {
query: tool.schema.string().describe("Search query - describe what you're looking for"),
limit: tool.schema.number().optional().describe("Max results to return (default: 10)"),
- type: tool.schema
- .enum(["all", "handoff", "plan", "ledger"])
- .optional()
- .describe("Filter by artifact type (default: all)"),
+ type: tool.schema.enum(["all", "plan", "ledger"]).optional().describe("Filter by artifact type (default: all)"),
},
execute: async (args) => {
try {
diff --git a/tests/agents/commander.test.ts b/tests/agents/commander.test.ts
new file mode 100644
index 0000000..b5ff5fc
--- /dev/null
+++ b/tests/agents/commander.test.ts
@@ -0,0 +1,14 @@
+import { describe, it, expect } from "bun:test";
+import { primaryAgent } from "../../src/agents/commander";
+
+describe("commander agent", () => {
+ it("should not reference handoff agents in prompt", () => {
+ expect(primaryAgent.prompt).not.toContain("handoff-creator");
+ expect(primaryAgent.prompt).not.toContain("handoff-resumer");
+ expect(primaryAgent.prompt).not.toContain("handoff");
+ });
+
+ it("should still reference ledger", () => {
+ expect(primaryAgent.prompt).toContain("ledger");
+ });
+});
diff --git a/tests/agents/index.test.ts b/tests/agents/index.test.ts
new file mode 100644
index 0000000..74abc3b
--- /dev/null
+++ b/tests/agents/index.test.ts
@@ -0,0 +1,20 @@
+import { describe, it, expect } from "bun:test";
+
+describe("agents index", () => {
+ it("should not export handoff agents", async () => {
+ const module = await import("../../src/agents/index");
+
+ expect(module.agents["handoff-creator"]).toBeUndefined();
+ expect(module.agents["handoff-resumer"]).toBeUndefined();
+ expect((module as Record<string, unknown>).handoffCreatorAgent).toBeUndefined();
+ expect((module as Record<string, unknown>).handoffResumerAgent).toBeUndefined();
+ });
+
+ it("should still export other agents", async () => {
+ const module = await import("../../src/agents/index");
+
+ expect(module.agents["ledger-creator"]).toBeDefined();
+ expect(module.agents["brainstormer"]).toBeDefined();
+ expect(module.agents["commander"]).toBeDefined();
+ });
+});
diff --git a/tests/agents/ledger-creator.test.ts b/tests/agents/ledger-creator.test.ts
index 661eaaa..34e6ec7 100644
--- a/tests/agents/ledger-creator.test.ts
+++ b/tests/agents/ledger-creator.test.ts
@@ -8,11 +8,30 @@ describe("ledgerCreatorAgent", () => {
});
it("should have description mentioning ledger", () => {
- expect(ledgerCreatorAgent.description.toLowerCase()).toContain("ledger");
+ expect(ledgerCreatorAgent.description?.toLowerCase()).toContain("ledger");
});
it("should disable edit and task tools", () => {
expect(ledgerCreatorAgent.tools?.edit).toBe(false);
expect(ledgerCreatorAgent.tools?.task).toBe(false);
});
+
+ it("should support iterative update mode", () => {
+ expect(ledgerCreatorAgent.prompt).toContain("previous-ledger");
+ expect(ledgerCreatorAgent.prompt).toContain("PRESERVE");
+ expect(ledgerCreatorAgent.prompt).toContain("MERGE");
+ });
+
+ it("should include file operations section in format", () => {
+ expect(ledgerCreatorAgent.prompt).toContain("## File Operations");
+ expect(ledgerCreatorAgent.prompt).toContain("### Read");
+ expect(ledgerCreatorAgent.prompt).toContain("### Modified");
+ });
+
+ it("should have updated ledger format with Progress section", () => {
+ expect(ledgerCreatorAgent.prompt).toContain("## Progress");
+ expect(ledgerCreatorAgent.prompt).toContain("### Done");
+ expect(ledgerCreatorAgent.prompt).toContain("### In Progress");
+ expect(ledgerCreatorAgent.prompt).toContain("### Blocked");
+ });
});
diff --git a/tests/hooks/artifact-auto-index-ledger.test.ts b/tests/hooks/artifact-auto-index-ledger.test.ts
new file mode 100644
index 0000000..0408fed
--- /dev/null
+++ b/tests/hooks/artifact-auto-index-ledger.test.ts
@@ -0,0 +1,130 @@
+import { describe, it, expect } from "bun:test";
+import { parseLedger } from "../../src/hooks/artifact-auto-index";
+
+describe("artifact-auto-index ledger parsing", () => {
+ it("should parse file operations from ledger", () => {
+ const content = `# Session: test-session
+Updated: 2025-01-30T12:00:00Z
+
+## Goal
+Implement structured compaction
+
+## Constraints
+Follow existing patterns
+
+## Progress
+### Done
+- [x] Remove handoff
+
+### In Progress
+- [ ] Add file tracking
+
+### Blocked
+- None
+
+## Key Decisions
+- **Use iterative merging**: Better preservation
+
+## Next Steps
+1. Test the implementation
+
+## File Operations
+### Read
+- \`src/hooks/auto-clear-ledger.ts\`
+- \`src/agents/ledger-creator.ts\`
+
+### Modified
+- \`src/hooks/file-ops-tracker.ts\`
+
+## Critical Context
+- Based on Factory.ai approach
+`;
+
+ const result = parseLedger(content, "thoughts/ledgers/CONTINUITY_test.md", "test-session");
+
+ expect(result.id).toBe("ledger-test-session");
+ expect(result.sessionName).toBe("test-session");
+ expect(result.goal).toBe("Implement structured compaction");
+ expect(result.stateNow).toBe("Add file tracking");
+ expect(result.filesRead).toBe("src/hooks/auto-clear-ledger.ts,src/agents/ledger-creator.ts");
+ expect(result.filesModified).toBe("src/hooks/file-ops-tracker.ts");
+ });
+
+ it("should handle ledger without file operations section", () => {
+ const content = `# Session: old-session
+Updated: 2025-01-30T12:00:00Z
+
+## Goal
+Some old goal
+
+## Progress
+### In Progress
+- [ ] Current task
+
+## Key Decisions
+- **Decision**: Reason
+`;
+
+ const result = parseLedger(content, "thoughts/ledgers/CONTINUITY_old.md", "old-session");
+
+ expect(result.filesRead).toBe("");
+ expect(result.filesModified).toBe("");
+ expect(result.goal).toBe("Some old goal");
+ expect(result.stateNow).toBe("Current task");
+ });
+
+ it("should handle empty file operations lists", () => {
+ const content = `# Session: empty-ops
+Updated: 2025-01-30T12:00:00Z
+
+## Goal
+Test empty ops
+
+## Progress
+### In Progress
+- [ ] Testing
+
+## File Operations
+### Read
+(none)
+
+### Modified
+(none)
+
+## Key Decisions
+`;
+
+ const result = parseLedger(content, "thoughts/ledgers/CONTINUITY_empty.md", "empty-ops");
+
+ expect(result.filesRead).toBe("");
+ expect(result.filesModified).toBe("");
+ });
+
+ it("should handle multiple file paths", () => {
+ const content = `# Session: multi-files
+Updated: 2025-01-30T12:00:00Z
+
+## Goal
+Test multiple files
+
+## Progress
+### In Progress
+- [ ] Testing
+
+## File Operations
+### Read
+- \`file1.ts\`
+- \`file2.ts\`
+- \`file3.ts\`
+
+### Modified
+- \`mod1.ts\`
+- \`mod2.ts\`
+`;
+
+ const result = parseLedger(content, "thoughts/ledgers/CONTINUITY_multi.md", "multi-files");
+
+ expect(result.filesRead).toBe("file1.ts,file2.ts,file3.ts");
+ expect(result.filesModified).toBe("mod1.ts,mod2.ts");
+ });
+});
diff --git a/tests/hooks/artifact-auto-index.test.ts b/tests/hooks/artifact-auto-index.test.ts
new file mode 100644
index 0000000..2c6d633
--- /dev/null
+++ b/tests/hooks/artifact-auto-index.test.ts
@@ -0,0 +1,21 @@
+import { describe, it, expect } from "bun:test";
+
+describe("artifact-auto-index", () => {
+ it("should not have handoff pattern or parsing", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/hooks/artifact-auto-index.ts", "utf-8");
+ expect(source).not.toContain("HANDOFF_PATH_PATTERN");
+ expect(source).not.toContain("parseHandoff");
+ expect(source).not.toContain("indexHandoff");
+ expect(source).not.toContain("handoffMatch");
+ });
+
+ it("should still have ledger and plan patterns", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/hooks/artifact-auto-index.ts", "utf-8");
+ expect(source).toContain("LEDGER_PATH_PATTERN");
+ expect(source).toContain("PLAN_PATH_PATTERN");
+ expect(source).toContain("parseLedger");
+ expect(source).toContain("parsePlan");
+ });
+});
diff --git a/tests/hooks/auto-clear-ledger-fileops.test.ts b/tests/hooks/auto-clear-ledger-fileops.test.ts
new file mode 100644
index 0000000..e5a82d3
--- /dev/null
+++ b/tests/hooks/auto-clear-ledger-fileops.test.ts
@@ -0,0 +1,21 @@
+import { describe, it, expect } from "bun:test";
+
+describe("auto-clear-ledger file ops integration", () => {
+ it("should import file-ops-tracker functions", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/hooks/auto-clear-ledger.ts", "utf-8");
+ expect(source).toContain('from "./file-ops-tracker"');
+ // Uses getFileOps first, then clearFileOps after success (not getAndClearFileOps)
+ expect(source).toContain("getFileOps");
+ expect(source).toContain("clearFileOps");
+ expect(source).toContain("formatFileOpsForPrompt");
+ });
+
+ it("should pass file ops to ledger-creator prompt", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/hooks/auto-clear-ledger.ts", "utf-8");
+ expect(source).toContain("previous-ledger");
+ // formatFileOpsForPrompt wraps the tracked file ops in a tagged block for the prompt
+ expect(source).toContain("formatFileOpsForPrompt(fileOps)");
+ });
+});
diff --git a/tests/hooks/auto-clear-ledger.test.ts b/tests/hooks/auto-clear-ledger.test.ts
index 0ced38d..77c0933 100644
--- a/tests/hooks/auto-clear-ledger.test.ts
+++ b/tests/hooks/auto-clear-ledger.test.ts
@@ -16,4 +16,12 @@ describe("auto-clear-ledger", () => {
const { CLEAR_COOLDOWN_MS } = await import("../../src/hooks/auto-clear-ledger");
expect(CLEAR_COOLDOWN_MS).toBe(60_000);
});
+
+ it("should not reference handoff-creator in source", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/hooks/auto-clear-ledger.ts", "utf-8");
+ expect(source).not.toContain("handoff-creator");
+ expect(source).not.toContain("handoffSessionID");
+ expect(source).not.toContain("handoff");
+ });
});
diff --git a/tests/hooks/file-ops-tracker.test.ts b/tests/hooks/file-ops-tracker.test.ts
new file mode 100644
index 0000000..00f5b9e
--- /dev/null
+++ b/tests/hooks/file-ops-tracker.test.ts
@@ -0,0 +1,102 @@
+import { describe, it, expect, beforeEach } from "bun:test";
+import {
+ trackFileOp,
+ getFileOps,
+ clearFileOps,
+ getAndClearFileOps,
+ createFileOpsTrackerHook,
+} from "../../src/hooks/file-ops-tracker";
+
+describe("file-ops-tracker", () => {
+ const testSessionID = "test-session-123";
+
+ beforeEach(() => {
+ clearFileOps(testSessionID);
+ });
+
+ describe("trackFileOp", () => {
+ it("should track read operations", () => {
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ const ops = getFileOps(testSessionID);
+ expect(ops.read.has("/path/to/file.ts")).toBe(true);
+ expect(ops.modified.size).toBe(0);
+ });
+
+ it("should track write operations as modified", () => {
+ trackFileOp(testSessionID, "write", "/path/to/file.ts");
+ const ops = getFileOps(testSessionID);
+ expect(ops.modified.has("/path/to/file.ts")).toBe(true);
+ expect(ops.read.size).toBe(0);
+ });
+
+ it("should track edit operations as modified", () => {
+ trackFileOp(testSessionID, "edit", "/path/to/file.ts");
+ const ops = getFileOps(testSessionID);
+ expect(ops.modified.has("/path/to/file.ts")).toBe(true);
+ });
+
+ it("should deduplicate paths", () => {
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ const ops = getFileOps(testSessionID);
+ expect(ops.read.size).toBe(1);
+ });
+
+ it("should track multiple files", () => {
+ trackFileOp(testSessionID, "read", "/path/to/a.ts");
+ trackFileOp(testSessionID, "read", "/path/to/b.ts");
+ trackFileOp(testSessionID, "write", "/path/to/c.ts");
+ const ops = getFileOps(testSessionID);
+ expect(ops.read.size).toBe(2);
+ expect(ops.modified.size).toBe(1);
+ });
+ });
+
+ describe("getFileOps", () => {
+ it("should return empty sets for unknown session", () => {
+ const ops = getFileOps("unknown-session");
+ expect(ops.read.size).toBe(0);
+ expect(ops.modified.size).toBe(0);
+ });
+ });
+
+ describe("clearFileOps", () => {
+ it("should clear all operations for session", () => {
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ trackFileOp(testSessionID, "write", "/path/to/other.ts");
+ clearFileOps(testSessionID);
+ const ops = getFileOps(testSessionID);
+ expect(ops.read.size).toBe(0);
+ expect(ops.modified.size).toBe(0);
+ });
+ });
+
+ describe("getAndClearFileOps", () => {
+ it("should return ops and clear them", () => {
+ trackFileOp(testSessionID, "read", "/path/to/file.ts");
+ trackFileOp(testSessionID, "write", "/path/to/other.ts");
+
+ const ops = getAndClearFileOps(testSessionID);
+ expect(ops.read.has("/path/to/file.ts")).toBe(true);
+ expect(ops.modified.has("/path/to/other.ts")).toBe(true);
+
+ // Should be cleared now
+ const opsAfter = getFileOps(testSessionID);
+ expect(opsAfter.read.size).toBe(0);
+ expect(opsAfter.modified.size).toBe(0);
+ });
+ });
+
+ describe("createFileOpsTrackerHook", () => {
+ it("should export hook creator function", () => {
+ expect(typeof createFileOpsTrackerHook).toBe("function");
+ });
+
+ it("should return hook with tool.execute.after handler", () => {
+ const mockCtx = { directory: "/test" } as any;
+ const hook = createFileOpsTrackerHook(mockCtx);
+ expect(hook["tool.execute.after"]).toBeDefined();
+ });
+ });
+});
diff --git a/tests/index-file-ops.test.ts b/tests/index-file-ops.test.ts
new file mode 100644
index 0000000..92725bc
--- /dev/null
+++ b/tests/index-file-ops.test.ts
@@ -0,0 +1,11 @@
+import { describe, it, expect } from "bun:test";
+
+describe("index file-ops integration", () => {
+ it("should import file-ops-tracker hook", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/index.ts", "utf-8");
+ expect(source).toContain('from "./hooks/file-ops-tracker"');
+ expect(source).toContain("createFileOpsTrackerHook");
+ expect(source).toContain("fileOpsTrackerHook");
+ });
+});
diff --git a/tests/tools/artifact-index-no-handoff.test.ts b/tests/tools/artifact-index-no-handoff.test.ts
new file mode 100644
index 0000000..5a74d27
--- /dev/null
+++ b/tests/tools/artifact-index-no-handoff.test.ts
@@ -0,0 +1,23 @@
+import { describe, it, expect } from "bun:test";
+
+describe("artifact-index without handoffs", () => {
+ it("should not export HandoffRecord interface", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/tools/artifact-index/index.ts", "utf-8");
+ expect(source).not.toContain("export interface HandoffRecord");
+ expect(source).not.toContain("async indexHandoff");
+ });
+
+ it("should not have handoff in SearchResult type", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/tools/artifact-index/index.ts", "utf-8");
+ expect(source).not.toContain('type: "handoff"');
+ });
+
+ it("should have LedgerRecord with file operation fields", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/tools/artifact-index/index.ts", "utf-8");
+ expect(source).toContain("filesRead?: string");
+ expect(source).toContain("filesModified?: string");
+ });
+});
diff --git a/tests/tools/artifact-index-schema.test.ts b/tests/tools/artifact-index-schema.test.ts
new file mode 100644
index 0000000..bb7c883
--- /dev/null
+++ b/tests/tools/artifact-index-schema.test.ts
@@ -0,0 +1,26 @@
+import { describe, it, expect } from "bun:test";
+
+describe("artifact-index schema", () => {
+ it("should not have handoff tables", async () => {
+ const fs = await import("node:fs/promises");
+ const schema = await fs.readFile("src/tools/artifact-index/schema.sql", "utf-8");
+ expect(schema).not.toContain("CREATE TABLE IF NOT EXISTS handoffs");
+ expect(schema).not.toContain("CREATE VIRTUAL TABLE IF NOT EXISTS handoffs_fts");
+ });
+
+ it("should still have ledgers and plans tables", async () => {
+ const fs = await import("node:fs/promises");
+ const schema = await fs.readFile("src/tools/artifact-index/schema.sql", "utf-8");
+ expect(schema).toContain("CREATE TABLE IF NOT EXISTS ledgers");
+ expect(schema).toContain("CREATE TABLE IF NOT EXISTS plans");
+ expect(schema).toContain("CREATE VIRTUAL TABLE IF NOT EXISTS ledgers_fts");
+ expect(schema).toContain("CREATE VIRTUAL TABLE IF NOT EXISTS plans_fts");
+ });
+
+ it("should have files_read and files_modified columns in ledgers", async () => {
+ const fs = await import("node:fs/promises");
+ const schema = await fs.readFile("src/tools/artifact-index/schema.sql", "utf-8");
+ expect(schema).toContain("files_read TEXT");
+ expect(schema).toContain("files_modified TEXT");
+ });
+});
diff --git a/tests/tools/artifact-index.test.ts b/tests/tools/artifact-index.test.ts
index 782935a..1f72a9e 100644
--- a/tests/tools/artifact-index.test.ts
+++ b/tests/tools/artifact-index.test.ts
@@ -27,29 +27,6 @@ describe("ArtifactIndex", () => {
await index.close();
});
- it("should index and search handoffs", async () => {
- const { ArtifactIndex } = await import("../../src/tools/artifact-index");
- const index = new ArtifactIndex(testDir);
- await index.initialize();
-
- await index.indexHandoff({
- id: "test-1",
- sessionName: "auth-feature",
- filePath: "/path/to/handoff.md",
- taskSummary: "Implement OAuth authentication",
- whatWorked: "JWT tokens work well",
- whatFailed: "Session refresh had issues",
- learnings: "Use refresh tokens for long sessions",
- outcome: "SUCCEEDED",
- });
-
- const results = await index.search("OAuth authentication");
- expect(results.length).toBeGreaterThan(0);
- expect(results[0].type).toBe("handoff");
-
- await index.close();
- });
-
it("should index and search plans", async () => {
const { ArtifactIndex } = await import("../../src/tools/artifact-index");
const index = new ArtifactIndex(testDir);
@@ -82,6 +59,8 @@ describe("ArtifactIndex", () => {
goal: "Migrate from MySQL to PostgreSQL",
stateNow: "Schema conversion in progress",
keyDecisions: "Use pgloader for data migration",
+ filesRead: "src/db/schema.ts,src/db/migrations/001.sql",
+ filesModified: "src/db/config.ts",
});
const results = await index.search("PostgreSQL migration");
@@ -90,4 +69,25 @@ describe("ArtifactIndex", () => {
await index.close();
});
+
+ it("should index ledger with file operations", async () => {
+ const { ArtifactIndex } = await import("../../src/tools/artifact-index");
+ const index = new ArtifactIndex(testDir);
+ await index.initialize();
+
+ await index.indexLedger({
+ id: "ledger-2",
+ sessionName: "feature-work",
+ filePath: "/path/to/ledger2.md",
+ goal: "Implement new feature",
+ filesRead: "src/a.ts,src/b.ts",
+ filesModified: "src/c.ts",
+ });
+
+ // Verify it was indexed (search should find it)
+ const results = await index.search("feature");
+ expect(results.length).toBeGreaterThan(0);
+
+ await index.close();
+ });
});
diff --git a/tests/tools/artifact-search.test.ts b/tests/tools/artifact-search.test.ts
new file mode 100644
index 0000000..70aaec2
--- /dev/null
+++ b/tests/tools/artifact-search.test.ts
@@ -0,0 +1,17 @@
+import { describe, it, expect } from "bun:test";
+
+describe("artifact-search tool", () => {
+ it("should not have handoff in type enum", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/tools/artifact-search.ts", "utf-8");
+ expect(source).not.toContain('"handoff"');
+ expect(source).toContain('"plan"');
+ expect(source).toContain('"ledger"');
+ });
+
+ it("should not mention handoffs in description", async () => {
+ const fs = await import("node:fs/promises");
+ const source = await fs.readFile("src/tools/artifact-search.ts", "utf-8");
+ expect(source).not.toContain("handoffs");
+ });
+});