From a7523435d1319f1a12e84498bcbd601cc91070bb Mon Sep 17 00:00:00 2001 From: blocks Date: Fri, 20 Mar 2026 17:10:54 +0800 Subject: [PATCH 1/9] feat: add release automation and dist cli smoke --- .github/workflows/ci.yml | 3 + .github/workflows/release.yml | 49 +++++++++ CONTRIBUTING.md | 2 + README.en.md | 1 + README.md | 1 + docs/release-checklist.md | 40 +++++--- package.json | 6 +- src/cli.ts | 6 +- test/audit.test.ts | 9 +- test/dist-cli-smoke.test.ts | 187 ++++++++++++++++++++++++++++++++++ test/docs-contract.test.ts | 26 ++++- test/helpers/cli-runner.ts | 27 +++++ test/memory-command.test.ts | 10 +- test/session-command.test.ts | 10 +- 14 files changed, 329 insertions(+), 48 deletions(-) create mode 100644 .github/workflows/release.yml create mode 100644 test/dist-cli-smoke.test.ts create mode 100644 test/helpers/cli-runner.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cbdadeb..fd41073 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,5 +43,8 @@ jobs: - name: Build run: pnpm build + - name: Dist CLI Smoke + run: pnpm test:dist-cli-smoke + - name: Pack Check run: pnpm pack:check diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..c91be33 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,49 @@ +name: Release + +on: + push: + tags: + - "v*" + +jobs: + release: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: pnpm + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Verify tag matches package version + run: | + PACKAGE_VERSION=$(node -p "require('./package.json').version") + if [ "${GITHUB_REF_NAME}" != "v${PACKAGE_VERSION}" ]; then + echo "Tag ${GITHUB_REF_NAME} does not match package version 
v${PACKAGE_VERSION}" >&2 + exit 1 + fi + + - name: Verify release candidate + run: pnpm verify:release + + - name: Pack release tarball + id: pack + run: | + TARBALL=$(npm pack | tail -n 1) + echo "tarball=${TARBALL}" >> "$GITHUB_OUTPUT" + + - name: Create GitHub Release + env: + GH_TOKEN: ${{ github.token }} + run: gh release create "${GITHUB_REF_NAME}" "${{ steps.pack.outputs.tarball }}" --generate-notes --verify-tag diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1da6d9f..7a87e88 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -26,6 +26,7 @@ pnpm test:reviewer-smoke pnpm test:cli-smoke pnpm test pnpm build +pnpm test:dist-cli-smoke ``` Use Node 20+ and `pnpm`. @@ -37,6 +38,7 @@ Use Node 20+ and `pnpm`. - Add or update tests for logic changes. - Update docs whenever behavior, config, or file layout changes. - Include screenshots or terminal output only when it helps explain the UX. +- If you touch release-facing CLI behavior, validate `node dist/cli.js` or `pnpm test:dist-cli-smoke`. 
## Coding Guidelines diff --git a/README.en.md b/README.en.md index 56be9fa..aa022fe 100644 --- a/README.en.md +++ b/README.en.md @@ -274,6 +274,7 @@ Current public-ready status: - topic-aware startup lookup: available - session continuity companion layer: available - reviewer audit surfaces: available +- tagged GitHub Releases: available with tarball artifacts; npm publish remains manual - native memory / native hooks primary path: not enabled and not trusted as the main implementation path ## Roadmap diff --git a/README.md b/README.md index f87a5c4..4e85177 100644 --- a/README.md +++ b/README.md @@ -274,6 +274,7 @@ Session continuity: - topic-aware startup lookup:可用 - session continuity companion layer:可用 - reviewer audit surfaces:可用 +- tagged GitHub Releases:可用,提供 tarball artifact;npm publish 仍保持手动流程 - native memory / native hooks primary path:未启用,仍非 trusted implementation path ## 路线图 diff --git a/docs/release-checklist.md b/docs/release-checklist.md index 3360e8a..ef0ac22 100644 --- a/docs/release-checklist.md +++ b/docs/release-checklist.md @@ -22,26 +22,29 @@ Use this checklist before cutting any alpha or beta release of `codex-auto-memor - Run `pnpm test:docs-contract` - Run `pnpm test:reviewer-smoke` - Run `pnpm test:cli-smoke` +- Run `pnpm test:dist-cli-smoke` - Run `pnpm test` - Run `pnpm build` - Run `pnpm pack:check` -- Run `pnpm exec tsx src/cli.ts audit` if you want the repository privacy scan; keep it as a manual release-time check instead of a CI gate. -- Run `pnpm exec tsx src/cli.ts session refresh --json` and confirm `action`, `writeMode`, and `rolloutSelection` reflect the selected provenance. -- Run `pnpm exec tsx src/cli.ts session load --json` and confirm older JSON consumers still receive the existing core fields. -- Run `pnpm exec tsx src/cli.ts session status --json` and confirm the latest explicit audit drill-down matches the newest audit-log entry when present. 
-- Run `pnpm exec tsx src/cli.ts memory --recent --json` and confirm suppressed conflict candidates remain reviewer-visible instead of being silently merged. -- Confirm `pnpm exec tsx src/cli.ts session load --json` / `status --json` still expose `confidence` and warnings when the rollout required a conservative continuity summary. +- After `pnpm build`, prefer validating release-facing CLI behavior through `node dist/cli.js ...` rather than `tsx src/cli.ts`. +- Run `node dist/cli.js --version` and confirm it matches `package.json`. +- Run `node dist/cli.js audit` if you want the repository privacy scan; keep it as a manual release-time check instead of a CI gate. +- Run `node dist/cli.js session refresh --json` and confirm `action`, `writeMode`, and `rolloutSelection` reflect the selected provenance. +- Run `node dist/cli.js session load --json` and confirm older JSON consumers still receive the existing core fields. +- Run `node dist/cli.js session status --json` and confirm the latest explicit audit drill-down matches the newest audit-log entry when present. +- Run `node dist/cli.js memory --recent --json` and confirm suppressed conflict candidates remain reviewer-visible instead of being silently merged. +- Confirm `node dist/cli.js session load --json` / `status --json` still expose `confidence` and warnings when the rollout required a conservative continuity summary. - Confirm continuity reviewer warnings stay in diagnostics / audit surfaces and are not written into continuity Markdown body text. 
- Run a local smoke flow: - - `pnpm exec tsx src/cli.ts init` - - `pnpm exec tsx src/cli.ts remember "..."` - - `pnpm exec tsx src/cli.ts memory --recent --print-startup` - - `pnpm exec tsx src/cli.ts session status` - - `pnpm exec tsx src/cli.ts session save` - - `pnpm exec tsx src/cli.ts session refresh` - - `pnpm exec tsx src/cli.ts session load --print-startup` - - `pnpm exec tsx src/cli.ts forget "..."` - - `pnpm exec tsx src/cli.ts doctor` + - `node dist/cli.js init` + - `node dist/cli.js remember "..."` + - `node dist/cli.js memory --recent --print-startup` + - `node dist/cli.js session status` + - `node dist/cli.js session save` + - `node dist/cli.js session refresh` + - `node dist/cli.js session load --print-startup` + - `node dist/cli.js forget "..."` + - `node dist/cli.js doctor` ## Documentation checks @@ -64,3 +67,10 @@ Do not tag a release unless: - docs are current - review artifacts are in place - the current milestone can be explained without reading every commit in the repository +- the tag format is `v` + +## Release automation notes + +- A pushed `v*` tag now runs the GitHub Release workflow. +- The workflow verifies `GITHUB_REF_NAME === v${package.json.version}`, runs `pnpm verify:release`, and uploads the `npm pack` tarball to the GitHub Release. +- npm publish remains manual until registry credentials and approval posture are intentionally wired. 
diff --git a/package.json b/package.json index 8828aba..3b7e3f8 100644 --- a/package.json +++ b/package.json @@ -20,16 +20,18 @@ "packageManager": "pnpm@10.11.0", "scripts": { "build": "tsc -p tsconfig.build.json", - "ci": "pnpm lint && pnpm test:docs-contract && pnpm test:reviewer-smoke && pnpm test:cli-smoke && pnpm test && pnpm build && pnpm pack:check", "clean": "rimraf dist coverage .tmp", "dev": "tsx src/cli.ts", "lint": "tsc --noEmit -p tsconfig.json", "pack:check": "npm pack --dry-run", + "prepack": "pnpm build", "test": "vitest run", "test:cli-smoke": "vitest run test/audit.test.ts test/memory-command.test.ts test/session-command.test.ts", + "test:dist-cli-smoke": "vitest run test/dist-cli-smoke.test.ts", "test:docs-contract": "vitest run test/docs-contract.test.ts", "test:reviewer-smoke": "vitest run test/docs-contract.test.ts test/memory-command.test.ts test/session-command.test.ts test/session-continuity.test.ts", - "test:watch": "vitest" + "test:watch": "vitest", + "verify:release": "pnpm lint && pnpm test:docs-contract && pnpm test:reviewer-smoke && pnpm test:cli-smoke && pnpm test && pnpm build && pnpm test:dist-cli-smoke && pnpm pack:check" }, "keywords": [ "codex", diff --git a/src/cli.ts b/src/cli.ts index b863156..66d5b12 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,5 +1,6 @@ #!/usr/bin/env node +import { createRequire } from "node:module"; import { Command } from "commander"; import { runInit } from "./lib/commands/init.js"; import { runMemory } from "./lib/commands/memory.js"; @@ -12,6 +13,9 @@ import { runWrappedCodex } from "./lib/commands/wrapper.js"; import { runAudit } from "./lib/commands/audit.js"; import { runSession } from "./lib/commands/session.js"; +const require = createRequire(import.meta.url); +const { version } = require("../package.json") as { version: string }; + function isWrapperCommand(input?: string): input is "run" | "exec" | "resume" { return input === "run" || input === "exec" || input === "resume"; } @@ -28,7 +32,7 
@@ async function main(): Promise { program .name("cam") .description("Codex Auto Memory companion CLI") - .version("0.1.0"); + .version(version); program .command("init") diff --git a/test/audit.test.ts b/test/audit.test.ts index e6d6a1b..381a68c 100644 --- a/test/audit.test.ts +++ b/test/audit.test.ts @@ -6,12 +6,9 @@ import { runAuditScan } from "../src/lib/security/audit.js"; import { runAudit } from "../src/lib/commands/audit.js"; import { runCommandCapture } from "../src/lib/util/process.js"; import * as processUtils from "../src/lib/util/process.js"; +import { runCli } from "./helpers/cli-runner.js"; const tempDirs: string[] = []; -const sourceCliPath = path.resolve("src/cli.ts"); -const tsxBinaryPath = path.resolve( - process.platform === "win32" ? "node_modules/.bin/tsx.cmd" : "node_modules/.bin/tsx" -); async function tempDir(prefix: string): Promise { const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); @@ -33,10 +30,6 @@ async function initRepo(repoDir: string): Promise { runCommandCapture("git", ["commit", "-m", "init"], repoDir, gitEnv); } -function runCli(repoDir: string, args: string[]) { - return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir); -} - afterEach(async () => { await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); }); diff --git a/test/dist-cli-smoke.test.ts b/test/dist-cli-smoke.test.ts new file mode 100644 index 0000000..583fa3a --- /dev/null +++ b/test/dist-cli-smoke.test.ts @@ -0,0 +1,187 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, describe, expect, it } from "vitest"; +import { detectProjectContext } from "../src/lib/domain/project-context.js"; +import { MemoryStore } from "../src/lib/domain/memory-store.js"; +import { SessionContinuityStore } from "../src/lib/domain/session-continuity-store.js"; +import type { AppConfig } from "../src/lib/types.js"; +import { + initGitRepo, + 
makeAppConfig, + writeCamConfig +} from "./helpers/cam-test-fixtures.js"; +import { runCli } from "./helpers/cli-runner.js"; + +const tempDirs: string[] = []; +const originalHome = process.env.HOME; + +async function tempDir(prefix: string): Promise { + const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); + tempDirs.push(dir); + return dir; +} + +afterEach(async () => { + process.env.HOME = originalHome; + await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); +}); + +describe("dist cli smoke", () => { + it("reports the package version from the compiled cli entrypoint", async () => { + const repoDir = await tempDir("cam-dist-version-"); + const packageJson = JSON.parse( + await fs.readFile(path.resolve("package.json"), "utf8") + ) as { version: string }; + + const result = runCli(repoDir, ["--version"], { entrypoint: "dist" }); + + expect(result.exitCode).toBe(0); + expect(result.stdout.trim()).toBe(packageJson.version); + }); + + it("serves reviewer json surfaces from the compiled cli entrypoint", async () => { + const homeDir = await tempDir("cam-dist-home-"); + const projectDir = await tempDir("cam-dist-project-"); + const memoryRoot = await tempDir("cam-dist-memory-root-"); + process.env.HOME = homeDir; + + const config = makeAppConfig(); + await writeCamConfig(projectDir, config, { + autoMemoryDirectory: memoryRoot + }); + + const project = detectProjectContext(projectDir); + const memoryStore = new MemoryStore(project, { + ...config, + autoMemoryDirectory: memoryRoot + }); + await memoryStore.ensureLayout(); + await memoryStore.remember( + "project", + "workflow", + "prefer-pnpm", + "Prefer pnpm in this repository.", + ["Use pnpm instead of npm in this repository."], + "Manual note." 
+ ); + await memoryStore.appendSyncAuditEntry({ + appliedAt: "2026-03-14T12:00:00.000Z", + projectId: project.projectId, + worktreeId: project.worktreeId, + rolloutPath: "/tmp/rollout-dist-smoke.jsonl", + sessionId: "session-dist-smoke", + configuredExtractorMode: "heuristic", + configuredExtractorName: "heuristic", + actualExtractorMode: "heuristic", + actualExtractorName: "heuristic", + extractorMode: "heuristic", + extractorName: "heuristic", + sessionSource: "rollout-jsonl", + status: "applied", + appliedCount: 1, + scopesTouched: ["project"], + resultSummary: "1 operation(s) applied", + operations: [ + { + action: "upsert", + scope: "project", + topic: "workflow", + id: "prefer-pnpm", + summary: "Prefer pnpm in this repository.", + details: ["Use pnpm instead of npm in this repository."], + reason: "Manual note.", + sources: ["manual"] + } + ] + }); + + const continuityStore = new SessionContinuityStore(project, { + ...config, + autoMemoryDirectory: memoryRoot + }); + await continuityStore.saveSummary( + { + project: { + goal: "Continue reviewing the release-facing CLI surface.", + confirmedWorking: ["Compiled CLI reviewer surfaces are available."], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + }, + projectLocal: { + goal: "", + confirmedWorking: [], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + } + }, + "project" + ); + + const memoryResult = runCli(projectDir, ["memory", "--recent", "1", "--json"], { + entrypoint: "dist" + }); + const sessionResult = runCli(projectDir, ["session", "status", "--json"], { + entrypoint: "dist" + }); + + expect(memoryResult.exitCode).toBe(0); + expect(sessionResult.exitCode).toBe(0); + + const memoryPayload = JSON.parse(memoryResult.stdout) as { + recentSyncAudit: Array<{ rolloutPath: string }>; + }; + const sessionPayload = JSON.parse(sessionResult.stdout) as { + projectLocation: { exists: boolean }; + }; + + 
expect(memoryPayload.recentSyncAudit).toHaveLength(1); + expect(memoryPayload.recentSyncAudit[0]?.rolloutPath).toBe("/tmp/rollout-dist-smoke.jsonl"); + expect(sessionPayload.projectLocation.exists).toBe(true); + }, 30_000); + + it("routes exec through the compiled wrapper entrypoint", async () => { + const repoDir = await tempDir("cam-dist-wrapper-repo-"); + const memoryRoot = await tempDir("cam-dist-wrapper-memory-"); + await initGitRepo(repoDir); + + const capturedArgsPath = path.join(repoDir, "captured-args.json"); + const mockCodexPath = path.join(repoDir, "mock-codex"); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +fs.writeFileSync(${JSON.stringify(capturedArgsPath)}, JSON.stringify(process.argv.slice(2), null, 2)); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + const projectConfig: AppConfig = makeAppConfig({ + autoMemoryEnabled: false, + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: false + }); + await writeCamConfig(repoDir, projectConfig, { + autoMemoryDirectory: memoryRoot, + autoMemoryEnabled: false, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: false + }); + + const result = runCli(repoDir, ["exec", "continue"], { + entrypoint: "dist" + }); + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + + expect(result.exitCode).toBe(0); + expect(capturedArgs).toContain("exec"); + expect(capturedArgs).toContain("continue"); + expect(capturedArgs.some((value) => value.startsWith("base_instructions="))).toBe(true); + }, 30_000); +}); diff --git a/test/docs-contract.test.ts b/test/docs-contract.test.ts index 1bac61d..17244cf 100644 --- a/test/docs-contract.test.ts +++ b/test/docs-contract.test.ts @@ -12,25 +12,43 @@ describe("docs contract", () => { const readmeEn = await readDoc("README.en.md"); const releaseChecklist = await readDoc("docs/release-checklist.md"); const contributing = await 
readDoc("CONTRIBUTING.md"); + const ciWorkflow = await readDoc(".github/workflows/ci.yml"); + const releaseWorkflow = await readDoc(".github/workflows/release.yml"); + const packageJson = JSON.parse(await readDoc("package.json")) as { + scripts: Record; + }; expect(readme).toContain("cam memory"); expect(readme).toContain("cam session status"); expect(readme).toContain("cam session refresh"); expect(readme).toContain("reviewer warning prose"); + expect(readme).toContain("tagged GitHub Releases"); expect(readmeEn).toContain("cam memory"); expect(readmeEn).toContain("cam session status"); expect(readmeEn).toContain("confidence"); expect(readmeEn).toContain("deterministic scrub"); - expect(releaseChecklist).toContain("pnpm exec tsx src/cli.ts audit"); + expect(readmeEn).toContain("tagged GitHub Releases"); + expect(releaseChecklist).toContain("pnpm test:dist-cli-smoke"); + expect(releaseChecklist).toContain("node dist/cli.js --version"); + expect(releaseChecklist).toContain("node dist/cli.js audit"); expect(releaseChecklist).toContain("pnpm test:docs-contract"); expect(releaseChecklist).toContain("pnpm test:reviewer-smoke"); expect(releaseChecklist).toContain("pnpm test:cli-smoke"); expect(releaseChecklist).toContain("pnpm pack:check"); - expect(releaseChecklist).toContain("pnpm exec tsx src/cli.ts session refresh --json"); - expect(releaseChecklist).toContain("pnpm exec tsx src/cli.ts session load --json"); - expect(releaseChecklist).toContain("pnpm exec tsx src/cli.ts session status --json"); + expect(releaseChecklist).toContain("node dist/cli.js session refresh --json"); + expect(releaseChecklist).toContain("node dist/cli.js session load --json"); + expect(releaseChecklist).toContain("node dist/cli.js session status --json"); expect(contributing).toContain("reviewer-only warnings"); expect(contributing).toContain("pnpm test:docs-contract"); + expect(contributing).toContain("pnpm test:dist-cli-smoke"); + 
expect(packageJson.scripts["test:dist-cli-smoke"]).toBe("vitest run test/dist-cli-smoke.test.ts"); + expect(packageJson.scripts.prepack).toBe("pnpm build"); + expect(packageJson.scripts["verify:release"]).toContain("pnpm test:dist-cli-smoke"); + expect(ciWorkflow).toContain("Dist CLI Smoke"); + expect(releaseWorkflow).toContain("tags:"); + expect(releaseWorkflow).toContain("v*"); + expect(releaseWorkflow).toContain("pnpm verify:release"); + expect(releaseWorkflow).toContain("gh release create"); }); it("keeps continuity, architecture, and migration wording aligned with the current product posture", async () => { diff --git a/test/helpers/cli-runner.ts b/test/helpers/cli-runner.ts new file mode 100644 index 0000000..364bd11 --- /dev/null +++ b/test/helpers/cli-runner.ts @@ -0,0 +1,27 @@ +import path from "node:path"; +import { runCommandCapture } from "../../src/lib/util/process.js"; +import type { ProcessOutput } from "../../src/lib/util/process.js"; + +export type CliEntrypoint = "dist" | "source"; + +const sourceCliPath = path.resolve("src/cli.ts"); +const distCliPath = path.resolve("dist/cli.js"); +const tsxBinaryPath = path.resolve( + process.platform === "win32" ? "node_modules/.bin/tsx.cmd" : "node_modules/.bin/tsx" +); + +export function runCli( + repoDir: string, + args: string[], + options: { + entrypoint?: CliEntrypoint; + env?: NodeJS.ProcessEnv; + } = {} +): ProcessOutput { + const entrypoint = options.entrypoint ?? 
"source"; + if (entrypoint === "dist") { + return runCommandCapture("node", [distCliPath, ...args], repoDir, options.env); + } + + return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir, options.env); +} diff --git a/test/memory-command.test.ts b/test/memory-command.test.ts index 1ef6be3..e87bcde 100644 --- a/test/memory-command.test.ts +++ b/test/memory-command.test.ts @@ -6,19 +6,15 @@ import { runMemory } from "../src/lib/commands/memory.js"; import { configPaths } from "../src/lib/config/load-config.js"; import { detectProjectContext } from "../src/lib/domain/project-context.js"; import { MemoryStore } from "../src/lib/domain/memory-store.js"; -import { runCommandCapture } from "../src/lib/util/process.js"; import type { AppConfig, MemoryCommandOutput } from "../src/lib/types.js"; import { makeAppConfig, writeCamConfig } from "./helpers/cam-test-fixtures.js"; +import { runCli } from "./helpers/cli-runner.js"; const tempDirs: string[] = []; const originalHome = process.env.HOME; -const sourceCliPath = path.resolve("src/cli.ts"); -const tsxBinaryPath = path.resolve( - process.platform === "win32" ? 
"node_modules/.bin/tsx.cmd" : "node_modules/.bin/tsx" -); async function tempDir(prefix: string): Promise { const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); @@ -34,10 +30,6 @@ afterEach(async () => { const buildProjectConfig = makeAppConfig; const writeProjectConfig = writeCamConfig; -function runCli(repoDir: string, args: string[]) { - return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir); -} - describe("runMemory", () => { it("shows scope details and recent audit entries", async () => { const homeDir = await tempDir("cam-memory-home-"); diff --git a/test/session-command.test.ts b/test/session-command.test.ts index 1b2b9bf..1a97965 100644 --- a/test/session-command.test.ts +++ b/test/session-command.test.ts @@ -7,7 +7,6 @@ import { runWrappedCodex } from "../src/lib/commands/wrapper.js"; import { detectProjectContext } from "../src/lib/domain/project-context.js"; import { SessionContinuityStore } from "../src/lib/domain/session-continuity-store.js"; import { SyncService } from "../src/lib/domain/sync-service.js"; -import { runCommandCapture } from "../src/lib/util/process.js"; import type { SessionContinuityAuditEntry } from "../src/lib/types.js"; import { initGitRepo, @@ -15,13 +14,10 @@ import { makeRolloutFixture, writeCamConfig } from "./helpers/cam-test-fixtures.js"; +import { runCli } from "./helpers/cli-runner.js"; const tempDirs: string[] = []; const originalSessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -const sourceCliPath = path.resolve("src/cli.ts"); -const tsxBinaryPath = path.resolve( - process.platform === "win32" ? 
"node_modules/.bin/tsx.cmd" : "node_modules/.bin/tsx" -); async function tempDir(prefix: string): Promise { const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); @@ -32,10 +28,6 @@ async function tempDir(prefix: string): Promise { const initRepo = initGitRepo; const configJson = makeAppConfig; -function runCli(repoDir: string, args: string[]) { - return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir); -} - const writeProjectConfig = writeCamConfig; const rolloutFixture = makeRolloutFixture; From 2a6b8dce0c27aab7ff22a318f8338df8ad0d0127 Mon Sep 17 00:00:00 2001 From: blocks Date: Fri, 20 Mar 2026 17:36:41 +0800 Subject: [PATCH 2/9] docs: capture next-phase refactor focus --- CONTRIBUTING.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7a87e88..e355d58 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -40,6 +40,13 @@ Use Node 20+ and `pnpm`. - Include screenshots or terminal output only when it helps explain the UX. - If you touch release-facing CLI behavior, validate `node dist/cli.js` or `pnpm test:dist-cli-smoke`. +## Current maintainer focus + +- Prefer structural simplification over feature expansion in the next phase. +- If you refactor repository structure, keep the command surface stable unless a behavior change is intentional and documented. +- Before borrowing ideas from similar tools such as `mem0`, first inspect their current public docs or repository context and extract only patterns that fit this project's companion-first posture. +- Use external research to improve module boundaries, reviewer surfaces, and maintainability, not to broaden the product scope by default. + ## Coding Guidelines - Prefer small modules with explicit responsibilities. 
From 2465c0424a295c2edee2bcab01169ee9205cd046 Mon Sep 17 00:00:00 2001 From: blocks Date: Fri, 20 Mar 2026 18:54:14 +0800 Subject: [PATCH 3/9] refactor: narrow cli entrypoint and continuity persistence --- CONTRIBUTING.md | 7 + docs/architecture.en.md | 17 + docs/architecture.md | 17 + package.json | 6 +- src/cli.ts | 156 +---- src/lib/cli/register-commands.ts | 158 +++++ src/lib/commands/doctor.ts | 2 +- src/lib/commands/forget.ts | 3 +- src/lib/commands/memory.ts | 15 +- src/lib/commands/remember.ts | 3 +- src/lib/commands/session-presenters.ts | 437 +++++++++++++ src/lib/commands/session.ts | 552 +--------------- src/lib/commands/sync.ts | 3 +- src/lib/commands/wrapper.ts | 97 +-- .../domain/session-continuity-persistence.ts | 195 ++++++ .../common.ts => runtime/runtime-context.ts} | 35 +- test/helpers/session-test-support.ts | 75 +++ test/session-command.test.ts | 595 +----------------- test/wrapper-session-continuity.test.ts | 563 +++++++++++++++++ 19 files changed, 1573 insertions(+), 1363 deletions(-) create mode 100644 src/lib/cli/register-commands.ts create mode 100644 src/lib/commands/session-presenters.ts create mode 100644 src/lib/domain/session-continuity-persistence.ts rename src/lib/{commands/common.ts => runtime/runtime-context.ts} (58%) create mode 100644 test/helpers/session-test-support.ts create mode 100644 test/wrapper-session-continuity.test.ts diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e355d58..706ff86 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,6 +54,13 @@ Use Node 20+ and `pnpm`. - Avoid over-engineering. Start with the simplest version that keeps future migration possible. - Keep comments in English. - Keep reviewer-only warnings and confidence prose in audit/reviewer surfaces; they should not become continuity body content. +- Keep `src/cli.ts` narrow. New commands should be registered through `src/lib/cli/register-commands.ts` instead of expanding the main entrypoint again. 
+- Keep runtime composition in `src/lib/runtime/runtime-context.ts`; command files should depend on that runtime surface instead of rebuilding their own composition helpers. +- When touching continuity persistence, preserve the current contract split: + - `cam session save` = `merge` + - `cam session refresh` = `replace` + - wrapper auto-save = `merge` +- If you split tests, keep `runSession` and wrapper continuity coverage in separate files and share helpers from `test/helpers/` rather than re-inlining temp-dir or mock-wrapper setup. ## Documentation Guidelines diff --git a/docs/architecture.en.md b/docs/architecture.en.md index 7bfedab..d046e79 100644 --- a/docs/architecture.en.md +++ b/docs/architecture.en.md @@ -14,6 +14,17 @@ The shared goal is to keep memory auditable, editable, and migration-friendly instead of hiding state inside opaque caches. +The implementation also now follows an intentionally narrow code layout: + +- `src/cli.ts`: wrapper fast path, version wiring, and Commander bootstrap only +- `src/lib/cli/register-commands.ts`: centralized command registration +- `src/lib/runtime/runtime-context.ts`: runtime composition and config-patch reload +- `src/lib/commands/*`: command orchestration and reviewer-facing text/json surfaces +- `src/lib/domain/*`: core memory, continuity, audit, and rollout behavior +- `src/lib/util/*`: utility layer + +The goal is not prettier abstraction for its own sake. The goal is a narrower entrypoint, thinner command files, and less duplicated orchestration. 
+ ## Design principles - local-first and auditable @@ -173,6 +184,12 @@ The architecture keeps these replacement boundaries explicit: - `MemoryStore` - `RuntimeInjector` +The current code layout tries to keep those seams visible in practice: + +- CLI registration is separated from wrapper fast-path bootstrap +- command orchestration is separated from domain persistence +- shared continuity persistence is separated from rollout provenance selection + That keeps the integration layer replaceable without rewriting the user mental model. ## 9. Validation priorities diff --git a/docs/architecture.md b/docs/architecture.md index f4080d0..fd49525 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -14,6 +14,17 @@ 它们的共同目标是:让 memory 保持可审计、可编辑、可迁移,而不是把复杂状态藏进 opaque cache。 +当前实现也刻意保持一个“窄入口 + 清晰分层”的代码组织: + +- `src/cli.ts`:只负责 wrapper fast path、版本与 Commander 启动 +- `src/lib/cli/register-commands.ts`:集中做命令注册 +- `src/lib/runtime/runtime-context.ts`:集中做 runtime composition 与 config patch 后的 reload +- `src/lib/commands/*`:命令编排与 reviewer-facing text/json surface +- `src/lib/domain/*`:memory / continuity / audit / rollout 的核心语义与存储行为 +- `src/lib/util/*`:纯工具层 + +这样做的目标不是“架构更花”,而是让入口更窄、命令层更薄、shared orchestration 不在多个命令文件里重复扩散。 + ## 设计原则 - local-first and auditable @@ -173,6 +184,12 @@ project-local continuity 适合放: - `MemoryStore` - `RuntimeInjector` +在当前代码里,对应的实现分层也尽量保持显式: + +- CLI registration 与 wrapper fast path 分开 +- command orchestration 与 domain persistence 分开 +- continuity 的 shared persistence 与 rollout provenance selection 分开 + 这样未来若需要重评接入方式,可以替换 integration layer,而不是推翻用户心智模型。 ## 9. 
验证重点 diff --git a/package.json b/package.json index 3b7e3f8..bbe921c 100644 --- a/package.json +++ b/package.json @@ -19,17 +19,17 @@ }, "packageManager": "pnpm@10.11.0", "scripts": { - "build": "tsc -p tsconfig.build.json", + "build": "rimraf dist && tsc -p tsconfig.build.json", "clean": "rimraf dist coverage .tmp", "dev": "tsx src/cli.ts", "lint": "tsc --noEmit -p tsconfig.json", "pack:check": "npm pack --dry-run", "prepack": "pnpm build", "test": "vitest run", - "test:cli-smoke": "vitest run test/audit.test.ts test/memory-command.test.ts test/session-command.test.ts", + "test:cli-smoke": "vitest run test/audit.test.ts test/memory-command.test.ts test/session-command.test.ts test/wrapper-session-continuity.test.ts", "test:dist-cli-smoke": "vitest run test/dist-cli-smoke.test.ts", "test:docs-contract": "vitest run test/docs-contract.test.ts", - "test:reviewer-smoke": "vitest run test/docs-contract.test.ts test/memory-command.test.ts test/session-command.test.ts test/session-continuity.test.ts", + "test:reviewer-smoke": "vitest run test/docs-contract.test.ts test/memory-command.test.ts test/session-command.test.ts test/wrapper-session-continuity.test.ts test/session-continuity.test.ts", "test:watch": "vitest", "verify:release": "pnpm lint && pnpm test:docs-contract && pnpm test:reviewer-smoke && pnpm test:cli-smoke && pnpm test && pnpm build && pnpm test:dist-cli-smoke && pnpm pack:check" }, diff --git a/src/cli.ts b/src/cli.ts index 66d5b12..a0ebb4c 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -2,16 +2,8 @@ import { createRequire } from "node:module"; import { Command } from "commander"; -import { runInit } from "./lib/commands/init.js"; -import { runMemory } from "./lib/commands/memory.js"; -import { runRemember } from "./lib/commands/remember.js"; -import { runForget } from "./lib/commands/forget.js"; -import { runSync } from "./lib/commands/sync.js"; -import { runDoctor } from "./lib/commands/doctor.js"; -import { installHooks, removeHooks } from 
"./lib/commands/hooks.js"; +import { registerCommands } from "./lib/cli/register-commands.js"; import { runWrappedCodex } from "./lib/commands/wrapper.js"; -import { runAudit } from "./lib/commands/audit.js"; -import { runSession } from "./lib/commands/session.js"; const require = createRequire(import.meta.url); const { version } = require("../package.json") as { version: string }; @@ -20,6 +12,13 @@ function isWrapperCommand(input?: string): input is "run" | "exec" | "resume" { return input === "run" || input === "exec" || input === "resume"; } +function createProgram(): Command { + const program = new Command(); + program.name("cam").description("Codex Auto Memory companion CLI").version(version); + registerCommands(program); + return program; +} + async function main(): Promise { const rawArgs = process.argv.slice(2); if (isWrapperCommand(rawArgs[0])) { @@ -28,144 +27,7 @@ async function main(): Promise { return; } - const program = new Command(); - program - .name("cam") - .description("Codex Auto Memory companion CLI") - .version(version); - - program - .command("init") - .description("Initialize Codex Auto Memory in the current project") - .action(async () => { - process.stdout.write(`${await runInit()}\n`); - }); - - program - .command("memory") - .description("Inspect local memory state") - .option("--json", "Print JSON output") - .option("--scope ", "Show a single memory scope: global, project, project-local, or all", "all") - .option("--recent [count]", "Show recent sync audit entries") - .option("--enable", "Enable auto memory in config") - .option("--disable", "Disable auto memory in config") - .option("--config-scope ", "Config scope to edit: user, project, or local", "local") - .option("--print-startup", "Print the compiled startup memory block") - .option("--open", "Open the memory directory in the default file browser") - .action(async (options) => { - process.stdout.write(`${await runMemory(options)}\n`); - }); - - program - .command("remember") - 
.description("Persist a memory entry immediately") - .argument("", "Memory summary text") - .option("--scope ", "Memory scope: global, project, or project-local") - .option("--topic ", "Topic file name", "workflow") - .option("--detail ", "Additional detail bullets") - .action(async (text, options) => { - process.stdout.write(`${await runRemember(text, options)}\n`); - }); - - program - .command("forget") - .description("Delete matching memory entries") - .argument("", "Search query used to find memory entries") - .option("--scope ", "Specific scope to target, or all") - .action(async (query, options) => { - process.stdout.write(`${await runForget(query, options)}\n`); - }); - - program - .command("sync") - .description("Sync the latest rollout into markdown memory") - .option("--rollout ", "Specific rollout JSONL file to process") - .option("--force", "Re-process a rollout even if it was already synced") - .action(async (options) => { - process.stdout.write(`${await runSync(options)}\n`); - }); - - program - .command("doctor") - .description("Inspect local Codex Auto Memory wiring and environment") - .option("--json", "Print JSON output") - .action(async (options) => { - process.stdout.write(`${await runDoctor(options)}\n`); - }); - - program - .command("audit") - .description("Scan tracked files and git history for privacy and secret-hygiene risks") - .option("--json", "Print JSON output") - .option("--history", "Force-enable git history scanning") - .option("--no-history", "Disable git history scanning") - .action(async (options) => { - process.stdout.write(`${await runAudit(options)}\n`); - }); - - const sessionCommand = program.command("session").description("Manage temporary cross-session continuity state"); - sessionCommand - .command("status") - .description("Inspect current session continuity state") - .option("--json", "Print JSON output") - .action(async (options) => { - process.stdout.write(`${await runSession("status", options)}\n`); - }); - 
sessionCommand - .command("save") - .description("Save temporary session continuity from a rollout") - .option("--json", "Print JSON output") - .option("--rollout ", "Specific rollout JSONL file to summarize") - .option("--scope ", "Target continuity scope: project, project-local, or both", "both") - .action(async (options) => { - process.stdout.write(`${await runSession("save", options)}\n`); - }); - sessionCommand - .command("refresh") - .description("Regenerate session continuity from provenance and replace the selected scope") - .option("--json", "Print JSON output") - .option("--rollout ", "Specific rollout JSONL file to summarize") - .option("--scope ", "Target continuity scope: project, project-local, or both", "both") - .action(async (options) => { - process.stdout.write(`${await runSession("refresh", options)}\n`); - }); - sessionCommand - .command("load") - .description("Load current session continuity summary") - .option("--json", "Print JSON output") - .option("--print-startup", "Print the compiled startup continuity block") - .action(async (options) => { - process.stdout.write(`${await runSession("load", options)}\n`); - }); - sessionCommand - .command("clear") - .description("Clear active session continuity state") - .option("--scope ", "Target continuity scope: project, project-local, or both", "both") - .action(async (options) => { - process.stdout.write(`${await runSession("clear", options)}\n`); - }); - sessionCommand - .command("open") - .description("Open the local session continuity directory") - .action(async (options) => { - process.stdout.write(`${await runSession("open", options)}\n`); - }); - - const hooksCommand = program.command("hooks").description("Manage future hook bridge assets"); - hooksCommand - .command("install") - .description("Generate local hook bridge assets") - .action(async () => { - process.stdout.write(`${await installHooks()}\n`); - }); - hooksCommand - .command("remove") - .description("Describe how to remove generated 
hook bridge assets") - .action(async () => { - process.stdout.write(`${await removeHooks()}\n`); - }); - - await program.parseAsync(process.argv); + await createProgram().parseAsync(process.argv); } main().catch((error: unknown) => { diff --git a/src/lib/cli/register-commands.ts b/src/lib/cli/register-commands.ts new file mode 100644 index 0000000..122cc95 --- /dev/null +++ b/src/lib/cli/register-commands.ts @@ -0,0 +1,158 @@ +import { Command } from "commander"; +import { runAudit } from "../commands/audit.js"; +import { runDoctor } from "../commands/doctor.js"; +import { runForget } from "../commands/forget.js"; +import { installHooks, removeHooks } from "../commands/hooks.js"; +import { runInit } from "../commands/init.js"; +import { runMemory } from "../commands/memory.js"; +import { runRemember } from "../commands/remember.js"; +import { runSession } from "../commands/session.js"; +import { runSync } from "../commands/sync.js"; + +type AsyncCommandHandler = (...args: Args) => Promise; + +function withStdout( + handler: AsyncCommandHandler +): (...args: Args) => Promise { + return async (...args: Args): Promise => { + process.stdout.write(`${await handler(...args)}\n`); + }; +} + +function registerSessionCommands(program: Command): void { + const sessionCommand = program + .command("session") + .description("Manage temporary cross-session continuity state"); + + sessionCommand + .command("status") + .description("Inspect current session continuity state") + .option("--json", "Print JSON output") + .action(withStdout(async (options) => runSession("status", options))); + + sessionCommand + .command("save") + .description("Save temporary session continuity from a rollout") + .option("--json", "Print JSON output") + .option("--rollout ", "Specific rollout JSONL file to summarize") + .option( + "--scope ", + "Target continuity scope: project, project-local, or both", + "both" + ) + .action(withStdout(async (options) => runSession("save", options))); + + 
sessionCommand + .command("refresh") + .description("Regenerate session continuity from provenance and replace the selected scope") + .option("--json", "Print JSON output") + .option("--rollout ", "Specific rollout JSONL file to summarize") + .option( + "--scope ", + "Target continuity scope: project, project-local, or both", + "both" + ) + .action(withStdout(async (options) => runSession("refresh", options))); + + sessionCommand + .command("load") + .description("Load current session continuity summary") + .option("--json", "Print JSON output") + .option("--print-startup", "Print the compiled startup continuity block") + .action(withStdout(async (options) => runSession("load", options))); + + sessionCommand + .command("clear") + .description("Clear active session continuity state") + .option( + "--scope ", + "Target continuity scope: project, project-local, or both", + "both" + ) + .action(withStdout(async (options) => runSession("clear", options))); + + sessionCommand + .command("open") + .description("Open the local session continuity directory") + .action(withStdout(async (options) => runSession("open", options))); +} + +function registerHookCommands(program: Command): void { + const hooksCommand = program + .command("hooks") + .description("Manage future hook bridge assets"); + + hooksCommand + .command("install") + .description("Generate local hook bridge assets") + .action(withStdout(async () => installHooks())); + + hooksCommand + .command("remove") + .description("Describe how to remove generated hook bridge assets") + .action(withStdout(async () => removeHooks())); +} + +export function registerCommands(program: Command): void { + program + .command("init") + .description("Initialize Codex Auto Memory in the current project") + .action(withStdout(async () => runInit())); + + program + .command("memory") + .description("Inspect local memory state") + .option("--json", "Print JSON output") + .option( + "--scope ", + "Show a single memory scope: global, 
project, project-local, or all", + "all" + ) + .option("--recent [count]", "Show recent sync audit entries") + .option("--enable", "Enable auto memory in config") + .option("--disable", "Disable auto memory in config") + .option("--config-scope ", "Config scope to edit: user, project, or local", "local") + .option("--print-startup", "Print the compiled startup memory block") + .option("--open", "Open the memory directory in the default file browser") + .action(withStdout(async (options) => runMemory(options))); + + program + .command("remember") + .description("Persist a memory entry immediately") + .argument("", "Memory summary text") + .option("--scope ", "Memory scope: global, project, or project-local") + .option("--topic ", "Topic file name", "workflow") + .option("--detail ", "Additional detail bullets") + .action(withStdout(async (text, options) => runRemember(text, options))); + + program + .command("forget") + .description("Delete matching memory entries") + .argument("", "Search query used to find memory entries") + .option("--scope ", "Specific scope to target, or all") + .action(withStdout(async (query, options) => runForget(query, options))); + + program + .command("sync") + .description("Sync the latest rollout into markdown memory") + .option("--rollout ", "Specific rollout JSONL file to process") + .option("--force", "Re-process a rollout even if it was already synced") + .action(withStdout(async (options) => runSync(options))); + + program + .command("doctor") + .description("Inspect local Codex Auto Memory wiring and environment") + .option("--json", "Print JSON output") + .action(withStdout(async (options) => runDoctor(options))); + + program + .command("audit") + .description("Scan tracked files and git history for privacy and secret-hygiene risks") + .option("--json", "Print JSON output") + .option("--history", "Force-enable git history scanning") + .option("--no-history", "Disable git history scanning") + .action(withStdout(async (options) => 
runAudit(options))); + + registerSessionCommands(program); + registerHookCommands(program); +} diff --git a/src/lib/commands/doctor.ts b/src/lib/commands/doctor.ts index e1f9774..bdca4ff 100644 --- a/src/lib/commands/doctor.ts +++ b/src/lib/commands/doctor.ts @@ -1,6 +1,6 @@ import { runCommandCapture } from "../util/process.js"; import { buildNativeReadinessReport, parseCodexFeatures } from "../runtime/codex-features.js"; -import { buildRuntimeContext } from "./common.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; interface DoctorOptions { cwd?: string; diff --git a/src/lib/commands/forget.ts b/src/lib/commands/forget.ts index 20b94e8..7be7e38 100644 --- a/src/lib/commands/forget.ts +++ b/src/lib/commands/forget.ts @@ -1,4 +1,4 @@ -import { buildRuntimeContext } from "./common.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; import type { MemoryScope } from "../types.js"; interface ForgetOptions { @@ -21,4 +21,3 @@ export async function runForget( ...deleted.map((entry) => `- ${entry.scope}/${entry.topic}/${entry.id}: ${entry.summary}`) ].join("\n"); } - diff --git a/src/lib/commands/memory.ts b/src/lib/commands/memory.ts index 318cddc..51e6d4e 100644 --- a/src/lib/commands/memory.ts +++ b/src/lib/commands/memory.ts @@ -1,5 +1,4 @@ import path from "node:path"; -import { patchConfigFile } from "../config/write-config.js"; import { formatMemorySyncAuditEntry } from "../domain/memory-sync-audit.js"; import { buildCompactHistoryPreview } from "../domain/reviewer-history.js"; import { openPath } from "../util/open.js"; @@ -11,7 +10,10 @@ import type { MemorySyncAuditEntry, SyncRecoveryRecord } from "../types.js"; -import { buildRuntimeContext } from "./common.js"; +import { + buildRuntimeContext, + patchConfigAndReloadRuntime +} from "../runtime/runtime-context.js"; interface MemoryOptions { cwd?: string; @@ -101,15 +103,14 @@ export async function runMemory(options: MemoryOptions = {}): Promise { } let 
configUpdateMessage: string | undefined; - const initialRuntime = await buildRuntimeContext(cwd); + let runtime = await buildRuntimeContext(cwd); if (options.enable || options.disable) { - const filePath = await patchConfigFile(initialRuntime.project.projectRoot, configScope, { + const reloaded = await patchConfigAndReloadRuntime(cwd, configScope, { autoMemoryEnabled: Boolean(options.enable) }); - configUpdateMessage = `Updated ${configScope} config: ${filePath}`; + runtime = reloaded.runtime; + configUpdateMessage = `Updated ${configScope} config: ${reloaded.configUpdatePath}`; } - - const runtime = await buildRuntimeContext(cwd); const startup = await compileStartupMemory( runtime.syncService.memoryStore, runtime.loadedConfig.config.maxStartupLines diff --git a/src/lib/commands/remember.ts b/src/lib/commands/remember.ts index d810eeb..a707470 100644 --- a/src/lib/commands/remember.ts +++ b/src/lib/commands/remember.ts @@ -1,6 +1,6 @@ import { slugify } from "../util/text.js"; import type { MemoryScope } from "../types.js"; -import { buildRuntimeContext } from "./common.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; interface RememberOptions { cwd?: string; @@ -30,4 +30,3 @@ export async function runRemember( return `Saved memory to ${scope}/${topic} with id ${id}.`; } - diff --git a/src/lib/commands/session-presenters.ts b/src/lib/commands/session-presenters.ts new file mode 100644 index 0000000..642872d --- /dev/null +++ b/src/lib/commands/session-presenters.ts @@ -0,0 +1,437 @@ +import { buildCompactHistoryPreview } from "../domain/reviewer-history.js"; +import { + compileSessionContinuity, + createEmptySessionContinuityState +} from "../domain/session-continuity.js"; +import { + formatSessionContinuityAuditDrillDown, + formatSessionContinuityDiagnostics, + normalizeContinuityRecoveryRecord, + normalizeSessionContinuityAuditTrigger, + normalizeSessionContinuityWriteMode, + toSessionContinuityDiagnostics +} from 
"../domain/session-continuity-diagnostics.js"; +import type { PersistSessionContinuityResult } from "../domain/session-continuity-persistence.js"; +import type { RuntimeContext } from "../runtime/runtime-context.js"; +import type { + CompiledSessionContinuity, + ContinuityRecoveryRecord, + SessionContinuityAuditEntry, + SessionContinuityDiagnostics, + SessionContinuityLocation, + SessionContinuityState, + SessionContinuityWriteMode +} from "../types.js"; + +const recentContinuityAuditLimit = 5; +const recentContinuityPreviewReadLimit = 10; +const recentContinuityPreviewGroupLimit = 3; + +interface RolloutSelectionSummary { + kind: string; +} + +export interface SessionInspectionView { + autoLoad: boolean; + autoSave: boolean; + localPathStyle: RuntimeContext["loadedConfig"]["config"]["sessionContinuityLocalPathStyle"]; + maxLines: number; + projectLocation: SessionContinuityLocation; + localLocation: SessionContinuityLocation; + projectState: SessionContinuityState | null; + localState: SessionContinuityState | null; + mergedState: SessionContinuityState; + startup: CompiledSessionContinuity; + latestContinuityAuditEntry: SessionContinuityAuditEntry | null; + latestContinuityDiagnostics: SessionContinuityDiagnostics | null; + recentContinuityAuditEntries: SessionContinuityAuditEntry[]; + recentContinuityAuditPreviewEntries: SessionContinuityAuditEntry[]; + continuityAuditPath: string; + pendingContinuityRecovery: ContinuityRecoveryRecord | null; + continuityRecoveryPath: string; +} + +function existingContinuitySourceFiles( + ...locations: SessionContinuityLocation[] +): string[] { + return locations.filter((location) => location.exists).map((location) => location.path); +} + +function formatRecentGenerationLines(entries: SessionContinuityAuditEntry[]): string[] { + if (entries.length === 0) { + return ["- none recorded yet"]; + } + + const preview = buildCompactHistoryPreview(entries, { + excludeLeadingCount: 1, + maxGroups: recentContinuityPreviewGroupLimit, + 
getSignature: (entry) => + JSON.stringify({ + rolloutPath: entry.rolloutPath, + sourceSessionId: entry.sourceSessionId, + scope: entry.scope, + trigger: normalizeSessionContinuityAuditTrigger(entry.trigger), + writeMode: normalizeSessionContinuityWriteMode(entry.writeMode), + preferredPath: entry.preferredPath, + actualPath: entry.actualPath, + confidence: entry.confidence ?? "high", + warnings: entry.warnings ?? [], + fallbackReason: entry.fallbackReason ?? null, + codexExitCode: entry.codexExitCode ?? null, + evidenceCounts: { + successfulCommands: entry.evidenceCounts.successfulCommands, + failedCommands: entry.evidenceCounts.failedCommands, + fileWrites: entry.evidenceCounts.fileWrites, + nextSteps: entry.evidenceCounts.nextSteps, + untried: entry.evidenceCounts.untried + }, + writtenPaths: entry.writtenPaths + }) + }); + + if (preview.totalRawCount === 0) { + return ["- none beyond latest"]; + } + + const lines = preview.groups.flatMap((group) => [ + `- ${group.latest.generatedAt}: ${formatSessionContinuityDiagnostics(toSessionContinuityDiagnostics(group.latest))}`, + ` Rollout: ${group.latest.rolloutPath}`, + ...(group.rawCount > 1 + ? [` Repeated similar generations hidden: ${group.rawCount - 1}`] + : []) + ]); + + if (preview.omittedRawCount > 0) { + lines.push(`- older generations omitted: ${preview.omittedRawCount}`); + } + + return lines.length > 0 ? lines : ["- none beyond latest"]; +} + +function formatPendingContinuityRecovery( + record: ContinuityRecoveryRecord, + recoveryPath: string +): string[] { + const normalized = normalizeContinuityRecoveryRecord(record); + const warnings = normalized.warnings ?? []; + const lines = [ + "Pending continuity recovery:", + `- Recovery file: ${recoveryPath}`, + `- Failed stage: ${normalized.failedStage}`, + `- Rollout: ${normalized.rolloutPath}`, + ...(normalized.trigger ? [`- Trigger: ${normalized.trigger}`] : []), + ...(normalized.writeMode ? 
[`- Write mode: ${normalized.writeMode}`] : []), + `- Scope: ${normalized.scope}`, + `- Generation: ${normalized.actualPath} | preferred ${normalized.preferredPath}${normalized.confidence ? ` | confidence ${normalized.confidence}` : ""}`, + `- Failure: ${normalized.failureMessage}` + ]; + + if (warnings.length > 0) { + lines.push(...warnings.map((warning) => `- Warning: ${warning}`)); + } + + if (normalized.writtenPaths.length > 0) { + lines.push(...normalized.writtenPaths.map((filePath) => `- Written: ${filePath}`)); + } + + return lines; +} + +function formatLayerSection( + title: string, + state: SessionContinuityState | null, + nextLabel = "Incomplete / next:" +): string[] { + return [ + title, + `Goal: ${state?.goal || "No active goal recorded."}`, + "", + "Confirmed working:", + ...(state?.confirmedWorking.length + ? state.confirmedWorking.map((item) => `- ${item}`) + : ["- Nothing confirmed yet."]), + "", + "Tried and failed:", + ...(state?.triedAndFailed.length + ? state.triedAndFailed.map((item) => `- ${item}`) + : ["- No failed approaches recorded."]), + "", + nextLabel, + ...(state?.incompleteNext.length + ? state.incompleteNext.map((item) => `- ${item}`) + : ["- No next step recorded."]) + ]; +} + +export async function loadSessionInspectionView( + runtime: RuntimeContext +): Promise { + const projectLocation = await runtime.sessionContinuityStore.getLocation("project"); + const localLocation = await runtime.sessionContinuityStore.getLocation("project-local"); + const projectState = await runtime.sessionContinuityStore.readState("project"); + const localState = await runtime.sessionContinuityStore.readState("project-local"); + const recentContinuityAuditPreviewEntries = + await runtime.sessionContinuityStore.readRecentAuditEntries(recentContinuityPreviewReadLimit); + const latestContinuityAuditEntry = recentContinuityAuditPreviewEntries[0] ?? null; + const latestContinuityDiagnostics = latestContinuityAuditEntry + ? 
toSessionContinuityDiagnostics(latestContinuityAuditEntry) + : null; + const mergedState = + (await runtime.sessionContinuityStore.readMergedState()) ?? + createEmptySessionContinuityState( + "project-local", + runtime.project.projectId, + runtime.project.worktreeId + ); + const pendingContinuityRecoveryRecord = + await runtime.sessionContinuityStore.readRecoveryRecord(); + const pendingContinuityRecovery = pendingContinuityRecoveryRecord + ? normalizeContinuityRecoveryRecord(pendingContinuityRecoveryRecord) + : null; + const startup = compileSessionContinuity( + mergedState, + existingContinuitySourceFiles(projectLocation, localLocation), + runtime.loadedConfig.config.maxSessionContinuityLines + ); + + return { + autoLoad: runtime.loadedConfig.config.sessionContinuityAutoLoad, + autoSave: runtime.loadedConfig.config.sessionContinuityAutoSave, + localPathStyle: runtime.loadedConfig.config.sessionContinuityLocalPathStyle, + maxLines: runtime.loadedConfig.config.maxSessionContinuityLines, + projectLocation, + localLocation, + projectState, + localState, + mergedState, + startup, + latestContinuityAuditEntry, + latestContinuityDiagnostics, + recentContinuityAuditEntries: recentContinuityAuditPreviewEntries.slice( + 0, + recentContinuityAuditLimit + ), + recentContinuityAuditPreviewEntries, + continuityAuditPath: runtime.sessionContinuityStore.paths.auditFile, + pendingContinuityRecovery, + continuityRecoveryPath: runtime.sessionContinuityStore.getRecoveryPath() + }; +} + +export function formatPersistedSessionText( + action: "save" | "refresh", + persisted: PersistSessionContinuityResult, + rolloutSelection?: RolloutSelectionSummary +): string { + return [ + action === "refresh" + ? `Refreshed session continuity from ${persisted.rolloutPath}` + : `Saved session continuity from ${persisted.rolloutPath}`, + ...(action === "refresh" && rolloutSelection + ? 
[`Selection: ${rolloutSelection.kind} | write mode: replace`] + : []), + formatSessionContinuityDiagnostics(persisted.diagnostics), + ...(persisted.latestContinuityAuditEntry + ? formatSessionContinuityAuditDrillDown(persisted.latestContinuityAuditEntry) + : []), + ...(persisted.excludePath ? [`Local exclude updated: ${persisted.excludePath}`] : []) + ].join("\n"); +} + +export function buildPersistedSessionJson( + action: "save" | "refresh", + persisted: PersistSessionContinuityResult, + rolloutSelection?: RolloutSelectionSummary & { rolloutPath: string } +): string { + return JSON.stringify( + { + ...(action === "refresh" && rolloutSelection + ? { + action: "refresh", + writeMode: "replace" satisfies SessionContinuityWriteMode, + rolloutSelection + } + : {}), + rolloutPath: persisted.rolloutPath, + written: persisted.written, + excludePath: persisted.excludePath, + summary: persisted.summary, + diagnostics: persisted.diagnostics, + latestContinuityAuditEntry: persisted.latestContinuityAuditEntry, + recentContinuityAuditEntries: persisted.recentContinuityAuditEntries, + continuityAuditPath: persisted.continuityAuditPath, + pendingContinuityRecovery: persisted.pendingContinuityRecovery, + continuityRecoveryPath: persisted.continuityRecoveryPath + }, + null, + 2 + ); +} + +export function buildSessionLoadJson(view: SessionInspectionView): string { + return JSON.stringify( + { + projectLocation: view.projectLocation, + localLocation: view.localLocation, + projectState: view.projectState, + localState: view.localState, + mergedState: view.mergedState, + startup: view.startup, + latestContinuityAuditEntry: view.latestContinuityAuditEntry, + latestContinuityDiagnostics: view.latestContinuityDiagnostics, + recentContinuityAuditEntries: view.recentContinuityAuditEntries, + continuityAuditPath: view.continuityAuditPath, + pendingContinuityRecovery: view.pendingContinuityRecovery, + continuityRecoveryPath: view.continuityRecoveryPath + }, + null, + 2 + ); +} + +export 
function formatSessionLoadText( + view: SessionInspectionView, + printStartup = false +): string { + const lines = [ + "Session Continuity", + `Project continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, + `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})`, + `Latest generation: ${view.latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(view.latestContinuityDiagnostics) : "none recorded yet"}`, + ...(view.latestContinuityAuditEntry ? [`Latest rollout: ${view.latestContinuityAuditEntry.rolloutPath}`] : []), + `Continuity audit: ${view.continuityAuditPath}`, + "Merged resume brief combines shared continuity with any project-local overrides.", + "Recent prior generations below are compact audit previews, not startup-injected history.", + ...(view.latestContinuityAuditEntry + ? formatSessionContinuityAuditDrillDown(view.latestContinuityAuditEntry) + : []), + ...(view.pendingContinuityRecovery + ? formatPendingContinuityRecovery( + view.pendingContinuityRecovery, + view.continuityRecoveryPath + ) + : []), + "Recent prior generations:", + ...formatRecentGenerationLines(view.recentContinuityAuditPreviewEntries), + "", + "Shared project continuity:", + `Goal: ${view.projectState?.goal || "No active goal recorded."}`, + "", + "Confirmed working:", + ...(view.projectState?.confirmedWorking.length + ? view.projectState.confirmedWorking.map((item) => `- ${item}`) + : ["- Nothing confirmed yet."]), + "", + "Tried and failed:", + ...(view.projectState?.triedAndFailed.length + ? view.projectState.triedAndFailed.map((item) => `- ${item}`) + : ["- No failed approaches recorded."]), + "", + "Not yet tried:", + ...(view.projectState?.notYetTried.length + ? view.projectState.notYetTried.map((item) => `- ${item}`) + : ["- No untried approaches recorded."]), + "", + "Files / decisions / environment:", + ...(view.projectState?.filesDecisionsEnvironment.length + ? 
view.projectState.filesDecisionsEnvironment.map((item) => `- ${item}`) + : ["- No additional file, decision, or environment notes."]), + "", + ...formatLayerSection("Project-local continuity:", view.localState), + "", + "Project-local not yet tried:", + ...(view.localState?.notYetTried.length + ? view.localState.notYetTried.map((item) => `- ${item}`) + : ["- No untried local approaches recorded."]), + "", + "Project-local files / decisions / environment:", + ...(view.localState?.filesDecisionsEnvironment.length + ? view.localState.filesDecisionsEnvironment.map((item) => `- ${item}`) + : ["- No additional local file, decision, or environment notes."]), + "", + "Effective merged resume brief:", + `Goal: ${view.mergedState.goal || "No active goal recorded."}`, + "Confirmed working:", + ...(view.mergedState.confirmedWorking.length > 0 + ? view.mergedState.confirmedWorking.map((item) => `- ${item}`) + : ["- Nothing confirmed yet."]), + "Tried and failed:", + ...(view.mergedState.triedAndFailed.length > 0 + ? view.mergedState.triedAndFailed.map((item) => `- ${item}`) + : ["- No failed approaches recorded."]), + "Not yet tried:", + ...(view.mergedState.notYetTried.length > 0 + ? view.mergedState.notYetTried.map((item) => `- ${item}`) + : ["- No untried approaches recorded."]), + "Incomplete / next:", + ...(view.mergedState.incompleteNext.length > 0 + ? view.mergedState.incompleteNext.map((item) => `- ${item}`) + : ["- No next step recorded."]), + "Files / decisions / environment:", + ...(view.mergedState.filesDecisionsEnvironment.length > 0 + ? 
view.mergedState.filesDecisionsEnvironment.map((item) => `- ${item}`) + : ["- No additional file, decision, or environment notes."]) + ]; + + if (printStartup) { + lines.push("", "Startup continuity:", view.startup.text.trimEnd()); + } + + return lines.join("\n"); +} + +export function buildSessionStatusJson(view: SessionInspectionView): string { + return JSON.stringify( + { + autoLoad: view.autoLoad, + autoSave: view.autoSave, + localPathStyle: view.localPathStyle, + maxLines: view.maxLines, + projectLocation: view.projectLocation, + localLocation: view.localLocation, + projectState: view.projectState, + localState: view.localState, + mergedState: view.mergedState, + latestContinuityAuditEntry: view.latestContinuityAuditEntry, + latestContinuityDiagnostics: view.latestContinuityDiagnostics, + recentContinuityAuditEntries: view.recentContinuityAuditEntries, + continuityAuditPath: view.continuityAuditPath, + pendingContinuityRecovery: view.pendingContinuityRecovery, + continuityRecoveryPath: view.continuityRecoveryPath + }, + null, + 2 + ); +} + +export function formatSessionStatusText(view: SessionInspectionView): string { + return [ + "Codex Auto Memory Session Continuity", + `Auto-load: ${view.autoLoad}`, + `Auto-save: ${view.autoSave}`, + `Local path style: ${view.localPathStyle}`, + `Shared continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, + `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})`, + `Latest generation: ${view.latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(view.latestContinuityDiagnostics) : "none recorded yet"}`, + ...(view.latestContinuityAuditEntry ? 
[`Latest rollout: ${view.latestContinuityAuditEntry.rolloutPath}`] : []), + `Continuity audit: ${view.continuityAuditPath}`, + "Merged resume brief combines shared continuity with any project-local overrides.", + "Recent prior generations below are compact audit previews, not startup-injected history.", + ...(view.latestContinuityAuditEntry + ? formatSessionContinuityAuditDrillDown(view.latestContinuityAuditEntry) + : []), + ...(view.pendingContinuityRecovery + ? formatPendingContinuityRecovery( + view.pendingContinuityRecovery, + view.continuityRecoveryPath + ) + : []), + "Recent prior generations:", + ...formatRecentGenerationLines(view.recentContinuityAuditPreviewEntries), + "", + `Shared updated at: ${view.projectState?.updatedAt ?? "n/a"}`, + `Project-local updated at: ${view.localState?.updatedAt ?? "n/a"}`, + `Merged continuity layers: ${[view.projectState, view.localState].filter(Boolean).length}`, + `Startup continuity line budget: ${view.maxLines}` + ].join("\n"); +} diff --git a/src/lib/commands/session.ts b/src/lib/commands/session.ts index 3b66693..94899a2 100644 --- a/src/lib/commands/session.ts +++ b/src/lib/commands/session.ts @@ -1,38 +1,20 @@ -import { - findLatestProjectRollout, - parseRolloutEvidence -} from "../domain/rollout.js"; +import { persistSessionContinuity } from "../domain/session-continuity-persistence.js"; +import { findLatestProjectRollout } from "../domain/rollout.js"; +import type { RuntimeContext } from "../runtime/runtime-context.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; +import type { SessionContinuityScope } from "../types.js"; import { openPath } from "../util/open.js"; import { - buildSessionContinuityAuditEntry, - formatSessionContinuityAuditDrillDown, - formatSessionContinuityDiagnostics, - normalizeContinuityRecoveryRecord, - normalizeSessionContinuityAuditTrigger, - normalizeSessionContinuityWriteMode, - toSessionContinuityDiagnostics -} from 
"../domain/session-continuity-diagnostics.js"; -import { - buildContinuityRecoveryRecord, - matchesContinuityRecoveryRecord -} from "../domain/recovery-records.js"; -import { buildCompactHistoryPreview } from "../domain/reviewer-history.js"; -import { - compileSessionContinuity, - createEmptySessionContinuityState -} from "../domain/session-continuity.js"; -import type { - ContinuityRecoveryRecord, - SessionContinuityAuditTrigger, - SessionContinuityAuditEntry, - SessionContinuityScope, - SessionContinuityWriteMode -} from "../types.js"; -import { SessionContinuitySummarizer } from "../extractor/session-continuity-summarizer.js"; -import { buildRuntimeContext } from "./common.js"; + buildPersistedSessionJson, + buildSessionLoadJson, + buildSessionStatusJson, + formatPersistedSessionText, + formatSessionLoadText, + formatSessionStatusText, + loadSessionInspectionView +} from "./session-presenters.js"; type SessionAction = "status" | "save" | "refresh" | "load" | "clear" | "open"; -type SessionRuntime = Awaited>; type RolloutSelectionKind = | "explicit-rollout" | "pending-recovery-marker" @@ -52,39 +34,11 @@ interface SessionOptions { scope?: SessionContinuityScope | "both"; } -const recentContinuityAuditLimit = 5; -const recentContinuityPreviewReadLimit = 10; -const recentContinuityPreviewGroupLimit = 3; - -interface PersistSessionContinuityOptions { - runtime: SessionRuntime; - rolloutPath: string; - scope: SessionContinuityScope | "both"; - trigger: SessionContinuityAuditTrigger; - writeMode: SessionContinuityWriteMode; -} - -interface PersistSessionContinuityResult { - rolloutPath: string; - written: string[]; - excludePath: string | null; - summary: Awaited>["summary"]; - diagnostics: Awaited>["diagnostics"]; - latestContinuityAuditEntry: SessionContinuityAuditEntry | null; - recentContinuityAuditEntries: SessionContinuityAuditEntry[]; - pendingContinuityRecovery: ContinuityRecoveryRecord | null; - continuityAuditPath: string; - continuityRecoveryPath: string; 
-} - -function errorMessage(error: unknown): string { - return error instanceof Error ? error.message : String(error); -} - function selectedScope(scope?: SessionContinuityScope | "both"): SessionContinuityScope | "both" { if (!scope) { return "both"; } + if (scope === "project" || scope === "project-local" || scope === "both") { return scope; } @@ -92,94 +46,8 @@ function selectedScope(scope?: SessionContinuityScope | "both"): SessionContinui throw new Error("Scope must be one of: project, project-local, both."); } -function formatRecentGenerationLines(entries: SessionContinuityAuditEntry[]): string[] { - if (entries.length === 0) { - return ["- none recorded yet"]; - } - - const preview = buildCompactHistoryPreview(entries, { - excludeLeadingCount: 1, - maxGroups: recentContinuityPreviewGroupLimit, - getSignature: (entry) => - JSON.stringify({ - rolloutPath: entry.rolloutPath, - sourceSessionId: entry.sourceSessionId, - scope: entry.scope, - trigger: normalizeSessionContinuityAuditTrigger(entry.trigger), - writeMode: normalizeSessionContinuityWriteMode(entry.writeMode), - preferredPath: entry.preferredPath, - actualPath: entry.actualPath, - confidence: entry.confidence ?? "high", - warnings: entry.warnings ?? [], - fallbackReason: entry.fallbackReason ?? null, - codexExitCode: entry.codexExitCode ?? null, - evidenceCounts: { - successfulCommands: entry.evidenceCounts.successfulCommands, - failedCommands: entry.evidenceCounts.failedCommands, - fileWrites: entry.evidenceCounts.fileWrites, - nextSteps: entry.evidenceCounts.nextSteps, - untried: entry.evidenceCounts.untried - }, - writtenPaths: entry.writtenPaths - }) - }); - - if (preview.totalRawCount === 0) { - return ["- none beyond latest"]; - } - - const lines = preview.groups.flatMap((group) => [ - `- ${group.latest.generatedAt}: ${formatSessionContinuityDiagnostics(toSessionContinuityDiagnostics(group.latest))}`, - ` Rollout: ${group.latest.rolloutPath}`, - ...(group.rawCount > 1 - ? 
[` Repeated similar generations hidden: ${group.rawCount - 1}`] - : []) - ]); - - if (preview.omittedRawCount > 0) { - lines.push(`- older generations omitted: ${preview.omittedRawCount}`); - } - - return lines.length > 0 ? lines : ["- none beyond latest"]; -} - -function formatPendingContinuityRecovery( - record: ContinuityRecoveryRecord, - recoveryPath: string -): string[] { - const normalized = normalizeContinuityRecoveryRecord(record); - const warnings = normalized.warnings ?? []; - const lines = [ - "Pending continuity recovery:", - `- Recovery file: ${recoveryPath}`, - `- Failed stage: ${normalized.failedStage}`, - `- Rollout: ${normalized.rolloutPath}`, - ...(normalized.trigger ? [`- Trigger: ${normalized.trigger}`] : []), - ...(normalized.writeMode ? [`- Write mode: ${normalized.writeMode}`] : []), - `- Scope: ${normalized.scope}`, - `- Generation: ${normalized.actualPath} | preferred ${normalized.preferredPath}${normalized.confidence ? ` | confidence ${normalized.confidence}` : ""}`, - `- Failure: ${normalized.failureMessage}` - ]; - - if (warnings.length > 0) { - lines.push(...warnings.map((warning) => `- Warning: ${warning}`)); - } - - if (normalized.writtenPaths.length > 0) { - lines.push(...normalized.writtenPaths.map((filePath) => `- Written: ${filePath}`)); - } - - return lines; -} - -function existingContinuitySourceFiles( - ...locations: Array<{ path: string; exists: boolean }> -): string[] { - return locations.filter((location) => location.exists).map((location) => location.path); -} - async function selectRefreshRollout( - runtime: SessionRuntime, + runtime: RuntimeContext, scope: SessionContinuityScope | "both", explicitRollout?: string ): Promise { @@ -218,95 +86,6 @@ async function selectRefreshRollout( throw new Error("No relevant rollout found for this project."); } -async function persistSessionContinuity( - options: PersistSessionContinuityOptions -): Promise { - const parsedEvidence = await parseRolloutEvidence(options.rolloutPath); - if 
(!parsedEvidence) { - throw new Error(`Could not parse rollout evidence from ${options.rolloutPath}.`); - } - - const existing = - options.writeMode === "merge" - ? { - project: await options.runtime.sessionContinuityStore.readState("project"), - projectLocal: await options.runtime.sessionContinuityStore.readState("project-local") - } - : undefined; - const summarizer = new SessionContinuitySummarizer(options.runtime.loadedConfig.config); - const generation = await summarizer.summarizeWithDiagnostics(parsedEvidence, existing); - const written = - options.writeMode === "replace" - ? await options.runtime.sessionContinuityStore.replaceSummary( - generation.summary, - options.scope - ) - : await options.runtime.sessionContinuityStore.saveSummary( - generation.summary, - options.scope - ); - const auditEntry = buildSessionContinuityAuditEntry( - options.runtime.project, - options.runtime.loadedConfig.config, - generation.diagnostics, - written, - options.scope, - { - trigger: options.trigger, - writeMode: options.writeMode - } - ); - - try { - await options.runtime.sessionContinuityStore.appendAuditLog(auditEntry); - } catch (error) { - await writeContinuityRecoveryRecordBestEffort( - options.runtime, - generation.diagnostics, - options.scope, - written, - errorMessage(error), - options.trigger, - options.writeMode - ); - throw error; - } - - await clearContinuityRecoveryRecordBestEffort( - options.runtime, - generation.diagnostics, - options.scope - ); - - const recentContinuityAuditPreviewEntries = - await options.runtime.sessionContinuityStore.readRecentAuditEntries( - recentContinuityPreviewReadLimit - ); - const pendingContinuityRecoveryRecord = - await options.runtime.sessionContinuityStore.readRecoveryRecord(); - - return { - rolloutPath: options.rolloutPath, - written, - excludePath: - options.scope === "project" - ? 
null - : options.runtime.sessionContinuityStore.getLocalIgnorePath(), - summary: generation.summary, - diagnostics: generation.diagnostics, - latestContinuityAuditEntry: recentContinuityAuditPreviewEntries[0] ?? null, - recentContinuityAuditEntries: recentContinuityAuditPreviewEntries.slice( - 0, - recentContinuityAuditLimit - ), - pendingContinuityRecovery: pendingContinuityRecoveryRecord - ? normalizeContinuityRecoveryRecord(pendingContinuityRecoveryRecord) - : null, - continuityAuditPath: options.runtime.sessionContinuityStore.paths.auditFile, - continuityRecoveryPath: options.runtime.sessionContinuityStore.getRecoveryPath() - }; -} - export async function runSession( action: SessionAction, options: SessionOptions = {} @@ -323,6 +102,7 @@ export async function runSession( kind: options.rollout ? "explicit-rollout" : "latest-primary-rollout", rolloutPath: options.rollout ?? (await findLatestProjectRollout(runtime.project)) ?? "" }; + if (!rolloutSelection.rolloutPath) { throw new Error("No relevant rollout found for this project."); } @@ -336,44 +116,10 @@ export async function runSession( }); if (options.json) { - return JSON.stringify( - { - ...(action === "refresh" - ? { - action: "refresh", - writeMode: "replace", - rolloutSelection - } - : {}), - rolloutPath: persisted.rolloutPath, - written: persisted.written, - excludePath: persisted.excludePath, - summary: persisted.summary, - diagnostics: persisted.diagnostics, - latestContinuityAuditEntry: persisted.latestContinuityAuditEntry, - recentContinuityAuditEntries: persisted.recentContinuityAuditEntries, - continuityAuditPath: persisted.continuityAuditPath, - pendingContinuityRecovery: persisted.pendingContinuityRecovery, - continuityRecoveryPath: persisted.continuityRecoveryPath - }, - null, - 2 - ); + return buildPersistedSessionJson(action, persisted, rolloutSelection); } - return [ - action === "refresh" - ? 
`Refreshed session continuity from ${persisted.rolloutPath}` - : `Saved session continuity from ${persisted.rolloutPath}`, - ...(action === "refresh" - ? [`Selection: ${rolloutSelection.kind} | write mode: replace`] - : []), - formatSessionContinuityDiagnostics(persisted.diagnostics), - ...(persisted.latestContinuityAuditEntry - ? formatSessionContinuityAuditDrillDown(persisted.latestContinuityAuditEntry) - : []), - ...(persisted.excludePath ? [`Local exclude updated: ${persisted.excludePath}`] : []) - ].join("\n"); + return formatPersistedSessionText(action, persisted, rolloutSelection); } if (action === "clear") { @@ -398,271 +144,19 @@ export async function runSession( ].join("\n"); } - const projectLocation = await runtime.sessionContinuityStore.getLocation("project"); - const localLocation = await runtime.sessionContinuityStore.getLocation("project-local"); - const projectState = await runtime.sessionContinuityStore.readState("project"); - const localState = await runtime.sessionContinuityStore.readState("project-local"); - const recentContinuityAuditPreviewEntries = - await runtime.sessionContinuityStore.readRecentAuditEntries(recentContinuityPreviewReadLimit); - const recentContinuityAuditEntries = recentContinuityAuditPreviewEntries.slice( - 0, - recentContinuityAuditLimit - ); - const latestContinuityAuditEntry = recentContinuityAuditPreviewEntries[0] ?? null; - const latestContinuityDiagnostics = latestContinuityAuditEntry - ? toSessionContinuityDiagnostics(latestContinuityAuditEntry) - : null; - const mergedState = - (await runtime.sessionContinuityStore.readMergedState()) ?? - createEmptySessionContinuityState( - "project-local", - runtime.project.projectId, - runtime.project.worktreeId - ); - const pendingContinuityRecoveryRecord = await runtime.sessionContinuityStore.readRecoveryRecord(); - const pendingContinuityRecovery = pendingContinuityRecoveryRecord - ? 
normalizeContinuityRecoveryRecord(pendingContinuityRecoveryRecord) - : null; - const startup = compileSessionContinuity( - mergedState, - existingContinuitySourceFiles(projectLocation, localLocation), - runtime.loadedConfig.config.maxSessionContinuityLines - ); + const view = await loadSessionInspectionView(runtime); if (action === "load") { if (options.json) { - return JSON.stringify( - { - projectLocation, - localLocation, - projectState, - localState, - mergedState, - startup, - latestContinuityAuditEntry, - latestContinuityDiagnostics, - recentContinuityAuditEntries, - continuityAuditPath: runtime.sessionContinuityStore.paths.auditFile, - pendingContinuityRecovery, - continuityRecoveryPath: runtime.sessionContinuityStore.getRecoveryPath() - }, - null, - 2 - ); - } - - const lines = [ - "Session Continuity", - `Project continuity: ${projectLocation.exists ? "active" : "missing"} (${projectLocation.path})`, - `Project-local continuity: ${localLocation.exists ? "active" : "missing"} (${localLocation.path})`, - `Latest generation: ${latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(latestContinuityDiagnostics) : "none recorded yet"}`, - ...(latestContinuityAuditEntry ? [`Latest rollout: ${latestContinuityAuditEntry.rolloutPath}`] : []), - `Continuity audit: ${runtime.sessionContinuityStore.paths.auditFile}`, - "Merged resume brief combines shared continuity with any project-local overrides.", - "Recent prior generations below are compact audit previews, not startup-injected history.", - ...(latestContinuityAuditEntry - ? formatSessionContinuityAuditDrillDown(latestContinuityAuditEntry) - : []), - ...(pendingContinuityRecovery - ? 
formatPendingContinuityRecovery( - pendingContinuityRecovery, - runtime.sessionContinuityStore.getRecoveryPath() - ) - : []), - "Recent prior generations:", - ...formatRecentGenerationLines(recentContinuityAuditPreviewEntries), - "", - "Shared project continuity:", - `Goal: ${projectState?.goal || "No active goal recorded."}`, - "", - "Confirmed working:", - ...(projectState?.confirmedWorking.length - ? projectState.confirmedWorking.map((item) => `- ${item}`) - : ["- Nothing confirmed yet."]), - "", - "Tried and failed:", - ...(projectState?.triedAndFailed.length - ? projectState.triedAndFailed.map((item) => `- ${item}`) - : ["- No failed approaches recorded."]), - "", - "Not yet tried:", - ...(projectState?.notYetTried.length - ? projectState.notYetTried.map((item) => `- ${item}`) - : ["- No untried approaches recorded."]), - "", - "Files / decisions / environment:", - ...(projectState?.filesDecisionsEnvironment.length - ? projectState.filesDecisionsEnvironment.map((item) => `- ${item}`) - : ["- No additional file, decision, or environment notes."]), - "", - "Project-local continuity:", - `Goal: ${localState?.goal || "No active goal recorded."}`, - "", - "Confirmed working:", - ...(localState?.confirmedWorking.length - ? localState.confirmedWorking.map((item) => `- ${item}`) - : ["- Nothing confirmed yet."]), - "", - "Tried and failed:", - ...(localState?.triedAndFailed.length - ? localState.triedAndFailed.map((item) => `- ${item}`) - : ["- No failed approaches recorded."]), - "", - "Incomplete / next:", - ...(localState?.incompleteNext.length - ? localState.incompleteNext.map((item) => `- ${item}`) - : ["- No next step recorded."]) - ]; - - lines.push( - "", - "Project-local not yet tried:", - ...(localState?.notYetTried.length - ? localState.notYetTried.map((item) => `- ${item}`) - : ["- No untried local approaches recorded."]), - "", - "Project-local files / decisions / environment:", - ...(localState?.filesDecisionsEnvironment.length - ? 
localState.filesDecisionsEnvironment.map((item) => `- ${item}`) - : ["- No additional local file, decision, or environment notes."]), - "", - "Effective merged resume brief:", - `Goal: ${mergedState.goal || "No active goal recorded."}`, - "Confirmed working:", - ...(mergedState.confirmedWorking.length > 0 - ? mergedState.confirmedWorking.map((item) => `- ${item}`) - : ["- Nothing confirmed yet."]), - "Tried and failed:", - ...(mergedState.triedAndFailed.length > 0 - ? mergedState.triedAndFailed.map((item) => `- ${item}`) - : ["- No failed approaches recorded."]), - "Not yet tried:", - ...(mergedState.notYetTried.length > 0 - ? mergedState.notYetTried.map((item) => `- ${item}`) - : ["- No untried approaches recorded."]), - "Incomplete / next:", - ...(mergedState.incompleteNext.length > 0 - ? mergedState.incompleteNext.map((item) => `- ${item}`) - : ["- No next step recorded."]), - "Files / decisions / environment:", - ...(mergedState.filesDecisionsEnvironment.length > 0 - ? mergedState.filesDecisionsEnvironment.map((item) => `- ${item}`) - : ["- No additional file, decision, or environment notes."]) - ); - - if (options.printStartup) { - lines.push("", "Startup continuity:", startup.text.trimEnd()); + return buildSessionLoadJson(view); } - return lines.join("\n"); + return formatSessionLoadText(view, options.printStartup); } if (options.json) { - return JSON.stringify( - { - autoLoad: runtime.loadedConfig.config.sessionContinuityAutoLoad, - autoSave: runtime.loadedConfig.config.sessionContinuityAutoSave, - localPathStyle: runtime.loadedConfig.config.sessionContinuityLocalPathStyle, - maxLines: runtime.loadedConfig.config.maxSessionContinuityLines, - projectLocation, - localLocation, - projectState, - localState, - mergedState, - latestContinuityAuditEntry, - latestContinuityDiagnostics, - recentContinuityAuditEntries, - continuityAuditPath: runtime.sessionContinuityStore.paths.auditFile, - pendingContinuityRecovery, - continuityRecoveryPath: 
runtime.sessionContinuityStore.getRecoveryPath() - }, - null, - 2 - ); + return buildSessionStatusJson(view); } - return [ - "Codex Auto Memory Session Continuity", - `Auto-load: ${runtime.loadedConfig.config.sessionContinuityAutoLoad}`, - `Auto-save: ${runtime.loadedConfig.config.sessionContinuityAutoSave}`, - `Local path style: ${runtime.loadedConfig.config.sessionContinuityLocalPathStyle}`, - `Shared continuity: ${projectLocation.exists ? "active" : "missing"} (${projectLocation.path})`, - `Project-local continuity: ${localLocation.exists ? "active" : "missing"} (${localLocation.path})`, - `Latest generation: ${latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(latestContinuityDiagnostics) : "none recorded yet"}`, - ...(latestContinuityAuditEntry ? [`Latest rollout: ${latestContinuityAuditEntry.rolloutPath}`] : []), - `Continuity audit: ${runtime.sessionContinuityStore.paths.auditFile}`, - "Merged resume brief combines shared continuity with any project-local overrides.", - "Recent prior generations below are compact audit previews, not startup-injected history.", - ...(latestContinuityAuditEntry - ? formatSessionContinuityAuditDrillDown(latestContinuityAuditEntry) - : []), - ...(pendingContinuityRecovery - ? formatPendingContinuityRecovery( - pendingContinuityRecovery, - runtime.sessionContinuityStore.getRecoveryPath() - ) - : []), - "Recent prior generations:", - ...formatRecentGenerationLines(recentContinuityAuditPreviewEntries), - "", - `Shared updated at: ${projectState?.updatedAt ?? "n/a"}`, - `Project-local updated at: ${localState?.updatedAt ?? 
"n/a"}`, - `Merged continuity layers: ${[projectState, localState].filter(Boolean).length}`, - `Startup continuity line budget: ${runtime.loadedConfig.config.maxSessionContinuityLines}` - ].join("\n"); -} - -async function writeContinuityRecoveryRecordBestEffort( - runtime: SessionRuntime, - diagnostics: Parameters[0]["diagnostics"], - scope: SessionContinuityScope | "both", - writtenPaths: string[], - failureMessage: string, - trigger?: SessionContinuityAuditTrigger, - writeMode?: SessionContinuityWriteMode -): Promise { - try { - await runtime.sessionContinuityStore.writeRecoveryRecord( - buildContinuityRecoveryRecord({ - projectId: runtime.project.projectId, - worktreeId: runtime.project.worktreeId, - diagnostics, - trigger, - writeMode, - scope, - writtenPaths, - failedStage: "audit-write", - failureMessage - }) - ); - } catch { - // Best-effort marker persistence should not overwrite the original failure. - } -} - -async function clearContinuityRecoveryRecordBestEffort( - runtime: SessionRuntime, - diagnostics: Parameters[0]["diagnostics"], - scope: SessionContinuityScope | "both" -): Promise { - try { - const record = await runtime.sessionContinuityStore.readRecoveryRecord(); - if (!record) { - return; - } - if ( - !matchesContinuityRecoveryRecord(record, { - projectId: runtime.project.projectId, - worktreeId: runtime.project.worktreeId, - rolloutPath: diagnostics.rolloutPath, - sourceSessionId: diagnostics.sourceSessionId, - scope - }) - ) { - return; - } - await runtime.sessionContinuityStore.clearRecoveryRecord(); - } catch { - // Best-effort cleanup should not fail an otherwise successful save. 
- } + return formatSessionStatusText(view); } diff --git a/src/lib/commands/sync.ts b/src/lib/commands/sync.ts index bc547eb..b37d79b 100644 --- a/src/lib/commands/sync.ts +++ b/src/lib/commands/sync.ts @@ -1,4 +1,4 @@ -import { buildRuntimeContext } from "./common.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; import { listRolloutFiles, matchesProjectContext, parseRolloutEvidence } from "../domain/rollout.js"; interface SyncOptions { @@ -33,4 +33,3 @@ export async function runSync(options: SyncOptions = {}): Promise { const result = await runtime.syncService.syncRollout(rolloutPath, options.force); return result.message; } - diff --git a/src/lib/commands/wrapper.ts b/src/lib/commands/wrapper.ts index 2a7325a..8e3cd5b 100644 --- a/src/lib/commands/wrapper.ts +++ b/src/lib/commands/wrapper.ts @@ -1,28 +1,21 @@ import { compileStartupMemory } from "../domain/startup-memory.js"; +import { formatSessionContinuityDiagnostics } from "../domain/session-continuity-diagnostics.js"; +import { persistSessionContinuity } from "../domain/session-continuity-persistence.js"; import { - buildSessionContinuityAuditEntry, - formatSessionContinuityDiagnostics -} from "../domain/session-continuity-diagnostics.js"; -import { - buildContinuityRecoveryRecord, - matchesContinuityRecoveryRecord -} from "../domain/recovery-records.js"; -import { listRolloutFiles, parseRolloutEvidence, readRolloutMeta } from "../domain/rollout.js"; + listRolloutFiles, + parseRolloutEvidence, + readRolloutMeta +} from "../domain/rollout.js"; import { compileSessionContinuity } from "../domain/session-continuity.js"; import { readCodexBaseInstructions } from "../runtime/codex-config.js"; +import { buildRuntimeContext } from "../runtime/runtime-context.js"; import { runCommand } from "../util/process.js"; -import { buildRuntimeContext } from "./common.js"; import { RolloutSessionSource } from "../runtime/rollout-session-source.js"; import { WrapperRuntimeInjector } from 
"../runtime/wrapper-injector.js"; -import { SessionContinuitySummarizer } from "../extractor/session-continuity-summarizer.js"; const sessionSource = new RolloutSessionSource(); const runtimeInjector = new WrapperRuntimeInjector(); -function errorMessage(error: unknown): string { - return error instanceof Error ? error.message : String(error); -} - async function selectLatestPrimaryRollout(candidates: string[]): Promise { const metas = await Promise.all(candidates.map((candidate) => readRolloutMeta(candidate))); for (let index = candidates.length - 1; index >= 0; index -= 1) { @@ -112,73 +105,23 @@ async function saveSessionContinuity( return null; } - const evidence = await parseRolloutEvidence(rolloutPath); - if (!evidence) { + if (!(await parseRolloutEvidence(rolloutPath))) { return null; } - const existing = { - project: await runtime.sessionContinuityStore.readState("project"), - projectLocal: await runtime.sessionContinuityStore.readState("project-local") - }; - const summarizer = new SessionContinuitySummarizer(runtime.loadedConfig.config); - const generation = await summarizer.summarizeWithDiagnostics(evidence, existing); - const written = await runtime.sessionContinuityStore.saveSummary(generation.summary, "both"); - const auditEntry = buildSessionContinuityAuditEntry( - runtime.project, - runtime.loadedConfig.config, - generation.diagnostics, - written, - "both", - { - trigger: "wrapper-auto-save", - writeMode: "merge" - } - ); - try { - await runtime.sessionContinuityStore.appendAuditLog(auditEntry); - } catch (error) { - try { - await runtime.sessionContinuityStore.writeRecoveryRecord( - buildContinuityRecoveryRecord({ - projectId: runtime.project.projectId, - worktreeId: runtime.project.worktreeId, - diagnostics: generation.diagnostics, - trigger: "wrapper-auto-save", - writeMode: "merge", - scope: "both", - writtenPaths: written, - failedStage: "audit-write", - failureMessage: errorMessage(error) - }) - ); - } catch { - // Best-effort marker 
persistence should not overwrite the original failure. - } - throw error; - } - try { - const record = await runtime.sessionContinuityStore.readRecoveryRecord(); - if ( - record && - matchesContinuityRecoveryRecord(record, { - projectId: runtime.project.projectId, - worktreeId: runtime.project.worktreeId, - rolloutPath: generation.diagnostics.rolloutPath, - sourceSessionId: generation.diagnostics.sourceSessionId, - scope: "both" - }) - ) { - await runtime.sessionContinuityStore.clearRecoveryRecord(); - } - } catch { - // Best-effort cleanup should not fail an otherwise successful auto-save. - } - return written.length > 0 + const persisted = await persistSessionContinuity({ + runtime, + rolloutPath, + scope: "both", + trigger: "wrapper-auto-save", + writeMode: "merge" + }); + + return persisted.written.length > 0 ? [ `Updated session continuity from ${rolloutPath}:`, - formatSessionContinuityDiagnostics(generation.diagnostics), - ...written.map((filePath) => `- ${filePath}`) + formatSessionContinuityDiagnostics(persisted.diagnostics), + ...persisted.written.map((filePath) => `- ${filePath}`) ].join("\n") : null; } @@ -234,7 +177,7 @@ export async function runWrappedCodex( if (syncError && continuityError) { throw new AggregateError( [syncError, continuityError], - `Post-run persistence failed: durable sync: ${errorMessage(syncError)}; continuity: ${errorMessage(continuityError)}` + `Post-run persistence failed: durable sync: ${syncError instanceof Error ? syncError.message : String(syncError)}; continuity: ${continuityError instanceof Error ? 
continuityError.message : String(continuityError)}` ); } if (syncError) { diff --git a/src/lib/domain/session-continuity-persistence.ts b/src/lib/domain/session-continuity-persistence.ts new file mode 100644 index 0000000..5c99c1d --- /dev/null +++ b/src/lib/domain/session-continuity-persistence.ts @@ -0,0 +1,195 @@ +import { + buildSessionContinuityAuditEntry, + normalizeContinuityRecoveryRecord +} from "./session-continuity-diagnostics.js"; +import { + buildContinuityRecoveryRecord, + matchesContinuityRecoveryRecord +} from "./recovery-records.js"; +import { parseRolloutEvidence } from "./rollout.js"; +import { SessionContinuitySummarizer } from "../extractor/session-continuity-summarizer.js"; +import type { RuntimeContext } from "../runtime/runtime-context.js"; +import type { + ContinuityRecoveryRecord, + SessionContinuityAuditEntry, + SessionContinuityAuditTrigger, + SessionContinuitySummary, + SessionContinuityWriteMode, + SessionContinuityScope, + SessionContinuityDiagnostics +} from "../types.js"; + +const defaultRecentContinuityAuditLimit = 5; +const defaultRecentContinuityPreviewReadLimit = 10; + +export interface PersistSessionContinuityOptions { + runtime: RuntimeContext; + rolloutPath: string; + scope: SessionContinuityScope | "both"; + trigger: SessionContinuityAuditTrigger; + writeMode: SessionContinuityWriteMode; + recentAuditLimit?: number; + recentAuditPreviewReadLimit?: number; +} + +export interface PersistSessionContinuityResult { + rolloutPath: string; + written: string[]; + excludePath: string | null; + summary: SessionContinuitySummary; + diagnostics: SessionContinuityDiagnostics; + latestContinuityAuditEntry: SessionContinuityAuditEntry | null; + recentContinuityAuditEntries: SessionContinuityAuditEntry[]; + pendingContinuityRecovery: ContinuityRecoveryRecord | null; + continuityAuditPath: string; + continuityRecoveryPath: string; +} + +function errorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); +} + +async function writeContinuityRecoveryRecordBestEffort( + runtime: RuntimeContext, + diagnostics: SessionContinuityDiagnostics, + scope: SessionContinuityScope | "both", + writtenPaths: string[], + failureMessage: string, + trigger: SessionContinuityAuditTrigger, + writeMode: SessionContinuityWriteMode +): Promise { + try { + await runtime.sessionContinuityStore.writeRecoveryRecord( + buildContinuityRecoveryRecord({ + projectId: runtime.project.projectId, + worktreeId: runtime.project.worktreeId, + diagnostics, + trigger, + writeMode, + scope, + writtenPaths, + failedStage: "audit-write", + failureMessage + }) + ); + } catch { + // Best-effort marker persistence should not overwrite the original failure. + } +} + +async function clearContinuityRecoveryRecordBestEffort( + runtime: RuntimeContext, + diagnostics: SessionContinuityDiagnostics, + scope: SessionContinuityScope | "both" +): Promise { + try { + const record = await runtime.sessionContinuityStore.readRecoveryRecord(); + if (!record) { + return; + } + + if ( + !matchesContinuityRecoveryRecord(record, { + projectId: runtime.project.projectId, + worktreeId: runtime.project.worktreeId, + rolloutPath: diagnostics.rolloutPath, + sourceSessionId: diagnostics.sourceSessionId, + scope + }) + ) { + return; + } + + await runtime.sessionContinuityStore.clearRecoveryRecord(); + } catch { + // Best-effort cleanup should not fail an otherwise successful save. + } +} + +export async function persistSessionContinuity( + options: PersistSessionContinuityOptions +): Promise { + const parsedEvidence = await parseRolloutEvidence(options.rolloutPath); + if (!parsedEvidence) { + throw new Error(`Could not parse rollout evidence from ${options.rolloutPath}.`); + } + + const existing = + options.writeMode === "merge" + ? 
{ + project: await options.runtime.sessionContinuityStore.readState("project"), + projectLocal: await options.runtime.sessionContinuityStore.readState("project-local") + } + : undefined; + + const summarizer = new SessionContinuitySummarizer(options.runtime.loadedConfig.config); + const generation = await summarizer.summarizeWithDiagnostics(parsedEvidence, existing); + const written = + options.writeMode === "replace" + ? await options.runtime.sessionContinuityStore.replaceSummary( + generation.summary, + options.scope + ) + : await options.runtime.sessionContinuityStore.saveSummary( + generation.summary, + options.scope + ); + const auditEntry = buildSessionContinuityAuditEntry( + options.runtime.project, + options.runtime.loadedConfig.config, + generation.diagnostics, + written, + options.scope, + { + trigger: options.trigger, + writeMode: options.writeMode + } + ); + + try { + await options.runtime.sessionContinuityStore.appendAuditLog(auditEntry); + } catch (error) { + await writeContinuityRecoveryRecordBestEffort( + options.runtime, + generation.diagnostics, + options.scope, + written, + errorMessage(error), + options.trigger, + options.writeMode + ); + throw error; + } + + await clearContinuityRecoveryRecordBestEffort( + options.runtime, + generation.diagnostics, + options.scope + ); + + const recentAuditPreviewReadLimit = + options.recentAuditPreviewReadLimit ?? defaultRecentContinuityPreviewReadLimit; + const recentAuditLimit = options.recentAuditLimit ?? defaultRecentContinuityAuditLimit; + const recentContinuityAuditPreviewEntries = + await options.runtime.sessionContinuityStore.readRecentAuditEntries(recentAuditPreviewReadLimit); + const pendingContinuityRecoveryRecord = + await options.runtime.sessionContinuityStore.readRecoveryRecord(); + + return { + rolloutPath: options.rolloutPath, + written, + excludePath: + options.scope === "project" + ? 
null + : options.runtime.sessionContinuityStore.getLocalIgnorePath(), + summary: generation.summary, + diagnostics: generation.diagnostics, + latestContinuityAuditEntry: recentContinuityAuditPreviewEntries[0] ?? null, + recentContinuityAuditEntries: recentContinuityAuditPreviewEntries.slice(0, recentAuditLimit), + pendingContinuityRecovery: pendingContinuityRecoveryRecord + ? normalizeContinuityRecoveryRecord(pendingContinuityRecoveryRecord) + : null, + continuityAuditPath: options.runtime.sessionContinuityStore.paths.auditFile, + continuityRecoveryPath: options.runtime.sessionContinuityStore.getRecoveryPath() + }; +} diff --git a/src/lib/commands/common.ts b/src/lib/runtime/runtime-context.ts similarity index 58% rename from src/lib/commands/common.ts rename to src/lib/runtime/runtime-context.ts index f5019fb..e78b7a2 100644 --- a/src/lib/commands/common.ts +++ b/src/lib/runtime/runtime-context.ts @@ -1,9 +1,14 @@ -import { APP_NAME } from "../constants.js"; import { loadConfig } from "../config/load-config.js"; +import { patchConfigFile } from "../config/write-config.js"; import { detectProjectContext } from "../domain/project-context.js"; -import { SyncService } from "../domain/sync-service.js"; import { SessionContinuityStore } from "../domain/session-continuity-store.js"; -import type { AppConfig, LoadedConfig, ProjectContext } from "../types.js"; +import { SyncService } from "../domain/sync-service.js"; +import type { + AppConfig, + ConfigScope, + LoadedConfig, + ProjectContext +} from "../types.js"; export interface RuntimeContext { project: ProjectContext; @@ -12,6 +17,11 @@ export interface RuntimeContext { sessionContinuityStore: SessionContinuityStore; } +export interface ReloadedRuntimeContext { + runtime: RuntimeContext; + configUpdatePath: string; +} + export async function buildRuntimeContext( cwd = process.cwd(), overrides: Partial = {} @@ -21,6 +31,7 @@ export async function buildRuntimeContext( const syncService = new SyncService(project, 
loadedConfig.config); const sessionContinuityStore = new SessionContinuityStore(project, loadedConfig.config); await syncService.memoryStore.ensureLayout(); + return { project, loadedConfig, @@ -29,6 +40,20 @@ export async function buildRuntimeContext( }; } -export function formatWarnings(warnings: string[]): string[] { - return warnings.map((warning) => `${APP_NAME} warning: ${warning}`); +export async function patchConfigAndReloadRuntime( + cwd: string, + configScope: ConfigScope, + updates: Partial +): Promise { + const initialRuntime = await buildRuntimeContext(cwd); + const configUpdatePath = await patchConfigFile( + initialRuntime.project.projectRoot, + configScope, + updates + ); + + return { + runtime: await buildRuntimeContext(cwd), + configUpdatePath + }; } diff --git a/test/helpers/session-test-support.ts b/test/helpers/session-test-support.ts new file mode 100644 index 0000000..e096c84 --- /dev/null +++ b/test/helpers/session-test-support.ts @@ -0,0 +1,75 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; + +export async function createTempDir( + tempDirs: string[], + prefix: string +): Promise { + const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); + tempDirs.push(dir); + return dir; +} + +export async function cleanupTempDirs(tempDirs: string[]): Promise { + await Promise.all( + tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })) + ); +} + +export function makeEvidenceCounts(successfulCommands = 1): { + successfulCommands: number; + failedCommands: number; + fileWrites: number; + nextSteps: number; + untried: number; +} { + return { + successfulCommands, + failedCommands: 0, + fileWrites: 0, + nextSteps: 1, + untried: 0 + }; +} + +export async function writeWrapperMockCodex( + repoDir: string, + sessionsDir: string, + options: { + sessionId: string; + message: string; + callOutput?: string; + } +): Promise<{ capturedArgsPath: string; mockCodexPath: string }> { + const 
capturedArgsPath = path.join(repoDir, "captured-args.json"); + const mockCodexPath = path.join(repoDir, "mock-codex"); + const todayDir = path.join(sessionsDir, "2026", "03", "15"); + await fs.mkdir(todayDir, { recursive: true }); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +const path = require("node:path"); +const cwd = process.cwd(); +const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; +fs.writeFileSync(path.join(cwd, "captured-args.json"), JSON.stringify(process.argv.slice(2), null, 2)); +const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); +fs.mkdirSync(rolloutDir, { recursive: true }); +const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); +fs.writeFileSync(rolloutPath, [ + JSON.stringify({ type: "session_meta", payload: { id: ${JSON.stringify(options.sessionId)}, timestamp: "2026-03-15T00:00:00.000Z", cwd } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: ${JSON.stringify(options.message)} } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: ${JSON.stringify(options.callOutput ?? 
"Process exited with code 0")} } }) +].join("\\n")); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + return { + capturedArgsPath, + mockCodexPath + }; +} diff --git a/test/session-command.test.ts b/test/session-command.test.ts index 1a97965..eabacac 100644 --- a/test/session-command.test.ts +++ b/test/session-command.test.ts @@ -1,12 +1,9 @@ import fs from "node:fs/promises"; -import os from "node:os"; import path from "node:path"; import { afterEach, describe, expect, it, vi } from "vitest"; import { runSession } from "../src/lib/commands/session.js"; -import { runWrappedCodex } from "../src/lib/commands/wrapper.js"; import { detectProjectContext } from "../src/lib/domain/project-context.js"; import { SessionContinuityStore } from "../src/lib/domain/session-continuity-store.js"; -import { SyncService } from "../src/lib/domain/sync-service.js"; import type { SessionContinuityAuditEntry } from "../src/lib/types.js"; import { initGitRepo, @@ -15,14 +12,17 @@ import { writeCamConfig } from "./helpers/cam-test-fixtures.js"; import { runCli } from "./helpers/cli-runner.js"; +import { + cleanupTempDirs, + createTempDir, + makeEvidenceCounts +} from "./helpers/session-test-support.js"; const tempDirs: string[] = []; const originalSessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; async function tempDir(prefix: string): Promise { - const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); - tempDirs.push(dir); - return dir; + return createTempDir(tempDirs, prefix); } const initRepo = initGitRepo; @@ -31,60 +31,9 @@ const configJson = makeAppConfig; const writeProjectConfig = writeCamConfig; const rolloutFixture = makeRolloutFixture; -function makeEvidenceCounts(successfulCommands = 1) { - return { - successfulCommands, - failedCommands: 0, - fileWrites: 0, - nextSteps: 1, - untried: 0 - }; -} - -async function writeWrapperMockCodex( - repoDir: string, - sessionsDir: string, - options: { - sessionId: string; - message: string; - callOutput?: string; - } -): 
Promise<{ capturedArgsPath: string; mockCodexPath: string }> { - const capturedArgsPath = path.join(repoDir, "captured-args.json"); - const mockCodexPath = path.join(repoDir, "mock-codex"); - const todayDir = path.join(sessionsDir, "2026", "03", "15"); - await fs.mkdir(todayDir, { recursive: true }); - await fs.writeFile( - mockCodexPath, - `#!/usr/bin/env node -const fs = require("node:fs"); -const path = require("node:path"); -const cwd = process.cwd(); -const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -fs.writeFileSync(path.join(cwd, "captured-args.json"), JSON.stringify(process.argv.slice(2), null, 2)); -const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); -fs.mkdirSync(rolloutDir, { recursive: true }); -const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); -fs.writeFileSync(rolloutPath, [ - JSON.stringify({ type: "session_meta", payload: { id: ${JSON.stringify(options.sessionId)}, timestamp: "2026-03-15T00:00:00.000Z", cwd } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: ${JSON.stringify(options.message)} } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: ${JSON.stringify(options.callOutput ?? 
"Process exited with code 0")} } }) -].join("\\n")); -`, - "utf8" - ); - await fs.chmod(mockCodexPath, 0o755); - - return { - capturedArgsPath, - mockCodexPath - }; -} - afterEach(async () => { process.env.CAM_CODEX_SESSIONS_DIR = originalSessionsDir; - await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); + await cleanupTempDirs(tempDirs); }); describe("runSession", () => { @@ -1904,533 +1853,3 @@ describe("runSession", () => { expect(statusOutput).not.toContain("Pending continuity recovery:"); }, 30_000); }); - -describe("runWrappedCodex with session continuity", () => { - it("does not inject or auto-save continuity when both wrapper flags are disabled", async () => { - const repoDir = await tempDir("cam-wrapper-no-continuity-repo-"); - const memoryRoot = await tempDir("cam-wrapper-no-continuity-memory-"); - const sessionsDir = await tempDir("cam-wrapper-no-continuity-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { - sessionId: "session-wrapper-no-continuity", - message: "Continue without continuity automation." 
- }); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: false - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: false - } - ); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); - expect(baseInstructionsArg).toContain("# Codex Auto Memory"); - expect(baseInstructionsArg).not.toContain("# Session Continuity"); - - const store = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: false - }), - autoMemoryDirectory: memoryRoot - }); - expect(await store.readLatestAuditEntry()).toBeNull(); - expect(await store.readMergedState()).toBeNull(); - expect(await store.readRecoveryRecord()).toBeNull(); - }, 30_000); - - it("injects continuity without auto-saving when autoLoad is enabled and autoSave is disabled", async () => { - const repoDir = await tempDir("cam-wrapper-load-only-repo-"); - const memoryRoot = await tempDir("cam-wrapper-load-only-memory-"); - const sessionsDir = await tempDir("cam-wrapper-load-only-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { - sessionId: "session-wrapper-load-only", - message: "Continue but do not auto-save continuity." 
- }); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - } - ); - - const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - }), - autoMemoryDirectory: memoryRoot - }); - await continuityStore.saveSummary( - { - project: { - goal: "Seeded continuity goal.", - confirmedWorking: ["Seeded continuity still exists."], - triedAndFailed: [], - notYetTried: [], - incompleteNext: [], - filesDecisionsEnvironment: [] - }, - projectLocal: { - goal: "", - confirmedWorking: [], - triedAndFailed: [], - notYetTried: [], - incompleteNext: ["Seeded local next step."], - filesDecisionsEnvironment: [] - } - }, - "both" - ); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); - expect(baseInstructionsArg).toContain("# Session Continuity"); - expect(baseInstructionsArg).toContain("Seeded continuity goal."); - - const merged = await continuityStore.readMergedState(); - expect(merged?.goal).toBe("Seeded continuity goal."); - expect(merged?.goal).not.toContain("do not auto-save continuity"); - expect(await continuityStore.readLatestAuditEntry()).toBeNull(); - expect(await continuityStore.readRecoveryRecord()).toBeNull(); - }, 30_000); - - it("injects only continuity source files that actually exist", async () => { - const repoDir = await tempDir("cam-wrapper-existing-sources-repo-"); - const memoryRoot = await tempDir("cam-wrapper-existing-sources-memory-"); - const sessionsDir = await 
tempDir("cam-wrapper-existing-sources-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { - sessionId: "session-wrapper-existing-sources", - message: "Continue with shared-only continuity." - }); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - } - ); - - const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: false - }), - autoMemoryDirectory: memoryRoot - }); - await continuityStore.saveSummary( - { - project: { - goal: "Shared-only continuity goal.", - confirmedWorking: ["Shared-only continuity exists."], - triedAndFailed: [], - notYetTried: [], - incompleteNext: [], - filesDecisionsEnvironment: [] - }, - projectLocal: { - goal: "", - confirmedWorking: [], - triedAndFailed: [], - notYetTried: [], - incompleteNext: [], - filesDecisionsEnvironment: [] - } - }, - "project" - ); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); - expect(baseInstructionsArg).toContain(continuityStore.paths.sharedFile); - expect(baseInstructionsArg).not.toContain(continuityStore.paths.localFile); - }, 30_000); - - it("auto-saves continuity without injecting it when autoLoad is disabled and autoSave is enabled", async () => { - const repoDir = await tempDir("cam-wrapper-save-only-repo-"); - const memoryRoot = await 
tempDir("cam-wrapper-save-only-memory-"); - const sessionsDir = await tempDir("cam-wrapper-save-only-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { - sessionId: "session-wrapper-save-only", - message: "Continue with save-only continuity handling." - }); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - } - ); - - const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - }), - autoMemoryDirectory: memoryRoot - }); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); - expect(baseInstructionsArg).toContain("# Codex Auto Memory"); - expect(baseInstructionsArg).not.toContain("# Session Continuity"); - - const latestAudit = await continuityStore.readLatestAuditEntry(); - expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); - const merged = await continuityStore.readMergedState(); - expect(merged?.goal).toContain("Continue with save-only continuity handling"); - }, 30_000); - - it("prefers the primary rollout over a newer subagent rollout during wrapper auto-save", async () => { - const repoDir = await tempDir("cam-wrapper-primary-rollout-repo-"); - const memoryRoot = await tempDir("cam-wrapper-primary-rollout-memory-"); - const sessionsDir = await 
tempDir("cam-wrapper-primary-rollout-sessions-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const mockCodexPath = path.join(repoDir, "mock-codex"); - const todayDir = path.join(sessionsDir, "2026", "03", "15"); - await fs.mkdir(todayDir, { recursive: true }); - await fs.writeFile( - mockCodexPath, - `#!/usr/bin/env node -const fs = require("node:fs"); -const path = require("node:path"); -const cwd = process.cwd(); -const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); -fs.mkdirSync(rolloutDir, { recursive: true }); -const primaryPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); -const subagentPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-01-000Z-subagent.jsonl"); -fs.writeFileSync(primaryPath, [ - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:00.000Z", cwd, source: "cli" } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue the primary wrapper continuity path." 
} }), - JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) -].join("\\n")); -fs.writeFileSync(subagentPath, [ - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-subagent", forked_from_id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:01.000Z", cwd, source: { subagent: { thread_spawn: { parent_thread_id: "session-wrapper-primary" } } } } }), - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:00.000Z", cwd, source: "cli" } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "You are reviewer sub-agent 4. Work read-only. Focus on docs and contract surfaces only." } }) -].join("\\n")); -`, - "utf8" - ); - await fs.chmod(mockCodexPath, 0o755); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - } - ); - - const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: false, - sessionContinuityAutoSave: true - }), - autoMemoryDirectory: memoryRoot - }); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const latestAudit = await continuityStore.readLatestAuditEntry(); - const merged = await continuityStore.readMergedState(); - - expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); - expect(latestAudit?.sourceSessionId).toBe("session-wrapper-primary"); - 
expect(merged?.goal).toContain("primary wrapper continuity path"); - expect(merged?.goal).not.toContain("reviewer sub-agent"); - expect(merged?.incompleteNext.join("\n")).not.toContain("reviewer sub-agent"); - }, 30_000); - - it("injects continuity on startup and auto-saves it after the run", async () => { - const repoDir = await tempDir("cam-wrapper-session-repo-"); - const memoryRoot = await tempDir("cam-wrapper-session-memory-"); - const sessionsDir = await tempDir("cam-wrapper-session-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const capturedArgsPath = path.join(repoDir, "captured-args.json"); - const mockCodexPath = path.join(repoDir, "mock-codex"); - const todayDir = path.join(sessionsDir, "2026", "03", "15"); - await fs.mkdir(todayDir, { recursive: true }); - await fs.writeFile( - mockCodexPath, - `#!/usr/bin/env node -const fs = require("node:fs"); -const path = require("node:path"); -const cwd = process.cwd(); -const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -fs.writeFileSync(path.join(cwd, "captured-args.json"), JSON.stringify(process.argv.slice(2), null, 2)); -const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); -fs.mkdirSync(rolloutDir, { recursive: true }); -const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); -fs.writeFileSync(rolloutPath, [ - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue the wrapper continuity migration." 
} }), - JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) -].join("\\n")); -`, - "utf8" - ); - await fs.chmod(mockCodexPath, 0o755); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: true - } - ); - - const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoLoad: true, - sessionContinuityAutoSave: true - }), - autoMemoryDirectory: memoryRoot - }); - await continuityStore.saveSummary( - { - project: { - goal: "Resume the wrapper test continuity.", - confirmedWorking: ["Previous startup block exists."], - triedAndFailed: [], - notYetTried: [], - incompleteNext: [], - filesDecisionsEnvironment: [] - }, - projectLocal: { - goal: "", - confirmedWorking: [], - triedAndFailed: [], - notYetTried: [], - incompleteNext: ["Run wrapper auto-save."], - filesDecisionsEnvironment: [] - } - }, - "both" - ); - - const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); - expect(exitCode).toBe(0); - - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); - expect(baseInstructionsArg).toContain("# Session Continuity"); - expect(baseInstructionsArg).toContain("# Codex Auto Memory"); - - const merged = await continuityStore.readMergedState(); - expect(merged?.goal).toContain("Continue the wrapper continuity migration"); - expect(merged?.confirmedWorking.join("\n")).toContain("pnpm test"); - - const latestAudit = await 
continuityStore.readLatestAuditEntry(); - expect(latestAudit?.actualPath).toBe("heuristic"); - expect(latestAudit?.fallbackReason).toBe("configured-heuristic"); - expect(latestAudit?.writtenPaths.length).toBeGreaterThan(0); - }, 30_000); - - it("writes a continuity recovery marker when wrapper auto-save cannot append audit", async () => { - const repoDir = await tempDir("cam-wrapper-recovery-repo-"); - const memoryRoot = await tempDir("cam-wrapper-recovery-memory-"); - const sessionsDir = await tempDir("cam-wrapper-recovery-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const mockCodexPath = path.join(repoDir, "mock-codex"); - const todayDir = path.join(sessionsDir, "2026", "03", "15"); - await fs.mkdir(todayDir, { recursive: true }); - await fs.writeFile( - mockCodexPath, - `#!/usr/bin/env node -const fs = require("node:fs"); -const path = require("node:path"); -const cwd = process.cwd(); -const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); -fs.mkdirSync(rolloutDir, { recursive: true }); -const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); -fs.writeFileSync(rolloutPath, [ - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-recovery", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue wrapper recovery handling." 
} }), - JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) -].join("\\n")); -`, - "utf8" - ); - await fs.chmod(mockCodexPath, 0o755); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoSave: true - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoSave: true - } - ); - - const appendAuditSpy = vi - .spyOn(SessionContinuityStore.prototype, "appendAuditLog") - .mockRejectedValueOnce(new Error("wrapper continuity audit write failed")); - - await expect(runWrappedCodex(repoDir, "exec", ["continue"])).rejects.toThrow( - "wrapper continuity audit write failed" - ); - appendAuditSpy.mockRestore(); - - const store = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoSave: true - }), - autoMemoryDirectory: memoryRoot - }); - expect(await store.readRecoveryRecord()).toMatchObject({ - failedStage: "audit-write", - failureMessage: "wrapper continuity audit write failed", - scope: "both" - }); - }, 30_000); - - it("still saves continuity when wrapper durable sync fails", async () => { - const repoDir = await tempDir("cam-wrapper-sync-fail-repo-"); - const memoryRoot = await tempDir("cam-wrapper-sync-fail-memory-"); - const sessionsDir = await tempDir("cam-wrapper-sync-fail-rollouts-"); - await initRepo(repoDir); - process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; - - const mockCodexPath = path.join(repoDir, "mock-codex"); - const todayDir = path.join(sessionsDir, "2026", "03", "15"); - await fs.mkdir(todayDir, { recursive: true }); - await fs.writeFile( - mockCodexPath, - `#!/usr/bin/env node -const fs = require("node:fs"); -const path = require("node:path"); -const cwd = 
process.cwd(); -const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; -const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); -fs.mkdirSync(rolloutDir, { recursive: true }); -const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); -fs.writeFileSync(rolloutPath, [ - JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-sync-fail", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), - JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue after durable sync sidecar failure." } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), - JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) -].join("\\n")); -`, - "utf8" - ); - await fs.chmod(mockCodexPath, 0o755); - - await writeProjectConfig( - repoDir, - configJson({ - codexBinary: mockCodexPath - }), - { - autoMemoryDirectory: memoryRoot, - sessionContinuityAutoSave: true - } - ); - - const syncSpy = vi - .spyOn(SyncService.prototype, "syncRollout") - .mockRejectedValueOnce(new Error("sync audit write failed")); - - await expect(runWrappedCodex(repoDir, "exec", ["continue"])).rejects.toThrow( - "sync audit write failed" - ); - syncSpy.mockRestore(); - - const store = new SessionContinuityStore(detectProjectContext(repoDir), { - ...configJson({ - codexBinary: mockCodexPath, - sessionContinuityAutoSave: true - }), - autoMemoryDirectory: memoryRoot - }); - const latestAudit = await store.readLatestAuditEntry(); - expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); - expect(latestAudit?.writtenPaths.length).toBeGreaterThan(0); - expect((await store.readMergedState())?.confirmedWorking.join("\n")).toContain("pnpm test"); - }, 30_000); -}); diff --git a/test/wrapper-session-continuity.test.ts 
b/test/wrapper-session-continuity.test.ts new file mode 100644 index 0000000..c288c4d --- /dev/null +++ b/test/wrapper-session-continuity.test.ts @@ -0,0 +1,563 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { afterEach, describe, expect, it, vi } from "vitest"; +import { runWrappedCodex } from "../src/lib/commands/wrapper.js"; +import { detectProjectContext } from "../src/lib/domain/project-context.js"; +import { SessionContinuityStore } from "../src/lib/domain/session-continuity-store.js"; +import { SyncService } from "../src/lib/domain/sync-service.js"; +import { + initGitRepo, + makeAppConfig, + writeCamConfig +} from "./helpers/cam-test-fixtures.js"; +import { + cleanupTempDirs, + createTempDir, + writeWrapperMockCodex +} from "./helpers/session-test-support.js"; + +const tempDirs: string[] = []; +const originalSessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; + +async function tempDir(prefix: string): Promise { + return createTempDir(tempDirs, prefix); +} + +const initRepo = initGitRepo; +const configJson = makeAppConfig; +const writeProjectConfig = writeCamConfig; + +afterEach(async () => { + process.env.CAM_CODEX_SESSIONS_DIR = originalSessionsDir; + await cleanupTempDirs(tempDirs); +}); + +describe("runWrappedCodex with session continuity", () => { + it("does not inject or auto-save continuity when both wrapper flags are disabled", async () => { + const repoDir = await tempDir("cam-wrapper-no-continuity-repo-"); + const memoryRoot = await tempDir("cam-wrapper-no-continuity-memory-"); + const sessionsDir = await tempDir("cam-wrapper-no-continuity-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { + sessionId: "session-wrapper-no-continuity", + message: "Continue without continuity automation." 
+ }); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: false + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: false + } + ); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); + expect(baseInstructionsArg).toContain("# Codex Auto Memory"); + expect(baseInstructionsArg).not.toContain("# Session Continuity"); + + const store = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: false + }), + autoMemoryDirectory: memoryRoot + }); + expect(await store.readLatestAuditEntry()).toBeNull(); + expect(await store.readMergedState()).toBeNull(); + expect(await store.readRecoveryRecord()).toBeNull(); + }, 30_000); + + it("injects continuity without auto-saving when autoLoad is enabled and autoSave is disabled", async () => { + const repoDir = await tempDir("cam-wrapper-load-only-repo-"); + const memoryRoot = await tempDir("cam-wrapper-load-only-memory-"); + const sessionsDir = await tempDir("cam-wrapper-load-only-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { + sessionId: "session-wrapper-load-only", + message: "Continue but do not auto-save continuity." 
+ }); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + } + ); + + const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + }), + autoMemoryDirectory: memoryRoot + }); + await continuityStore.saveSummary( + { + project: { + goal: "Seeded continuity goal.", + confirmedWorking: ["Seeded continuity still exists."], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + }, + projectLocal: { + goal: "", + confirmedWorking: [], + triedAndFailed: [], + notYetTried: [], + incompleteNext: ["Seeded local next step."], + filesDecisionsEnvironment: [] + } + }, + "both" + ); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); + expect(baseInstructionsArg).toContain("# Session Continuity"); + expect(baseInstructionsArg).toContain("Seeded continuity goal."); + + const merged = await continuityStore.readMergedState(); + expect(merged?.goal).toBe("Seeded continuity goal."); + expect(merged?.goal).not.toContain("do not auto-save continuity"); + expect(await continuityStore.readLatestAuditEntry()).toBeNull(); + expect(await continuityStore.readRecoveryRecord()).toBeNull(); + }, 30_000); + + it("injects only continuity source files that actually exist", async () => { + const repoDir = await tempDir("cam-wrapper-existing-sources-repo-"); + const memoryRoot = await tempDir("cam-wrapper-existing-sources-memory-"); + const sessionsDir = await 
tempDir("cam-wrapper-existing-sources-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { + sessionId: "session-wrapper-existing-sources", + message: "Continue with shared-only continuity." + }); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + } + ); + + const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: false + }), + autoMemoryDirectory: memoryRoot + }); + await continuityStore.saveSummary( + { + project: { + goal: "Shared-only continuity goal.", + confirmedWorking: ["Shared-only continuity exists."], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + }, + projectLocal: { + goal: "", + confirmedWorking: [], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + } + }, + "project" + ); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); + expect(baseInstructionsArg).toContain(continuityStore.paths.sharedFile); + expect(baseInstructionsArg).not.toContain(continuityStore.paths.localFile); + }, 30_000); + + it("auto-saves continuity without injecting it when autoLoad is disabled and autoSave is enabled", async () => { + const repoDir = await tempDir("cam-wrapper-save-only-repo-"); + const memoryRoot = await 
tempDir("cam-wrapper-save-only-memory-"); + const sessionsDir = await tempDir("cam-wrapper-save-only-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const { capturedArgsPath, mockCodexPath } = await writeWrapperMockCodex(repoDir, sessionsDir, { + sessionId: "session-wrapper-save-only", + message: "Continue with save-only continuity handling." + }); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + } + ); + + const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); + expect(baseInstructionsArg).toContain("# Codex Auto Memory"); + expect(baseInstructionsArg).not.toContain("# Session Continuity"); + + const latestAudit = await continuityStore.readLatestAuditEntry(); + expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); + const merged = await continuityStore.readMergedState(); + expect(merged?.goal).toContain("Continue with save-only continuity handling"); + }, 30_000); + + it("prefers the primary rollout over a newer subagent rollout during wrapper auto-save", async () => { + const repoDir = await tempDir("cam-wrapper-primary-rollout-repo-"); + const memoryRoot = await tempDir("cam-wrapper-primary-rollout-memory-"); + const sessionsDir = await 
tempDir("cam-wrapper-primary-rollout-sessions-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const mockCodexPath = path.join(repoDir, "mock-codex"); + const todayDir = path.join(sessionsDir, "2026", "03", "15"); + await fs.mkdir(todayDir, { recursive: true }); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +const path = require("node:path"); +const cwd = process.cwd(); +const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; +const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); +fs.mkdirSync(rolloutDir, { recursive: true }); +const primaryPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); +const subagentPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-01-000Z-subagent.jsonl"); +fs.writeFileSync(primaryPath, [ + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:00.000Z", cwd, source: "cli" } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue the primary wrapper continuity path." 
} }), + JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) +].join("\\n")); +fs.writeFileSync(subagentPath, [ + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-subagent", forked_from_id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:01.000Z", cwd, source: { subagent: { thread_spawn: { parent_thread_id: "session-wrapper-primary" } } } } }), + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-primary", timestamp: "2026-03-15T00:00:00.000Z", cwd, source: "cli" } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "You are reviewer sub-agent 4. Work read-only. Focus on docs and contract surfaces only." } }) +].join("\\n")); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + } + ); + + const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const latestAudit = await continuityStore.readLatestAuditEntry(); + const merged = await continuityStore.readMergedState(); + + expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); + expect(latestAudit?.sourceSessionId).toBe("session-wrapper-primary"); + 
expect(merged?.goal).toContain("primary wrapper continuity path"); + expect(merged?.goal).not.toContain("reviewer sub-agent"); + expect(merged?.incompleteNext.join("\n")).not.toContain("reviewer sub-agent"); + }, 30_000); + + it("injects continuity on startup and auto-saves it after the run", async () => { + const repoDir = await tempDir("cam-wrapper-session-repo-"); + const memoryRoot = await tempDir("cam-wrapper-session-memory-"); + const sessionsDir = await tempDir("cam-wrapper-session-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const capturedArgsPath = path.join(repoDir, "captured-args.json"); + const mockCodexPath = path.join(repoDir, "mock-codex"); + const todayDir = path.join(sessionsDir, "2026", "03", "15"); + await fs.mkdir(todayDir, { recursive: true }); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +const path = require("node:path"); +const cwd = process.cwd(); +const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; +fs.writeFileSync(path.join(cwd, "captured-args.json"), JSON.stringify(process.argv.slice(2), null, 2)); +const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); +fs.mkdirSync(rolloutDir, { recursive: true }); +const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); +fs.writeFileSync(rolloutPath, [ + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue the wrapper continuity migration." 
} }), + JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) +].join("\\n")); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: true + } + ); + + const continuityStore = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: true, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + await continuityStore.saveSummary( + { + project: { + goal: "Resume the wrapper test continuity.", + confirmedWorking: ["Previous startup block exists."], + triedAndFailed: [], + notYetTried: [], + incompleteNext: [], + filesDecisionsEnvironment: [] + }, + projectLocal: { + goal: "", + confirmedWorking: [], + triedAndFailed: [], + notYetTried: [], + incompleteNext: ["Run wrapper auto-save."], + filesDecisionsEnvironment: [] + } + }, + "both" + ); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; + const baseInstructionsArg = capturedArgs.find((arg) => arg.startsWith("base_instructions=")); + expect(baseInstructionsArg).toContain("# Session Continuity"); + expect(baseInstructionsArg).toContain("# Codex Auto Memory"); + + const merged = await continuityStore.readMergedState(); + expect(merged?.goal).toContain("Continue the wrapper continuity migration"); + expect(merged?.confirmedWorking.join("\n")).toContain("pnpm test"); + + const latestAudit = await 
continuityStore.readLatestAuditEntry(); + expect(latestAudit?.actualPath).toBe("heuristic"); + expect(latestAudit?.fallbackReason).toBe("configured-heuristic"); + expect(latestAudit?.writtenPaths.length).toBeGreaterThan(0); + }, 30_000); + + it("writes a continuity recovery marker when wrapper auto-save cannot append audit", async () => { + const repoDir = await tempDir("cam-wrapper-recovery-repo-"); + const memoryRoot = await tempDir("cam-wrapper-recovery-memory-"); + const sessionsDir = await tempDir("cam-wrapper-recovery-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const mockCodexPath = path.join(repoDir, "mock-codex"); + const todayDir = path.join(sessionsDir, "2026", "03", "15"); + await fs.mkdir(todayDir, { recursive: true }); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +const path = require("node:path"); +const cwd = process.cwd(); +const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; +const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); +fs.mkdirSync(rolloutDir, { recursive: true }); +const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); +fs.writeFileSync(rolloutPath, [ + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-recovery", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue wrapper recovery handling." 
} }), + JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) +].join("\\n")); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoSave: true + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoSave: true + } + ); + + const appendAuditSpy = vi + .spyOn(SessionContinuityStore.prototype, "appendAuditLog") + .mockRejectedValueOnce(new Error("wrapper continuity audit write failed")); + + await expect(runWrappedCodex(repoDir, "exec", ["continue"])).rejects.toThrow( + "wrapper continuity audit write failed" + ); + appendAuditSpy.mockRestore(); + + const store = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + expect(await store.readRecoveryRecord()).toMatchObject({ + failedStage: "audit-write", + failureMessage: "wrapper continuity audit write failed", + scope: "both" + }); + }, 30_000); + + it("still saves continuity when wrapper durable sync fails", async () => { + const repoDir = await tempDir("cam-wrapper-sync-fail-repo-"); + const memoryRoot = await tempDir("cam-wrapper-sync-fail-memory-"); + const sessionsDir = await tempDir("cam-wrapper-sync-fail-rollouts-"); + await initRepo(repoDir); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + + const mockCodexPath = path.join(repoDir, "mock-codex"); + const todayDir = path.join(sessionsDir, "2026", "03", "15"); + await fs.mkdir(todayDir, { recursive: true }); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +const fs = require("node:fs"); +const path = require("node:path"); +const cwd = 
process.cwd(); +const sessionsDir = process.env.CAM_CODEX_SESSIONS_DIR; +const rolloutDir = path.join(sessionsDir, "2026", "03", "15"); +fs.mkdirSync(rolloutDir, { recursive: true }); +const rolloutPath = path.join(rolloutDir, "rollout-2026-03-15T00-00-00-000Z-session.jsonl"); +fs.writeFileSync(rolloutPath, [ + JSON.stringify({ type: "session_meta", payload: { id: "session-wrapper-sync-fail", timestamp: "2026-03-15T00:00:00.000Z", cwd } }), + JSON.stringify({ type: "event_msg", payload: { type: "user_message", message: "Continue after durable sync sidecar failure." } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call", name: "exec_command", call_id: "call-1", arguments: "{\\"cmd\\":\\"pnpm test\\"}" } }), + JSON.stringify({ type: "response_item", payload: { type: "function_call_output", call_id: "call-1", output: "Process exited with code 0" } }) +].join("\\n")); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoSave: true + } + ); + + const syncSpy = vi + .spyOn(SyncService.prototype, "syncRollout") + .mockRejectedValueOnce(new Error("sync audit write failed")); + + await expect(runWrappedCodex(repoDir, "exec", ["continue"])).rejects.toThrow( + "sync audit write failed" + ); + syncSpy.mockRestore(); + + const store = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + const latestAudit = await store.readLatestAuditEntry(); + expect(latestAudit?.rolloutPath).toContain("rollout-2026-03-15T00-00-00-000Z-session.jsonl"); + expect(latestAudit?.writtenPaths.length).toBeGreaterThan(0); + expect((await store.readMergedState())?.confirmedWorking.join("\n")).toContain("pnpm test"); + }, 30_000); +}); From c69d1378a8bdfd6f7063c046cb829fdfefcaf169 Mon Sep 
17 00:00:00 2001 From: blocks Date: Fri, 20 Mar 2026 23:40:53 +0800 Subject: [PATCH 4/9] fix: tighten release verification and continuity contracts --- .github/workflows/ci.yml | 3 + CONTRIBUTING.md | 5 + README.en.md | 3 +- README.md | 3 +- docs/architecture.en.md | 6 +- docs/architecture.md | 6 +- docs/release-checklist.md | 12 +- package.json | 3 +- src/lib/cli/register-commands.ts | 81 +-- src/lib/commands/session-presenters.ts | 131 +++-- src/lib/commands/session.ts | 58 ++- .../domain/session-continuity-persistence.ts | 4 +- test/docs-contract.test.ts | 12 + .../session-continuity-conflict-heavy.jsonl | 15 + test/helpers/session-test-support.ts | 29 ++ test/memory-command.test.ts | 24 +- test/session-command.test.ts | 464 +++++++++++++++++- test/tarball-install-smoke.test.ts | 94 ++++ 18 files changed, 816 insertions(+), 137 deletions(-) create mode 100644 test/fixtures/rollouts/session-continuity-conflict-heavy.jsonl create mode 100644 test/tarball-install-smoke.test.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fd41073..a285e26 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,3 +48,6 @@ jobs: - name: Pack Check run: pnpm pack:check + + - name: Tarball Install Smoke + run: pnpm test:tarball-install-smoke diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 706ff86..c86359f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,6 +27,7 @@ pnpm test:cli-smoke pnpm test pnpm build pnpm test:dist-cli-smoke +pnpm test:tarball-install-smoke ``` Use Node 20+ and `pnpm`. @@ -39,6 +40,7 @@ Use Node 20+ and `pnpm`. - Update docs whenever behavior, config, or file layout changes. - Include screenshots or terminal output only when it helps explain the UX. - If you touch release-facing CLI behavior, validate `node dist/cli.js` or `pnpm test:dist-cli-smoke`. +- If you touch packaging, release verification, or install-time CLI behavior, also validate `pnpm test:tarball-install-smoke`. 
## Current maintainer focus @@ -56,10 +58,13 @@ Use Node 20+ and `pnpm`. - Keep reviewer-only warnings and confidence prose in audit/reviewer surfaces; they should not become continuity body content. - Keep `src/cli.ts` narrow. New commands should be registered through `src/lib/cli/register-commands.ts` instead of expanding the main entrypoint again. - Keep runtime composition in `src/lib/runtime/runtime-context.ts`; command files should depend on that runtime surface instead of rebuilding their own composition helpers. +- Keep `src/lib/commands/session.ts` thin. Provenance selection and action dispatch belong there; reviewer-facing text/json assembly belongs in `src/lib/commands/session-presenters.ts`. +- Keep shared continuity persistence in `src/lib/domain/session-continuity-persistence.ts` so session commands and wrapper auto-save do not drift into separate persistence code paths. - When touching continuity persistence, preserve the current contract split: - `cam session save` = `merge` - `cam session refresh` = `replace` - wrapper auto-save = `merge` +- Do not hard-merge the rollout selection rules for `cam session refresh` and wrapper auto-save. They intentionally share persistence semantics, not identical provenance selection. - If you split tests, keep `runSession` and wrapper continuity coverage in separate files and share helpers from `test/helpers/` rather than re-inlining temp-dir or mock-wrapper setup. 
## Documentation Guidelines diff --git a/README.en.md b/README.en.md index aa022fe..d4ff836 100644 --- a/README.en.md +++ b/README.en.md @@ -274,7 +274,7 @@ Current public-ready status: - topic-aware startup lookup: available - session continuity companion layer: available - reviewer audit surfaces: available -- tagged GitHub Releases: available with tarball artifacts; npm publish remains manual +- tagged GitHub Releases: the release workflow is defined with tarball artifacts as the target; before pushing the first real tag, confirm that the default branch exposes and activates that workflow; npm publish remains manual - native memory / native hooks primary path: not enabled and not trusted as the main implementation path ## Roadmap @@ -292,6 +292,7 @@ Current public-ready status: - stronger contradiction handling - clearer `cam memory` and `cam session` reviewer UX - tighter continuity diagnostics and reviewer packets, with explicit confidence and warning surfaces +- tighter release-facing verification through tarball install smoke so the `.tgz`-installed `cam` bin shim is exercised directly - keep a compatibility seam for future hook surfaces ### v0.3+ diff --git a/README.md b/README.md index 4e85177..fdabe4f 100644 --- a/README.md +++ b/README.md @@ -274,7 +274,7 @@ Session continuity: - topic-aware startup lookup:可用 - session continuity companion layer:可用 - reviewer audit surfaces:可用 -- tagged GitHub Releases:可用,提供 tarball artifact;npm publish 仍保持手动流程 +- tagged GitHub Releases:release workflow 已定义并以 tarball artifact 为目标;推送首个真实 tag 前,应先确认默认分支上的该 workflow 已激活且可观测,npm publish 继续保持手动流程 - native memory / native hooks primary path:未启用,仍非 trusted implementation path ## 路线图 @@ -292,6 +292,7 @@ Session continuity: - 更稳的 contradiction handling - 更清晰的 `cam memory` / `cam session` 审查 UX - continuity diagnostics 与 reviewer packet 继续收紧信息层次,并显式暴露 confidence / warnings +- release-facing 验证继续收紧到 tarball install smoke,确保 `.tgz` 安装后的 `cam` bin shim 可直接工作 - 继续保留对未来 hook surface 的 
compatibility seam ### v0.3+ diff --git a/docs/architecture.en.md b/docs/architecture.en.md index d046e79..e6d1609 100644 --- a/docs/architecture.en.md +++ b/docs/architecture.en.md @@ -18,8 +18,10 @@ The implementation also now follows an intentionally narrow code layout: - `src/cli.ts`: wrapper fast path, version wiring, and Commander bootstrap only - `src/lib/cli/register-commands.ts`: centralized command registration -- `src/lib/runtime/runtime-context.ts`: runtime composition and config-patch reload -- `src/lib/commands/*`: command orchestration and reviewer-facing text/json surfaces +- `src/lib/runtime/runtime-context.ts`: runtime composition, config-patch reload, and the shared reload helper used after memory enable/disable patches +- `src/lib/commands/session.ts`: provenance selection and action dispatch only +- `src/lib/commands/session-presenters.ts`: centralized text/json reviewer surfaces for `cam session` +- `src/lib/domain/session-continuity-persistence.ts`: shared continuity persistence spine used by both session commands and the wrapper flow - `src/lib/domain/*`: core memory, continuity, audit, and rollout behavior - `src/lib/util/*`: utility layer diff --git a/docs/architecture.md b/docs/architecture.md index fd49525..44d9909 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -18,8 +18,10 @@ - `src/cli.ts`:只负责 wrapper fast path、版本与 Commander 启动 - `src/lib/cli/register-commands.ts`:集中做命令注册 -- `src/lib/runtime/runtime-context.ts`:集中做 runtime composition 与 config patch 后的 reload -- `src/lib/commands/*`:命令编排与 reviewer-facing text/json surface +- `src/lib/runtime/runtime-context.ts`:集中做 runtime composition、config patch 后的 reload,以及 memory enable/disable 的统一 reload helper +- `src/lib/commands/session.ts`:只保留 provenance 选择与 action dispatch +- `src/lib/commands/session-presenters.ts`:集中组装 `cam session` 的 text/json reviewer surface +- `src/lib/domain/session-continuity-persistence.ts`:承载 session / wrapper 共享的 continuity persistence 主干 - 
`src/lib/domain/*`:memory / continuity / audit / rollout 的核心语义与存储行为 - `src/lib/util/*`:纯工具层 diff --git a/docs/release-checklist.md b/docs/release-checklist.md index ef0ac22..d21263d 100644 --- a/docs/release-checklist.md +++ b/docs/release-checklist.md @@ -18,16 +18,21 @@ Use this checklist before cutting any alpha or beta release of `codex-auto-memor ## Code and runtime checks +- Prefer `pnpm verify:release` as the canonical full milestone check; run the individual commands below when you need to isolate a failure. - Run `pnpm lint` - Run `pnpm test:docs-contract` - Run `pnpm test:reviewer-smoke` - Run `pnpm test:cli-smoke` - Run `pnpm test:dist-cli-smoke` +- Run `pnpm test:tarball-install-smoke` - Run `pnpm test` - Run `pnpm build` - Run `pnpm pack:check` +- Confirm `pnpm build` still starts from a clean `dist/` directory so `npm pack` cannot accidentally pick up stale compiled artifacts from an older tree shape. +- If you add new generated outputs beyond `dist/`, keep their cleanup path aligned with the build and pack workflow instead of letting release tarballs accumulate leftovers. - After `pnpm build`, prefer validating release-facing CLI behavior through `node dist/cli.js ...` rather than `tsx src/cli.ts`. - Run `node dist/cli.js --version` and confirm it matches `package.json`. +- Run `pnpm test:tarball-install-smoke` and confirm the packed `.tgz` installs cleanly, `./node_modules/.bin/cam --version` works, and at least one lightweight reviewer path such as `cam session status --json` succeeds from the installed package. - Run `node dist/cli.js audit` if you want the repository privacy scan; keep it as a manual release-time check instead of a CI gate. - Run `node dist/cli.js session refresh --json` and confirm `action`, `writeMode`, and `rolloutSelection` reflect the selected provenance. - Run `node dist/cli.js session load --json` and confirm older JSON consumers still receive the existing core fields. 
@@ -54,8 +59,8 @@ Use this checklist before cutting any alpha or beta release of `codex-auto-memor ## Native compatibility checks -- Run `cam doctor` and record the current `memories` / `codex_hooks` status. -- Run `pnpm exec tsx src/cli.ts audit` and record whether any medium/high findings remain. +- Run `node dist/cli.js doctor` and record the current `memories` / `codex_hooks` status. +- Run `node dist/cli.js audit` and record whether any medium/high findings remain. - Confirm that any native-facing code still preserves companion fallback. - Confirm that Markdown memory remains the user-facing source of truth. @@ -71,6 +76,7 @@ Do not tag a release unless: ## Release automation notes -- A pushed `v*` tag now runs the GitHub Release workflow. +- A pushed `v*` tag is intended to run the GitHub Release workflow. - The workflow verifies `GITHUB_REF_NAME === v${package.json.version}`, runs `pnpm verify:release`, and uploads the `npm pack` tarball to the GitHub Release. +- Before the first real tag validation, confirm that the remote default branch exposes `release.yml` in Actions and that the workflow is active. - npm publish remains manual until registry credentials and approval posture are intentionally wired. 
diff --git a/package.json b/package.json index bbe921c..9894276 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,9 @@ "test:dist-cli-smoke": "vitest run test/dist-cli-smoke.test.ts", "test:docs-contract": "vitest run test/docs-contract.test.ts", "test:reviewer-smoke": "vitest run test/docs-contract.test.ts test/memory-command.test.ts test/session-command.test.ts test/wrapper-session-continuity.test.ts test/session-continuity.test.ts", + "test:tarball-install-smoke": "vitest run test/tarball-install-smoke.test.ts", "test:watch": "vitest", - "verify:release": "pnpm lint && pnpm test:docs-contract && pnpm test:reviewer-smoke && pnpm test:cli-smoke && pnpm test && pnpm build && pnpm test:dist-cli-smoke && pnpm pack:check" + "verify:release": "pnpm lint && pnpm test:docs-contract && pnpm test:reviewer-smoke && pnpm test:cli-smoke && pnpm test && pnpm build && pnpm test:dist-cli-smoke && pnpm pack:check && pnpm test:tarball-install-smoke" }, "keywords": [ "codex", diff --git a/src/lib/cli/register-commands.ts b/src/lib/cli/register-commands.ts index 122cc95..e736879 100644 --- a/src/lib/cli/register-commands.ts +++ b/src/lib/cli/register-commands.ts @@ -19,56 +19,69 @@ function withStdout( }; } +function addJsonOption(command: Command): Command { + return command.option("--json", "Print JSON output"); +} + +function addSessionScopeOption(command: Command): Command { + return command.option( + "--scope ", + "Target continuity scope: project, project-local, or both", + "both" + ); +} + +function addSessionRolloutOption(command: Command): Command { + return command.option("--rollout ", "Specific rollout JSONL file to summarize"); +} + function registerSessionCommands(program: Command): void { const sessionCommand = program .command("session") .description("Manage temporary cross-session continuity state"); - sessionCommand - .command("status") - .description("Inspect current session continuity state") - .option("--json", "Print JSON output") + addJsonOption( + 
sessionCommand + .command("status") + .description("Inspect current session continuity state") + ) .action(withStdout(async (options) => runSession("status", options))); - sessionCommand - .command("save") - .description("Save temporary session continuity from a rollout") - .option("--json", "Print JSON output") - .option("--rollout ", "Specific rollout JSONL file to summarize") - .option( - "--scope ", - "Target continuity scope: project, project-local, or both", - "both" + addSessionScopeOption( + addSessionRolloutOption( + addJsonOption( + sessionCommand + .command("save") + .description("Save temporary session continuity from a rollout") + ) ) + ) .action(withStdout(async (options) => runSession("save", options))); - sessionCommand - .command("refresh") - .description("Regenerate session continuity from provenance and replace the selected scope") - .option("--json", "Print JSON output") - .option("--rollout ", "Specific rollout JSONL file to summarize") - .option( - "--scope ", - "Target continuity scope: project, project-local, or both", - "both" + addSessionScopeOption( + addSessionRolloutOption( + addJsonOption( + sessionCommand + .command("refresh") + .description("Regenerate session continuity from provenance and replace the selected scope") + ) ) + ) .action(withStdout(async (options) => runSession("refresh", options))); - sessionCommand - .command("load") - .description("Load current session continuity summary") - .option("--json", "Print JSON output") + addJsonOption( + sessionCommand + .command("load") + .description("Load current session continuity summary") + ) .option("--print-startup", "Print the compiled startup continuity block") .action(withStdout(async (options) => runSession("load", options))); - sessionCommand - .command("clear") - .description("Clear active session continuity state") - .option( - "--scope ", - "Target continuity scope: project, project-local, or both", - "both" - ) + addSessionScopeOption( + sessionCommand + 
.command("clear") + .description("Clear active session continuity state") + ) .action(withStdout(async (options) => runSession("clear", options))); sessionCommand diff --git a/src/lib/commands/session-presenters.ts b/src/lib/commands/session-presenters.ts index 642872d..9fa6aaf 100644 --- a/src/lib/commands/session-presenters.ts +++ b/src/lib/commands/session-presenters.ts @@ -11,6 +11,10 @@ import { normalizeSessionContinuityWriteMode, toSessionContinuityDiagnostics } from "../domain/session-continuity-diagnostics.js"; +import { + defaultRecentContinuityAuditLimit, + defaultRecentContinuityPreviewReadLimit +} from "../domain/session-continuity-persistence.js"; import type { PersistSessionContinuityResult } from "../domain/session-continuity-persistence.js"; import type { RuntimeContext } from "../runtime/runtime-context.js"; import type { @@ -23,8 +27,6 @@ import type { SessionContinuityWriteMode } from "../types.js"; -const recentContinuityAuditLimit = 5; -const recentContinuityPreviewReadLimit = 10; const recentContinuityPreviewGroupLimit = 3; interface RolloutSelectionSummary { @@ -163,6 +165,47 @@ function formatLayerSection( ]; } +function buildSessionInspectionPayload(view: SessionInspectionView): Record { + return { + projectLocation: view.projectLocation, + localLocation: view.localLocation, + projectState: view.projectState, + localState: view.localState, + mergedState: view.mergedState, + latestContinuityAuditEntry: view.latestContinuityAuditEntry, + latestContinuityDiagnostics: view.latestContinuityDiagnostics, + recentContinuityAuditEntries: view.recentContinuityAuditEntries, + continuityAuditPath: view.continuityAuditPath, + pendingContinuityRecovery: view.pendingContinuityRecovery, + continuityRecoveryPath: view.continuityRecoveryPath + }; +} + +function buildSessionOverviewLines( + view: SessionInspectionView, + headingLines: string[] +): string[] { + return [ + ...headingLines, + `Latest generation: ${view.latestContinuityDiagnostics ? 
formatSessionContinuityDiagnostics(view.latestContinuityDiagnostics) : "none recorded yet"}`, + ...(view.latestContinuityAuditEntry ? [`Latest rollout: ${view.latestContinuityAuditEntry.rolloutPath}`] : []), + `Continuity audit: ${view.continuityAuditPath}`, + "Merged resume brief combines shared continuity with any project-local overrides.", + "Recent prior generations below are compact audit previews, not startup-injected history.", + ...(view.latestContinuityAuditEntry + ? formatSessionContinuityAuditDrillDown(view.latestContinuityAuditEntry) + : []), + ...(view.pendingContinuityRecovery + ? formatPendingContinuityRecovery( + view.pendingContinuityRecovery, + view.continuityRecoveryPath + ) + : []), + "Recent prior generations:", + ...formatRecentGenerationLines(view.recentContinuityAuditPreviewEntries) + ]; +} + export async function loadSessionInspectionView( runtime: RuntimeContext ): Promise { @@ -171,7 +214,9 @@ export async function loadSessionInspectionView( const projectState = await runtime.sessionContinuityStore.readState("project"); const localState = await runtime.sessionContinuityStore.readState("project-local"); const recentContinuityAuditPreviewEntries = - await runtime.sessionContinuityStore.readRecentAuditEntries(recentContinuityPreviewReadLimit); + await runtime.sessionContinuityStore.readRecentAuditEntries( + defaultRecentContinuityPreviewReadLimit + ); const latestContinuityAuditEntry = recentContinuityAuditPreviewEntries[0] ?? null; const latestContinuityDiagnostics = latestContinuityAuditEntry ? 
toSessionContinuityDiagnostics(latestContinuityAuditEntry) @@ -209,7 +254,7 @@ export async function loadSessionInspectionView( latestContinuityDiagnostics, recentContinuityAuditEntries: recentContinuityAuditPreviewEntries.slice( 0, - recentContinuityAuditLimit + defaultRecentContinuityAuditLimit ), recentContinuityAuditPreviewEntries, continuityAuditPath: runtime.sessionContinuityStore.paths.auditFile, @@ -271,18 +316,8 @@ export function buildPersistedSessionJson( export function buildSessionLoadJson(view: SessionInspectionView): string { return JSON.stringify( { - projectLocation: view.projectLocation, - localLocation: view.localLocation, - projectState: view.projectState, - localState: view.localState, - mergedState: view.mergedState, + ...buildSessionInspectionPayload(view), startup: view.startup, - latestContinuityAuditEntry: view.latestContinuityAuditEntry, - latestContinuityDiagnostics: view.latestContinuityDiagnostics, - recentContinuityAuditEntries: view.recentContinuityAuditEntries, - continuityAuditPath: view.continuityAuditPath, - pendingContinuityRecovery: view.pendingContinuityRecovery, - continuityRecoveryPath: view.continuityRecoveryPath }, null, 2 @@ -294,25 +329,11 @@ export function formatSessionLoadText( printStartup = false ): string { const lines = [ - "Session Continuity", - `Project continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, - `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})`, - `Latest generation: ${view.latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(view.latestContinuityDiagnostics) : "none recorded yet"}`, - ...(view.latestContinuityAuditEntry ? 
[`Latest rollout: ${view.latestContinuityAuditEntry.rolloutPath}`] : []), - `Continuity audit: ${view.continuityAuditPath}`, - "Merged resume brief combines shared continuity with any project-local overrides.", - "Recent prior generations below are compact audit previews, not startup-injected history.", - ...(view.latestContinuityAuditEntry - ? formatSessionContinuityAuditDrillDown(view.latestContinuityAuditEntry) - : []), - ...(view.pendingContinuityRecovery - ? formatPendingContinuityRecovery( - view.pendingContinuityRecovery, - view.continuityRecoveryPath - ) - : []), - "Recent prior generations:", - ...formatRecentGenerationLines(view.recentContinuityAuditPreviewEntries), + ...buildSessionOverviewLines(view, [ + "Session Continuity", + `Project continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, + `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})` + ]), "", "Shared project continuity:", `Goal: ${view.projectState?.goal || "No active goal recorded."}`, @@ -387,17 +408,7 @@ export function buildSessionStatusJson(view: SessionInspectionView): string { autoSave: view.autoSave, localPathStyle: view.localPathStyle, maxLines: view.maxLines, - projectLocation: view.projectLocation, - localLocation: view.localLocation, - projectState: view.projectState, - localState: view.localState, - mergedState: view.mergedState, - latestContinuityAuditEntry: view.latestContinuityAuditEntry, - latestContinuityDiagnostics: view.latestContinuityDiagnostics, - recentContinuityAuditEntries: view.recentContinuityAuditEntries, - continuityAuditPath: view.continuityAuditPath, - pendingContinuityRecovery: view.pendingContinuityRecovery, - continuityRecoveryPath: view.continuityRecoveryPath + ...buildSessionInspectionPayload(view) }, null, 2 @@ -406,28 +417,14 @@ export function buildSessionStatusJson(view: SessionInspectionView): string { export function formatSessionStatusText(view: 
SessionInspectionView): string { return [ - "Codex Auto Memory Session Continuity", - `Auto-load: ${view.autoLoad}`, - `Auto-save: ${view.autoSave}`, - `Local path style: ${view.localPathStyle}`, - `Shared continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, - `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})`, - `Latest generation: ${view.latestContinuityDiagnostics ? formatSessionContinuityDiagnostics(view.latestContinuityDiagnostics) : "none recorded yet"}`, - ...(view.latestContinuityAuditEntry ? [`Latest rollout: ${view.latestContinuityAuditEntry.rolloutPath}`] : []), - `Continuity audit: ${view.continuityAuditPath}`, - "Merged resume brief combines shared continuity with any project-local overrides.", - "Recent prior generations below are compact audit previews, not startup-injected history.", - ...(view.latestContinuityAuditEntry - ? formatSessionContinuityAuditDrillDown(view.latestContinuityAuditEntry) - : []), - ...(view.pendingContinuityRecovery - ? formatPendingContinuityRecovery( - view.pendingContinuityRecovery, - view.continuityRecoveryPath - ) - : []), - "Recent prior generations:", - ...formatRecentGenerationLines(view.recentContinuityAuditPreviewEntries), + ...buildSessionOverviewLines(view, [ + "Codex Auto Memory Session Continuity", + `Auto-load: ${view.autoLoad}`, + `Auto-save: ${view.autoSave}`, + `Local path style: ${view.localPathStyle}`, + `Shared continuity: ${view.projectLocation.exists ? "active" : "missing"} (${view.projectLocation.path})`, + `Project-local continuity: ${view.localLocation.exists ? "active" : "missing"} (${view.localLocation.path})` + ]), "", `Shared updated at: ${view.projectState?.updatedAt ?? "n/a"}`, `Project-local updated at: ${view.localState?.updatedAt ?? 
"n/a"}`, diff --git a/src/lib/commands/session.ts b/src/lib/commands/session.ts index 94899a2..9a9ded2 100644 --- a/src/lib/commands/session.ts +++ b/src/lib/commands/session.ts @@ -34,6 +34,12 @@ interface SessionOptions { scope?: SessionContinuityScope | "both"; } +interface SessionPersistenceRequest { + rolloutSelection: RolloutSelection; + trigger: "manual-save" | "manual-refresh"; + writeMode: "merge" | "replace"; +} + function selectedScope(scope?: SessionContinuityScope | "both"): SessionContinuityScope | "both" { if (!scope) { return "both"; @@ -86,6 +92,31 @@ async function selectRefreshRollout( throw new Error("No relevant rollout found for this project."); } +async function prepareSessionPersistenceRequest( + runtime: RuntimeContext, + action: "save" | "refresh", + scope: SessionContinuityScope | "both", + explicitRollout?: string +): Promise { + const rolloutSelection: RolloutSelection = + action === "refresh" + ? await selectRefreshRollout(runtime, scope, explicitRollout) + : { + kind: explicitRollout ? "explicit-rollout" : "latest-primary-rollout", + rolloutPath: explicitRollout ?? (await findLatestProjectRollout(runtime.project)) ?? "" + }; + + if (!rolloutSelection.rolloutPath) { + throw new Error("No relevant rollout found for this project."); + } + + return { + rolloutSelection, + trigger: action === "refresh" ? "manual-refresh" : "manual-save", + writeMode: action === "refresh" ? "replace" : "merge" + }; +} + export async function runSession( action: SessionAction, options: SessionOptions = {} @@ -95,31 +126,26 @@ export async function runSession( const scope = selectedScope(options.scope); if (action === "save" || action === "refresh") { - const rolloutSelection = - action === "refresh" - ? await selectRefreshRollout(runtime, scope, options.rollout) - : { - kind: options.rollout ? "explicit-rollout" : "latest-primary-rollout", - rolloutPath: options.rollout ?? (await findLatestProjectRollout(runtime.project)) ?? 
"" - }; - - if (!rolloutSelection.rolloutPath) { - throw new Error("No relevant rollout found for this project."); - } + const persistenceRequest = await prepareSessionPersistenceRequest( + runtime, + action, + scope, + options.rollout + ); const persisted = await persistSessionContinuity({ runtime, - rolloutPath: rolloutSelection.rolloutPath, + rolloutPath: persistenceRequest.rolloutSelection.rolloutPath, scope, - trigger: action === "refresh" ? "manual-refresh" : "manual-save", - writeMode: action === "refresh" ? "replace" : "merge" + trigger: persistenceRequest.trigger, + writeMode: persistenceRequest.writeMode }); if (options.json) { - return buildPersistedSessionJson(action, persisted, rolloutSelection); + return buildPersistedSessionJson(action, persisted, persistenceRequest.rolloutSelection); } - return formatPersistedSessionText(action, persisted, rolloutSelection); + return formatPersistedSessionText(action, persisted, persistenceRequest.rolloutSelection); } if (action === "clear") { diff --git a/src/lib/domain/session-continuity-persistence.ts b/src/lib/domain/session-continuity-persistence.ts index 5c99c1d..d93c90b 100644 --- a/src/lib/domain/session-continuity-persistence.ts +++ b/src/lib/domain/session-continuity-persistence.ts @@ -19,8 +19,8 @@ import type { SessionContinuityDiagnostics } from "../types.js"; -const defaultRecentContinuityAuditLimit = 5; -const defaultRecentContinuityPreviewReadLimit = 10; +export const defaultRecentContinuityAuditLimit = 5; +export const defaultRecentContinuityPreviewReadLimit = 10; export interface PersistSessionContinuityOptions { runtime: RuntimeContext; diff --git a/test/docs-contract.test.ts b/test/docs-contract.test.ts index 17244cf..879612d 100644 --- a/test/docs-contract.test.ts +++ b/test/docs-contract.test.ts @@ -23,12 +23,15 @@ describe("docs contract", () => { expect(readme).toContain("cam session refresh"); expect(readme).toContain("reviewer warning prose"); expect(readme).toContain("tagged GitHub 
Releases"); + expect(readme).toContain("tarball install smoke"); expect(readmeEn).toContain("cam memory"); expect(readmeEn).toContain("cam session status"); expect(readmeEn).toContain("confidence"); expect(readmeEn).toContain("deterministic scrub"); expect(readmeEn).toContain("tagged GitHub Releases"); + expect(readmeEn).toContain("tarball install smoke"); expect(releaseChecklist).toContain("pnpm test:dist-cli-smoke"); + expect(releaseChecklist).toContain("pnpm test:tarball-install-smoke"); expect(releaseChecklist).toContain("node dist/cli.js --version"); expect(releaseChecklist).toContain("node dist/cli.js audit"); expect(releaseChecklist).toContain("pnpm test:docs-contract"); @@ -41,14 +44,23 @@ describe("docs contract", () => { expect(contributing).toContain("reviewer-only warnings"); expect(contributing).toContain("pnpm test:docs-contract"); expect(contributing).toContain("pnpm test:dist-cli-smoke"); + expect(contributing).toContain("pnpm test:tarball-install-smoke"); expect(packageJson.scripts["test:dist-cli-smoke"]).toBe("vitest run test/dist-cli-smoke.test.ts"); + expect(packageJson.scripts["test:tarball-install-smoke"]).toBe( + "vitest run test/tarball-install-smoke.test.ts" + ); expect(packageJson.scripts.prepack).toBe("pnpm build"); expect(packageJson.scripts["verify:release"]).toContain("pnpm test:dist-cli-smoke"); + expect(packageJson.scripts["verify:release"]).toContain("pnpm test:tarball-install-smoke"); expect(ciWorkflow).toContain("Dist CLI Smoke"); + expect(ciWorkflow).toContain("Tarball Install Smoke"); expect(releaseWorkflow).toContain("tags:"); expect(releaseWorkflow).toContain("v*"); expect(releaseWorkflow).toContain("pnpm verify:release"); expect(releaseWorkflow).toContain("gh release create"); + expect(releaseChecklist).toContain("default branch"); + expect(readme).toContain("确认默认分支上的该 workflow 已激活且可观测"); + expect(readmeEn).toContain("default branch exposes and activates that workflow"); }); it("keeps continuity, architecture, and migration 
wording aligned with the current product posture", async () => { diff --git a/test/fixtures/rollouts/session-continuity-conflict-heavy.jsonl b/test/fixtures/rollouts/session-continuity-conflict-heavy.jsonl new file mode 100644 index 0000000..3b257c6 --- /dev/null +++ b/test/fixtures/rollouts/session-continuity-conflict-heavy.jsonl @@ -0,0 +1,15 @@ +{"type":"session_meta","payload":{"id":"fixture-session-continuity-conflict-heavy","timestamp":"2026-03-20T00:00:00.000Z","cwd":"/tmp/project","source":"cli"}} +{"type":"event_msg","payload":{"type":"user_message","message":"We still have not tried switching the auth callback to cookies() yet."}} +{"type":"event_msg","payload":{"type":"agent_message","message":"You are reviewer sub-agent 2. Work read-only. Focus on docs and contract surfaces only."}} +{"type":"event_msg","payload":{"type":"user_message","message":"Next step: update src/auth/login.ts and src/auth/middleware.ts to set an httpOnly cookie and guard the redirect."}} +{"type":"event_msg","payload":{"type":"agent_message","message":"Use bun in this repo for faster installs."}} +{"type":"event_msg","payload":{"type":"user_message","message":"Actually use pnpm in this repo, not bun."}} +{"type":"event_msg","payload":{"type":"user_message","message":"Redis must be running before integration tests."}} +{"type":"event_msg","payload":{"type":"user_message","message":"We already confirmed pnpm test passes after the cookie change."}} +{"type":"response_item","payload":{"type":"function_call","name":"exec_command","call_id":"call-1","arguments":"{\\\"cmd\\\":\\\"pnpm test\\\"}"}} +{"type":"response_item","payload":{"type":"function_call_output","call_id":"call-1","output":"PASS auth cookie suite\\n0 failing\\nDone in 2.3s"}} +{"type":"response_item","payload":{"type":"function_call","name":"exec_command","call_id":"call-2","arguments":"{\\\"cmd\\\":\\\"pnpm build\\\"}"}} 
+{"type":"response_item","payload":{"type":"function_call_output","call_id":"call-2","output":"Error: missing NEXTAUTH_URL\\nProcess exited with code 1"}} +{"type":"response_item","payload":{"type":"function_call","name":"edit_file","call_id":"call-3","arguments":"{\\\"path\\\":\\\"src/auth/login.ts\\\"}"}} +{"type":"response_item","payload":{"type":"function_call","name":"apply_patch_freeform","call_id":"call-4","arguments":"diff --git a/src/auth/login.ts b/src/auth/login.ts\\nindex abc..def 100644\\n--- a/src/auth/login.ts\\n+++ b/src/auth/login.ts\\n@@ -1,3 +1,4 @@\\n+setCookie(token);\\n export {};"}} +{"type":"response_item","payload":{"type":"function_call","name":"apply_patch_freeform","call_id":"call-5","arguments":"diff --git a/src/auth/middleware.ts b/src/auth/middleware.ts\\nindex abc..def 100644\\n--- a/src/auth/middleware.ts\\n+++ b/src/auth/middleware.ts\\n@@ -1,3 +1,4 @@\\n+guardRedirect(request);\\n export {};"}} diff --git a/test/helpers/session-test-support.ts b/test/helpers/session-test-support.ts index e096c84..4e5a6f8 100644 --- a/test/helpers/session-test-support.ts +++ b/test/helpers/session-test-support.ts @@ -17,6 +17,15 @@ export async function cleanupTempDirs(tempDirs: string[]): Promise { ); } +export async function writeSessionRolloutFile( + rolloutPath: string, + contents: string +): Promise { + await fs.mkdir(path.dirname(rolloutPath), { recursive: true }); + await fs.writeFile(rolloutPath, contents, "utf8"); + return rolloutPath; +} + export function makeEvidenceCounts(successfulCommands = 1): { successfulCommands: number; failedCommands: number; @@ -73,3 +82,23 @@ fs.writeFileSync(rolloutPath, [ mockCodexPath }; } + +export async function writeMockCodexBinary( + tempRoot: string, + body: string +): Promise { + const mockBinary = path.join(tempRoot, "mock-codex"); + await fs.writeFile( + mockBinary, + `#!/usr/bin/env node +const fs = require("node:fs"); +const args = process.argv.slice(2); +const outputIndex = args.indexOf("-o"); 
+const outputPath = args[outputIndex + 1]; +${body} +`, + "utf8" + ); + await fs.chmod(mockBinary, 0o755); + return mockBinary; +} diff --git a/test/memory-command.test.ts b/test/memory-command.test.ts index e87bcde..3ed8a4b 100644 --- a/test/memory-command.test.ts +++ b/test/memory-command.test.ts @@ -481,8 +481,19 @@ describe("runMemory", () => { sessionSource: "rollout-jsonl", status: "applied", appliedCount: 1, + suppressedOperationCount: 1, scopesTouched: ["project"], resultSummary: "1 operation(s) applied", + conflicts: [ + { + scope: "project", + topic: "preferences", + candidateSummary: "Maybe use bun instead of pnpm in this repository.", + conflictsWith: ["Prefer pnpm in this repository."], + source: "existing-memory", + resolution: "suppressed" + } + ], operations: [ { action: "upsert", @@ -501,7 +512,18 @@ describe("runMemory", () => { expect(jsonResult.exitCode).toBe(0); const jsonOutput = JSON.parse(jsonResult.stdout) as MemoryCommandOutput; expect(jsonOutput.recentSyncAudit).toHaveLength(1); - expect(jsonOutput.recentSyncAudit[0]?.rolloutPath).toBe("/tmp/rollout-memory-cli.jsonl"); + expect(jsonOutput.recentSyncAudit[0]).toMatchObject({ + rolloutPath: "/tmp/rollout-memory-cli.jsonl", + suppressedOperationCount: 1 + }); + expect(jsonOutput.recentSyncAudit[0]?.conflicts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + resolution: "suppressed" + }) + ]) + ); + expect(jsonOutput.recentAudit).toEqual(jsonOutput.recentSyncAudit); expect(jsonOutput.syncAuditPath).toBe(store.getSyncAuditPath()); const textResult = runCli(projectDir, ["memory", "--recent", "2", "--print-startup"]); diff --git a/test/session-command.test.ts b/test/session-command.test.ts index eabacac..0c30f91 100644 --- a/test/session-command.test.ts +++ b/test/session-command.test.ts @@ -2,6 +2,7 @@ import fs from "node:fs/promises"; import path from "node:path"; import { afterEach, describe, expect, it, vi } from "vitest"; import { runSession } from 
"../src/lib/commands/session.js"; +import { runWrappedCodex } from "../src/lib/commands/wrapper.js"; import { detectProjectContext } from "../src/lib/domain/project-context.js"; import { SessionContinuityStore } from "../src/lib/domain/session-continuity-store.js"; import type { SessionContinuityAuditEntry } from "../src/lib/types.js"; @@ -15,7 +16,8 @@ import { runCli } from "./helpers/cli-runner.js"; import { cleanupTempDirs, createTempDir, - makeEvidenceCounts + makeEvidenceCounts, + writeSessionRolloutFile } from "./helpers/session-test-support.js"; const tempDirs: string[] = []; @@ -36,6 +38,25 @@ afterEach(async () => { await cleanupTempDirs(tempDirs); }); +const rolloutFixturesDir = path.join(process.cwd(), "test/fixtures/rollouts"); + +async function copyRolloutFixture(fixtureName: string, destinationPath: string): Promise { + await fs.copyFile(path.join(rolloutFixturesDir, fixtureName), destinationPath); +} + +async function writeNoopCodexBinary(repoDir: string): Promise { + const mockCodexPath = path.join(repoDir, "mock-codex"); + await fs.writeFile( + mockCodexPath, + `#!/usr/bin/env node +process.exit(0); +`, + "utf8" + ); + await fs.chmod(mockCodexPath, 0o755); + return mockCodexPath; +} + describe("runSession", () => { it("shows an empty compact prior preview when no continuity audit history exists", async () => { const repoDir = await tempDir("cam-session-empty-history-repo-"); @@ -304,12 +325,24 @@ describe("runSession", () => { expect(result.exitCode).toBe(0); const payload = JSON.parse(result.stdout) as { - diagnostics: { actualPath: string }; - latestContinuityAuditEntry: { rolloutPath: string } | null; + diagnostics: { actualPath: string; confidence: string; warnings: string[] }; + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; recentContinuityAuditEntries: Array<{ rolloutPath: string }>; }; expect(payload.diagnostics.actualPath).toBe("heuristic"); + 
expect(payload.diagnostics.confidence).toBe("low"); + expect(payload.diagnostics.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Next steps were inferred from the latest request") + ]) + ); expect(payload.latestContinuityAuditEntry?.rolloutPath).toBe(rolloutPath); + expect(payload.latestContinuityAuditEntry?.trigger).toBe("manual-save"); + expect(payload.latestContinuityAuditEntry?.writeMode).toBe("merge"); expect(payload.recentContinuityAuditEntries[0]?.rolloutPath).toBe(rolloutPath); }, 30_000); @@ -364,6 +397,10 @@ describe("runSession", () => { action: string; writeMode: string; rolloutPath: string; + diagnostics: { confidence: string; warnings: string[] }; + recentContinuityAuditEntries: Array<{ rolloutPath: string }>; + continuityAuditPath: string; + pendingContinuityRecovery: object | null; rolloutSelection: { kind: string; rolloutPath: string }; latestContinuityAuditEntry: { rolloutPath: string; @@ -381,6 +418,59 @@ describe("runSession", () => { expect(payload.latestContinuityAuditEntry?.rolloutPath).toBe(rolloutPath); expect(payload.latestContinuityAuditEntry?.trigger).toBe("manual-refresh"); expect(payload.latestContinuityAuditEntry?.writeMode).toBe("replace"); + expect(payload.diagnostics.confidence).toBe("low"); + expect(payload.diagnostics.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Next steps were inferred from the latest request") + ]) + ); + expect(payload.recentContinuityAuditEntries[0]?.rolloutPath).toBe(rolloutPath); + expect(payload.continuityAuditPath).toContain("session-continuity-log.jsonl"); + expect(payload.pendingContinuityRecovery).toBeNull(); + + const loadPayload = JSON.parse( + await runSession("load", { cwd: repoDir, json: true }) + ) as { + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; + latestContinuityDiagnostics: { confidence: string; warnings: string[] } | null; + }; + 
expect(loadPayload.latestContinuityAuditEntry).toMatchObject({ + rolloutPath, + trigger: "manual-refresh", + writeMode: "replace" + }); + expect(loadPayload.latestContinuityDiagnostics?.confidence).toBe("low"); + expect(loadPayload.latestContinuityDiagnostics?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Next steps were inferred from the latest request") + ]) + ); + + const statusPayload = JSON.parse( + await runSession("status", { cwd: repoDir, json: true }) + ) as { + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; + latestContinuityDiagnostics: { confidence: string; warnings: string[] } | null; + }; + expect(statusPayload.latestContinuityAuditEntry).toMatchObject({ + rolloutPath, + trigger: "manual-refresh", + writeMode: "replace" + }); + expect(statusPayload.latestContinuityDiagnostics?.confidence).toBe("low"); + expect(statusPayload.latestContinuityDiagnostics?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Next steps were inferred from the latest request") + ]) + ); const merged = await store.readMergedState(); expect(merged?.goal).toContain("Refresh continuity through the CLI command surface."); @@ -555,13 +645,24 @@ describe("runSession", () => { expect(loadResult.exitCode).toBe(0); const loadPayload = JSON.parse(loadResult.stdout) as { startup: { text: string }; - latestContinuityAuditEntry: { rolloutPath: string } | null; - latestContinuityDiagnostics: { actualPath: string; warnings: string[] } | null; + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; + latestContinuityDiagnostics: { + actualPath: string; + confidence: string; + warnings: string[]; + } | null; recentContinuityAuditEntries: Array<{ rolloutPath: string }>; }; expect(loadPayload.startup.text).toContain("# Session Continuity"); expect(loadPayload.latestContinuityAuditEntry?.rolloutPath).toBe(rolloutPath); + 
expect(loadPayload.latestContinuityAuditEntry?.trigger).toBe("manual-save"); + expect(loadPayload.latestContinuityAuditEntry?.writeMode).toBe("merge"); expect(loadPayload.latestContinuityDiagnostics?.actualPath).toBe("heuristic"); + expect(loadPayload.latestContinuityDiagnostics?.confidence).toBe("low"); expect(loadPayload.latestContinuityDiagnostics?.warnings).toEqual( expect.arrayContaining([ expect.stringContaining("Next steps were inferred from the latest request") @@ -572,12 +673,23 @@ describe("runSession", () => { const statusResult = runCli(repoDir, ["session", "status", "--json"]); expect(statusResult.exitCode).toBe(0); const statusPayload = JSON.parse(statusResult.stdout) as { - latestContinuityAuditEntry: { rolloutPath: string } | null; - latestContinuityDiagnostics: { actualPath: string; warnings: string[] } | null; + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; + latestContinuityDiagnostics: { + actualPath: string; + confidence: string; + warnings: string[]; + } | null; recentContinuityAuditEntries: Array<{ rolloutPath: string }>; }; expect(statusPayload.latestContinuityAuditEntry?.rolloutPath).toBe(rolloutPath); + expect(statusPayload.latestContinuityAuditEntry?.trigger).toBe("manual-save"); + expect(statusPayload.latestContinuityAuditEntry?.writeMode).toBe("merge"); expect(statusPayload.latestContinuityDiagnostics?.actualPath).toBe("heuristic"); + expect(statusPayload.latestContinuityDiagnostics?.confidence).toBe("low"); expect(statusPayload.latestContinuityDiagnostics?.warnings).toEqual( expect.arrayContaining([ expect.stringContaining("Next steps were inferred from the latest request") @@ -586,6 +698,55 @@ describe("runSession", () => { expect(statusPayload.recentContinuityAuditEntries[0]?.rolloutPath).toBe(rolloutPath); }, 30_000); + it("keeps reviewer noise in diagnostics but out of persisted continuity markdown for a mixed-language rollout fixture", async () => { + const repoDir = 
await tempDir("cam-session-mixed-noise-repo-"); + const memoryRoot = await tempDir("cam-session-mixed-noise-memory-"); + await initRepo(repoDir); + + await writeProjectConfig( + repoDir, + configJson(), + { autoMemoryDirectory: memoryRoot } + ); + + const rolloutPath = path.join(repoDir, "mixed-language-reviewer-noise.jsonl"); + await copyRolloutFixture("mixed-language-reviewer-noise.jsonl", rolloutPath); + + const saveResult = runCli(repoDir, ["session", "save", "--json", "--rollout", rolloutPath]); + expect(saveResult.exitCode).toBe(0); + const savePayload = JSON.parse(saveResult.stdout) as { + diagnostics: { confidence: string; warnings: string[] }; + latestContinuityAuditEntry: { warnings?: string[] } | null; + }; + expect(savePayload.diagnostics.confidence).toBe("low"); + expect(savePayload.diagnostics.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + expect(savePayload.latestContinuityAuditEntry?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + + const store = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson(), + autoMemoryDirectory: memoryRoot + }); + const sharedBody = await fs.readFile(store.paths.sharedFile, "utf8"); + const localBody = await fs.readFile(store.paths.localFile, "utf8"); + + expect(sharedBody).toContain("pnpm test"); + expect(sharedBody).toContain("pnpm build"); + expect(localBody).toContain("更新 src/auth/login.ts"); + expect(localBody).toContain("login.ts"); + expect(sharedBody).not.toContain("Reviewer or subagent prompt noise"); + expect(localBody).not.toContain("Reviewer or subagent prompt noise"); + expect(sharedBody).not.toContain("Focus on docs and contract surfaces only"); + expect(localBody).not.toContain("Focus on docs and contract surfaces only"); + }, 30_000); + it("refresh prefers a matching recovery marker over audit and latest primary rollout", async () => { 
const repoDir = await tempDir("cam-session-refresh-recovery-priority-repo-"); const memoryRoot = await tempDir("cam-session-refresh-recovery-priority-memory-"); @@ -938,6 +1099,295 @@ describe("runSession", () => { ); }, 30_000); + it("keeps modern audit metadata and reviewer-noise scrub stable for a conflict-heavy rollout fixture", async () => { + const repoDir = await tempDir("cam-session-conflict-heavy-repo-"); + const memoryRoot = await tempDir("cam-session-conflict-heavy-memory-"); + await initRepo(repoDir); + + await writeProjectConfig( + repoDir, + configJson(), + { autoMemoryDirectory: memoryRoot } + ); + + const rolloutPath = path.join(repoDir, "session-continuity-conflict-heavy.jsonl"); + await writeSessionRolloutFile( + rolloutPath, + await fs.readFile( + path.join(rolloutFixturesDir, "session-continuity-conflict-heavy.jsonl"), + "utf8" + ) + ); + + const savePayload = JSON.parse( + await runSession("save", { + cwd: repoDir, + rollout: rolloutPath, + scope: "both", + json: true + }) + ) as { + diagnostics: { actualPath: string; confidence: string; warnings: string[] }; + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + warnings?: string[]; + } | null; + }; + expect(savePayload.diagnostics.actualPath).toBe("heuristic"); + expect(savePayload.diagnostics.confidence).toBe("low"); + expect(savePayload.diagnostics.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + expect(savePayload.latestContinuityAuditEntry).toMatchObject({ + rolloutPath, + trigger: "manual-save", + writeMode: "merge" + }); + expect(savePayload.latestContinuityAuditEntry?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + + const loadPayload = JSON.parse( + await runSession("load", { cwd: repoDir, json: true }) + ) as { + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: 
string; + } | null; + latestContinuityDiagnostics: { confidence: string; warnings: string[] } | null; + recentContinuityAuditEntries: Array<{ rolloutPath: string }>; + projectState: { + goal: string; + confirmedWorking: string[]; + triedAndFailed: string[]; + notYetTried: string[]; + } | null; + localState: { + incompleteNext: string[]; + filesDecisionsEnvironment: string[]; + } | null; + mergedState: { + goal: string; + confirmedWorking: string[]; + triedAndFailed: string[]; + incompleteNext: string[]; + }; + }; + expect(loadPayload.latestContinuityAuditEntry).toMatchObject({ + rolloutPath, + trigger: "manual-save", + writeMode: "merge" + }); + expect(loadPayload.latestContinuityDiagnostics?.confidence).toBe("low"); + expect(loadPayload.latestContinuityDiagnostics?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + expect(loadPayload.recentContinuityAuditEntries[0]?.rolloutPath).toBe(rolloutPath); + expect(loadPayload.projectState?.goal).toContain("pnpm test passes"); + expect(loadPayload.projectState?.notYetTried.join("\n")).toContain("cookies()"); + expect(loadPayload.localState?.incompleteNext.join("\n")).toContain("guard the redirect"); + expect(loadPayload.localState?.filesDecisionsEnvironment.join("\n")).toContain("login.ts"); + expect(loadPayload.mergedState.goal).toContain("pnpm test passes"); + + const statusPayload = JSON.parse( + await runSession("status", { cwd: repoDir, json: true }) + ) as { + latestContinuityAuditEntry: { + rolloutPath: string; + trigger?: string; + writeMode?: string; + } | null; + latestContinuityDiagnostics: { confidence: string; warnings: string[] } | null; + recentContinuityAuditEntries: Array<{ rolloutPath: string }>; + }; + expect(statusPayload.latestContinuityAuditEntry).toMatchObject({ + rolloutPath, + trigger: "manual-save", + writeMode: "merge" + }); + expect(statusPayload.latestContinuityDiagnostics?.confidence).toBe("low"); + 
expect(statusPayload.latestContinuityDiagnostics?.warnings).toEqual( + expect.arrayContaining([ + expect.stringContaining("Reviewer or subagent prompt noise") + ]) + ); + expect(statusPayload.recentContinuityAuditEntries[0]?.rolloutPath).toBe(rolloutPath); + + const store = new SessionContinuityStore(detectProjectContext(repoDir), { + ...configJson(), + autoMemoryDirectory: memoryRoot + }); + const sharedBody = await fs.readFile(store.paths.sharedFile, "utf8"); + const localBody = await fs.readFile(store.paths.localFile, "utf8"); + expect(sharedBody).not.toContain("Reviewer or subagent prompt noise"); + expect(localBody).not.toContain("Reviewer or subagent prompt noise"); + expect(sharedBody).not.toContain("Focus on docs and contract surfaces only"); + expect(localBody).not.toContain("Focus on docs and contract surfaces only"); + expect(sharedBody).toContain("Redis must be running before integration tests."); + expect(localBody).toContain("src/auth/middleware.ts"); + }, 30_000); + + it("keeps refresh provenance selection distinct from wrapper auto-save in the same scenario", async () => { + const repoDir = await tempDir("cam-session-refresh-vs-wrapper-repo-"); + const memoryRoot = await tempDir("cam-session-refresh-vs-wrapper-memory-"); + const sessionsDir = await tempDir("cam-session-refresh-vs-wrapper-sessions-"); + const dayDir = path.join(sessionsDir, "2026", "03", "15"); + process.env.CAM_CODEX_SESSIONS_DIR = sessionsDir; + await fs.mkdir(dayDir, { recursive: true }); + await initRepo(repoDir); + + const mockCodexPath = await writeNoopCodexBinary(repoDir); + await writeProjectConfig( + repoDir, + configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + { + autoMemoryDirectory: memoryRoot, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + } + ); + + const recoveryRolloutPath = await writeSessionRolloutFile( + path.join(repoDir, "recovery-rollout.jsonl"), + 
rolloutFixture(repoDir, "Refresh should use the recovery provenance.", { + sessionId: "session-recovery-shared" + }) + ); + const auditRolloutPath = await writeSessionRolloutFile( + path.join(repoDir, "audit-rollout.jsonl"), + rolloutFixture(repoDir, "Refresh should use the audit provenance only if recovery is absent.", { + sessionId: "session-audit-shared" + }) + ); + const primaryRolloutPath = await writeSessionRolloutFile( + path.join(dayDir, "rollout-primary.jsonl"), + rolloutFixture(repoDir, "Wrapper auto-save should keep the real primary rollout.", { + sessionId: "session-primary-shared" + }) + ); + const subagentRolloutPath = await writeSessionRolloutFile( + path.join(dayDir, "rollout-subagent.jsonl"), + [ + JSON.stringify({ + type: "session_meta", + payload: { + id: "session-subagent-shared", + forked_from_id: "session-primary-shared", + timestamp: "2026-03-15T00:00:02.000Z", + cwd: repoDir, + source: { + subagent: { + thread_spawn: { + parent_thread_id: "session-primary-shared" + } + } + } + } + }), + JSON.stringify({ + type: "session_meta", + payload: { + id: "session-primary-shared", + timestamp: "2026-03-15T00:00:01.000Z", + cwd: repoDir, + source: "cli" + } + }), + JSON.stringify({ + type: "event_msg", + payload: { + type: "user_message", + message: "You are reviewer sub-agent 4. Work read-only. Focus on docs and contract surfaces only." 
+ } + }) + ].join("\n") + ); + + const project = detectProjectContext(repoDir); + const store = new SessionContinuityStore(project, { + ...configJson({ + codexBinary: mockCodexPath, + sessionContinuityAutoLoad: false, + sessionContinuityAutoSave: true + }), + autoMemoryDirectory: memoryRoot + }); + await store.writeRecoveryRecord({ + recordedAt: "2026-03-18T00:00:00.000Z", + projectId: project.projectId, + worktreeId: project.worktreeId, + rolloutPath: recoveryRolloutPath, + sourceSessionId: "session-recovery-shared", + trigger: "manual-save", + writeMode: "merge", + scope: "both", + writtenPaths: [store.paths.sharedFile, store.paths.localFile], + preferredPath: "heuristic", + actualPath: "heuristic", + fallbackReason: "configured-heuristic", + evidenceCounts: makeEvidenceCounts(), + failedStage: "audit-write", + failureMessage: "stale recovery marker" + }); + await store.appendAuditLog({ + generatedAt: "2026-03-18T00:01:00.000Z", + projectId: project.projectId, + worktreeId: project.worktreeId, + configuredExtractorMode: "heuristic", + trigger: "manual-save", + writeMode: "merge", + scope: "both", + rolloutPath: auditRolloutPath, + sourceSessionId: "session-audit-shared", + preferredPath: "heuristic", + actualPath: "heuristic", + fallbackReason: "configured-heuristic", + evidenceCounts: makeEvidenceCounts(), + writtenPaths: ["/tmp/continuity-audit.md"] + }); + + const refreshPayload = JSON.parse( + await runSession("refresh", { cwd: repoDir, scope: "both", json: true }) + ) as { + rolloutPath: string; + rolloutSelection: { kind: string; rolloutPath: string }; + }; + expect(refreshPayload).toMatchObject({ + rolloutPath: recoveryRolloutPath, + rolloutSelection: { + kind: "pending-recovery-marker", + rolloutPath: recoveryRolloutPath + } + }); + + const exitCode = await runWrappedCodex(repoDir, "exec", ["continue"]); + expect(exitCode).toBe(0); + + const latestAudit = await store.readLatestAuditEntry(); + expect(latestAudit).toMatchObject({ + trigger: 
"wrapper-auto-save", + writeMode: "merge", + rolloutPath: primaryRolloutPath, + sourceSessionId: "session-primary-shared" + }); + expect(latestAudit?.rolloutPath).not.toBe(recoveryRolloutPath); + expect(latestAudit?.rolloutPath).not.toBe(auditRolloutPath); + expect(latestAudit?.rolloutPath).not.toBe(subagentRolloutPath); + }, 30_000); + it("rejects invalid scope values", async () => { const repoDir = await tempDir("cam-session-invalid-scope-repo-"); const memoryRoot = await tempDir("cam-session-invalid-scope-memory-"); diff --git a/test/tarball-install-smoke.test.ts b/test/tarball-install-smoke.test.ts new file mode 100644 index 0000000..3fd525c --- /dev/null +++ b/test/tarball-install-smoke.test.ts @@ -0,0 +1,94 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, describe, expect, it } from "vitest"; +import { runCommandCapture } from "../src/lib/util/process.js"; + +const tempDirs: string[] = []; + +async function tempDir(prefix: string): Promise { + const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); + tempDirs.push(dir); + return dir; +} + +afterEach(async () => { + await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); +}); + +function npmCommand(): string { + return process.platform === "win32" ? "npm.cmd" : "npm"; +} + +function camBinaryPath(installDir: string): string { + return path.join( + installDir, + "node_modules", + ".bin", + process.platform === "win32" ? "cam.cmd" : "cam" + ); +} + +function isolatedEnv(homeDir: string): NodeJS.ProcessEnv { + return { + ...process.env, + HOME: homeDir, + ...(process.platform === "win32" ? 
{ USERPROFILE: homeDir } : {}) + }; +} + +describe("tarball install smoke", () => { + it("installs and runs the packaged cam bin shim from a local tarball", async () => { + const homeDir = await tempDir("cam-tarball-home-"); + const packDir = await tempDir("cam-tarball-pack-"); + const installDir = await tempDir("cam-tarball-install-"); + const env = isolatedEnv(homeDir); + const packageJson = JSON.parse(await fs.readFile(path.resolve("package.json"), "utf8")) as { + version: string; + }; + + const packResult = runCommandCapture( + npmCommand(), + ["pack", "--pack-destination", packDir], + process.cwd(), + env + ); + expect(packResult.exitCode).toBe(0); + + const tarballName = packResult.stdout.trim().split(/\r?\n/).at(-1); + expect(tarballName).toBeTruthy(); + const tarballPath = path.join(packDir, tarballName!); + + const initResult = runCommandCapture(npmCommand(), ["init", "-y"], installDir, env); + expect(initResult.exitCode).toBe(0); + + const installResult = runCommandCapture( + npmCommand(), + ["install", "--no-package-lock", tarballPath], + installDir, + env + ); + expect(installResult.exitCode).toBe(0); + + const versionResult = runCommandCapture(camBinaryPath(installDir), ["--version"], installDir, env); + expect(versionResult.exitCode).toBe(0); + expect(versionResult.stdout.trim()).toBe(packageJson.version); + + const sessionStatusResult = runCommandCapture( + camBinaryPath(installDir), + ["session", "status", "--json"], + installDir, + env + ); + expect(sessionStatusResult.exitCode).toBe(0); + + const payload = JSON.parse(sessionStatusResult.stdout) as { + projectLocation: { exists: boolean }; + latestContinuityAuditEntry: object | null; + pendingContinuityRecovery: object | null; + }; + expect(payload.projectLocation.exists).toBe(false); + expect(payload.latestContinuityAuditEntry).toBeNull(); + expect(payload.pendingContinuityRecovery).toBeNull(); + }, 60_000); +}); From f3b2ad259656283257e6543d54d7a7663069e8a6 Mon Sep 17 00:00:00 2001 From: blocks 
Date: Sat, 21 Mar 2026 00:02:27 +0800 Subject: [PATCH 5/9] docs: add traditional chinese and japanese readmes --- README.en.md | 2 + README.ja.md | 279 +++++++++++++++++++++++++++++++++++++ README.md | 2 + README.zh-TW.md | 279 +++++++++++++++++++++++++++++++++++++ package.json | 2 + test/docs-contract.test.ts | 13 ++ 6 files changed, 577 insertions(+) create mode 100644 README.ja.md create mode 100644 README.zh-TW.md diff --git a/README.en.md b/README.en.md index d4ff836..d77c78f 100644 --- a/README.en.md +++ b/README.en.md @@ -3,7 +3,9 @@

A local-first companion CLI that brings Claude-style auto memory workflows to Codex

简体中文 | + 繁體中文 | English + 日本語

diff --git a/README.ja.md b/README.ja.md new file mode 100644 index 0000000..787d1cf --- /dev/null +++ b/README.ja.md @@ -0,0 +1,279 @@ +

+ +> `codex-auto-memory` は汎用メモアプリでもクラウド型メモリサービスでもありません。 +> 現在の Codex CLI に対して、ローカル Markdown、コンパクトな startup injection、必要時のみの topic file 読み出し、そして companion runtime を使い、Claude Code auto memory の観測可能な契約をできるだけ再現することが目的です。 + +--- + +**まず押さえるべき 3 点** + +1. **何をするか**:Codex セッション終了後に有用な情報を抽出し、ローカル Markdown に書き戻します。次回起動時にそれを注入し、Codex がプロジェクトを「覚えている」状態を作ります。 +2. **どう保存するか**:すべて `~/.codex-auto-memory/` 配下の Markdown です。いつでも閲覧・編集でき、Git レビューにも載せられます。 +3. **Claude との関係**:これは companion CLI です。Codex 上で Claude Code auto memory の作業感を再現するためのもので、Anthropic の公式製品でもクラウド機能でもありません。 + +--- + +## 目次 + +- [このプロジェクトが存在する理由](#このプロジェクトが存在する理由) +- [どんな人向けか](#どんな人向けか) +- [主要機能](#主要機能) +- [機能比較](#機能比較) +- [クイックスタート](#クイックスタート) +- [主要コマンド](#主要コマンド) +- [仕組み](#仕組み) +- [保存レイアウト](#保存レイアウト) +- [ドキュメント案内](#ドキュメント案内) +- [現在の状態](#現在の状態) +- [ロードマップ](#ロードマップ) +- [コントリビュートとライセンス](#コントリビュートとライセンス) + +## このプロジェクトが存在する理由 + +Claude Code には比較的明確な auto memory 契約があります。 + +- AI が memory を自動で書く +- memory はローカル Markdown で保存される +- `MEMORY.md` が起動時の入口になる +- 起動時に読むのは先頭 200 行だけ +- 詳細は topic files に分かれ、必要になった時だけ読む +- 同じリポジトリの worktree 間で project memory を共有する +- `/memory` で監査・編集できる + +一方で現在の Codex CLI には便利な基盤はあるものの、同等に完成した public memory surface はまだありません。 + +- `AGENTS.md` +- multi-agent workflows +- local persistent sessions / rollout logs +- `cam doctor` や feature output で見える `memories` / `codex_hooks` signal + +そこで `codex-auto-memory` は、native memory を既成事実にせず、companion-first で監査しやすいルートを提供します。現在の UX 改善は `cam memory` と `cam session` の reviewer surface をより分かりやすくすることに集中しています。 + +## どんな人向けか + +向いている人: + +- 今すぐ Codex で Claude-style auto memory に近い体験がほしい人 +- memory を完全にローカル・可編集・監査可能な Markdown で持ちたいチーム +- worktree 間で project memory を共有しつつ、worktree-local continuity も分けたい人 +- 将来 Codex の公式 surface が変わっても、ユーザーの mental model を壊したくないメンテナ + +向いていない人: + +- 汎用ナレッジベースやメモアプリを求めている人 +- 現時点で Claude `/memory` の完全な操作性を期待している人 +- アカウント単位やクラウド同期型の記憶が必要な人 + +## 主要機能 + +| 機能 | 説明 | +| :-- | :-- | +| 自動 memory 同期 | Codex rollout JSONL 
から将来も有用な知識を抽出し、Markdown memory に書き戻す | +| Markdown-first | `MEMORY.md` と topic files 自体がプロダクト surface であり、隠れたキャッシュではない | +| コンパクトな起動注入 | 実際に payload に入った quoted `MEMORY.md` startup files と on-demand topic refs のみを注入し、topic body を eager load しない | +| worktree-aware | project memory を worktree 間で共有しつつ、local continuity は分離する | +| session continuity | 一時的な作業状態を durable memory から分離して扱う | +| reviewer surface | `cam memory`、`cam session`、`cam audit` で review・監査しやすい surface を提供する | + +## 機能比較 + +| 機能 | Claude Code | 現在の Codex | Codex Auto Memory | +| :-- | :-- | :-- | :-- | +| memory の自動書き込み | Built in | 完全な公開契約は未整備 | companion sync flow で提供 | +| ローカル Markdown memory | Built in | 完全な公開契約は未整備 | 対応 | +| `MEMORY.md` 起動入口 | Built in | なし | あり | +| 200 行の起動予算 | Built in | なし | あり | +| topic files の必要時読込 | Built in | なし | 部分対応 | +| セッション continuity | コミュニティ解法が多い | 完全な公開契約は未整備 | 独立した companion layer として対応 | +| worktree 間の project memory 共有 | Built in | 公開契約なし | 対応 | +| inspect / audit memory | `/memory` | 相当コマンドなし | `cam memory` | +| native hooks / memory | Built in | Experimental / under development | compatibility seam のみ保持 | + +`cam memory` は inspection / audit surface として設計されています。 +実際に startup payload に入った quoted startup files、startup budget、on-demand topic refs、edit paths、さらに `--recent [count]` の recent durable sync audit を表示します。 +recent sync audit では、保守的に suppress された conflict candidates も reviewer-visible に保持され、矛盾する rollout 出力が silent merge されないようになっています。 + +## クイックスタート + +### 1. Clone とインストール + +```bash +git clone https://github.com/Boulea7/Codex-Auto-Memory.git +cd Codex-Auto-Memory +pnpm install +``` + +### 2. ビルドしてグローバルコマンドをリンク + +```bash +pnpm build +pnpm link --global +``` + +> これで `cam` コマンドを任意のディレクトリから使えます。 + +### 3. プロジェクト内で初期化 + +```bash +cd /your/project +cam init +``` + +これにより、プロジェクトルートに `codex-auto-memory.json` が作成され、ローカル専用の `.codex-auto-memory.local.json` も生成されます。 + +### 4. 
wrapper 経由で Codex を起動 + +```bash +cam run +``` + +各セッション終了後、`cam` が rollout ログから情報を抽出し、memory ファイルへ自動で書き込みます。 + +### 5. 状態を確認 + +```bash +cam memory +cam session status +cam session refresh +cam remember "Always use pnpm instead of npm" +cam forget "old debug note" +cam audit +``` + +## 主要コマンド + +| コマンド | 用途 | +| :-- | :-- | +| `cam run` / `cam exec` / `cam resume` | startup memory を組み立て、wrapper 経由で Codex を起動 | +| `cam sync` | 最新 rollout を durable memory に手動同期 | +| `cam memory` | quoted startup files、on-demand topic refs、startup budget、edit paths、suppressed conflict candidates を含む durable sync audit を確認 | +| `cam remember` / `cam forget` | durable memory を明示的に追加・削除 | +| `cam session save` | merge / incremental save | +| `cam session refresh` | replace / clean regeneration | +| `cam session load` / `status` | continuity reviewer surface | +| `cam session clear` / `open` | active continuity を消す、または local continuity ディレクトリを開く | +| `cam audit` | privacy / secret hygiene チェック | +| `cam doctor` | companion wiring と native readiness posture を確認 | + +## 仕組み + +### 設計原則 + +- `local-first and auditable` +- `Markdown files are the product surface` +- `companion-first, with a narrow compatibility seam` +- `session continuity` と `durable memory` は明確に分離 + +### なぜ今すぐ native memory に切り替えないのか + +- 公開された Codex ドキュメントは、Claude Code 相当の完全で安定した native memory 契約をまだ定義していない +- ローカルの `cam doctor --json` でも、`memories` / `codex_hooks` は readiness signal として見えているだけで trusted primary path ではない +- そのため、公開ドキュメント・実行時安定性・CI での検証可能性が揃うまでは companion-first を維持する + +## 保存レイアウト + +Durable memory: + +```text +~/.codex-auto-memory/ +├── global/ +│ └── MEMORY.md +└── projects// + ├── project/ + │ ├── MEMORY.md + │ └── commands.md + └── locals// + ├── MEMORY.md + └── workflow.md +``` + +Session continuity: + +```text +~/.codex-auto-memory/projects//continuity/project/active.md +/.codex-auto-memory/sessions/active.md +``` + +詳細は architecture doc を参照してください。 + +## ドキュメント案内 + +- [文档首页(中文)](docs/README.md) +- 
[Documentation Hub (English)](docs/README.en.md) +- [Claude reference contract (中文)](docs/claude-reference.md) | [English](docs/claude-reference.en.md) +- [Architecture (中文)](docs/architecture.md) | [English](docs/architecture.en.md) +- [Native migration strategy (中文)](docs/native-migration.md) | [English](docs/native-migration.en.md) +- [Session continuity design](docs/session-continuity.md) +- [Release checklist](docs/release-checklist.md) +- [Contributing](CONTRIBUTING.md) + +## 現在の状態 + +- durable memory companion path: available +- topic-aware startup lookup: available +- session continuity companion layer: available +- reviewer audit surfaces: available +- tagged GitHub Releases: release workflow は tarball artifact を対象として定義済み。最初の real tag を push する前に、default branch 上でその workflow が表示され、active になっていることを確認してください。npm publish は引き続き手動です +- native memory / native hooks primary path: not enabled and not trusted as the main implementation path + +## ロードマップ + +### v0.1 + +- companion CLI +- Markdown memory store +- 200-line startup compiler +- worktree-aware project identity +- 初期の maintainer / reviewer docs + +### v0.2 + +- より堅い contradiction handling +- `cam memory` と `cam session` の reviewer UX 改善 +- continuity diagnostics と reviewer packet の整理、`confidence` / warnings の明示 +- tarball install smoke を含む release-facing 検証の強化 +- 将来の hook surface に備えた compatibility seam の維持 + +### v0.3+ + +- 公式 Codex memory / hooks surface を継続的に追跡 +- optional GUI / TUI browser +- より強い cross-session diagnostics と confidence surface + +## コントリビュートとライセンス + +- Contribution guide: [CONTRIBUTING.md](./CONTRIBUTING.md) +- License: [Apache-2.0](./LICENSE) + +README、公式ドキュメント、ローカル実行結果のあいだで食い違いを見つけた場合は、次の順で信頼してください。 + +1. 公式プロダクトドキュメント +2. 再現可能なローカル挙動 +3. 不確実性を明示した記述 + +根拠の弱い断定より、確認可能な証拠を優先してください。 diff --git a/README.md b/README.md index fdabe4f..171b634 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,9 @@

为 Codex 复现 Claude-style auto memory 工作流的 local-first companion CLI

简体中文 | + 繁體中文 | English + 日本語

diff --git a/README.zh-TW.md b/README.zh-TW.md new file mode 100644 index 0000000..7ef0090 --- /dev/null +++ b/README.zh-TW.md @@ -0,0 +1,279 @@ +

+ +> `codex-auto-memory` 不是通用筆記軟體,也不是雲端記憶服務。 +> 它的目標是在今天的 Codex CLI 上,以本地 Markdown、緊湊 startup injection、按需 topic file 讀取與 companion runtime,盡可能重現 Claude Code auto memory 的可觀察產品契約。 + +--- + +**先看三個重點:** + +1. **它做什麼**:每次 Codex 會話結束後,自動把有用資訊提取出來,寫入本地 Markdown,下一次啟動時再注入給 Codex,讓它「記得」你的專案。 +2. **它怎麼存**:全部都是本地 Markdown,放在 `~/.codex-auto-memory/`,你可以隨時檢視、編輯、納入 Git 審查。 +3. **它和 Claude 的關係**:這是一個 companion CLI,目標是在 Codex 上重現 Claude Code auto memory 的工作方式。它不是 Claude 官方產品,也不依賴雲端。 + +--- + +## 目錄 + +- [為什麼這個專案存在](#為什麼這個專案存在) +- [這個專案適合誰](#這個專案適合誰) +- [核心能力](#核心能力) +- [能力對照](#能力對照) +- [快速開始](#快速開始) +- [常用命令](#常用命令) +- [工作方式](#工作方式) +- [儲存布局](#儲存布局) +- [文件導航](#文件導航) +- [目前狀態](#目前狀態) +- [路線圖](#路線圖) +- [貢獻與授權](#貢獻與授權) + +## 為什麼這個專案存在 + +Claude Code 已經公開了一套相對清晰的 auto memory 產品契約: + +- AI 會自動寫 memory +- memory 以本地 Markdown 保存 +- `MEMORY.md` 是啟動入口 +- 啟動時只讀前 200 行 +- 細節寫入 topic files,按需讀取 +- 同一個倉庫的不同 worktree 共享 project memory +- `/memory` 可用來審查與編輯 memory + +而今天的 Codex CLI 已經具備不少有價值的基礎能力,但尚未公開同等完整的 memory product surface: + +- `AGENTS.md` +- multi-agent workflows +- 本地 persistent sessions / rollout logs +- 本地 `cam doctor` / feature output 中可見的 `memories`、`codex_hooks` signal + +`codex-auto-memory` 的價值,就是在官方 native memory 還沒有穩定公開之前,先提供一條乾淨、可審計、companion-first 的路線,只保留一條狹窄的 compatibility seam。近期 UX 重點仍是持續收緊 `cam memory` / `cam session` 的 reviewer 體驗。 + +## 這個專案適合誰 + +適合: + +- 想在 Codex 中獲得更接近 Claude-style auto memory 工作流的使用者 +- 希望 memory 完全本地、完全可編輯、可以直接放進 Git 審查語境的團隊 +- 需要在多個 worktree 之間共享 project memory,同時保留 worktree-local continuity 的工程流 +- 希望未來即使官方 surface 變化,也不需要重建使用者心智模型的維護者 + +不適合: + +- 想把它當作通用知識庫、筆記軟體或雲端同步服務的人 +- 期待現階段直接替代 Claude `/memory` 全部互動能力的人 +- 需要帳號級個人化記憶或跨裝置雲端記憶的人 + +## 核心能力 + +| 能力 | 說明 | +| :-- | :-- | +| 自動 memory 同步 | 會話結束後從 Codex rollout JSONL 中提取穩定、未來有用的資訊並寫回 Markdown memory | +| Markdown-first | `MEMORY.md` 與 topic files 就是產品表面,而不是內部快取 | +| 緊湊啟動注入 | 啟動時只注入真正進入 payload 的 quoted `MEMORY.md` startup files,並附帶按需 topic refs,不做 eager topic loading | +| worktree-aware | 
project memory 在同一個 git 倉庫的 worktree 間共享,project-local 仍保持隔離 | +| session continuity | 臨時 working state 與 durable memory 分層儲存、分層載入 | +| reviewer surface | `cam memory` / `cam session` / `cam audit` 為維護者與 reviewer 提供可核查的審查入口 | + +## 能力對照 + +| 能力 | Claude Code | Codex today | Codex Auto Memory | +| :-- | :-- | :-- | :-- | +| 自動寫 memory | Built in | 沒有完整公開契約 | 透過 companion sync flow 提供 | +| 本地 Markdown memory | Built in | 沒有完整公開契約 | 支援 | +| `MEMORY.md` 啟動入口 | Built in | 沒有 | 支援 | +| 200 行啟動預算 | Built in | 沒有 | 支援 | +| topic files 按需讀取 | Built in | 沒有 | 部分支援,啟動時暴露 topic refs,供後續按需讀取 | +| 跨會話 continuity | 社群方案較多 | 沒有完整公開契約 | 作為獨立 companion layer 支援 | +| worktree 共享 project memory | Built in | 沒有公開契約 | 支援 | +| inspect / audit memory | `/memory` | 無等價命令 | `cam memory` | +| native hooks / memory | Built in | Experimental / under development | 目前只保留 compatibility seam | + +`cam memory` 目前是 inspection / audit surface:它會暴露真正進入 startup payload 的 quoted startup files、startup budget、按需 topic refs、edit paths,以及 `--recent [count]` 下的 recent durable sync audit。 +recent durable sync audit 也會顯式暴露被保守 suppress 的 conflict candidates,避免在同一個 rollout 或和現有 durable memory 衝突時靜默 merge。 +如果主 memory 檔案已寫入,但 reviewer sidecar 沒有完整落盤,`cam memory` 會盡力暴露 pending sync recovery marker,幫助 reviewer 辨識 partial-success 狀態。 + +## 快速開始 + +### 1. Clone 並安裝 + +```bash +git clone https://github.com/Boulea7/Codex-Auto-Memory.git +cd Codex-Auto-Memory +pnpm install +``` + +### 2. 建構並連結全域命令 + +```bash +pnpm build +pnpm link --global +``` + +> 連結之後,`cam` 命令就可以在任何目錄使用。 + +### 3. 在你的專案裡初始化 + +```bash +cd /你的專案目錄 +cam init +``` + +這會在專案根目錄產生 `codex-auto-memory.json`(追蹤到 Git),並在本地建立 `.codex-auto-memory.local.json`(預設 gitignored)。 + +### 4. 透過 wrapper 啟動 Codex + +```bash +cam run +``` + +每次會話結束後,`cam` 會自動從 Codex rollout 日誌中提取資訊並寫入 memory 檔案。 + +### 5. 
檢視 memory 狀態 + +```bash +cam memory +cam session status +cam session refresh +cam remember "Always use pnpm instead of npm" +cam forget "old debug note" +cam audit +``` + +## 常用命令 + +| 命令 | 作用 | +| :-- | :-- | +| `cam run` / `cam exec` / `cam resume` | 編譯 startup memory 並透過 wrapper 啟動 Codex | +| `cam sync` | 手動把最近 rollout 同步進 durable memory | +| `cam memory` | 檢視真正進入 startup payload 的 quoted startup files、按需 topic refs、startup budget、edit paths,以及 `--recent [count]` 下的 durable sync audit 與 suppressed conflict candidates | +| `cam remember` / `cam forget` | 顯式新增或刪除 memory | +| `cam session save` | merge / incremental save;從 rollout 增量寫入 continuity | +| `cam session refresh` | replace / clean regeneration;從選定 provenance 重新生成 continuity 並覆蓋所選 scope | +| `cam session load` / `status` | continuity reviewer surface;顯示 latest continuity diagnostics、latest audit drill-down、compact prior preview 與 pending continuity recovery marker | +| `cam session clear` / `open` | 清理 current active continuity,或打開 local continuity 目錄 | +| `cam audit` | 做倉庫級隱私 / secret hygiene 審查 | +| `cam doctor` | 檢查目前 companion wiring 與 native readiness posture | + +## 工作方式 + +### 設計原則 + +- `local-first and auditable` +- `Markdown files are the product surface` +- `companion-first, with a narrow compatibility seam` +- `session continuity` 與 `durable memory` 明確分離 + +### 為什麼現在不直接切到 native memory + +- 官方公開文件仍未給出完整、穩定、等價於 Claude Code 的 native memory 契約 +- 本地 `cam doctor --json` 仍將 `memories` / `codex_hooks` 視為 readiness signal,而非 trusted primary path +- 因此專案預設仍堅持 companion-first,直到公開文件、執行時穩定性與 CI 可驗證性都足夠強 + +## 儲存布局 + +Durable memory: + +```text +~/.codex-auto-memory/ +├── global/ +│ └── MEMORY.md +└── projects// + ├── project/ + │ ├── MEMORY.md + │ └── commands.md + └── locals// + ├── MEMORY.md + └── workflow.md +``` + +Session continuity: + +```text +~/.codex-auto-memory/projects//continuity/project/active.md +/.codex-auto-memory/sessions/active.md +``` + +更完整的結構與邊界說明,請參考架構文件。 + +## 文件導航 + +- 
[文檔首頁(中文)](docs/README.md) +- [Documentation Hub (English)](docs/README.en.md) +- [Claude Code 參考契約(中文)](docs/claude-reference.md) | [English](docs/claude-reference.en.md) +- [架構設計(中文)](docs/architecture.md) | [English](docs/architecture.en.md) +- [Native migration 策略(中文)](docs/native-migration.md) | [English](docs/native-migration.en.md) +- [Session continuity 設計](docs/session-continuity.md) +- [Release checklist](docs/release-checklist.md) +- [Contributing](CONTRIBUTING.md) + +## 目前狀態 + +- durable memory companion path:可用 +- topic-aware startup lookup:可用 +- session continuity companion layer:可用 +- reviewer audit surfaces:可用 +- tagged GitHub Releases:release workflow 已定義並以 tarball artifact 為目標;推送首個真實 tag 前,應先確認預設分支上的該 workflow 已啟用且可觀測;npm publish 仍保持手動流程 +- native memory / native hooks primary path:未啟用,仍非 trusted implementation path + +## 路線圖 + +### v0.1 + +- companion CLI +- Markdown memory store +- 200-line startup compiler +- worktree-aware project identity +- 初始 reviewer / maintainer 文件體系 + +### v0.2 + +- 更穩的 contradiction handling +- 更清楚的 `cam memory` / `cam session` 審查 UX +- continuity diagnostics 與 reviewer packet 持續收緊資訊層次,並顯式暴露 confidence / warnings +- release-facing 驗證持續收緊到 tarball install smoke,確保 `.tgz` 安裝後的 `cam` bin shim 可直接工作 +- 繼續保留對未來 hook surface 的 compatibility seam + +### v0.3+ + +- 持續追蹤官方 Codex memory / hooks surfaces,不預設主路徑變更 +- 可選 GUI / TUI browser +- 更強的跨會話 diagnostics 與 confidence surfaces + +## 貢獻與授權 + +- 貢獻指南:[CONTRIBUTING.md](./CONTRIBUTING.md) +- License:[Apache-2.0](./LICENSE) + +如果你在 README、官方文件與本地執行時觀察之間發現衝突,請優先相信: + +1. 官方產品文件 +2. 可重現的本地行為 +3. 
對不確定性的明確說明 + +而不是更自信但證據不足的表述。 diff --git a/package.json b/package.json index 9894276..67e6da4 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,9 @@ "docs", "schemas", "README.md", + "README.zh-TW.md", "README.en.md", + "README.ja.md", "LICENSE" ], "engines": { diff --git a/test/docs-contract.test.ts b/test/docs-contract.test.ts index 879612d..8d276c8 100644 --- a/test/docs-contract.test.ts +++ b/test/docs-contract.test.ts @@ -9,7 +9,9 @@ async function readDoc(relativePath: string): Promise { describe("docs contract", () => { it("keeps the public reviewer command surface and deterministic verification entry points documented", async () => { const readme = await readDoc("README.md"); + const readmeTw = await readDoc("README.zh-TW.md"); const readmeEn = await readDoc("README.en.md"); + const readmeJa = await readDoc("README.ja.md"); const releaseChecklist = await readDoc("docs/release-checklist.md"); const contributing = await readDoc("CONTRIBUTING.md"); const ciWorkflow = await readDoc(".github/workflows/ci.yml"); @@ -24,12 +26,20 @@ describe("docs contract", () => { expect(readme).toContain("reviewer warning prose"); expect(readme).toContain("tagged GitHub Releases"); expect(readme).toContain("tarball install smoke"); + expect(readme).toContain("README.zh-TW.md"); + expect(readme).toContain("README.ja.md"); + expect(readmeTw).toContain("README.md"); + expect(readmeTw).toContain("README.en.md"); + expect(readmeJa).toContain("README.md"); + expect(readmeJa).toContain("README.en.md"); expect(readmeEn).toContain("cam memory"); expect(readmeEn).toContain("cam session status"); expect(readmeEn).toContain("confidence"); expect(readmeEn).toContain("deterministic scrub"); expect(readmeEn).toContain("tagged GitHub Releases"); expect(readmeEn).toContain("tarball install smoke"); + expect(readmeEn).toContain("README.zh-TW.md"); + expect(readmeEn).toContain("README.ja.md"); expect(releaseChecklist).toContain("pnpm test:dist-cli-smoke"); 
expect(releaseChecklist).toContain("pnpm test:tarball-install-smoke"); expect(releaseChecklist).toContain("node dist/cli.js --version"); @@ -49,6 +59,9 @@ describe("docs contract", () => { expect(packageJson.scripts["test:tarball-install-smoke"]).toBe( "vitest run test/tarball-install-smoke.test.ts" ); + expect(packageJson.files).toEqual( + expect.arrayContaining(["README.md", "README.zh-TW.md", "README.en.md", "README.ja.md"]) + ); expect(packageJson.scripts.prepack).toBe("pnpm build"); expect(packageJson.scripts["verify:release"]).toContain("pnpm test:dist-cli-smoke"); expect(packageJson.scripts["verify:release"]).toContain("pnpm test:tarball-install-smoke"); From 291cba3f212125e3aa06e911612c8a69a391bf76 Mon Sep 17 00:00:00 2001 From: blocks Date: Sat, 21 Mar 2026 00:03:26 +0800 Subject: [PATCH 6/9] chore: clean multilingual readme formatting --- README.ja.md | 6 +++--- README.zh-TW.md | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.ja.md b/README.ja.md index 787d1cf..1b2a229 100644 --- a/README.ja.md +++ b/README.ja.md @@ -25,7 +25,7 @@

-> `codex-auto-memory` は汎用メモアプリでもクラウド型メモリサービスでもありません。
+> `codex-auto-memory` は汎用メモアプリでもクラウド型メモリサービスでもありません。
> 現在の Codex CLI に対して、ローカル Markdown、コンパクトな startup injection、必要時のみの topic file 読み出し、そして companion runtime を使い、Claude Code auto memory の観測可能な契約をできるだけ再現することが目的です。 --- @@ -114,8 +114,8 @@ Claude Code には比較的明確な auto memory 契約があります。 | inspect / audit memory | `/memory` | 相当コマンドなし | `cam memory` | | native hooks / memory | Built in | Experimental / under development | compatibility seam のみ保持 | -`cam memory` は inspection / audit surface として設計されています。 -実際に startup payload に入った quoted startup files、startup budget、on-demand topic refs、edit paths、さらに `--recent [count]` の recent durable sync audit を表示します。 +`cam memory` は inspection / audit surface として設計されています。
+実際に startup payload に入った quoted startup files、startup budget、on-demand topic refs、edit paths、さらに `--recent [count]` の recent durable sync audit を表示します。
recent sync audit では、保守的に suppress された conflict candidates も reviewer-visible に保持され、矛盾する rollout 出力が silent merge されないようになっています。 ## クイックスタート diff --git a/README.zh-TW.md b/README.zh-TW.md index 7ef0090..49254c0 100644 --- a/README.zh-TW.md +++ b/README.zh-TW.md @@ -25,7 +25,7 @@

-> `codex-auto-memory` 不是通用筆記軟體,也不是雲端記憶服務。
+> `codex-auto-memory` 不是通用筆記軟體,也不是雲端記憶服務。
> 它的目標是在今天的 Codex CLI 上,以本地 Markdown、緊湊 startup injection、按需 topic file 讀取與 companion runtime,盡可能重現 Claude Code auto memory 的可觀察產品契約。 --- @@ -114,8 +114,8 @@ Claude Code 已經公開了一套相對清晰的 auto memory 產品契約: | inspect / audit memory | `/memory` | 無等價命令 | `cam memory` | | native hooks / memory | Built in | Experimental / under development | 目前只保留 compatibility seam | -`cam memory` 目前是 inspection / audit surface:它會暴露真正進入 startup payload 的 quoted startup files、startup budget、按需 topic refs、edit paths,以及 `--recent [count]` 下的 recent durable sync audit。 -recent durable sync audit 也會顯式暴露被保守 suppress 的 conflict candidates,避免在同一個 rollout 或和現有 durable memory 衝突時靜默 merge。 +`cam memory` 目前是 inspection / audit surface:它會暴露真正進入 startup payload 的 quoted startup files、startup budget、按需 topic refs、edit paths,以及 `--recent [count]` 下的 recent durable sync audit。
+recent durable sync audit 也會顯式暴露被保守 suppress 的 conflict candidates,避免在同一個 rollout 或和現有 durable memory 衝突時靜默 merge。
如果主 memory 檔案已寫入,但 reviewer sidecar 沒有完整落盤,`cam memory` 會盡力暴露 pending sync recovery marker,幫助 reviewer 辨識 partial-success 狀態。 ## 快速開始 From a2f4001308f32c87f428c54e7a4c3262c770278a Mon Sep 17 00:00:00 2001 From: blocks Date: Mon, 23 Mar 2026 22:52:01 +0800 Subject: [PATCH 7/9] test: tighten release tarball contract --- docs/release-checklist.md | 1 + test/docs-contract.test.ts | 25 +++++++++++++++++++++---- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/docs/release-checklist.md b/docs/release-checklist.md index d21263d..71cfe84 100644 --- a/docs/release-checklist.md +++ b/docs/release-checklist.md @@ -28,6 +28,7 @@ Use this checklist before cutting any alpha or beta release of `codex-auto-memor - Run `pnpm test` - Run `pnpm build` - Run `pnpm pack:check` +- Confirm `package.json.files` still whitelists the release-facing surfaces you intend to ship: `dist`, `docs`, `schemas`, the multilingual READMEs, and `LICENSE`. - Confirm `pnpm build` still starts from a clean `dist/` directory so `npm pack` cannot accidentally pick up stale compiled artifacts from an older tree shape. - If you add new generated outputs beyond `dist/`, keep their cleanup path aligned with the build and pack workflow instead of letting release tarballs accumulate leftovers. - After `pnpm build`, prefer validating release-facing CLI behavior through `node dist/cli.js ...` rather than `tsx src/cli.ts`. 
diff --git a/test/docs-contract.test.ts b/test/docs-contract.test.ts index 8d276c8..37aad99 100644 --- a/test/docs-contract.test.ts +++ b/test/docs-contract.test.ts @@ -2,6 +2,14 @@ import fs from "node:fs/promises"; import path from "node:path"; import { describe, expect, it } from "vitest"; +interface PackageJsonContract { + bin: { + cam: string; + }; + files: string[]; + scripts: Record; +} + async function readDoc(relativePath: string): Promise { return fs.readFile(path.join(process.cwd(), relativePath), "utf8"); } @@ -16,9 +24,7 @@ describe("docs contract", () => { const contributing = await readDoc("CONTRIBUTING.md"); const ciWorkflow = await readDoc(".github/workflows/ci.yml"); const releaseWorkflow = await readDoc(".github/workflows/release.yml"); - const packageJson = JSON.parse(await readDoc("package.json")) as { - scripts: Record; - }; + const packageJson = JSON.parse(await readDoc("package.json")) as PackageJsonContract; expect(readme).toContain("cam memory"); expect(readme).toContain("cam session status"); @@ -48,6 +54,7 @@ describe("docs contract", () => { expect(releaseChecklist).toContain("pnpm test:reviewer-smoke"); expect(releaseChecklist).toContain("pnpm test:cli-smoke"); expect(releaseChecklist).toContain("pnpm pack:check"); + expect(releaseChecklist).toContain("package.json.files"); expect(releaseChecklist).toContain("node dist/cli.js session refresh --json"); expect(releaseChecklist).toContain("node dist/cli.js session load --json"); expect(releaseChecklist).toContain("node dist/cli.js session status --json"); @@ -59,8 +66,18 @@ describe("docs contract", () => { expect(packageJson.scripts["test:tarball-install-smoke"]).toBe( "vitest run test/tarball-install-smoke.test.ts" ); + expect(packageJson.bin.cam).toBe("dist/cli.js"); expect(packageJson.files).toEqual( - expect.arrayContaining(["README.md", "README.zh-TW.md", "README.en.md", "README.ja.md"]) + expect.arrayContaining([ + "dist", + "docs", + "schemas", + "README.md", + "README.zh-TW.md", 
+ "README.en.md", + "README.ja.md", + "LICENSE" + ]) ); expect(packageJson.scripts.prepack).toBe("pnpm build"); expect(packageJson.scripts["verify:release"]).toContain("pnpm test:dist-cli-smoke"); From 145ff820029f5d5a68323252ae04aec395404676 Mon Sep 17 00:00:00 2001 From: blocks Date: Mon, 23 Mar 2026 23:09:25 +0800 Subject: [PATCH 8/9] test: harden dist cli smoke isolation --- test/dist-cli-smoke.test.ts | 42 ++++++++++++++++++++++++++++--------- test/helpers/cli-runner.ts | 5 +++-- 2 files changed, 35 insertions(+), 12 deletions(-) diff --git a/test/dist-cli-smoke.test.ts b/test/dist-cli-smoke.test.ts index 583fa3a..03f5794 100644 --- a/test/dist-cli-smoke.test.ts +++ b/test/dist-cli-smoke.test.ts @@ -14,7 +14,6 @@ import { import { runCli } from "./helpers/cli-runner.js"; const tempDirs: string[] = []; -const originalHome = process.env.HOME; async function tempDir(prefix: string): Promise { const dir = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); @@ -22,8 +21,27 @@ async function tempDir(prefix: string): Promise { return dir; } +async function waitForFile(pathname: string, timeoutMs = 2_000): Promise { + const deadline = Date.now() + timeoutMs; + while (true) { + try { + return await fs.readFile(pathname, "utf8"); + } catch (error) { + if ( + error instanceof Error && + "code" in error && + error.code === "ENOENT" && + Date.now() < deadline + ) { + await new Promise((resolve) => setTimeout(resolve, 25)); + continue; + } + throw error; + } + } +} + afterEach(async () => { - process.env.HOME = originalHome; await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true }))); }); @@ -44,7 +62,7 @@ describe("dist cli smoke", () => { const homeDir = await tempDir("cam-dist-home-"); const projectDir = await tempDir("cam-dist-project-"); const memoryRoot = await tempDir("cam-dist-memory-root-"); - process.env.HOME = homeDir; + const cliEnv = { HOME: homeDir }; const config = makeAppConfig(); await writeCamConfig(projectDir, config, { 
@@ -123,14 +141,16 @@ describe("dist cli smoke", () => { ); const memoryResult = runCli(projectDir, ["memory", "--recent", "1", "--json"], { - entrypoint: "dist" + entrypoint: "dist", + env: cliEnv }); const sessionResult = runCli(projectDir, ["session", "status", "--json"], { - entrypoint: "dist" + entrypoint: "dist", + env: cliEnv }); - expect(memoryResult.exitCode).toBe(0); - expect(sessionResult.exitCode).toBe(0); + expect(memoryResult.exitCode, memoryResult.stderr).toBe(0); + expect(sessionResult.exitCode, sessionResult.stderr).toBe(0); const memoryPayload = JSON.parse(memoryResult.stdout) as { recentSyncAudit: Array<{ rolloutPath: string }>; @@ -146,6 +166,7 @@ describe("dist cli smoke", () => { it("routes exec through the compiled wrapper entrypoint", async () => { const repoDir = await tempDir("cam-dist-wrapper-repo-"); + const homeDir = await tempDir("cam-dist-wrapper-home-"); const memoryRoot = await tempDir("cam-dist-wrapper-memory-"); await initGitRepo(repoDir); @@ -175,11 +196,12 @@ fs.writeFileSync(${JSON.stringify(capturedArgsPath)}, JSON.stringify(process.arg }); const result = runCli(repoDir, ["exec", "continue"], { - entrypoint: "dist" + entrypoint: "dist", + env: { HOME: homeDir } }); - const capturedArgs = JSON.parse(await fs.readFile(capturedArgsPath, "utf8")) as string[]; - expect(result.exitCode).toBe(0); + expect(result.exitCode, result.stderr).toBe(0); + const capturedArgs = JSON.parse(await waitForFile(capturedArgsPath)) as string[]; expect(capturedArgs).toContain("exec"); expect(capturedArgs).toContain("continue"); expect(capturedArgs.some((value) => value.startsWith("base_instructions="))).toBe(true); diff --git a/test/helpers/cli-runner.ts b/test/helpers/cli-runner.ts index 364bd11..a53d55c 100644 --- a/test/helpers/cli-runner.ts +++ b/test/helpers/cli-runner.ts @@ -19,9 +19,10 @@ export function runCli( } = {} ): ProcessOutput { const entrypoint = options.entrypoint ?? "source"; + const env = options.env ? 
{ ...process.env, ...options.env } : process.env; if (entrypoint === "dist") { - return runCommandCapture("node", [distCliPath, ...args], repoDir, options.env); + return runCommandCapture("node", [distCliPath, ...args], repoDir, env); } - return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir, options.env); + return runCommandCapture(tsxBinaryPath, [sourceCliPath, ...args], repoDir, env); } From bbc7d55835c707155a015c488891479ffc1e1619 Mon Sep 17 00:00:00 2001 From: blocks Date: Mon, 23 Mar 2026 23:29:34 +0800 Subject: [PATCH 9/9] ci: keep dist smoke out of default test suite --- CONTRIBUTING.md | 3 +++ package.json | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c86359f..bceab61 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -32,6 +32,9 @@ pnpm test:tarball-install-smoke Use Node 20+ and `pnpm`. +`pnpm test` is the default source-level suite. Build-dependent release checks stay explicit in +`pnpm test:dist-cli-smoke` and `pnpm test:tarball-install-smoke`. + ## Branch and PR expectations - Keep pull requests focused. diff --git a/package.json b/package.json index 67e6da4..114d336 100644 --- a/package.json +++ b/package.json @@ -27,7 +27,7 @@ "lint": "tsc --noEmit -p tsconfig.json", "pack:check": "npm pack --dry-run", "prepack": "pnpm build", - "test": "vitest run", + "test": "vitest run --exclude test/dist-cli-smoke.test.ts --exclude test/tarball-install-smoke.test.ts", "test:cli-smoke": "vitest run test/audit.test.ts test/memory-command.test.ts test/session-command.test.ts test/wrapper-session-continuity.test.ts", "test:dist-cli-smoke": "vitest run test/dist-cli-smoke.test.ts", "test:docs-contract": "vitest run test/docs-contract.test.ts",