From 5cbfd5268b36dc1837edf07fe36ccbee3aab0197 Mon Sep 17 00:00:00 2001 From: ducdmdev Date: Wed, 11 Feb 2026 17:45:44 +0700 Subject: [PATCH 1/5] feat!: replace deterministic analysis with AI agent architecture BREAKING CHANGE: Remove @pr-impact/core, @pr-impact/cli, @pr-impact/mcp-server. Replace with 4 new packages: - @pr-impact/tools-core: 6 pure tool handler functions (git_diff, read_file_at_ref, list_changed_files, search_code, find_importers, list_test_files) - @pr-impact/tools: MCP server wrapping tools-core for Claude Code / AI assistants - @pr-impact/action: GitHub Action with Anthropic API agentic loop (30 iterations, 180s timeout, temperature 0, PR comment posting) - @pr-impact/skill: Claude Code plugin with assembled skill.md from shared templates Analysis logic moves from coded heuristics to AI reasoning via prompt templates. Shared templates (system-prompt.md, report-template.md) define the methodology and are embedded at build time into both the skill and action packages. 53 tests across 11 test files, all passing. 
--- .changeset/ai-agent-rewrite.md | 17 + .changeset/config.json | 2 +- .github/workflows/ci.yml | 12 +- CLAUDE.md | 254 +- CONTRIBUTING.md | 29 +- README.md | 533 +-- docs/analysis-pipeline.md | 174 - docs/architecture.md | 176 - docs/ci-integration.md | 295 -- docs/configuration-guide.md | 148 - docs/data-flow.md | 198 -- docs/getting-started.md | 146 - docs/mcp-integration.md | 284 -- docs/migration-guide.md | 129 + .../2026-02-11-ai-agent-rewrite-design.md | 456 +++ .../plans/2026-02-11-ai-agent-rewrite-plan.md | 3005 +++++++++++++++++ docs/programmatic-api.md | 251 -- docs/risk-scoring.md | 189 -- docs/troubleshooting.md | 240 -- eslint.config.mjs | 14 +- packages/action/CLAUDE.md | 37 + packages/action/README.md | 57 + packages/action/__tests__/client.test.ts | 150 + packages/action/__tests__/comment.test.ts | 225 ++ packages/action/__tests__/index.test.ts | 176 + packages/action/__tests__/tools.test.ts | 124 + packages/action/action.yml | 36 + packages/action/package.json | 26 + packages/action/src/client.ts | 177 + .../src/comment.ts} | 66 +- packages/action/src/generated/templates.ts | 6 + packages/action/src/index.ts | 63 + packages/action/src/tools.ts | 39 + packages/{cli => action}/tsconfig.json | 2 +- .../{mcp-server => action}/tsup.config.ts | 6 +- packages/{cli => action}/vitest.config.ts | 0 packages/cli/CHANGELOG.md | 28 - packages/cli/CLAUDE.md | 43 - packages/cli/README.md | 142 - .../cli/__tests__/cli-registration.test.ts | 59 - .../cli/__tests__/commands/analyze.test.ts | 230 -- .../cli/__tests__/commands/breaking.test.ts | 426 --- .../cli/__tests__/commands/comment.test.ts | 268 -- .../cli/__tests__/commands/impact.test.ts | 269 -- packages/cli/__tests__/commands/risk.test.ts | 298 -- packages/cli/__tests__/e2e-smoke.test.ts | 129 - packages/cli/__tests__/formatting.test.ts | 193 -- packages/cli/__tests__/github/ci-env.test.ts | 124 - .../__tests__/github/comment-poster.test.ts | 185 - packages/cli/node_modules/.bin/esbuild | 14 - 
packages/cli/node_modules/.bin/tsc | 17 - packages/cli/node_modules/.bin/tsserver | 17 - packages/cli/node_modules/.bin/tsup | 17 - packages/cli/node_modules/.bin/tsup-node | 17 - packages/cli/node_modules/@pr-impact/core | 1 - packages/cli/node_modules/@types/node | 1 - packages/cli/node_modules/chalk | 1 - packages/cli/node_modules/commander | 1 - packages/cli/node_modules/ora | 1 - packages/cli/node_modules/tsup | 1 - packages/cli/node_modules/typescript | 1 - packages/cli/package.json | 47 - packages/cli/src/commands/analyze.ts | 52 - packages/cli/src/commands/breaking.ts | 126 - packages/cli/src/commands/comment.ts | 84 - packages/cli/src/commands/impact.ts | 148 - packages/cli/src/commands/risk.ts | 107 - packages/cli/src/github/ci-env.ts | 59 - packages/cli/src/index.ts | 24 - packages/core/CHANGELOG.md | 21 - packages/core/CLAUDE.md | 64 - packages/core/README.md | 119 - packages/core/__tests__/analyzer.test.ts | 621 ---- .../core/__tests__/coverage-checker.test.ts | 421 --- packages/core/__tests__/detector.test.ts | 1242 ------- packages/core/__tests__/diff-parser.test.ts | 606 ---- packages/core/__tests__/export-differ.test.ts | 1069 ------ .../core/__tests__/file-categorizer.test.ts | 330 -- packages/core/__tests__/impact-graph.test.ts | 616 ---- .../core/__tests__/import-resolver.test.ts | 223 -- packages/core/__tests__/json-reporter.test.ts | 345 -- .../core/__tests__/markdown-reporter.test.ts | 556 --- .../core/__tests__/risk-calculator.test.ts | 764 ----- .../core/__tests__/signature-differ.test.ts | 263 -- .../core/__tests__/staleness-checker.test.ts | 969 ------ packages/core/__tests__/test-mapper.test.ts | 210 -- packages/core/node_modules/.bin/tsc | 17 - packages/core/node_modules/.bin/tsserver | 17 - packages/core/node_modules/.bin/tsup | 17 - packages/core/node_modules/.bin/tsup-node | 17 - packages/core/node_modules/.bin/vitest | 17 - packages/core/node_modules/@types/node | 1 - packages/core/node_modules/fast-glob | 1 - 
packages/core/node_modules/simple-git | 1 - packages/core/node_modules/tsup | 1 - packages/core/node_modules/typescript | 1 - packages/core/node_modules/vitest | 1 - packages/core/src/analyzer.ts | 164 - packages/core/src/breaking/detector.ts | 277 -- packages/core/src/breaking/export-differ.ts | 625 ---- .../core/src/breaking/signature-differ.ts | 214 -- .../core/src/coverage/coverage-checker.ts | 60 - packages/core/src/coverage/test-mapper.ts | 103 - packages/core/src/diff/diff-parser.ts | 157 - packages/core/src/diff/file-categorizer.ts | 103 - packages/core/src/docs/staleness-checker.ts | 345 -- packages/core/src/impact/impact-graph.ts | 72 - packages/core/src/imports/import-resolver.ts | 182 - packages/core/src/index.ts | 33 - packages/core/src/output/json-reporter.ts | 8 - packages/core/src/output/markdown-reporter.ts | 153 - packages/core/src/risk/factors.ts | 239 -- packages/core/src/risk/risk-calculator.ts | 74 - packages/core/src/types.ts | 114 - packages/mcp-server/CHANGELOG.md | 28 - packages/mcp-server/CLAUDE.md | 39 - packages/mcp-server/README.md | 87 - .../mcp-server/__tests__/formatting.test.ts | 191 -- .../__tests__/server-registration.test.ts | 54 - .../__tests__/tools/analyze-diff.test.ts | 121 - .../tools/get-breaking-changes.test.ts | 216 -- .../__tests__/tools/get-impact-graph.test.ts | 193 -- .../__tests__/tools/get-risk-score.test.ts | 221 -- packages/mcp-server/node_modules/.bin/tsc | 17 - .../mcp-server/node_modules/.bin/tsserver | 17 - packages/mcp-server/node_modules/.bin/tsup | 17 - .../mcp-server/node_modules/.bin/tsup-node | 17 - .../node_modules/@modelcontextprotocol/sdk | 1 - .../mcp-server/node_modules/@pr-impact/core | 1 - packages/mcp-server/node_modules/@types/node | 1 - packages/mcp-server/node_modules/tsup | 1 - packages/mcp-server/node_modules/typescript | 1 - packages/mcp-server/node_modules/zod | 1 - packages/mcp-server/src/index.ts | 35 - packages/mcp-server/src/tools/analyze-diff.ts | 33 - 
.../src/tools/get-breaking-changes.ts | 84 - .../mcp-server/src/tools/get-impact-graph.ts | 117 - .../mcp-server/src/tools/get-risk-score.ts | 71 - packages/skill/.claude-plugin/config.json | 6 + packages/skill/CLAUDE.md | 28 + packages/skill/README.md | 32 + packages/skill/mcp.json | 8 + packages/skill/package.json | 25 + packages/skill/skill.md | 173 + packages/tools-core/CLAUDE.md | 38 + packages/tools-core/README.md | 30 + .../tools-core/__tests__/find-imports.test.ts | 96 + .../tools-core/__tests__/git-diff.test.ts | 61 + .../tools-core/__tests__/list-files.test.ts | 110 + .../tools-core/__tests__/list-tests.test.ts | 78 + .../tools-core/__tests__/read-file.test.ts | 49 + .../tools-core/__tests__/regression.test.ts | 159 + .../tools-core/__tests__/search-code.test.ts | 79 + packages/{core => tools-core}/package.json | 27 +- packages/tools-core/src/index.ts | 22 + packages/tools-core/src/tools/find-imports.ts | 114 + packages/tools-core/src/tools/git-diff.ts | 22 + packages/tools-core/src/tools/list-files.ts | 83 + packages/tools-core/src/tools/list-tests.ts | 88 + packages/tools-core/src/tools/read-file.ts | 17 + packages/tools-core/src/tools/search-code.ts | 64 + .../{mcp-server => tools-core}/tsconfig.json | 2 +- packages/{core => tools-core}/tsup.config.ts | 0 .../vitest.config.ts | 0 packages/tools/CLAUDE.md | 30 + packages/tools/README.md | 40 + .../tools/__tests__/build-scripts.test.ts | 68 + packages/tools/__tests__/index.test.ts | 101 + packages/tools/__tests__/register.test.ts | 163 + packages/{mcp-server => tools}/package.json | 21 +- packages/tools/src/index.ts | 25 + packages/tools/src/register.ts | 134 + packages/{core => tools}/tsconfig.json | 2 +- packages/{cli => tools}/tsup.config.ts | 0 packages/tools/vitest.config.ts | 7 + pnpm-lock.yaml | 623 +++- scripts/build-skill.ts | 38 + scripts/embed-templates.ts | 26 + scripts/tsconfig.json | 10 + templates/report-template.md | 58 + templates/system-prompt.md | 92 + vitest.config.ts | 11 +- 182 
files changed, 7656 insertions(+), 19093 deletions(-) create mode 100644 .changeset/ai-agent-rewrite.md delete mode 100644 docs/analysis-pipeline.md delete mode 100644 docs/architecture.md delete mode 100644 docs/ci-integration.md delete mode 100644 docs/configuration-guide.md delete mode 100644 docs/data-flow.md delete mode 100644 docs/getting-started.md delete mode 100644 docs/mcp-integration.md create mode 100644 docs/migration-guide.md create mode 100644 docs/plans/2026-02-11-ai-agent-rewrite-design.md create mode 100644 docs/plans/2026-02-11-ai-agent-rewrite-plan.md delete mode 100644 docs/programmatic-api.md delete mode 100644 docs/risk-scoring.md delete mode 100644 docs/troubleshooting.md create mode 100644 packages/action/CLAUDE.md create mode 100644 packages/action/README.md create mode 100644 packages/action/__tests__/client.test.ts create mode 100644 packages/action/__tests__/comment.test.ts create mode 100644 packages/action/__tests__/index.test.ts create mode 100644 packages/action/__tests__/tools.test.ts create mode 100644 packages/action/action.yml create mode 100644 packages/action/package.json create mode 100644 packages/action/src/client.ts rename packages/{cli/src/github/comment-poster.ts => action/src/comment.ts} (50%) create mode 100644 packages/action/src/generated/templates.ts create mode 100644 packages/action/src/index.ts create mode 100644 packages/action/src/tools.ts rename packages/{cli => action}/tsconfig.json (74%) rename packages/{mcp-server => action}/tsup.config.ts (66%) rename packages/{cli => action}/vitest.config.ts (100%) delete mode 100644 packages/cli/CHANGELOG.md delete mode 100644 packages/cli/CLAUDE.md delete mode 100644 packages/cli/README.md delete mode 100644 packages/cli/__tests__/cli-registration.test.ts delete mode 100644 packages/cli/__tests__/commands/analyze.test.ts delete mode 100644 packages/cli/__tests__/commands/breaking.test.ts delete mode 100644 packages/cli/__tests__/commands/comment.test.ts delete mode 
100644 packages/cli/__tests__/commands/impact.test.ts delete mode 100644 packages/cli/__tests__/commands/risk.test.ts delete mode 100644 packages/cli/__tests__/e2e-smoke.test.ts delete mode 100644 packages/cli/__tests__/formatting.test.ts delete mode 100644 packages/cli/__tests__/github/ci-env.test.ts delete mode 100644 packages/cli/__tests__/github/comment-poster.test.ts delete mode 100755 packages/cli/node_modules/.bin/esbuild delete mode 100755 packages/cli/node_modules/.bin/tsc delete mode 100755 packages/cli/node_modules/.bin/tsserver delete mode 100755 packages/cli/node_modules/.bin/tsup delete mode 100755 packages/cli/node_modules/.bin/tsup-node delete mode 120000 packages/cli/node_modules/@pr-impact/core delete mode 120000 packages/cli/node_modules/@types/node delete mode 120000 packages/cli/node_modules/chalk delete mode 120000 packages/cli/node_modules/commander delete mode 120000 packages/cli/node_modules/ora delete mode 120000 packages/cli/node_modules/tsup delete mode 120000 packages/cli/node_modules/typescript delete mode 100644 packages/cli/package.json delete mode 100644 packages/cli/src/commands/analyze.ts delete mode 100644 packages/cli/src/commands/breaking.ts delete mode 100644 packages/cli/src/commands/comment.ts delete mode 100644 packages/cli/src/commands/impact.ts delete mode 100644 packages/cli/src/commands/risk.ts delete mode 100644 packages/cli/src/github/ci-env.ts delete mode 100644 packages/cli/src/index.ts delete mode 100644 packages/core/CHANGELOG.md delete mode 100644 packages/core/CLAUDE.md delete mode 100644 packages/core/README.md delete mode 100644 packages/core/__tests__/analyzer.test.ts delete mode 100644 packages/core/__tests__/coverage-checker.test.ts delete mode 100644 packages/core/__tests__/detector.test.ts delete mode 100644 packages/core/__tests__/diff-parser.test.ts delete mode 100644 packages/core/__tests__/export-differ.test.ts delete mode 100644 packages/core/__tests__/file-categorizer.test.ts delete mode 100644 
packages/core/__tests__/impact-graph.test.ts delete mode 100644 packages/core/__tests__/import-resolver.test.ts delete mode 100644 packages/core/__tests__/json-reporter.test.ts delete mode 100644 packages/core/__tests__/markdown-reporter.test.ts delete mode 100644 packages/core/__tests__/risk-calculator.test.ts delete mode 100644 packages/core/__tests__/signature-differ.test.ts delete mode 100644 packages/core/__tests__/staleness-checker.test.ts delete mode 100644 packages/core/__tests__/test-mapper.test.ts delete mode 100755 packages/core/node_modules/.bin/tsc delete mode 100755 packages/core/node_modules/.bin/tsserver delete mode 100755 packages/core/node_modules/.bin/tsup delete mode 100755 packages/core/node_modules/.bin/tsup-node delete mode 100755 packages/core/node_modules/.bin/vitest delete mode 120000 packages/core/node_modules/@types/node delete mode 120000 packages/core/node_modules/fast-glob delete mode 120000 packages/core/node_modules/simple-git delete mode 120000 packages/core/node_modules/tsup delete mode 120000 packages/core/node_modules/typescript delete mode 120000 packages/core/node_modules/vitest delete mode 100644 packages/core/src/analyzer.ts delete mode 100644 packages/core/src/breaking/detector.ts delete mode 100644 packages/core/src/breaking/export-differ.ts delete mode 100644 packages/core/src/breaking/signature-differ.ts delete mode 100644 packages/core/src/coverage/coverage-checker.ts delete mode 100644 packages/core/src/coverage/test-mapper.ts delete mode 100644 packages/core/src/diff/diff-parser.ts delete mode 100644 packages/core/src/diff/file-categorizer.ts delete mode 100644 packages/core/src/docs/staleness-checker.ts delete mode 100644 packages/core/src/impact/impact-graph.ts delete mode 100644 packages/core/src/imports/import-resolver.ts delete mode 100644 packages/core/src/index.ts delete mode 100644 packages/core/src/output/json-reporter.ts delete mode 100644 packages/core/src/output/markdown-reporter.ts delete mode 100644 
packages/core/src/risk/factors.ts delete mode 100644 packages/core/src/risk/risk-calculator.ts delete mode 100644 packages/core/src/types.ts delete mode 100644 packages/mcp-server/CHANGELOG.md delete mode 100644 packages/mcp-server/CLAUDE.md delete mode 100644 packages/mcp-server/README.md delete mode 100644 packages/mcp-server/__tests__/formatting.test.ts delete mode 100644 packages/mcp-server/__tests__/server-registration.test.ts delete mode 100644 packages/mcp-server/__tests__/tools/analyze-diff.test.ts delete mode 100644 packages/mcp-server/__tests__/tools/get-breaking-changes.test.ts delete mode 100644 packages/mcp-server/__tests__/tools/get-impact-graph.test.ts delete mode 100644 packages/mcp-server/__tests__/tools/get-risk-score.test.ts delete mode 100755 packages/mcp-server/node_modules/.bin/tsc delete mode 100755 packages/mcp-server/node_modules/.bin/tsserver delete mode 100755 packages/mcp-server/node_modules/.bin/tsup delete mode 100755 packages/mcp-server/node_modules/.bin/tsup-node delete mode 120000 packages/mcp-server/node_modules/@modelcontextprotocol/sdk delete mode 120000 packages/mcp-server/node_modules/@pr-impact/core delete mode 120000 packages/mcp-server/node_modules/@types/node delete mode 120000 packages/mcp-server/node_modules/tsup delete mode 120000 packages/mcp-server/node_modules/typescript delete mode 120000 packages/mcp-server/node_modules/zod delete mode 100644 packages/mcp-server/src/index.ts delete mode 100644 packages/mcp-server/src/tools/analyze-diff.ts delete mode 100644 packages/mcp-server/src/tools/get-breaking-changes.ts delete mode 100644 packages/mcp-server/src/tools/get-impact-graph.ts delete mode 100644 packages/mcp-server/src/tools/get-risk-score.ts create mode 100644 packages/skill/.claude-plugin/config.json create mode 100644 packages/skill/CLAUDE.md create mode 100644 packages/skill/README.md create mode 100644 packages/skill/mcp.json create mode 100644 packages/skill/package.json create mode 100644 
packages/skill/skill.md create mode 100644 packages/tools-core/CLAUDE.md create mode 100644 packages/tools-core/README.md create mode 100644 packages/tools-core/__tests__/find-imports.test.ts create mode 100644 packages/tools-core/__tests__/git-diff.test.ts create mode 100644 packages/tools-core/__tests__/list-files.test.ts create mode 100644 packages/tools-core/__tests__/list-tests.test.ts create mode 100644 packages/tools-core/__tests__/read-file.test.ts create mode 100644 packages/tools-core/__tests__/regression.test.ts create mode 100644 packages/tools-core/__tests__/search-code.test.ts rename packages/{core => tools-core}/package.json (60%) create mode 100644 packages/tools-core/src/index.ts create mode 100644 packages/tools-core/src/tools/find-imports.ts create mode 100644 packages/tools-core/src/tools/git-diff.ts create mode 100644 packages/tools-core/src/tools/list-files.ts create mode 100644 packages/tools-core/src/tools/list-tests.ts create mode 100644 packages/tools-core/src/tools/read-file.ts create mode 100644 packages/tools-core/src/tools/search-code.ts rename packages/{mcp-server => tools-core}/tsconfig.json (74%) rename packages/{core => tools-core}/tsup.config.ts (100%) rename packages/{mcp-server => tools-core}/vitest.config.ts (100%) create mode 100644 packages/tools/CLAUDE.md create mode 100644 packages/tools/README.md create mode 100644 packages/tools/__tests__/build-scripts.test.ts create mode 100644 packages/tools/__tests__/index.test.ts create mode 100644 packages/tools/__tests__/register.test.ts rename packages/{mcp-server => tools}/package.json (60%) create mode 100644 packages/tools/src/index.ts create mode 100644 packages/tools/src/register.ts rename packages/{core => tools}/tsconfig.json (69%) rename packages/{cli => tools}/tsup.config.ts (100%) create mode 100644 packages/tools/vitest.config.ts create mode 100644 scripts/build-skill.ts create mode 100644 scripts/embed-templates.ts create mode 100644 scripts/tsconfig.json create mode 
100644 templates/report-template.md create mode 100644 templates/system-prompt.md diff --git a/.changeset/ai-agent-rewrite.md b/.changeset/ai-agent-rewrite.md new file mode 100644 index 0000000..572b44b --- /dev/null +++ b/.changeset/ai-agent-rewrite.md @@ -0,0 +1,17 @@ +--- +"@pr-impact/tools-core": major +"@pr-impact/tools": major +"@pr-impact/action": major +"@pr-impact/skill": major +--- + +Replace deterministic analysis engine with AI agent architecture. + +**Breaking:** Removes `@pr-impact/core`, `@pr-impact/cli`, and `@pr-impact/mcp-server`. These are replaced by four new packages: + +- `@pr-impact/tools-core` — Pure tool handler functions (git-diff, read-file, list-files, search-code, find-imports, list-tests) +- `@pr-impact/tools` — MCP server wrapping tools-core with zod schemas +- `@pr-impact/action` — GitHub Action with agentic Claude loop (Anthropic API, 30-iteration limit, temperature 0) +- `@pr-impact/skill` — Claude Code plugin providing `/pr-impact` slash command + +Analysis is now performed by Claude via tool calls rather than deterministic code. The system prompt and report template live in `templates/` and are embedded at build time. 
diff --git a/.changeset/config.json b/.changeset/config.json index b9239a8..1660486 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -2,7 +2,7 @@ "$schema": "https://unpkg.com/@changesets/config@3.1.1/schema.json", "changelog": "@changesets/cli/changelog", "commit": false, - "fixed": [["@pr-impact/core", "@pr-impact/cli", "@pr-impact/mcp-server"]], + "fixed": [["@pr-impact/tools-core", "@pr-impact/tools", "@pr-impact/action", "@pr-impact/skill"]], "linked": [], "access": "public", "baseBranch": "main", diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a21048e..415aa71 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,11 +56,11 @@ jobs: - name: Build (required for cross-package type resolution) run: pnpm build - - name: Type check core - run: pnpm exec tsc --noEmit -p packages/core/tsconfig.json + - name: Type check tools-core + run: pnpm exec tsc --noEmit -p packages/tools-core/tsconfig.json - - name: Type check cli - run: pnpm exec tsc --noEmit -p packages/cli/tsconfig.json + - name: Type check tools + run: pnpm exec tsc --noEmit -p packages/tools/tsconfig.json - - name: Type check mcp-server - run: pnpm exec tsc --noEmit -p packages/mcp-server/tsconfig.json + - name: Type check action + run: pnpm exec tsc --noEmit -p packages/action/tsconfig.json diff --git a/CLAUDE.md b/CLAUDE.md index 4a62431..67f9eca 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,162 +1,156 @@ -# CLAUDE.md — pr-impact +# CLAUDE.md -- pr-impact ## Project overview -pr-impact is a PR analysis tool that detects breaking changes, test coverage gaps, stale documentation, and import-graph impact for pull requests. It produces a weighted risk score and generates Markdown or JSON reports. +pr-impact is an AI-powered PR analysis tool that detects breaking changes, test coverage gaps, stale documentation, and import-graph impact for pull requests. 
It uses Claude to analyze diffs via tool calls and produces a weighted risk score with a structured Markdown report. -This is a **pnpm monorepo** managed with **Turborepo**. The workspace is defined in `pnpm-workspace.yaml` and contains three packages under `packages/`. +This is a **pnpm monorepo** managed with **Turborepo**. The workspace is defined in `pnpm-workspace.yaml` and contains four packages under `packages/`. ## Quick commands ```bash -pnpm build # Build all packages (via turbo, respects dependency order) -pnpm test # Run all tests (vitest, workspace mode) -npx vitest run packages/core/__tests__/FILE.test.ts # Run a single test file -pnpm lint # Lint all packages (ESLint flat config) -pnpm lint:fix # Lint and auto-fix -pnpm build --filter=@pr-impact/core # Build only @pr-impact/core -pnpm build --filter=@pr-impact/cli # Build only @pr-impact/cli -pnpm clean # Remove all dist/ directories -pnpm changeset # Create a new changeset for versioning -pnpm version-packages # Apply changesets and bump versions -pnpm release # Build all packages and publish to npm +pnpm build # Build all packages (via turbo, respects dependency order) +pnpm test # Run all tests (vitest) +npx vitest run packages/action/__tests__/FILE.test.ts # Run a single test file +pnpm lint # Lint all packages (ESLint flat config) +pnpm lint:fix # Lint and auto-fix +pnpm build --filter=@pr-impact/tools-core # Build only tools-core +pnpm build --filter=@pr-impact/action # Build only action (includes prebuild for templates) +pnpm clean # Remove all dist/ directories ``` ## Architecture ``` packages/ - core/ @pr-impact/core — Analysis engine. Pure logic, no I/O except git via simple-git. - cli/ @pr-impact/cli — Commander-based CLI (`pri`). Depends on core. - mcp-server/ @pr-impact/mcp-server — MCP server exposing tools to LLMs. Depends on core. + tools-core/ @pr-impact/tools-core -- Pure tool handler functions. No framework dependency. + tools/ @pr-impact/tools -- MCP server wrapping tools-core. 
Depends on tools-core. + action/ @pr-impact/action -- GitHub Action with agentic Claude loop. Depends on tools-core. + skill/ @pr-impact/skill -- Claude Code plugin (no runtime deps, built from templates). ``` -### packages/core (the main package) +### Dependency graph -All analysis logic lives here. Source is organized by analysis layer: +``` +@pr-impact/tools ────> @pr-impact/tools-core +@pr-impact/action ────> @pr-impact/tools-core +@pr-impact/skill (no runtime dependencies) +``` + +### packages/tools-core (shared foundation) + +Pure handler functions for git/repo operations. Both `tools` (MCP) and `action` (GitHub Action) import from here. + +``` +src/ + index.ts -- Barrel exports for all handlers and types + tools/ + git-diff.ts -- Get raw git diff between two refs + read-file.ts -- Read file content at a specific git ref + list-files.ts -- List changed files with status and stats + search-code.ts -- Search for regex patterns via git grep + find-imports.ts -- Find files that import a given module (cached) + list-tests.ts -- Find test files associated with a source file +``` + +Dependencies: simple-git, fast-glob. + +### packages/tools (MCP server) + +Thin MCP server wrapping tools-core handlers with zod schemas: + +``` +src/ + index.ts -- MCP server entry point (stdio transport) + register.ts -- Tool registration with zod input schemas +``` + +Dependencies: @modelcontextprotocol/sdk, zod, @pr-impact/tools-core. 
+ +### packages/action (GitHub Action) + +GitHub Action that runs an agentic Claude loop to analyze PRs: ``` src/ - analyzer.ts — Top-level analyzePR() orchestrator (runs steps in parallel) - types.ts — All shared TypeScript interfaces - index.ts — Barrel exports for the public API - diff/ - diff-parser.ts — Parse git diff into ChangedFile[] - file-categorizer.ts — Classify files as source/test/doc/config/other - breaking/ - detector.ts — Detect breaking changes across changed files - export-differ.ts — Diff exported symbols between base and head (regex-based) - signature-differ.ts — Compare function/class signatures for changes - coverage/ - coverage-checker.ts — Check whether changed source files have test changes - test-mapper.ts — Map source files to their expected test files - docs/ - staleness-checker.ts — Find stale references in doc files - imports/ - import-resolver.ts — Resolve import paths and find consumers of changed files - impact/ - impact-graph.ts — Build import dependency graph from changed files - risk/ - risk-calculator.ts — Calculate weighted risk score from all factors - factors.ts — Individual risk factor evaluators with weights - output/ - markdown-reporter.ts — Format PRAnalysis as Markdown - json-reporter.ts — Format PRAnalysis as JSON -``` - -### packages/cli - -Commander-based CLI binary (`pri`). Commands live in `src/commands/`: -- `analyze.ts` — full analysis -- `breaking.ts` — breaking changes only -- `comment.ts` — post analysis report as PR comment (upsert via HTML markers) -- `impact.ts` — impact graph only -- `risk.ts` — risk score only - -GitHub integration helpers live in `src/github/`: -- `ci-env.ts` — auto-detect PR number and repo from CI environment variables -- `comment-poster.ts` — create/update PR comments via GitHub API (native fetch) - -Dependencies: commander, chalk, ora. - -### packages/mcp-server - -MCP server exposing tools via `@modelcontextprotocol/sdk`. 
Tool definitions live in `src/tools/`: -- `analyze-diff.ts` -- `get-breaking-changes.ts` -- `get-impact-graph.ts` -- `get-risk-score.ts` - -Dependencies: @modelcontextprotocol/sdk, zod. + index.ts -- GitHub Action entry point (reads inputs, runs analysis, posts comment) + client.ts -- Anthropic API client with 30-iteration limit, 180s timeout, temperature 0 + tools.ts -- Tool dispatcher (switch on tool name, calls tools-core) + comment.ts -- PR comment poster (upsert via HTML markers) + generated/ + templates.ts -- Auto-generated at build time from templates/*.md +``` + +Dependencies: @anthropic-ai/sdk, @actions/core, @actions/github, @pr-impact/tools-core. + +**Build note:** The `prebuild` script runs `tsx ../../scripts/embed-templates.ts` to generate `src/generated/templates.ts` before tsup bundles. Output is CJS (`dist/index.cjs`) because GitHub Actions requires CommonJS. All dependencies are bundled via `noExternal: [/.*/]`. + +### packages/skill (Claude Code plugin) + +Claude Code plugin that provides the `/pr-impact` slash command: + +``` +.claude-plugin/config.json -- Plugin configuration +mcp.json -- MCP server reference (points to @pr-impact/tools) +skill.md -- Assembled skill prompt (system prompt + report template) +package.json -- Build script only +``` + +**Build note:** The build script (`tsx ../../scripts/build-skill.ts`) assembles `skill.md` from `templates/system-prompt.md` and `templates/report-template.md`. + +### Shared templates + +``` +templates/ + system-prompt.md -- System prompt for Claude analysis (analysis steps, rules, scoring) + report-template.md -- Report output format template (sections, tables) +``` + +These are the single source of truth. Both `action` (via embed-templates.ts) and `skill` (via build-skill.ts) consume them at build time. 
+ +### Build scripts + +``` +scripts/ + embed-templates.ts -- Reads templates/*.md, generates action/src/generated/templates.ts + build-skill.ts -- Reads templates/*.md, generates skill/skill.md +``` ## Code conventions -- **ESM only** — all packages use `"type": "module"`. Use `.js` extensions in all import paths (even for `.ts` source files), e.g. `import { parseDiff } from './diff/diff-parser.js'`. -- **TypeScript strict mode** — `tsconfig.base.json` sets `"strict": true`, target ES2022, module ES2022 with bundler resolution. -- **All shared types** are defined in `packages/core/src/types.ts`. Import types from there. -- **Barrel exports** — the public API of `@pr-impact/core` is defined in `packages/core/src/index.ts`. Any new public function or type must be re-exported from this file. -- **Linting** — ESLint flat config (`eslint.config.mjs`) with `typescript-eslint` (type-checked), `@stylistic/eslint-plugin` (formatting), and `eslint-plugin-vitest` (test files). No Prettier needed. -- **tsup** is used for bundling all packages. Config: ESM format, dts generation, sourcemaps, clean output. +- **ESM only** -- all packages use `"type": "module"`. Use `.js` extensions in all import paths (even for `.ts` source files), e.g. `import { gitDiff } from './tools/git-diff.js'`. +- **CJS exception** -- the `action` package builds to CJS format (`dist/index.cjs`) because GitHub Actions requires CommonJS. Source code is still ESM. +- **TypeScript strict mode** -- `tsconfig.base.json` sets `"strict": true`, target ES2022, module ES2022 with bundler resolution. +- **Linting** -- ESLint flat config (`eslint.config.mjs`) with `typescript-eslint` (type-checked), `@stylistic/eslint-plugin` (formatting), and `eslint-plugin-vitest` (test files). No Prettier needed. +- **tsup** is used for bundling `tools-core`, `tools`, and `action`. The `skill` package uses a custom build script. 
- **Turbo** task graph: `build` depends on `^build` (dependency packages build first); `test` depends on `build`; `lint` depends on `^build`. -- **Changesets** — `@changesets/cli` manages versioning and changelogs. All three packages use fixed versioning (same version number). Release workflow in `.github/workflows/release.yml` auto-creates "Version Packages" PRs and publishes to npm on merge to `main`. +- **Generated files** -- `packages/action/src/generated/templates.ts` and `packages/skill/skill.md` are auto-generated. Do not edit manually. ## Key patterns - **Git operations** use `simple-git` (the `simpleGit()` function). All git calls go through this library, never raw `child_process`. - **File discovery** uses `fast-glob` for finding files in the repo. -- **Export parsing** uses **regex-based parsing** (not tree-sitter or AST). See `export-differ.ts`. -- **Risk scoring** uses six weighted factors (defined in `risk/factors.ts`): - - Breaking changes — weight 0.30 - - Untested changes — weight 0.25 - - Diff size — weight 0.15 - - Stale documentation — weight 0.10 - - Config file changes — weight 0.10 - - Impact breadth — weight 0.10 -- **Parallel analysis** — `analyzePR()` runs breaking-change detection, test coverage, doc staleness, and impact graph building concurrently via `Promise.all`. 
- -## Documentation - -Detailed documentation lives in `docs/`: - -### Adoption Guides - -| Document | Description | -|---|---| -| [`docs/getting-started.md`](docs/getting-started.md) | Installation, first run, understanding the output, common workflows | -| [`docs/ci-integration.md`](docs/ci-integration.md) | GitHub Actions, GitLab CI, CircleCI, Jenkins examples, exit codes, thresholds, PR comments | -| [`docs/mcp-integration.md`](docs/mcp-integration.md) | MCP server architecture, 4 available tools with parameters, tool registration pattern, client configuration (Claude Code, Claude Desktop, Cursor, VS Code), manual testing with MCP Inspector | -| [`docs/programmatic-api.md`](docs/programmatic-api.md) | Using `@pr-impact/core` as a library, individual analysis steps, types, error handling, custom CI scripts | -| [`docs/configuration-guide.md`](docs/configuration-guide.md) | Threshold selection, skipping analysis steps, monorepo considerations, impact depth, output formats | -| [`docs/troubleshooting.md`](docs/troubleshooting.md) | Git errors, shallow clones, false positives, test coverage issues, CI integration, MCP server problems | - -### Internal Architecture - -| Document | Description | -|---|---| -| [`docs/architecture.md`](docs/architecture.md) | Monorepo layout, package dependency graph, build pipeline, core module organization, external dependencies, design principles | -| [`docs/analysis-pipeline.md`](docs/analysis-pipeline.md) | The 6-step `analyzePR()` pipeline, sequence diagram, skip behavior, entry points (CLI / MCP / programmatic) | -| [`docs/data-flow.md`](docs/data-flow.md) | Type relationships (ER diagram), data flow through the pipeline, internal types, module-to-type mapping | -| [`docs/risk-scoring.md`](docs/risk-scoring.md) | Risk formula, 6 factor weights and scoring logic, score-to-level mapping, worked example | +- **`find_importers` caches the reverse dependency map** -- built on first call, reused within the same session. 
Call `clearImporterCache()` to reset. +- **Tool handlers return plain objects** -- the MCP wrapper (`tools`) handles formatting as MCP ToolResult. The action dispatcher (`action/tools.ts`) handles stringification. +- **Templates are embedded at build time** -- no filesystem reads at runtime for prompts or report formats. +- **Client has safety limits** -- 30-iteration max, 180-second wall-clock timeout, `temperature: 0` for consistency. +- **Risk score parsing is explicit** -- if parsing fails, logs warning and skips threshold check instead of false-failing. +- **Risk scoring** uses six weighted factors: + - Breaking changes -- weight 0.30 + - Untested changes -- weight 0.25 + - Diff size -- weight 0.15 + - Stale documentation -- weight 0.10 + - Config file changes -- weight 0.10 + - Impact breadth -- weight 0.10 ## Testing guidelines -- Tests use **vitest** and live in `packages/core/__tests__/`. -- Test file naming convention: `MODULE_NAME.test.ts` (e.g. `export-differ.test.ts`, `risk-calculator.test.ts`). -- Only the `packages/core` workspace is included in the vitest workspace config (`vitest.workspace.ts`). -- Write **unit tests only** — do not write integration tests that require a real git repository. -- **Mock git operations** (simple-git calls) where needed; tests should not depend on filesystem or git state. -- Existing test files: - - `analyzer.test.ts` - - `coverage-checker.test.ts` - - `detector.test.ts` - - `diff-parser.test.ts` - - `export-differ.test.ts` - - `file-categorizer.test.ts` - - `impact-graph.test.ts` - - `import-resolver.test.ts` - - `json-reporter.test.ts` - - `markdown-reporter.test.ts` - - `risk-calculator.test.ts` - - `signature-differ.test.ts` - - `staleness-checker.test.ts` - - `test-mapper.test.ts` +- Tests use **vitest** and live in `__tests__/` directories within each package. +- Test file naming convention: `MODULE_NAME.test.ts` (e.g. `git-diff.test.ts`, `tools.test.ts`). 
+- Vitest projects are configured in `vitest.config.ts` (root) with `packages/tools-core`, `packages/tools`, and `packages/action`. +- Write **unit tests only** -- do not write integration tests that require a real git repository. +- **Mock git operations** (simple-git calls) and external dependencies where needed; tests should not depend on filesystem or git state. +- Test files per package: + - `packages/tools-core/__tests__/`: git-diff, read-file, list-files, search-code, find-imports, list-tests, regression (7 files) + - `packages/tools/__tests__/`: index, register, build-scripts (3 files) + - `packages/action/__tests__/`: tools, client, comment, index (4 files) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6833925..dd78a9d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,16 +21,17 @@ pnpm build ### Build ```bash -pnpm build # Build all packages (Turborepo, dependency order) -pnpm build --filter=@pr-impact/core # Build a single package +pnpm build # Build all packages (Turborepo, dependency order) +pnpm build --filter=@pr-impact/tools-core # Build a single package ``` +Build order: `tools-core` builds first, then `tools` and `action` (in parallel), then `skill`. 
+ ### Test ```bash -pnpm test # Run all tests -npx vitest run packages/core/__tests__/FILE.test.ts # Run a single test file -pnpm test:watch # Watch mode +pnpm test # Run all tests +npx vitest run packages/tools-core/__tests__/FILE.test.ts # Run a single test file ``` ### Lint @@ -44,26 +45,27 @@ pnpm lint:fix # Auto-fix ``` packages/ - core/ @pr-impact/core Analysis engine (pure logic) - cli/ @pr-impact/cli Commander CLI (pri) - mcp-server/ @pr-impact/mcp-server MCP server for AI assistants + tools-core/ @pr-impact/tools-core Pure tool handler functions (shared foundation) + tools/ @pr-impact/tools MCP server (wraps tools-core) + action/ @pr-impact/action GitHub Action (agentic Claude loop) + skill/ @pr-impact/skill Claude Code plugin (built from templates) ``` -`core` has no workspace dependencies. Both `cli` and `mcp-server` depend on `core`. +`tools-core` has no workspace dependencies. Both `tools` and `action` depend on `tools-core`. The `skill` package has no runtime dependencies. 
## Code Conventions - **ESM only** -- use `.js` extensions in all import paths (even for `.ts` files) +- **CJS exception** -- the `action` package builds to CJS for GitHub Actions compatibility - **TypeScript strict mode** -- no `any` unless unavoidable -- **Shared types** go in `packages/core/src/types.ts` -- **Public API** must be re-exported from `packages/core/src/index.ts` - **No Prettier** -- formatting is handled by `@stylistic/eslint-plugin` +- **Generated files** -- do not edit `packages/action/src/generated/templates.ts` or `packages/skill/skill.md` manually; they are built from `templates/*.md` ## Writing Tests - Tests use **vitest** and live in `__tests__/` directories - Write **unit tests only** -- do not depend on real git repos or filesystem state -- Mock `simple-git` calls where needed +- Mock `simple-git` calls and external dependencies where needed - Name test files: `MODULE_NAME.test.ts` ## Submitting Changes @@ -96,7 +98,6 @@ The changeset file will be committed with your PR. The release workflow handles - Bug fixes (patch) - New features (minor) - Breaking changes (major) -- Documentation changes that affect the published package README (patch) ### What Doesn't Need a Changeset @@ -106,7 +107,7 @@ The changeset file will be committed with your PR. 
The release workflow handles ## Reporting Issues Open an issue at [github.com/ducdmdev/pr-impact/issues](https://github.com/ducdmdev/pr-impact/issues) with: -- The command you ran +- What you were trying to do - Expected vs actual behavior - Node.js and pnpm versions - OS diff --git a/README.md b/README.md index 1c14eac..782b57c 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,9 @@ # pr-impact -**Static analysis for pull requests -- detect breaking changes, map blast radius, and score risk before you merge.** +**AI-powered PR impact analysis -- detect breaking changes, map blast radius, and score risk before you merge.** [![Build](https://img.shields.io/github/actions/workflow/status/ducdm/pr-impact/ci.yml?branch=main)](https://github.com/ducdmdev/pr-impact/actions) -[![npm](https://img.shields.io/npm/v/@pr-impact/core)](https://www.npmjs.com/package/@pr-impact/core) [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) --- @@ -13,18 +12,13 @@ - [Features](#features) - [Quick Start](#quick-start) -- [CLI Commands](#cli-commands) - - [analyze](#pri-analyze) - - [breaking](#pri-breaking) - - [risk](#pri-risk) - - [impact](#pri-impact) -- [MCP Server (AI Tool Integration)](#mcp-server-ai-tool-integration) -- [Programmatic API](#programmatic-api) + - [Claude Code Plugin](#claude-code-plugin) + - [GitHub Action](#github-action) + - [MCP Server](#mcp-server) - [Risk Score](#risk-score) - [Factor Breakdown](#factor-breakdown) - [Risk Levels](#risk-levels) - [Architecture](#architecture) -- [Documentation](#documentation) - [Development](#development) - [License](#license) @@ -32,345 +26,102 @@ ## Features +- **AI-Driven Analysis** -- uses Claude to intelligently analyze PRs, reading diffs, tracing imports, and producing structured reports. - **Breaking Change Detection** -- finds removed exports, changed function signatures, altered types, and renamed exports; maps each to its downstream consumers. 
- **Impact Graph** -- builds an import-dependency graph to show which files are directly changed and which are indirectly affected (blast radius). - **Test Coverage Gap Analysis** -- identifies source files that changed without corresponding test updates and flags missing test files. - **Documentation Staleness Check** -- scans docs for references to symbols, files, or paths that were modified or removed. - **Weighted Risk Score** -- combines six factors (breaking changes, untested code, diff size, stale docs, config changes, impact breadth) into a single 0-100 score with a severity level. -- **Multiple Output Formats** -- Markdown reports, JSON, plain text, and Graphviz DOT for the impact graph. -- **MCP Server** -- expose every analysis capability as a tool that AI assistants (Claude Code, Cursor, etc.) can call directly. -- **CI-Friendly** -- the `breaking` and `risk` commands exit with code 1 when thresholds are exceeded, making them usable as quality gates. +- **Claude Code Plugin** -- use `/pr-impact` directly in Claude Code to analyze the current branch. +- **GitHub Action** -- automated PR analysis with PR comment posting and threshold-based gating. +- **MCP Server** -- expose git/repo tools to any MCP-compatible AI client. --- ## Quick Start -### Install globally +### Claude Code Plugin -```bash -# Install the CLI -npm install -g @pr-impact/cli - -# Or with pnpm -pnpm add -g @pr-impact/cli -``` - -### Run from a git repository - -```bash -# Full analysis (compares main...HEAD by default) -pri analyze - -# Just check for breaking changes -pri breaking - -# Get the risk score -pri risk - -# View the impact graph -pri impact -``` - -### Specify branches explicitly - -```bash -pri analyze origin/main feature/my-branch -``` - ---- - -## CLI Commands - -The CLI binary is called **`pri`**. Every command accepts `--repo ` to point at a repository other than the current working directory. 
- -### `pri analyze` - -Run the full PR impact analysis -- breaking changes, test coverage, doc staleness, impact graph, and risk score combined into a single report. - -``` -pri analyze [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `[base]` | Base branch | `main` or `master` (auto-detected) | -| `[head]` | Head branch | `HEAD` | -| `--format ` | Output format: `md` or `json` | `md` | -| `--output ` | Write report to a file instead of stdout | -- | -| `--repo ` | Path to the git repository | current directory | -| `--no-breaking` | Skip breaking change detection | -- | -| `--no-coverage` | Skip test coverage analysis | -- | -| `--no-docs` | Skip documentation staleness check | -- | - -**Examples:** - -```bash -# Markdown report to stdout -pri analyze - -# JSON report written to a file -pri analyze main HEAD --format json --output report.json - -# Skip expensive checks -pri analyze --no-breaking --no-docs -``` - -### `pri breaking` - -Detect breaking API changes between two branches. Exits with code 1 if any breaking changes are found at or above the specified severity. - -``` -pri breaking [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `[base]` | Base branch | `main` | -| `[head]` | Head branch | `HEAD` | -| `--severity ` | Minimum severity filter: `low`, `medium`, or `high` | `low` | -| `--format ` | Output format: `md` or `json` | `md` | -| `--repo ` | Path to the git repository | current directory | - -**Examples:** +Install the plugin to use pr-impact directly in Claude Code: ```bash -# Show all breaking changes -pri breaking - -# Only high-severity issues -pri breaking --severity high - -# Use as a CI gate (exits 1 if any medium+ breaking changes exist) -pri breaking --severity medium +claude plugin add @pr-impact/skill ``` -### `pri risk` - -Calculate and display the weighted risk score with a full factor breakdown. 
+Then use the `/pr-impact` slash command: ``` -pri risk [base] [head] [options] +/pr-impact ``` -| Option | Description | Default | -|---|---|---| -| `[base]` | Base branch | `main` or `master` (auto-detected) | -| `[head]` | Head branch | `HEAD` | -| `--threshold ` | Fail (exit 1) if risk score >= this value | -- | -| `--format ` | Output format: `text` or `json` | `text` | -| `--repo ` | Path to the git repository | current directory | - -**Examples:** - -```bash -# Display the risk breakdown -pri risk +This starts an AI-driven analysis of your current branch against `main`, using the MCP tools to gather evidence and produce a structured report. -# CI gate: fail if risk is 60 or higher -pri risk --threshold 60 +### GitHub Action -# JSON output for downstream tooling -pri risk --format json -``` +Add pr-impact to your CI workflow: -### `pri impact` +```yaml +name: PR Impact Analysis +on: pull_request -Build and display the import-dependency impact graph. Shows which files are directly changed and which are indirectly affected through transitive imports. 
+jobs: + analyze: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: ducdmdev/pr-impact@v1 + with: + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} + github-token: ${{ secrets.GITHUB_TOKEN }} + threshold: '75' ``` -pri impact [file] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `[file]` | Trace impact for a specific file | all changed files | -| `--depth ` | Maximum dependency traversal depth | `3` | -| `--format ` | Output format: `text`, `json`, or `dot` | `text` | -| `--repo ` | Path to the git repository | current directory | - -**Examples:** -```bash -# Full impact graph -pri impact +#### Action Inputs -# Trace a single file -pri impact src/auth/login.ts +| Input | Description | Required | Default | +|---|---|---|---| +| `anthropic-api-key` | Anthropic API key for Claude | Yes | -- | +| `github-token` | GitHub token for posting PR comments | No | -- | +| `base-branch` | Base branch to compare against | No | `main` | +| `model` | Claude model to use | No | `claude-sonnet-4-5-20250929` | +| `threshold` | Risk score threshold -- action fails if score >= this value | No | -- | -# Generate a Graphviz diagram -pri impact --format dot > impact.dot -dot -Tsvg impact.dot -o impact.svg +#### Action Outputs -# Deeper traversal -pri impact --depth 5 -``` - ---- - -## MCP Server (AI Tool Integration) - -The `@pr-impact/mcp-server` package exposes pr-impact as a [Model Context Protocol](https://modelcontextprotocol.io/) server. This lets AI assistants like Claude Code, Cursor, or any MCP-compatible client call the analysis tools directly. 
+| Output | Description | +|---|---| +| `risk-score` | The calculated risk score (0-100) | +| `risk-level` | The risk level (low/medium/high/critical) | +| `report` | The full markdown report | -### Setup for Claude Code +### MCP Server -Add the server to your Claude Code MCP configuration (`.claude/mcp.json` or the global settings file): +The `@pr-impact/tools` package provides an MCP server with 6 git/repo tools for AI assistants. ```json { "mcpServers": { "pr-impact": { "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] + "args": ["-y", "@pr-impact/tools"] } } } ``` -Or if you have the package installed locally in the monorepo: +#### Available MCP Tools -```json -{ - "mcpServers": { - "pr-impact": { - "command": "node", - "args": ["./packages/mcp-server/dist/index.js"] - } - } -} -``` - -### Available MCP Tools - -| Tool | Description | Parameters | -|---|---|---| -| `analyze_diff` | Full PR analysis (breaking changes, coverage, docs, risk) | `repoPath?`, `baseBranch?`, `headBranch?` | -| `get_breaking_changes` | Detect breaking API changes with severity filtering | `repoPath?`, `baseBranch?`, `headBranch?`, `minSeverity?` | -| `get_risk_score` | Calculate risk score with factor breakdown | `repoPath?`, `baseBranch?`, `headBranch?` | -| `get_impact_graph` | Build import-dependency impact graph | `repoPath?`, `baseBranch?`, `headBranch?`, `filePath?`, `depth?` | - -All parameters are optional. The server defaults to the current working directory and `main...HEAD`. - ---- - -## Programmatic API - -The `@pr-impact/core` package exports every analysis function for use in your own scripts, custom tooling, or CI integrations. 
- -### Install - -```bash -npm install @pr-impact/core -``` - -### Full Analysis - -```typescript -import { analyzePR, formatMarkdown, formatJSON } from '@pr-impact/core'; - -const analysis = await analyzePR({ - repoPath: '/path/to/repo', - baseBranch: 'main', - headBranch: 'feature/my-branch', -}); - -// Structured result -console.log(analysis.riskScore.score); // 42 -console.log(analysis.riskScore.level); // "medium" -console.log(analysis.breakingChanges); // BreakingChange[] -console.log(analysis.summary); // Human-readable summary - -// Formatted output -console.log(formatMarkdown(analysis)); // Full Markdown report -console.log(formatJSON(analysis)); // JSON string -``` - -### Individual Analysis Steps - -Each analysis step can be used independently: - -```typescript -import { - parseDiff, - detectBreakingChanges, - checkTestCoverage, - checkDocStaleness, - buildImpactGraph, - calculateRisk, -} from '@pr-impact/core'; - -const repoPath = '/path/to/repo'; -const base = 'main'; -const head = 'HEAD'; - -// 1. Parse the git diff -const changedFiles = await parseDiff(repoPath, base, head); - -// 2. Detect breaking changes -const breakingChanges = await detectBreakingChanges( - repoPath, base, head, changedFiles -); - -// 3. Check test coverage gaps -const testCoverage = await checkTestCoverage(repoPath, changedFiles); - -// 4. Check documentation staleness -const docStaleness = await checkDocStaleness( - repoPath, changedFiles, base, head -); - -// 5. Build the impact graph -const impactGraph = await buildImpactGraph(repoPath, changedFiles); - -// 6. 
Calculate the risk score -const riskScore = calculateRisk( - changedFiles, - breakingChanges, - testCoverage, - docStaleness, - impactGraph, -); - -console.log(`Risk: ${riskScore.score}/100 (${riskScore.level})`); -``` - -### Lower-Level Utilities - -```typescript -import { - categorizeFile, - parseExports, - diffExports, - diffSignatures, - mapTestFiles, -} from '@pr-impact/core'; - -// Categorize a file path -categorizeFile('src/utils/auth.ts'); // 'source' -categorizeFile('__tests__/auth.test.ts'); // 'test' -categorizeFile('README.md'); // 'doc' -categorizeFile('tsconfig.json'); // 'config' -``` - -### Key Types - -All TypeScript interfaces are exported from `@pr-impact/core`: - -```typescript -import type { - PRAnalysis, // Top-level result from analyzePR() - AnalysisOptions, // Input options for analyzePR() - ChangedFile, // A file in the diff - BreakingChange, // A detected breaking API change - TestCoverageReport, // Coverage ratio + gaps - TestCoverageGap, // A source file missing test changes - DocStalenessReport, // Stale doc references - StaleReference, // A single stale reference in a doc file - ImpactGraph, // Directly/indirectly affected files + edges - ImpactEdge, // A single import dependency edge - RiskAssessment, // Overall score, level, and factors - RiskFactor, // Individual factor with score, weight, description -} from '@pr-impact/core'; -``` +| Tool | Description | +|---|---| +| `git_diff` | Get the raw git diff between two branches, optionally for a single file | +| `read_file_at_ref` | Read a file's content at a specific git ref | +| `list_changed_files` | List files changed between two branches with status and stats | +| `search_code` | Search for a regex pattern in the codebase | +| `find_importers` | Find files that import a given module | +| `list_test_files` | Find test files associated with a source file | --- @@ -381,7 +132,7 @@ The risk score is a weighted average of six independent factors, producing a sin **Formula:** ``` -score 
= sum(factor_score * factor_weight) / sum(factor_weight) +score = sum(factor_score * factor_weight) ``` ### Factor Breakdown @@ -395,8 +146,6 @@ score = sum(factor_score * factor_weight) / sum(factor_weight) | **Config file changes** | 0.10 | `100` if CI/build config changed, `50` if other config, `0` if none | | **Impact breadth** | 0.10 | `min(indirectlyAffected * 10, 100)` -- each affected file adds 10 points | -CI/build config patterns that trigger the highest config score include `.github/`, `Dockerfile`, `docker-compose`, `webpack.config`, `vite.config`, `rollup.config`, `turbo.json`, `.gitlab-ci`, `Jenkinsfile`, `.circleci/`, and `esbuild.config`. - ### Risk Levels | Score Range | Level | @@ -415,79 +164,64 @@ pr-impact is a TypeScript monorepo managed with **pnpm** workspaces and **Turbor ``` pr-impact/ ├── packages/ -│ ├── core/ @pr-impact/core +│ ├── tools-core/ @pr-impact/tools-core +│ │ └── src/ +│ │ ├── index.ts Barrel exports +│ │ └── tools/ 6 pure handler functions (git-diff, read-file, +│ │ list-files, search-code, find-imports, list-tests) +│ │ +│ ├── tools/ @pr-impact/tools │ │ └── src/ -│ │ ├── index.ts Public API exports -│ │ ├── analyzer.ts Orchestrates the full analysis pipeline -│ │ ├── types.ts All TypeScript interfaces -│ │ ├── diff/ Git diff parsing & file categorization -│ │ ├── breaking/ Breaking change detection (exports, signatures) -│ │ ├── coverage/ Test file mapping & coverage gap analysis -│ │ ├── docs/ Documentation staleness checking -│ │ ├── impact/ Import dependency graph builder -│ │ ├── risk/ Risk factor evaluation & score calculation -│ │ └── output/ Markdown & JSON report formatters +│ │ ├── index.ts MCP server entry point (stdio transport) +│ │ └── register.ts Tool registration with zod schemas │ │ -│ ├── cli/ @pr-impact/cli +│ ├── action/ @pr-impact/action │ │ └── src/ -│ │ ├── index.ts CLI entry point (commander) -│ │ └── commands/ analyze, breaking, risk, impact +│ │ ├── index.ts GitHub Action entry point +│ │ ├── 
client.ts        Anthropic API client (agentic loop)
+│  │       ├── tools.ts        Tool dispatcher (calls tools-core)
+│  │       ├── comment.ts      PR comment poster (upsert via HTML markers)
+│  │       └── generated/      Build-time embedded templates
 │  │
-│  └── mcp-server/        @pr-impact/mcp-server
-│      └── src/
-│          ├── index.ts         MCP server entry point (stdio transport)
-│          └── tools/           analyze_diff, get_breaking_changes,
-│                               get_risk_score, get_impact_graph
+│  └── skill/             @pr-impact/skill
+│      ├── .claude-plugin/     Claude Code plugin config
+│      ├── mcp.json            MCP server reference
+│      └── skill.md            Assembled skill prompt (built from templates)
 │
-├── turbo.json            Turborepo task configuration
-├── pnpm-workspace.yaml   Workspace definition
-└── package.json          Root scripts (build, test, lint, clean)
+├── templates/
+│   ├── system-prompt.md     System prompt for Claude analysis
+│   └── report-template.md   Report output format template
+│
+├── scripts/
+│   ├── embed-templates.ts   Generates action/src/generated/templates.ts
+│   └── build-skill.ts       Assembles skill/skill.md from templates
+│
+├── turbo.json            Turborepo task configuration
+├── pnpm-workspace.yaml   Workspace definition
+└── package.json          Root scripts
 ```
 
 ### Package Dependency Graph
 
 ```
-@pr-impact/cli ──────────> @pr-impact/core
-@pr-impact/mcp-server ────> @pr-impact/core
+@pr-impact/tools ────> @pr-impact/tools-core
+@pr-impact/action ────> @pr-impact/tools-core
+@pr-impact/skill (no runtime dependencies -- assembled at build time)
 ```
 
-Both `cli` and `mcp-server` depend on `core` via `workspace:*` links. The `core` package has no internal workspace dependencies.
+Both `tools` and `action` depend on `tools-core` via `workspace:*` links. The `tools-core` package has no internal workspace dependencies. The `skill` package has no runtime dependencies -- its build script assembles a skill prompt from shared templates.
### Key Dependencies | Package | Dependency | Purpose | |---|---|---| -| `core` | `simple-git` | Git operations (diff, rev-parse, branch listing) | -| `core` | `fast-glob` | File discovery for test mapping and imports | -| `cli` | `commander` | CLI argument parsing and subcommands | -| `cli` | `chalk` | Terminal color output | -| `cli` | `ora` | Spinner for long-running operations | -| `mcp-server` | `@modelcontextprotocol/sdk` | MCP protocol server implementation | -| `mcp-server` | `zod` | Input schema validation for MCP tools | - ---- - -## Documentation - -### Adoption Guides - -| Document | Description | -|----------|-------------| -| [Getting Started](docs/getting-started.md) | Installation, first run, understanding the output | -| [CI Integration](docs/ci-integration.md) | GitHub Actions, GitLab CI, CircleCI, Jenkins examples, exit codes, thresholds | -| [MCP Integration](docs/mcp-integration.md) | MCP server tools, registration pattern, client configuration | -| [Programmatic API](docs/programmatic-api.md) | Using `@pr-impact/core` as a library, individual analysis steps, error handling | -| [Configuration Guide](docs/configuration-guide.md) | Threshold tuning, skipping checks, monorepo considerations, output formats | -| [Troubleshooting](docs/troubleshooting.md) | Shallow clones, missing branches, false positives, CI issues | - -### Internal Architecture - -| Document | Description | -|----------|-------------| -| [Architecture](docs/architecture.md) | Monorepo layout, package dependency graph, build pipeline, core module organization | -| [Analysis Pipeline](docs/analysis-pipeline.md) | The 6-step `analyzePR()` pipeline, sequence diagram, skip behavior, entry points | -| [Data Flow](docs/data-flow.md) | Type relationships (ER diagram), data flow through the pipeline, module-to-type mapping | -| [Risk Scoring](docs/risk-scoring.md) | Risk formula, 6 factor weights and scoring logic, worked example | +| `tools-core` | `simple-git` | Git operations (diff, 
show, log) | +| `tools-core` | `fast-glob` | File discovery for test mapping and imports | +| `tools` | `@modelcontextprotocol/sdk` | MCP protocol server implementation | +| `tools` | `zod` | Input schema validation for MCP tools | +| `action` | `@anthropic-ai/sdk` | Claude API client for agentic analysis loop | +| `action` | `@actions/core` | GitHub Actions runtime (inputs, outputs, logging) | +| `action` | `@actions/github` | GitHub context (PR number, repo) | --- @@ -495,69 +229,62 @@ Both `cli` and `mcp-server` depend on `core` via `workspace:*` links. The `core` ### Prerequisites -- **Node.js** >= 18 +- **Node.js** >= 20 - **pnpm** >= 9 ### Setup ```bash -# Clone the repository git clone https://github.com/ducdmdev/pr-impact.git cd pr-impact - -# Install dependencies pnpm install - -# Build all packages pnpm build ``` ### Common Commands ```bash -# Build all packages (respects dependency order via Turborepo) -pnpm build +pnpm build # Build all packages (Turborepo, dependency order) +pnpm test # Run all tests +pnpm lint # Lint all packages +pnpm clean # Clean build artifacts +pnpm build --filter=@pr-impact/tools-core # Build a single package +npx vitest run packages/action/__tests__/FILE.test.ts # Run a single test file +``` -# Run tests -pnpm test +### Project Conventions -# Run tests in watch mode -pnpm test:watch +- **ESM only** -- all packages use `"type": "module"` with `.js` extensions in import paths. +- **CJS exception** -- the `action` package builds to CJS (GitHub Actions requires a self-contained `dist/index.cjs`). +- **tsup** for building -- `tools-core`, `tools`, and `action` use tsup. `skill` uses a custom build script. +- **Vitest** for testing -- tests live in `__tests__/` directories. +- **Turborepo** for orchestration -- `pnpm build` runs in dependency order (`tools-core` before `tools` and `action`). +- **Templates are embedded at build time** -- the action's `prebuild` script generates `src/generated/templates.ts`. 
The skill's build script generates `skill.md`. -# Lint all packages -pnpm lint +### Contributing -# Clean build artifacts -pnpm clean -``` +See [CONTRIBUTING.md](CONTRIBUTING.md). -### Running the CLI in Development +--- -```bash -# Build and then run directly -pnpm build -node packages/cli/dist/index.js analyze +## Migrating from v0.x -# Or link globally -cd packages/cli && pnpm link --global -pri analyze -``` +v1.0 is a complete architecture rewrite. The three original packages have been replaced: -### Project Conventions +| v0.x Package | v1.0 Replacement | Notes | +|---|---|---| +| `@pr-impact/core` | `@pr-impact/tools-core` | Deterministic analysis engine replaced by pure git/repo tool functions. Analysis logic is now in the AI agent's system prompt. | +| `@pr-impact/cli` | `@pr-impact/action` | CLI removed. Use the GitHub Action for CI or the Claude Code plugin for local analysis. | +| `@pr-impact/mcp-server` | `@pr-impact/tools` | 4 high-level analysis tools replaced by 6 lower-level git/repo tools. | -- **ESM only** -- all packages use `"type": "module"` with `.js` extensions in import paths. -- **tsup** for building -- each package uses tsup to bundle TypeScript to JavaScript. -- **Vitest** for testing -- tests live alongside source files or in `__tests__/` directories. -- **Turborepo** for orchestration -- `pnpm build` runs in dependency order (`core` before `cli` and `mcp-server`). +For a detailed guide with code examples, see [docs/migration-guide.md](docs/migration-guide.md). -### Contributing +### Key changes -1. Fork the repository -2. Create a feature branch: `git checkout -b feature/my-feature` -3. Make your changes and add tests -4. Run the full build and test suite: `pnpm build && pnpm test` -5. Commit and push to your fork -6. Open a pull request +- **Analysis is AI-driven** -- instead of deterministic code paths, Claude reads diffs and traces imports via tool calls, producing richer and more context-aware reports. 
+- **No CLI** -- the `pri` command is gone. Use the GitHub Action (`@pr-impact/action`) in CI, or the Claude Code plugin (`@pr-impact/skill`) locally. +- **New MCP tools** -- the MCP server now exposes `git_diff`, `read_file_at_ref`, `list_changed_files`, `search_code`, `find_importers`, and `list_test_files` instead of `analyze_diff`, `get_breaking_changes`, `get_impact_graph`, and `get_risk_score`. +- **Programmatic API changed** -- if you imported from `@pr-impact/core`, switch to `@pr-impact/tools-core` for the individual tool functions. The `analyzePR()` orchestrator no longer exists; use the tool functions directly or the GitHub Action. --- diff --git a/docs/analysis-pipeline.md b/docs/analysis-pipeline.md deleted file mode 100644 index b185ae0..0000000 --- a/docs/analysis-pipeline.md +++ /dev/null @@ -1,174 +0,0 @@ -# Analysis Pipeline - -The `analyzePR()` function in `packages/core/src/analyzer.ts` is the top-level orchestrator. It runs a six-step pipeline that produces a complete `PRAnalysis` result. - ---- - -## Pipeline Overview - -```mermaid -flowchart TD - START([analyzePR called]) --> RESOLVE[1. Resolve branches] - RESOLVE --> VERIFY[2. Verify repository] - VERIFY --> PARSE[3. Parse git diff] - PARSE --> PARALLEL - - subgraph PARALLEL["4. Parallel analysis (Promise.all)"] - direction LR - BC["Breaking change
detection"] - TC["Test coverage
analysis"] - DS["Doc staleness
checking"] - IG["Impact graph
building"] - end - - PARALLEL --> RISK[5. Calculate risk score] - RISK --> SUMMARY[6. Generate summary] - SUMMARY --> RESULT([Return PRAnalysis]) - - style START fill:#4f46e5,color:#fff - style RESULT fill:#4f46e5,color:#fff - style PARALLEL fill:#f0f9ff,stroke:#0891b2 - style BC fill:#dc2626,color:#fff - style TC fill:#059669,color:#fff - style DS fill:#ca8a04,color:#fff - style IG fill:#7c3aed,color:#fff - style RISK fill:#e11d48,color:#fff -``` - ---- - -## Step-by-Step Breakdown - -### Step 1 -- Resolve Branches - -The base branch defaults to `main` or `master` (auto-detected from local branches). The head branch defaults to `HEAD`. Both can be overridden via `AnalysisOptions`. - -### Step 2 -- Verify Repository - -Uses `simple-git` to confirm: -- The path is a valid git repository (`git.checkIsRepo()`) -- The base branch ref is valid (`git.revparse([baseBranch])`) -- The head branch ref is valid (`git.revparse([headBranch])`) - -### Step 3 -- Parse Diff - -`parseDiff()` calls `git.diffSummary()` (via simple-git) between base and head, then categorizes each changed file (source, test, doc, config, other). - -### Step 4 -- Parallel Analysis - -Four independent analyses run concurrently. Each can be individually skipped via options (`skipBreaking`, `skipCoverage`, `skipDocs`): - -| Analysis | Function | Skippable | What it produces | -|---|---|---|---| -| Breaking changes | `detectBreakingChanges()` | Yes | `BreakingChange[]` | -| Test coverage | `checkTestCoverage()` | Yes | `TestCoverageReport` | -| Doc staleness | `checkDocStaleness()` | Yes | `DocStalenessReport` | -| Impact graph | `buildImpactGraph()` | No | `ImpactGraph` | - -### Step 5 -- Calculate Risk - -`calculateRisk()` evaluates six weighted factors from the combined results and produces a 0-100 score with a severity level. - -### Step 6 -- Generate Summary - -A human-readable summary string is built from the results (file count, additions/deletions, risk level, breaking change count, coverage gaps). 
- ---- - -## Sequence Diagram - -```mermaid -sequenceDiagram - participant User - participant CLI as pri CLI - participant Analyzer as analyzePR() - participant Git as simple-git - participant Diff as parseDiff() - participant Breaking as detectBreakingChanges() - participant Coverage as checkTestCoverage() - participant Docs as checkDocStaleness() - participant Impact as buildImpactGraph() - participant Risk as calculateRisk() - participant Output as formatMarkdown() - - User->>CLI: pri analyze [base] [head] - CLI->>Analyzer: analyzePR(options) - - Note over Analyzer: Step 1 — Resolve branches - Analyzer->>Git: git.branch() - Git-->>Analyzer: branch list - - Note over Analyzer: Step 2 — Verify repo - Analyzer->>Git: checkIsRepo() - Analyzer->>Git: revparse(base) - Analyzer->>Git: revparse(head) - - Note over Analyzer: Step 3 — Parse diff - Analyzer->>Diff: parseDiff(repo, base, head) - Diff->>Git: git.diffSummary() - Git-->>Diff: diff summary - Diff-->>Analyzer: ChangedFile[] - - Note over Analyzer: Step 4 — Parallel analysis - par Breaking changes - Analyzer->>Breaking: detectBreakingChanges(...) - Breaking->>Git: git show (base/head file content) - Breaking-->>Analyzer: BreakingChange[] - and Test coverage - Analyzer->>Coverage: checkTestCoverage(...) - Coverage-->>Analyzer: TestCoverageReport - and Doc staleness - Analyzer->>Docs: checkDocStaleness(...) - Docs->>Git: git show (file content) - Docs-->>Analyzer: DocStalenessReport - and Impact graph - Analyzer->>Impact: buildImpactGraph(...) 
- Impact-->>Analyzer: ImpactGraph - end - - Note over Analyzer: Step 5 — Risk scoring - Analyzer->>Risk: calculateRisk(all results) - Risk-->>Analyzer: RiskAssessment - - Note over Analyzer: Step 6 — Summary - Analyzer-->>CLI: PRAnalysis - CLI->>Output: formatMarkdown(analysis) - Output-->>CLI: Markdown string - CLI-->>User: Report output -``` - ---- - -## Skip Behavior - -When an analysis step is skipped, `analyzePR()` returns a neutral default: - -| Step | Default when skipped | -|---|---| -| Breaking changes | Empty array `[]` | -| Test coverage | `{ changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 0, gaps: [] }` | -| Doc staleness | `{ staleReferences: [], checkedFiles: [] }` | - -The impact graph is always built (not skippable) because it feeds into the risk score and provides the blast radius view. - ---- - -## Entry Points - -The pipeline is invoked from three surfaces: - -```mermaid -graph LR - CLI["pri CLI
(Commander)"] --> A["analyzePR()"] - MCP["MCP Server
(stdio)"] --> A - API["Programmatic
import"] --> A - - style A fill:#4f46e5,color:#fff - style CLI fill:#059669,color:#fff - style MCP fill:#d97706,color:#fff - style API fill:#6b7280,color:#fff -``` - -- **CLI** -- `pri analyze` command calls `analyzePR()` then formats output. -- **MCP Server** -- `analyze_diff` tool calls `analyzePR()` and returns a Markdown-formatted report. -- **Programmatic API** -- direct import from `@pr-impact/core`. diff --git a/docs/architecture.md b/docs/architecture.md deleted file mode 100644 index 4164b78..0000000 --- a/docs/architecture.md +++ /dev/null @@ -1,176 +0,0 @@ -# Architecture - -pr-impact is a TypeScript monorepo that performs static analysis on pull requests. It is managed with **pnpm** workspaces and **Turborepo**. - ---- - -## Monorepo Layout - -``` -pr-impact/ -├── packages/ -│ ├── core/ @pr-impact/core -│ ├── cli/ @pr-impact/cli -│ └── mcp-server/ @pr-impact/mcp-server -├── turbo.json -├── pnpm-workspace.yaml -└── package.json -``` - -## Package Dependency Graph - -```mermaid -graph TD - CLI["@pr-impact/cli
Commander CLI"] - MCP["@pr-impact/mcp-server
MCP stdio server"] - CORE["@pr-impact/core
Analysis engine"] - - CLI -->|workspace:*| CORE - MCP -->|workspace:*| CORE - - style CORE fill:#4f46e5,color:#fff,stroke:#3730a3 - style CLI fill:#059669,color:#fff,stroke:#047857 - style MCP fill:#d97706,color:#fff,stroke:#b45309 -``` - -Both `cli` and `mcp-server` depend on `core` via pnpm `workspace:*` links. The `core` package has zero internal workspace dependencies. - ---- - -## Build Pipeline (Turborepo) - -```mermaid -graph LR - subgraph "pnpm build" - B_CORE["build @pr-impact/core"] --> B_CLI["build @pr-impact/cli"] - B_CORE --> B_MCP["build @pr-impact/mcp-server"] - end - - subgraph "pnpm test" - B_CORE --> T["vitest (core only)"] - end - - style B_CORE fill:#4f46e5,color:#fff - style B_CLI fill:#059669,color:#fff - style B_MCP fill:#d97706,color:#fff - style T fill:#7c3aed,color:#fff -``` - -- `build` depends on `^build` (dependency packages build first). -- `test` depends on `build` completing. -- `lint` depends on `^build` (needs built packages for type-checked linting). -- All packages use **tsup** for bundling (ESM format, sourcemaps). The `core` package also generates TypeScript declarations (`dts: true`). - ---- - -## Core Package Module Organization - -```mermaid -graph TD - TYPES["types.ts
All shared interfaces"] - INDEX["index.ts
Barrel exports (public API)"] - ANALYZER["analyzer.ts
analyzePR()
orchestrator"] - - subgraph "Diff Layer" - DP["diff-parser.ts
parseDiff()"] - FC["file-categorizer.ts
categorizeFile()"] - end - - subgraph "Breaking Change Layer" - DET["detector.ts
detectBreakingChanges()"] - ED["export-differ.ts
parseExports() / diffExports()"] - SD["signature-differ.ts
diffSignatures()"] - end - - subgraph "Coverage Layer" - CC["coverage-checker.ts
checkTestCoverage()"] - TM["test-mapper.ts
mapTestFiles()"] - end - - subgraph "Docs Layer" - SC["staleness-checker.ts
checkDocStaleness()"] - end - - subgraph "Imports Layer" - IR["import-resolver.ts
findConsumers() / resolveImport()"] - end - - subgraph "Impact Layer" - IG["impact-graph.ts
buildImpactGraph()"] - end - - subgraph "Risk Layer" - RC["risk-calculator.ts
calculateRisk()"] - RF["factors.ts
6 factor evaluators"] - end - - subgraph "Output Layer" - MR["markdown-reporter.ts
formatMarkdown()"] - JR["json-reporter.ts
formatJSON()"] - end - - INDEX --> ANALYZER - INDEX --> DP - INDEX --> DET - INDEX --> CC - INDEX --> SC - INDEX --> IG - INDEX --> RC - INDEX --> MR - INDEX --> JR - ANALYZER --> DP - DP --> FC - ANALYZER --> DET - DET --> ED - DET --> SD - ANALYZER --> CC - CC --> TM - DET --> IR - ANALYZER --> SC - ANALYZER --> IG - ANALYZER --> RC - RC --> RF - - style TYPES fill:#374151,color:#fff - style INDEX fill:#374151,color:#fff - style ANALYZER fill:#4f46e5,color:#fff - style DP fill:#0891b2,color:#fff - style FC fill:#0891b2,color:#fff - style DET fill:#dc2626,color:#fff - style ED fill:#dc2626,color:#fff - style SD fill:#dc2626,color:#fff - style CC fill:#059669,color:#fff - style TM fill:#059669,color:#fff - style SC fill:#ca8a04,color:#fff - style IR fill:#2563eb,color:#fff - style IG fill:#7c3aed,color:#fff - style RC fill:#e11d48,color:#fff - style RF fill:#e11d48,color:#fff - style MR fill:#6b7280,color:#fff - style JR fill:#6b7280,color:#fff -``` - ---- - -## Key External Dependencies - -| Package | Dependency | Purpose | -|---|---|---| -| `core` | `simple-git` | Git operations (diff, rev-parse, show, branch) | -| `core` | `fast-glob` | File discovery for test mapping and import scanning | -| `cli` | `commander` | CLI argument parsing and subcommands | -| `cli` | `chalk` | Terminal color output | -| `cli` | `ora` | Spinner for long-running operations | -| `mcp-server` | `@modelcontextprotocol/sdk` | MCP protocol server implementation | -| `mcp-server` | `zod` | Input schema validation for MCP tools | - ---- - -## Design Principles - -- **ESM only** -- all packages use `"type": "module"` with `.js` extensions in import paths. -- **Strict TypeScript** -- `tsconfig.base.json` sets `"strict": true`, target ES2022. -- **Barrel exports** -- the public API is defined in `packages/core/src/index.ts`. -- **Regex-based parsing** -- export and import detection use regex, not AST parsing. 
-- **Parallel analysis** -- `analyzePR()` runs 4 analysis steps concurrently via `Promise.all`. -- **No I/O in core** except git operations through `simple-git` and file reads through `fast-glob` / `fs/promises`. diff --git a/docs/ci-integration.md b/docs/ci-integration.md deleted file mode 100644 index 08558a0..0000000 --- a/docs/ci-integration.md +++ /dev/null @@ -1,295 +0,0 @@ -# CI Integration - -pr-impact is designed to work as a quality gate in CI pipelines. The `pri breaking` and `pri risk` commands exit with code 1 when thresholds are exceeded, making them suitable for automated pass/fail checks. - ---- - -## Exit Code Behavior - -```mermaid -flowchart TD - subgraph "pri breaking" - B_RUN["Run breaking change detection"] --> B_CHECK{Breaking changes
at severity >= filter?} - B_CHECK -->|Yes| B_FAIL["Exit 1 (gate failed)"] - B_CHECK -->|No| B_PASS["Exit 0 (pass)"] - B_RUN -->|Error| B_ERR["Exit 2 (error)"] - end - - subgraph "pri risk" - R_RUN["Calculate risk score"] --> R_HAS{--threshold
provided?} - R_HAS -->|No| R_ALWAYS["Exit 0 (always)"] - R_HAS -->|Yes| R_CHECK{Score >= threshold?} - R_CHECK -->|Yes| R_FAIL["Exit 1 (gate failed)"] - R_CHECK -->|No| R_PASS["Exit 0 (pass)"] - R_RUN -->|Error| R_ERR["Exit 2 (error)"] - end - - style B_FAIL fill:#dc2626,color:#fff - style B_PASS fill:#059669,color:#fff - style B_ERR fill:#b45309,color:#fff - style R_ALWAYS fill:#059669,color:#fff - style R_FAIL fill:#dc2626,color:#fff - style R_PASS fill:#059669,color:#fff - style R_ERR fill:#b45309,color:#fff -``` - -| Exit Code | Meaning | Commands | -|---|---|---| -| `0` | Success / quality gate passed | All commands | -| `1` | Quality gate failed | `pri breaking` (breaking changes found), `pri risk` (score >= threshold) | -| `2` | Internal error (analysis crashed) | All commands | - ---- - -## GitHub Actions Example - -```yaml -name: PR Impact Analysis - -on: - pull_request: - branches: [main] - -jobs: - pr-impact: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Full history needed for diff - - - uses: actions/setup-node@v4 - with: - node-version: 20 - - - name: Install pr-impact CLI - run: npm install -g @pr-impact/cli - - - name: Check for breaking changes - run: pri breaking origin/main HEAD --severity medium - - - name: Check risk score - run: pri risk origin/main HEAD --threshold 60 - - - name: Full analysis report - if: always() - run: pri analyze origin/main HEAD --format md - - - name: Post report as PR comment - if: always() - run: pri comment origin/main HEAD - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -``` - ---- - -## GitLab CI Example - -```yaml -# .gitlab-ci.yml -pr-impact: - image: node:20 - stage: test - variables: - GIT_DEPTH: 0 # Full clone - before_script: - - npm install -g @pr-impact/cli - script: - - pri breaking origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME HEAD --severity medium - - pri risk origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME HEAD --threshold 60 - - pri analyze 
origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME HEAD --format md - rules: - - if: $CI_PIPELINE_SOURCE == "merge_request_event" -``` - -To post a comment on the merge request: - -```yaml - - pri comment origin/$CI_MERGE_REQUEST_TARGET_BRANCH_NAME HEAD - --pr $CI_MERGE_REQUEST_IID - --github-repo $CI_PROJECT_PATH -``` - -> **Note:** `pri comment` uses the GitHub API. For GitLab merge requests, generate the report and use GitLab's API or a GitLab-specific commenting tool instead. - ---- - -## CircleCI Example - -```yaml -# .circleci/config.yml -version: 2.1 - -jobs: - pr-impact: - docker: - - image: cimg/node:20.0 - steps: - - checkout # CircleCI does a full clone by default - - run: - name: Install pr-impact - command: npm install -g @pr-impact/cli - - run: - name: Check breaking changes - command: pri breaking origin/main HEAD --severity medium - - run: - name: Check risk score - command: pri risk origin/main HEAD --threshold 60 - - run: - name: Full analysis - command: pri analyze origin/main HEAD --format md - when: always - -workflows: - pr-check: - jobs: - - pr-impact: - filters: - branches: - ignore: main -``` - ---- - -## Jenkins Example - -```groovy -// Jenkinsfile -pipeline { - agent { docker { image 'node:20' } } - - stages { - stage('Install') { - steps { - sh 'npm install -g @pr-impact/cli' - } - } - stage('Breaking Changes') { - steps { - sh 'pri breaking origin/main HEAD --severity medium' - } - } - stage('Risk Score') { - steps { - sh 'pri risk origin/main HEAD --threshold 60' - } - } - stage('Full Report') { - steps { - sh 'pri analyze origin/main HEAD --format json --output report.json' - archiveArtifacts artifacts: 'report.json' - } - } - } -} -``` - -> **Note:** Ensure Jenkins clones with full history. In pipeline SCM settings, set "Advanced clone behaviors" and uncheck "Shallow clone". 
- ---- - -## CI Workflow Diagram - -```mermaid -sequenceDiagram - participant PR as Pull Request - participant CI as GitHub Actions - participant PRI as pri CLI - participant Git as Git Repo - - PR->>CI: PR opened / updated - CI->>Git: checkout (full history) - - CI->>PRI: pri breaking --severity medium - PRI->>Git: git diff origin/main...HEAD - PRI-->>CI: Exit 0 (no medium+ breaking changes) - - CI->>PRI: pri risk --threshold 60 - PRI->>Git: git diff + full analysis - PRI-->>CI: Exit 0 (score < 60) - - CI->>PRI: pri analyze --format md - PRI-->>CI: Markdown report - - Note over CI: All checks passed - CI-->>PR: Status: success -``` - ---- - -## Recommended Thresholds - -| Gate | Recommended Setting | Rationale | -|---|---|---| -| Breaking changes | `--severity medium` | Blocks medium and high severity; allows low (renames) | -| Risk score | `--threshold 60` | Blocks high and critical risk PRs; allows low and medium | - -Adjust these based on your team's tolerance. A stricter setup: - -```bash -# Block any breaking change at all -pri breaking --severity low - -# Block anything above low risk -pri risk --threshold 26 -``` - ---- - -## Output Formats for CI - -| Command | Format flag | Use case | -|---|---|---| -| `pri analyze --format md` | Markdown | Post as PR comment | -| `pri analyze --format json` | JSON | Parse in downstream scripts | -| `pri risk --format json` | JSON | Machine-readable score for dashboards | -| `pri impact --format dot` | Graphviz DOT | Generate SVG impact diagrams | - -### Posting Reports as PR Comments - -The recommended approach is to use the built-in `pri comment` command, which handles analysis and GitHub comment posting in a single step: - -```yaml - - name: Post PR impact report - run: pri comment origin/main HEAD - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -``` - -`pri comment` will: -1. Run the full analysis -2. Find an existing pr-impact comment on the PR (if any) using hidden HTML markers -3. 
Create a new comment or update the existing one (upsert behavior) - -**Options:** - -| Flag | Description | Default | -|---|---|---| -| `--pr ` | PR number | Auto-detected from CI env | -| `--github-repo ` | GitHub repository | Auto-detected from CI env | -| `--token ` | GitHub API token | `GITHUB_TOKEN` env var | -| `--repo ` | Local repository path | Current directory | - -**Supported CI environments for auto-detection:** GitHub Actions, GitLab CI, CircleCI. - -**Alternative:** If you need more control over the report format, generate it separately and use a third-party action: - -```yaml - - name: Generate report - run: pri analyze origin/main HEAD --format md --output report.md - - - name: Comment on PR - uses: marocchino/sticky-pull-request-comment@v2 - with: - path: report.md -``` - ---- - -## Important Notes - -- **Fetch depth** -- always use `fetch-depth: 0` (full clone) so `git diff` can access the base branch history. -- **Branch references** -- in CI, use `origin/main` as the base (not just `main`) since the local branch may not exist. -- **Exit codes** -- Exit 0 = success/gate passed. Exit 1 = quality gate failed (`pri breaking`, `pri risk` only). Exit 2 = internal error (all commands). `pri analyze`, `pri impact`, and `pri comment` never exit 1 since they don't act as quality gates. -- **`pri impact` differences** -- unlike other commands, `pri impact` takes an optional `[file]` positional argument (not `[base] [head]`), plus `--depth ` (default 3) and `--format `. It auto-detects `main`/`master` internally. diff --git a/docs/configuration-guide.md b/docs/configuration-guide.md deleted file mode 100644 index 30e6245..0000000 --- a/docs/configuration-guide.md +++ /dev/null @@ -1,148 +0,0 @@ -# Configuration Guide - -Guidance on tuning pr-impact for your project — choosing thresholds, skipping unnecessary checks, and handling monorepo setups. 
- ---- - -## Threshold Selection - -### Risk Score Threshold (`pri risk --threshold`) - -The risk score ranges from 0 to 100. Choosing a threshold depends on your team's risk tolerance: - -| Threshold | Blocks | Best for | -|---|---|---| -| `--threshold 75` | Critical only | Large, fast-moving projects where most PRs touch many files | -| `--threshold 60` | High + Critical | **Recommended default.** Blocks genuinely risky PRs without too much friction | -| `--threshold 50` | Medium + High + Critical | Stricter teams, libraries with public API stability guarantees | -| `--threshold 26` | Everything except Low | Very strict; every non-trivial change needs attention | - -**Starting recommendation:** Begin with `--threshold 60` and adjust based on false-positive rates over 2-4 weeks. If the gate blocks PRs that your team considers safe, raise the threshold. If risky PRs slip through, lower it. - -### Breaking Change Severity (`pri breaking --severity`) - -| Severity | What it catches | When to use | -|---|---|---| -| `--severity low` | All breaking changes including renames | Public libraries, strict API contracts | -| `--severity medium` | Signature changes + removed exports | **Recommended default.** Catches real breakage without noise from renames | -| `--severity high` | Only removed exports and drastic signature changes | Internal projects where consumers can be updated quickly | - ---- - -## Skipping Analysis Steps - -Some analysis steps may not be relevant for every project. - -### `--no-breaking` - -Skip breaking change detection. Use when: -- The project has no exported API (e.g., a standalone application, not a library) -- Breaking change detection produces too many false positives for your codebase - -### `--no-coverage` - -Skip test coverage gap analysis. 
Use when: -- The project uses a different test file naming convention that pr-impact doesn't recognize -- Test coverage is enforced through other tools (e.g., Istanbul/c8 coverage thresholds) - -### `--no-docs` - -Skip documentation staleness checking. Use when: -- The project has no documentation files -- Documentation is maintained separately (e.g., in a different repo or wiki) - -### Example: Application with no public API - -```bash -pri analyze --no-breaking -pri risk --threshold 60 --no-breaking -``` - -### Example: Minimal check (just risk score from diff size + test coverage) - -```bash -pri analyze --no-breaking --no-docs -``` - ---- - -## Monorepo Considerations - -pr-impact operates on the entire git diff between two branches. In a monorepo, this means changes across all packages are analyzed together. - -### Running against the whole monorepo - -```bash -# This analyzes ALL changes across all packages -pri analyze origin/main HEAD -``` - -This works well when: -- You want a single risk score for the entire PR -- Breaking changes in shared packages should surface as risks - -### Running against a specific package - -pr-impact doesn't have a built-in package filter, but you can scope the analysis by pointing `--repo` at a subdirectory (if it's its own git repo) or by using the programmatic API to filter `ChangedFile[]` by path prefix. 
- -```typescript -import { parseDiff, calculateRisk, detectBreakingChanges } from '@pr-impact/core'; - -const allFiles = await parseDiff('.', 'main', 'HEAD'); -const coreFiles = allFiles.filter(f => f.path.startsWith('packages/core/')); - -// Run analysis only on core package files -const breaking = await detectBreakingChanges('.', 'main', 'HEAD', coreFiles); -``` - ---- - -## Impact Graph Depth - -The `--depth` flag on `pri impact` controls how many levels of transitive imports to follow: - -| Depth | Behavior | Use case | -|---|---|---| -| `1` | Direct consumers only | Quick check, large codebases | -| `3` | Three levels of transitive imports | **Default.** Good balance of coverage and noise | -| `5+` | Deep traversal | Small codebases, thorough impact analysis | - -Deeper traversal is slower and may surface files that are only loosely related. Start with the default (3) and increase if you need more visibility. - ---- - -## Output Formats - -| Format | Flag | Best for | -|---|---|---| -| Markdown | `--format md` | Human reading, PR comments | -| JSON | `--format json` | Parsing in scripts, dashboards, custom reporting | -| Plain text | `--format text` | Terminal output (default for `pri risk`) | -| Graphviz DOT | `--format dot` | Generating visual impact diagrams | - -### Generating impact diagrams - -```bash -pri impact --format dot > impact.dot -dot -Tsvg impact.dot -o impact.svg -``` - -Requires [Graphviz](https://graphviz.org/) installed (`brew install graphviz` on macOS). - ---- - -## Environment Variables - -| Variable | Used by | Description | -|---|---|---| -| `GITHUB_TOKEN` | `pri comment` | GitHub API token for posting PR comments | - -`pri comment` auto-detects the PR number and repository from CI environment variables (GitHub Actions, GitLab CI, CircleCI). You can override with `--pr` and `--github-repo` flags. 
- ---- - -## Next Steps - -- [CI Integration](./ci-integration.md) — Set up automated quality gates -- [Risk Scoring](./risk-scoring.md) — Understand how the risk score is calculated -- [Troubleshooting](./troubleshooting.md) — Common issues and fixes diff --git a/docs/data-flow.md b/docs/data-flow.md deleted file mode 100644 index 37459d8..0000000 --- a/docs/data-flow.md +++ /dev/null @@ -1,198 +0,0 @@ -# Data Flow - -All shared types are defined in `packages/core/src/types.ts` and re-exported from the barrel `index.ts`. This document maps how data flows between modules and the relationships between interfaces. - ---- - -## Type Relationship Diagram - -```mermaid -erDiagram - PRAnalysis { - string repoPath - string baseBranch - string headBranch - string summary - } - - ChangedFile { - string path - string status "added | modified | deleted | renamed" - string oldPath "optional" - number additions - number deletions - string language - string category "source | test | doc | config | other" - } - - BreakingChange { - string filePath - string type "removed_export | changed_signature | changed_type | renamed_export" - string symbolName - string before - string after "nullable" - string severity "high | medium | low" - string[] consumers - } - - TestCoverageReport { - number changedSourceFiles - number sourceFilesWithTestChanges - number coverageRatio - } - - TestCoverageGap { - string sourceFile - string[] expectedTestFiles - boolean testFileExists - boolean testFileChanged - } - - DocStalenessReport { - string[] checkedFiles - } - - StaleReference { - string docFile - number line - string reference - string reason - } - - ImpactGraph { - string[] directlyChanged - string[] indirectlyAffected - } - - ImpactEdge { - string from - string to - string type "imports" - } - - RiskAssessment { - number score "0-100" - string level "low | medium | high | critical" - } - - RiskFactor { - string name - number score - number weight - string description - string[] details 
"optional" - } - - PRAnalysis ||--o{ ChangedFile : "changedFiles" - PRAnalysis ||--o{ BreakingChange : "breakingChanges" - PRAnalysis ||--|| TestCoverageReport : "testCoverage" - PRAnalysis ||--|| DocStalenessReport : "docStaleness" - PRAnalysis ||--|| ImpactGraph : "impactGraph" - PRAnalysis ||--|| RiskAssessment : "riskScore" - TestCoverageReport ||--o{ TestCoverageGap : "gaps" - DocStalenessReport ||--o{ StaleReference : "staleReferences" - ImpactGraph ||--o{ ImpactEdge : "edges" - RiskAssessment ||--o{ RiskFactor : "factors" -``` - ---- - -## Data Flow Through the Pipeline - -```mermaid -flowchart LR - subgraph Input - OPT["AnalysisOptions
repoPath, base, head,
skipBreaking?, skipCoverage?, skipDocs?"] - end - - subgraph "Step 3" - DIFF["parseDiff()"] - end - - subgraph "Step 4 — Parallel" - BC["detectBreakingChanges()"] - TC["checkTestCoverage()"] - DS["checkDocStaleness()"] - IG["buildImpactGraph()"] - end - - subgraph "Step 5" - RISK["calculateRisk()"] - end - - subgraph Output - PR["PRAnalysis"] - end - - OPT -->|repoPath, base, head| DIFF - DIFF -->|ChangedFile[]| BC - DIFF -->|ChangedFile[]| TC - DIFF -->|ChangedFile[]| DS - DIFF -->|ChangedFile[]| IG - BC -->|BreakingChange[]| RISK - TC -->|TestCoverageReport| RISK - DS -->|DocStalenessReport| RISK - IG -->|ImpactGraph| RISK - DIFF -->|ChangedFile[]| RISK - RISK -->|RiskAssessment| PR - BC -->|BreakingChange[]| PR - TC -->|TestCoverageReport| PR - DS -->|DocStalenessReport| PR - IG -->|ImpactGraph| PR - DIFF -->|ChangedFile[]| PR - - style OPT fill:#6b7280,color:#fff - style DIFF fill:#0891b2,color:#fff - style BC fill:#dc2626,color:#fff - style TC fill:#059669,color:#fff - style DS fill:#ca8a04,color:#fff - style IG fill:#7c3aed,color:#fff - style RISK fill:#e11d48,color:#fff - style PR fill:#4f46e5,color:#fff -``` - ---- - -## Internal Types (Not Exported as Public API) - -These types are used within the core package but not exposed to consumers: - -```mermaid -erDiagram - ExportedSymbol { - string name - string kind "function | class | variable | type | interface | enum | const" - string signature "optional" - boolean isDefault - } - - FileExports { - string filePath - } - - FileExports ||--o{ ExportedSymbol : "symbols" -``` - -- `ExportedSymbol` and `FileExports` are used by `export-differ.ts` and `detector.ts` for comparing exports between base and head branches. -- Although re-exported from `index.ts`, they are primarily internal to the breaking change detection layer. 
- ---- - -## Module-to-Type Mapping - -| Module | Consumes | Produces | -|---|---|---| -| `diff-parser.ts` | `repoPath`, `base`, `head` (strings) | `ChangedFile[]` | -| `file-categorizer.ts` | file path string | `category` field value | -| `detector.ts` | `ChangedFile[]`, git refs | `BreakingChange[]` | -| `export-differ.ts` | file content strings | `FileExports` (via `parseExports`), `{ removed, added, modified }` (via `diffExports`) | -| `signature-differ.ts` | signature strings | `{ changed, details }` | -| `import-resolver.ts` | `repoPath`, target file paths | `Map` (consumers map) | -| `coverage-checker.ts` | `ChangedFile[]` | `TestCoverageReport` | -| `test-mapper.ts` | source file path | expected test file paths | -| `staleness-checker.ts` | `ChangedFile[]`, git refs | `DocStalenessReport` | -| `impact-graph.ts` | `ChangedFile[]` | `ImpactGraph` | -| `risk-calculator.ts` | all analysis results | `RiskAssessment` | -| `factors.ts` | individual analysis results | `RiskFactor` | -| `markdown-reporter.ts` | `PRAnalysis` | Markdown string | -| `json-reporter.ts` | `PRAnalysis` | JSON string | diff --git a/docs/getting-started.md b/docs/getting-started.md deleted file mode 100644 index 7347c3f..0000000 --- a/docs/getting-started.md +++ /dev/null @@ -1,146 +0,0 @@ -# Getting Started - -A quick guide to installing pr-impact and running your first analysis. - ---- - -## Prerequisites - -- **Node.js** >= 18 -- **Git** — the repository you want to analyze must be a git repo with at least two branches (or commits) to compare -- The repository must have a **full clone** (not shallow) so `git diff` can access full history - ---- - -## Installation - -### Global install (recommended for CLI usage) - -```bash -# npm -npm install -g @pr-impact/cli - -# pnpm -pnpm add -g @pr-impact/cli -``` - -### Per-project install - -```bash -npm install --save-dev @pr-impact/cli -``` - -Then run via `npx pri` or add scripts to your `package.json`. 
- -### As a library - -```bash -npm install @pr-impact/core -``` - -See the [Programmatic API Guide](./programmatic-api.md) for library usage. - ---- - -## First Run - -Navigate to any git repository and run: - -```bash -pri analyze -``` - -This compares `main` (or `master`, auto-detected) against `HEAD` and prints a full Markdown report covering: - -- Breaking changes -- Test coverage gaps -- Stale documentation references -- Import dependency impact graph -- Weighted risk score - -### Specify branches explicitly - -```bash -pri analyze origin/develop feature/my-branch -``` - -The first argument is the **base** branch (what you're merging into) and the second is the **head** branch (what you're merging). - ---- - -## Understanding the Output - -### Risk Score - -The report ends with a risk score from 0 to 100: - -| Score Range | Level | Meaning | -|---|---|---| -| 0 -- 25 | **Low** | Routine change, low blast radius | -| 26 -- 50 | **Medium** | Some risk factors present, review recommended | -| 51 -- 75 | **High** | Significant risk, careful review required | -| 76 -- 100 | **Critical** | Major breaking changes or large untested diff | - -The score is a weighted combination of six factors. Run `pri risk` for a detailed factor breakdown. See [Risk Scoring](./risk-scoring.md) for the full formula. - -### Breaking Changes - -Each breaking change includes: - -- **File** — which file was affected -- **Type** — what changed (removed export, changed signature, renamed symbol, etc.) -- **Severity** — `low`, `medium`, or `high` -- **Consumers** — which files import the affected symbol - -### Test Coverage Gaps - -Lists source files that changed but have no corresponding test file changes. A coverage ratio of `1.0` means every changed source file also had test updates. - -### Impact Graph - -Shows **directly changed** files and **indirectly affected** files (consumers that import the changed files, transitively up to depth 3). 
- ---- - -## Common Workflows - -### Quick breaking change check - -```bash -pri breaking -``` - -Exits with code 1 if any breaking changes are found. Use `--severity medium` to only fail on medium or high severity. - -### Risk gate for PRs - -```bash -pri risk --threshold 60 -``` - -Exits with code 1 if the risk score is 60 or above. - -### Impact of a specific file - -```bash -pri impact src/auth/login.ts -``` - -Shows which files depend on `src/auth/login.ts` and would be affected by changes to it. - -### JSON output for scripting - -```bash -pri analyze --format json --output report.json -pri risk --format json -``` - ---- - -## Next Steps - -- [CI Integration](./ci-integration.md) — Set up automated quality gates in your CI pipeline -- [MCP Integration](./mcp-integration.md) — Let AI assistants use pr-impact as a tool -- [Programmatic API](./programmatic-api.md) — Use pr-impact as a library in your own code -- [Configuration Guide](./configuration-guide.md) — Tune thresholds and skip unnecessary checks -- [Troubleshooting](./troubleshooting.md) — Common issues and solutions diff --git a/docs/mcp-integration.md b/docs/mcp-integration.md deleted file mode 100644 index 6ae7f14..0000000 --- a/docs/mcp-integration.md +++ /dev/null @@ -1,284 +0,0 @@ -# MCP Server Integration - -The `@pr-impact/mcp-server` package exposes pr-impact analysis capabilities as [Model Context Protocol](https://modelcontextprotocol.io/) tools. This allows AI assistants (Claude Code, Cursor, etc.) to call the analysis functions directly. - ---- - -## Architecture - -```mermaid -flowchart LR - subgraph "AI Assistant" - CC["Claude Code /
Cursor / MCP Client"] - end - - subgraph "@pr-impact/mcp-server" - TRANSPORT["StdioServerTransport"] - SERVER["McpServer"] - subgraph Tools - T1["analyze_diff"] - T2["get_breaking_changes"] - T3["get_risk_score"] - T4["get_impact_graph"] - end - end - - subgraph "@pr-impact/core" - A["analyzePR()"] - PD["parseDiff()"] - B["detectBreakingChanges()"] - I["buildImpactGraph()"] - end - - CC <-->|stdio| TRANSPORT - TRANSPORT <--> SERVER - SERVER --> T1 & T2 & T3 & T4 - T1 --> A - T2 --> PD - T2 --> B - T3 --> A - T4 --> PD - T4 --> I - - style CC fill:#4f46e5,color:#fff - style SERVER fill:#d97706,color:#fff - style A fill:#4f46e5,color:#fff - style PD fill:#0891b2,color:#fff - style B fill:#dc2626,color:#fff - style I fill:#7c3aed,color:#fff -``` - ---- - -## Server Initialization - -The MCP server follows the standard `@modelcontextprotocol/sdk` pattern: - -```mermaid -sequenceDiagram - participant Process as Node.js Process - participant Server as McpServer - participant Transport as StdioServerTransport - participant Client as MCP Client - - Process->>Server: new McpServer({ name, version }) - Process->>Server: registerAnalyzeDiffTool(server) - Process->>Server: registerGetBreakingChangesTool(server) - Process->>Server: registerGetRiskScoreTool(server) - Process->>Server: registerGetImpactGraphTool(server) - Process->>Transport: new StdioServerTransport() - Process->>Server: server.connect(transport) - Server-->>Transport: Listening on stdin/stdout - - Client->>Transport: JSON-RPC request (tool call) - Transport->>Server: Route to tool handler - Server-->>Transport: JSON-RPC response - Transport-->>Client: Result -``` - ---- - -## Available Tools - -### `analyze_diff` - -Full PR analysis combining all analysis steps. 
- -| Parameter | Type | Required | Default | -|---|---|---|---| -| `repoPath` | string | No | `process.cwd()` | -| `baseBranch` | string | No | auto-detect `main`/`master` | -| `headBranch` | string | No | `HEAD` | - -Returns: Markdown-formatted report covering breaking changes, test coverage, doc staleness, impact graph, and risk score. - -### `get_breaking_changes` - -Detect breaking API changes with severity filtering. - -| Parameter | Type | Required | Default | -|---|---|---|---| -| `repoPath` | string | No | `process.cwd()` | -| `baseBranch` | string | No | auto-detect | -| `headBranch` | string | No | `HEAD` | -| `minSeverity` | `low` \| `medium` \| `high` | No | `low` | - -Returns: Markdown-formatted list of breaking changes, filtered by severity. - -### `get_risk_score` - -Calculate the weighted risk score with factor breakdown. - -| Parameter | Type | Required | Default | -|---|---|---|---| -| `repoPath` | string | No | `process.cwd()` | -| `baseBranch` | string | No | auto-detect | -| `headBranch` | string | No | `HEAD` | - -Returns: Markdown-formatted risk assessment showing overall score, level, and factor breakdown. - -### `get_impact_graph` - -Build the import-dependency impact graph. - -| Parameter | Type | Required | Default | -|---|---|---|---| -| `repoPath` | string | No | `process.cwd()` | -| `baseBranch` | string | No | auto-detect | -| `headBranch` | string | No | `HEAD` | -| `filePath` | string | No | all changed files | -| `depth` | number | No | `3` | - -Returns: Markdown-formatted impact graph listing directly changed files, indirectly affected files, and dependency edges. 
- ---- - -## Tool Registration Pattern - -Each tool is defined in its own file under `src/tools/` and follows a consistent pattern: - -```mermaid -flowchart TD - FILE["src/tools/get-risk-score.ts"] - REGISTER["registerGetRiskScoreTool(server)"] - SCHEMA["Zod input schema"] - HANDLER["Tool handler function"] - CORE["@pr-impact/core function"] - - FILE --> REGISTER - REGISTER --> SCHEMA - REGISTER --> HANDLER - HANDLER --> CORE - - style FILE fill:#6b7280,color:#fff - style REGISTER fill:#d97706,color:#fff - style SCHEMA fill:#7c3aed,color:#fff - style HANDLER fill:#059669,color:#fff - style CORE fill:#4f46e5,color:#fff -``` - -1. Define a Zod schema for input validation. -2. Register the tool on the `McpServer` instance with name, description, and schema. -3. The handler calls the corresponding `@pr-impact/core` function and returns the result. - ---- - -## Configuration - -### Claude Code - -Add to `.claude/mcp.json` or global settings: - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### Local development (monorepo) - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "node", - "args": ["./packages/mcp-server/dist/index.js"] - } - } -} -``` - -### Claude Desktop - -Add to `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS) or `%APPDATA%\Claude\claude_desktop_config.json` (Windows): - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### Cursor - -Add to `.cursor/mcp.json` in your project root: - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### VS Code (Copilot MCP) - -Add to `.vscode/mcp.json` in your project root: - -```json -{ - "servers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### Any MCP-compatible client - -The server communicates 
over **stdio** (stdin/stdout) using JSON-RPC. Any MCP client that supports stdio transport can connect. - ---- - -## Manual Testing - -Use the [MCP Inspector](https://modelcontextprotocol.io/docs/tools/inspector) to test the server locally: - -```bash -# Build the server first -pnpm build --filter=@pr-impact/mcp-server - -# Run the inspector against the built server -npx @modelcontextprotocol/inspector node ./packages/mcp-server/dist/index.js -``` - -The inspector opens a web UI where you can: -1. See all registered tools and their input schemas -2. Call tools interactively with custom parameters -3. Inspect the JSON-RPC request/response payloads - ---- - -## Communication Flow - -```mermaid -sequenceDiagram - participant User - participant AI as AI Assistant - participant MCP as MCP Server - participant Core as @pr-impact/core - participant Git as simple-git - - User->>AI: "What's the risk of this PR?" - AI->>MCP: tool_call: get_risk_score({ repoPath: "." }) - MCP->>Core: analyzePR({ repoPath: "." }) - Core->>Git: git diff, git show, etc. - Git-->>Core: Raw git output - Core-->>MCP: PRAnalysis - MCP-->>AI: Markdown-formatted risk assessment - AI-->>User: "Risk score is 42 (medium)..." -``` diff --git a/docs/migration-guide.md b/docs/migration-guide.md new file mode 100644 index 0000000..5feae69 --- /dev/null +++ b/docs/migration-guide.md @@ -0,0 +1,129 @@ +# Migrating from pr-impact v0.x to v1.0 + +v1.0 is a complete architecture rewrite. Deterministic analysis is replaced by an AI agent that uses tool calls to gather evidence and produce reports. 
+ +## Package Changes + +| v0.x Package | v1.0 Replacement | Action Required | +|---|---|---| +| `@pr-impact/core` | `@pr-impact/tools-core` | Update imports (see below) | +| `@pr-impact/cli` | `@pr-impact/action` + `@pr-impact/skill` | Remove CLI usage, switch to GitHub Action or Claude Code plugin | +| `@pr-impact/mcp-server` | `@pr-impact/tools` | Update MCP config (tool names changed) | + +## Migrating from `@pr-impact/core` + +### If you used `analyzePR()` programmatically + +The `analyzePR()` orchestrator no longer exists. In v1.0, analysis is performed by Claude via tool calls. You have two options: + +**Option A: Use the GitHub Action** (recommended for CI) + +```yaml +- uses: ducdmdev/pr-impact@v1 + with: + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} +``` + +**Option B: Use individual tool functions** + +```typescript +// v0.x +import { analyzePR } from '@pr-impact/core'; +const report = await analyzePR({ repoPath: '.', base: 'main', head: 'feature' }); + +// v1.0 +import { listChangedFiles, gitDiff, findImporters } from '@pr-impact/tools-core'; +const files = await listChangedFiles({ base: 'main', head: 'feature' }); +const diff = await gitDiff({ base: 'main', head: 'feature', file: 'src/index.ts' }); +const consumers = await findImporters({ modulePath: 'src/utils.ts' }); +// Analysis logic is now in the AI agent's system prompt, not in code +``` + +### If you used individual analysis functions + +| v0.x Function | v1.0 Equivalent | +|---|---| +| `parseDiff()` | `listChangedFiles()` + `gitDiff()` | +| `detectBreakingChanges()` | Use `readFileAtRef()` to compare exports manually, or let the AI agent handle it | +| `checkTestCoverage()` | `listTestFiles()` | +| `checkStaleDocs()` | `searchCode()` to find references | +| `buildImpactGraph()` | `findImporters()` | +| `calculateRisk()` | Risk scoring is now in the AI agent's system prompt | +| `formatMarkdown()` / `formatJSON()` | Report formatting is in the AI agent's report template | + +## Migrating 
from `@pr-impact/cli` + +The `pri` CLI has been removed. Replace with: + +| CLI Command | v1.0 Alternative | +|---|---| +| `pri analyze` | GitHub Action or Claude Code `/pr-impact` | +| `pri breaking` | `readFileAtRef()` to compare exports | +| `pri impact` | `findImporters()` from `@pr-impact/tools-core` | +| `pri risk` | GitHub Action outputs `risk-score` and `risk-level` | +| `pri comment` | GitHub Action with `github-token` input | + +### CI migration + +```yaml +# v0.x +- run: npx pri analyze --base main --threshold 75 + +# v1.0 +- uses: ducdmdev/pr-impact@v1 + with: + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} + threshold: '75' +``` + +## Migrating from `@pr-impact/mcp-server` + +### MCP tool name changes + +| v0.x Tool | v1.0 Tool | +|---|---| +| `analyze_diff` | No direct equivalent -- use `git_diff` + `list_changed_files` | +| `get_breaking_changes` | No direct equivalent -- use `read_file_at_ref` to compare exports | +| `get_impact_graph` | `find_importers` | +| `get_risk_score` | No direct equivalent -- risk scoring is in the AI prompt | +| *(new)* | `git_diff` -- raw diff between two refs | +| *(new)* | `read_file_at_ref` -- read file at a git ref | +| *(new)* | `list_changed_files` -- list changed files with stats | +| *(new)* | `search_code` -- regex search via git grep | +| *(new)* | `list_test_files` -- find test files for a source file | + +### MCP config migration + +```jsonc +// v0.x +{ + "mcpServers": { + "pr-impact": { + "command": "npx", + "args": ["-y", "@pr-impact/mcp-server"] + } + } +} + +// v1.0 +{ + "mcpServers": { + "pr-impact": { + "command": "npx", + "args": ["-y", "@pr-impact/tools"] + } + } +} +``` + +## Key Differences + +| Aspect | v0.x | v1.0 | +|---|---|---| +| Analysis engine | Deterministic code (regex parsing, AST-free) | AI agent (Claude via Anthropic API) | +| Breaking change detection | Regex-based export diffing | Claude reads both versions and compares | +| Risk scoring | Computed in code (`risk-calculator.ts`) 
| Claude computes using the system prompt formula | +| Report format | Generated by `markdown-reporter.ts` | Claude follows a report template | +| Consistency | Identical results every run | May vary slightly between runs | +| Cost | Free (local computation) | Requires Anthropic API key (API usage costs) | +| Depth | Fixed analysis rules | Context-aware, can reason about intent | diff --git a/docs/plans/2026-02-11-ai-agent-rewrite-design.md b/docs/plans/2026-02-11-ai-agent-rewrite-design.md new file mode 100644 index 0000000..acc9edb --- /dev/null +++ b/docs/plans/2026-02-11-ai-agent-rewrite-design.md @@ -0,0 +1,456 @@ +# Design: AI Agent Rewrite of pr-impact + +**Date**: 2026-02-11 +**Status**: Draft +**Author**: ducdm + +--- + +## Motivation + +Replace all deterministic TypeScript analysis code with an AI agent that reasons about PR impact directly. The current regex-based export parsing, heuristic test mapping, and rule-based risk scoring are limited in what they can detect. An AI agent can understand code semantics, explain findings, and provide actionable recommendations. + +## Goals + +- AI agent performs all analysis: breaking changes, test coverage gaps, doc staleness, impact graph, risk scoring +- Deliver as a **Claude Code plugin** for interactive use +- Deliver as a **GitHub Action** for automated CI +- Structured output using a predefined template (consistent across runs) +- Conversational follow-up in Claude Code (ask why, get suggestions) + +## Non-Goals + +- Support for non-Claude LLMs (may revisit later) +- Keeping the old deterministic analysis as a fallback +- Real-time streaming of partial results + +--- + +## Architecture Overview + +Four packages replace the current three (`core`, `cli`, `mcp-server`). The key design decision is a shared `tools-core` package containing pure tool logic that both the MCP server and GitHub Action import. This eliminates the DRY violation of duplicating tool implementations. 
+ +``` +pr-impact/ +├── packages/ +│ ├── tools-core/ @pr-impact/tools-core (pure tool functions) +│ │ └── src/ +│ │ ├── index.ts Barrel exports +│ │ ├── git-diff.ts Get diff between branches +│ │ ├── read-file.ts Read file at a specific git ref +│ │ ├── list-files.ts List changed files between branches +│ │ ├── search-code.ts Search for patterns in codebase +│ │ ├── find-importers.ts Find files that import a given path +│ │ └── list-tests.ts List test files related to a source file +│ │ +│ ├── tools/ @pr-impact/tools (MCP server) +│ │ └── src/ +│ │ ├── index.ts MCP server entry (stdio transport) +│ │ └── tools/ +│ │ ├── git-diff.ts MCP wrapper for tools-core +│ │ ├── read-file.ts MCP wrapper for tools-core +│ │ ├── list-files.ts MCP wrapper for tools-core +│ │ ├── search-code.ts MCP wrapper for tools-core +│ │ ├── find-importers.ts MCP wrapper for tools-core +│ │ └── list-tests.ts MCP wrapper for tools-core +│ │ +│ ├── skill/ Claude Code plugin +│ │ ├── .claude-plugin/ +│ │ │ └── config.json Plugin metadata +│ │ ├── skill.md Skill definition (assembled from templates at build time) +│ │ ├── mcp.json Registers @pr-impact/tools MCP server +│ │ └── package.json +│ │ +│ └── action/ GitHub Action +│ ├── action.yml Action metadata +│ ├── src/ +│ │ ├── index.ts Entry point +│ │ ├── client.ts Anthropic API client with tool use +│ │ └── templates.ts Generated file — prompt/report templates as string constants +│ ├── tsconfig.json +│ └── package.json +│ +├── templates/ Shared prompt & report templates +│ ├── system-prompt.md Core analysis methodology +│ └── report-template.md Output structure +│ +└── scripts/ + └── build-skill.ts Assembles skill.md from templates +``` + +### Package Dependency Graph + +``` +@pr-impact/tools ──depends──> @pr-impact/tools-core +@pr-impact/action ──depends──> @pr-impact/tools-core +@pr-impact/skill ──uses MCP──> @pr-impact/tools +templates/ ──assembled by──> scripts/build-skill.ts ──into──> skill/skill.md +``` + +`tools-core` is a pure library 
with no I/O framework dependencies. It exports plain async functions that accept parameters and return typed results. The `tools` package wraps each function in an MCP tool definition. The `action` package calls the same functions directly in its agentic loop. + +--- + +## MCP Tools (`@pr-impact/tools-core` + `@pr-impact/tools`) + +Six thin tools that give the AI read-only access to the repository. No analysis logic — tools return raw data, the AI interprets it. The pure implementations live in `tools-core`; the MCP server in `tools` wraps them with schema validation and MCP transport. + +| Tool | Purpose | Parameters | Returns | +|---|---|---|---| +| `git_diff` | Get diff between two branches | `repoPath`, `base`, `head`, `file?` | Raw diff text | +| `read_file_at_ref` | Read file content at a git ref | `repoPath`, `ref`, `filePath` | File contents | +| `list_changed_files` | List files changed between branches | `repoPath`, `base`, `head` | `{path, status, additions, deletions}[]` | +| `search_code` | Search for a pattern in the codebase | `repoPath`, `pattern`, `glob?` | `{file, line, match}[]` | +| `find_importers` | Find files that import a given module | `repoPath`, `modulePath` | File paths array | +| `list_test_files` | Find test files related to a source file | `repoPath`, `sourceFile` | Test file paths array | + +**Implementation**: Uses `simple-git` for git operations and `fast-glob` for file discovery. Each tool function in `tools-core` is ~20 lines. + +**`list_changed_files`**: Returns `{path, status, additions, deletions}[]`. The `status` field (added/modified/deleted/renamed) comes from `git.diffSummary()`, which provides this information per file. This lets the AI know which files are new, removed, or renamed without calling `git_diff` on every file. + +**`search_code`**: Uses `git grep` internally. The `glob` parameter is passed as a `--` pathspec to `git grep` for filtering by file pattern. 
Handles `git grep` exit code 1 (no matches found) gracefully by returning an empty result array (`[]`) instead of throwing, consistent with the documented `{file, line, match}[]` return shape.
+
+**`find_importers`**: Builds a reverse dependency map by scanning all source files in the repository. The map is cached internally for the duration of the MCP server session — subsequent calls to `find_importers` with different `modulePath` values reuse the cached map, avoiding repeated filesystem scans.
+
+**Context window management**: `git_diff` accepts an optional `file` parameter to get per-file diffs. The system prompt instructs the AI to list changed files first, then inspect selectively.
+
+---
+
+## Prompt Templates
+
+### System Prompt (`templates/system-prompt.md`)
+
+Defines the analysis methodology — the "brain" that replaces coded logic:
+
+```markdown
+You are a PR impact analyzer. Given access to a git repository, analyze a pull
+request and produce a structured impact report.
+
+## Analysis Steps
+
+1. **Diff Overview**: Call `list_changed_files` to get all changed files.
+   Categorize each as source/test/doc/config/other.
+
+2. **Breaking Change Detection**: For each changed source file that exports
+   public API symbols:
+   - Call `read_file_at_ref` for both base and head versions
+   - Compare exported functions, classes, types, interfaces
+   - Identify: removed exports, changed signatures, changed types, renames
+   - For each breaking change, call `find_importers` to find consumers
+   - Assign severity: high (removed/renamed), medium (changed signature),
+     low (changed type)
+
+3. **Test Coverage Gaps**: For each changed source file:
+   - Call `list_test_files` to find associated tests
+   - Check if those test files appear in the changed file list
+   - Flag source files that changed without test updates
+
+4. **Documentation Staleness**: For each changed doc file:
+   - Look for references to modified/deleted symbols, paths, or patterns
+   - Flag references that point to changed or removed targets
+
+5. 
**Impact Graph**: For each changed source file: + - Call `find_importers` to build the dependency chain + - Identify directly changed vs. indirectly affected files + - Only call `find_importers` once per directly changed source file + (do not recurse into indirect consumers) + +6. **Risk Assessment**: Score each factor 0-100, apply weights: + - Breaking changes (0.30): 100 if high, 60 if medium, 30 if low, 0 if none + - Untested changes (0.25): (1 - coverageRatio) * 100 + - Diff size (0.15): 0 (<100), 50 (100-500), 80 (500-1000), 100 (>1000) + - Stale docs (0.10): min(staleRefs * 20, 100) + - Config changes (0.10): 100 if CI/build, 50 if other, 0 if none + - Impact breadth (0.10): min(indirectlyAffected * 10, 100) + +## Rules +- Always use tools to verify — never guess about file contents or imports. +- If a file is too large, focus on exported symbols and public API. +- Categorize every finding with severity and evidence. +- Always use `git_diff` with the `file` parameter — never load the full diff at once. + +## Large PR Strategy +- If >30 changed files: only call `read_file_at_ref` for files with >50 lines + changed. For smaller changes, rely on the per-file diff from `git_diff`. +- If >50 changed files: focus only on source files. Skip documentation + staleness check entirely. +- For `find_importers`: call once per directly changed source file only. + Do not follow indirect consumers. 
+``` + +### Report Template (`templates/report-template.md`) + +```markdown +# PR Impact Report + +## Summary +- **Risk Score**: {score}/100 ({level}) +- **Files Changed**: {count} ({additions} added, {deletions} deleted) +- **Breaking Changes**: {count} ({high} high, {medium} medium, {low} low) +- **Test Coverage**: {ratio}% of changed source files have test updates +- **Stale Doc References**: {count} + +## Breaking Changes +| File | Change | Symbol | Severity | Consumers | +|------|--------|--------|----------|-----------| + +## Test Coverage Gaps +| Source File | Expected Test | Test Exists | Test Updated | +|-------------|---------------|-------------|--------------| + +## Impact Graph +### Directly Changed +### Indirectly Affected + +## Risk Factor Breakdown +| Factor | Score | Weight | Details | +|--------|-------|--------|---------| + +## Recommendations +(AI-generated: explains findings and suggests next steps) +``` + +--- + +## Claude Code Skill (Plugin) + +### Plugin Config (`.claude-plugin/config.json`) + +```json +{ + "name": "@pr-impact/skill", + "version": "1.0.0", + "description": "AI-powered PR impact analysis", + "skills": ["skill.md"] +} +``` + +### MCP Registration (`mcp.json`) + +```json +{ + "mcpServers": { + "pr-impact-tools": { + "command": "npx", + "args": ["-y", "@pr-impact/tools"] + } + } +} +``` + +### Skill Definition (`skill.md`) + +```markdown +--- +name: pr-impact +description: Analyze PR impact — breaking changes, test coverage, risk score +arguments: + - name: base + description: Base branch (default: main) + required: false + - name: head + description: Head branch (default: HEAD) + required: false +--- + +{system-prompt content} + +Analyze the PR comparing `$base` (default: main) to `$head` (default: HEAD). +Use the pr-impact MCP tools. Follow the analysis steps exactly. +Output using the report template. + +{report-template content} +``` + +### Template Assembly + +`skill.md` is **not** manually maintained. 
It is assembled at build time by `scripts/build-skill.ts`, which reads `templates/system-prompt.md` and `templates/report-template.md`, interpolates them into the skill definition skeleton, and writes the final `skill/skill.md`. This ensures the skill always uses the same prompt and report template as the GitHub Action.
+
+The build script runs as part of the `build` task for `@pr-impact/skill` in the Turborepo pipeline.
+
+### User Experience
+
+```bash
+/pr-impact                       # Full analysis, main...HEAD
+/pr-impact main feature/auth     # Specify branches
+# Then conversational follow-up:
+"Why is the risk score so high?"
+"What would reduce the breaking changes?"
+```
+
+---
+
+## GitHub Action
+
+### `action.yml`
+
+```yaml
+name: 'PR Impact Analysis'
+description: 'AI-powered PR impact analysis'
+inputs:
+  anthropic-api-key:
+    description: 'Anthropic API key'
+    required: true
+  github-token:
+    description: 'GitHub token for posting PR comments'
+    required: false
+  base-branch:
+    description: 'Base branch'
+    required: false
+    default: 'main'
+  model:
+    description: 'Claude model'
+    required: false
+    default: 'claude-sonnet-4-5-20250929'
+  threshold:
+    description: 'Risk score threshold (fail if >=)'
+    required: false
+runs:
+  using: 'node20'
+  main: 'dist/index.js'
+```
+
+Note: `github-token` currently has no default, so users must pass it explicitly in their workflow. (Expression defaults such as `default: ${{ github.token }}` are in fact supported in `action.yml` — `actions/checkout` uses exactly that — so a default could be added in a future revision if desired.) 
+ +### Workflow Example + +```yaml +name: PR Impact +on: + pull_request: + types: [opened, synchronize] + +jobs: + analyze: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: ducdmdev/pr-impact-action@v1 + with: + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} + github-token: ${{ github.token }} + threshold: 70 +``` + +### Build Configuration + +The action is bundled with tsup using **CJS format** (`format: ['cjs']`) because the GitHub Actions runner expects a CommonJS entry point at `dist/index.js`. + +**Template embedding**: Templates must be available at runtime but the action runs as a single bundled file with no access to the source repo's `templates/` directory. To solve this, a pre-build step reads `templates/system-prompt.md` and `templates/report-template.md` and generates `src/templates.ts` containing exported string constants: + +```typescript +// Auto-generated by build script — do not edit +export const SYSTEM_PROMPT = `...`; +export const REPORT_TEMPLATE = `...`; +``` + +This file is committed to the action package so the build is hermetic. The build script that generates it runs before tsup in the Turborepo pipeline. + +### Implementation Flow + +The action imports tool functions from `@pr-impact/tools-core` (bundled at build time by tsup). Flow: + +1. Read inputs (base branch, threshold, API key, GitHub token) +2. Load system prompt and report template from embedded string constants +3. Call Claude API with `temperature: 0` and tools defined, `MAX_ITERATIONS = 30` +4. Execute tool calls locally as Claude requests them (calling `tools-core` functions directly) +5. If iteration count reaches `MAX_ITERATIONS` or wall-clock time exceeds **180 seconds**, stop the loop and use whatever results are available. Append a warning to the report: "Analysis terminated early due to resource limits. Results may be incomplete." +6. Collect final report from Claude's response +7. 
Parse risk score from the report (regex for `**Risk Score**: {N}/100`). If parsing fails, log a warning and set risk score to -1 (threshold check is skipped) +8. Post report as PR comment (upsert with HTML markers) +9. If threshold is set and risk score >= threshold, exit 1 + +--- + +## Reliability & Consistency + +LLM-based analysis is non-deterministic. Even with identical inputs, results will vary between runs. + +- **Use `temperature: 0`** for the most reproducible results. This minimizes but does not eliminate variation. +- **Risk scores may vary +/-5 points** between runs on the same diff. This is inherent to LLM sampling. +- **CI threshold gates should use a buffer**: if you want to catch PRs at risk level 70+, set the threshold to 65 to account for score variance. +- **Claude Code interactive use is unaffected**: users can simply re-run `/pr-impact` if a result seems off, and follow up conversationally for clarification. + +--- + +## Cost & Performance + +Switching from deterministic analysis to an LLM agent introduces API costs and higher latency. + +| Metric | Estimate | +|---|---| +| Input tokens per analysis | 30k - 80k (depends on PR size and number of tool calls) | +| Output tokens per analysis | 2k - 4k | +| Cost per PR (Sonnet) | $0.30 - $1.50 | +| Cost per PR (Opus) | $1.00 - $5.00 | +| Latency | 30 - 90 seconds (vs. 
2-5 seconds for old deterministic approach) | + +**Recommendations**: +- Use **Haiku** for CI if cost is a primary concern (fastest, cheapest, still capable for structured analysis) +- Use **Sonnet** for balanced quality/cost (default in `action.yml`) +- Use **Opus** when maximum accuracy matters and cost is not a constraint +- **Claude Code plugin**: no API cost to the user — it uses the host Claude Code instance's context + +--- + +## Migration Plan + +### Phase 1: Tools Core + MCP Server + Templates +- Create `packages/tools-core` with 6 pure tool functions +- Create `packages/tools` as MCP server wrapping `tools-core` +- Create `templates/` with system prompt and report template +- Write tests for each tool function (unit tests, mock git) + +### Phase 2: Claude Code Plugin +- Create `packages/skill` with plugin config and skill definition +- Create `scripts/build-skill.ts` to assemble `skill.md` from templates +- Register MCP tools via `mcp.json` +- Test interactively with Claude Code + +### Phase 3: GitHub Action +- Create `packages/action` with action metadata and TypeScript entry point +- Import tool functions from `@pr-impact/tools-core` +- Create build script to embed templates as string constants +- Configure tsup for CJS output +- Test on a real PR in the repository + +### Phase 4: Cleanup +- Remove `packages/core`, `packages/cli`, `packages/mcp-server` +- Update root `package.json`, `turbo.json`, `pnpm-workspace.yaml` +- Update all documentation + +--- + +## Breaking Changes for Existing Users + +This rewrite removes the programmatic API and CLI. Users of the current packages must migrate: + +| Removed | Migration Path | +|---|---| +| `@pr-impact/core` — `analyzePR()` and all analysis functions | Use the Claude Code plugin (`/pr-impact`) for interactive analysis, or the GitHub Action for CI. There is no programmatic `analyzePR()` equivalent. | +| `@pr-impact/cli` — `pri` binary and all subcommands | Use the Claude Code plugin for local analysis. 
Use the GitHub Action for CI comment posting. | +| `@pr-impact/mcp-server` — old MCP tool definitions | Replaced by `@pr-impact/tools`. The new tools are data-only (no analysis logic). | + +`@pr-impact/tools-core` exports the raw tool functions (`gitDiff`, `readFileAtRef`, `listChangedFiles`, `searchCode`, `findImporters`, `listTestFiles`) but does **not** export any analysis or scoring logic. Analysis is performed entirely by the LLM at runtime. + +--- + +## What Gets Deleted + +| Current Package | Reason | +|---|---| +| `packages/core` | All analysis logic replaced by AI reasoning | +| `packages/cli` | Replaced by Claude Code skill | +| `packages/mcp-server` | Replaced by `packages/tools` (thinner, data-only tools) | + +## What Gets Reused + +| Component | From | In | +|---|---|---| +| `simple-git` usage patterns | `core` | `tools-core` | +| `fast-glob` file discovery | `core` | `tools-core` | +| PR comment upsert logic | `cli/github/comment-poster.ts` | `action` | +| Type interfaces (as output schema reference) | `core/types.ts` | Templates | +| Risk scoring formula and weights | `core/risk/factors.ts` | System prompt | diff --git a/docs/plans/2026-02-11-ai-agent-rewrite-plan.md b/docs/plans/2026-02-11-ai-agent-rewrite-plan.md new file mode 100644 index 0000000..696bc5e --- /dev/null +++ b/docs/plans/2026-02-11-ai-agent-rewrite-plan.md @@ -0,0 +1,3005 @@ +# AI Agent Rewrite Implementation Plan + +> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task. + +**Goal:** Replace all deterministic TypeScript analysis code with an AI agent that performs PR impact analysis via prompt templates and MCP tools. + +**Architecture:** Four new packages (`tools-core`, `tools`, `skill`, `action`) replace the existing three (`core`, `cli`, `mcp-server`). `tools-core` contains pure tool handler functions with no framework dependency. `tools` wraps them as an MCP server. `action` imports them for Claude API tool_use. 
`skill` is the Claude Code plugin. Shared prompt/report templates define the analysis methodology and are embedded at build time. + +**Tech Stack:** TypeScript (ESM, strict mode, `.js` import extensions), `simple-git` + `fast-glob` (tools-core), `@modelcontextprotocol/sdk` + `zod` (tools), `@anthropic-ai/sdk` (action), `@actions/core` + `@actions/github` (action). + +**Design Doc:** `docs/plans/2026-02-11-ai-agent-rewrite-design.md` + +--- + +## Phase 1: Shared Templates + +### Task 1: Create system prompt template + +**Files:** +- Create: `templates/system-prompt.md` + +**Step 1: Create the system prompt** + +Create `templates/system-prompt.md` with this exact content: + +```markdown +You are a PR impact analyzer. Given access to a git repository via MCP tools, analyze a pull request and produce a structured impact report. + +## Available Tools + +- `git_diff` — Get the raw diff between two branches (optionally for a single file) +- `read_file_at_ref` — Read a file's content at a specific git ref (branch/commit) +- `list_changed_files` — List all files changed between two branches with stats and status +- `search_code` — Search for a regex pattern across the codebase +- `find_importers` — Find all files that import a given module path +- `list_test_files` — Find test files associated with a given source file + +## Analysis Steps + +Follow these steps in order. Use the tools to gather evidence — never guess about file contents or imports. + +### Step 1: Diff Overview + +Call `list_changed_files` to get all changed files. 
Categorize each file: +- **source**: `.ts`, `.tsx`, `.js`, `.jsx` files that are not tests +- **test**: files in `__tests__/`, `test/`, `tests/` directories, or files matching `*.test.*`, `*.spec.*` +- **doc**: `.md`, `.mdx`, `.rst`, `.txt` files +- **config**: `package.json`, `tsconfig.json`, `.eslintrc.*`, `Dockerfile`, CI/CD files, bundler configs +- **other**: everything else + +### Step 2: Breaking Change Detection + +For each changed **source** file that likely exports public API symbols: +1. Call `read_file_at_ref` with the base branch ref to get the old version +2. Call `read_file_at_ref` with the head branch ref to get the new version +3. Compare exported functions, classes, types, interfaces, enums, and variables +4. Identify breaking changes: + - **Removed export**: a symbol that existed in base but is gone in head + - **Changed signature**: function parameters changed (added required params, removed params, changed types) + - **Changed type**: interface/type fields changed in incompatible ways + - **Renamed export**: a symbol was renamed (removed + similar new one added) +5. For each breaking change, call `find_importers` to find downstream consumers +6. Assign severity: + - **high**: removed or renamed exports, removed required interface fields + - **medium**: changed function signatures, changed return types + - **low**: changed optional fields, added required fields to interfaces + +### Step 3: Test Coverage Gaps + +For each changed source file: +1. Call `list_test_files` to find associated test files +2. Check if any of those test files appear in the changed file list from Step 1 +3. Calculate coverage ratio: `sourceFilesWithTestChanges / changedSourceFiles` +4. Flag each source file that changed without corresponding test updates + +### Step 4: Documentation Staleness + +For each changed **doc** file AND for each doc file that references changed source files: +1. Call `read_file_at_ref` (head ref) to read the doc content +2. 
Look for references to symbols, file paths, or function names that were modified or removed +3. Flag stale references with the line number and reason + +If no doc files are in the diff, call `search_code` with pattern matching changed symbol names in `*.md` files to find docs that reference them. + +### Step 5: Impact Graph + +For each changed source file: +1. Call `find_importers` to find direct consumers +2. For each direct consumer, call `find_importers` again to find indirect consumers (up to 2 levels deep) +3. Classify files as **directly changed** (in the diff) or **indirectly affected** (consumers not in the diff) + +### Step 6: Risk Assessment + +Score each factor from 0 to 100, then compute the weighted average: + +| Factor | Weight | Scoring | +|--------|--------|---------| +| Breaking changes | 0.30 | `100` if any high-severity, `60` if medium-only, `30` if low-only, `0` if none | +| Untested changes | 0.25 | `(1 - coverageRatio) * 100` | +| Diff size | 0.15 | `0` if <100 total lines, `50` if 100-500, `80` if 500-1000, `100` if >1000 | +| Stale documentation | 0.10 | `min(staleReferences * 20, 100)` | +| Config file changes | 0.10 | `100` if CI/build config, `50` if other config, `0` if none | +| Impact breadth | 0.10 | `min(indirectlyAffectedFiles * 10, 100)` | + +**Formula:** `score = sum(factor_score * weight)` (weights sum to 1.0) + +**Risk levels:** 0-25 = low, 26-50 = medium, 51-75 = high, 76-100 = critical + +## Rules + +- Always call tools to verify — never guess about file contents, imports, or test file existence. +- Always use `git_diff` with the `file` parameter to inspect files individually. Never load the full diff at once. +- If >30 changed files, only call `read_file_at_ref` for files with >50 lines changed. +- If >50 changed files, skip the documentation staleness check (Step 4). +- Call `find_importers` only for directly changed source files, not for indirect consumers. +- Focus on exported/public symbols for breaking change detection. 
Internal/private changes are lower priority. +- Categorize every finding with severity and cite evidence (file path, line, before/after). +- Be precise with the risk score calculation — show your math in the factor breakdown. +``` + +**Step 2: Commit** + +```bash +git add templates/system-prompt.md +git commit -m "feat: add system prompt template for AI agent analysis" +``` + +--- + +### Task 2: Create report template + +**Files:** +- Create: `templates/report-template.md` + +**Step 1: Create the report template** + +Create `templates/report-template.md` with this exact content: + +```markdown +Output your analysis using exactly this structure. Fill in all sections. If a section has no findings, write "None" under it. + +# PR Impact Report + +## Summary +- **Risk Score**: {score}/100 ({level}) +- **Files Changed**: {total} ({source} source, {test} test, {doc} doc, {config} config, {other} other) +- **Total Lines Changed**: {additions} additions, {deletions} deletions +- **Breaking Changes**: {count} ({high} high, {medium} medium, {low} low) +- **Test Coverage**: {ratio}% of changed source files have corresponding test updates +- **Stale Doc References**: {count} +- **Impact Breadth**: {direct} directly changed, {indirect} indirectly affected + +## Breaking Changes + +| File | Type | Symbol | Before | After | Severity | Consumers | +|------|------|--------|--------|-------|----------|-----------| +| {filePath} | {removed_export/changed_signature/changed_type/renamed_export} | {symbolName} | {before signature/definition} | {after signature/definition or "removed"} | {high/medium/low} | {comma-separated consumer file paths} | + +## Test Coverage Gaps + +| Source File | Expected Test File | Test Exists | Test Updated | +|-------------|-------------------|-------------|--------------| +| {sourceFile} | {testFile} | {yes/no} | {yes/no} | + +## Stale Documentation + +| Doc File | Line | Reference | Reason | +|----------|------|-----------|--------| +| {docFile} | 
{lineNumber} | {reference text} | {why it's stale} | + +## Impact Graph + +### Directly Changed Files +- {filePath} ({additions}+, {deletions}-) + +### Indirectly Affected Files +- {filePath} — imported by {consumer}, which is directly changed + +## Risk Factor Breakdown + +| Factor | Score | Weight | Weighted | Details | +|--------|-------|--------|----------|---------| +| Breaking changes | {0-100} | 0.30 | {score*0.30} | {description} | +| Untested changes | {0-100} | 0.25 | {score*0.25} | {coverageRatio}% coverage | +| Diff size | {0-100} | 0.15 | {score*0.15} | {totalLines} total lines changed | +| Stale documentation | {0-100} | 0.10 | {score*0.10} | {count} stale references | +| Config file changes | {0-100} | 0.10 | {score*0.10} | {description} | +| Impact breadth | {0-100} | 0.10 | {score*0.10} | {count} indirectly affected files | +| **Total** | | **1.00** | **{total}** | | + +## Recommendations + +Based on the analysis above, here are the recommended actions before merging: + +1. {actionable recommendation with specific file/symbol references} +2. {actionable recommendation} +3. 
{actionable recommendation} +``` + +**Step 2: Commit** + +```bash +git add templates/report-template.md +git commit -m "feat: add report output template for AI agent analysis" +``` + +--- + +## Phase 2: Tools Core Package + +### Task 3: Scaffold `packages/tools-core` package + +**Files:** +- Create: `packages/tools-core/package.json` +- Create: `packages/tools-core/tsconfig.json` +- Create: `packages/tools-core/tsup.config.ts` + +**Step 1: Create package.json** + +```json +{ + "name": "@pr-impact/tools-core", + "version": "1.0.0", + "description": "Pure tool handler functions for git/repo operations — no framework dependency", + "type": "module", + "main": "./dist/index.js", + "module": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + } + }, + "files": [ + "dist" + ], + "license": "MIT", + "engines": { + "node": ">=20.0.0" + }, + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/ducdmdev/pr-impact.git", + "directory": "packages/tools-core" + }, + "scripts": { + "build": "tsup", + "clean": "rm -rf dist" + }, + "dependencies": { + "simple-git": "^3.27.0", + "fast-glob": "^3.3.0" + }, + "devDependencies": { + "tsup": "^8.0.0", + "typescript": "~5.7.0", + "@types/node": "^22.0.0" + } +} +``` + +Note: the `"types"` condition must come first in the `exports` map — conditional exports are matched in order, and TypeScript's `node16`/`bundler` resolution never sees `"types"` if `"import"` is listed before it. + +**Step 2: Create tsconfig.json** + +```json +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "__tests__"] +} +``` + +**Step 3: Create tsup.config.ts** + +```typescript +import { defineConfig } from 'tsup'; + +export default defineConfig({ + entry: ['src/index.ts'], + format: ['esm'], + dts: true, + clean: true, + sourcemap: true, +}); +``` + +**Step 4: Install dependencies** + +Run `pnpm install` from the repository root. + +**Step 5: Commit** + +```bash +git add
packages/tools-core/package.json packages/tools-core/tsconfig.json packages/tools-core/tsup.config.ts pnpm-lock.yaml +git commit -m "feat(tools-core): scaffold @pr-impact/tools-core package" +``` + +--- + +### Task 4: Implement `git_diff` handler + +**Files:** +- Create: `packages/tools-core/src/tools/git-diff.ts` +- Create: `packages/tools-core/__tests__/git-diff.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/git-diff.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { gitDiff } from '../src/tools/git-diff.js'; + +const mockGit = { + diff: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('gitDiff', () => { + it('returns full diff between two branches', async () => { + mockGit.diff.mockResolvedValue('diff --git a/src/foo.ts b/src/foo.ts\n--- a/src/foo.ts\n+++ b/src/foo.ts\n@@ -1 +1 @@\n-old\n+new'); + + const result = await gitDiff({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.diff).toHaveBeenCalledWith(['main...HEAD']); + expect(result.diff).toContain('diff --git'); + }); + + it('returns diff for a single file when file parameter is provided', async () => { + mockGit.diff.mockResolvedValue('diff for single file'); + + const result = await gitDiff({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + file: 'src/foo.ts', + }); + + expect(mockGit.diff).toHaveBeenCalledWith(['main...HEAD', '--', 'src/foo.ts']); + expect(result.diff).toBe('diff for single file'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.diff.mockResolvedValue('some diff'); + + await gitDiff({ base: 'main', head: 'HEAD' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws 
on failure', async () => { + mockGit.diff.mockRejectedValue(new Error('not a git repo')); + + await expect(gitDiff({ base: 'main', head: 'HEAD' })).rejects.toThrow('not a git repo'); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/git-diff.test.ts` +Expected: FAIL — module `../src/tools/git-diff.js` not found + +**Step 3: Implement the handler** + +Create `packages/tools-core/src/tools/git-diff.ts`: + +```typescript +import { simpleGit } from 'simple-git'; + +export interface GitDiffParams { + repoPath?: string; + base: string; + head: string; + file?: string; +} + +export interface GitDiffResult { + diff: string; +} + +export async function gitDiff(params: GitDiffParams): Promise { + const git = simpleGit(params.repoPath ?? process.cwd()); + const args = [`${params.base}...${params.head}`]; + if (params.file) { + args.push('--', params.file); + } + const diff = await git.diff(args); + return { diff }; +} +``` + +**Step 4: Run test to verify it passes** + +Run: `npx vitest run packages/tools-core/__tests__/git-diff.test.ts` +Expected: PASS (4 tests) + +**Step 5: Commit** + +```bash +git add packages/tools-core/src/tools/git-diff.ts packages/tools-core/__tests__/git-diff.test.ts +git commit -m "feat(tools-core): implement git_diff handler" +``` + +--- + +### Task 5: Implement `read_file_at_ref` handler + +**Files:** +- Create: `packages/tools-core/src/tools/read-file.ts` +- Create: `packages/tools-core/__tests__/read-file.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/read-file.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { readFileAtRef } from '../src/tools/read-file.js'; + +const mockGit = { + show: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as 
never); +}); + +describe('readFileAtRef', () => { + it('reads a file at a specific git ref', async () => { + mockGit.show.mockResolvedValue('export function foo() {}'); + + const result = await readFileAtRef({ + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.show).toHaveBeenCalledWith(['main:src/foo.ts']); + expect(result.content).toBe('export function foo() {}'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.show.mockResolvedValue('content'); + + await readFileAtRef({ ref: 'main', filePath: 'src/foo.ts' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws when file does not exist at ref', async () => { + mockGit.show.mockRejectedValue(new Error('path not found')); + + await expect( + readFileAtRef({ repoPath: '/repo', ref: 'main', filePath: 'src/missing.ts' }), + ).rejects.toThrow('path not found'); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/read-file.test.ts` +Expected: FAIL + +**Step 3: Implement the handler** + +Create `packages/tools-core/src/tools/read-file.ts`: + +```typescript +import { simpleGit } from 'simple-git'; + +export interface ReadFileAtRefParams { + repoPath?: string; + ref: string; + filePath: string; +} + +export interface ReadFileAtRefResult { + content: string; +} + +export async function readFileAtRef(params: ReadFileAtRefParams): Promise { + const git = simpleGit(params.repoPath ?? 
process.cwd()); + const content = await git.show([`${params.ref}:${params.filePath}`]); + return { content }; +} +``` + +**Step 4: Run test to verify it passes** + +Run: `npx vitest run packages/tools-core/__tests__/read-file.test.ts` +Expected: PASS + +**Step 5: Commit** + +```bash +git add packages/tools-core/src/tools/read-file.ts packages/tools-core/__tests__/read-file.test.ts +git commit -m "feat(tools-core): implement read_file_at_ref handler" +``` + +--- + +### Task 6: Implement `list_changed_files` handler + +This handler returns `{ path, status, additions, deletions }` per file. Status is derived by running `git diff --name-status` to get proper add/modify/delete/rename status, then merging with `diffSummary` for line counts. + +**Files:** +- Create: `packages/tools-core/src/tools/list-files.ts` +- Create: `packages/tools-core/__tests__/list-files.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/list-files.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { listChangedFiles } from '../src/tools/list-files.js'; + +const mockGit = { + diff: vi.fn(), + diffSummary: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('listChangedFiles', () => { + it('returns list of changed files with status and stats', async () => { + mockGit.diff.mockResolvedValue('M\tsrc/foo.ts\nA\tsrc/bar.ts\nD\told.ts\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'src/foo.ts', insertions: 10, deletions: 3, binary: false }, + { file: 'src/bar.ts', insertions: 20, deletions: 0, binary: false }, + { file: 'old.ts', insertions: 0, deletions: 15, binary: false }, + ], + insertions: 30, + deletions: 18, + }); + + const result = await listChangedFiles({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', 
+ }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.diff).toHaveBeenCalledWith(['--name-status', 'main...HEAD']); + expect(mockGit.diffSummary).toHaveBeenCalledWith(['main...HEAD']); + expect(result.files).toHaveLength(3); + expect(result.files[0]).toEqual({ + path: 'src/foo.ts', + status: 'modified', + additions: 10, + deletions: 3, + }); + expect(result.files[1]).toEqual({ + path: 'src/bar.ts', + status: 'added', + additions: 20, + deletions: 0, + }); + expect(result.files[2]).toEqual({ + path: 'old.ts', + status: 'deleted', + additions: 0, + deletions: 15, + }); + expect(result.totalAdditions).toBe(30); + expect(result.totalDeletions).toBe(18); + }); + + it('handles renamed files (R status with score)', async () => { + mockGit.diff.mockResolvedValue('R100\told-name.ts\tnew-name.ts\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'new-name.ts', insertions: 0, deletions: 0, binary: false }, + ], + insertions: 0, + deletions: 0, + }); + + const result = await listChangedFiles({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(result.files).toHaveLength(1); + expect(result.files[0]).toEqual({ + path: 'new-name.ts', + status: 'renamed', + additions: 0, + deletions: 0, + }); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.diff.mockResolvedValue(''); + mockGit.diffSummary.mockResolvedValue({ + files: [], + insertions: 0, + deletions: 0, + }); + + await listChangedFiles({ base: 'main', head: 'HEAD' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws on failure', async () => { + mockGit.diff.mockRejectedValue(new Error('bad revision')); + + await expect( + listChangedFiles({ base: 'main', head: 'HEAD' }), + ).rejects.toThrow('bad revision'); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/list-files.test.ts` +Expected: FAIL + +**Step 3: Implement the handler** + +Create 
`packages/tools-core/src/tools/list-files.ts`: + +```typescript +import { simpleGit } from 'simple-git'; + +export interface ListChangedFilesParams { + repoPath?: string; + base: string; + head: string; +} + +export type FileStatus = 'added' | 'modified' | 'deleted' | 'renamed' | 'copied'; + +export interface ChangedFileEntry { + path: string; + status: FileStatus; + additions: number; + deletions: number; +} + +export interface ListChangedFilesResult { + files: ChangedFileEntry[]; + totalAdditions: number; + totalDeletions: number; +} + +export async function listChangedFiles(params: ListChangedFilesParams): Promise { + const git = simpleGit(params.repoPath ?? process.cwd()); + const range = `${params.base}...${params.head}`; + + // Get file status (A/M/D/R/C) from --name-status + const nameStatusOutput = await git.diff(['--name-status', range]); + const statusMap = parseNameStatus(nameStatusOutput); + + // Get line counts from diffSummary + const summary = await git.diffSummary([range]); + + const files: ChangedFileEntry[] = summary.files.map((f) => ({ + path: f.file, + status: statusMap.get(f.file) ?? 'modified', + additions: f.insertions, + deletions: f.deletions, + })); + + return { + files, + totalAdditions: summary.insertions, + totalDeletions: summary.deletions, + }; +} + +function parseNameStatus(output: string): Map { + const map = new Map(); + const lines = output.trim().split('\n').filter(Boolean); + + for (const line of lines) { + const parts = line.split('\t'); + if (parts.length < 2) continue; + + const statusCode = parts[0].charAt(0); + let filePath: string; + + if (statusCode === 'R' || statusCode === 'C') { + // Renamed/Copied: status\told-path\tnew-path + filePath = parts[2] ?? 
parts[1]; + } else { + filePath = parts[1]; + } + + map.set(filePath, mapStatusCode(statusCode)); + } + + return map; +} + +function mapStatusCode(code: string): FileStatus { + switch (code) { + case 'A': return 'added'; + case 'D': return 'deleted'; + case 'R': return 'renamed'; + case 'C': return 'copied'; + case 'M': + default: + return 'modified'; + } +} +``` + +**Step 4: Run test to verify it passes** + +Run: `npx vitest run packages/tools-core/__tests__/list-files.test.ts` +Expected: PASS + +**Step 5: Commit** + +```bash +git add packages/tools-core/src/tools/list-files.ts packages/tools-core/__tests__/list-files.test.ts +git commit -m "feat(tools-core): implement list_changed_files handler with status field" +``` + +--- + +### Task 7: Implement `search_code` handler + +This handler runs `git grep` via `git.raw()`, guarding the pattern with `-e` (so patterns beginning with a dash are safe), passing the glob as a pathspec after `--`, and handling exit code 1 (no matches) gracefully. + +**Files:** +- Create: `packages/tools-core/src/tools/search-code.ts` +- Create: `packages/tools-core/__tests__/search-code.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/search-code.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { searchCode } from '../src/tools/search-code.js'; + +const mockGit = { + raw: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('searchCode', () => { + it('searches for a pattern and returns matches', async () => { + mockGit.raw.mockResolvedValue( + 'src/foo.ts:5:export function doStuff() {\n' + + 'src/bar.ts:12:import { doStuff } from "./foo"\n', + ); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'doStuff', + }); + + expect(mockGit.raw).toHaveBeenCalledWith(['grep', '-n', '-e', 'doStuff']); + expect(result.matches).toHaveLength(2); +
expect(result.matches[0]).toEqual({ + file: 'src/foo.ts', + line: 5, + match: 'export function doStuff() {', + }); + expect(result.matches[1]).toEqual({ + file: 'src/bar.ts', + line: 12, + match: 'import { doStuff } from "./foo"', + }); + }); + + it('passes glob parameter to filter files', async () => { + mockGit.raw.mockResolvedValue('docs/api.md:3:doStuff reference\n'); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'doStuff', + glob: '*.md', + }); + + expect(mockGit.raw).toHaveBeenCalledWith(['grep', '-n', '-e', 'doStuff', '--', '*.md']); + expect(result.matches).toHaveLength(1); + expect(result.matches[0].file).toBe('docs/api.md'); + }); + + it('returns empty matches when git grep finds nothing (exit code 1)', async () => { + const error = new Error('process exited with code 1'); + mockGit.raw.mockRejectedValue(error); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'nonexistent', + }); + + expect(result.matches).toHaveLength(0); + }); + + it('treats non-exit-code-1 errors as no matches', async () => { + const error = new Error('fatal: not a git repository'); + mockGit.raw.mockRejectedValue(error); + + // We cannot reliably distinguish exit-code-1 ("no matches") from other + // grep failures by error message content across git versions, so the + // implementation treats all grep errors as "no matches" to be safe.
+ const result = await searchCode({ + repoPath: '/repo', + pattern: 'anything', + }); + + expect(result.matches).toHaveLength(0); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/search-code.test.ts` +Expected: FAIL + +**Step 3: Implement the handler** + +Create `packages/tools-core/src/tools/search-code.ts`: + +```typescript +import { simpleGit } from 'simple-git'; + +export interface SearchCodeParams { + repoPath?: string; + pattern: string; + glob?: string; +} + +export interface SearchMatch { + file: string; + line: number; + match: string; +} + +export interface SearchCodeResult { + matches: SearchMatch[]; +} + +export async function searchCode(params: SearchCodeParams): Promise<SearchCodeResult> { + const git = simpleGit(params.repoPath ?? process.cwd()); + + // Build raw git grep command to properly support glob filtering. + // Using git.raw() instead of git.grep() because simple-git's grep() + // does not reliably pass glob path specs. + // `-e` guards patterns that begin with a dash; `--` separates the + // pattern from the pathspec (glob). Note that `git grep -n -- <pattern>` + // would treat the pattern as a pathspec and fail with "no pattern given". + const args = ['grep', '-n', '-e', params.pattern]; + if (params.glob) { + args.push('--', params.glob); + } + + let output: string; + try { + output = await git.raw(args); + } catch { + // git grep exits with code 1 when no matches are found. + // Treat all grep errors as "no matches" since we cannot reliably + // distinguish exit-code-1 from other errors in all environments.
+ return { matches: [] }; + } + + const matches: SearchMatch[] = []; + const lines = output.trim().split('\n').filter(Boolean); + + for (const line of lines) { + // Format: file:line:content + const firstColon = line.indexOf(':'); + if (firstColon === -1) continue; + const secondColon = line.indexOf(':', firstColon + 1); + if (secondColon === -1) continue; + + const file = line.slice(0, firstColon); + const lineNum = parseInt(line.slice(firstColon + 1, secondColon), 10); + const matchText = line.slice(secondColon + 1); + + if (!isNaN(lineNum)) { + matches.push({ file, line: lineNum, match: matchText }); + } + } + + return { matches }; +} +``` + +**Step 4: Run test to verify it passes** + +Run: `npx vitest run packages/tools-core/__tests__/search-code.test.ts` +Expected: PASS + +**Step 5: Commit** + +```bash +git add packages/tools-core/src/tools/search-code.ts packages/tools-core/__tests__/search-code.test.ts +git commit -m "feat(tools-core): implement search_code handler with glob support and exit-code-1 handling" +``` + +--- + +### Task 8: Implement `find_importers` handler with session cache + +This handler builds a reverse dependency map and caches it for the session. Subsequent calls reuse the cache. A `clearImporterCache()` function is exported for testing. 
+ +**Files:** +- Create: `packages/tools-core/src/tools/find-imports.ts` +- Create: `packages/tools-core/__tests__/find-imports.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/find-imports.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { readFile } from 'fs/promises'; + +vi.mock('fast-glob', () => ({ + default: vi.fn(), +})); + +vi.mock('fs/promises', () => ({ + readFile: vi.fn(), +})); + +import fg from 'fast-glob'; +import { findImporters, clearImporterCache } from '../src/tools/find-imports.js'; + +beforeEach(() => { + vi.clearAllMocks(); + clearImporterCache(); +}); + +describe('findImporters', () => { + it('finds files that import a given module', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/bar.ts', + '/repo/src/baz.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('bar.ts')) { + return 'import { doStuff } from "./foo.js";\nconsole.log(doStuff());' as never; + } + if (String(path).endsWith('baz.ts')) { + return 'import { other } from "./utils.js";\nconsole.log(other());' as never; + } + if (String(path).endsWith('foo.ts')) { + return 'export function doStuff() { return 1; }' as never; + } + return '' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toContain('src/bar.ts'); + expect(result.importers).not.toContain('src/baz.ts'); + }); + + it('returns empty array when no importers found', async () => { + vi.mocked(fg).mockResolvedValue(['/repo/src/bar.ts']); + vi.mocked(readFile).mockResolvedValue('const x = 1;' as never); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toHaveLength(0); + }); + + it('caches the reverse dependency map across calls', async () => { + vi.mocked(fg).mockResolvedValue([ + 
'/repo/src/bar.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('bar.ts')) { + return 'import { doStuff } from "./foo.js";' as never; + } + return 'export function doStuff() {}' as never; + }); + + // First call builds the cache + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + + // Second call should reuse the cache — fg should NOT be called again + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + }); + + it('clearImporterCache forces rebuild on next call', async () => { + vi.mocked(fg).mockResolvedValue(['/repo/src/bar.ts']); + vi.mocked(readFile).mockResolvedValue('const x = 1;' as never); + + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + + clearImporterCache(); + + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(2); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/find-imports.test.ts` +Expected: FAIL + +**Step 3: Implement the handler** + +Create `packages/tools-core/src/tools/find-imports.ts`: + +```typescript +import fg from 'fast-glob'; +import { readFile } from 'fs/promises'; +import { relative, resolve, dirname } from 'path'; + +export interface FindImportersParams { + repoPath?: string; + modulePath: string; +} + +export interface FindImportersResult { + importers: string[]; +} + +const IMPORT_RE = /(?:import|export)\s+(?:[\s\S]*?\s+from\s+)?['"]([^'"]+)['"]/g; +const DYNAMIC_IMPORT_RE = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g; +const REQUIRE_RE = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g; +const EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx']; + +// Session-level cache: maps repoPath -> reverse dependency map. 
+// The reverse dep map maps a normalized module base -> list of importer relative paths. +let cachedRepoPath: string | null = null; +let cachedReverseMap: Map | null = null; + +export function clearImporterCache(): void { + cachedRepoPath = null; + cachedReverseMap = null; +} + +export async function findImporters(params: FindImportersParams): Promise { + const repoPath = params.repoPath ?? process.cwd(); + const targetModule = params.modulePath; + + // Build or reuse cached reverse dependency map + if (cachedRepoPath !== repoPath || cachedReverseMap === null) { + cachedReverseMap = await buildReverseMap(repoPath); + cachedRepoPath = repoPath; + } + + // Look up importers from the reverse map + const targetBase = normalizeModulePath(targetModule); + const importers = cachedReverseMap.get(targetBase) ?? []; + + return { importers: [...importers] }; +} + +async function buildReverseMap(repoPath: string): Promise> { + const reverseMap = new Map(); + + const absolutePaths = await fg('**/*.{ts,tsx,js,jsx}', { + cwd: repoPath, + ignore: ['**/node_modules/**', '**/dist/**', '**/.git/**'], + absolute: true, + }); + + for (const absPath of absolutePaths) { + const relPath = relative(repoPath, absPath); + let content: string; + try { + content = await readFile(absPath, 'utf-8'); + } catch { + continue; + } + + const importPaths = extractImports(content); + for (const importPath of importPaths) { + if (!importPath.startsWith('./') && !importPath.startsWith('../')) continue; + + const resolvedBase = resolveAndNormalize(importPath, relPath); + if (resolvedBase === null) continue; + + const existing = reverseMap.get(resolvedBase); + if (existing) { + if (!existing.includes(relPath)) { + existing.push(relPath); + } + } else { + reverseMap.set(resolvedBase, [relPath]); + } + } + } + + return reverseMap; +} + +function extractImports(content: string): string[] { + const paths: string[] = []; + for (const re of [IMPORT_RE, DYNAMIC_IMPORT_RE, REQUIRE_RE]) { + const pattern = new 
RegExp(re.source, re.flags); + let match: RegExpExecArray | null; + while ((match = pattern.exec(content)) !== null) { + paths.push(match[1]); + } + } + return paths; +} + +function resolveAndNormalize(importPath: string, importerRelPath: string): string | null { + const importerDir = dirname(importerRelPath); + const resolved = resolve('/', importerDir, importPath).slice(1); + return normalizeModulePath(resolved); +} + +function normalizeModulePath(modulePath: string): string { + // Strip leading slash if present + let normalized = modulePath.startsWith('/') ? modulePath.slice(1) : modulePath; + // Strip known extensions for consistent lookup + for (const ext of EXTENSIONS) { + if (normalized.endsWith(ext)) { + normalized = normalized.slice(0, -ext.length); + break; + } + } + return normalized; +} +``` + +**Step 4: Run test to verify it passes** + +Run: `npx vitest run packages/tools-core/__tests__/find-imports.test.ts` +Expected: PASS + +**Step 5: Commit** + +```bash +git add packages/tools-core/src/tools/find-imports.ts packages/tools-core/__tests__/find-imports.test.ts +git commit -m "feat(tools-core): implement find_importers handler with session-level cache" +``` + +--- + +### Task 9: Implement `list_test_files` handler and create barrel exports + +**Files:** +- Create: `packages/tools-core/src/tools/list-tests.ts` +- Create: `packages/tools-core/__tests__/list-tests.test.ts` +- Create: `packages/tools-core/src/index.ts` + +**Step 1: Write the failing test** + +Create `packages/tools-core/__tests__/list-tests.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('fast-glob', () => ({ + default: vi.fn(), +})); + +import fg from 'fast-glob'; +import { listTestFiles } from '../src/tools/list-tests.js'; + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('listTestFiles', () => { + it('finds test files for a given source file', async () => { + 
vi.mocked(fg).mockResolvedValue(['src/utils/__tests__/parser.test.ts']); + + const result = await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/parser.ts', + }); + + expect(result.testFiles).toContain('src/utils/__tests__/parser.test.ts'); + }); + + it('returns empty array when no test files found', async () => { + vi.mocked(fg).mockResolvedValue([]); + + const result = await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/obscure.ts', + }); + + expect(result.testFiles).toHaveLength(0); + }); + + it('generates candidates for sibling, __tests__, test, and tests directories', async () => { + vi.mocked(fg).mockResolvedValue([]); + + await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/parser.ts', + }); + + // Verify that fg was called with candidate patterns + const candidates = vi.mocked(fg).mock.calls[0][0] as string[]; + expect(candidates).toContain('src/utils/parser.test.ts'); + expect(candidates).toContain('src/utils/parser.spec.ts'); + expect(candidates).toContain('src/utils/__tests__/parser.ts'); + expect(candidates).toContain('src/utils/__tests__/parser.test.ts'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + vi.mocked(fg).mockResolvedValue([]); + + await listTestFiles({ sourceFile: 'src/foo.ts' }); + + expect(vi.mocked(fg).mock.calls[0][1]).toEqual( + expect.objectContaining({ cwd: process.cwd() }), + ); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/tools-core/__tests__/list-tests.test.ts` +Expected: FAIL + +**Step 3: Implement the handler** + +Create `packages/tools-core/src/tools/list-tests.ts`: + +```typescript +import fg from 'fast-glob'; +import { posix as path } from 'node:path'; + +export interface ListTestFilesParams { + repoPath?: string; + sourceFile: string; +} + +export interface ListTestFilesResult { + testFiles: string[]; +} + +const TEST_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx'] as const; + +export async function 
listTestFiles(params: ListTestFilesParams): Promise<ListTestFilesResult> {
+  const repoPath = params.repoPath ?? process.cwd();
+  const candidates = buildCandidatePaths(params.sourceFile);
+
+  if (candidates.length === 0) {
+    return { testFiles: [] };
+  }
+
+  const existing = await fg(candidates, {
+    cwd: repoPath,
+    dot: false,
+    onlyFiles: true,
+  });
+
+  return { testFiles: existing };
+}
+
+function buildCandidatePaths(sourceFile: string): string[] {
+  const normalized = sourceFile.replace(/\\/g, '/');
+  const dir = path.dirname(normalized);
+  const ext = path.extname(normalized);
+  const base = path.basename(normalized, ext);
+  const subPath = stripLeadingSourceDir(normalized);
+  const subDir = path.dirname(subPath);
+  const candidates: string[] = [];
+
+  for (const testExt of TEST_EXTENSIONS) {
+    // Sibling patterns
+    candidates.push(path.join(dir, `${base}.test${testExt}`));
+    candidates.push(path.join(dir, `${base}.spec${testExt}`));
+
+    // __tests__ directory
+    const testsDir = path.join(dir, '__tests__');
+    candidates.push(path.join(testsDir, `${base}${testExt}`));
+    candidates.push(path.join(testsDir, `${base}.test${testExt}`));
+    candidates.push(path.join(testsDir, `${base}.spec${testExt}`));
+
+    // Top-level test/tests directories
+    for (const topDir of ['test', 'tests']) {
+      candidates.push(path.join(topDir, subDir, `${base}${testExt}`));
+      candidates.push(path.join(topDir, subDir, `${base}.test${testExt}`));
+      candidates.push(path.join(topDir, subDir, `${base}.spec${testExt}`));
+    }
+  }
+
+  return [...new Set(candidates)];
+}
+
+function stripLeadingSourceDir(filePath: string): string {
+  const srcIndex = filePath.lastIndexOf('src/');
+  if (srcIndex !== -1) return filePath.slice(srcIndex + 4);
+  const libIndex = filePath.lastIndexOf('lib/');
+  if (libIndex !== -1) return filePath.slice(libIndex + 4);
+  return filePath;
+}
+```
+
+**Step 4: Create barrel exports**
+
+Create `packages/tools-core/src/index.ts`:
+
+```typescript
+export { gitDiff } from 
'./tools/git-diff.js'; +export type { GitDiffParams, GitDiffResult } from './tools/git-diff.js'; + +export { readFileAtRef } from './tools/read-file.js'; +export type { ReadFileAtRefParams, ReadFileAtRefResult } from './tools/read-file.js'; + +export { listChangedFiles } from './tools/list-files.js'; +export type { + ListChangedFilesParams, + ListChangedFilesResult, + ChangedFileEntry, + FileStatus, +} from './tools/list-files.js'; + +export { searchCode } from './tools/search-code.js'; +export type { SearchCodeParams, SearchCodeResult, SearchMatch } from './tools/search-code.js'; + +export { findImporters, clearImporterCache } from './tools/find-imports.js'; +export type { FindImportersParams, FindImportersResult } from './tools/find-imports.js'; + +export { listTestFiles } from './tools/list-tests.js'; +export type { ListTestFilesParams, ListTestFilesResult } from './tools/list-tests.js'; +``` + +**Step 5: Run tests to verify they pass** + +Run: `npx vitest run packages/tools-core/__tests__/list-tests.test.ts` +Expected: PASS + +**Step 6: Build the package** + +Run: `pnpm build --filter=@pr-impact/tools-core` +Expected: Build succeeds, `packages/tools-core/dist/index.js` and `packages/tools-core/dist/index.d.ts` exist + +**Step 7: Commit** + +```bash +git add packages/tools-core/src/tools/list-tests.ts packages/tools-core/__tests__/list-tests.test.ts packages/tools-core/src/index.ts +git commit -m "feat(tools-core): implement list_test_files handler and create barrel exports" +``` + +--- + +## Phase 3: MCP Tools Package + +### Task 10: Scaffold `packages/tools` package + +**Files:** +- Create: `packages/tools/package.json` +- Create: `packages/tools/tsconfig.json` +- Create: `packages/tools/tsup.config.ts` + +**Step 1: Create package.json** + +```json +{ + "name": "@pr-impact/tools", + "version": "1.0.0", + "description": "MCP server providing git/repo tools for AI-powered PR impact analysis", + "type": "module", + "main": "./dist/index.js", + "exports": { + ".": 
{ + "import": "./dist/index.js" + } + }, + "bin": { + "pr-impact-tools": "./dist/index.js" + }, + "files": [ + "dist" + ], + "license": "MIT", + "engines": { + "node": ">=20.0.0" + }, + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/ducdmdev/pr-impact.git", + "directory": "packages/tools" + }, + "scripts": { + "build": "tsup", + "clean": "rm -rf dist" + }, + "dependencies": { + "@pr-impact/tools-core": "workspace:*", + "@modelcontextprotocol/sdk": "^1.12.0", + "zod": "^3.24.0" + }, + "devDependencies": { + "tsup": "^8.0.0", + "typescript": "~5.7.0", + "@types/node": "^22.0.0" + } +} +``` + +**Step 2: Create tsconfig.json** + +```json +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "__tests__"] +} +``` + +**Step 3: Create tsup.config.ts** + +```typescript +import { defineConfig } from 'tsup'; + +export default defineConfig({ + entry: ['src/index.ts'], + format: ['esm'], + clean: true, + sourcemap: true, + banner: { + js: '#!/usr/bin/env node', + }, +}); +``` + +**Step 4: Install dependencies** + +Run: `cd /Users/duc.do/Downloads/Documents/ducdm/pr-impact && pnpm install` + +**Step 5: Commit** + +```bash +git add packages/tools/package.json packages/tools/tsconfig.json packages/tools/tsup.config.ts pnpm-lock.yaml +git commit -m "feat(tools): scaffold @pr-impact/tools MCP server package" +``` + +--- + +### Task 11: Create MCP server — thin wrappers around tools-core + +Each tool file is ~15 lines: zod schema + call to tools-core handler + format as MCP result. 
+ +**Files:** +- Create: `packages/tools/src/index.ts` +- Create: `packages/tools/__tests__/index.test.ts` + +**Step 1: Write the failing test** + +Create `packages/tools/__tests__/index.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => ({ + McpServer: vi.fn().mockImplementation(() => ({ + tool: vi.fn(), + connect: vi.fn(), + close: vi.fn(), + })), +})); + +vi.mock('@modelcontextprotocol/sdk/server/stdio.js', () => ({ + StdioServerTransport: vi.fn(), +})); + +// Mock all tools-core handlers +vi.mock('@pr-impact/tools-core', () => ({ + gitDiff: vi.fn().mockResolvedValue({ diff: 'mock diff' }), + readFileAtRef: vi.fn().mockResolvedValue({ content: 'mock content' }), + listChangedFiles: vi.fn().mockResolvedValue({ files: [], totalAdditions: 0, totalDeletions: 0 }), + searchCode: vi.fn().mockResolvedValue({ matches: [] }), + findImporters: vi.fn().mockResolvedValue({ importers: [] }), + listTestFiles: vi.fn().mockResolvedValue({ testFiles: [] }), + clearImporterCache: vi.fn(), +})); + +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; + +describe('MCP server setup', () => { + it('registers all 6 tools on the server', async () => { + const mockInstance = { + tool: vi.fn(), + connect: vi.fn(), + close: vi.fn(), + }; + vi.mocked(McpServer).mockImplementation(() => mockInstance as never); + + const { registerAllTools } = await import('../src/register.js'); + registerAllTools(mockInstance as never); + + expect(mockInstance.tool).toHaveBeenCalledTimes(6); + const toolNames = mockInstance.tool.mock.calls.map((call: unknown[]) => call[0]); + expect(toolNames).toContain('git_diff'); + expect(toolNames).toContain('read_file_at_ref'); + expect(toolNames).toContain('list_changed_files'); + expect(toolNames).toContain('search_code'); + expect(toolNames).toContain('find_importers'); + expect(toolNames).toContain('list_test_files'); + }); + + it('tool handlers 
format results as MCP ToolResult', async () => {
+    const mockInstance = {
+      tool: vi.fn(),
+      connect: vi.fn(),
+      close: vi.fn(),
+    };
+    vi.mocked(McpServer).mockImplementation(() => mockInstance as never);
+
+    const { registerAllTools } = await import('../src/register.js');
+    registerAllTools(mockInstance as never);
+
+    // Find the git_diff handler and call it
+    const gitDiffCall = mockInstance.tool.mock.calls.find(
+      (call: unknown[]) => call[0] === 'git_diff',
+    );
+    expect(gitDiffCall).toBeDefined();
+
+    // The handler is the last argument (index 3)
+    const handler = gitDiffCall![3] as (params: Record<string, unknown>) => Promise<unknown>;
+    const result = await handler({ base: 'main', head: 'HEAD' });
+
+    expect(result).toEqual({
+      content: [{ type: 'text', text: expect.stringContaining('mock diff') }],
+    });
+  });
+
+  it('tool handlers return isError on failure', async () => {
+    const { gitDiff } = await import('@pr-impact/tools-core');
+    vi.mocked(gitDiff).mockRejectedValueOnce(new Error('repo not found'));
+
+    const mockInstance = {
+      tool: vi.fn(),
+      connect: vi.fn(),
+      close: vi.fn(),
+    };
+    vi.mocked(McpServer).mockImplementation(() => mockInstance as never);
+
+    const { registerAllTools } = await import('../src/register.js');
+    registerAllTools(mockInstance as never);
+
+    const gitDiffCall = mockInstance.tool.mock.calls.find(
+      (call: unknown[]) => call[0] === 'git_diff',
+    );
+    const handler = gitDiffCall![3] as (params: Record<string, unknown>) => Promise<unknown>;
+    const result = await handler({ base: 'main', head: 'HEAD' });
+
+    expect(result).toEqual({
+      content: [{ type: 'text', text: expect.stringContaining('repo not found') }],
+      isError: true,
+    });
+  });
+});
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `npx vitest run packages/tools/__tests__/index.test.ts`
+Expected: FAIL
+
+**Step 3: Create the registration module**
+
+Create `packages/tools/src/register.ts`:
+
+```typescript
+import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
+import { z } from 'zod';
+import { 
+ gitDiff, + readFileAtRef, + listChangedFiles, + searchCode, + findImporters, + listTestFiles, +} from '@pr-impact/tools-core'; + +interface ToolResult { + content: Array<{ type: 'text'; text: string }>; + isError?: boolean; +} + +function success(text: string): ToolResult { + return { content: [{ type: 'text', text }] }; +} + +function error(err: unknown): ToolResult { + const message = err instanceof Error ? err.message : String(err); + return { content: [{ type: 'text', text: `Error: ${message}` }], isError: true }; +} + +export function registerAllTools(server: McpServer): void { + server.tool( + 'git_diff', + 'Get the raw git diff between two branches, optionally for a single file', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + base: z.string().describe('Base branch or ref'), + head: z.string().describe('Head branch or ref'), + file: z.string().optional().describe('Optional file path to get diff for a single file'), + }, + async (params) => { + try { + const result = await gitDiff(params); + return success(result.diff); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'read_file_at_ref', + 'Read a file content at a specific git ref (branch or commit)', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + ref: z.string().describe('Git ref (branch name, commit SHA, or tag)'), + filePath: z.string().describe('Repo-relative file path'), + }, + async (params) => { + try { + const result = await readFileAtRef(params); + return success(result.content); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'list_changed_files', + 'List all files changed between two branches with status and addition/deletion stats', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + base: z.string().describe('Base branch or ref'), + head: z.string().describe('Head branch or ref'), + }, + async (params) => { + try { + const result = 
await listChangedFiles(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'search_code', + 'Search for a regex pattern across the codebase using git grep', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + pattern: z.string().describe('Regex pattern to search for'), + glob: z.string().optional().describe('File glob to limit search scope (e.g. "*.md")'), + }, + async (params) => { + try { + const result = await searchCode(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'find_importers', + 'Find all source files that import a given module path', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + modulePath: z.string().describe('Repo-relative path of the module to find importers for'), + }, + async (params) => { + try { + const result = await findImporters(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'list_test_files', + 'Find test files associated with a source file using naming conventions', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + sourceFile: z.string().describe('Repo-relative path of the source file'), + }, + async (params) => { + try { + const result = await listTestFiles(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); +} +``` + +**Step 4: Create the entry point** + +Create `packages/tools/src/index.ts`: + +```typescript +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; +import { registerAllTools } from './register.js'; + +const server = new McpServer({ + name: 'pr-impact-tools', + version: '1.0.0', +}); + +registerAllTools(server); + 
+async function main() { + const transport = new StdioServerTransport(); + await server.connect(transport); + + const shutdown = async () => { + await server.close(); + process.exit(0); + }; + + process.on('SIGINT', () => void shutdown()); + process.on('SIGTERM', () => void shutdown()); +} + +main().catch(console.error); +``` + +**Step 5: Run test to verify it passes** + +Run: `npx vitest run packages/tools/__tests__/index.test.ts` +Expected: PASS + +**Step 6: Build the package** + +Run: `pnpm build --filter=@pr-impact/tools` +Expected: Build succeeds, `packages/tools/dist/index.js` exists + +**Step 7: Commit** + +```bash +git add packages/tools/src/index.ts packages/tools/src/register.ts packages/tools/__tests__/index.test.ts +git commit -m "feat(tools): create MCP server as thin wrapper around tools-core" +``` + +--- + +## Phase 4: Build Scripts + +### Task 12: Create build scripts for template embedding and skill assembly + +**Files:** +- Create: `scripts/embed-templates.ts` +- Create: `scripts/build-skill.ts` + +**Step 1: Create embed-templates.ts** + +This script reads `templates/system-prompt.md` and `templates/report-template.md` and generates `packages/action/src/generated/templates.ts` with the templates as string constants. 
+ +Create `scripts/embed-templates.ts`: + +```typescript +import { readFileSync, writeFileSync, mkdirSync } from 'fs'; +import { resolve, dirname } from 'path'; +import { fileURLToPath } from 'url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const rootDir = resolve(__dirname, '..'); + +const systemPrompt = readFileSync(resolve(rootDir, 'templates/system-prompt.md'), 'utf-8'); +const reportTemplate = readFileSync(resolve(rootDir, 'templates/report-template.md'), 'utf-8'); + +const outputDir = resolve(rootDir, 'packages/action/src/generated'); +mkdirSync(outputDir, { recursive: true }); + +const outputContent = [ + '// AUTO-GENERATED — do not edit manually.', + '// Generated by scripts/embed-templates.ts from templates/*.md', + '', + 'export const SYSTEM_PROMPT = ' + JSON.stringify(systemPrompt) + ';', + '', + 'export const REPORT_TEMPLATE = ' + JSON.stringify(reportTemplate) + ';', + '', +].join('\n'); + +writeFileSync(resolve(outputDir, 'templates.ts'), outputContent, 'utf-8'); + +console.log('Generated packages/action/src/generated/templates.ts'); +``` + +**Step 2: Create build-skill.ts** + +This script reads templates and generates `packages/skill/skill.md` with the templates embedded inline. 
+
+Create `scripts/build-skill.ts`:
+
+```typescript
+import { readFileSync, writeFileSync } from 'fs';
+import { resolve, dirname } from 'path';
+import { fileURLToPath } from 'url';
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+const rootDir = resolve(__dirname, '..');
+
+const systemPrompt = readFileSync(resolve(rootDir, 'templates/system-prompt.md'), 'utf-8');
+const reportTemplate = readFileSync(resolve(rootDir, 'templates/report-template.md'), 'utf-8');
+
+const skillMd = `---
+name: pr-impact
+description: Analyze PR impact — breaking changes, test coverage gaps, doc staleness, impact graph, and risk score
+arguments:
+  - name: base
+    description: "Base branch to compare against (default: main)"
+    required: false
+  - name: head
+    description: "Head branch to analyze (default: HEAD)"
+    required: false
+---
+
+${systemPrompt}
+
+## Your Task
+
+Analyze the PR comparing branch \`$ARGUMENTS\` in the current repository. If no arguments provided, compare \`main\` to \`HEAD\`.
+
+Parse the arguments: first argument is \`base\` branch, second is \`head\` branch.
+
+Use the pr-impact MCP tools to inspect the repository. Follow all 6 analysis steps. 
Produce the report using this exact template: + +${reportTemplate} +`; + +writeFileSync(resolve(rootDir, 'packages/skill/skill.md'), skillMd, 'utf-8'); + +console.log('Generated packages/skill/skill.md'); +``` + +**Step 3: Commit** + +```bash +git add scripts/embed-templates.ts scripts/build-skill.ts +git commit -m "feat: add build scripts for template embedding and skill assembly" +``` + +--- + +## Phase 5: Claude Code Skill (Plugin) + +### Task 13: Create the Claude Code plugin package + +**Files:** +- Create: `packages/skill/package.json` +- Create: `packages/skill/.claude-plugin/config.json` +- Create: `packages/skill/mcp.json` +- Create: `packages/skill/skill.md` (generated by build script) + +**Step 1: Create package.json** + +```json +{ + "name": "@pr-impact/skill", + "version": "1.0.0", + "description": "Claude Code skill for AI-powered PR impact analysis", + "license": "MIT", + "files": [ + ".claude-plugin", + "skill.md", + "mcp.json" + ], + "scripts": { + "build": "tsx ../../scripts/build-skill.ts" + }, + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/ducdmdev/pr-impact.git", + "directory": "packages/skill" + }, + "devDependencies": { + "tsx": "^4.0.0" + } +} +``` + +**Step 2: Create plugin config** + +Create `packages/skill/.claude-plugin/config.json`: + +```json +{ + "name": "@pr-impact/skill", + "version": "1.0.0", + "description": "AI-powered PR impact analysis — detect breaking changes, map blast radius, score risk", + "skills": ["skill.md"] +} +``` + +**Step 3: Create MCP registration** + +Create `packages/skill/mcp.json`: + +```json +{ + "mcpServers": { + "pr-impact-tools": { + "command": "npx", + "args": ["-y", "@pr-impact/tools"] + } + } +} +``` + +**Step 4: Generate skill.md** + +Run: `npx tsx scripts/build-skill.ts` +Expected: `packages/skill/skill.md` is created with system prompt and report template embedded + +**Step 5: Commit** + +```bash +git add packages/skill/ +git commit -m 
"feat(skill): create Claude Code plugin for PR impact analysis" +``` + +--- + +## Phase 6: GitHub Action + +### Task 14: Scaffold `packages/action` package + +**Files:** +- Create: `packages/action/package.json` +- Create: `packages/action/tsconfig.json` +- Create: `packages/action/tsup.config.ts` +- Create: `packages/action/action.yml` + +**Step 1: Create package.json** + +Note: The `prebuild` script runs `embed-templates.ts` to generate `src/generated/templates.ts` before tsup runs. + +```json +{ + "name": "@pr-impact/action", + "version": "1.0.0", + "private": true, + "description": "GitHub Action for AI-powered PR impact analysis", + "type": "module", + "main": "./dist/index.js", + "license": "MIT", + "scripts": { + "prebuild": "tsx ../../scripts/embed-templates.ts", + "build": "tsup", + "clean": "rm -rf dist" + }, + "dependencies": { + "@pr-impact/tools-core": "workspace:*", + "@anthropic-ai/sdk": "^0.39.0", + "@actions/core": "^1.11.0", + "@actions/github": "^6.0.0" + }, + "devDependencies": { + "tsup": "^8.0.0", + "tsx": "^4.0.0", + "typescript": "~5.7.0", + "@types/node": "^22.0.0" + } +} +``` + +**Step 2: Create tsconfig.json** + +```json +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "dist", "__tests__"] +} +``` + +**Step 3: Create tsup.config.ts** + +Note: `format: ['cjs']` because GitHub Actions requires CommonJS. `noExternal: [/.*/]` bundles all dependencies into a single file. + +```typescript +import { defineConfig } from 'tsup'; + +export default defineConfig({ + entry: ['src/index.ts'], + format: ['cjs'], + clean: true, + sourcemap: true, + noExternal: [/.*/], +}); +``` + +**Step 4: Create action.yml** + +Note: `github-token` has no `default` value (the `${{ github.token }}` syntax is invalid in `action.yml`). Users must pass it explicitly. 
+ +```yaml +name: 'PR Impact Analysis' +description: 'AI-powered PR impact analysis — detect breaking changes, map blast radius, and score risk' +branding: + icon: 'shield' + color: 'blue' + +inputs: + anthropic-api-key: + description: 'Anthropic API key for Claude' + required: true + base-branch: + description: 'Base branch to compare against' + required: false + default: 'main' + model: + description: 'Claude model to use' + required: false + default: 'claude-sonnet-4-5-20250929' + threshold: + description: 'Risk score threshold — action fails if risk score >= this value' + required: false + github-token: + description: 'GitHub token for posting PR comments. Pass ${{ secrets.GITHUB_TOKEN }} in your workflow.' + required: false + +outputs: + risk-score: + description: 'The calculated risk score (0-100)' + risk-level: + description: 'The risk level (low/medium/high/critical)' + report: + description: 'The full markdown report' + +runs: + using: 'node20' + main: 'dist/index.cjs' +``` + +**Step 5: Install dependencies** + +Run: `cd /Users/duc.do/Downloads/Documents/ducdm/pr-impact && pnpm install` + +**Step 6: Commit** + +```bash +git add packages/action/package.json packages/action/tsconfig.json packages/action/tsup.config.ts packages/action/action.yml pnpm-lock.yaml +git commit -m "feat(action): scaffold GitHub Action package with CJS format" +``` + +--- + +### Task 15: Implement tool dispatcher for the GitHub Action + +The action uses `@pr-impact/tools-core` directly -- no duplicated logic. The dispatcher imports handlers and calls them, returning stringified results for the Claude API. 
+ +**Files:** +- Create: `packages/action/src/tools.ts` +- Create: `packages/action/__tests__/tools.test.ts` + +**Step 1: Write the failing test** + +Create `packages/action/__tests__/tools.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('@pr-impact/tools-core', () => ({ + gitDiff: vi.fn(), + readFileAtRef: vi.fn(), + listChangedFiles: vi.fn(), + searchCode: vi.fn(), + findImporters: vi.fn(), + listTestFiles: vi.fn(), + clearImporterCache: vi.fn(), +})); + +import { + gitDiff, + readFileAtRef, + listChangedFiles, + searchCode, + findImporters, + listTestFiles, +} from '@pr-impact/tools-core'; +import { executeTool } from '../src/tools.js'; + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('executeTool', () => { + it('dispatches git_diff tool and returns stringified result', async () => { + vi.mocked(gitDiff).mockResolvedValue({ diff: 'diff output' }); + + const result = await executeTool('git_diff', { + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(gitDiff).toHaveBeenCalledWith({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + expect(result).toContain('diff output'); + }); + + it('dispatches read_file_at_ref tool', async () => { + vi.mocked(readFileAtRef).mockResolvedValue({ content: 'file content' }); + + const result = await executeTool('read_file_at_ref', { + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + + expect(readFileAtRef).toHaveBeenCalledWith({ + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + expect(result).toContain('file content'); + }); + + it('dispatches list_changed_files tool', async () => { + vi.mocked(listChangedFiles).mockResolvedValue({ + files: [{ path: 'a.ts', status: 'modified', additions: 1, deletions: 0 }], + totalAdditions: 1, + totalDeletions: 0, + }); + + const result = await executeTool('list_changed_files', { + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + const parsed = 
JSON.parse(result);
+    expect(parsed.files).toHaveLength(1);
+    expect(parsed.files[0].status).toBe('modified');
+  });
+
+  it('dispatches search_code tool', async () => {
+    vi.mocked(searchCode).mockResolvedValue({
+      matches: [{ file: 'a.ts', line: 1, match: 'test' }],
+    });
+
+    const result = await executeTool('search_code', {
+      repoPath: '/repo',
+      pattern: 'test',
+      glob: '*.ts',
+    });
+
+    const parsed = JSON.parse(result);
+    expect(parsed.matches).toHaveLength(1);
+  });
+
+  it('dispatches find_importers tool', async () => {
+    vi.mocked(findImporters).mockResolvedValue({ importers: ['src/bar.ts'] });
+
+    const result = await executeTool('find_importers', {
+      repoPath: '/repo',
+      modulePath: 'src/foo.ts',
+    });
+
+    const parsed = JSON.parse(result);
+    expect(parsed.importers).toContain('src/bar.ts');
+  });
+
+  it('dispatches list_test_files tool', async () => {
+    vi.mocked(listTestFiles).mockResolvedValue({
+      testFiles: ['src/__tests__/foo.test.ts'],
+    });
+
+    const result = await executeTool('list_test_files', {
+      repoPath: '/repo',
+      sourceFile: 'src/foo.ts',
+    });
+
+    const parsed = JSON.parse(result);
+    expect(parsed.testFiles).toContain('src/__tests__/foo.test.ts');
+  });
+
+  it('throws for unknown tool', async () => {
+    await expect(executeTool('unknown_tool', {})).rejects.toThrow('Unknown tool: unknown_tool');
+  });
+});
+```
+
+**Step 2: Run test to verify it fails**
+
+Run: `npx vitest run packages/action/__tests__/tools.test.ts`
+Expected: FAIL
+
+**Step 3: Implement the tool dispatcher**
+
+Create `packages/action/src/tools.ts`:
+
+```typescript
+import {
+  gitDiff,
+  readFileAtRef,
+  listChangedFiles,
+  searchCode,
+  findImporters,
+  listTestFiles,
+} from '@pr-impact/tools-core';
+
+export async function executeTool(name: string, input: Record<string, unknown>): Promise<string> {
+  switch (name) {
+    case 'git_diff': {
+      const result = await gitDiff(input as Parameters<typeof gitDiff>[0]);
+      return result.diff;
+    }
+    case 'read_file_at_ref': {
+      const result = await readFileAtRef(input as 
Parameters<typeof readFileAtRef>[0]);
+      return result.content;
+    }
+    case 'list_changed_files': {
+      const result = await listChangedFiles(input as Parameters<typeof listChangedFiles>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'search_code': {
+      const result = await searchCode(input as Parameters<typeof searchCode>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'find_importers': {
+      const result = await findImporters(input as Parameters<typeof findImporters>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'list_test_files': {
+      const result = await listTestFiles(input as Parameters<typeof listTestFiles>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    default:
+      throw new Error(`Unknown tool: ${name}`);
+  }
+}
+```
+
+**Step 4: Run test to verify it passes**
+
+Run: `npx vitest run packages/action/__tests__/tools.test.ts`
+Expected: PASS
+
+**Step 5: Commit**
+
+```bash
+git add packages/action/src/tools.ts packages/action/__tests__/tools.test.ts
+git commit -m "feat(action): implement tool dispatcher using @pr-impact/tools-core"
+```
+
+---
+
+### Task 16: Implement the Anthropic API client with agentic loop
+
+The client uses embedded templates (imported from generated file), has a 30-iteration limit, a 180-second wall-clock timeout, uses `temperature: 0`, and extracts partial output on timeout. 
+ +**Files:** +- Create: `packages/action/src/client.ts` +- Create: `packages/action/__tests__/client.test.ts` + +**Step 1: Write the failing test** + +Create `packages/action/__tests__/client.test.ts`: + +```typescript +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; + +vi.mock('@anthropic-ai/sdk', () => ({ + default: vi.fn().mockImplementation(() => ({ + messages: { + create: vi.fn(), + }, + })), +})); + +vi.mock('../src/tools.js', () => ({ + executeTool: vi.fn(), +})); + +vi.mock('../src/generated/templates.js', () => ({ + SYSTEM_PROMPT: 'You are a test prompt.', + REPORT_TEMPLATE: '# Test Report Template', +})); + +import Anthropic from '@anthropic-ai/sdk'; +import { executeTool } from '../src/tools.js'; +import { runAnalysis } from '../src/client.js'; + +beforeEach(() => { + vi.clearAllMocks(); + vi.useFakeTimers(); +}); + +afterEach(() => { + vi.useRealTimers(); +}); + +describe('runAnalysis', () => { + it('calls Claude API with temperature 0 and returns the final text response', async () => { + const mockCreate = vi.fn().mockResolvedValue({ + content: [{ type: 'text', text: '# PR Impact Report\n\n## Summary\n...' 
}], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(result).toContain('# PR Impact Report'); + expect(mockCreate).toHaveBeenCalledTimes(1); + + // Verify temperature: 0 is passed + const createArgs = mockCreate.mock.calls[0][0]; + expect(createArgs.temperature).toBe(0); + }); + + it('handles tool_use responses by executing tools and continuing', async () => { + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'list_changed_files', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: '# PR Impact Report\n\nFinal report' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('{"files": []}'); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(executeTool).toHaveBeenCalledWith('list_changed_files', expect.objectContaining({ base: 'main', head: 'HEAD' })); + expect(result).toContain('Final report'); + expect(mockCreate).toHaveBeenCalledTimes(2); + }); + + it('injects repoPath into tool calls', async () => { + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: 'done' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => 
({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('diff output'); + + vi.useRealTimers(); + await runAnalysis({ + apiKey: 'test-key', + repoPath: '/my-repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(executeTool).toHaveBeenCalledWith('git_diff', expect.objectContaining({ + repoPath: '/my-repo', + })); + }); + + it('uses embedded templates (not filesystem)', async () => { + const mockCreate = vi.fn().mockResolvedValue({ + content: [{ type: 'text', text: 'report' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.useRealTimers(); + await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + const createArgs = mockCreate.mock.calls[0][0]; + expect(createArgs.system).toBe('You are a test prompt.'); + expect(createArgs.messages[0].content).toContain('# Test Report Template'); + }); +}); +``` + +**Step 2: Run test to verify it fails** + +Run: `npx vitest run packages/action/__tests__/client.test.ts` +Expected: FAIL + +**Step 3: Generate the templates file for development** + +Run: `npx tsx scripts/embed-templates.ts` +Expected: `packages/action/src/generated/templates.ts` is created + +**Step 4: Implement the client** + +Create `packages/action/src/client.ts`: + +```typescript +import Anthropic from '@anthropic-ai/sdk'; +import { executeTool } from './tools.js'; +import { SYSTEM_PROMPT, REPORT_TEMPLATE } from './generated/templates.js'; + +export interface AnalysisOptions { + apiKey: string; + repoPath: string; + baseBranch: string; + headBranch: string; + model: string; +} + +const MAX_ITERATIONS = 30; +const TIMEOUT_MS = 180_000; // 180 seconds + +const TOOL_DEFINITIONS: Anthropic.Tool[] = [ + { + name: 'git_diff', + description: 'Get the raw git diff between two branches, 
optionally for a single file', + input_schema: { + type: 'object' as const, + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + file: { type: 'string', description: 'Optional file path for single-file diff' }, + }, + required: ['base', 'head'], + }, + }, + { + name: 'read_file_at_ref', + description: 'Read a file content at a specific git ref', + input_schema: { + type: 'object' as const, + properties: { + ref: { type: 'string', description: 'Git ref (branch, commit, tag)' }, + filePath: { type: 'string', description: 'Repo-relative file path' }, + }, + required: ['ref', 'filePath'], + }, + }, + { + name: 'list_changed_files', + description: 'List files changed between two branches with status and stats', + input_schema: { + type: 'object' as const, + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + }, + required: ['base', 'head'], + }, + }, + { + name: 'search_code', + description: 'Search for a regex pattern in the codebase', + input_schema: { + type: 'object' as const, + properties: { + pattern: { type: 'string', description: 'Regex pattern' }, + glob: { type: 'string', description: 'File glob to limit scope (e.g. 
"*.md")' }, + }, + required: ['pattern'], + }, + }, + { + name: 'find_importers', + description: 'Find files that import a given module', + input_schema: { + type: 'object' as const, + properties: { + modulePath: { type: 'string', description: 'Repo-relative module path' }, + }, + required: ['modulePath'], + }, + }, + { + name: 'list_test_files', + description: 'Find test files associated with a source file', + input_schema: { + type: 'object' as const, + properties: { + sourceFile: { type: 'string', description: 'Repo-relative source file path' }, + }, + required: ['sourceFile'], + }, + }, +]; + +export async function runAnalysis(options: AnalysisOptions): Promise { + const client = new Anthropic({ apiKey: options.apiKey }); + + const userMessage = [ + `Analyze the PR comparing branch \`${options.baseBranch}\` to \`${options.headBranch}\`.`, + `Repository path: ${options.repoPath}`, + '', + 'Follow all 6 analysis steps. Produce the report using this template:', + '', + REPORT_TEMPLATE, + ].join('\n'); + + const messages: Anthropic.MessageParam[] = [ + { role: 'user', content: userMessage }, + ]; + + const startTime = Date.now(); + let lastTextOutput = ''; + + for (let i = 0; i < MAX_ITERATIONS; i++) { + // Check wall-clock timeout + if (Date.now() - startTime > TIMEOUT_MS) { + if (lastTextOutput) { + return lastTextOutput; + } + throw new Error(`Analysis timed out after ${TIMEOUT_MS / 1000} seconds`); + } + + const response = await client.messages.create({ + model: options.model, + max_tokens: 8192, + system: SYSTEM_PROMPT, + tools: TOOL_DEFINITIONS, + messages, + temperature: 0, + }); + + // Collect text blocks from this response for partial extraction + const textBlocks = response.content.filter( + (block): block is Anthropic.TextBlock => block.type === 'text', + ); + if (textBlocks.length > 0) { + lastTextOutput = textBlocks.map((b) => b.text).join('\n'); + } + + // Collect tool use blocks + const toolUseBlocks = response.content.filter( + (block): block is 
Anthropic.ToolUseBlock => block.type === 'tool_use', + ); + + if (toolUseBlocks.length === 0 || response.stop_reason === 'end_turn') { + return lastTextOutput; + } + + // Execute all tool calls and build tool results + messages.push({ role: 'assistant', content: response.content }); + + const toolResults: Anthropic.ToolResultBlockParam[] = []; + for (const toolUse of toolUseBlocks) { + try { + const input = toolUse.input as Record<string, unknown>; + // Inject repoPath into all tool calls + input.repoPath = options.repoPath; + const result = await executeTool(toolUse.name, input); + toolResults.push({ + type: 'tool_result', + tool_use_id: toolUse.id, + content: result, + }); + } catch (error) { + toolResults.push({ + type: 'tool_result', + tool_use_id: toolUse.id, + content: `Error: ${error instanceof Error ? error.message : String(error)}`, + is_error: true, + }); + } + } + + messages.push({ role: 'user', content: toolResults }); + } + + // Iteration limit hit — return whatever text we have + if (lastTextOutput) { + return lastTextOutput; + } + throw new Error('Analysis exceeded maximum iterations without producing output'); +} +``` + +**Step 5: Run test to verify it passes** + +Run: `npx vitest run packages/action/__tests__/client.test.ts` +Expected: PASS + +**Step 6: Commit** + +```bash +git add packages/action/src/client.ts packages/action/src/generated/templates.ts packages/action/__tests__/client.test.ts +git commit -m "feat(action): implement Anthropic API client with 30-iteration limit, 180s timeout, temperature 0" +``` + +--- + +### Task 17: Implement the GitHub Action entry point and comment poster + +**Files:** +- Create: `packages/action/src/comment.ts` +- Create: `packages/action/src/index.ts` + +**Step 1: Create the comment poster** + +Create `packages/action/src/comment.ts`: + +```typescript +const MARKER_START = '<!-- pr-impact-report -->'; +const MARKER_END = '<!-- /pr-impact-report -->'; + +export interface PostCommentOptions { + token: string; + repo: string; + prNumber: number; + body: string; +} + +export async 
function postOrUpdateComment(opts: PostCommentOptions): Promise<string> { + const { token, repo, prNumber, body } = opts; + const markedBody = `${MARKER_START}\n${body}\n${MARKER_END}`; + + const baseUrl = `https://api.github.com/repos/${repo}/issues/${prNumber}/comments`; + const headers: Record<string, string> = { + 'Authorization': `Bearer ${token}`, + 'Accept': 'application/vnd.github+json', + 'X-GitHub-Api-Version': '2022-11-28', + }; + + const existingId = await findExistingComment(baseUrl, headers); + + if (existingId !== null) { + const patchUrl = `https://api.github.com/repos/${repo}/issues/comments/${existingId}`; + const res = await fetch(patchUrl, { + method: 'PATCH', + headers: { ...headers, 'Content-Type': 'application/json' }, + body: JSON.stringify({ body: markedBody }), + }); + if (!res.ok) throw new Error(`GitHub API error updating comment: ${res.status}`); + const data = (await res.json()) as { html_url: string }; + return data.html_url; + } + + const res = await fetch(baseUrl, { + method: 'POST', + headers: { ...headers, 'Content-Type': 'application/json' }, + body: JSON.stringify({ body: markedBody }), + }); + if (!res.ok) throw new Error(`GitHub API error creating comment: ${res.status}`); + const data = (await res.json()) as { html_url: string }; + return data.html_url; +} + +async function findExistingComment( + baseUrl: string, + headers: Record<string, string>, +): Promise<number | null> { + let page = 1; + while (true) { + const res = await fetch(`${baseUrl}?per_page=100&page=${page}`, { headers }); + if (!res.ok) return null; + const comments = (await res.json()) as Array<{ id: number; body?: string }>; + if (comments.length === 0) break; + for (const c of comments) { + if (c.body?.includes(MARKER_START)) return c.id; + } + if (comments.length < 100) break; + page++; + } + return null; +} +``` + +**Step 2: Create the entry point** + +Note: Risk score parsing failure (-1) logs a warning and skips threshold check instead of failing or passing. 
+ +Create `packages/action/src/index.ts`: + +```typescript +import * as core from '@actions/core'; +import * as github from '@actions/github'; +import { runAnalysis } from './client.js'; +import { postOrUpdateComment } from './comment.js'; + +async function main() { + const apiKey = core.getInput('anthropic-api-key', { required: true }); + const baseBranch = core.getInput('base-branch') || 'main'; + const model = core.getInput('model') || 'claude-sonnet-4-5-20250929'; + const threshold = core.getInput('threshold'); + const githubToken = core.getInput('github-token') || process.env.GITHUB_TOKEN || ''; + + const repoPath = process.cwd(); + + core.info(`Analyzing PR: ${baseBranch}...HEAD`); + core.info(`Model: ${model}`); + + const report = await runAnalysis({ + apiKey, + repoPath, + baseBranch, + headBranch: 'HEAD', + model, + }); + + // Extract risk score from report + const scoreMatch = report.match(/\*\*Risk Score\*\*:\s*(\d+)\/100\s*\((\w+)\)/); + const riskScore = scoreMatch ? parseInt(scoreMatch[1], 10) : -1; + const riskLevel = scoreMatch ? scoreMatch[2] : 'unknown'; + + // Set outputs + core.setOutput('risk-score', String(riskScore)); + core.setOutput('risk-level', riskLevel); + core.setOutput('report', report); + + if (riskScore === -1) { + core.warning('Could not parse risk score from report. 
Skipping threshold check.'); + } else { + core.info(`Risk Score: ${riskScore}/100 (${riskLevel})`); + } + + // Post PR comment if in a PR context + const prNumber = github.context.payload.pull_request?.number; + if (prNumber && githubToken) { + const repo = `${github.context.repo.owner}/${github.context.repo.repo}`; + const commentUrl = await postOrUpdateComment({ + token: githubToken, + repo, + prNumber, + body: report, + }); + core.info(`Posted PR comment: ${commentUrl}`); + } + + // Threshold gate — only check if we successfully parsed a score + if (threshold && riskScore !== -1 && riskScore >= parseInt(threshold, 10)) { + core.setFailed(`Risk score ${riskScore} exceeds threshold ${threshold}`); + } +} + +main().catch((error) => { + core.setFailed(error instanceof Error ? error.message : String(error)); +}); +``` + +**Step 3: Build the action** + +Run: `pnpm build --filter=@pr-impact/action` +Expected: Build succeeds — prebuild generates templates, tsup bundles to CJS + +**Step 4: Commit** + +```bash +git add packages/action/src/comment.ts packages/action/src/index.ts +git commit -m "feat(action): implement GitHub Action entry point with PR comment posting and explicit risk score parsing" +``` + +--- + +## Phase 7: Workspace & Cleanup + +### Task 18: Update workspace configuration + +**Files:** +- Modify: `pnpm-workspace.yaml` (no change needed — already uses `packages/*`) +- Modify: `turbo.json` (verify build order works) +- Modify: `package.json` (root) + +**Step 1: Verify workspace includes new packages** + +The existing `pnpm-workspace.yaml` uses `packages/*` which automatically includes `tools-core`, `tools`, `skill`, and `action`. No changes needed. 
+ +**Step 2: Verify turbo config** + +The existing `turbo.json` task graph handles the dependency chain correctly: +- `build` depends on `^build` — so `tools-core` builds before `tools` and `action` (they depend on it via `workspace:*`) +- `test` depends on `build` +- No changes needed + +**Step 3: Run full build** + +Run: `pnpm install && pnpm build` +Expected: All packages build in correct order: `tools-core` -> `tools` + `action` (in parallel) + +**Step 4: Run all tests** + +Run: `pnpm test` +Expected: All tests pass + +**Step 5: Commit (only if changes were needed)** + +```bash +git add pnpm-workspace.yaml turbo.json package.json pnpm-lock.yaml +git commit -m "chore: update workspace config for new packages" +``` + +--- + +### Task 19: Remove old packages + +**Important:** Only do this after all new packages are working and tested. + +**Files:** +- Delete: `packages/core/` (entire directory) +- Delete: `packages/cli/` (entire directory) +- Delete: `packages/mcp-server/` (entire directory) + +**Step 1: Remove old packages** + +```bash +rm -rf packages/core packages/cli packages/mcp-server +``` + +**Step 2: Clean lockfile** + +Run: `pnpm install` + +**Step 3: Verify build and tests** + +Run: `pnpm build && pnpm test` +Expected: Everything passes with only the new packages. 
+ +**Step 4: Commit** + +```bash +git add -A +git commit -m "chore: remove old packages (core, cli, mcp-server) replaced by AI agent approach" +``` + +--- + +### Task 20: Update documentation + +**Files:** +- Modify: `README.md` +- Modify: `CLAUDE.md` +- Modify: `CONTRIBUTING.md` +- Modify: docs files as needed + +**Step 1: Update README.md** + +Rewrite to reflect the new architecture: +- Remove CLI commands section (no more `pri` binary) +- Update Quick Start to show plugin installation and `/pr-impact` usage +- Update MCP Server section to reference `@pr-impact/tools` +- Update Architecture section with new 4-package structure (`tools-core`, `tools`, `skill`, `action`) +- Add GitHub Action usage section with workflow example showing `github-token: ${{ secrets.GITHUB_TOKEN }}` +- Keep Risk Score section (methodology is the same, just AI-driven now) + +**Step 2: Update CLAUDE.md** + +Rewrite to reflect new package structure, conventions, and testing guidelines: +- Update architecture diagram to show `tools-core`, `tools`, `skill`, `action` +- Update quick commands +- Document the `tools-core` -> `tools` and `tools-core` -> `action` dependency relationship +- Document template embedding (prebuild for action, build script for skill) +- Update testing guidelines + +**Step 3: Commit** + +```bash +git add README.md CLAUDE.md CONTRIBUTING.md docs/ +git commit -m "docs: update documentation for AI agent architecture" +``` + +--- + +## Summary + +| Phase | Tasks | Description | +|-------|-------|-------------| +| 1 | 1-2 | Create shared prompt and report templates | +| 2 | 3-9 | Build `@pr-impact/tools-core` (6 pure tool handlers + tests + barrel exports) | +| 3 | 10-11 | Build `@pr-impact/tools` MCP server (thin wrappers around tools-core) | +| 4 | 12 | Build scripts for template embedding and skill assembly | +| 5 | 13 | Create `@pr-impact/skill` Claude Code plugin | +| 6 | 14-17 | Build `@pr-impact/action` GitHub Action (CJS, embedded templates, tools-core dispatcher) | 
+| 7 | 18-20 | Update workspace config, remove old packages, update docs | + +**Total: 20 tasks, ~18 commits** + +### Key Architectural Decisions + +1. **`tools-core` is the shared foundation.** Both `tools` (MCP) and `action` (GitHub Action) import pure functions from it. No duplicated tool logic. +2. **Tools return plain objects, not MCP ToolResult.** The MCP wrapper handles formatting. The action dispatcher handles stringification. +3. **Templates are embedded at build time.** The action's prebuild step generates `src/generated/templates.ts`. The skill's build step generates `skill.md`. No filesystem reads at runtime. +4. **Action uses CJS format.** GitHub Actions requires a self-contained `dist/index.cjs` (not ESM). +5. **`find_importers` caches the reverse dependency map.** Built on first call, reused on subsequent calls within the same session. +6. **`list_changed_files` includes status.** Uses `git diff --name-status` for proper A/M/D/R status, merged with `diffSummary` for line counts. +7. **`search_code` passes glob to git grep.** Uses `git.raw()` to properly pass pathspec after `--`. +8. **Client has safety limits.** 30 iterations max, 180-second wall-clock timeout, `temperature: 0` for consistency. +9. **Risk score parsing is explicit.** If parsing fails, logs warning and skips threshold check instead of false-failing. +10. **`action.yml` has no `default` for `github-token`.** Users must pass `${{ secrets.GITHUB_TOKEN }}` explicitly. diff --git a/docs/programmatic-api.md b/docs/programmatic-api.md deleted file mode 100644 index 05ec367..0000000 --- a/docs/programmatic-api.md +++ /dev/null @@ -1,251 +0,0 @@ -# Programmatic API Guide - -Use `@pr-impact/core` as a library in your own scripts, custom CI tooling, or internal platforms. - ---- - -## Installation - -```bash -npm install @pr-impact/core -``` - -The package is ESM-only. Make sure your project has `"type": "module"` in `package.json` or uses `.mjs` file extensions. 
- ---- - -## Full Analysis - -The simplest way to use the library — run the entire analysis pipeline in one call: - -```typescript -import { analyzePR, formatMarkdown, formatJSON } from '@pr-impact/core'; - -const analysis = await analyzePR({ - repoPath: '/path/to/repo', - baseBranch: 'main', - headBranch: 'feature/my-branch', -}); - -// Access structured data -console.log(analysis.riskScore.score); // 42 -console.log(analysis.riskScore.level); // "medium" -console.log(analysis.breakingChanges); // BreakingChange[] -console.log(analysis.testCoverage.ratio); // 0.75 -console.log(analysis.summary); // Human-readable summary - -// Format as report -const markdown = formatMarkdown(analysis); -const json = formatJSON(analysis); -``` - -### Skipping Analysis Steps - -Pass options to skip expensive analysis steps you don't need: - -```typescript -const analysis = await analyzePR({ - repoPath: '.', - baseBranch: 'main', - headBranch: 'HEAD', - skipBreaking: true, // Skip breaking change detection - skipCoverage: false, // Run test coverage (default) - skipDocs: true, // Skip doc staleness check -}); -``` - -When a step is skipped, its result will contain empty/default values (empty arrays, zero scores, etc.). The risk score adjusts accordingly. - ---- - -## Individual Analysis Steps - -Each step of the pipeline can be called independently. This is useful when you only need part of the analysis or want to build a custom workflow. - -### Step 1: Parse the Diff - -```typescript -import { parseDiff } from '@pr-impact/core'; - -const changedFiles = await parseDiff('/path/to/repo', 'main', 'HEAD'); - -for (const file of changedFiles) { - console.log(`${file.path} — ${file.category} — +${file.additions}/-${file.deletions}`); -} -``` - -`parseDiff` returns `ChangedFile[]`, where each entry includes the file path, change category (`source`, `test`, `doc`, `config`, `other`), line counts, and the raw diff hunks. 
- -### Step 2: Detect Breaking Changes - -```typescript -import { parseDiff, detectBreakingChanges } from '@pr-impact/core'; - -const changedFiles = await parseDiff(repoPath, base, head); -const breakingChanges = await detectBreakingChanges(repoPath, base, head, changedFiles); - -for (const bc of breakingChanges) { - console.log(`[${bc.severity}] ${bc.type} in ${bc.file}: ${bc.description}`); - console.log(` Consumers: ${bc.consumers.join(', ')}`); -} -``` - -Breaking change detection requires source files (not test/doc/config files). It compares exports and function signatures between the base and head versions. - -### Step 3: Check Test Coverage - -```typescript -import { parseDiff, checkTestCoverage } from '@pr-impact/core'; - -const changedFiles = await parseDiff(repoPath, base, head); -const coverage = await checkTestCoverage(repoPath, changedFiles); - -console.log(`Coverage ratio: ${coverage.ratio}`); // 0.0 to 1.0 -for (const gap of coverage.gaps) { - console.log(`${gap.sourceFile} — missing: ${gap.expectedTestFile}`); -} -``` - -### Step 4: Check Documentation Staleness - -```typescript -import { parseDiff, checkDocStaleness } from '@pr-impact/core'; - -const changedFiles = await parseDiff(repoPath, base, head); -const staleness = await checkDocStaleness(repoPath, changedFiles, base, head); - -for (const ref of staleness.staleReferences) { - console.log(`${ref.docFile}:${ref.line} references ${ref.symbol} (${ref.reason})`); -} -``` - -### Step 5: Build Impact Graph - -```typescript -import { parseDiff, buildImpactGraph } from '@pr-impact/core'; - -const changedFiles = await parseDiff(repoPath, base, head); -const impact = await buildImpactGraph(repoPath, changedFiles); - -console.log('Directly changed:', impact.directlyChanged); -console.log('Indirectly affected:', impact.indirectlyAffected); -for (const edge of impact.edges) { - console.log(`${edge.from} → ${edge.to}`); -} -``` - -### Step 6: Calculate Risk - -```typescript -import { calculateRisk } from 
'@pr-impact/core'; - -const risk = calculateRisk( - changedFiles, - breakingChanges, - testCoverage, - docStaleness, - impactGraph, -); - -console.log(`Score: ${risk.score}/100 (${risk.level})`); -for (const factor of risk.factors) { - console.log(` ${factor.name}: ${factor.score} × ${factor.weight}`); -} -``` - ---- - -## Lower-Level Utilities - -```typescript -import { - categorizeFile, - parseExports, - diffExports, - diffSignatures, - mapTestFiles, -} from '@pr-impact/core'; - -// Classify a file path -categorizeFile('src/utils/auth.ts'); // 'source' -categorizeFile('__tests__/auth.test.ts'); // 'test' -categorizeFile('README.md'); // 'doc' -categorizeFile('tsconfig.json'); // 'config' -``` - ---- - -## Key Types - -All TypeScript interfaces are exported from `@pr-impact/core`: - -```typescript -import type { - PRAnalysis, // Top-level result from analyzePR() - AnalysisOptions, // Input options for analyzePR() - ChangedFile, // A file in the diff - BreakingChange, // A detected breaking API change - TestCoverageReport, // Coverage ratio + gaps - TestCoverageGap, // A source file missing test changes - DocStalenessReport, // Stale doc references - StaleReference, // A single stale reference in a doc file - ImpactGraph, // Directly/indirectly affected files + edges - ImpactEdge, // A single import dependency edge - RiskAssessment, // Overall score, level, and factors - RiskFactor, // Individual factor with score, weight, description -} from '@pr-impact/core'; -``` - ---- - -## Error Handling - -All analysis functions throw on git or I/O errors. 
Wrap calls in try/catch: - -```typescript -try { - const analysis = await analyzePR({ repoPath: '.', baseBranch: 'main', headBranch: 'HEAD' }); -} catch (error) { - // Common errors: - // - Not a git repository - // - Branch does not exist - // - Shallow clone (insufficient history) - console.error('Analysis failed:', error.message); -} -``` - -Individual steps like `detectBreakingChanges` handle per-file errors gracefully (e.g., a file that doesn't exist at the base ref returns no breaking changes for that file), but will still throw on fundamental git errors. - ---- - -## Example: Custom CI Script - -```typescript -// scripts/check-pr.ts -import { analyzePR } from '@pr-impact/core'; - -const analysis = await analyzePR({ - repoPath: '.', - baseBranch: process.env.BASE_BRANCH ?? 'main', - headBranch: process.env.HEAD_BRANCH ?? 'HEAD', -}); - -// Custom logic: fail only if there are high-severity breaking changes -// AND the risk score is above 50 -const hasHighBreaking = analysis.breakingChanges.some(bc => bc.severity === 'high'); - -if (hasHighBreaking && analysis.riskScore.score > 50) { - console.error(`Blocked: high-severity breaking changes with risk score ${analysis.riskScore.score}`); - process.exit(1); -} - -console.log(`PR looks good. Risk: ${analysis.riskScore.score} (${analysis.riskScore.level})`); -``` - ---- - -## Next Steps - -- [Risk Scoring](./risk-scoring.md) — Understand the score formula and factor weights -- [Data Flow](./data-flow.md) — Type relationships and data flow through the pipeline -- [Analysis Pipeline](./analysis-pipeline.md) — How the 6-step pipeline works internally diff --git a/docs/risk-scoring.md b/docs/risk-scoring.md deleted file mode 100644 index 69e8c83..0000000 --- a/docs/risk-scoring.md +++ /dev/null @@ -1,189 +0,0 @@ -# Risk Scoring - -The risk score is a weighted average of six independent factors, producing a single number from 0 to 100. The score maps to a severity level used for CI gates and human review prioritization. 
- ---- - -## Formula - -``` -score = round( sum(factor_score * factor_weight) / sum(factor_weight) ) -``` - -All weights sum to 1.0, so the formula simplifies to: - -``` -score = round( sum(factor_score * factor_weight) ) -``` - ---- - -## Factor Weights - -```mermaid -pie title Risk Factor Weights - "Breaking changes (0.30)" : 30 - "Untested changes (0.25)" : 25 - "Diff size (0.15)" : 15 - "Stale documentation (0.10)" : 10 - "Config file changes (0.10)" : 10 - "Impact breadth (0.10)" : 10 -``` - ---- - -## Factor Scoring Logic - -### 1. Breaking Changes (weight: 0.30) - -Evaluates the severity of detected API breaking changes. - -```mermaid -flowchart TD - START{Any breaking changes?} - START -->|No| S0["Score: 0"] - START -->|Yes| HIGH{Any high severity?} - HIGH -->|Yes| S100["Score: 100"] - HIGH -->|No| MED{Any medium severity?} - MED -->|Yes| S60["Score: 60"] - MED -->|No| S30["Score: 30
(low only)"] - - style S0 fill:#059669,color:#fff - style S30 fill:#ca8a04,color:#fff - style S60 fill:#ea580c,color:#fff - style S100 fill:#dc2626,color:#fff -``` - -Breaking change types and their default severities: - -| Type | Severity | Description | -|---|---|---| -| `removed_export` | **high** | An exported symbol was deleted | -| `changed_signature` | **medium** | Function/method signature changed | -| `changed_type` | **medium** | Symbol kind changed (e.g. function to class) | -| `renamed_export` | **low** | Symbol renamed but same signature | - -### 2. Untested Changes (weight: 0.25) - -Measures what fraction of changed source files lack corresponding test changes. - -``` -score = (1 - coverageRatio) * 100 -``` - -Where `coverageRatio = sourceFilesWithTestChanges / changedSourceFiles`. - -- If no source files changed, score is 0. -- If 3 of 4 source files have test changes, score is 25. -- If 0 of 4 source files have test changes, score is 100. - -### 3. Diff Size (weight: 0.15) - -Based on total lines changed (additions + deletions). - -```mermaid -flowchart LR - A["0 - 99 lines"] -->|Score: 0| OK["Low risk"] - B["100 - 499 lines"] -->|Score: 50| MED["Medium risk"] - C["500 - 1000 lines"] -->|Score: 80| HIGH["High risk"] - D["1001+ lines"] -->|Score: 100| CRIT["Critical risk"] - - style OK fill:#059669,color:#fff - style MED fill:#ca8a04,color:#fff - style HIGH fill:#ea580c,color:#fff - style CRIT fill:#dc2626,color:#fff -``` - -### 4. Stale Documentation (weight: 0.10) - -Each stale reference adds 20 points, capped at 100. - -``` -score = min(staleReferences.length * 20, 100) -``` - -A stale reference is a doc file line that mentions a deleted path, renamed path, or removed export symbol. - -### 5. 
Config File Changes (weight: 0.10) - -```mermaid -flowchart TD - START{Any config files changed?} - START -->|No| S0["Score: 0"] - START -->|Yes| CI{Any CI/build config?} - CI -->|Yes| S100["Score: 100"] - CI -->|No| S50["Score: 50"] - - style S0 fill:#059669,color:#fff - style S50 fill:#ca8a04,color:#fff - style S100 fill:#dc2626,color:#fff -``` - -CI/build config patterns that trigger score 100: - -| Pattern | Examples | -|---|---| -| `.github/` | `.github/workflows/ci.yml` | -| `Dockerfile` | `Dockerfile`, `Dockerfile.prod` | -| `docker-compose` | `docker-compose.yml` | -| `webpack.config` | `webpack.config.js` | -| `vite.config` | `vite.config.ts` | -| `rollup.config` | `rollup.config.mjs` | -| `esbuild.config` | `esbuild.config.js` | -| `turbo.json` | `turbo.json` | -| `.gitlab-ci` | `.gitlab-ci.yml` | -| `Jenkinsfile` | `Jenkinsfile` | -| `.circleci/` | `.circleci/config.yml` | - -### 6. Impact Breadth (weight: 0.10) - -Each indirectly affected file adds 10 points, capped at 100. - -``` -score = min(indirectlyAffected.length * 10, 100) -``` - -Indirectly affected files are discovered via BFS over the reverse import dependency graph (default max depth: 3). - ---- - -## Score-to-Level Mapping - -```mermaid -flowchart LR - subgraph "Risk Levels" - LOW["0 - 25
Low"] - MED["26 - 50
Medium"] - HIGH["51 - 75
High"] - CRIT["76 - 100
Critical"] - end - - LOW --> MED --> HIGH --> CRIT - - style LOW fill:#059669,color:#fff - style MED fill:#ca8a04,color:#fff - style HIGH fill:#ea580c,color:#fff - style CRIT fill:#dc2626,color:#fff -``` - ---- - -## Example Calculation - -Consider a PR with: -- 1 medium-severity breaking change -- 2 of 3 source files have test changes -- 250 total lines changed -- 1 stale doc reference -- No config changes -- 2 indirectly affected files - -| Factor | Score | Weight | Weighted | -|---|---|---|---| -| Breaking changes | 60 | 0.30 | 18.0 | -| Untested changes | 33 | 0.25 | 8.3 | -| Diff size | 50 | 0.15 | 7.5 | -| Stale documentation | 20 | 0.10 | 2.0 | -| Config file changes | 0 | 0.10 | 0.0 | -| Impact breadth | 20 | 0.10 | 2.0 | -| **Total** | | **1.00** | **38 (medium)** | diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md deleted file mode 100644 index df34527..0000000 --- a/docs/troubleshooting.md +++ /dev/null @@ -1,240 +0,0 @@ -# Troubleshooting - -Common issues when using pr-impact and how to resolve them. - ---- - -## Git Errors - -### "fatal: bad revision 'main...HEAD'" - -**Cause:** The base branch (`main`) doesn't exist locally. This often happens in CI where only the PR branch is checked out. - -**Fix:** Use the remote-prefixed branch name: - -```bash -pri analyze origin/main HEAD -``` - -Or ensure the base branch is fetched: - -```bash -git fetch origin main -pri analyze main HEAD -``` - -### "Not a git repository" - -**Cause:** pr-impact is being run in a directory that isn't a git repository. - -**Fix:** Either `cd` into a git repo or use the `--repo` flag: - -```bash -pri analyze --repo /path/to/your/repo -``` - -### Shallow clone — missing history - -**Cause:** CI environments often use shallow clones (`fetch-depth: 1`) for speed. pr-impact needs full history to compute diffs between branches. 
- -**Fix:** In GitHub Actions: - -```yaml -- uses: actions/checkout@v4 - with: - fetch-depth: 0 # Full clone -``` - -In GitLab CI: - -```yaml -variables: - GIT_DEPTH: 0 -``` - -In CircleCI, full clones are the default. If you've set `shallow: true`, remove it. - -### "fatal: ambiguous argument" - -**Cause:** The head branch reference doesn't resolve. This can happen with detached HEAD states or when the branch name contains special characters. - -**Fix:** Use explicit refs: - -```bash -# Use the commit SHA directly -pri analyze origin/main abc1234 - -# Or the full ref -pri analyze origin/main refs/heads/feature/my-branch -``` - ---- - -## Breaking Change Detection - -### False positives from re-exports - -**Symptom:** pr-impact reports breaking changes for symbols that are still exported, just re-exported from a different internal path. - -**Context:** The breaking change detector compares exports at the file level. If you move a function from `utils.ts` to `helpers.ts` and re-export it from `utils.ts`, it won't be flagged. But if you remove the re-export, it will be flagged even if it's still available from `helpers.ts`. - -**Workaround:** Filter by severity (`--severity medium`) to reduce noise from low-severity renames, or skip breaking change detection for non-library projects (`--no-breaking`). - -### Non-TypeScript/JavaScript files flagged - -**Symptom:** Breaking changes reported for files that aren't part of the public API. - -**Context:** Breaking change detection runs on all source files (anything categorized as `source` by the file categorizer). Config files, test files, and docs are excluded. - -**Workaround:** This typically doesn't happen because the file categorizer correctly classifies non-source files. If it does, it may indicate a file categorization issue — please report it. - ---- - -## Test Coverage - -### Coverage ratio is 0 even though tests exist - -**Symptom:** `pri analyze` reports 0% test coverage even though your project has tests. 
- -**Cause:** pr-impact checks whether **changed** source files have corresponding **changed** test files. If you changed source code but didn't modify any tests, coverage is 0 — even if tests exist and pass. - -**This is intentional.** The check isn't "do tests exist?" but "did you update tests for the code you changed?" - -### Test file not recognized - -**Symptom:** You updated a test file but it's not counted toward coverage. - -**Cause:** pr-impact maps source files to test files using naming conventions: -- `src/foo.ts` maps to `__tests__/foo.test.ts`, `src/foo.test.ts`, `test/foo.test.ts`, etc. -- It looks for `.test.ts`, `.spec.ts`, `.test.js`, `.spec.js` suffixes - -If your test files use a different naming pattern, they may not be recognized. - -**Workaround:** Use `--no-coverage` if your project's test naming doesn't match, or consider standardizing test file names. - ---- - -## Risk Score - -### Score seems too high - -**Cause:** The risk score is a weighted combination of six factors. A large diff alone can push the score above 50 even with no breaking changes. - -**Debug:** Run `pri risk` to see the factor breakdown. Identify which factor is driving the score: - -```bash -pri risk origin/main HEAD -``` - -Common drivers of high scores: -- **Diff size** — PRs with >500 changed lines score 80/100 on this factor (weight: 0.15) -- **Untested changes** — If many source files changed without test updates (weight: 0.25) -- **Config changes** — CI/build config modifications score 100/100 (weight: 0.10) - -**Fix:** Consider raising your `--threshold` if the current setting produces too many false positives, or split large PRs into smaller ones. - -### Score is 0 for a non-trivial PR - -**Cause:** The PR might only contain test files, documentation, or config files that don't trigger breaking change detection or test coverage checks. - -**This is expected.** If the PR doesn't change source files, most risk factors score 0. 
- ---- - -## CI Integration - -### `pri comment` fails with 403 - -**Cause:** The GitHub token doesn't have permission to create or update PR comments. - -**Fix:** Ensure the token has `pull-requests: write` permission: - -```yaml -permissions: - pull-requests: write - -steps: - - name: Post report - run: pri comment origin/main HEAD - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -``` - -### `pri comment` can't detect PR number - -**Cause:** `pri comment` auto-detects the PR number from CI environment variables. This works in GitHub Actions, GitLab CI, and CircleCI. Other CI systems may not set the expected variables. - -**Fix:** Pass the PR number explicitly: - -```bash -pri comment origin/main HEAD --pr 123 --github-repo owner/repo -``` - -### Exit code 2 in CI - -**Meaning:** Exit code 2 means an internal error occurred (not a quality gate failure). The analysis itself crashed. - -**Debug:** Check the command output for error messages. Common causes: -- Shallow clone (see above) -- Missing branch references -- Insufficient permissions - ---- - -## MCP Server - -### "Server not found" in Claude Code - -**Cause:** The MCP server configuration is incorrect or the package isn't installed. - -**Fix:** Verify your `.claude/mcp.json`: - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -Make sure `npx` is available on your PATH. Test manually: - -```bash -npx -y @pr-impact/mcp-server -``` - -### MCP tools return empty results - -**Cause:** The MCP server defaults to `process.cwd()` as the repository path. If the working directory isn't a git repo, analysis will fail or return empty results. - -**Fix:** Pass `repoPath` explicitly when calling tools, or ensure the MCP server is started from within a git repository. - ---- - -## Performance - -### Analysis is slow - -The full `pri analyze` runs all steps in parallel, but each step involves git operations. 
Performance depends on: - -- **Repository size** — larger repos with more files take longer for import resolution -- **Diff size** — more changed files means more comparisons -- **Impact depth** — deeper impact graph traversal is slower - -**Tips:** -- Use `--no-breaking` or `--no-docs` to skip steps you don't need -- Use `pri risk` or `pri breaking` instead of `pri analyze` if you only need one check -- For the impact graph, reduce `--depth` (default 3) if traversal is too broad - ---- - -## Getting Help - -If you encounter an issue not covered here: - -1. Run with `--format json` to get structured output for debugging -2. Check the [GitHub Issues](https://github.com/ducdmdev/pr-impact/issues) for known problems -3. Open a new issue with the command you ran, the error message, and your Node.js/git versions diff --git a/eslint.config.mjs b/eslint.config.mjs index 14f523f..cad8ff2 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -51,16 +51,6 @@ export default tseslint.config( }, }, - // ── CLI command files: relax unsafe-* rules (Commander opts are `any`) ──── - { - files: ['packages/cli/src/commands/**/*.ts'], - rules: { - '@typescript-eslint/no-unsafe-argument': 'off', - '@typescript-eslint/no-unsafe-assignment': 'off', - '@typescript-eslint/no-unsafe-member-access': 'off', - }, - }, - // ── Test files: disable type-checked rules + enable vitest ─────────────── { files: ['**/__tests__/**/*.ts'], @@ -74,9 +64,9 @@ export default tseslint.config( }, }, - // ── Config files outside tsconfig (tsup, vitest configs) ───────────────── + // ── Config files outside tsconfig (tsup, vitest configs, build scripts) ── { - files: ['**/tsup.config.ts', '**/vitest.config.ts', 'vitest.config.ts'], + files: ['**/tsup.config.ts', '**/vitest.config.ts', 'vitest.config.ts', 'scripts/*.ts'], ...tseslint.configs.disableTypeChecked, }, diff --git a/packages/action/CLAUDE.md b/packages/action/CLAUDE.md new file mode 100644 index 0000000..65da689 --- /dev/null +++ 
b/packages/action/CLAUDE.md @@ -0,0 +1,37 @@ +# CLAUDE.md -- @pr-impact/action + +## What this package does + +GitHub Action that runs an agentic Claude loop to analyze PRs. Reads inputs from `action.yml`, calls the Anthropic API with tool definitions, and produces a structured risk report. Optionally posts the report as a PR comment. + +## Quick commands + +```bash +pnpm build --filter=@pr-impact/action # Prebuild (embed templates) + tsup (CJS bundle) +npx vitest run packages/action # Run tests +``` + +## Source layout + +``` +src/ + index.ts -- Entry point: reads inputs, runs analysis, sets outputs, posts comment + client.ts -- runAnalysis(): Anthropic API agentic loop (30 iterations, 180s timeout, temperature 0) + tools.ts -- executeTool(): dispatches tool_use calls to tools-core functions + comment.ts -- postOrUpdateComment(): upsert PR comment via GitHub API with HTML markers + generated/ + templates.ts -- AUTO-GENERATED by scripts/embed-templates.ts (do not edit) +``` + +## Key patterns + +- **CJS output**: builds to `dist/index.cjs` because GitHub Actions requires CommonJS. All dependencies are bundled (`noExternal: [/.*/]`). +- **Prebuild step**: `scripts/embed-templates.ts` generates `src/generated/templates.ts` from `templates/*.md` before tsup runs. +- **Tool input cloning**: `client.ts` clones `toolUse.input` via spread to avoid mutating the conversation history. +- **Parallel tool execution**: multiple tool_use blocks in a single response are executed concurrently via `Promise.all`. +- **Risk score parsing**: regex extracts score from report. If parsing fails, sets score to `-1` and level to `unknown` instead of failing. +- **Comment upsert**: uses hidden HTML comment markers (defined in `comment.ts`) to find and update existing comments. + +## Testing + +Tests in `__tests__/` mock `@actions/core`, `@actions/github`, the Anthropic SDK, and tools-core functions. The entry point test uses `vi.resetModules()` + `vi.doMock()` to re-trigger the top-level `main()` call on each import.
diff --git a/packages/action/README.md new file mode 100644 index 0000000..bbc20e8 --- /dev/null +++ b/packages/action/README.md @@ -0,0 +1,57 @@ +# @pr-impact/action + +GitHub Action that runs an agentic Claude loop to analyze pull requests and produce structured impact reports. + +## Usage + +```yaml +name: PR Impact Analysis +on: pull_request + +jobs: + analyze: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - uses: ducdmdev/pr-impact@v1 + with: + anthropic-api-key: ${{ secrets.ANTHROPIC_API_KEY }} + github-token: ${{ secrets.GITHUB_TOKEN }} + threshold: '75' +``` + +## Inputs + +| Input | Description | Required | Default | +|---|---|---|---| +| `anthropic-api-key` | Anthropic API key for Claude | Yes | -- | +| `github-token` | GitHub token for posting PR comments | No | -- | +| `base-branch` | Base branch to compare against | No | `main` | +| `model` | Claude model to use | No | `claude-sonnet-4-5-20250929` | +| `threshold` | Fail the action if risk score >= this value | No | -- | +
+## Outputs + +| Output | Description | +|---|---| +| `risk-score` | Calculated risk score (0-100), or `-1` if parsing fails | +| `risk-level` | Risk level: `low`, `medium`, `high`, `critical`, or `unknown` | +| `report` | Full markdown analysis report | +
+## How It Works + +1. Reads action inputs and detects PR context +2. Starts an agentic loop with Claude using the Anthropic API +3. Claude calls tools (`git_diff`, `read_file_at_ref`, `list_changed_files`, etc.) to gather evidence +4. Claude produces a structured risk report following the embedded system prompt +5. Parses the risk score, sets outputs, optionally posts a PR comment +6. Fails the action if the risk score meets or exceeds the threshold (score >= threshold) + +Safety limits: 30-iteration max, 180-second timeout, temperature 0.
+ +## License + +[MIT](../../LICENSE) diff --git a/packages/action/__tests__/client.test.ts b/packages/action/__tests__/client.test.ts new file mode 100644 index 0000000..5c1945d --- /dev/null +++ b/packages/action/__tests__/client.test.ts @@ -0,0 +1,150 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; + +vi.mock('@anthropic-ai/sdk', () => ({ + default: vi.fn().mockImplementation(() => ({ + messages: { + create: vi.fn(), + }, + })), +})); + +vi.mock('../src/tools.js', () => ({ + executeTool: vi.fn(), +})); + +vi.mock('../src/generated/templates.js', () => ({ + SYSTEM_PROMPT: 'You are a test prompt.', + REPORT_TEMPLATE: '# Test Report Template', +})); + +import Anthropic from '@anthropic-ai/sdk'; +import { executeTool } from '../src/tools.js'; +import { runAnalysis } from '../src/client.js'; + +beforeEach(() => { + vi.clearAllMocks(); + vi.useFakeTimers(); +}); + +afterEach(() => { + vi.useRealTimers(); +}); + +describe('runAnalysis', () => { + it('calls Claude API with temperature 0 and returns the final text response', async () => { + const mockCreate = vi.fn().mockResolvedValue({ + content: [{ type: 'text', text: '# PR Impact Report\n\n## Summary\n...' 
}], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(result).toContain('# PR Impact Report'); + expect(mockCreate).toHaveBeenCalledTimes(1); + + // Verify temperature: 0 is passed + const createArgs = mockCreate.mock.calls[0][0]; + expect(createArgs.temperature).toBe(0); + }); + + it('handles tool_use responses by executing tools and continuing', async () => { + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'list_changed_files', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: '# PR Impact Report\n\nFinal report' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('{"files": []}'); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(executeTool).toHaveBeenCalledWith('list_changed_files', expect.objectContaining({ base: 'main', head: 'HEAD' })); + expect(result).toContain('Final report'); + expect(mockCreate).toHaveBeenCalledTimes(2); + }); + + it('injects repoPath into tool calls', async () => { + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: 'done' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => 
({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('diff output'); + + vi.useRealTimers(); + await runAnalysis({ + apiKey: 'test-key', + repoPath: '/my-repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(executeTool).toHaveBeenCalledWith('git_diff', expect.objectContaining({ + repoPath: '/my-repo', + })); + }); + + it('uses embedded templates (not filesystem)', async () => { + const mockCreate = vi.fn().mockResolvedValue({ + content: [{ type: 'text', text: 'report' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.useRealTimers(); + await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + const createArgs = mockCreate.mock.calls[0][0]; + expect(createArgs.system).toBe('You are a test prompt.'); + expect(createArgs.messages[0].content).toContain('# Test Report Template'); + }); +}); diff --git a/packages/action/__tests__/comment.test.ts b/packages/action/__tests__/comment.test.ts new file mode 100644 index 0000000..758939f --- /dev/null +++ b/packages/action/__tests__/comment.test.ts @@ -0,0 +1,225 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { postOrUpdateComment } from '../src/comment.js'; + +const fetchMock = vi.fn(); +vi.stubGlobal('fetch', fetchMock); + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('postOrUpdateComment', () => { + const baseOpts = { + token: 'ghp_test123', + repo: 'owner/repo', + prNumber: 42, + body: '# PR Impact Report\nSome analysis', + }; + + it('creates a new comment when no existing comment is found', async () => { + // List comments returns empty array + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [], + }); + // Create comment + fetchMock.mockResolvedValueOnce({ + ok: true, + 
json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-123' }), + }); + + const url = await postOrUpdateComment(baseOpts); + + expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-123'); + + // Verify list call + expect(fetchMock).toHaveBeenCalledTimes(2); + const listCall = fetchMock.mock.calls[0]; + expect(listCall[0]).toBe( + 'https://api.github.com/repos/owner/repo/issues/42/comments?per_page=100&page=1', + ); + expect(listCall[1].headers['Authorization']).toBe('Bearer ghp_test123'); + + // Verify create call + const createCall = fetchMock.mock.calls[1]; + expect(createCall[0]).toBe( + 'https://api.github.com/repos/owner/repo/issues/42/comments', + ); + expect(createCall[1].method).toBe('POST'); + const createBody = JSON.parse(createCall[1].body); + expect(createBody.body).toContain('# PR Impact Report'); + }); + + it('updates existing comment when HTML marker is found', async () => { + // List comments returns one with the marker + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [ + { id: 111, body: 'unrelated comment' }, + { id: 222, body: '\nold report\n' }, + ], + }); + // Update (PATCH) comment + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-222' }), + }); + + const url = await postOrUpdateComment(baseOpts); + + expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-222'); + + // Verify PATCH call + const patchCall = fetchMock.mock.calls[1]; + expect(patchCall[0]).toBe( + 'https://api.github.com/repos/owner/repo/issues/comments/222', + ); + expect(patchCall[1].method).toBe('PATCH'); + }); + + it('wraps body with pr-impact markers', async () => { + // No existing comments + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [], + }); + // Create + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => ({ html_url: 'https://github.com/...' 
}), + }); + + await postOrUpdateComment(baseOpts); + + const createBody = JSON.parse(fetchMock.mock.calls[1][1].body); + expect(createBody.body).toBe( + '\n# PR Impact Report\nSome analysis\n', + ); + }); + + it('throws on non-ok response when creating a comment', async () => { + // No existing comments + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [], + }); + // Create returns 403 + fetchMock.mockResolvedValueOnce({ + ok: false, + status: 403, + }); + + await expect(postOrUpdateComment(baseOpts)).rejects.toThrow( + 'GitHub API error creating comment: 403', + ); + }); + + it('throws on non-ok response when updating a comment', async () => { + // List comments returns one with the marker + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [ + { id: 999, body: '\nold\n' }, + ], + }); + // Update returns 500 + fetchMock.mockResolvedValueOnce({ + ok: false, + status: 500, + }); + + await expect(postOrUpdateComment(baseOpts)).rejects.toThrow( + 'GitHub API error updating comment: 500', + ); + }); + + it('handles pagination when first page returns 100 comments without marker', async () => { + // First page: 100 comments, none with marker + const page1 = Array.from({ length: 100 }, (_, i) => ({ + id: i + 1, + body: `comment ${i + 1}`, + })); + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => page1, + }); + + // Second page: fewer than 100, one has marker + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [ + { id: 201, body: '\nexisting\n' }, + { id: 202, body: 'another' }, + ], + }); + + // PATCH for the found comment + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-201' }), + }); + + const url = await postOrUpdateComment(baseOpts); + + expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-201'); + + // Verify pagination: page 1 then page 2 + expect(fetchMock.mock.calls[0][0]).toContain('page=1'); + 
expect(fetchMock.mock.calls[1][0]).toContain('page=2'); + + // Verify update call targets the correct comment + expect(fetchMock.mock.calls[2][0]).toContain('/issues/comments/201'); + }); + + it('creates a new comment when pagination exhausts all pages without finding marker', async () => { + // First page: 100 comments + const page1 = Array.from({ length: 100 }, (_, i) => ({ + id: i + 1, + body: `comment ${i + 1}`, + })); + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => page1, + }); + + // Second page: fewer than 100, no marker + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => [ + { id: 201, body: 'no marker here' }, + ], + }); + + // Create new comment + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-300' }), + }); + + const url = await postOrUpdateComment(baseOpts); + + expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-300'); + // 2 list calls + 1 create call + expect(fetchMock).toHaveBeenCalledTimes(3); + expect(fetchMock.mock.calls[2][1].method).toBe('POST'); + }); + + it('creates new comment when list call fails (non-ok)', async () => { + // List returns non-ok (findExistingComment returns null) + fetchMock.mockResolvedValueOnce({ + ok: false, + status: 500, + }); + + // Create comment + fetchMock.mockResolvedValueOnce({ + ok: true, + json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-new' }), + }); + + const url = await postOrUpdateComment(baseOpts); + + expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-new'); + expect(fetchMock.mock.calls[1][1].method).toBe('POST'); + }); +}); diff --git a/packages/action/__tests__/index.test.ts b/packages/action/__tests__/index.test.ts new file mode 100644 index 0000000..9d56081 --- /dev/null +++ b/packages/action/__tests__/index.test.ts @@ -0,0 +1,176 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +// Mock all 
dependencies before importing +vi.mock('@actions/core', () => ({ + getInput: vi.fn(), + setOutput: vi.fn(), + setFailed: vi.fn(), + info: vi.fn(), + warning: vi.fn(), +})); + +vi.mock('@actions/github', () => ({ + context: { + payload: {} as Record, + repo: { owner: 'test-owner', repo: 'test-repo' }, + }, +})); + +vi.mock('../src/client.js', () => ({ + runAnalysis: vi.fn(), +})); + +vi.mock('../src/comment.js', () => ({ + postOrUpdateComment: vi.fn(), +})); + +import * as core from '@actions/core'; +import * as github from '@actions/github'; +import { runAnalysis } from '../src/client.js'; +import { postOrUpdateComment } from '../src/comment.js'; + +beforeEach(() => { + vi.clearAllMocks(); + // Reset context payload + github.context.payload = {}; + github.context.repo = { owner: 'test-owner', repo: 'test-repo' }; +}); + +// Helper: set up standard mocks for getInput +function setupInputs(overrides: Record = {}) { + const defaults: Record = { + 'anthropic-api-key': 'sk-test', + 'base-branch': 'main', + 'model': 'claude-sonnet-4-5-20250929', + 'threshold': '', + 'github-token': '', + }; + const merged = { ...defaults, ...overrides }; + vi.mocked(core.getInput).mockImplementation((name: string) => merged[name] ?? 
''); +} + +// Helper: load the index module (triggers main()) +async function loadIndex() { + // Use dynamic import with a cache-busting query to get a fresh module each time + // We need to reset modules to re-trigger the top-level main() call + vi.resetModules(); + + // Re-apply mocks after resetModules + vi.doMock('@actions/core', () => ({ + getInput: vi.mocked(core.getInput), + setOutput: vi.mocked(core.setOutput), + setFailed: vi.mocked(core.setFailed), + info: vi.mocked(core.info), + warning: vi.mocked(core.warning), + })); + + vi.doMock('@actions/github', () => ({ + context: github.context, + })); + + vi.doMock('../src/client.js', () => ({ + runAnalysis: vi.mocked(runAnalysis), + })); + + vi.doMock('../src/comment.js', () => ({ + postOrUpdateComment: vi.mocked(postOrUpdateComment), + })); + + await import('../src/index.js'); + // Allow the top-level main().catch() to settle + await (vi.dynamicImportSettled?.() ?? new Promise((r) => setTimeout(r, 10))); +} + +describe('action entry point', () => { + it('parses risk score from report and sets outputs', async () => { + setupInputs(); + vi.mocked(runAnalysis).mockResolvedValue( + '## Analysis\n**Risk Score**: 42/100 (medium)\nDetails...', + ); + + await loadIndex(); + + expect(core.setOutput).toHaveBeenCalledWith('risk-score', '42'); + expect(core.setOutput).toHaveBeenCalledWith('risk-level', 'medium'); + expect(core.setOutput).toHaveBeenCalledWith('report', expect.stringContaining('Risk Score')); + expect(core.info).toHaveBeenCalledWith('Risk Score: 42/100 (medium)'); + }); + + it('sets risk score to -1 and warns when regex does not match', async () => { + setupInputs(); + vi.mocked(runAnalysis).mockResolvedValue('No score in this report'); + + await loadIndex(); + + expect(core.setOutput).toHaveBeenCalledWith('risk-score', '-1'); + expect(core.setOutput).toHaveBeenCalledWith('risk-level', 'unknown'); + expect(core.warning).toHaveBeenCalledWith( + 'Could not parse risk score from report. 
Skipping threshold check.', + ); + }); + + it('calls setFailed when risk score >= threshold', async () => { + setupInputs({ threshold: '40' }); + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 42/100 (medium)'); + + await loadIndex(); + + expect(core.setFailed).toHaveBeenCalledWith( + 'Risk score 42 exceeds threshold 40', + ); + }); + + it('does NOT call setFailed when score < threshold', async () => { + setupInputs({ threshold: '50' }); + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 42/100 (medium)'); + + await loadIndex(); + + expect(core.setFailed).not.toHaveBeenCalled(); + }); + + it('does NOT call setFailed when threshold is not set', async () => { + setupInputs({ threshold: '' }); + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 85/100 (high)'); + + await loadIndex(); + + expect(core.setFailed).not.toHaveBeenCalled(); + }); + + it('posts PR comment when prNumber and githubToken are present', async () => { + setupInputs({ 'github-token': 'ghp_token123' }); + github.context.payload = { pull_request: { number: 7 } }; + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 10/100 (low)'); + vi.mocked(postOrUpdateComment).mockResolvedValue('https://github.com/comment-url'); + + await loadIndex(); + + expect(postOrUpdateComment).toHaveBeenCalledWith({ + token: 'ghp_token123', + repo: 'test-owner/test-repo', + prNumber: 7, + body: '**Risk Score**: 10/100 (low)', + }); + expect(core.info).toHaveBeenCalledWith('Posted PR comment: https://github.com/comment-url'); + }); + + it('skips comment when no prNumber in context', async () => { + setupInputs({ 'github-token': 'ghp_token123' }); + github.context.payload = {}; + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 10/100 (low)'); + + await loadIndex(); + + expect(postOrUpdateComment).not.toHaveBeenCalled(); + }); + + it('calls setFailed when main() throws an error', async () => { + setupInputs(); + vi.mocked(runAnalysis).mockRejectedValue(new Error('API connection 
failed')); + + await loadIndex(); + + expect(core.setFailed).toHaveBeenCalledWith('API connection failed'); + }); +}); diff --git a/packages/action/__tests__/tools.test.ts b/packages/action/__tests__/tools.test.ts new file mode 100644 index 0000000..ebcc71a --- /dev/null +++ b/packages/action/__tests__/tools.test.ts @@ -0,0 +1,124 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('@pr-impact/tools-core', () => ({ + gitDiff: vi.fn(), + readFileAtRef: vi.fn(), + listChangedFiles: vi.fn(), + searchCode: vi.fn(), + findImporters: vi.fn(), + listTestFiles: vi.fn(), + clearImporterCache: vi.fn(), +})); + +import { + gitDiff, + readFileAtRef, + listChangedFiles, + searchCode, + findImporters, + listTestFiles, +} from '@pr-impact/tools-core'; +import { executeTool } from '../src/tools.js'; + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('executeTool', () => { + it('dispatches git_diff tool and returns stringified result', async () => { + vi.mocked(gitDiff).mockResolvedValue({ diff: 'diff output' }); + + const result = await executeTool('git_diff', { + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(gitDiff).toHaveBeenCalledWith({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + expect(result).toContain('diff output'); + }); + + it('dispatches read_file_at_ref tool', async () => { + vi.mocked(readFileAtRef).mockResolvedValue({ content: 'file content' }); + + const result = await executeTool('read_file_at_ref', { + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + + expect(readFileAtRef).toHaveBeenCalledWith({ + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + expect(result).toContain('file content'); + }); + + it('dispatches list_changed_files tool', async () => { + vi.mocked(listChangedFiles).mockResolvedValue({ + files: [{ path: 'a.ts', status: 'modified', additions: 1, deletions: 0 }], + totalAdditions: 1, + totalDeletions: 0, + }); + + const result = await 
executeTool('list_changed_files', { + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + const parsed = JSON.parse(result); + expect(parsed.files).toHaveLength(1); + expect(parsed.files[0].status).toBe('modified'); + }); + + it('dispatches search_code tool', async () => { + vi.mocked(searchCode).mockResolvedValue({ + matches: [{ file: 'a.ts', line: 1, match: 'test' }], + }); + + const result = await executeTool('search_code', { + repoPath: '/repo', + pattern: 'test', + glob: '*.ts', + }); + + const parsed = JSON.parse(result); + expect(parsed.matches).toHaveLength(1); + }); + + it('dispatches find_importers tool', async () => { + vi.mocked(findImporters).mockResolvedValue({ importers: ['src/bar.ts'] }); + + const result = await executeTool('find_importers', { + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + const parsed = JSON.parse(result); + expect(parsed.importers).toContain('src/bar.ts'); + }); + + it('dispatches list_test_files tool', async () => { + vi.mocked(listTestFiles).mockResolvedValue({ + testFiles: ['src/__tests__/foo.test.ts'], + }); + + const result = await executeTool('list_test_files', { + repoPath: '/repo', + sourceFile: 'src/foo.ts', + }); + + const parsed = JSON.parse(result); + expect(parsed.testFiles).toContain('src/__tests__/foo.test.ts'); + }); + + it('throws for unknown tool', async () => { + await expect(executeTool('unknown_tool', {})).rejects.toThrow('Unknown tool: unknown_tool'); + }); +}); diff --git a/packages/action/action.yml b/packages/action/action.yml new file mode 100644 index 0000000..3e554f5 --- /dev/null +++ b/packages/action/action.yml @@ -0,0 +1,36 @@ +name: 'PR Impact Analysis' +description: 'AI-powered PR impact analysis — detect breaking changes, map blast radius, and score risk' +branding: + icon: 'shield' + color: 'blue' + +inputs: + anthropic-api-key: + description: 'Anthropic API key for Claude' + required: true + base-branch: + description: 'Base branch to compare against' + required: false + 
default: 'main' + model: + description: 'Claude model to use' + required: false + default: 'claude-sonnet-4-5-20250929' + threshold: + description: 'Risk score threshold — action fails if risk score >= this value' + required: false + github-token: + description: 'GitHub token for posting PR comments. Pass ${{ secrets.GITHUB_TOKEN }} in your workflow.' + required: false + +outputs: + risk-score: + description: 'The calculated risk score (0-100)' + risk-level: + description: 'The risk level (low/medium/high/critical)' + report: + description: 'The full markdown report' + +runs: + using: 'node20' + main: 'dist/index.cjs' diff --git a/packages/action/package.json b/packages/action/package.json new file mode 100644 index 0000000..52e8395 --- /dev/null +++ b/packages/action/package.json @@ -0,0 +1,26 @@ +{ + "name": "@pr-impact/action", + "version": "1.0.0", + "private": true, + "description": "GitHub Action for AI-powered PR impact analysis", + "type": "module", + "main": "./dist/index.cjs", + "license": "MIT", + "scripts": { + "prebuild": "tsx ../../scripts/embed-templates.ts", + "build": "tsup", + "clean": "rm -rf dist" + }, + "dependencies": { + "@pr-impact/tools-core": "workspace:*", + "@anthropic-ai/sdk": "^0.39.0", + "@actions/core": "^1.11.0", + "@actions/github": "^6.0.0" + }, + "devDependencies": { + "tsup": "^8.0.0", + "tsx": "^4.0.0", + "typescript": "~5.7.0", + "@types/node": "^22.0.0" + } +} diff --git a/packages/action/src/client.ts b/packages/action/src/client.ts new file mode 100644 index 0000000..f39e156 --- /dev/null +++ b/packages/action/src/client.ts @@ -0,0 +1,177 @@ +import Anthropic from '@anthropic-ai/sdk'; +import { executeTool } from './tools.js'; +import { SYSTEM_PROMPT, REPORT_TEMPLATE } from './generated/templates.js'; + +export interface AnalysisOptions { + apiKey: string; + repoPath: string; + baseBranch: string; + headBranch: string; + model: string; +} + +const MAX_ITERATIONS = 30; +const TIMEOUT_MS = 180_000; // 180 seconds + +const 
TOOL_DEFINITIONS: Anthropic.Tool[] = [ + { + name: 'git_diff', + description: 'Get the raw git diff between two branches, optionally for a single file', + input_schema: { + type: 'object' as const, + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + file: { type: 'string', description: 'Optional file path for single-file diff' }, + }, + required: ['base', 'head'], + }, + }, + { + name: 'read_file_at_ref', + description: 'Read a file content at a specific git ref', + input_schema: { + type: 'object' as const, + properties: { + ref: { type: 'string', description: 'Git ref (branch, commit, tag)' }, + filePath: { type: 'string', description: 'Repo-relative file path' }, + }, + required: ['ref', 'filePath'], + }, + }, + { + name: 'list_changed_files', + description: 'List files changed between two branches with status and stats', + input_schema: { + type: 'object' as const, + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + }, + required: ['base', 'head'], + }, + }, + { + name: 'search_code', + description: 'Search for a regex pattern in the codebase', + input_schema: { + type: 'object' as const, + properties: { + pattern: { type: 'string', description: 'Regex pattern' }, + glob: { type: 'string', description: 'File glob to limit scope (e.g. 
"*.md")' }, + }, + required: ['pattern'], + }, + }, + { + name: 'find_importers', + description: 'Find files that import a given module', + input_schema: { + type: 'object' as const, + properties: { + modulePath: { type: 'string', description: 'Repo-relative module path' }, + }, + required: ['modulePath'], + }, + }, + { + name: 'list_test_files', + description: 'Find test files associated with a source file', + input_schema: { + type: 'object' as const, + properties: { + sourceFile: { type: 'string', description: 'Repo-relative source file path' }, + }, + required: ['sourceFile'], + }, + }, +]; + +export async function runAnalysis(options: AnalysisOptions): Promise { + const client = new Anthropic({ apiKey: options.apiKey }); + + const userMessage = [ + `Analyze the PR comparing branch \`${options.baseBranch}\` to \`${options.headBranch}\`.`, + `Repository path: ${options.repoPath}`, + '', + 'Follow all 6 analysis steps. Produce the report using this template:', + '', + REPORT_TEMPLATE, + ].join('\n'); + + const messages: Anthropic.MessageParam[] = [ + { role: 'user', content: userMessage }, + ]; + + const startTime = Date.now(); + let lastTextOutput = ''; + + for (let i = 0; i < MAX_ITERATIONS; i++) { + // Check wall-clock timeout + if (Date.now() - startTime > TIMEOUT_MS) { + if (lastTextOutput) { + return lastTextOutput; + } + throw new Error(`Analysis timed out after ${TIMEOUT_MS / 1000} seconds`); + } + + const response = await client.messages.create({ + model: options.model, + max_tokens: 8192, + system: SYSTEM_PROMPT, + tools: TOOL_DEFINITIONS, + messages, + temperature: 0, + }); + + // Collect text blocks from this response for partial extraction + const textBlocks = response.content.filter( + (block): block is Anthropic.TextBlock => block.type === 'text', + ); + if (textBlocks.length > 0) { + lastTextOutput = textBlocks.map((b) => b.text).join('\n'); + } + + // Collect tool use blocks + const toolUseBlocks = response.content.filter( + (block): block is 
Anthropic.ToolUseBlock => block.type === 'tool_use', + ); + + if (toolUseBlocks.length === 0 || response.stop_reason === 'end_turn') { + return lastTextOutput; + } + + // Execute all tool calls and build tool results + messages.push({ role: 'assistant', content: response.content }); + + const toolResults: Anthropic.ToolResultBlockParam[] = await Promise.all( + toolUseBlocks.map(async (toolUse): Promise => { + try { + // Inject repoPath into all tool calls (spread-clone to avoid mutating the API response) + const input = { ...(toolUse.input as Record), repoPath: options.repoPath }; + const result = await executeTool(toolUse.name, input); + return { + type: 'tool_result', + tool_use_id: toolUse.id, + content: result, + }; + } catch (error) { + return { + type: 'tool_result', + tool_use_id: toolUse.id, + content: `Error: ${error instanceof Error ? error.message : String(error)}`, + is_error: true, + }; + } + }), + ); + + messages.push({ role: 'user', content: toolResults }); + } + + // Iteration limit hit — return whatever text we have + if (lastTextOutput) { + return lastTextOutput; + } + throw new Error('Analysis exceeded maximum iterations without producing output'); +} diff --git a/packages/cli/src/github/comment-poster.ts b/packages/action/src/comment.ts similarity index 50% rename from packages/cli/src/github/comment-poster.ts rename to packages/action/src/comment.ts index 5e5ad6e..4daef31 100644 --- a/packages/cli/src/github/comment-poster.ts +++ b/packages/action/src/comment.ts @@ -1,34 +1,13 @@ -/** - * Post or update a PR comment on GitHub using the GitHub REST API (native fetch). - * - * Uses hidden HTML markers to identify existing comments for upsert behavior - * (update-or-create). - */ - const MARKER_START = ''; const MARKER_END = ''; export interface PostCommentOptions { - /** GitHub API token with repo/write:discussion permissions. */ token: string; - /** Repository in "owner/repo" format. */ repo: string; - /** Pull request number. 
*/
-  prNumber: string;
-  /** Markdown body of the comment (markers are added automatically). */
+  prNumber: number;
   body: string;
 }
 
-interface GitHubComment {
-  id: number;
-  body?: string;
-}
-
-/**
- * Post a new PR comment or update an existing one tagged with the pr-impact marker.
- *
- * Returns the comment URL on success or throws on failure.
- */
 export async function postOrUpdateComment(opts: PostCommentOptions): Promise<string> {
   const { token, repo, prNumber, body } = opts;
   const markedBody = `${MARKER_START}\n${body}\n${MARKER_END}`;
@@ -40,74 +19,47 @@ export async function postOrUpdateComment(opts: PostCommentOptions): Promise<string> {
 async function findExistingCommentId(
   baseUrl: string,
   headers: Record<string, string>,
 ): Promise<number | null> {
   let page = 1;
   while (true) {
-    const url = `${baseUrl}?per_page=100&page=${page}`;
-    const res = await fetch(url, { method: 'GET', headers });
-
-    if (!res.ok) {
-      // If we can't list comments, treat as "no existing comment"
-      return null;
-    }
-
-    const comments = (await res.json()) as GitHubComment[];
+    const res = await fetch(`${baseUrl}?per_page=100&page=${page}`, { headers });
+    if (!res.ok) return null;
+    const comments = (await res.json()) as Array<{ id: number; body?: string }>;
     if (comments.length === 0) break;
-
-    for (const comment of comments) {
-      if (comment.body?.includes(MARKER_START)) {
-        return comment.id;
-      }
+    for (const c of comments) {
+      if (c.body?.includes(MARKER_START)) return c.id;
     }
-
-    // GitHub typically returns at most 100 per page
     if (comments.length < 100) break;
     page++;
   }
   return null;
 }
diff --git a/packages/action/src/generated/templates.ts b/packages/action/src/generated/templates.ts
new file mode 100644
index 0000000..ea7b8b7
--- /dev/null
+++ b/packages/action/src/generated/templates.ts
@@ -0,0 +1,6 @@
+// AUTO-GENERATED — do not edit manually.
+// Generated by scripts/embed-templates.ts from templates/*.md
+
+export const SYSTEM_PROMPT = "You are a PR impact analyzer.
Given access to a git repository via MCP tools, analyze a pull request and produce a structured impact report.\n\n## Available Tools\n\n- `git_diff` — Get the raw diff between two branches (optionally for a single file)\n- `read_file_at_ref` — Read a file's content at a specific git ref (branch/commit)\n- `list_changed_files` — List all files changed between two branches with stats and status\n- `search_code` — Search for a regex pattern across the codebase\n- `find_importers` — Find all files that import a given module path\n- `list_test_files` — Find test files associated with a given source file\n\n## Analysis Steps\n\nFollow these steps in order. Use the tools to gather evidence — never guess about file contents or imports.\n\n### Step 1: Diff Overview\n\nCall `list_changed_files` to get all changed files. Categorize each file:\n- **source**: `.ts`, `.tsx`, `.js`, `.jsx` files that are not tests\n- **test**: files in `__tests__/`, `test/`, `tests/` directories, or files matching `*.test.*`, `*.spec.*`\n- **doc**: `.md`, `.mdx`, `.rst`, `.txt` files\n- **config**: `package.json`, `tsconfig.json`, `.eslintrc.*`, `Dockerfile`, CI/CD files, bundler configs\n- **other**: everything else\n\n### Step 2: Breaking Change Detection\n\nFor each changed **source** file that likely exports public API symbols:\n1. Call `read_file_at_ref` with the base branch ref to get the old version\n2. Call `read_file_at_ref` with the head branch ref to get the new version\n3. Compare exported functions, classes, types, interfaces, enums, and variables\n4. Identify breaking changes:\n - **Removed export**: a symbol that existed in base but is gone in head\n - **Changed signature**: function parameters changed (added required params, removed params, changed types)\n - **Changed type**: interface/type fields changed in incompatible ways\n - **Renamed export**: a symbol was renamed (removed + similar new one added)\n5. 
For each breaking change, call `find_importers` to find downstream consumers\n6. Assign severity:\n - **high**: removed or renamed exports, removed required interface fields\n - **medium**: changed function signatures, changed return types\n - **low**: changed optional fields, added required fields to interfaces\n\n### Step 3: Test Coverage Gaps\n\nFor each changed source file:\n1. Call `list_test_files` to find associated test files\n2. Check if any of those test files appear in the changed file list from Step 1\n3. Calculate coverage ratio: `sourceFilesWithTestChanges / changedSourceFiles`\n4. Flag each source file that changed without corresponding test updates\n\n### Step 4: Documentation Staleness\n\nFor each changed **doc** file AND for each doc file that references changed source files:\n1. Call `read_file_at_ref` (head ref) to read the doc content\n2. Look for references to symbols, file paths, or function names that were modified or removed\n3. Flag stale references with the line number and reason\n\nIf no doc files are in the diff, call `search_code` with pattern matching changed symbol names in `*.md` files to find docs that reference them.\n\n### Step 5: Impact Graph\n\nFor each changed source file:\n1. Call `find_importers` to find direct consumers\n2. For each direct consumer, call `find_importers` again to find indirect consumers (up to 2 levels deep)\n3. 
Classify files as **directly changed** (in the diff) or **indirectly affected** (consumers not in the diff)\n\n### Step 6: Risk Assessment\n\nScore each factor from 0 to 100, then compute the weighted average:\n\n| Factor | Weight | Scoring |\n|--------|--------|---------|\n| Breaking changes | 0.30 | `100` if any high-severity, `60` if medium-only, `30` if low-only, `0` if none |\n| Untested changes | 0.25 | `(1 - coverageRatio) * 100` |\n| Diff size | 0.15 | `0` if <100 total lines, `50` if 100-500, `80` if 500-1000, `100` if >1000 |\n| Stale documentation | 0.10 | `min(staleReferences * 20, 100)` |\n| Config file changes | 0.10 | `100` if CI/build config, `50` if other config, `0` if none |\n| Impact breadth | 0.10 | `min(indirectlyAffectedFiles * 10, 100)` |\n\n**Formula:** `score = sum(factor_score * weight)` (weights sum to 1.0)\n\n**Risk levels:** 0-25 = low, 26-50 = medium, 51-75 = high, 76-100 = critical\n\n## Rules\n\n- Always call tools to verify — never guess about file contents, imports, or test file existence.\n- Always use `git_diff` with the `file` parameter to inspect files individually. Never load the full diff at once.\n- If >30 changed files, only call `read_file_at_ref` for files with >50 lines changed.\n- If >50 changed files, skip the documentation staleness check (Step 4).\n- Call `find_importers` only for directly changed source files, not for indirect consumers.\n- Focus on exported/public symbols for breaking change detection. Internal/private changes are lower priority.\n- Categorize every finding with severity and cite evidence (file path, line, before/after).\n- Be precise with the risk score calculation — show your math in the factor breakdown.\n"; + +export const REPORT_TEMPLATE = "Output your analysis using exactly this structure. Fill in all sections. 
If a section has no findings, write \"None\" under it.\n\n# PR Impact Report\n\n## Summary\n- **Risk Score**: {score}/100 ({level})\n- **Files Changed**: {total} ({source} source, {test} test, {doc} doc, {config} config, {other} other)\n- **Total Lines Changed**: {additions} additions, {deletions} deletions\n- **Breaking Changes**: {count} ({high} high, {medium} medium, {low} low)\n- **Test Coverage**: {ratio}% of changed source files have corresponding test updates\n- **Stale Doc References**: {count}\n- **Impact Breadth**: {direct} directly changed, {indirect} indirectly affected\n\n## Breaking Changes\n\n| File | Type | Symbol | Before | After | Severity | Consumers |\n|------|------|--------|--------|-------|----------|-----------|\n| {filePath} | {removed_export/changed_signature/changed_type/renamed_export} | {symbolName} | {before signature/definition} | {after signature/definition or \"removed\"} | {high/medium/low} | {comma-separated consumer file paths} |\n\n## Test Coverage Gaps\n\n| Source File | Expected Test File | Test Exists | Test Updated |\n|-------------|-------------------|-------------|--------------|\n| {sourceFile} | {testFile} | {yes/no} | {yes/no} |\n\n## Stale Documentation\n\n| Doc File | Line | Reference | Reason |\n|----------|------|-----------|--------|\n| {docFile} | {lineNumber} | {reference text} | {why it's stale} |\n\n## Impact Graph\n\n### Directly Changed Files\n- {filePath} ({additions}+, {deletions}-)\n\n### Indirectly Affected Files\n- {filePath} — imported by {consumer}, which is directly changed\n\n## Risk Factor Breakdown\n\n| Factor | Score | Weight | Weighted | Details |\n|--------|-------|--------|----------|---------|\n| Breaking changes | {0-100} | 0.30 | {score*0.30} | {description} |\n| Untested changes | {0-100} | 0.25 | {score*0.25} | {coverageRatio}% coverage |\n| Diff size | {0-100} | 0.15 | {score*0.15} | {totalLines} total lines changed |\n| Stale documentation | {0-100} | 0.10 | {score*0.10} | {count} stale 
references |\n| Config file changes | {0-100} | 0.10 | {score*0.10} | {description} |\n| Impact breadth | {0-100} | 0.10 | {score*0.10} | {count} indirectly affected files |\n| **Total** | | **1.00** | **{total}** | |\n\n## Recommendations\n\nBased on the analysis above, here are the recommended actions before merging:\n\n1. {actionable recommendation with specific file/symbol references}\n2. {actionable recommendation}\n3. {actionable recommendation}\n"; diff --git a/packages/action/src/index.ts b/packages/action/src/index.ts new file mode 100644 index 0000000..1348212 --- /dev/null +++ b/packages/action/src/index.ts @@ -0,0 +1,63 @@ +import * as core from '@actions/core'; +import * as github from '@actions/github'; +import { runAnalysis } from './client.js'; +import { postOrUpdateComment } from './comment.js'; + +async function main() { + const apiKey = core.getInput('anthropic-api-key', { required: true }); + const baseBranch = core.getInput('base-branch') || 'main'; + const model = core.getInput('model') || 'claude-sonnet-4-5-20250929'; + const threshold = core.getInput('threshold'); + const githubToken = core.getInput('github-token') || process.env.GITHUB_TOKEN || ''; + + const repoPath = process.cwd(); + + core.info(`Analyzing PR: ${baseBranch}...HEAD`); + core.info(`Model: ${model}`); + + const report = await runAnalysis({ + apiKey, + repoPath, + baseBranch, + headBranch: 'HEAD', + model, + }); + + // Extract risk score from report + const scoreMatch = report.match(/\*\*Risk Score\*\*:\s*(\d+)\/100\s*\((\w+)\)/); + const riskScore = scoreMatch ? parseInt(scoreMatch[1], 10) : -1; + const riskLevel = scoreMatch ? scoreMatch[2] : 'unknown'; + + // Set outputs + core.setOutput('risk-score', String(riskScore)); + core.setOutput('risk-level', riskLevel); + core.setOutput('report', report); + + if (riskScore === -1) { + core.warning('Could not parse risk score from report. 
Skipping threshold check.');
+  } else {
+    core.info(`Risk Score: ${riskScore}/100 (${riskLevel})`);
+  }
+
+  // Post PR comment if in a PR context
+  const prNumber = github.context.payload.pull_request?.number;
+  if (prNumber && githubToken) {
+    const repo = `${github.context.repo.owner}/${github.context.repo.repo}`;
+    const commentUrl = await postOrUpdateComment({
+      token: githubToken,
+      repo,
+      prNumber,
+      body: report,
+    });
+    core.info(`Posted PR comment: ${commentUrl}`);
+  }
+
+  // Threshold gate — only check if we successfully parsed a score
+  if (threshold && riskScore !== -1 && riskScore >= parseInt(threshold, 10)) {
+    core.setFailed(`Risk score ${riskScore} exceeds threshold ${threshold}`);
+  }
+}
+
+main().catch((error) => {
+  core.setFailed(error instanceof Error ? error.message : String(error));
+});
diff --git a/packages/action/src/tools.ts b/packages/action/src/tools.ts
new file mode 100644
index 0000000..8cde8bc
--- /dev/null
+++ b/packages/action/src/tools.ts
@@ -0,0 +1,39 @@
+import {
+  gitDiff,
+  readFileAtRef,
+  listChangedFiles,
+  searchCode,
+  findImporters,
+  listTestFiles,
+} from '@pr-impact/tools-core';
+
+export async function executeTool(name: string, input: Record<string, unknown>): Promise<string> {
+  switch (name) {
+    case 'git_diff': {
+      const result = await gitDiff(input as Parameters<typeof gitDiff>[0]);
+      return result.diff;
+    }
+    case 'read_file_at_ref': {
+      const result = await readFileAtRef(input as Parameters<typeof readFileAtRef>[0]);
+      return result.content;
+    }
+    case 'list_changed_files': {
+      const result = await listChangedFiles(input as Parameters<typeof listChangedFiles>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'search_code': {
+      const result = await searchCode(input as Parameters<typeof searchCode>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'find_importers': {
+      const result = await findImporters(input as Parameters<typeof findImporters>[0]);
+      return JSON.stringify(result, null, 2);
+    }
+    case 'list_test_files': {
+      const result = await listTestFiles(input as Parameters<typeof listTestFiles>[0]);
+      return JSON.stringify(result, null,
2); + } + default: + throw new Error(`Unknown tool: ${name}`); + } +} diff --git a/packages/cli/tsconfig.json b/packages/action/tsconfig.json similarity index 74% rename from packages/cli/tsconfig.json rename to packages/action/tsconfig.json index 2def9e0..57749d0 100644 --- a/packages/cli/tsconfig.json +++ b/packages/action/tsconfig.json @@ -5,5 +5,5 @@ "rootDir": "./src" }, "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] + "exclude": ["node_modules", "dist", "__tests__"] } diff --git a/packages/mcp-server/tsup.config.ts b/packages/action/tsup.config.ts similarity index 66% rename from packages/mcp-server/tsup.config.ts rename to packages/action/tsup.config.ts index 19e0901..8803e59 100644 --- a/packages/mcp-server/tsup.config.ts +++ b/packages/action/tsup.config.ts @@ -2,10 +2,8 @@ import { defineConfig } from 'tsup'; export default defineConfig({ entry: ['src/index.ts'], - format: ['esm'], + format: ['cjs'], clean: true, sourcemap: true, - banner: { - js: '#!/usr/bin/env node', - }, + noExternal: [/.*/], }); diff --git a/packages/cli/vitest.config.ts b/packages/action/vitest.config.ts similarity index 100% rename from packages/cli/vitest.config.ts rename to packages/action/vitest.config.ts diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md deleted file mode 100644 index 7e0cdcc..0000000 --- a/packages/cli/CHANGELOG.md +++ /dev/null @@ -1,28 +0,0 @@ -# @pr-impact/cli - -## 0.2.1 - -### Patch Changes - -- 047e429: Add consumer-facing adoption guides: getting-started, programmatic API, configuration guide, troubleshooting, and CONTRIBUTING.md. Expand CI integration docs with GitLab CI, CircleCI, and Jenkins examples. -- Updated dependencies [047e429] - - @pr-impact/core@0.2.1 - -## 0.2.0 - -### Minor Changes - -- b31721c: Initial release of pr-impact — static analysis for pull requests. 
- - - Breaking change detection (removed exports, changed signatures, renamed exports) - - Import-dependency impact graph with blast radius mapping - - Test coverage gap analysis - - Documentation staleness checking - - Weighted risk scoring (6 factors, 0-100 scale) - - CLI with analyze, breaking, risk, impact, and comment commands - - MCP server exposing all analysis tools to AI assistants - -### Patch Changes - -- Updated dependencies [b31721c] - - @pr-impact/core@0.2.0 diff --git a/packages/cli/CLAUDE.md b/packages/cli/CLAUDE.md deleted file mode 100644 index 39e8c3e..0000000 --- a/packages/cli/CLAUDE.md +++ /dev/null @@ -1,43 +0,0 @@ -# CLAUDE.md -- @pr-impact/cli - -## What this package does - -Commander-based CLI for pr-impact. Binary is called `pri`. Depends on `@pr-impact/core` for all analysis logic. - -## Quick commands - -```bash -pnpm build # Build with tsup -node dist/index.js analyze # Run locally after build -``` - -## Source layout - -``` -src/ - index.ts CLI entry point (commander program) - commands/ - analyze.ts Full analysis (md/json output, file output) - breaking.ts Breaking changes only (severity filter, exit code 1) - risk.ts Risk score (threshold gate, exit code 1) - impact.ts Impact graph (text/json/dot output) - comment.ts Post/update PR comment on GitHub - github/ - ci-env.ts Auto-detect PR number/repo from CI env vars - comment-poster.ts Create/update PR comments via GitHub API (native fetch) -``` - -## Key conventions - -- ESM only. Use `.js` extensions in all import paths. -- All analysis logic comes from `@pr-impact/core` -- CLI only handles I/O, formatting, and exit codes. -- Commander's `.action()` handler types `opts` as `any` -- this is expected. -- Exit code `1` = threshold exceeded (breaking changes found, risk too high). Exit code `2` = execution error. -- CI environment auto-detection supports GitHub Actions, GitLab CI, and CircleCI. 
- -## Dependencies - -- `commander` -- CLI argument parsing -- `chalk` -- terminal colors -- `ora` -- spinners -- `@pr-impact/core` -- analysis engine diff --git a/packages/cli/README.md b/packages/cli/README.md deleted file mode 100644 index 3705709..0000000 --- a/packages/cli/README.md +++ /dev/null @@ -1,142 +0,0 @@ -# @pr-impact/cli - -Command-line interface for pr-impact -- analyze PRs for breaking changes, risk, and impact from your terminal or CI pipeline. - -## Install - -```bash -npm install -g @pr-impact/cli -``` - -The CLI binary is called **`pri`**. - -## Commands - -### `pri analyze` - -Run the full PR impact analysis -- breaking changes, test coverage, doc staleness, impact graph, and risk score. - -```bash -pri analyze [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `--format ` | Output format: `md` or `json` | `md` | -| `--output ` | Write report to file instead of stdout | -- | -| `--repo ` | Path to git repository | cwd | -| `--no-breaking` | Skip breaking change detection | -- | -| `--no-coverage` | Skip test coverage analysis | -- | -| `--no-docs` | Skip documentation staleness check | -- | - -```bash -pri analyze -pri analyze main HEAD --format json --output report.json -pri analyze --no-breaking --no-docs -``` - -### `pri breaking` - -Detect breaking API changes. Exits with code 1 if any breaking changes are found at or above the specified severity. - -```bash -pri breaking [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `--severity ` | Minimum severity: `low`, `medium`, `high` | `low` | -| `--format ` | Output format: `md` or `json` | `md` | -| `--repo ` | Path to git repository | cwd | - -```bash -pri breaking -pri breaking --severity high -pri breaking --severity medium # CI gate -``` - -### `pri risk` - -Calculate and display the weighted risk score with a full factor breakdown. 
- -```bash -pri risk [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `--threshold ` | Fail (exit 1) if risk score >= this value | -- | -| `--format ` | Output format: `text` or `json` | `text` | -| `--repo ` | Path to git repository | cwd | - -```bash -pri risk -pri risk --threshold 60 # CI gate -pri risk --format json -``` - -### `pri impact` - -Build and display the import-dependency impact graph. - -```bash -pri impact [file] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `--depth ` | Maximum dependency traversal depth | `3` | -| `--format ` | Output format: `text`, `json`, or `dot` | `text` | -| `--repo ` | Path to git repository | cwd | - -```bash -pri impact -pri impact src/auth/login.ts -pri impact --format dot > impact.dot -``` - -### `pri comment` - -Run analysis and post/update a PR comment on GitHub. Auto-detects PR context from CI environment variables (GitHub Actions, GitLab CI, CircleCI). - -```bash -pri comment [base] [head] [options] -``` - -| Option | Description | Default | -|---|---|---| -| `--pr ` | PR number | auto-detect from CI | -| `--github-repo ` | GitHub repository | auto-detect from CI | -| `--token ` | GitHub token | `GITHUB_TOKEN` env var | -| `--repo ` | Path to git repository | cwd | - -```bash -pri comment -pri comment --pr 42 --github-repo owner/repo --token $GITHUB_TOKEN -``` - -## CI Integration - -Use `pri breaking` and `pri risk` as quality gates: - -```yaml -# GitHub Actions example -- name: Check for breaking changes - run: pri breaking --severity medium - -- name: Check risk threshold - run: pri risk --threshold 60 -``` - -Exit codes: -- `0` -- success / no issues found -- `1` -- threshold exceeded (breaking changes found, risk too high) -- `2` -- execution error - -## Requirements - -- Node.js >= 20 - -## License - -[MIT](../../LICENSE) diff --git a/packages/cli/__tests__/cli-registration.test.ts b/packages/cli/__tests__/cli-registration.test.ts deleted file mode 
100644 index 96660d3..0000000 --- a/packages/cli/__tests__/cli-registration.test.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { describe, it, expect, vi } from 'vitest'; - -// ── Track command registrations ── -const registeredCommands: string[] = []; - -// ── Mock command registration modules ── -vi.mock('../src/commands/analyze.js', () => ({ - registerAnalyzeCommand: () => { registeredCommands.push('analyze'); }, -})); -vi.mock('../src/commands/breaking.js', () => ({ - registerBreakingCommand: () => { registeredCommands.push('breaking'); }, -})); -vi.mock('../src/commands/risk.js', () => ({ - registerRiskCommand: () => { registeredCommands.push('risk'); }, -})); -vi.mock('../src/commands/impact.js', () => ({ - registerImpactCommand: () => { registeredCommands.push('impact'); }, -})); -vi.mock('../src/commands/comment.js', () => ({ - registerCommentCommand: () => { registeredCommands.push('comment'); }, -})); - -// ── Mock commander to avoid calling parse() ── -const mockProgram = { - name: vi.fn().mockReturnThis(), - description: vi.fn().mockReturnThis(), - version: vi.fn().mockReturnThis(), - parse: vi.fn(), -}; -vi.mock('commander', () => ({ - Command: vi.fn().mockImplementation(() => mockProgram), -})); - -// ── Mock createRequire for version reading ── -vi.mock('module', () => ({ - createRequire: () => () => ({ version: '0.1.0' }), -})); - -describe('CLI registration', () => { - it('registers all five commands and configures the program', async () => { - // Dynamically import to trigger module-level code - await import('../src/index.js'); - - expect(mockProgram.name).toHaveBeenCalledWith('pri'); - expect(mockProgram.description).toHaveBeenCalledWith( - expect.stringContaining('PR Impact Analyzer'), - ); - expect(mockProgram.version).toHaveBeenCalledWith('0.1.0'); - - expect(registeredCommands).toContain('analyze'); - expect(registeredCommands).toContain('breaking'); - expect(registeredCommands).toContain('risk'); - expect(registeredCommands).toContain('impact'); 
- expect(registeredCommands).toContain('comment'); - expect(registeredCommands).toHaveLength(5); - - expect(mockProgram.parse).toHaveBeenCalled(); - }); -}); diff --git a/packages/cli/__tests__/commands/analyze.test.ts b/packages/cli/__tests__/commands/analyze.test.ts deleted file mode 100644 index 8f5ea21..0000000 --- a/packages/cli/__tests__/commands/analyze.test.ts +++ /dev/null @@ -1,230 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { Command } from 'commander'; -import { registerAnalyzeCommand } from '../../src/commands/analyze.js'; - -// ── Mock @pr-impact/core ── -const mockAnalyzePR = vi.fn(); -const mockFormatMarkdown = vi.fn(); -const mockFormatJSON = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - analyzePR: (...args: unknown[]) => mockAnalyzePR(...args), - formatMarkdown: (...args: unknown[]) => mockFormatMarkdown(...args), - formatJSON: (...args: unknown[]) => mockFormatJSON(...args), -})); - -// ── Mock ora ── -const mockStart = vi.fn(); -const mockStop = vi.fn(); -const mockFail = vi.fn(); -vi.mock('ora', () => ({ - default: () => ({ - start: () => { - mockStart(); - return { stop: mockStop, fail: mockFail }; - }, - }), -})); - -// ── Mock chalk (passthrough) ── -vi.mock('chalk', () => ({ - default: { - green: (s: string) => s, - red: (s: string) => s, - }, -})); - -// ── Mock fs/promises ── -const mockWriteFile = vi.fn(); -vi.mock('fs/promises', () => ({ - writeFile: (...args: unknown[]) => mockWriteFile(...args), -})); - -// ── Helpers ── -function makePRAnalysis() { - return { - repoPath: '/repo', - baseBranch: 'main', - headBranch: 'HEAD', - changedFiles: [], - breakingChanges: [], - testCoverage: { changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 1, gaps: [] }, - docStaleness: { staleReferences: [], checkedFiles: [] }, - impactGraph: { directlyChanged: [], indirectlyAffected: [], edges: [] }, - riskScore: { score: 10, level: 'low' as const, factors: [] }, - summary: 'Test summary', - }; -} - 
-function createProgram(): Command { - const program = new Command(); - program.exitOverride(); // prevent process.exit - registerAnalyzeCommand(program); - return program; -} - -describe('analyze command', () => { - beforeEach(() => { - vi.clearAllMocks(); - mockAnalyzePR.mockResolvedValue(makePRAnalysis()); - mockFormatMarkdown.mockReturnValue('# Markdown Report'); - mockFormatJSON.mockReturnValue('{"json": true}'); - mockWriteFile.mockResolvedValue(undefined); - }); - - it('calls analyzePR with default options when no arguments given', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze']); - - expect(mockAnalyzePR).toHaveBeenCalledTimes(1); - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs).toHaveProperty('repoPath'); - expect(callArgs.baseBranch).toBeUndefined(); - expect(callArgs.headBranch).toBeUndefined(); - expect(callArgs.skipBreaking).toBeFalsy(); - expect(callArgs.skipCoverage).toBeFalsy(); - expect(callArgs.skipDocs).toBeFalsy(); - - consoleSpy.mockRestore(); - }); - - it('passes base and head branch arguments to analyzePR', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze', 'develop', 'feature-branch']); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.baseBranch).toBe('develop'); - expect(callArgs.headBranch).toBe('feature-branch'); - - consoleSpy.mockRestore(); - }); - - it('uses markdown format by default', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze']); - - expect(mockFormatMarkdown).toHaveBeenCalledWith(makePRAnalysis()); - expect(mockFormatJSON).not.toHaveBeenCalled(); - expect(consoleSpy).toHaveBeenCalledWith('# 
Markdown Report'); - - consoleSpy.mockRestore(); - }); - - it('uses JSON format when --format json is specified', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze', '--format', 'json']); - - expect(mockFormatJSON).toHaveBeenCalledWith(makePRAnalysis()); - expect(mockFormatMarkdown).not.toHaveBeenCalled(); - expect(consoleSpy).toHaveBeenCalledWith('{"json": true}'); - - consoleSpy.mockRestore(); - }); - - it('writes output to file when --output is specified', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze', '--output', 'report.md']); - - expect(mockWriteFile).toHaveBeenCalledTimes(1); - const [filePath, content] = mockWriteFile.mock.calls[0]; - expect(filePath).toContain('report.md'); - expect(content).toBe('# Markdown Report'); - expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('report.md')); - - consoleSpy.mockRestore(); - }); - - it('passes skip flags when --no-breaking, --no-coverage, --no-docs are used', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync([ - 'node', 'pri', 'analyze', - '--no-breaking', '--no-coverage', '--no-docs', - ]); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.skipBreaking).toBe(true); - expect(callArgs.skipCoverage).toBe(true); - expect(callArgs.skipDocs).toBe(true); - - consoleSpy.mockRestore(); - }); - - it('starts and stops the spinner', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze']); - - expect(mockStart).toHaveBeenCalledTimes(1); - expect(mockStop).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); 
- }); - - it('calls spinner.fail and prints error on analysis failure', async () => { - mockAnalyzePR.mockRejectedValue(new Error('git not found')); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'analyze']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Analysis failed'); - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('git not found')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('handles non-Error thrown values', async () => { - mockAnalyzePR.mockRejectedValue('string error'); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'analyze']), - ).rejects.toThrow('process.exit'); - - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('string error')); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('passes --repo option to analyzePR as resolved repoPath', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'analyze', '--repo', '/custom/repo']); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.repoPath).toBe('/custom/repo'); - - consoleSpy.mockRestore(); - }); -}); diff --git 
a/packages/cli/__tests__/commands/breaking.test.ts b/packages/cli/__tests__/commands/breaking.test.ts deleted file mode 100644 index 771f338..0000000 --- a/packages/cli/__tests__/commands/breaking.test.ts +++ /dev/null @@ -1,426 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { Command } from 'commander'; -import { registerBreakingCommand } from '../../src/commands/breaking.js'; -import type { BreakingChange, ChangedFile } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockParseDiff = vi.fn(); -const mockDetectBreakingChanges = vi.fn(); -const mockResolveDefaultBaseBranch = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - parseDiff: (...args: unknown[]) => mockParseDiff(...args), - detectBreakingChanges: (...args: unknown[]) => mockDetectBreakingChanges(...args), - resolveDefaultBaseBranch: (...args: unknown[]) => mockResolveDefaultBaseBranch(...args), -})); - -// ── Mock ora ── -const mockStart = vi.fn(); -const mockStop = vi.fn(); -const mockFail = vi.fn(); -vi.mock('ora', () => ({ - default: () => ({ - start: () => { - mockStart(); - return { stop: mockStop, fail: mockFail }; - }, - }), -})); - -// ── Mock chalk (passthrough) ── -vi.mock('chalk', () => { - const passthrough = (s: string) => s; - const fn = Object.assign(passthrough, { - bold: passthrough, - dim: passthrough, - red: passthrough, - green: passthrough, - yellow: passthrough, - }); - return { default: fn }; -}); - -// ── Helpers ── -function makeChangedFile(overrides: Partial = {}): ChangedFile { - return { - path: 'src/utils.ts', - status: 'modified', - additions: 10, - deletions: 2, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -function makeBreakingChange(overrides: Partial = {}): BreakingChange { - return { - filePath: 'src/utils.ts', - type: 'removed_export', - symbolName: 'helper', - before: 'function helper()', - after: null, - severity: 'high', - consumers: ['src/app.ts'], - ...overrides, - }; -} - -function 
createProgram(): Command { - const program = new Command(); - program.exitOverride(); - registerBreakingCommand(program); - return program; -} - -describe('breaking command', () => { - beforeEach(() => { - vi.clearAllMocks(); - mockResolveDefaultBaseBranch.mockResolvedValue('main'); - mockParseDiff.mockResolvedValue([makeChangedFile()]); - mockDetectBreakingChanges.mockResolvedValue([]); - }); - - it('resolves default base branch when none is provided', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking']); - - expect(mockResolveDefaultBaseBranch).toHaveBeenCalledTimes(1); - expect(mockParseDiff).toHaveBeenCalledWith( - expect.any(String), - 'main', - 'HEAD', - ); - - consoleSpy.mockRestore(); - }); - - it('uses provided base and head branches', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking', 'develop', 'feature']); - - expect(mockResolveDefaultBaseBranch).not.toHaveBeenCalled(); - expect(mockParseDiff).toHaveBeenCalledWith( - expect.any(String), - 'develop', - 'feature', - ); - - consoleSpy.mockRestore(); - }); - - it('calls parseDiff and detectBreakingChanges with correct arguments', async () => { - const changedFiles = [makeChangedFile()]; - mockParseDiff.mockResolvedValue(changedFiles); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']); - - expect(mockParseDiff).toHaveBeenCalledWith(expect.any(String), 'main', 'HEAD'); - expect(mockDetectBreakingChanges).toHaveBeenCalledWith( - expect.any(String), - 'main', - 'HEAD', - changedFiles, - ); - - consoleSpy.mockRestore(); - }); - - it('prints no-breaking-changes message when none found', async () => { - 
mockDetectBreakingChanges.mockResolvedValue([]); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']); - - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('No breaking changes detected'), - ); - - consoleSpy.mockRestore(); - }); - - it('exits with code 1 when breaking changes are found', async () => { - mockDetectBreakingChanges.mockResolvedValue([makeBreakingChange()]); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']), - ).rejects.toThrow('process.exit'); - - expect(exitSpy).toHaveBeenCalledWith(1); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('filters by severity when --severity is provided', async () => { - const changes: BreakingChange[] = [ - makeBreakingChange({ severity: 'low', symbolName: 'lowFn' }), - makeBreakingChange({ severity: 'medium', symbolName: 'medFn' }), - makeBreakingChange({ severity: 'high', symbolName: 'highFn' }), - ]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--severity', 'high']), - ).rejects.toThrow('process.exit'); - - // Only high severity should be in the output - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('highFn'); - expect(output).not.toContain('lowFn'); - expect(output).not.toContain('medFn'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - 
}); - - it('includes medium and high severity when --severity medium is used', async () => { - const changes: BreakingChange[] = [ - makeBreakingChange({ severity: 'low', symbolName: 'lowFn' }), - makeBreakingChange({ severity: 'medium', symbolName: 'medFn' }), - makeBreakingChange({ severity: 'high', symbolName: 'highFn' }), - ]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--severity', 'medium']), - ).rejects.toThrow('process.exit'); - - // Medium and high severity should be in the output - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('medFn'); - expect(output).toContain('highFn'); - expect(output).not.toContain('lowFn'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('outputs JSON when --format json is specified', async () => { - const changes = [makeBreakingChange()]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--format', 'json']), - ).rejects.toThrow('process.exit'); - - const output = consoleSpy.mock.calls[0][0] as string; - const parsed = JSON.parse(output); - expect(parsed).toHaveLength(1); - expect(parsed[0].symbolName).toBe('helper'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('outputs markdown table when --format md is specified', async () => { - const changes = [makeBreakingChange()]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - 
const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--format', 'md']), - ).rejects.toThrow('process.exit'); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('# Breaking Changes'); - expect(output).toContain('| File | Symbol |'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('starts and stops the spinner on success', async () => { - mockDetectBreakingChanges.mockResolvedValue([]); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']); - - expect(mockStart).toHaveBeenCalledTimes(1); - expect(mockStop).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); - }); - - it('calls spinner.fail and exits with code 2 on error', async () => { - mockParseDiff.mockRejectedValue(new Error('parse error')); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Breaking change detection failed'); - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('parse error')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('passes the --repo option as resolved repo path', async () => { - const program = createProgram(); - const 
consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'breaking', '--repo', '/my/repo', 'main', 'HEAD']); - - expect(mockParseDiff).toHaveBeenCalledWith('/my/repo', 'main', 'HEAD'); - - consoleSpy.mockRestore(); - }); - - it('outputs text format when --format is neither md nor json', async () => { - const changes = [makeBreakingChange({ - severity: 'high', - symbolName: 'removedFn', - before: 'function removedFn()', - after: null, - consumers: ['src/app.ts', 'src/main.ts'], - })]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--format', 'text']), - ).rejects.toThrow('process.exit'); - - const output = consoleSpy.mock.calls[0][0] as string; - // formatText includes symbol name, severity, file path, before/after, and consumers - expect(output).toContain('removedFn'); - expect(output).toContain('high'); - expect(output).toContain('src/utils.ts'); - expect(output).toContain('- function removedFn()'); - expect(output).toContain('Consumers:'); - expect(output).toContain('src/app.ts'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('text format includes after line when present', async () => { - const changes = [makeBreakingChange({ - severity: 'medium', - symbolName: 'changedFn', - type: 'changed_signature', - before: 'function changedFn(a: string)', - after: 'function changedFn(a: string, b: number)', - consumers: [], - })]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new 
Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--format', 'text']), - ).rejects.toThrow('process.exit'); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('+ function changedFn(a: string, b: number)'); - expect(output).toContain('medium'); - // No consumers - expect(output).not.toContain('Consumers:'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('text format handles singular breaking change count', async () => { - const changes = [makeBreakingChange({ severity: 'low' })]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD', '--format', 'text']), - ).rejects.toThrow('process.exit'); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Found 1 breaking change:'); - // low severity color - expect(output).toContain('low'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('handles error that is not an Error instance', async () => { - mockParseDiff.mockRejectedValue('string error'); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'breaking', 'main', 'HEAD']), - ).rejects.toThrow('process.exit'); - - expect(consoleErrorSpy).toHaveBeenCalledWith('string error'); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - 
exitSpy.mockRestore(); - }); -}); diff --git a/packages/cli/__tests__/commands/comment.test.ts b/packages/cli/__tests__/commands/comment.test.ts deleted file mode 100644 index bb6ed6c..0000000 --- a/packages/cli/__tests__/commands/comment.test.ts +++ /dev/null @@ -1,268 +0,0 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { Command } from 'commander'; -import { registerCommentCommand } from '../../src/commands/comment.js'; - -// ── Mock @pr-impact/core ── -const mockAnalyzePR = vi.fn(); -const mockFormatMarkdown = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - analyzePR: (...args: unknown[]) => mockAnalyzePR(...args), - formatMarkdown: (...args: unknown[]) => mockFormatMarkdown(...args), -})); - -// ── Mock ora ── -const mockStop = vi.fn(); -const mockFail = vi.fn(); -const mockSucceed = vi.fn(); -const mockSpinner = { - stop: mockStop, - fail: mockFail, - succeed: mockSucceed, - text: '', -}; -vi.mock('ora', () => ({ - default: () => ({ - start: () => { - return mockSpinner; - }, - }), -})); - -// ── Mock chalk (passthrough) ── -vi.mock('chalk', () => ({ - default: { - green: (s: string) => s, - red: (s: string) => s, - }, -})); - -// ── Mock ci-env ── -const mockDetectCIEnv = vi.fn(); -vi.mock('../../src/github/ci-env.js', () => ({ - detectCIEnv: () => mockDetectCIEnv(), -})); - -// ── Mock comment-poster ── -const mockPostOrUpdateComment = vi.fn(); -vi.mock('../../src/github/comment-poster.js', () => ({ - postOrUpdateComment: (...args: unknown[]) => mockPostOrUpdateComment(...args), -})); - -// ── Helpers ── -function makePRAnalysis() { - return { - repoPath: '/repo', - baseBranch: 'main', - headBranch: 'HEAD', - changedFiles: [], - breakingChanges: [], - testCoverage: { changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 1, gaps: [] }, - docStaleness: { staleReferences: [], checkedFiles: [] }, - impactGraph: { directlyChanged: [], indirectlyAffected: [], edges: [] }, - riskScore: { score: 10, level: 'low' 
as const, factors: [] }, - summary: 'Test summary', - }; -} - -function createProgram(): Command { - const program = new Command(); - program.exitOverride(); - registerCommentCommand(program); - return program; -} - -describe('comment command', () => { - const originalEnv = process.env; - - beforeEach(() => { - vi.clearAllMocks(); - process.env = { ...originalEnv, GITHUB_TOKEN: 'ghp_test' }; - mockAnalyzePR.mockResolvedValue(makePRAnalysis()); - mockFormatMarkdown.mockReturnValue('# Report'); - mockPostOrUpdateComment.mockResolvedValue('https://github.com/owner/repo/pull/42#issuecomment-1'); - mockDetectCIEnv.mockReturnValue({ prNumber: '42', repo: 'owner/repo' }); - }); - - afterEach(() => { - process.env = originalEnv; - }); - - it('runs analysis and posts comment with auto-detected CI env', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'comment']); - - expect(mockAnalyzePR).toHaveBeenCalledTimes(1); - expect(mockFormatMarkdown).toHaveBeenCalledWith(makePRAnalysis()); - expect(mockPostOrUpdateComment).toHaveBeenCalledWith({ - token: 'ghp_test', - repo: 'owner/repo', - prNumber: '42', - body: '# Report', - }); - expect(mockSucceed).toHaveBeenCalledWith('Comment posted'); - expect(consoleSpy).toHaveBeenCalledWith('https://github.com/owner/repo/pull/42#issuecomment-1'); - - consoleSpy.mockRestore(); - }); - - it('uses explicit --pr and --github-repo over CI env', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync([ - 'node', 'pri', 'comment', - '--pr', '99', - '--github-repo', 'other/repo', - ]); - - expect(mockPostOrUpdateComment).toHaveBeenCalledWith( - expect.objectContaining({ - prNumber: '99', - repo: 'other/repo', - }), - ); - // Should not even need CI env detection since both are explicit - 
expect(mockDetectCIEnv).not.toHaveBeenCalled(); - - consoleSpy.mockRestore(); - }); - - it('uses --token over GITHUB_TOKEN env var', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync([ - 'node', 'pri', 'comment', - '--token', 'explicit_token', - ]); - - expect(mockPostOrUpdateComment).toHaveBeenCalledWith( - expect.objectContaining({ - token: 'explicit_token', - }), - ); - - consoleSpy.mockRestore(); - }); - - it('exits with code 2 when no token is available', async () => { - delete process.env.GITHUB_TOKEN; - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'comment']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Missing GitHub token'); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('exits with code 2 when PR number cannot be determined', async () => { - mockDetectCIEnv.mockReturnValue(null); - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'comment']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Cannot determine PR number'); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('exits with code 2 
when GitHub repo cannot be determined', async () => { - mockDetectCIEnv.mockReturnValue({ prNumber: '42', repo: undefined }); - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'comment', '--pr', '42']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Cannot determine GitHub repository'); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('passes base and head arguments to analyzePR', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'comment', 'develop', 'feature']); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.baseBranch).toBe('develop'); - expect(callArgs.headBranch).toBe('feature'); - - consoleSpy.mockRestore(); - }); - - it('handles analysis error gracefully', async () => { - mockAnalyzePR.mockRejectedValue(new Error('git failed')); - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'comment']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Failed to post comment'); - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('git failed')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - 
consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('handles postOrUpdateComment error gracefully', async () => { - mockPostOrUpdateComment.mockRejectedValue(new Error('API rate limited')); - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'comment']), - ).rejects.toThrow('process.exit'); - - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('API rate limited')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); -}); diff --git a/packages/cli/__tests__/commands/impact.test.ts b/packages/cli/__tests__/commands/impact.test.ts deleted file mode 100644 index 943b560..0000000 --- a/packages/cli/__tests__/commands/impact.test.ts +++ /dev/null @@ -1,269 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { Command } from 'commander'; -import { registerImpactCommand } from '../../src/commands/impact.js'; -import type { ImpactGraph } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockParseDiff = vi.fn(); -const mockBuildImpactGraph = vi.fn(); -const mockResolveDefaultBaseBranch = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - parseDiff: (...args: unknown[]) => mockParseDiff(...args), - buildImpactGraph: (...args: unknown[]) => mockBuildImpactGraph(...args), - resolveDefaultBaseBranch: (...args: unknown[]) => mockResolveDefaultBaseBranch(...args), -})); - -// ── Mock ora ── -const mockStart = vi.fn(); -const mockStop = vi.fn(); -const mockFail = vi.fn(); -vi.mock('ora', () => ({ - default: () => ({ - start: () => { - mockStart(); - return { stop: mockStop, fail: mockFail }; - }, - }), -})); - -// ── Mock 
chalk (passthrough) ── -vi.mock('chalk', () => { - const passthrough = (s: string) => s; - const fn = Object.assign(passthrough, { - bold: passthrough, - dim: passthrough, - red: passthrough, - green: passthrough, - yellow: passthrough, - cyan: passthrough, - }); - return { default: fn }; -}); - -// ── Helpers ── -function makeGraph(overrides: Partial = {}): ImpactGraph { - return { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [{ from: 'src/b.ts', to: 'src/a.ts', type: 'imports' as const }], - ...overrides, - }; -} - -function createProgram(): Command { - const program = new Command(); - program.exitOverride(); - registerImpactCommand(program); - return program; -} - -describe('impact command', () => { - beforeEach(() => { - vi.clearAllMocks(); - mockResolveDefaultBaseBranch.mockResolvedValue('main'); - mockParseDiff.mockResolvedValue([ - { - path: 'src/a.ts', - status: 'modified', - additions: 5, - deletions: 2, - language: 'typescript', - category: 'source', - }, - ]); - mockBuildImpactGraph.mockResolvedValue(makeGraph()); - }); - - it('resolves default base branch and parses diff when no file argument is given', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact']); - - expect(mockResolveDefaultBaseBranch).toHaveBeenCalledTimes(1); - expect(mockParseDiff).toHaveBeenCalledWith(expect.any(String), 'main', 'HEAD'); - expect(mockBuildImpactGraph).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); - }); - - it('creates synthetic ChangedFile when a specific file is provided', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact', 'src/foo.ts']); - - // Should NOT call parseDiff or resolveDefaultBaseBranch - expect(mockResolveDefaultBaseBranch).not.toHaveBeenCalled(); - 
expect(mockParseDiff).not.toHaveBeenCalled(); - - // Should call buildImpactGraph with a synthetic ChangedFile - expect(mockBuildImpactGraph).toHaveBeenCalledWith( - expect.any(String), - [ - { - path: 'src/foo.ts', - status: 'modified', - additions: 0, - deletions: 0, - language: '', - category: 'source', - }, - ], - 3, // default depth - ); - - consoleSpy.mockRestore(); - }); - - it('passes --depth option to buildImpactGraph', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - // Commander calls parseInt(value, previousValue) where previousValue is the default (3). - // parseInt('2', 3) = 2 (valid in base 3). Use '2' to avoid the parseInt radix gotcha. - await program.parseAsync(['node', 'pri', 'impact', '--depth', '2']); - - const depthArg = mockBuildImpactGraph.mock.calls[0][2]; - expect(depthArg).toBe(2); - - consoleSpy.mockRestore(); - }); - - it('outputs text (tree) format by default', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Impact Graph'); - expect(output).toContain('src/a.ts'); - - consoleSpy.mockRestore(); - }); - - it('outputs JSON when --format json is specified', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact', '--format', 'json']); - - const output = consoleSpy.mock.calls[0][0] as string; - const parsed = JSON.parse(output); - expect(parsed).toHaveProperty('directlyChanged'); - expect(parsed).toHaveProperty('indirectlyAffected'); - expect(parsed).toHaveProperty('edges'); - - consoleSpy.mockRestore(); - }); - - it('outputs DOT format when --format dot is specified', async () => { - const program = createProgram(); - const 
consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact', '--format', 'dot']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('digraph impact {'); - expect(output).toContain('rankdir=LR;'); - - consoleSpy.mockRestore(); - }); - - it('starts and stops the spinner on success', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact']); - - expect(mockStart).toHaveBeenCalledTimes(1); - expect(mockStop).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); - }); - - it('calls spinner.fail and exits with code 2 on error', async () => { - mockBuildImpactGraph.mockRejectedValue(new Error('graph error')); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'impact']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Impact graph building failed'); - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('graph error')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('passes --repo option as the resolved repo path', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact', '--repo', '/my/repo']); - - expect(mockResolveDefaultBaseBranch).toHaveBeenCalledWith('/my/repo'); - expect(mockBuildImpactGraph).toHaveBeenCalledWith( - '/my/repo', - expect.any(Array), - expect.any(Number), - ); - - 
consoleSpy.mockRestore(); - }); - - it('handles empty impact graph', async () => { - mockBuildImpactGraph.mockResolvedValue({ - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact']); - - // Should still produce output without crashing - expect(consoleSpy).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); - }); - - it('tree output shows dependents for directly changed files', async () => { - mockBuildImpactGraph.mockResolvedValue({ - directlyChanged: ['src/a.ts', 'src/b.ts'], - indirectlyAffected: ['src/c.ts'], - edges: [ - { from: 'src/a.ts', to: 'src/c.ts', type: 'imports' as const }, - { from: 'src/a.ts', to: 'src/d.ts', type: 'imports' as const }, - ], - }); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'impact']); - - const output = consoleSpy.mock.calls[0][0] as string; - // Should display the tree with dependents under src/a.ts - expect(output).toContain('src/a.ts'); - expect(output).toContain('src/c.ts'); - expect(output).toContain('src/d.ts'); - expect(output).toContain('(imports)'); - // Should show indirectly affected section - expect(output).toContain('Indirectly Affected'); - - consoleSpy.mockRestore(); - }); -}); diff --git a/packages/cli/__tests__/commands/risk.test.ts b/packages/cli/__tests__/commands/risk.test.ts deleted file mode 100644 index d4cba74..0000000 --- a/packages/cli/__tests__/commands/risk.test.ts +++ /dev/null @@ -1,298 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { Command } from 'commander'; -import { registerRiskCommand } from '../../src/commands/risk.js'; -import type { PRAnalysis, RiskAssessment, RiskFactor } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockAnalyzePR = vi.fn(); 
-vi.mock('@pr-impact/core', () => ({ - analyzePR: (...args: unknown[]) => mockAnalyzePR(...args), -})); - -// ── Mock ora ── -const mockStart = vi.fn(); -const mockStop = vi.fn(); -const mockFail = vi.fn(); -vi.mock('ora', () => ({ - default: () => ({ - start: () => { - mockStart(); - return { stop: mockStop, fail: mockFail }; - }, - }), -})); - -// ── Mock chalk (passthrough) ── -vi.mock('chalk', () => { - const passthrough = (s: string) => s; - const fn = Object.assign(passthrough, { - bold: Object.assign(passthrough, { red: passthrough }), - dim: passthrough, - red: Object.assign(passthrough, { bold: passthrough }), - green: passthrough, - yellow: passthrough, - }); - return { default: fn }; -}); - -// ── Helpers ── -function makeRiskFactor(overrides: Partial = {}): RiskFactor { - return { - name: 'Breaking Changes', - score: 50, - weight: 0.3, - description: 'Some breaking changes detected', - details: ['removed export helper'], - ...overrides, - }; -} - -function makeRiskAssessment(overrides: Partial = {}): RiskAssessment { - return { - score: 42, - level: 'medium', - factors: [makeRiskFactor()], - ...overrides, - }; -} - -function makePRAnalysis(riskOverrides: Partial = {}): PRAnalysis { - return { - repoPath: '/repo', - baseBranch: 'main', - headBranch: 'HEAD', - changedFiles: [], - breakingChanges: [], - testCoverage: { changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 1, gaps: [] }, - docStaleness: { staleReferences: [], checkedFiles: [] }, - impactGraph: { directlyChanged: [], indirectlyAffected: [], edges: [] }, - riskScore: makeRiskAssessment(riskOverrides), - summary: 'Test summary', - }; -} - -function createProgram(): Command { - const program = new Command(); - program.exitOverride(); - registerRiskCommand(program); - return program; -} - -describe('risk command', () => { - beforeEach(() => { - vi.clearAllMocks(); - mockAnalyzePR.mockResolvedValue(makePRAnalysis()); - }); - - it('calls analyzePR with default options when no 
arguments given', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - expect(mockAnalyzePR).toHaveBeenCalledTimes(1); - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs).toHaveProperty('repoPath'); - expect(callArgs.baseBranch).toBeUndefined(); - expect(callArgs.headBranch).toBeUndefined(); - - consoleSpy.mockRestore(); - }); - - it('passes base and head branch arguments', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk', 'develop', 'feature']); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.baseBranch).toBe('develop'); - expect(callArgs.headBranch).toBe('feature'); - - consoleSpy.mockRestore(); - }); - - it('displays text format output by default', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Risk Assessment'); - expect(output).toContain('42/100'); - expect(output).toContain('MEDIUM'); - - consoleSpy.mockRestore(); - }); - - it('displays JSON format when --format json is specified', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk', '--format', 'json']); - - const output = consoleSpy.mock.calls[0][0] as string; - const parsed = JSON.parse(output); - expect(parsed.score).toBe(42); - expect(parsed.level).toBe('medium'); - expect(parsed.factors).toHaveLength(1); - - consoleSpy.mockRestore(); - }); - - it('includes factor breakdown in text output', async () => { - const program = createProgram(); - const consoleSpy = 
vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Factor Breakdown'); - expect(output).toContain('Breaking Changes'); - expect(output).toContain('Some breaking changes detected'); - expect(output).toContain('removed export helper'); - - consoleSpy.mockRestore(); - }); - - it('does not exit with code 1 when score is below threshold', async () => { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ score: 30 })); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - // Should NOT throw (no process.exit called) - await program.parseAsync(['node', 'pri', 'risk', '--threshold', '50']); - - expect(exitSpy).not.toHaveBeenCalled(); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('exits with code 1 when score meets threshold', async () => { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ score: 50, level: 'high' })); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'risk', '--threshold', '50']), - ).rejects.toThrow('process.exit'); - - expect(exitSpy).toHaveBeenCalledWith(1); - // Should print message about threshold - const allLogs = consoleSpy.mock.calls.map((c) => c[0]).join('\n'); - expect(allLogs).toContain('meets or exceeds threshold'); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('exits with code 1 when score exceeds threshold', async () => { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ score: 80, level: 'critical' })); - - const program = createProgram(); - const consoleSpy 
= vi.spyOn(console, 'log').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'risk', '--threshold', '50']), - ).rejects.toThrow('process.exit'); - - expect(exitSpy).toHaveBeenCalledWith(1); - - consoleSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('starts and stops the spinner on success', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - expect(mockStart).toHaveBeenCalledTimes(1); - expect(mockStop).toHaveBeenCalledTimes(1); - - consoleSpy.mockRestore(); - }); - - it('calls spinner.fail and exits with code 2 on error', async () => { - mockAnalyzePR.mockRejectedValue(new Error('calculation error')); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const exitSpy = vi.spyOn(process, 'exit').mockImplementation(() => { - throw new Error('process.exit'); - }); - - await expect( - program.parseAsync(['node', 'pri', 'risk']), - ).rejects.toThrow('process.exit'); - - expect(mockFail).toHaveBeenCalledWith('Risk calculation failed'); - expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('calculation error')); - expect(exitSpy).toHaveBeenCalledWith(2); - - consoleSpy.mockRestore(); - consoleErrorSpy.mockRestore(); - exitSpy.mockRestore(); - }); - - it('passes --repo option to analyzePR', async () => { - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk', '--repo', '/custom/repo']); - - const callArgs = mockAnalyzePR.mock.calls[0][0]; - expect(callArgs.repoPath).toBe('/custom/repo'); - - consoleSpy.mockRestore(); - }); - 
- it('handles risk assessment with no factors', async () => { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ factors: [] })); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Risk Assessment'); - // Should not contain Factor Breakdown section - expect(output).not.toContain('Factor Breakdown'); - - consoleSpy.mockRestore(); - }); - - it('handles factors without details', async () => { - mockAnalyzePR.mockResolvedValue( - makePRAnalysis({ - factors: [makeRiskFactor({ details: undefined })], - }), - ); - - const program = createProgram(); - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - - await program.parseAsync(['node', 'pri', 'risk']); - - const output = consoleSpy.mock.calls[0][0] as string; - expect(output).toContain('Breaking Changes'); - - consoleSpy.mockRestore(); - }); -}); diff --git a/packages/cli/__tests__/e2e-smoke.test.ts b/packages/cli/__tests__/e2e-smoke.test.ts deleted file mode 100644 index 5dd16bb..0000000 --- a/packages/cli/__tests__/e2e-smoke.test.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { execFile } from 'node:child_process'; -import { promisify } from 'node:util'; -import { resolve } from 'node:path'; -import { existsSync } from 'node:fs'; - -const execFileAsync = promisify(execFile); - -const CLI_PATH = resolve(import.meta.dirname, '..', 'dist', 'index.js'); - -/** - * Helper to run the CLI binary via `node dist/index.js`. - * Returns { stdout, stderr, exitCode }. 
- */ -async function runCli( - args: string[], - options: { cwd?: string; timeout?: number } = {}, -): Promise<{ stdout: string; stderr: string; exitCode: number }> { - const { cwd, timeout = 10_000 } = options; - try { - const { stdout, stderr } = await execFileAsync('node', [CLI_PATH, ...args], { - cwd, - timeout, - env: { ...process.env, NO_COLOR: '1', FORCE_COLOR: '0' }, - }); - return { stdout, stderr, exitCode: 0 }; - } catch (err: unknown) { - const e = err as { stdout?: string; stderr?: string; code?: number | string }; - return { - stdout: e.stdout ?? '', - stderr: e.stderr ?? '', - exitCode: typeof e.code === 'number' ? e.code : 1, - }; - } -} - -describe('CLI e2e smoke tests', () => { - it('dist/index.js exists (build prerequisite)', () => { - expect(existsSync(CLI_PATH)).toBe(true); - }); - - it('pri --help exits 0 and prints usage', async () => { - const { stdout, exitCode } = await runCli(['--help']); - - expect(exitCode).toBe(0); - expect(stdout).toContain('Usage:'); - expect(stdout).toContain('pri'); - expect(stdout).toContain('Options:'); - expect(stdout).toContain('Commands:'); - }, 10_000); - - it('pri --version exits 0 and prints a semver-like version string', async () => { - const { stdout, exitCode } = await runCli(['--version']); - - expect(exitCode).toBe(0); - // Should print something like "0.1.0" - expect(stdout.trim()).toMatch(/^\d+\.\d+\.\d+/); - }, 10_000); - - describe('subcommand --help', () => { - const subcommands = ['analyze', 'breaking', 'risk', 'impact', 'comment']; - - for (const cmd of subcommands) { - it(`pri ${cmd} --help exits 0 and prints usage`, async () => { - const { stdout, exitCode } = await runCli([cmd, '--help']); - - expect(exitCode).toBe(0); - expect(stdout).toContain('Usage:'); - expect(stdout).toContain(cmd); - }, 10_000); - } - }); - - describe('subcommand descriptions are present in root help', () => { - it('root --help lists all five subcommands', async () => { - const { stdout } = await runCli(['--help']); - 
- expect(stdout).toContain('analyze'); - expect(stdout).toContain('breaking'); - expect(stdout).toContain('risk'); - expect(stdout).toContain('impact'); - expect(stdout).toContain('comment'); - }, 10_000); - }); - - describe('error handling without a git repo', () => { - it('pri analyze in a non-git directory exits with non-zero code', async () => { - const { exitCode, stderr } = await runCli(['analyze', '--repo', '/tmp'], { - cwd: '/tmp', - }); - - // The command should fail because /tmp is not a git repository - expect(exitCode).not.toBe(0); - // stderr should contain some error output (ora spinner fail message or error text) - expect(stderr.length + (exitCode !== 0 ? 1 : 0)).toBeGreaterThan(0); - }, 10_000); - - it('pri breaking in a non-git directory exits with non-zero code', async () => { - const { exitCode } = await runCli(['breaking', '--repo', '/tmp'], { - cwd: '/tmp', - }); - - expect(exitCode).not.toBe(0); - }, 10_000); - - it('pri risk in a non-git directory exits with non-zero code', async () => { - const { exitCode } = await runCli(['risk', '--repo', '/tmp'], { - cwd: '/tmp', - }); - - expect(exitCode).not.toBe(0); - }, 10_000); - - it('pri impact in a non-git directory exits with non-zero code', async () => { - const { exitCode } = await runCli(['impact', '--repo', '/tmp'], { - cwd: '/tmp', - }); - - expect(exitCode).not.toBe(0); - }, 10_000); - }); - - it('pri with unknown command prints help and exits 0', async () => { - const { stdout, exitCode } = await runCli(['help']); - - expect(exitCode).toBe(0); - expect(stdout).toContain('Usage:'); - }, 10_000); -}); diff --git a/packages/cli/__tests__/formatting.test.ts b/packages/cli/__tests__/formatting.test.ts deleted file mode 100644 index 67d8e04..0000000 --- a/packages/cli/__tests__/formatting.test.ts +++ /dev/null @@ -1,193 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { formatMarkdownTable } from '../src/commands/breaking.js'; -import { formatDotOutput } from 
'../src/commands/impact.js'; -import type { BreakingChange, ImpactGraph } from '@pr-impact/core'; - -describe('CLI formatting', () => { - describe('formatMarkdownTable', () => { - it('formats a single breaking change', () => { - const changes: BreakingChange[] = [ - { - filePath: 'src/utils.ts', - type: 'removed_export', - symbolName: 'helper', - before: 'function helper', - after: null, - severity: 'high', - consumers: ['src/app.ts'], - }, - ]; - - const result = formatMarkdownTable(changes); - expect(result).toContain('# Breaking Changes'); - expect(result).toContain('Found **1** breaking change.'); - expect(result).toContain('| src/utils.ts | helper | removed_export | high | src/app.ts |'); - }); - - it('formats multiple breaking changes', () => { - const changes: BreakingChange[] = [ - { - filePath: 'src/a.ts', - type: 'removed_export', - symbolName: 'foo', - before: 'function foo', - after: null, - severity: 'high', - consumers: [], - }, - { - filePath: 'src/b.ts', - type: 'changed_signature', - symbolName: 'bar', - before: 'function bar(x: number)', - after: 'function bar(x: string)', - severity: 'medium', - consumers: ['src/c.ts', 'src/d.ts'], - }, - ]; - - const result = formatMarkdownTable(changes); - expect(result).toContain('Found **2** breaking changes.'); - expect(result).toContain('| src/a.ts | foo |'); - expect(result).toContain('| src/b.ts | bar |'); - }); - - it('shows "none" when there are no consumers', () => { - const changes: BreakingChange[] = [ - { - filePath: 'src/x.ts', - type: 'removed_export', - symbolName: 'x', - before: 'const x', - after: null, - severity: 'high', - consumers: [], - }, - ]; - - const result = formatMarkdownTable(changes); - expect(result).toContain('| none |'); - }); - - it('includes the markdown table header row', () => { - const changes: BreakingChange[] = [ - { - filePath: 'src/a.ts', - type: 'changed_type', - symbolName: 'MyType', - before: 'type MyType = string', - after: 'type MyType = number', - severity: 
'medium', - consumers: [], - }, - ]; - - const result = formatMarkdownTable(changes); - expect(result).toContain('| File | Symbol | Type | Severity | Consumers |'); - expect(result).toContain('|------|--------|------|----------|-----------|'); - }); - - it('joins multiple consumers with commas', () => { - const changes: BreakingChange[] = [ - { - filePath: 'src/lib.ts', - type: 'removed_export', - symbolName: 'util', - before: 'function util', - after: null, - severity: 'high', - consumers: ['src/a.ts', 'src/b.ts', 'src/c.ts'], - }, - ]; - - const result = formatMarkdownTable(changes); - expect(result).toContain('src/a.ts, src/b.ts, src/c.ts'); - }); - }); - - describe('formatDotOutput', () => { - it('generates valid DOT digraph output', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { from: 'src/b.ts', to: 'src/a.ts', type: 'imports' }, - ], - }; - - const result = formatDotOutput(graph); - expect(result).toContain('digraph impact {'); - expect(result).toContain('rankdir=LR;'); - expect(result).toContain('}'); - }); - - it('styles directly changed nodes with red fill', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: [], - edges: [], - }; - - const result = formatDotOutput(graph); - expect(result).toContain('"src/a.ts" [fillcolor="#ff6b6b", fontcolor="white"];'); - }); - - it('styles indirectly affected nodes with yellow fill', () => { - const graph: ImpactGraph = { - directlyChanged: [], - indirectlyAffected: ['src/b.ts'], - edges: [], - }; - - const result = formatDotOutput(graph); - expect(result).toContain('"src/b.ts" [fillcolor="#ffd93d"];'); - }); - - it('includes labeled edges', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { from: 'src/b.ts', to: 'src/a.ts', type: 'imports' }, - ], - }; - - const result = formatDotOutput(graph); - 
expect(result).toContain('"src/b.ts" -> "src/a.ts" [label="imports"];'); - }); - - it('handles empty graph', () => { - const graph: ImpactGraph = { - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }; - - const result = formatDotOutput(graph); - expect(result).toContain('digraph impact {'); - expect(result).toContain('}'); - // Should not contain any node or edge definitions - expect(result).not.toContain('fillcolor'); - expect(result).not.toContain('->'); - }); - - it('handles multiple directly changed and indirectly affected files', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts', 'src/b.ts'], - indirectlyAffected: ['src/c.ts', 'src/d.ts'], - edges: [ - { from: 'src/c.ts', to: 'src/a.ts', type: 'imports' }, - { from: 'src/d.ts', to: 'src/b.ts', type: 'imports' }, - ], - }; - - const result = formatDotOutput(graph); - expect(result).toContain('"src/a.ts" [fillcolor="#ff6b6b"'); - expect(result).toContain('"src/b.ts" [fillcolor="#ff6b6b"'); - expect(result).toContain('"src/c.ts" [fillcolor="#ffd93d"'); - expect(result).toContain('"src/d.ts" [fillcolor="#ffd93d"'); - expect(result).toContain('"src/c.ts" -> "src/a.ts"'); - expect(result).toContain('"src/d.ts" -> "src/b.ts"'); - }); - }); -}); diff --git a/packages/cli/__tests__/github/ci-env.test.ts b/packages/cli/__tests__/github/ci-env.test.ts deleted file mode 100644 index 08dade6..0000000 --- a/packages/cli/__tests__/github/ci-env.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { detectCIEnv } from '../../src/github/ci-env.js'; - -describe('detectCIEnv', () => { - const originalEnv = process.env; - - beforeEach(() => { - // Reset env to a clean state - process.env = { ...originalEnv }; - // Remove all CI-related vars - delete process.env.GITHUB_ACTIONS; - delete process.env.GITHUB_REPOSITORY; - delete process.env.GITHUB_REF; - delete process.env.GITLAB_CI; - delete process.env.CI_MERGE_REQUEST_IID; - delete 
process.env.CI_PROJECT_PATH; - delete process.env.CIRCLECI; - delete process.env.CIRCLE_PULL_REQUEST; - delete process.env.CIRCLE_PROJECT_USERNAME; - delete process.env.CIRCLE_PROJECT_REPONAME; - }); - - afterEach(() => { - process.env = originalEnv; - }); - - it('returns null when not in any CI environment', () => { - expect(detectCIEnv()).toBeNull(); - }); - - describe('GitHub Actions', () => { - it('detects PR number and repo from GitHub Actions env', () => { - process.env.GITHUB_ACTIONS = 'true'; - process.env.GITHUB_REPOSITORY = 'owner/repo'; - process.env.GITHUB_REF = 'refs/pull/42/merge'; - - const result = detectCIEnv(); - expect(result).toEqual({ - prNumber: '42', - repo: 'owner/repo', - }); - }); - - it('returns null when GITHUB_REF is not a PR ref', () => { - process.env.GITHUB_ACTIONS = 'true'; - process.env.GITHUB_REPOSITORY = 'owner/repo'; - process.env.GITHUB_REF = 'refs/heads/main'; - - expect(detectCIEnv()).toBeNull(); - }); - - it('returns null when GITHUB_REPOSITORY is missing', () => { - process.env.GITHUB_ACTIONS = 'true'; - process.env.GITHUB_REF = 'refs/pull/42/merge'; - - expect(detectCIEnv()).toBeNull(); - }); - - it('returns null when GITHUB_REF is missing', () => { - process.env.GITHUB_ACTIONS = 'true'; - process.env.GITHUB_REPOSITORY = 'owner/repo'; - - expect(detectCIEnv()).toBeNull(); - }); - }); - - describe('GitLab CI', () => { - it('detects MR number and project from GitLab CI env', () => { - process.env.GITLAB_CI = 'true'; - process.env.CI_MERGE_REQUEST_IID = '15'; - process.env.CI_PROJECT_PATH = 'group/project'; - - const result = detectCIEnv(); - expect(result).toEqual({ - prNumber: '15', - repo: 'group/project', - }); - }); - - it('returns null when CI_MERGE_REQUEST_IID is missing', () => { - process.env.GITLAB_CI = 'true'; - process.env.CI_PROJECT_PATH = 'group/project'; - - expect(detectCIEnv()).toBeNull(); - }); - - it('returns null when CI_PROJECT_PATH is missing', () => { - process.env.GITLAB_CI = 'true'; - 
process.env.CI_MERGE_REQUEST_IID = '15'; - - expect(detectCIEnv()).toBeNull(); - }); - }); - - describe('CircleCI', () => { - it('detects PR number and repo from CircleCI env', () => { - process.env.CIRCLECI = 'true'; - process.env.CIRCLE_PULL_REQUEST = 'https://github.com/owner/repo/pull/99'; - process.env.CIRCLE_PROJECT_USERNAME = 'owner'; - process.env.CIRCLE_PROJECT_REPONAME = 'repo'; - - const result = detectCIEnv(); - expect(result).toEqual({ - prNumber: '99', - repo: 'owner/repo', - }); - }); - - it('returns null when CIRCLE_PULL_REQUEST is missing', () => { - process.env.CIRCLECI = 'true'; - process.env.CIRCLE_PROJECT_USERNAME = 'owner'; - process.env.CIRCLE_PROJECT_REPONAME = 'repo'; - - expect(detectCIEnv()).toBeNull(); - }); - - it('returns null when project info is missing', () => { - process.env.CIRCLECI = 'true'; - process.env.CIRCLE_PULL_REQUEST = 'https://github.com/owner/repo/pull/99'; - - expect(detectCIEnv()).toBeNull(); - }); - }); -}); diff --git a/packages/cli/__tests__/github/comment-poster.test.ts b/packages/cli/__tests__/github/comment-poster.test.ts deleted file mode 100644 index 388e1a5..0000000 --- a/packages/cli/__tests__/github/comment-poster.test.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { postOrUpdateComment } from '../../src/github/comment-poster.js'; - -const mockFetch = vi.fn(); -vi.stubGlobal('fetch', mockFetch); - -describe('postOrUpdateComment', () => { - const baseOpts = { - token: 'ghp_test123', - repo: 'owner/repo', - prNumber: '42', - body: '## PR Impact Report\nAll clear!', - }; - - beforeEach(() => { - mockFetch.mockReset(); - }); - - it('creates a new comment when no existing comment found', async () => { - // First call: list comments (empty) - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [], - }); - - // Second call: create comment - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => ({ html_url: 
'https://github.com/owner/repo/pull/42#issuecomment-1' }), - }); - - const url = await postOrUpdateComment(baseOpts); - - expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-1'); - - // Verify list call - expect(mockFetch).toHaveBeenCalledTimes(2); - const listCall = mockFetch.mock.calls[0]; - expect(listCall[0]).toContain('/repos/owner/repo/issues/42/comments'); - expect(listCall[1].method).toBe('GET'); - - // Verify create call - const createCall = mockFetch.mock.calls[1]; - expect(createCall[0]).toBe('https://api.github.com/repos/owner/repo/issues/42/comments'); - expect(createCall[1].method).toBe('POST'); - const createBody = JSON.parse(createCall[1].body); - expect(createBody.body).toContain(''); - expect(createBody.body).toContain('## PR Impact Report'); - expect(createBody.body).toContain(''); - }); - - it('updates an existing comment when marker is found', async () => { - // First call: list comments (contains existing pr-impact comment) - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [ - { id: 100, body: 'Some other comment' }, - { id: 200, body: '\nOld report\n' }, - ], - }); - - // Second call: update comment - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-200' }), - }); - - const url = await postOrUpdateComment(baseOpts); - - expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-200'); - - // Verify update call (PATCH) - const updateCall = mockFetch.mock.calls[1]; - expect(updateCall[0]).toBe('https://api.github.com/repos/owner/repo/issues/comments/200'); - expect(updateCall[1].method).toBe('PATCH'); - }); - - it('throws when creating a comment fails', async () => { - // List comments: empty - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [], - }); - - // Create comment: fails - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 403, - text: async () => 'Forbidden', - }); - - await 
expect(postOrUpdateComment(baseOpts)).rejects.toThrow( - 'GitHub API error creating comment: 403 Forbidden', - ); - }); - - it('throws when updating a comment fails', async () => { - // List comments: existing - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [ - { id: 200, body: '\nOld\n' }, - ], - }); - - // Update comment: fails - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 500, - text: async () => 'Internal Server Error', - }); - - await expect(postOrUpdateComment(baseOpts)).rejects.toThrow( - 'GitHub API error updating comment: 500 Internal Server Error', - ); - }); - - it('treats failed list call as no existing comment and creates new', async () => { - // List comments: fails - mockFetch.mockResolvedValueOnce({ - ok: false, - status: 404, - }); - - // Create comment: succeeds - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-3' }), - }); - - const url = await postOrUpdateComment(baseOpts); - expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-3'); - }); - - it('paginates through comments to find marker', async () => { - // First page: 100 comments, no marker - const page1 = Array.from({ length: 100 }, (_, i) => ({ - id: i + 1, - body: `Comment ${i + 1}`, - })); - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => page1, - }); - - // Second page: has marker - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [ - { id: 500, body: '\nReport\n' }, - ], - }); - - // Update call - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-500' }), - }); - - const url = await postOrUpdateComment(baseOpts); - expect(url).toBe('https://github.com/owner/repo/pull/42#issuecomment-500'); - - // First two calls are GET (pagination), third is PATCH (update) - expect(mockFetch).toHaveBeenCalledTimes(3); - 
expect(mockFetch.mock.calls[0][0]).toContain('page=1'); - expect(mockFetch.mock.calls[1][0]).toContain('page=2'); - expect(mockFetch.mock.calls[2][1].method).toBe('PATCH'); - }); - - it('sends correct Authorization header', async () => { - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => [], - }); - mockFetch.mockResolvedValueOnce({ - ok: true, - json: async () => ({ html_url: 'https://github.com/owner/repo/pull/42#issuecomment-1' }), - }); - - await postOrUpdateComment(baseOpts); - - const headers = mockFetch.mock.calls[0][1].headers; - expect(headers.Authorization).toBe('Bearer ghp_test123'); - }); -}); diff --git a/packages/cli/node_modules/.bin/esbuild b/packages/cli/node_modules/.bin/esbuild deleted file mode 100755 index 93209de..0000000 --- a/packages/cli/node_modules/.bin/esbuild +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules/esbuild/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules/esbuild/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules/esbuild/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules/esbuild/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/esbuild@0.27.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi 
-"$basedir/../../../../node_modules/.pnpm/esbuild@0.27.3/node_modules/esbuild/bin/esbuild" "$@" -exit $? diff --git a/packages/cli/node_modules/.bin/tsc b/packages/cli/node_modules/.bin/tsc deleted file mode 100755 index e556d4f..0000000 --- a/packages/cli/node_modules/.bin/tsc +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsc" "$@" -else - exec node "$basedir/../typescript/bin/tsc" "$@" -fi diff --git a/packages/cli/node_modules/.bin/tsserver b/packages/cli/node_modules/.bin/tsserver deleted file mode 100755 index db2401b..0000000 --- a/packages/cli/node_modules/.bin/tsserver +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@" -else - exec node "$basedir/../typescript/bin/tsserver" "$@" -fi diff --git a/packages/cli/node_modules/.bin/tsup b/packages/cli/node_modules/.bin/tsup deleted file mode 100755 index 4df1053..0000000 --- a/packages/cli/node_modules/.bin/tsup +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-default.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-default.js" "$@" -fi diff --git a/packages/cli/node_modules/.bin/tsup-node b/packages/cli/node_modules/.bin/tsup-node deleted file mode 100755 index 689ae97..0000000 --- a/packages/cli/node_modules/.bin/tsup-node +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-node.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-node.js" "$@" -fi diff --git a/packages/cli/node_modules/@pr-impact/core b/packages/cli/node_modules/@pr-impact/core deleted file mode 120000 index 5e990a8..0000000 --- a/packages/cli/node_modules/@pr-impact/core +++ /dev/null @@ -1 +0,0 @@ -../../../core \ No newline at end of file diff --git a/packages/cli/node_modules/@types/node b/packages/cli/node_modules/@types/node deleted file mode 120000 index 129d921..0000000 --- a/packages/cli/node_modules/@types/node +++ /dev/null @@ -1 +0,0 @@ -../../../../node_modules/.pnpm/@types+node@22.19.10/node_modules/@types/node \ No newline at end of file diff --git a/packages/cli/node_modules/chalk b/packages/cli/node_modules/chalk deleted file mode 120000 index e26cf9e..0000000 --- a/packages/cli/node_modules/chalk +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/chalk@5.6.2/node_modules/chalk \ No newline at end of file diff --git a/packages/cli/node_modules/commander b/packages/cli/node_modules/commander deleted file mode 120000 index 4593188..0000000 --- a/packages/cli/node_modules/commander +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/commander@13.1.0/node_modules/commander \ No newline at end of file diff --git a/packages/cli/node_modules/ora b/packages/cli/node_modules/ora deleted file mode 120000 index 2176026..0000000 --- 
a/packages/cli/node_modules/ora +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/ora@8.2.0/node_modules/ora \ No newline at end of file diff --git a/packages/cli/node_modules/tsup b/packages/cli/node_modules/tsup deleted file mode 120000 index 547982a..0000000 --- a/packages/cli/node_modules/tsup +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup \ No newline at end of file diff --git a/packages/cli/node_modules/typescript b/packages/cli/node_modules/typescript deleted file mode 120000 index d6c42d5..0000000 --- a/packages/cli/node_modules/typescript +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/typescript@5.7.3/node_modules/typescript \ No newline at end of file diff --git a/packages/cli/package.json b/packages/cli/package.json deleted file mode 100644 index e78f31f..0000000 --- a/packages/cli/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@pr-impact/cli", - "version": "0.2.1", - "description": "CLI for pr-impact — analyze PRs for breaking changes, risk, and impact", - "type": "module", - "bin": { - "pri": "./dist/index.js" - }, - "files": [ - "dist" - ], - "license": "MIT", - "publishConfig": { - "access": "public" - }, - "keywords": [ - "pr-impact", - "cli", - "pull-request", - "breaking-changes", - "risk-score", - "code-analysis" - ], - "engines": { - "node": ">=20.0.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/ducdmdev/pr-impact.git", - "directory": "packages/cli" - }, - "scripts": { - "build": "tsup", - "clean": "rm -rf dist" - }, - "dependencies": { - "@pr-impact/core": "workspace:*", - "commander": "^13.0.0", - "chalk": "^5.4.0", - "ora": "^8.0.0" - }, - "devDependencies": { - "tsup": "^8.0.0", - "typescript": "~5.7.0", - "@types/node": "^22.0.0" - } -} diff --git a/packages/cli/src/commands/analyze.ts b/packages/cli/src/commands/analyze.ts deleted file mode 100644 index 62bf0c4..0000000 --- a/packages/cli/src/commands/analyze.ts +++ 
/dev/null @@ -1,52 +0,0 @@ -import { Command } from 'commander'; -import chalk from 'chalk'; -import ora from 'ora'; -import { analyzePR, formatMarkdown, formatJSON } from '@pr-impact/core'; -import { writeFile } from 'fs/promises'; -import { resolve } from 'path'; - -export function registerAnalyzeCommand(program: Command): void { - program - .command('analyze') - .description('Run full PR impact analysis') - .argument('[base]', 'Base branch (default: auto-detect main/master)') - .argument('[head]', 'Head branch (default: HEAD)') - .option('--format ', 'Output format: md | json', 'md') - .option('--output ', 'Write to file instead of stdout') - .option('--repo ', 'Repository path', process.cwd()) - .option('--no-breaking', 'Skip breaking change analysis') - .option('--no-coverage', 'Skip test coverage analysis') - .option('--no-docs', 'Skip doc staleness check') - .action(async (base, head, opts) => { - const spinner = ora({ text: 'Analyzing PR impact...', stream: process.stderr }).start(); - try { - const analysis = await analyzePR({ - repoPath: resolve(opts.repo), - baseBranch: base, - headBranch: head, - skipBreaking: opts.breaking === false, - skipCoverage: opts.coverage === false, - skipDocs: opts.docs === false, - }); - spinner.stop(); - - const output = - opts.format === 'json' - ? formatJSON(analysis) - : formatMarkdown(analysis); - - if (opts.output) { - await writeFile(resolve(opts.output), output); - console.log(chalk.green(`Report written to ${opts.output}`)); - } else { - console.log(output); - } - } catch (err) { - spinner.fail('Analysis failed'); - console.error( - chalk.red(err instanceof Error ? 
err.message : String(err)), - ); - process.exit(2); - } - }); -} diff --git a/packages/cli/src/commands/breaking.ts b/packages/cli/src/commands/breaking.ts deleted file mode 100644 index 3dde84e..0000000 --- a/packages/cli/src/commands/breaking.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { Command } from 'commander'; -import chalk from 'chalk'; -import ora from 'ora'; -import { parseDiff, detectBreakingChanges, resolveDefaultBaseBranch } from '@pr-impact/core'; -import type { BreakingChange } from '@pr-impact/core'; -import { resolve } from 'path'; - -const SEVERITY_ORDER: Record = { - low: 0, - medium: 1, - high: 2, -}; - -function severityColor(severity: BreakingChange['severity']): string { - switch (severity) { - case 'high': - return chalk.red(severity); - case 'medium': - return chalk.yellow(severity); - case 'low': - return chalk.green(severity); - } -} - -export function formatMarkdownTable(changes: BreakingChange[]): string { - const lines: string[] = []; - lines.push('# Breaking Changes\n'); - lines.push(`Found **${changes.length}** breaking change${changes.length === 1 ? '' : 's'}.\n`); - lines.push('| File | Symbol | Type | Severity | Consumers |'); - lines.push('|------|--------|------|----------|-----------|'); - - for (const change of changes) { - const consumers = change.consumers.length > 0 ? change.consumers.join(', ') : 'none'; - lines.push( - `| ${change.filePath} | ${change.symbolName} | ${change.type} | ${change.severity} | ${consumers} |`, - ); - } - - return lines.join('\n'); -} - -function formatText(changes: BreakingChange[]): string { - const lines: string[] = []; - lines.push(chalk.bold(`Found ${changes.length} breaking change${changes.length === 1 ? 
'' : 's'}:\n`)); - - for (const change of changes) { - lines.push( - ` ${severityColor(change.severity)} ${chalk.bold(change.symbolName)} (${change.type})`, - ); - lines.push(` ${chalk.dim(change.filePath)}`); - if (change.before) { - lines.push(` ${chalk.red('- ' + change.before)}`); - } - if (change.after) { - lines.push(` ${chalk.green('+ ' + change.after)}`); - } - if (change.consumers.length > 0) { - lines.push(` ${chalk.dim('Consumers:')} ${change.consumers.join(', ')}`); - } - lines.push(''); - } - - return lines.join('\n'); -} - -export function registerBreakingCommand(program: Command): void { - program - .command('breaking') - .description('Detect breaking changes in the PR') - .argument('[base]', 'Base branch', undefined) - .argument('[head]', 'Head branch', undefined) - .option('--severity ', 'Minimum severity: low | medium | high', 'low') - .option('--format ', 'Output format: md | json', 'md') - .option('--repo ', 'Repository path', process.cwd()) - .action(async (base, head, opts) => { - const spinner = ora({ text: 'Detecting breaking changes...', stream: process.stderr }).start(); - try { - const repoPath = resolve(opts.repo); - const baseBranch = base ?? await resolveDefaultBaseBranch(repoPath); - const headBranch = head ?? 'HEAD'; - - const changedFiles = await parseDiff(repoPath, baseBranch, headBranch); - const allBreaking = await detectBreakingChanges( - repoPath, - baseBranch, - headBranch, - changedFiles, - ); - - const minSeverity = SEVERITY_ORDER[opts.severity] ?? 
0; - const filtered = allBreaking.filter( - (change) => SEVERITY_ORDER[change.severity] >= minSeverity, - ); - - spinner.stop(); - - if (filtered.length === 0) { - console.log( - chalk.green('No breaking changes detected at severity >= ' + opts.severity), - ); - return; - } - - switch (opts.format) { - case 'json': - console.log(JSON.stringify(filtered, null, 2)); - break; - case 'md': - console.log(formatMarkdownTable(filtered)); - break; - default: - console.log(formatText(filtered)); - break; - } - - // Exit with code 1 if breaking changes found at the specified severity - process.exit(1); - } catch (err) { - spinner.fail('Breaking change detection failed'); - console.error( - chalk.red(err instanceof Error ? err.message : String(err)), - ); - process.exit(2); - } - }); -} diff --git a/packages/cli/src/commands/comment.ts b/packages/cli/src/commands/comment.ts deleted file mode 100644 index 699f797..0000000 --- a/packages/cli/src/commands/comment.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { Command } from 'commander'; -import chalk from 'chalk'; -import ora from 'ora'; -import { analyzePR, formatMarkdown } from '@pr-impact/core'; -import { resolve } from 'path'; -import { detectCIEnv } from '../github/ci-env.js'; -import { postOrUpdateComment } from '../github/comment-poster.js'; - -export function registerCommentCommand(program: Command): void { - program - .command('comment') - .description('Run analysis and post/update a PR comment on GitHub') - .argument('[base]', 'Base branch (default: auto-detect main/master)') - .argument('[head]', 'Head branch (default: HEAD)') - .option('--repo ', 'Repository path', process.cwd()) - .option('--pr ', 'PR number (default: auto-detect from CI)') - .option('--github-repo ', 'GitHub repository (default: auto-detect from CI)') - .option('--token ', 'GitHub token (default: GITHUB_TOKEN env var)') - .action(async (base, head, opts) => { - const spinner = ora({ text: 'Analyzing PR impact...', stream: process.stderr }).start(); - 
- try { - // Resolve GitHub config - const token = opts.token ?? process.env.GITHUB_TOKEN; - if (!token) { - spinner.fail('Missing GitHub token'); - console.error(chalk.red('Provide --token or set GITHUB_TOKEN environment variable')); - process.exit(2); - return; - } - - let prNumber: string | undefined = opts.pr; - let githubRepo: string | undefined = opts.githubRepo; - - if (!prNumber || !githubRepo) { - const ciEnv = detectCIEnv(); - if (ciEnv) { - prNumber = prNumber ?? ciEnv.prNumber; - githubRepo = githubRepo ?? ciEnv.repo; - } - } - - if (!prNumber) { - spinner.fail('Cannot determine PR number'); - console.error(chalk.red('Provide --pr or run in a supported CI environment')); - process.exit(2); - return; - } - - if (!githubRepo) { - spinner.fail('Cannot determine GitHub repository'); - console.error(chalk.red('Provide --github-repo or run in a supported CI environment')); - process.exit(2); - return; - } - - // Run analysis - const analysis = await analyzePR({ - repoPath: resolve(opts.repo), - baseBranch: base, - headBranch: head, - }); - - spinner.text = 'Posting comment...'; - - const report = formatMarkdown(analysis); - const commentUrl = await postOrUpdateComment({ - token, - repo: githubRepo, - prNumber, - body: report, - }); - - spinner.succeed('Comment posted'); - console.log(chalk.green(commentUrl)); - } catch (err) { - spinner.fail('Failed to post comment'); - console.error( - chalk.red(err instanceof Error ? 
err.message : String(err)), - ); - process.exit(2); - } - }); -} diff --git a/packages/cli/src/commands/impact.ts b/packages/cli/src/commands/impact.ts deleted file mode 100644 index 13038a9..0000000 --- a/packages/cli/src/commands/impact.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { Command } from 'commander'; -import chalk from 'chalk'; -import ora from 'ora'; -import { parseDiff, buildImpactGraph, resolveDefaultBaseBranch } from '@pr-impact/core'; -import type { ImpactGraph, ChangedFile } from '@pr-impact/core'; -import { resolve } from 'path'; - -function formatTreeOutput(graph: ImpactGraph): string { - const lines: string[] = []; - - lines.push(chalk.bold('Impact Graph')); - lines.push(''); - - if (graph.directlyChanged.length > 0) { - lines.push(chalk.bold('Directly Changed')); - for (let i = 0; i < graph.directlyChanged.length; i++) { - const isLast = i === graph.directlyChanged.length - 1; - const prefix = isLast ? '└── ' : '├── '; - const file = graph.directlyChanged[i]; - const dependents = graph.edges.filter((e) => e.from === file); - - lines.push(` ${prefix}${chalk.cyan(file)}`); - - if (dependents.length > 0) { - const indent = isLast ? ' ' : '│ '; - for (let j = 0; j < dependents.length; j++) { - const depIsLast = j === dependents.length - 1; - const depPrefix = depIsLast ? '└── ' : '├── '; - lines.push( - ` ${indent}${depPrefix}${chalk.dim(dependents[j].to)} ${chalk.dim('(' + dependents[j].type + ')')}`, - ); - } - } - } - } - - if (graph.indirectlyAffected.length > 0) { - lines.push(''); - lines.push(chalk.bold('Indirectly Affected')); - for (let i = 0; i < graph.indirectlyAffected.length; i++) { - const isLast = i === graph.indirectlyAffected.length - 1; - const prefix = isLast ? 
'└── ' : '├── '; - lines.push(` ${prefix}${chalk.yellow(graph.indirectlyAffected[i])}`); - } - } - - lines.push(''); - lines.push( - chalk.dim( - `${graph.directlyChanged.length} directly changed, ` + - `${graph.indirectlyAffected.length} indirectly affected, ` + - `${graph.edges.length} edge${graph.edges.length === 1 ? '' : 's'}`, - ), - ); - - return lines.join('\n'); -} - -export function formatDotOutput(graph: ImpactGraph): string { - const lines: string[] = []; - lines.push('digraph impact {'); - lines.push(' rankdir=LR;'); - lines.push(' node [shape=box, style=filled];'); - lines.push(''); - - // Style directly changed nodes - for (const file of graph.directlyChanged) { - lines.push(` "${file}" [fillcolor="#ff6b6b", fontcolor="white"];`); - } - - // Style indirectly affected nodes - for (const file of graph.indirectlyAffected) { - lines.push(` "${file}" [fillcolor="#ffd93d"];`); - } - - lines.push(''); - - // Edges - for (const edge of graph.edges) { - lines.push(` "${edge.from}" -> "${edge.to}" [label="${edge.type}"];`); - } - - lines.push('}'); - return lines.join('\n'); -} - -function formatJsonOutput(graph: ImpactGraph): string { - return JSON.stringify(graph, null, 2); -} - -export function registerImpactCommand(program: Command): void { - program - .command('impact') - .description('Build and display the impact graph') - .argument('[file]', 'Specific file to trace impact for', undefined) - .option('--depth ', 'Max dependency depth', parseInt, 3) - .option('--format ', 'Output format: text | json | dot', 'text') - .option('--repo ', 'Repository path', process.cwd()) - .action(async (file, opts) => { - const spinner = ora({ text: 'Building impact graph...', stream: process.stderr }).start(); - try { - const repoPath = resolve(opts.repo); - const depth = opts.depth; - - let changedFiles: ChangedFile[]; - - if (file) { - // When a specific file is provided, create a synthetic ChangedFile - changedFiles = [ - { - path: file, - status: 'modified', - 
additions: 0, - deletions: 0, - language: '', - category: 'source', - }, - ]; - } else { - // Default: parse diff between detected default branch and HEAD - const baseBranch = await resolveDefaultBaseBranch(repoPath); - changedFiles = await parseDiff(repoPath, baseBranch, 'HEAD'); - } - - const graph = await buildImpactGraph(repoPath, changedFiles, depth); - - spinner.stop(); - - switch (opts.format) { - case 'json': - console.log(formatJsonOutput(graph)); - break; - case 'dot': - console.log(formatDotOutput(graph)); - break; - default: - console.log(formatTreeOutput(graph)); - break; - } - } catch (err) { - spinner.fail('Impact graph building failed'); - console.error( - chalk.red(err instanceof Error ? err.message : String(err)), - ); - process.exit(2); - } - }); -} diff --git a/packages/cli/src/commands/risk.ts b/packages/cli/src/commands/risk.ts deleted file mode 100644 index e06ed42..0000000 --- a/packages/cli/src/commands/risk.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { Command } from 'commander'; -import chalk from 'chalk'; -import ora from 'ora'; -import { analyzePR } from '@pr-impact/core'; -import type { RiskAssessment, RiskFactor } from '@pr-impact/core'; -import { resolve } from 'path'; - -function levelColor(level: RiskAssessment['level']): (text: string) => string { - switch (level) { - case 'low': - return chalk.green; - case 'medium': - return chalk.yellow; - case 'high': - return chalk.red; - case 'critical': - return chalk.red.bold; - } -} - -function formatFactorLine(factor: RiskFactor): string { - const weighted = (factor.score * factor.weight).toFixed(1); - const bar = '█'.repeat(Math.round(factor.score / 10)) + - '░'.repeat(10 - Math.round(factor.score / 10)); - return ` ${bar} ${factor.name.padEnd(24)} ${String(factor.score).padStart(3)}/100 (weight: ${factor.weight}, contribution: ${weighted})`; -} - -function formatTextOutput(risk: RiskAssessment): string { - const colorFn = levelColor(risk.level); - const lines: string[] = []; - - 
lines.push(chalk.bold('Risk Assessment')); - lines.push(''); - lines.push( - ` Score: ${colorFn(String(risk.score) + '/100')} Level: ${colorFn(risk.level.toUpperCase())}`, - ); - lines.push(''); - - if (risk.factors.length > 0) { - lines.push(chalk.bold('Factor Breakdown')); - lines.push(''); - for (const factor of risk.factors) { - lines.push(formatFactorLine(factor)); - lines.push(` ${chalk.dim(factor.description)}`); - if (factor.details && factor.details.length > 0) { - for (const detail of factor.details) { - lines.push(` ${chalk.dim('- ' + detail)}`); - } - } - lines.push(''); - } - } - - return lines.join('\n'); -} - -function formatJsonOutput(risk: RiskAssessment): string { - return JSON.stringify(risk, null, 2); -} - -export function registerRiskCommand(program: Command): void { - program - .command('risk') - .description('Calculate and display PR risk score') - .argument('[base]', 'Base branch (default: auto-detect main/master)') - .argument('[head]', 'Head branch (default: HEAD)') - .option('--threshold ', 'Fail if risk score >= threshold', parseFloat) - .option('--format ', 'Output format: text | json', 'text') - .option('--repo ', 'Repository path', process.cwd()) - .action(async (base, head, opts) => { - const spinner = ora({ text: 'Calculating risk score...', stream: process.stderr }).start(); - try { - const analysis = await analyzePR({ - repoPath: resolve(opts.repo), - baseBranch: base, - headBranch: head, - }); - - spinner.stop(); - - const { riskScore } = analysis; - - if (opts.format === 'json') { - console.log(formatJsonOutput(riskScore)); - } else { - console.log(formatTextOutput(riskScore)); - } - - // If threshold is set and score meets or exceeds it, exit with code 1 - if (opts.threshold !== undefined && riskScore.score >= opts.threshold) { - const colorFn = levelColor(riskScore.level); - console.log( - colorFn( - `\nRisk score ${riskScore.score} meets or exceeds threshold ${opts.threshold}`, - ), - ); - process.exit(1); - } - } catch (err) 
{ - spinner.fail('Risk calculation failed'); - console.error( - chalk.red(err instanceof Error ? err.message : String(err)), - ); - process.exit(2); - } - }); -} diff --git a/packages/cli/src/github/ci-env.ts b/packages/cli/src/github/ci-env.ts deleted file mode 100644 index 21d5ead..0000000 --- a/packages/cli/src/github/ci-env.ts +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Auto-detect PR context from CI environment variables. - * - * Supports: GitHub Actions, GitLab CI, CircleCI, Jenkins, Bitbucket Pipelines, - * Azure Pipelines, and Travis CI. - */ - -export interface CIEnv { - /** Pull request number (e.g. "42"). */ - prNumber: string; - /** Repository owner/name (e.g. "owner/repo"). */ - repo: string; -} - -/** - * Attempt to detect the PR number and repository from CI environment variables. - * Returns null if not running in a recognized CI environment or if the - * information is not available. - */ -export function detectCIEnv(): CIEnv | null { - const env = process.env; - - // GitHub Actions - if (env.GITHUB_ACTIONS) { - const repo = env.GITHUB_REPOSITORY; - const ref = env.GITHUB_REF ?? ''; - // GITHUB_REF for PRs is "refs/pull//merge" - const prMatch = ref.match(/^refs\/pull\/(\d+)\//); - if (repo && prMatch) { - return { prNumber: prMatch[1], repo }; - } - return null; - } - - // GitLab CI - if (env.GITLAB_CI) { - const prNumber = env.CI_MERGE_REQUEST_IID; - const project = env.CI_PROJECT_PATH; - if (prNumber && project) { - return { prNumber, repo: project }; - } - return null; - } - - // CircleCI - if (env.CIRCLECI) { - const prUrl = env.CIRCLE_PULL_REQUEST ?? ''; - const prMatch = prUrl.match(/\/pull\/(\d+)$/); - const slug = env.CIRCLE_PROJECT_USERNAME && env.CIRCLE_PROJECT_REPONAME - ? 
`${env.CIRCLE_PROJECT_USERNAME}/${env.CIRCLE_PROJECT_REPONAME}` - : undefined; - if (prMatch && slug) { - return { prNumber: prMatch[1], repo: slug }; - } - return null; - } - - return null; -} diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts deleted file mode 100644 index 4454d79..0000000 --- a/packages/cli/src/index.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Command } from 'commander'; -import { createRequire } from 'module'; -import { registerAnalyzeCommand } from './commands/analyze.js'; -import { registerBreakingCommand } from './commands/breaking.js'; -import { registerRiskCommand } from './commands/risk.js'; -import { registerImpactCommand } from './commands/impact.js'; -import { registerCommentCommand } from './commands/comment.js'; - -const require = createRequire(import.meta.url); -const { version } = require('../package.json') as { version: string }; - -const program = new Command(); -program - .name('pri') - .description('PR Impact Analyzer — detect breaking changes, map impact, score risk') - .version(version); - -registerAnalyzeCommand(program); -registerBreakingCommand(program); -registerRiskCommand(program); -registerImpactCommand(program); -registerCommentCommand(program); - -program.parse(); diff --git a/packages/core/CHANGELOG.md b/packages/core/CHANGELOG.md deleted file mode 100644 index 3bd9893..0000000 --- a/packages/core/CHANGELOG.md +++ /dev/null @@ -1,21 +0,0 @@ -# @pr-impact/core - -## 0.2.1 - -### Patch Changes - -- 047e429: Add consumer-facing adoption guides: getting-started, programmatic API, configuration guide, troubleshooting, and CONTRIBUTING.md. Expand CI integration docs with GitLab CI, CircleCI, and Jenkins examples. - -## 0.2.0 - -### Minor Changes - -- b31721c: Initial release of pr-impact — static analysis for pull requests. 
- - - Breaking change detection (removed exports, changed signatures, renamed exports) - - Import-dependency impact graph with blast radius mapping - - Test coverage gap analysis - - Documentation staleness checking - - Weighted risk scoring (6 factors, 0-100 scale) - - CLI with analyze, breaking, risk, impact, and comment commands - - MCP server exposing all analysis tools to AI assistants diff --git a/packages/core/CLAUDE.md b/packages/core/CLAUDE.md deleted file mode 100644 index 2c1c388..0000000 --- a/packages/core/CLAUDE.md +++ /dev/null @@ -1,64 +0,0 @@ -# CLAUDE.md -- @pr-impact/core - -## What this package does - -Analysis engine for pr-impact. Pure logic, no I/O except git via `simple-git` and file discovery via `fast-glob`. All other packages depend on this one. - -## Quick commands - -```bash -pnpm build # Build with tsup -pnpm test # Run vitest -npx vitest run packages/core/__tests__/FILE.test.ts # Single test file -``` - -## Source layout - -``` -src/ - analyzer.ts Top-level analyzePR() orchestrator (runs steps via Promise.all) - types.ts All shared TypeScript interfaces - index.ts Barrel exports (public API) - diff/ - diff-parser.ts Parse git diff into ChangedFile[] - file-categorizer.ts Classify files as source/test/doc/config/other - breaking/ - detector.ts Detect breaking changes across changed files - export-differ.ts Diff exported symbols (regex-based, not AST) - signature-differ.ts Compare function/class signatures - coverage/ - coverage-checker.ts Check whether changed source files have test changes - test-mapper.ts Map source files to expected test files - docs/ - staleness-checker.ts Find stale references in doc files - imports/ - import-resolver.ts Resolve import paths, find consumers, reverse dep map - impact/ - impact-graph.ts Build import dependency graph from changed files - risk/ - risk-calculator.ts Calculate weighted risk score from all factors - factors.ts Individual risk factor evaluators with weights - output/ - 
markdown-reporter.ts Format PRAnalysis as Markdown - json-reporter.ts Format PRAnalysis as JSON -``` - -## Key conventions - -- ESM only. Use `.js` extensions in all import paths. -- All shared types go in `types.ts`. Import types from there. -- New public APIs must be re-exported from `index.ts`. -- Export parsing uses **regex**, not AST. See `export-differ.ts`. -- Risk scoring uses six weighted factors defined in `factors.ts`. -- `analyzePR()` runs analysis steps in parallel via `Promise.all`. - -## Testing - -- Tests live in `__tests__/` and use vitest. -- Unit tests only -- mock `simple-git` calls, never depend on real git state. -- 14 test files, covering all modules. - -## Dependencies - -- `simple-git` -- all git operations -- `fast-glob` -- file discovery for test mapping and imports diff --git a/packages/core/README.md b/packages/core/README.md deleted file mode 100644 index 7da44e9..0000000 --- a/packages/core/README.md +++ /dev/null @@ -1,119 +0,0 @@ -# @pr-impact/core - -PR analysis engine -- detect breaking changes, map blast radius, check test coverage, find stale docs, and score risk. 
- -## Install - -```bash -npm install @pr-impact/core -``` - -## Quick Start - -```typescript -import { analyzePR, formatMarkdown, formatJSON } from '@pr-impact/core'; - -const analysis = await analyzePR({ - repoPath: '/path/to/repo', - baseBranch: 'main', - headBranch: 'feature/my-branch', -}); - -console.log(analysis.riskScore.score); // 42 -console.log(analysis.riskScore.level); // "medium" -console.log(analysis.breakingChanges); // BreakingChange[] - -console.log(formatMarkdown(analysis)); // Markdown report -console.log(formatJSON(analysis)); // JSON string -``` - -## Individual Analysis Steps - -Each step can be used independently: - -```typescript -import { - parseDiff, - detectBreakingChanges, - checkTestCoverage, - checkDocStaleness, - buildImpactGraph, - calculateRisk, -} from '@pr-impact/core'; - -const repoPath = '/path/to/repo'; -const base = 'main'; -const head = 'HEAD'; - -const changedFiles = await parseDiff(repoPath, base, head); -const breakingChanges = await detectBreakingChanges(repoPath, base, head, changedFiles); -const testCoverage = await checkTestCoverage(repoPath, changedFiles); -const docStaleness = await checkDocStaleness(repoPath, changedFiles, base, head); -const impactGraph = await buildImpactGraph(repoPath, changedFiles); -const riskScore = calculateRisk(changedFiles, breakingChanges, testCoverage, docStaleness, impactGraph); -``` - -## Lower-Level Utilities - -```typescript -import { - categorizeFile, - parseExports, - diffExports, - diffSignatures, - mapTestFiles, - extractImportPaths, - findConsumers, -} from '@pr-impact/core'; - -categorizeFile('src/utils/auth.ts'); // 'source' -categorizeFile('__tests__/auth.test.ts'); // 'test' -categorizeFile('README.md'); // 'doc' -``` - -## Types - -All TypeScript interfaces are exported from the package: - -```typescript -import type { - PRAnalysis, - AnalysisOptions, - ChangedFile, - BreakingChange, - TestCoverageReport, - TestCoverageGap, - DocStalenessReport, - StaleReference, - 
ImpactGraph, - ImpactEdge, - RiskAssessment, - RiskFactor, - ExportedSymbol, - FileExports, -} from '@pr-impact/core'; -``` - -## Risk Score - -The risk score is a weighted average of six factors (0--100): - -| Factor | Weight | Description | -|---|---|---| -| Breaking changes | 0.30 | Severity of detected breaking API changes | -| Untested changes | 0.25 | Ratio of changed source files lacking test updates | -| Diff size | 0.15 | Total lines added + deleted | -| Stale documentation | 0.10 | References to modified/removed symbols in docs | -| Config file changes | 0.10 | CI/build config modifications | -| Impact breadth | 0.10 | Number of indirectly affected files | - -Risk levels: **Low** (0--25), **Medium** (26--50), **High** (51--75), **Critical** (76--100). - -## Requirements - -- Node.js >= 20 -- Must be run inside a git repository (uses `simple-git` for git operations) - -## License - -[MIT](../../LICENSE) diff --git a/packages/core/__tests__/analyzer.test.ts b/packages/core/__tests__/analyzer.test.ts deleted file mode 100644 index ed1321e..0000000 --- a/packages/core/__tests__/analyzer.test.ts +++ /dev/null @@ -1,621 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import type { ChangedFile, BreakingChange, TestCoverageReport, DocStalenessReport, ImpactGraph, RiskAssessment } from '../src/types.js'; - -// ── Mocks ──────────────────────────────────────────────────────────────────── - -const mockBranch = vi.fn(); -const mockCheckIsRepo = vi.fn(); -const mockRevparse = vi.fn(); - -vi.mock('simple-git', () => ({ - default: vi.fn(() => ({ - branch: mockBranch, - checkIsRepo: mockCheckIsRepo, - revparse: mockRevparse, - })), -})); - -const mockParseDiff = vi.fn(); -vi.mock('../src/diff/diff-parser.js', () => ({ - parseDiff: (...args: unknown[]) => mockParseDiff(...args), -})); - -const mockDetectBreakingChanges = vi.fn(); -vi.mock('../src/breaking/detector.js', () => ({ - detectBreakingChanges: (...args: unknown[]) => 
mockDetectBreakingChanges(...args), -})); - -const mockCheckTestCoverage = vi.fn(); -vi.mock('../src/coverage/coverage-checker.js', () => ({ - checkTestCoverage: (...args: unknown[]) => mockCheckTestCoverage(...args), -})); - -const mockCheckDocStaleness = vi.fn(); -vi.mock('../src/docs/staleness-checker.js', () => ({ - checkDocStaleness: (...args: unknown[]) => mockCheckDocStaleness(...args), -})); - -const mockBuildImpactGraph = vi.fn(); -vi.mock('../src/impact/impact-graph.js', () => ({ - buildImpactGraph: (...args: unknown[]) => mockBuildImpactGraph(...args), -})); - -const mockCalculateRisk = vi.fn(); -vi.mock('../src/risk/risk-calculator.js', () => ({ - calculateRisk: (...args: unknown[]) => mockCalculateRisk(...args), -})); - -const mockBuildReverseDependencyMap = vi.fn(); -vi.mock('../src/imports/import-resolver.js', () => ({ - buildReverseDependencyMap: (...args: unknown[]) => mockBuildReverseDependencyMap(...args), -})); - -import { resolveDefaultBaseBranch, analyzePR } from '../src/analyzer.js'; - -// ── Fixtures ───────────────────────────────────────────────────────────────── - -const fakeChangedFiles: ChangedFile[] = [ - { - path: 'src/utils.ts', - status: 'modified', - additions: 10, - deletions: 5, - language: 'typescript', - category: 'source', - }, - { - path: 'src/index.ts', - status: 'modified', - additions: 3, - deletions: 1, - language: 'typescript', - category: 'source', - }, -]; - -const fakeBreakingChanges: BreakingChange[] = [ - { - filePath: 'src/utils.ts', - type: 'removed_export', - symbolName: 'helperFn', - before: 'export function helperFn(): void', - after: null, - severity: 'high', - consumers: ['src/index.ts'], - }, -]; - -const fakeCoverage: TestCoverageReport = { - changedSourceFiles: 2, - sourceFilesWithTestChanges: 1, - coverageRatio: 0.5, - gaps: [ - { - sourceFile: 'src/index.ts', - expectedTestFiles: ['__tests__/index.test.ts'], - testFileExists: false, - testFileChanged: false, - }, - ], -}; - -const fakeDocStaleness: 
DocStalenessReport = { - staleReferences: [], - checkedFiles: ['README.md'], -}; - -const fakeImpactGraph: ImpactGraph = { - directlyChanged: ['src/utils.ts', 'src/index.ts'], - indirectlyAffected: ['src/app.ts'], - edges: [{ from: 'src/utils.ts', to: 'src/app.ts', type: 'imports' }], -}; - -const fakeRiskScore: RiskAssessment = { - score: 42, - level: 'medium', - factors: [ - { name: 'Breaking Changes', score: 100, weight: 0.3, description: '1 breaking change found' }, - { name: 'Untested Changes', score: 50, weight: 0.25, description: '1 of 2 source files lack test changes' }, - ], -}; - -const fakeReverseDeps = new Map(); - -// ── Helpers ────────────────────────────────────────────────────────────────── - -function setupDefaultMocks() { - mockCheckIsRepo.mockResolvedValue(true); - mockRevparse.mockResolvedValue('abc123'); - mockBranch.mockResolvedValue({ all: ['main', 'feature/test'] }); - mockParseDiff.mockResolvedValue(fakeChangedFiles); - mockDetectBreakingChanges.mockResolvedValue(fakeBreakingChanges); - mockCheckTestCoverage.mockResolvedValue(fakeCoverage); - mockCheckDocStaleness.mockResolvedValue(fakeDocStaleness); - mockBuildImpactGraph.mockResolvedValue(fakeImpactGraph); - mockCalculateRisk.mockReturnValue(fakeRiskScore); - mockBuildReverseDependencyMap.mockResolvedValue(fakeReverseDeps); -} - -// ── Tests ──────────────────────────────────────────────────────────────────── - -beforeEach(() => { - vi.clearAllMocks(); -}); - -describe('resolveDefaultBaseBranch', () => { - it('returns "main" when main branch exists', async () => { - mockBranch.mockResolvedValue({ all: ['main', 'feature/test'] }); - - const result = await resolveDefaultBaseBranch('/fake/repo'); - expect(result).toBe('main'); - }); - - it('returns "master" when only master exists', async () => { - mockBranch.mockResolvedValue({ all: ['master', 'develop'] }); - - const result = await resolveDefaultBaseBranch('/fake/repo'); - expect(result).toBe('master'); - }); - - it('prefers "main" over 
"master" when both exist', async () => { - mockBranch.mockResolvedValue({ all: ['main', 'master'] }); - - const result = await resolveDefaultBaseBranch('/fake/repo'); - expect(result).toBe('main'); - }); - - it('falls back to "main" when neither exists', async () => { - mockBranch.mockResolvedValue({ all: ['develop', 'feature/x'] }); - - const result = await resolveDefaultBaseBranch('/fake/repo'); - expect(result).toBe('main'); - }); -}); - -describe('analyzePR', () => { - describe('happy path', () => { - it('runs all analysis steps and returns complete PRAnalysis', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.repoPath).toBe('/fake/repo'); - expect(result.baseBranch).toBe('main'); - expect(result.headBranch).toBe('feature/test'); - expect(result.changedFiles).toBe(fakeChangedFiles); - expect(result.breakingChanges).toBe(fakeBreakingChanges); - expect(result.testCoverage).toBe(fakeCoverage); - expect(result.docStaleness).toBe(fakeDocStaleness); - expect(result.impactGraph).toBe(fakeImpactGraph); - expect(result.riskScore).toBe(fakeRiskScore); - expect(typeof result.summary).toBe('string'); - }); - - it('calls parseDiff with correct arguments', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockParseDiff).toHaveBeenCalledWith('/fake/repo', 'main', 'feature/test'); - }); - - it('calls detectBreakingChanges with correct arguments', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockDetectBreakingChanges).toHaveBeenCalledWith( - '/fake/repo', 'main', 'feature/test', fakeChangedFiles, fakeReverseDeps, - ); - }); - - it('calls checkTestCoverage with correct arguments', async () => { - setupDefaultMocks(); - - await analyzePR({ - 
repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockCheckTestCoverage).toHaveBeenCalledWith('/fake/repo', fakeChangedFiles); - }); - - it('calls checkDocStaleness with correct arguments', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockCheckDocStaleness).toHaveBeenCalledWith( - '/fake/repo', fakeChangedFiles, 'main', 'feature/test', - ); - }); - - it('calls buildImpactGraph with correct arguments', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockBuildImpactGraph).toHaveBeenCalledWith('/fake/repo', fakeChangedFiles, 3, fakeReverseDeps); - }); - - it('calls calculateRisk with all analysis results', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(mockCalculateRisk).toHaveBeenCalledWith( - fakeChangedFiles, - fakeBreakingChanges, - fakeCoverage, - fakeDocStaleness, - fakeImpactGraph, - ); - }); - }); - - describe('branch resolution', () => { - it('resolves default base branch when baseBranch is not provided', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ repoPath: '/fake/repo' }); - - expect(result.baseBranch).toBe('main'); - expect(mockBranch).toHaveBeenCalled(); - }); - - it('defaults headBranch to HEAD when not provided', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ repoPath: '/fake/repo' }); - - expect(result.headBranch).toBe('HEAD'); - }); - - it('uses provided baseBranch without resolving default', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'develop', - headBranch: 'feature/x', - }); - - expect(result.baseBranch).toBe('develop'); - 
expect(mockParseDiff).toHaveBeenCalledWith('/fake/repo', 'develop', 'feature/x'); - }); - }); - - describe('skip flags', () => { - it('skips breaking change detection when skipBreaking is true', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - skipBreaking: true, - }); - - expect(mockDetectBreakingChanges).not.toHaveBeenCalled(); - expect(result.breakingChanges).toEqual([]); - }); - - it('skips test coverage analysis when skipCoverage is true', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - skipCoverage: true, - }); - - expect(mockCheckTestCoverage).not.toHaveBeenCalled(); - expect(result.testCoverage).toEqual({ - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 0, - gaps: [], - }); - }); - - it('skips doc staleness checking when skipDocs is true', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - skipDocs: true, - }); - - expect(mockCheckDocStaleness).not.toHaveBeenCalled(); - expect(result.docStaleness).toEqual({ - staleReferences: [], - checkedFiles: [], - }); - }); - - it('skips all optional steps when all skip flags are true', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - skipBreaking: true, - skipCoverage: true, - skipDocs: true, - }); - - expect(mockDetectBreakingChanges).not.toHaveBeenCalled(); - expect(mockCheckTestCoverage).not.toHaveBeenCalled(); - expect(mockCheckDocStaleness).not.toHaveBeenCalled(); - // Impact graph is always run - expect(mockBuildImpactGraph).toHaveBeenCalled(); - expect(result.breakingChanges).toEqual([]); - expect(result.testCoverage.gaps).toEqual([]); - 
expect(result.docStaleness.staleReferences).toEqual([]); - }); - - it('always runs impact graph even when other steps are skipped', async () => { - setupDefaultMocks(); - - await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - skipBreaking: true, - skipCoverage: true, - skipDocs: true, - }); - - expect(mockBuildImpactGraph).toHaveBeenCalledWith('/fake/repo', fakeChangedFiles, 3, fakeReverseDeps); - }); - }); - - describe('git verification errors', () => { - it('throws when the path is not a git repository', async () => { - setupDefaultMocks(); - mockCheckIsRepo.mockRejectedValue(new Error('not a git repository')); - - await expect( - analyzePR({ repoPath: '/not/a/repo', baseBranch: 'main', headBranch: 'HEAD' }), - ).rejects.toThrow('not a git repository'); - }); - - it('throws when the base branch ref is invalid', async () => { - setupDefaultMocks(); - mockRevparse.mockRejectedValueOnce(new Error('unknown revision')); - - await expect( - analyzePR({ repoPath: '/fake/repo', baseBranch: 'nonexistent', headBranch: 'HEAD' }), - ).rejects.toThrow('unknown revision'); - }); - - it('throws when the head branch ref is invalid', async () => { - setupDefaultMocks(); - // First call (baseBranch) succeeds, second call (headBranch) fails - mockRevparse - .mockResolvedValueOnce('abc123') - .mockRejectedValueOnce(new Error('unknown revision')); - - await expect( - analyzePR({ repoPath: '/fake/repo', baseBranch: 'main', headBranch: 'bad-ref' }), - ).rejects.toThrow('unknown revision'); - }); - - it('does not call any analysis steps when git verification fails', async () => { - setupDefaultMocks(); - mockCheckIsRepo.mockRejectedValue(new Error('not a git repository')); - - await expect( - analyzePR({ repoPath: '/not/a/repo', baseBranch: 'main', headBranch: 'HEAD' }), - ).rejects.toThrow(); - - expect(mockParseDiff).not.toHaveBeenCalled(); - expect(mockBuildReverseDependencyMap).not.toHaveBeenCalled(); - 
expect(mockDetectBreakingChanges).not.toHaveBeenCalled(); - expect(mockCheckTestCoverage).not.toHaveBeenCalled(); - expect(mockCheckDocStaleness).not.toHaveBeenCalled(); - expect(mockBuildImpactGraph).not.toHaveBeenCalled(); - expect(mockCalculateRisk).not.toHaveBeenCalled(); - }); - }); - - describe('summary generation', () => { - it('includes file count and risk level in summary', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toContain('2 files'); - expect(result.summary).toContain('medium'); - expect(result.summary).toContain('42/100'); - }); - - it('includes breaking changes count when present', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toContain('1 breaking change'); - }); - - it('includes test coverage gap count when present', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toContain('1 source file'); - expect(result.summary).toContain('lacks'); - }); - - it('uses singular "file" for 1 changed file', async () => { - setupDefaultMocks(); - mockParseDiff.mockResolvedValue([fakeChangedFiles[0]]); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toMatch(/changes 1 file\b/); - }); - - it('uses plural "files" for multiple changed files', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toMatch(/changes 2 files\b/); - }); - - it('omits breaking changes sentence when there are none', async () => { - setupDefaultMocks(); - 
mockDetectBreakingChanges.mockResolvedValue([]); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).not.toContain('breaking change'); - }); - - it('omits test coverage gap sentence when there are none', async () => { - setupDefaultMocks(); - mockCheckTestCoverage.mockResolvedValue({ - changedSourceFiles: 2, - sourceFilesWithTestChanges: 2, - coverageRatio: 1.0, - gaps: [], - }); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).not.toContain('lack'); - }); - - it('uses plural "changes" for multiple breaking changes', async () => { - setupDefaultMocks(); - mockDetectBreakingChanges.mockResolvedValue([ - fakeBreakingChanges[0], - { ...fakeBreakingChanges[0], symbolName: 'otherFn' }, - ]); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toContain('2 breaking changes'); - }); - - it('uses plural "files lack" for multiple coverage gaps', async () => { - setupDefaultMocks(); - mockCheckTestCoverage.mockResolvedValue({ - changedSourceFiles: 3, - sourceFilesWithTestChanges: 1, - coverageRatio: 0.33, - gaps: [ - { sourceFile: 'a.ts', expectedTestFiles: [], testFileExists: false, testFileChanged: false }, - { sourceFile: 'b.ts', expectedTestFiles: [], testFileExists: false, testFileChanged: false }, - ], - }); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result.summary).toContain('2 source files'); - expect(result.summary).toMatch(/\black\b/); - }); - - it('includes additions and deletions in summary', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - // 10 + 3 = 13 additions, 5 + 1 = 6 
deletions - expect(result.summary).toContain('+13/-6'); - }); - }); - - describe('result structure', () => { - it('returns all expected fields', async () => { - setupDefaultMocks(); - - const result = await analyzePR({ - repoPath: '/fake/repo', - baseBranch: 'main', - headBranch: 'feature/test', - }); - - expect(result).toHaveProperty('repoPath'); - expect(result).toHaveProperty('baseBranch'); - expect(result).toHaveProperty('headBranch'); - expect(result).toHaveProperty('changedFiles'); - expect(result).toHaveProperty('breakingChanges'); - expect(result).toHaveProperty('testCoverage'); - expect(result).toHaveProperty('docStaleness'); - expect(result).toHaveProperty('impactGraph'); - expect(result).toHaveProperty('riskScore'); - expect(result).toHaveProperty('summary'); - }); - }); -}); diff --git a/packages/core/__tests__/coverage-checker.test.ts b/packages/core/__tests__/coverage-checker.test.ts deleted file mode 100644 index e3aded8..0000000 --- a/packages/core/__tests__/coverage-checker.test.ts +++ /dev/null @@ -1,421 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { checkTestCoverage } from '../src/coverage/coverage-checker.js'; -import type { ChangedFile } from '../src/types.js'; - -// Mock the test-mapper module so we can control what mapTestFiles returns -vi.mock('../src/coverage/test-mapper.js', () => ({ - mapTestFiles: vi.fn(), -})); - -import { mapTestFiles } from '../src/coverage/test-mapper.js'; - -const mockedMapTestFiles = vi.mocked(mapTestFiles); - -beforeEach(() => { - vi.clearAllMocks(); -}); - -// ── Helpers ───────────────────────────────────────────────────────────────── - -function makeChangedFile(overrides: Partial = {}): ChangedFile { - return { - path: 'src/index.ts', - status: 'modified', - additions: 10, - deletions: 5, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -function makeSourceFile(path: string): ChangedFile { - return makeChangedFile({ path, category: 'source' }); -} - 
-function makeTestFile(path: string): ChangedFile { - return makeChangedFile({ path, category: 'test' }); -} - -function makeDocFile(path: string): ChangedFile { - return makeChangedFile({ path, category: 'doc' }); -} - -function makeConfigFile(path: string): ChangedFile { - return makeChangedFile({ path, category: 'config' }); -} - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('checkTestCoverage', () => { - // ── No source files ─────────────────────────────────────────────────── - - describe('no source files', () => { - it('should return coverageRatio 1 and no gaps when no files at all', async () => { - const result = await checkTestCoverage('/repo', []); - - expect(result.changedSourceFiles).toBe(0); - expect(result.sourceFilesWithTestChanges).toBe(0); - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - - it('should return coverageRatio 1 when only test files are changed (test-only PR)', async () => { - const changedFiles = [ - makeTestFile('src/utils/parser.test.ts'), - makeTestFile('src/utils/__tests__/helper.ts'), - ]; - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(0); - expect(result.sourceFilesWithTestChanges).toBe(0); - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - - it('should return coverageRatio 1 when only doc files are changed', async () => { - const changedFiles = [ - makeDocFile('README.md'), - makeDocFile('docs/guide.md'), - ]; - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(0); - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - - it('should return coverageRatio 1 when only config files are changed', async () => { - const changedFiles = [ - makeConfigFile('package.json'), - makeConfigFile('tsconfig.json'), - ]; - - const result = await checkTestCoverage('/repo', changedFiles); - - 
expect(result.changedSourceFiles).toBe(0); - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - }); - - // ── Full coverage ───────────────────────────────────────────────────── - - describe('full coverage', () => { - it('should return coverageRatio 1 when all source files have test changes', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeSourceFile('src/utils/formatter.ts'), - makeTestFile('src/utils/parser.test.ts'), - makeTestFile('src/utils/formatter.test.ts'), - ]; - - // mapTestFiles returns the test file for each source file - mockedMapTestFiles - .mockResolvedValueOnce(['src/utils/parser.test.ts']) - .mockResolvedValueOnce(['src/utils/formatter.test.ts']); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(2); - expect(result.sourceFilesWithTestChanges).toBe(2); - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - - it('should count as covered when any of multiple expected test files is changed', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeTestFile('test/utils/parser.test.ts'), - ]; - - // mapTestFiles returns multiple candidates, but only one is in the changed list - mockedMapTestFiles.mockResolvedValueOnce([ - 'src/utils/parser.test.ts', - 'test/utils/parser.test.ts', - ]); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.coverageRatio).toBe(1); - expect(result.gaps).toEqual([]); - }); - }); - - // ── No coverage ─────────────────────────────────────────────────────── - - describe('no coverage', () => { - it('should return coverageRatio 0 when no source files have test changes', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeSourceFile('src/utils/formatter.ts'), - ]; - - // mapTestFiles finds existing test files, but they are not in the changed list - mockedMapTestFiles - 
.mockResolvedValueOnce(['src/utils/parser.test.ts']) - .mockResolvedValueOnce(['src/utils/formatter.test.ts']); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(2); - expect(result.sourceFilesWithTestChanges).toBe(0); - expect(result.coverageRatio).toBe(0); - expect(result.gaps).toHaveLength(2); - }); - - it('should return coverageRatio 0 when no test files exist at all', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeSourceFile('src/utils/formatter.ts'), - ]; - - // mapTestFiles finds no test files on disk - mockedMapTestFiles - .mockResolvedValueOnce([]) - .mockResolvedValueOnce([]); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(2); - expect(result.sourceFilesWithTestChanges).toBe(0); - expect(result.coverageRatio).toBe(0); - expect(result.gaps).toHaveLength(2); - }); - }); - - // ── Mixed coverage ──────────────────────────────────────────────────── - - describe('mixed coverage', () => { - it('should return correct ratio for partially covered source files', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeSourceFile('src/utils/formatter.ts'), - makeSourceFile('src/utils/validator.ts'), - makeTestFile('src/utils/parser.test.ts'), - ]; - - // parser has a changed test, formatter and validator do not - mockedMapTestFiles - .mockResolvedValueOnce(['src/utils/parser.test.ts']) // parser -> covered - .mockResolvedValueOnce(['src/utils/formatter.test.ts']) // formatter -> not changed - .mockResolvedValueOnce([]); // validator -> no test - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(3); - expect(result.sourceFilesWithTestChanges).toBe(1); - expect(result.coverageRatio).toBeCloseTo(1 / 3); - expect(result.gaps).toHaveLength(2); - }); - - it('should return 0.5 for 1 of 2 source files covered', async () => { - 
const changedFiles = [ - makeSourceFile('src/a.ts'), - makeSourceFile('src/b.ts'), - makeTestFile('src/a.test.ts'), - ]; - - mockedMapTestFiles - .mockResolvedValueOnce(['src/a.test.ts']) // covered - .mockResolvedValueOnce(['src/b.test.ts']); // exists but not changed - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(2); - expect(result.sourceFilesWithTestChanges).toBe(1); - expect(result.coverageRatio).toBe(0.5); - expect(result.gaps).toHaveLength(1); - }); - }); - - // ── Gap reports ─────────────────────────────────────────────────────── - - describe('gap reports', () => { - it('should report testFileExists true when test file exists but is not changed', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - ]; - - mockedMapTestFiles.mockResolvedValueOnce(['src/utils/parser.test.ts']); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.gaps).toHaveLength(1); - expect(result.gaps[0]).toEqual({ - sourceFile: 'src/utils/parser.ts', - expectedTestFiles: ['src/utils/parser.test.ts'], - testFileExists: true, - testFileChanged: false, - }); - }); - - it('should report testFileExists false when no test file exists on disk', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - ]; - - mockedMapTestFiles.mockResolvedValueOnce([]); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.gaps).toHaveLength(1); - expect(result.gaps[0]).toEqual({ - sourceFile: 'src/utils/parser.ts', - expectedTestFiles: [], - testFileExists: false, - testFileChanged: false, - }); - }); - - it('should always report testFileChanged as false in gaps', async () => { - const changedFiles = [ - makeSourceFile('src/a.ts'), - makeSourceFile('src/b.ts'), - ]; - - mockedMapTestFiles - .mockResolvedValueOnce(['src/a.test.ts']) - .mockResolvedValueOnce([]); - - const result = await checkTestCoverage('/repo', changedFiles); - - 
for (const gap of result.gaps) { - expect(gap.testFileChanged).toBe(false); - } - }); - - it('should include the correct sourceFile path in each gap', async () => { - const changedFiles = [ - makeSourceFile('src/alpha.ts'), - makeSourceFile('src/beta.ts'), - makeSourceFile('src/gamma.ts'), - ]; - - mockedMapTestFiles - .mockResolvedValueOnce([]) - .mockResolvedValueOnce([]) - .mockResolvedValueOnce([]); - - const result = await checkTestCoverage('/repo', changedFiles); - - const gapPaths = result.gaps.map((g) => g.sourceFile); - expect(gapPaths).toEqual([ - 'src/alpha.ts', - 'src/beta.ts', - 'src/gamma.ts', - ]); - }); - - it('should include multiple expected test files in the gap', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - ]; - - mockedMapTestFiles.mockResolvedValueOnce([ - 'src/utils/parser.test.ts', - 'src/utils/__tests__/parser.ts', - ]); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.gaps).toHaveLength(1); - expect(result.gaps[0].expectedTestFiles).toEqual([ - 'src/utils/parser.test.ts', - 'src/utils/__tests__/parser.ts', - ]); - expect(result.gaps[0].testFileExists).toBe(true); - }); - }); - - // ── Only source files are processed ─────────────────────────────────── - - describe('file category filtering', () => { - it('should only process source-category files (skip test, doc, config, other)', async () => { - const changedFiles = [ - makeSourceFile('src/utils/parser.ts'), - makeTestFile('src/utils/parser.test.ts'), - makeDocFile('README.md'), - makeConfigFile('package.json'), - makeChangedFile({ path: 'assets/logo.png', category: 'other' }), - ]; - - // Only parser.ts is source, so mapTestFiles is called once - mockedMapTestFiles.mockResolvedValueOnce(['src/utils/parser.test.ts']); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(mockedMapTestFiles).toHaveBeenCalledTimes(1); - expect(mockedMapTestFiles).toHaveBeenCalledWith('/repo', 
'src/utils/parser.ts'); - expect(result.changedSourceFiles).toBe(1); - }); - - it('should not call mapTestFiles for non-source files', async () => { - const changedFiles = [ - makeTestFile('src/utils/parser.test.ts'), - makeDocFile('docs/guide.md'), - makeConfigFile('tsconfig.json'), - ]; - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(mockedMapTestFiles).not.toHaveBeenCalled(); - expect(result.changedSourceFiles).toBe(0); - }); - }); - - // ── Counts accuracy ────────────────────────────────────────────────── - - describe('counts accuracy', () => { - it('should count changedSourceFiles correctly', async () => { - const changedFiles = [ - makeSourceFile('src/a.ts'), - makeSourceFile('src/b.ts'), - makeSourceFile('src/c.ts'), - makeTestFile('src/a.test.ts'), - makeDocFile('README.md'), - ]; - - mockedMapTestFiles - .mockResolvedValueOnce(['src/a.test.ts']) - .mockResolvedValueOnce([]) - .mockResolvedValueOnce([]); - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.changedSourceFiles).toBe(3); - }); - - it('should count sourceFilesWithTestChanges correctly', async () => { - const changedFiles = [ - makeSourceFile('src/a.ts'), - makeSourceFile('src/b.ts'), - makeSourceFile('src/c.ts'), - makeTestFile('src/a.test.ts'), - makeTestFile('src/b.test.ts'), - ]; - - mockedMapTestFiles - .mockResolvedValueOnce(['src/a.test.ts']) // covered - .mockResolvedValueOnce(['src/b.test.ts']) // covered - .mockResolvedValueOnce(['src/c.test.ts']); // exists but not changed - - const result = await checkTestCoverage('/repo', changedFiles); - - expect(result.sourceFilesWithTestChanges).toBe(2); - expect(result.changedSourceFiles).toBe(3); - expect(result.coverageRatio).toBeCloseTo(2 / 3); - }); - - it('should pass the repoPath to mapTestFiles', async () => { - const changedFiles = [makeSourceFile('src/a.ts')]; - mockedMapTestFiles.mockResolvedValueOnce([]); - - await checkTestCoverage('/my/special/repo', changedFiles); - - 
expect(mockedMapTestFiles).toHaveBeenCalledWith('/my/special/repo', 'src/a.ts'); - }); - }); -}); diff --git a/packages/core/__tests__/detector.test.ts b/packages/core/__tests__/detector.test.ts deleted file mode 100644 index d8214db..0000000 --- a/packages/core/__tests__/detector.test.ts +++ /dev/null @@ -1,1242 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// ── Mock setup ────────────────────────────────────────────────────────────── -// vi.hoisted() ensures the mock fns exist before vi.mock factories run -// (vi.mock is hoisted above all other code by vitest). - -const { mockShow, mockFg, mockReadFile } = vi.hoisted(() => ({ - mockShow: vi.fn(), - mockFg: vi.fn(), - mockReadFile: vi.fn(), -})); - -vi.mock('simple-git', () => ({ - default: () => ({ - show: mockShow, - }), -})); - -vi.mock('fast-glob', () => ({ - default: mockFg, -})); - -vi.mock('fs/promises', () => ({ - readFile: mockReadFile, -})); - -import { detectBreakingChanges } from '../src/breaking/detector.js'; -import type { ChangedFile } from '../src/types.js'; - -// ── Helpers ───────────────────────────────────────────────────────────────── - -/** - * Create a minimal ChangedFile object for testing. - */ -function makeChangedFile( - overrides: Partial & Pick, -): ChangedFile { - return { - status: 'modified', - additions: 0, - deletions: 0, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -/** - * Set up `git.show()` to return specific content based on the ref:path argument. - * Accepts a map of `"ref:path"` → content string (or Error to simulate failure). - */ -function setupGitShow(fileContents: Record): void { - mockShow.mockImplementation(async (args: string[]) => { - const key = args[0]; // e.g. 
"main:src/lib.ts" - if (key in fileContents) { - const value = fileContents[key]; - if (value instanceof Error) { - throw value; - } - return value; - } - // File does not exist at this ref - throw new Error(`fatal: path '${key}' does not exist`); - }); -} - -// ── Reset mocks before each test ──────────────────────────────────────────── - -beforeEach(() => { - mockShow.mockReset(); - mockFg.mockReset(); - mockReadFile.mockReset(); - - // By default, fast-glob returns no files (no consumers) - mockFg.mockResolvedValue([]); -}); - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('detectBreakingChanges', () => { - const repoPath = '/repo'; - const base = 'main'; - const head = 'feature'; - - // ── 1. Removed exports ────────────────────────────────────────────────── - - describe('removed exports', () => { - it('should detect a removed export function as removed_export with high severity', async () => { - const baseContent = ` - export function foo(): void {} - export function bar(): string { return ''; } - `; - const headContent = ` - export function foo(): void {} - `; - - setupGitShow({ - 'main:src/lib.ts': baseContent, - 'feature:src/lib.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/lib.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('bar'); - expect(result[0].severity).toBe('high'); - expect(result[0].filePath).toBe('src/lib.ts'); - expect(result[0].after).toBeNull(); - expect(result[0].before).toContain('bar'); - }); - - it('should detect removal of multiple exports', async () => { - const baseContent = ` - export function alpha(): void {} - export function beta(): void {} - export function gamma(): void {} - `; - const headContent = ` - export function alpha(): void {} - `; - - setupGitShow({ - 'main:src/lib.ts': baseContent, - 
'feature:src/lib.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/lib.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - const removedNames = result - .filter((bc) => bc.type === 'removed_export') - .map((bc) => bc.symbolName); - expect(removedNames).toContain('beta'); - expect(removedNames).toContain('gamma'); - expect(removedNames).not.toContain('alpha'); - }); - - it('should detect removal of a class export', async () => { - const baseContent = ` - export class MyService {} - export class MyHelper {} - `; - const headContent = ` - export class MyService {} - `; - - setupGitShow({ - 'main:src/service.ts': baseContent, - 'feature:src/service.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/service.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('MyHelper'); - expect(result[0].severity).toBe('high'); - }); - }); - - // ── 2. 
Changed signatures ───────────────────────────────────────────── - - describe('changed signatures', () => { - it('should detect an added parameter as changed_signature with medium severity', async () => { - const baseContent = 'export function calc(a: number): number { return a; }'; - const headContent = 'export function calc(a: number, b: number): number { return a + b; }'; - - setupGitShow({ - 'main:src/math.ts': baseContent, - 'feature:src/math.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/math.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('changed_signature'); - expect(result[0].symbolName).toBe('calc'); - expect(result[0].severity).toBe('medium'); - expect(result[0].before).toBeTruthy(); - expect(result[0].after).toBeTruthy(); - }); - - it('should detect a removed parameter as changed_signature', async () => { - const baseContent = 'export function greet(name: string, greeting: string): string { return greeting + name; }'; - const headContent = 'export function greet(name: string): string { return name; }'; - - setupGitShow({ - 'main:src/greet.ts': baseContent, - 'feature:src/greet.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/greet.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('changed_signature'); - expect(result[0].symbolName).toBe('greet'); - expect(result[0].severity).toBe('medium'); - }); - - it('should detect a changed parameter type as changed_signature', async () => { - const baseContent = 'export function parse(input: string): void {}'; - const headContent = 'export function parse(input: number): void {}'; - - setupGitShow({ - 'main:src/parse.ts': baseContent, - 'feature:src/parse.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/parse.ts' })]; - const result = await 
detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('changed_signature'); - expect(result[0].symbolName).toBe('parse'); - expect(result[0].severity).toBe('medium'); - }); - - it('should detect a changed return type as changed_signature', async () => { - const baseContent = 'export function getId(): string { return \'\'; }'; - const headContent = 'export function getId(): number { return 0; }'; - - setupGitShow({ - 'main:src/id.ts': baseContent, - 'feature:src/id.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/id.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('changed_signature'); - expect(result[0].symbolName).toBe('getId'); - expect(result[0].severity).toBe('medium'); - }); - }); - - // ── 3. Changed types ────────────────────────────────────────────────── - - describe('changed types', () => { - it('should detect a kind change (const to variable) as changed_type with medium severity', async () => { - const baseContent = 'export const config = {};'; - const headContent = 'export let config = {};'; - - setupGitShow({ - 'main:src/config.ts': baseContent, - 'feature:src/config.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/config.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('changed_type'); - expect(result[0].symbolName).toBe('config'); - expect(result[0].severity).toBe('medium'); - }); - }); - - // ── 4. 
Renamed exports ──────────────────────────────────────────────── - - describe('renamed exports', () => { - it('should detect a renamed function as renamed_export with low severity', async () => { - // Same signature shape, different name => rename - const baseContent = 'export function oldName(x: number): number { return x; }'; - const headContent = 'export function newName(x: number): number { return x; }'; - - setupGitShow({ - 'main:src/util.ts': baseContent, - 'feature:src/util.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/util.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('renamed_export'); - expect(result[0].symbolName).toBe('oldName'); - expect(result[0].severity).toBe('low'); - expect(result[0].before).toContain('oldName'); - expect(result[0].after).toContain('newName'); - }); - - it('should detect a renamed class as renamed_export', async () => { - const baseContent = 'export class OldClass {}'; - const headContent = 'export class NewClass {}'; - - setupGitShow({ - 'main:src/cls.ts': baseContent, - 'feature:src/cls.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/cls.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('renamed_export'); - expect(result[0].symbolName).toBe('OldClass'); - expect(result[0].severity).toBe('low'); - }); - - it('should not match a rename when the kind differs', async () => { - // Removed a function, added a class with same-ish signature shape. - // Kind mismatch means it should NOT be detected as a rename. 
- const baseContent = 'export function Widget(): void {}'; - const headContent = 'export class Widget {}'; - - setupGitShow({ - 'main:src/widget.ts': baseContent, - 'feature:src/widget.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/widget.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // This should show up as a changed_type (modified, kind changed) since - // the name is the same in both base and head — diffExports puts it in modified - const changedTypes = result.filter((bc) => bc.type === 'changed_type'); - expect(changedTypes.length).toBeGreaterThanOrEqual(1); - expect(changedTypes[0].symbolName).toBe('Widget'); - }); - - it('should not match a rename when signatures differ', async () => { - // Different name AND different signature => removed_export, not rename - const baseContent = 'export function oldFunc(a: string): void {}'; - const headContent = 'export function newFunc(a: number, b: number): number { return 0; }'; - - setupGitShow({ - 'main:src/func.ts': baseContent, - 'feature:src/func.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/func.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - const removed = result.filter((bc) => bc.type === 'removed_export'); - expect(removed).toHaveLength(1); - expect(removed[0].symbolName).toBe('oldFunc'); - expect(removed[0].severity).toBe('high'); - }); - }); - - // ── 4b. 
Renamed files (file path changes) ───────────────────────────── - - describe('renamed files', () => { - it('should detect renamed files as breaking changes with low severity', async () => { - setupGitShow({ - 'main:src/utils.ts': 'export function helper(x: number): number { return x; }', - 'feature:src/new-utils.ts': 'export function helper(x: number): number { return x; }', - }); - - const files = [ - makeChangedFile({ - path: 'src/new-utils.ts', - oldPath: 'src/utils.ts', - status: 'renamed', - }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result.length).toBeGreaterThan(0); - const renameChange = result.find((bc) => bc.type === 'renamed_export'); - expect(renameChange).toBeDefined(); - expect(renameChange!.symbolName).toBe('helper'); - expect(renameChange!.severity).toBe('low'); - expect(renameChange!.filePath).toBe('src/utils.ts'); - }); - - it('should detect symbols removed during a file rename as high severity', async () => { - setupGitShow({ - 'main:src/utils.ts': 'export function helper(x: number): number { return x; }\nexport function removed(): void {}', - 'feature:src/new-utils.ts': 'export function helper(x: number): number { return x; }', - }); - - const files = [ - makeChangedFile({ - path: 'src/new-utils.ts', - oldPath: 'src/utils.ts', - status: 'renamed', - additions: 5, - deletions: 5, - }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - const removedChange = result.find( - (bc) => bc.type === 'removed_export' && bc.symbolName === 'removed', - ); - expect(removedChange).toBeDefined(); - expect(removedChange!.severity).toBe('high'); - }); - - it('should skip renamed file if old path base content is unavailable', async () => { - // Old path doesn't exist at base ref - setupGitShow({ - 'feature:src/new-name.ts': 'export function foo(): void {}', - }); - - const files = [ - makeChangedFile({ - path: 'src/new-name.ts', - oldPath: 'src/old-name.ts', - status: 'renamed', 
- }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - - it('should handle renamed file where head content is unavailable', async () => { - // Old file had exports, but new file can't be read — all exports become removed - setupGitShow({ - 'main:src/old.ts': 'export function foo(): void {}', - // HEAD:src/new.ts is not provided, so getFileAtRef returns null - }); - - const files = [ - makeChangedFile({ - path: 'src/new.ts', - oldPath: 'src/old.ts', - status: 'renamed', - }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // Since head content is null, newExports has no symbols, so all old exports are removed - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('foo'); - expect(result[0].severity).toBe('high'); - }); - - it('should report multiple exports from a renamed file', async () => { - setupGitShow({ - 'main:src/old-api.ts': 'export function alpha(): void {}\nexport function beta(): string { return \'\'; }\nexport class Gamma {}', - 'feature:src/new-api.ts': 'export function alpha(): void {}\nexport function beta(): string { return \'\'; }\nexport class Gamma {}', - }); - - const files = [ - makeChangedFile({ - path: 'src/new-api.ts', - oldPath: 'src/old-api.ts', - status: 'renamed', - }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // All three symbols should be flagged as renamed_export - const renames = result.filter((bc) => bc.type === 'renamed_export'); - expect(renames).toHaveLength(3); - const names = renames.map((bc) => bc.symbolName); - expect(names).toContain('alpha'); - expect(names).toContain('beta'); - expect(names).toContain('Gamma'); - for (const bc of renames) { - expect(bc.severity).toBe('low'); - expect(bc.filePath).toBe('src/old-api.ts'); - } - }); - }); - - // ── 5. 
File filtering ───────────────────────────────────────────────── - - describe('file filtering', () => { - it('should skip non-source files (.md, .json, .yaml)', async () => { - // These files should not be analyzed even if they are "modified" - const files = [ - makeChangedFile({ path: 'README.md', language: 'markdown', category: 'doc' }), - makeChangedFile({ path: 'package.json', language: 'json', category: 'config' }), - makeChangedFile({ path: 'config.yaml', language: 'yaml', category: 'config' }), - ]; - - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - // git.show should not have been called at all - expect(mockShow).not.toHaveBeenCalled(); - }); - - it('should analyze .ts files', async () => { - setupGitShow({ - 'main:src/index.ts': 'export function foo(): void {}', - 'feature:src/index.ts': '', - }); - - const files = [makeChangedFile({ path: 'src/index.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].symbolName).toBe('foo'); - }); - - it('should analyze .tsx files', async () => { - setupGitShow({ - 'main:src/App.tsx': 'export function App(): void {}', - 'feature:src/App.tsx': '', - }); - - const files = [makeChangedFile({ path: 'src/App.tsx' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].symbolName).toBe('App'); - }); - - it('should analyze .js files', async () => { - setupGitShow({ - 'main:lib/util.js': 'export function helper() {}', - 'feature:lib/util.js': '', - }); - - const files = [ - makeChangedFile({ path: 'lib/util.js', language: 'javascript' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].symbolName).toBe('helper'); - }); - - it('should analyze .jsx files', async () => { - setupGitShow({ - 'main:src/Button.jsx': 'export 
function Button() {}', - 'feature:src/Button.jsx': '', - }); - - const files = [ - makeChangedFile({ path: 'src/Button.jsx', language: 'javascript' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].symbolName).toBe('Button'); - }); - - it('should skip added files (only modified, deleted, and renamed are analyzed)', async () => { - const files = [ - makeChangedFile({ path: 'src/new-file.ts', status: 'added' }), - ]; - - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - expect(mockShow).not.toHaveBeenCalled(); - }); - }); - - // ── 6. Deleted files ────────────────────────────────────────────────── - - describe('deleted files', () => { - it('should report all exports from a deleted file as removed_export with high severity', async () => { - const baseContent = ` - export function alpha(): void {} - export class Beta {} - export const GAMMA = 42; - export interface Delta { x: number; } - `; - - setupGitShow({ - 'main:src/deleted.ts': baseContent, - }); - - const files = [ - makeChangedFile({ path: 'src/deleted.ts', status: 'deleted' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result.length).toBeGreaterThanOrEqual(4); - for (const bc of result) { - expect(bc.type).toBe('removed_export'); - expect(bc.severity).toBe('high'); - expect(bc.after).toBeNull(); - expect(bc.filePath).toBe('src/deleted.ts'); - } - - const names = result.map((bc) => bc.symbolName); - expect(names).toContain('alpha'); - expect(names).toContain('Beta'); - expect(names).toContain('GAMMA'); - expect(names).toContain('Delta'); - }); - - it('should handle a deleted file with no exports gracefully', async () => { - const baseContent = 'const internal = 42;'; - - setupGitShow({ - 'main:src/internal.ts': baseContent, - }); - - const files = [ - makeChangedFile({ path: 'src/internal.ts', status: 'deleted' }), - 
]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - - it('should skip a deleted file if base content is not available', async () => { - // git.show for the base ref throws (file didn't exist in base either) - setupGitShow({}); - - const files = [ - makeChangedFile({ path: 'src/ghost.ts', status: 'deleted' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - }); - - // ── 7. New files (only in head) ─────────────────────────────────────── - - describe('new files (added)', () => { - it('should produce no breaking changes for added files', async () => { - const files = [ - makeChangedFile({ path: 'src/brand-new.ts', status: 'added' }), - makeChangedFile({ path: 'src/another-new.tsx', status: 'added' }), - ]; - - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - }); - - // ── 8. Error handling ───────────────────────────────────────────────── - - describe('error handling', () => { - it('should silently skip files when git.show throws for base ref', async () => { - // Simulates a binary file or unreadable file at base - setupGitShow({}); - - const files = [makeChangedFile({ path: 'src/binary.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - - it('should silently skip modified files when git.show throws for head ref', async () => { - // Base exists but head throws - setupGitShow({ - 'main:src/broken.ts': 'export function foo(): void {}', - // head ref not provided, so it will throw - }); - - const files = [makeChangedFile({ path: 'src/broken.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // Head content is null => the file is skipped (continue) - expect(result).toEqual([]); - }); - - it('should skip a file if an unexpected error occurs during analysis', async () 
=> { - // We set up base to return content, head to return content, but the content - // causes diffExports to throw (unlikely but tests the catch block). - // We'll simulate this by making git.show return then throw on the second call - // for a different file, while the first file works fine. - const goodBase = 'export function working(): void {}'; - const goodHead = ''; - - setupGitShow({ - 'main:src/good.ts': goodBase, - 'feature:src/good.ts': goodHead, - 'main:src/bad.ts': 'export function oops(): void {}', - 'feature:src/bad.ts': 'export function oops(): void {}', - }); - - // Override show to throw specifically for bad.ts by making it throw - // an error after returning base content - const originalImpl = mockShow.getMockImplementation()!; - mockShow.mockImplementation(async (args: string[]) => { - const key = args[0]; - if (key === 'main:src/bad.ts') { - // Return base content first time - return 'export function oops(): void {}'; - } - if (key === 'feature:src/bad.ts') { - // Throw on head to simulate corruption - throw new Error('simulated corruption'); - } - return originalImpl(args); - }); - - const files = [ - makeChangedFile({ path: 'src/good.ts' }), - makeChangedFile({ path: 'src/bad.ts' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // good.ts should produce a result, bad.ts should be skipped - const goodResults = result.filter((bc) => bc.filePath === 'src/good.ts'); - const badResults = result.filter((bc) => bc.filePath === 'src/bad.ts'); - expect(goodResults).toHaveLength(1); - expect(badResults).toHaveLength(0); - }); - - it('should hit the outer catch block and warn when diffExports throws', async () => { - // Return non-string content from git.show to cause parseExports/diffExports - // to throw (string.replace on a non-string). The getFileAtRef wrapper - // only catches errors from git.show itself, not from bad return values. 
- mockShow.mockImplementation(async (args: string[]) => { - const key = args[0]; - if (key === 'main:src/broken.ts') { - // Return an object instead of a string to trigger a runtime error - // in parseExports when it tries to call .replace() on it - return { notAString: true }; - } - if (key === 'feature:src/broken.ts') { - return 'export function ok(): void {}'; - } - throw new Error('not found'); - }); - - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - - const files = [makeChangedFile({ path: 'src/broken.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // The file should be skipped due to the error - expect(result).toEqual([]); - // console.warn should have been called with the skip message - expect(warnSpy).toHaveBeenCalledWith( - expect.stringContaining('[pr-impact] Skipping src/broken.ts:'), - ); - - warnSpy.mockRestore(); - }); - - it('should warn with stringified error when error is not an Error instance', async () => { - // Make git.show return base content, then have head throw in a way - // that bypasses getFileAtRef's catch (which only catches thrown errors) - mockShow.mockImplementation(async (args: string[]) => { - const key = args[0]; - if (key === 'main:src/throws.ts') { - return 42; // numeric content causes stripComments to throw - } - if (key === 'feature:src/throws.ts') { - return 'export function ok(): void {}'; - } - throw new Error('not found'); - }); - - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - - const files = [makeChangedFile({ path: 'src/throws.ts', status: 'deleted' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - expect(warnSpy).toHaveBeenCalledWith( - expect.stringContaining('[pr-impact] Skipping src/throws.ts:'), - ); - - warnSpy.mockRestore(); - }); - - it('should handle a mix of analyzable and unanalyzable files', async () => { - setupGitShow({ - 'main:src/ok.ts': 'export 
function valid(): void {}', - 'feature:src/ok.ts': '', - // src/nope.ts doesn't exist at either ref - }); - - const files = [ - makeChangedFile({ path: 'src/ok.ts' }), - makeChangedFile({ path: 'src/nope.ts' }), - makeChangedFile({ path: 'data.json', language: 'json', category: 'config' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].filePath).toBe('src/ok.ts'); - }); - }); - - // ── 9. Empty changed files ──────────────────────────────────────────── - - describe('empty changed files', () => { - it('should return an empty array when no files are changed', async () => { - const result = await detectBreakingChanges(repoPath, base, head, []); - - expect(result).toEqual([]); - expect(mockShow).not.toHaveBeenCalled(); - }); - }); - - // ── 10. Consumer population ─────────────────────────────────────────── - - describe('consumer population', () => { - it('should populate consumers for files with breaking changes', async () => { - const baseContent = ` - export function removed(): void {} - export function kept(): void {} - `; - const headContent = ` - export function kept(): void {} - `; - - setupGitShow({ - 'main:src/lib.ts': baseContent, - 'feature:src/lib.ts': headContent, - }); - - // fast-glob returns source files in the repo - mockFg.mockResolvedValue([ - '/repo/src/lib.ts', - '/repo/src/consumer-a.ts', - '/repo/src/consumer-b.ts', - '/repo/src/unrelated.ts', - ]); - - // readFile returns content for consumer files - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/consumer-a.ts') { - return 'import { removed } from \'./lib\';'; - } - if (path === '/repo/src/consumer-b.ts') { - return 'import { kept, removed } from \'./lib\';'; - } - if (path === '/repo/src/unrelated.ts') { - return 'import { something } from \'./other\';'; - } - if (path === '/repo/src/lib.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - 
const files = [makeChangedFile({ path: 'src/lib.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('removed'); - - // Both consumer-a.ts and consumer-b.ts import from './lib' - expect(result[0].consumers).toContain('src/consumer-a.ts'); - expect(result[0].consumers).toContain('src/consumer-b.ts'); - expect(result[0].consumers).not.toContain('src/unrelated.ts'); - }); - - it('should not run consumer detection when there are no breaking changes', async () => { - // No changes in exports - const content = 'export function unchanged(): void {}'; - setupGitShow({ - 'main:src/stable.ts': content, - 'feature:src/stable.ts': content, - }); - - const files = [makeChangedFile({ path: 'src/stable.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - // fast-glob should not have been called - expect(mockFg).not.toHaveBeenCalled(); - }); - - it('should handle consumers that cannot be read (unreadable files)', async () => { - const baseContent = 'export function old(): void {}'; - const headContent = ''; - - setupGitShow({ - 'main:src/api.ts': baseContent, - 'feature:src/api.ts': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/api.ts', - '/repo/src/unreadable.ts', - ]); - - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/unreadable.ts') { - throw new Error('EACCES: permission denied'); - } - if (path === '/repo/src/api.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [makeChangedFile({ path: 'src/api.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // Should still produce the breaking change, just without unreadable consumers - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - 
expect(result[0].consumers).toEqual([]); - }); - - it('should detect consumers using dynamic import()', async () => { - const baseContent = 'export function doWork(): void {}'; - const headContent = ''; - - setupGitShow({ - 'main:src/worker.ts': baseContent, - 'feature:src/worker.ts': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/worker.ts', - '/repo/src/lazy.ts', - ]); - - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/lazy.ts') { - return 'const mod = await import(\'./worker\');'; - } - if (path === '/repo/src/worker.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [makeChangedFile({ path: 'src/worker.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].consumers).toContain('src/lazy.ts'); - }); - - it('should detect consumers using require()', async () => { - const baseContent = 'export function doWork(): void {}'; - const headContent = ''; - - setupGitShow({ - 'main:src/worker.js': baseContent, - 'feature:src/worker.js': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/worker.js', - '/repo/src/loader.js', - ]); - - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/loader.js') { - return 'const mod = require(\'./worker\');'; - } - if (path === '/repo/src/worker.js') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [ - makeChangedFile({ path: 'src/worker.js', language: 'javascript' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].consumers).toContain('src/loader.js'); - }); - - it('should resolve imports with extension resolution', async () => { - const baseContent = 'export function helper(): void {}'; - const headContent = ''; - - setupGitShow({ - 'main:src/utils.ts': baseContent, - 
'feature:src/utils.ts': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/utils.ts', - '/repo/src/app.ts', - ]); - - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/app.ts') { - // Imports without extension — should resolve to src/utils.ts - return 'import { helper } from \'./utils\';'; - } - if (path === '/repo/src/utils.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [makeChangedFile({ path: 'src/utils.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].consumers).toContain('src/app.ts'); - }); - - it('should ignore non-relative imports for consumer detection', async () => { - const baseContent = 'export function something(): void {}'; - const headContent = ''; - - setupGitShow({ - 'main:src/lib.ts': baseContent, - 'feature:src/lib.ts': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/lib.ts', - '/repo/src/app.ts', - ]); - - mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/app.ts') { - // Only imports from node_modules, not relative - return 'import express from \'express\';\nimport lodash from \'lodash\';'; - } - if (path === '/repo/src/lib.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [makeChangedFile({ path: 'src/lib.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].consumers).toEqual([]); - }); - - it('should populate consumers on all breaking changes for the same file', async () => { - const baseContent = ` - export function alpha(): void {} - export function beta(): void {} - `; - const headContent = ''; - - setupGitShow({ - 'main:src/api.ts': baseContent, - 'feature:src/api.ts': headContent, - }); - - mockFg.mockResolvedValue([ - '/repo/src/api.ts', - '/repo/src/consumer.ts', - ]); - - 
mockReadFile.mockImplementation(async (path: string) => { - if (path === '/repo/src/consumer.ts') { - return 'import { alpha } from \'./api\';'; - } - if (path === '/repo/src/api.ts') { - return headContent; - } - throw new Error('file not found'); - }); - - const files = [makeChangedFile({ path: 'src/api.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - // Both alpha and beta removals should have the same consumers - expect(result).toHaveLength(2); - for (const bc of result) { - expect(bc.consumers).toContain('src/consumer.ts'); - } - }); - }); - - // ── formatSymbolDescription() edge cases ────────────────────────────── - // formatSymbolDescription() is a private helper; we test it indirectly by - // examining the `before` and `after` fields of detected breaking changes. - - describe('formatSymbolDescription() edge cases', () => { - it('should include "default" prefix for a default export with a signature', async () => { - const baseContent = 'export default function handler(req: Request): Response { return new Response(); }'; - const headContent = ''; - - setupGitShow({ - 'main:src/handler.ts': baseContent, - 'feature:src/handler.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/handler.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('handler'); - // formatSymbolDescription should produce "default function handler (req: Request): Response" - // The "before" field should contain "default" because isDefault is true - expect(result[0].before).toMatch(/^default /); - // It should also contain the signature - expect(result[0].before).toContain('handler'); - }); - - it('should not include "default" prefix for a non-default export without signature', async () => { - const baseContent = 'export class Widget {}'; - const headContent = ''; - - setupGitShow({ 
- 'main:src/widget.ts': baseContent, - 'feature:src/widget.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/widget.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].type).toBe('removed_export'); - expect(result[0].symbolName).toBe('Widget'); - // For a non-default export, "before" should start with the kind, not "default" - expect(result[0].before).not.toMatch(/^default /); - // Should be something like "class Widget" - expect(result[0].before).toMatch(/^class Widget/); - }); - - it('should include signature for a non-default function export', async () => { - const baseContent = 'export function compute(x: number, y: number): number { return x + y; }'; - const headContent = ''; - - setupGitShow({ - 'main:src/compute.ts': baseContent, - 'feature:src/compute.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/compute.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - // before should be "function compute " — no "default" prefix - expect(result[0].before).not.toMatch(/^default /); - expect(result[0].before).toContain('function'); - expect(result[0].before).toContain('compute'); - }); - - it('should include "default" prefix for a default exported class', async () => { - const baseContent = 'export default class AppService { start(): void {} }'; - const headContent = ''; - - setupGitShow({ - 'main:src/app-service.ts': baseContent, - 'feature:src/app-service.ts': headContent, - }); - - const files = [makeChangedFile({ path: 'src/app-service.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toHaveLength(1); - expect(result[0].symbolName).toBe('AppService'); - // Should have "default" prefix - expect(result[0].before).toMatch(/^default /); - expect(result[0].before).toContain('class'); - 
expect(result[0].before).toContain('AppService'); - }); - }); - - // ── Combined scenarios ──────────────────────────────────────────────── - - describe('combined scenarios', () => { - it('should handle multiple files with different change types', async () => { - setupGitShow({ - // File 1: removed export - 'main:src/a.ts': 'export function removed(): void {}', - 'feature:src/a.ts': '', - // File 2: changed signature - 'main:src/b.ts': 'export function changed(x: string): void {}', - 'feature:src/b.ts': 'export function changed(x: string, y: number): void {}', - // File 3: deleted file - 'main:src/c.ts': 'export class Gone {}', - }); - - mockFg.mockResolvedValue([]); - - const files = [ - makeChangedFile({ path: 'src/a.ts' }), - makeChangedFile({ path: 'src/b.ts' }), - makeChangedFile({ path: 'src/c.ts', status: 'deleted' }), - ]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result.length).toBeGreaterThanOrEqual(3); - - const removedFromA = result.find( - (bc) => bc.filePath === 'src/a.ts' && bc.type === 'removed_export', - ); - expect(removedFromA).toBeDefined(); - expect(removedFromA!.symbolName).toBe('removed'); - - const changedInB = result.find( - (bc) => bc.filePath === 'src/b.ts' && bc.type === 'changed_signature', - ); - expect(changedInB).toBeDefined(); - expect(changedInB!.symbolName).toBe('changed'); - - const deletedInC = result.find( - (bc) => bc.filePath === 'src/c.ts' && bc.type === 'removed_export', - ); - expect(deletedInC).toBeDefined(); - expect(deletedInC!.symbolName).toBe('Gone'); - }); - - it('should handle a file with both renames and removals', async () => { - // oldFunc -> newFunc (rename), deadFunc removed - const baseContent = ` - export function oldFunc(x: number): number { return x; } - export function deadFunc(): void {} - export function stable(): void {} - `; - const headContent = ` - export function newFunc(x: number): number { return x; } - export function stable(): void {} - `; - - 
setupGitShow({ - 'main:src/mixed.ts': baseContent, - 'feature:src/mixed.ts': headContent, - }); - - mockFg.mockResolvedValue([]); - - const files = [makeChangedFile({ path: 'src/mixed.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - const rename = result.find((bc) => bc.type === 'renamed_export'); - expect(rename).toBeDefined(); - expect(rename!.symbolName).toBe('oldFunc'); - - const removed = result.find((bc) => bc.type === 'removed_export'); - expect(removed).toBeDefined(); - expect(removed!.symbolName).toBe('deadFunc'); - }); - - it('should not report breaking changes when no exports changed', async () => { - const content = ` - export function stable(): void {} - export const VALUE = 42; - `; - - setupGitShow({ - 'main:src/stable.ts': content, - 'feature:src/stable.ts': content, - }); - - const files = [makeChangedFile({ path: 'src/stable.ts' })]; - const result = await detectBreakingChanges(repoPath, base, head, files); - - expect(result).toEqual([]); - }); - }); -}); diff --git a/packages/core/__tests__/diff-parser.test.ts b/packages/core/__tests__/diff-parser.test.ts deleted file mode 100644 index ce275f6..0000000 --- a/packages/core/__tests__/diff-parser.test.ts +++ /dev/null @@ -1,606 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// Mock simple-git before importing the module under test. -const mockDiffSummary = vi.fn(); -vi.mock('simple-git', () => ({ - default: () => ({ - diffSummary: mockDiffSummary, - }), -})); - -import { parseDiff, detectLanguage } from '../src/diff/diff-parser.js'; - -beforeEach(() => { - mockDiffSummary.mockReset(); -}); - -// ── Helpers ──────────────────────────────────────────────────────────────────── - -/** - * Build a minimal DiffSummary-shaped object for simple-git. - * `files` is the list of file entries; the optional arrays let us flag - * created / deleted / renamed files by their raw path strings. 
- */ -function makeDiffSummary( - files: Array<{ - file: string; - insertions: number; - deletions: number; - binary?: boolean; - }>, - opts: { - created?: string[]; - deleted?: string[]; - renamed?: string[]; - } = {}, -) { - return { - files, - insertions: files.reduce((s, f) => s + f.insertions, 0), - deletions: files.reduce((s, f) => s + f.deletions, 0), - changed: files.length, - created: opts.created ?? [], - deleted: opts.deleted ?? [], - renamed: opts.renamed ?? [], - }; -} - -// ── Tests ────────────────────────────────────────────────────────────────────── - -describe('parseDiff', () => { - // ── Language detection ──────────────────────────────────────────────────── - - describe('language detection', () => { - it.each([ - ['src/index.ts', 'typescript'], - ['src/App.tsx', 'typescript'], - ['lib/util.js', 'javascript'], - ['lib/Widget.jsx', 'javascript'], - ['lib/esm.mjs', 'javascript'], - ['lib/cjs.cjs', 'javascript'], - ['app/main.py', 'python'], - ['cmd/server.go', 'go'], - ['src/lib.rs', 'rust'], - ['src/Main.java', 'java'], - ['src/main.c', 'c'], - ['src/main.cpp', 'cpp'], - ['include/header.h', 'c'], - ['include/header.hpp', 'cpp'], - ['lib/app.rb', 'ruby'], - ['src/index.php', 'php'], - ['Sources/App.swift', 'swift'], - ['src/main.kt', 'kotlin'], - ['src/Main.scala', 'scala'], - ['src/Program.cs', 'csharp'], - ['src/App.vue', 'vue'], - ['src/App.svelte', 'svelte'], - ['docs/guide.md', 'markdown'], - ['docs/guide.mdx', 'markdown'], - ['data/config.json', 'json'], - ['config.yaml', 'yaml'], - ['config.yml', 'yaml'], - ['config.toml', 'toml'], - ['data/feed.xml', 'xml'], - ['public/index.html', 'html'], - ['styles/main.css', 'css'], - ['styles/main.scss', 'scss'], - ['styles/main.less', 'less'], - ['db/migrations.sql', 'sql'], - ['scripts/run.sh', 'shell'], - ['scripts/run.bash', 'shell'], - ['scripts/run.zsh', 'shell'], - ['Dockerfile', 'dockerfile'], - ['schema.graphql', 'graphql'], - ['schema.gql', 'graphql'], - ['api/service.proto', 'protobuf'], - 
['notes.txt', 'text'], - ['docs/index.rst', 'restructuredtext'], - ])('should detect language for %s as %s', async (filePath, expectedLang) => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: filePath, insertions: 1, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].language).toBe(expectedLang); - }); - - it('should return "unknown" for files with no extension', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'LICENSE', insertions: 1, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].language).toBe('unknown'); - }); - - it('should return "unknown" for unrecognized extensions', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'data/file.xyz', insertions: 1, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].language).toBe('unknown'); - }); - - it('should detect "dockerfile" for a file named Dockerfile (no extension)', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'deploy/Dockerfile', insertions: 3, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].language).toBe('dockerfile'); - }); - - it('should detect "makefile" for a file named Makefile', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'Makefile', insertions: 5, deletions: 2 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].language).toBe('makefile'); - }); - - it('should detect ".dockerfile" extension as dockerfile', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'build/app.dockerfile', insertions: 2, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - 
expect(result[0].language).toBe('dockerfile'); - }); - }); - - // ── Status determination ───────────────────────────────────────────────── - - describe('status determination', () => { - it('should mark a created file as added', async () => { - const filePath = 'src/new-file.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: filePath, insertions: 10, deletions: 0 }], - { created: [filePath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('added'); - }); - - it('should mark a deleted file as deleted', async () => { - const filePath = 'src/old-file.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: filePath, insertions: 0, deletions: 20 }], - { deleted: [filePath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('deleted'); - }); - - it('should mark a renamed file as renamed', async () => { - const rawPath = 'src/old-name.ts => src/new-name.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('renamed'); - }); - - it('should default to modified when file is not in created/deleted/renamed', async () => { - const filePath = 'src/existing.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: filePath, insertions: 5, deletions: 3 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('modified'); - }); - - it('should upgrade status to renamed when oldPath is detected from path pattern but simple-git says modified', async () => { - // The file path contains a rename pattern, but simple-git did not - // include 
it in the renamed array. parseDiff should still treat it as renamed. - const rawPath = 'src/{old.ts => new.ts}'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: rawPath, insertions: 2, deletions: 1 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('renamed'); - expect(result[0].oldPath).toBe('src/old.ts'); - expect(result[0].path).toBe('src/new.ts'); - }); - }); - - // ── Rename parsing ─────────────────────────────────────────────────────── - - describe('rename parsing', () => { - it('should parse brace-style rename with prefix: dir/{old.ts => new.ts}', async () => { - const rawPath = 'src/{utils.ts => helpers.ts}'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('src/helpers.ts'); - expect(result[0].oldPath).toBe('src/utils.ts'); - expect(result[0].status).toBe('renamed'); - }); - - it('should parse brace-style rename with suffix: {old => new}/file.ts', async () => { - const rawPath = '{src => lib}/index.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('lib/index.ts'); - expect(result[0].oldPath).toBe('src/index.ts'); - expect(result[0].status).toBe('renamed'); - }); - - it('should parse brace-style rename with both prefix and suffix: a/{b => c}/d.ts', async () => { - const rawPath = 'packages/{core => shared}/types.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 3, deletions: 1 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - 
expect(result[0].path).toBe('packages/shared/types.ts'); - expect(result[0].oldPath).toBe('packages/core/types.ts'); - }); - - it('should parse simple rename: old.ts => new.ts', async () => { - const rawPath = 'old-name.ts => new-name.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('new-name.ts'); - expect(result[0].oldPath).toBe('old-name.ts'); - expect(result[0].status).toBe('renamed'); - }); - - it('should parse simple rename with directory paths', async () => { - const rawPath = 'src/components/Button.tsx => src/ui/Button.tsx'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('src/ui/Button.tsx'); - expect(result[0].oldPath).toBe('src/components/Button.tsx'); - }); - - it('should not set oldPath for a non-rename file', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'src/index.ts', insertions: 5, deletions: 2 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].oldPath).toBeUndefined(); - }); - }); - - // ── Path normalization ─────────────────────────────────────────────────── - - describe('path normalization', () => { - it('should remove double slashes from oldPath in brace-style renames', async () => { - // git may report: "dir/{a => a/b}/file.ts" - // With prefix="dir/", old="a", new="a/b", suffix="/file.ts" - // oldPath = "dir/" + "a" + "/file.ts" => "dir/a/file.ts" (clean) - // newPath = "dir/" + "a/b" + "/file.ts" => "dir/a/b/file.ts" (clean) - // Test a case where concatenation produces double slashes: - // prefix ends with "/" and old/new part is empty-like after slash manipulation - // E.g., 
"src//{old => new}/" scenario -- not realistic with .+?, - // but we can test a rename where prefix/suffix join cleanly. - const rawPath = 'packages/{core => shared}/types.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 3, deletions: 1 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('packages/shared/types.ts'); - expect(result[0].oldPath).toBe('packages/core/types.ts'); - expect(result[0].path).not.toContain('//'); - expect(result[0].oldPath).not.toContain('//'); - }); - - it('should handle brace rename where prefix has trailing slash and suffix has leading slash', async () => { - // "src/lib/{old => new}/index.ts" produces: - // prefix = "src/lib/", old = "old", new = "new", suffix = "/index.ts" - // oldPath = "src/lib/" + "old" + "/index.ts" => no double slash - const rawPath = 'src/lib/{old => new}/index.ts'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 0, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].path).toBe('src/lib/new/index.ts'); - expect(result[0].oldPath).toBe('src/lib/old/index.ts'); - expect(result[0].path).not.toContain('//'); - expect(result[0].oldPath).not.toContain('//'); - }); - }); - - // ── Category assignment ────────────────────────────────────────────────── - - describe('category assignment', () => { - it('should assign "source" category to a .ts source file', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'src/utils.ts', insertions: 3, deletions: 1 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('source'); - }); - - it('should assign "test" category to a .test.ts file', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'src/utils.test.ts', 
insertions: 5, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('test'); - }); - - it('should assign "doc" category to a .md file', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'README.md', insertions: 2, deletions: 1 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('doc'); - }); - - it('should assign "config" category to package.json', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'package.json', insertions: 1, deletions: 1 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('config'); - }); - - it('should assign "other" category to an unrecognized file type', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'assets/logo.png', insertions: 0, deletions: 0 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('other'); - }); - }); - - // ── Additions and deletions ────────────────────────────────────────────── - - describe('additions and deletions', () => { - it('should report correct additions and deletions', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'src/app.ts', insertions: 42, deletions: 13 }]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].additions).toBe(42); - expect(result[0].deletions).toBe(13); - }); - - it('should default to 0 when insertions/deletions are absent', async () => { - // Simulate a file entry without insertions/deletions properties (e.g., binary) - mockDiffSummary.mockResolvedValueOnce({ - files: [{ file: 'image.png', binary: true }], - insertions: 0, - deletions: 0, - changed: 1, - created: [], - deleted: [], - renamed: [], - }); - - const result = await parseDiff('/repo', 'main', 'feature'); - 
expect(result[0].additions).toBe(0); - expect(result[0].deletions).toBe(0); - }); - }); - - // ── Edge cases ─────────────────────────────────────────────────────────── - - describe('edge cases', () => { - it('should return an empty array for an empty diff', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([]), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toEqual([]); - }); - - it('should handle multiple files in a single diff', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [ - { file: 'src/a.ts', insertions: 10, deletions: 2 }, - { file: 'src/b.ts', insertions: 5, deletions: 0 }, - { file: 'src/c.ts', insertions: 0, deletions: 8 }, - ], - { created: ['src/b.ts'], deleted: ['src/c.ts'] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(3); - - expect(result[0].path).toBe('src/a.ts'); - expect(result[0].status).toBe('modified'); - - expect(result[1].path).toBe('src/b.ts'); - expect(result[1].status).toBe('added'); - - expect(result[2].path).toBe('src/c.ts'); - expect(result[2].status).toBe('deleted'); - }); - - it('should handle binary files with zero additions and deletions', async () => { - mockDiffSummary.mockResolvedValueOnce({ - files: [{ file: 'assets/icon.png', binary: true }], - insertions: 0, - deletions: 0, - changed: 1, - created: ['assets/icon.png'], - deleted: [], - renamed: [], - }); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].path).toBe('assets/icon.png'); - expect(result[0].status).toBe('added'); - expect(result[0].additions).toBe(0); - expect(result[0].deletions).toBe(0); - }); - - it('should handle files with zero additions and zero deletions (e.g., mode change)', async () => { - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary([{ file: 'scripts/deploy.sh', insertions: 0, deletions: 0 }]), - ); - - const result = await 
parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].additions).toBe(0); - expect(result[0].deletions).toBe(0); - expect(result[0].status).toBe('modified'); - }); - - it('should use the new path for language detection on renamed files', async () => { - const rawPath = 'src/{old.js => new.ts}'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 5, deletions: 5 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].language).toBe('typescript'); - expect(result[0].path).toBe('src/new.ts'); - }); - - it('should use the new path for category detection on renamed files', async () => { - const rawPath = 'src/{code.ts => code.test.ts}'; - mockDiffSummary.mockResolvedValueOnce( - makeDiffSummary( - [{ file: rawPath, insertions: 10, deletions: 0 }], - { renamed: [rawPath] }, - ), - ); - - const result = await parseDiff('/repo', 'main', 'feature'); - expect(result[0].category).toBe('test'); - }); - }); - - // ── Git invocation ─────────────────────────────────────────────────────── - - describe('git invocation', () => { - it('should call diffSummary with the correct base..head range', async () => { - mockDiffSummary.mockResolvedValueOnce(makeDiffSummary([])); - - await parseDiff('/my/repo', 'main', 'feature-branch'); - - expect(mockDiffSummary).toHaveBeenCalledTimes(1); - expect(mockDiffSummary).toHaveBeenCalledWith(['main..feature-branch']); - }); - }); - - // ── Missing created / deleted / renamed arrays ─────────────────────────── - - describe('missing created/deleted/renamed arrays', () => { - it('should default to empty arrays when diffSummary lacks created/deleted/renamed', async () => { - mockDiffSummary.mockResolvedValueOnce({ - files: [{ file: 'src/index.ts', insertions: 1, deletions: 0 }], - insertions: 1, - deletions: 0, - changed: 1, - // no created, deleted, or renamed properties - }); - - const result = await 
parseDiff('/repo', 'main', 'feature'); - expect(result).toHaveLength(1); - expect(result[0].status).toBe('modified'); - }); - }); -}); - -// ── detectLanguage (direct unit tests) ──────────────────────────────────────── - -describe('detectLanguage', () => { - it('detects TypeScript files', () => { - expect(detectLanguage('src/index.ts')).toBe('typescript'); - expect(detectLanguage('components/App.tsx')).toBe('typescript'); - }); - - it('detects JavaScript files', () => { - expect(detectLanguage('src/utils.js')).toBe('javascript'); - expect(detectLanguage('lib/component.jsx')).toBe('javascript'); - expect(detectLanguage('config.mjs')).toBe('javascript'); - expect(detectLanguage('config.cjs')).toBe('javascript'); - }); - - it('detects other languages', () => { - expect(detectLanguage('main.py')).toBe('python'); - expect(detectLanguage('main.go')).toBe('go'); - expect(detectLanguage('main.rs')).toBe('rust'); - expect(detectLanguage('Main.java')).toBe('java'); - }); - - it('detects special filenames', () => { - expect(detectLanguage('Dockerfile')).toBe('dockerfile'); - expect(detectLanguage('path/to/Dockerfile')).toBe('dockerfile'); - expect(detectLanguage('Makefile')).toBe('makefile'); - }); - - it('detects config file formats', () => { - expect(detectLanguage('config.json')).toBe('json'); - expect(detectLanguage('config.yaml')).toBe('yaml'); - expect(detectLanguage('config.yml')).toBe('yaml'); - expect(detectLanguage('config.toml')).toBe('toml'); - }); - - it('returns unknown for unrecognized extensions', () => { - expect(detectLanguage('file.xyz')).toBe('unknown'); - expect(detectLanguage('binary')).toBe('unknown'); - }); -}); diff --git a/packages/core/__tests__/export-differ.test.ts b/packages/core/__tests__/export-differ.test.ts deleted file mode 100644 index f8476dc..0000000 --- a/packages/core/__tests__/export-differ.test.ts +++ /dev/null @@ -1,1069 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { parseExports, parseExportsAsync, diffExports, 
diffExportsAsync } from '../src/breaking/export-differ.js'; -import type { FileResolver } from '../src/breaking/export-differ.js'; - -describe('parseExports', () => { - const filePath = 'src/index.ts'; - - describe('export function', () => { - it('should parse a named export function', () => { - const content = 'export function greet(name: string): void { }'; - const result = parseExports(content, filePath); - - expect(result.filePath).toBe(filePath); - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'greet', - kind: 'function', - signature: '(name: string): void', - isDefault: false, - }); - }); - - it('should parse an async export function', () => { - const content = 'export async function fetchData(url: string): Promise { }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'fetchData', - kind: 'function', - isDefault: false, - }); - expect(result.symbols[0].signature).toContain('url: string'); - }); - - it('should parse multiple export functions', () => { - const content = ` - export function foo(a: number): number { return a; } - export function bar(b: string): string { return b; } - `; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(2); - expect(result.symbols.map((s) => s.name)).toEqual(['foo', 'bar']); - }); - }); - - describe('export class', () => { - it('should parse a named export class', () => { - const content = 'export class MyService { }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'MyService', - kind: 'class', - isDefault: false, - }); - }); - - it('should parse export default class', () => { - const content = 'export default class AppRouter { }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - 
name: 'AppRouter', - kind: 'class', - isDefault: true, - }); - }); - }); - - describe('export const / let / var', () => { - it('should parse export const', () => { - const content = 'export const MAX_RETRIES = 3;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'MAX_RETRIES', - kind: 'const', - isDefault: false, - }); - }); - - it('should parse export const with type annotation', () => { - const content = 'export const config: AppConfig = {};'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'config', - kind: 'const', - signature: 'AppConfig', - isDefault: false, - }); - }); - - it('should parse export let as variable kind', () => { - const content = 'export let counter = 0;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'counter', - kind: 'variable', - isDefault: false, - }); - }); - - it('should parse export var as variable kind', () => { - const content = 'export var legacy = true;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'legacy', - kind: 'variable', - isDefault: false, - }); - }); - }); - - describe('export interface', () => { - it('should parse export interface', () => { - const content = 'export interface User { name: string; }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'User', - kind: 'interface', - isDefault: false, - }); - }); - - it('should parse multiple export interfaces', () => { - const content = ` - export interface Foo { a: number; } - export interface Bar { b: string; } - `; - const result = parseExports(content, filePath); - - 
expect(result.symbols).toHaveLength(2); - expect(result.symbols.map((s) => s.name)).toEqual(['Foo', 'Bar']); - }); - }); - - describe('export type', () => { - it('should parse export type alias', () => { - const content = 'export type ID = string | number;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'ID', - kind: 'type', - isDefault: false, - }); - }); - - it('should not confuse export type { ... } re-export with type alias', () => { - const content = 'export type { Foo, Bar } from "./other";'; - const result = parseExports(content, filePath); - - // The export type { ... } block should still be captured as named exports - // with kind 'type' from the EXPORT_NAMED_RE pattern preceded by 'type'. - // Let's just verify it does not create a type alias named '{'. - const typeAliases = result.symbols.filter( - (s) => s.kind === 'type' && s.name === '{', - ); - expect(typeAliases).toHaveLength(0); - }); - }); - - describe('export enum', () => { - it('should parse export enum', () => { - const content = 'export enum Status { Active, Inactive }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Status', - kind: 'enum', - isDefault: false, - }); - }); - - it('should parse export const enum', () => { - const content = 'export const enum Direction { Up, Down, Left, Right }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Direction', - kind: 'enum', - isDefault: false, - }); - }); - - it('should parse export declare const enum', () => { - const content = 'export declare const enum Axis { X, Y, Z }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Axis', - kind: 'enum', - isDefault: false, - }); - }); - }); 
- - describe('declare keyword', () => { - it('should parse export declare function', () => { - const content = 'export declare function init(config: Config): void;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'init', - kind: 'function', - isDefault: false, - }); - }); - - it('should parse export declare class', () => { - const content = 'export declare class Logger {}'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Logger', - kind: 'class', - isDefault: false, - }); - }); - - it('should parse export declare const', () => { - const content = 'export declare const VERSION: string;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'VERSION', - kind: 'const', - isDefault: false, - }); - }); - - it('should parse export declare interface', () => { - const content = 'export declare interface Options { verbose: boolean; }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Options', - kind: 'interface', - isDefault: false, - }); - }); - - it('should parse export declare type', () => { - const content = 'export declare type Handler = (event: Event) => void;'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Handler', - kind: 'type', - isDefault: false, - }); - }); - - it('should parse export declare enum', () => { - const content = 'export declare enum Level { Low, Medium, High }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Level', - kind: 'enum', - isDefault: false, - }); - }); - }); - - describe('abstract 
class', () => { - it('should parse export abstract class', () => { - const content = 'export abstract class Base {}'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Base', - kind: 'class', - isDefault: false, - }); - }); - - it('should parse export declare abstract class', () => { - const content = 'export declare abstract class AbstractService {}'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'AbstractService', - kind: 'class', - isDefault: false, - }); - }); - - it('should parse export default abstract class', () => { - const content = 'export default abstract class Controller {}'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'Controller', - kind: 'class', - isDefault: true, - }); - }); - }); - - describe('generator functions', () => { - it('should parse export function* generator', () => { - const content = 'export function* count(n: number): Generator { yield n; }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'count', - kind: 'function', - isDefault: false, - }); - }); - - it('should parse export async function* async generator', () => { - const content = 'export async function* stream(url: string): AsyncGenerator { yield ""; }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'stream', - kind: 'function', - isDefault: false, - }); - }); - - it('should parse export default function* generator', () => { - const content = 'export default function* items(): Generator { yield 1; }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - 
expect(result.symbols[0]).toMatchObject({ - name: 'items', - kind: 'function', - isDefault: true, - }); - }); - }); - - describe('destructured exports', () => { - it('should parse export const { a, b } = ... (object destructuring)', () => { - const content = 'export const { alpha, beta } = getValues();'; - const result = parseExports(content, filePath); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('alpha'); - expect(names).toContain('beta'); - }); - - it('should parse export const { a as b } = ... (renamed destructuring)', () => { - const content = 'export const { original as renamed } = getValues();'; - const result = parseExports(content, filePath); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('renamed'); - expect(names).not.toContain('original'); - }); - - it('should parse export const [ a, b ] = ... (array destructuring)', () => { - const content = 'export const [ first, second ] = getTuple();'; - const result = parseExports(content, filePath); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('first'); - expect(names).toContain('second'); - }); - - it('should not double-count destructured names as variable exports', () => { - const content = 'export const { foo, bar } = obj;'; - const result = parseExports(content, filePath); - - // Each name should appear exactly once - const fooSymbols = result.symbols.filter((s) => s.name === 'foo'); - expect(fooSymbols).toHaveLength(1); - }); - }); - - describe('export default', () => { - it('should parse export default function with name', () => { - const content = 'export default function main(): void { }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'main', - kind: 'function', - isDefault: true, - }); - }); - - it('should parse export default expression (identifier)', () => { - const content = ` - const app = createApp(); - 
export default app; - `; - const result = parseExports(content, filePath); - - const defaultExport = result.symbols.find((s) => s.isDefault); - expect(defaultExport).toBeDefined(); - expect(defaultExport!.name).toBe('app'); - }); - - it('should parse export default anonymous function', () => { - const content = 'export default function(req: Request): Response { return new Response(); }'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toMatchObject({ - name: 'default', - kind: 'function', - isDefault: true, - }); - expect(result.symbols[0].signature).toContain('req: Request'); - }); - - it('should parse export default async anonymous function', () => { - const content = 'export default async function(url: string): Promise { }'; - const result = parseExports(content, filePath); - - // The anonymous function is captured and possibly also matched by the - // default expression regex (capturing 'async'). At minimum, the default - // anonymous function should be present. - const anonFn = result.symbols.find((s) => s.isDefault && s.kind === 'function'); - expect(anonFn).toBeDefined(); - expect(anonFn!.name).toBe('default'); - }); - }); - - describe('export { ... 
} (named re-exports)', () => { - it('should parse export { a, b } block', () => { - const content = 'export { alpha, beta };'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(2); - expect(result.symbols.map((s) => s.name)).toContain('alpha'); - expect(result.symbols.map((s) => s.name)).toContain('beta'); - }); - - it('should handle "as" renaming in export { a as b }', () => { - const content = 'export { foo as bar };'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - // The exported name (bar) is used because it's what consumers see - expect(result.symbols[0].name).toBe('bar'); - }); - - it('should handle "as default" exports', () => { - const content = 'export { myFunc as default };'; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - // When exported "as default", the original name is used for tracking - expect(result.symbols[0].name).toBe('myFunc'); - expect(result.symbols[0].isDefault).toBe(true); - }); - }); - - describe('comments', () => { - it('should ignore exports inside single-line comments', () => { - const content = ` - // export function ignored(): void {} - export function real(): void {} - `; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0].name).toBe('real'); - }); - - it('should ignore exports inside block comments', () => { - const content = ` - /* export function ignored(): void {} */ - export function real(): void {} - `; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0].name).toBe('real'); - }); - }); - - describe('deduplication', () => { - it('should not create duplicate symbols for the same export', () => { - // A named export that appears only once should only appear once - const content = 'export function unique(x: number): number { return x; }'; - const result = 
parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - }); - }); - - describe('empty content', () => { - it('should return empty symbols for empty content', () => { - const result = parseExports('', filePath); - expect(result.symbols).toHaveLength(0); - expect(result.filePath).toBe(filePath); - }); - - it('should return empty symbols for content with no exports', () => { - const content = ` - const internal = 42; - function helper() {} - `; - const result = parseExports(content, filePath); - expect(result.symbols).toHaveLength(0); - }); - }); -}); - -describe('diffExports', () => { - describe('removed exports', () => { - it('should detect a removed export function', () => { - const base = 'export function foo(): void {}\nexport function bar(): void {}'; - const head = 'export function foo(): void {}'; - const result = diffExports('file.ts', base, head); - - expect(result.removed).toHaveLength(1); - expect(result.removed[0].name).toBe('bar'); - expect(result.added).toHaveLength(0); - expect(result.modified).toHaveLength(0); - }); - - it('should detect a removed class export', () => { - const base = 'export class Foo {}\nexport class Bar {}'; - const head = 'export class Foo {}'; - const result = diffExports('file.ts', base, head); - - expect(result.removed).toHaveLength(1); - expect(result.removed[0].name).toBe('Bar'); - expect(result.removed[0].kind).toBe('class'); - }); - - it('should detect removal of all exports', () => { - const base = 'export const A = 1;\nexport const B = 2;'; - const head = 'const A = 1;\nconst B = 2;'; - const result = diffExports('file.ts', base, head); - - expect(result.removed).toHaveLength(2); - expect(result.added).toHaveLength(0); - }); - }); - - describe('added exports', () => { - it('should detect an added export function', () => { - const base = 'export function foo(): void {}'; - const head = 'export function foo(): void {}\nexport function baz(): void {}'; - const result = diffExports('file.ts', base, 
head); - - expect(result.added).toHaveLength(1); - expect(result.added[0].name).toBe('baz'); - expect(result.removed).toHaveLength(0); - expect(result.modified).toHaveLength(0); - }); - - it('should detect adding exports to an empty file', () => { - const base = ''; - const head = 'export function newFunc(): string { return ""; }'; - const result = diffExports('file.ts', base, head); - - expect(result.added).toHaveLength(1); - expect(result.added[0].name).toBe('newFunc'); - expect(result.removed).toHaveLength(0); - }); - }); - - describe('modified exports (signature changes)', () => { - it('should detect a modified function signature', () => { - const base = 'export function calc(a: number): number { return a; }'; - const head = 'export function calc(a: number, b: number): number { return a + b; }'; - const result = diffExports('file.ts', base, head); - - expect(result.modified).toHaveLength(1); - expect(result.modified[0].before.name).toBe('calc'); - expect(result.modified[0].after.name).toBe('calc'); - expect(result.modified[0].before.signature).not.toBe( - result.modified[0].after.signature, - ); - }); - - it('should detect a changed kind (const to variable)', () => { - const base = 'export const value = 42;'; - const head = 'export let value = 42;'; - const result = diffExports('file.ts', base, head); - - expect(result.modified).toHaveLength(1); - expect(result.modified[0].before.kind).toBe('const'); - expect(result.modified[0].after.kind).toBe('variable'); - }); - - it('should detect a changed type annotation on a const', () => { - const base = 'export const config: OldType = {};'; - const head = 'export const config: NewType = {};'; - const result = diffExports('file.ts', base, head); - - expect(result.modified).toHaveLength(1); - expect(result.modified[0].before.signature).toBe('OldType'); - expect(result.modified[0].after.signature).toBe('NewType'); - }); - }); - - describe('no changes', () => { - it('should return empty arrays when exports are identical', 
() => { - const content = ` - export function foo(a: number): number { return a; } - export class Bar {} - export interface Baz { x: number; } - `; - const result = diffExports('file.ts', content, content); - - expect(result.removed).toHaveLength(0); - expect(result.added).toHaveLength(0); - expect(result.modified).toHaveLength(0); - }); - - it('should return empty arrays for two empty files', () => { - const result = diffExports('file.ts', '', ''); - - expect(result.removed).toHaveLength(0); - expect(result.added).toHaveLength(0); - expect(result.modified).toHaveLength(0); - }); - }); - - describe('complex scenarios', () => { - it('should handle simultaneous add, remove, and modify', () => { - const base = ` - export function keep(): void {} - export function remove(): void {} - export function modify(a: string): string { return a; } - `; - const head = ` - export function keep(): void {} - export function modify(a: string, b: string): string { return a + b; } - export function added(): number { return 0; } - `; - const result = diffExports('file.ts', base, head); - - expect(result.removed).toHaveLength(1); - expect(result.removed[0].name).toBe('remove'); - - expect(result.added).toHaveLength(1); - expect(result.added[0].name).toBe('added'); - - expect(result.modified).toHaveLength(1); - expect(result.modified[0].before.name).toBe('modify'); - }); - - it('should differentiate default and named exports of same name', () => { - const base = ` - export function foo(): void {} - export default function foo(): void {} - `; - const head = ` - export function foo(): void {} - `; - const result = diffExports('file.ts', base, head); - - // The default foo is removed, the named foo remains - expect(result.removed).toHaveLength(1); - expect(result.removed[0].isDefault).toBe(true); - }); - }); -}); - -// ─── Barrel re-export tests ───────────────────────────────────────────────── - -describe('parseExports — barrel re-exports (export * from)', () => { - const filePath = 
'src/index.ts'; - - /** - * Helper: build a sync FileResolver from a map of specifier -> { content, resolvedPath }. - */ - function buildResolver( - fileMap: Record, - ): FileResolver { - return (specifier: string, _importerFilePath: string) => { - return fileMap[specifier] ?? null; - }; - } - - it('should ignore export * from when no resolver is provided (backward compat)', () => { - const content = ` - export * from './utils'; - export function foo(): void {} - `; - const result = parseExports(content, filePath); - - // Without a resolver, barrel re-exports are invisible - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0].name).toBe('foo'); - }); - - it('should resolve a simple export * from with parseExportsAsync', async () => { - const indexContent = `export * from './utils';`; - const utilsContent = ` - export function helper(): void {} - export const VERSION = '1.0'; - `; - - const resolver = buildResolver({ - './utils': { content: utilsContent, resolvedPath: 'src/utils.ts' }, - }); - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - expect(result.filePath).toBe(filePath); - const names = result.symbols.map((s) => s.name); - expect(names).toContain('helper'); - expect(names).toContain('VERSION'); - }); - - it('should not re-export default exports via export *', async () => { - const indexContent = `export * from './mod';`; - const modContent = ` - export default function main(): void {} - export function secondary(): void {} - `; - - const resolver = buildResolver({ - './mod': { content: modContent, resolvedPath: 'src/mod.ts' }, - }); - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('secondary'); - // default export should NOT be re-exported - const defaultExports = result.symbols.filter((s) => s.isDefault); - expect(defaultExports).toHaveLength(0); - }); - - it('should handle nested barrels (index 
re-exports from a which re-exports from b)', async () => { - const indexContent = `export * from './a';`; - const aContent = ` - export * from './b'; - export function fromA(): void {} - `; - const bContent = ` - export function fromB(): string { return ''; } - export interface BConfig { x: number; } - `; - - const resolver: FileResolver = (specifier, importerFilePath) => { - if (specifier === './a' && importerFilePath === 'src/index.ts') { - return { content: aContent, resolvedPath: 'src/a.ts' }; - } - if (specifier === './b' && importerFilePath === 'src/a.ts') { - return { content: bContent, resolvedPath: 'src/b.ts' }; - } - return null; - }; - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('fromA'); - expect(names).toContain('fromB'); - expect(names).toContain('BConfig'); - }); - - it('should handle export * as namespace from', async () => { - const content = `export * as utils from './utils';`; - const utilsContent = ` - export function helper(): void {} - export const VERSION = '1.0'; - `; - - const resolver = buildResolver({ - './utils': { content: utilsContent, resolvedPath: 'src/utils.ts' }, - }); - - const result = await parseExportsAsync(content, filePath, resolver); - - // export * as ns creates a single namespace symbol; the individual symbols are NOT re-exported - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0].name).toBe('utils'); - expect(result.symbols[0].kind).toBe('variable'); - expect(result.symbols[0].isDefault).toBe(false); - }); - - it('should handle circular re-exports without infinite loop', async () => { - const aContent = ` - export * from './b'; - export function fromA(): void {} - `; - const bContent = ` - export * from './a'; - export function fromB(): void {} - `; - - const resolver: FileResolver = (specifier, importerFilePath) => { - if (specifier === './b') { - return { content: bContent, resolvedPath: 
'src/b.ts' }; - } - if (specifier === './a') { - return { content: aContent, resolvedPath: 'src/a.ts' }; - } - return null; - }; - - // Should not hang or throw — just stop at the visited file - const result = await parseExportsAsync(aContent, 'src/a.ts', resolver); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('fromA'); - expect(names).toContain('fromB'); - }); - - it('should handle mixed regular exports and barrel re-exports', async () => { - const indexContent = ` - export * from './utils'; - export function main(): void {} - export interface AppConfig { debug: boolean; } - export type ID = string; - `; - const utilsContent = ` - export function helper(): void {} - export class Logger {} - `; - - const resolver = buildResolver({ - './utils': { content: utilsContent, resolvedPath: 'src/utils.ts' }, - }); - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('main'); - expect(names).toContain('AppConfig'); - expect(names).toContain('ID'); - expect(names).toContain('helper'); - expect(names).toContain('Logger'); - expect(result.symbols).toHaveLength(5); - }); - - it('should handle multiple export * from in the same file', async () => { - const indexContent = ` - export * from './a'; - export * from './b'; - `; - const aContent = `export function fromA(): void {}`; - const bContent = `export function fromB(): void {}`; - - const resolver = buildResolver({ - './a': { content: aContent, resolvedPath: 'src/a.ts' }, - './b': { content: bContent, resolvedPath: 'src/b.ts' }, - }); - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - const names = result.symbols.map((s) => s.name); - expect(names).toContain('fromA'); - expect(names).toContain('fromB'); - }); - - it('should deduplicate symbols from multiple barrels re-exporting the same name', async () => { - const indexContent = ` - export * from './a'; - 
export * from './b'; - `; - // Both a and b export a symbol called 'shared' - const aContent = `export const shared = 1;`; - const bContent = `export const shared = 2;`; - - const resolver = buildResolver({ - './a': { content: aContent, resolvedPath: 'src/a.ts' }, - './b': { content: bContent, resolvedPath: 'src/b.ts' }, - }); - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - // Should deduplicate — only one 'shared' symbol - const sharedSymbols = result.symbols.filter((s) => s.name === 'shared'); - expect(sharedSymbols).toHaveLength(1); - }); - - it('should handle unresolvable specifier gracefully', async () => { - const indexContent = ` - export * from './nonexistent'; - export function foo(): void {} - `; - - const resolver: FileResolver = () => null; - - const result = await parseExportsAsync(indexContent, filePath, resolver); - - // The unresolvable barrel is skipped; regular exports still work - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0].name).toBe('foo'); - }); - - it('should respect max depth and not recurse infinitely on deep nesting', async () => { - // Build a chain of 15 barrel files: file0 -> file1 -> ... 
-> file14 - // MAX_BARREL_DEPTH is 10, so symbols from file11+ should NOT appear - const files: Record = {}; - for (let i = 0; i < 15; i++) { - if (i < 14) { - files[`src/file${i}.ts`] = ` - export * from './file${i + 1}'; - export const sym${i} = ${i}; - `; - } else { - files[`src/file${i}.ts`] = `export const sym${i} = ${i};`; - } - } - - const resolver: FileResolver = (specifier, importerFilePath) => { - // Resolve ./fileN from src/fileM.ts - const match = specifier.match(/\.\/file(\d+)/); - if (match) { - const idx = parseInt(match[1], 10); - const path = `src/file${idx}.ts`; - if (files[path]) { - return { content: files[path], resolvedPath: path }; - } - } - return null; - }; - - const result = await parseExportsAsync(files['src/file0.ts'], 'src/file0.ts', resolver); - - const names = result.symbols.map((s) => s.name); - // sym0 through sym10 should be present (depth 0 through 10) - for (let i = 0; i <= 10; i++) { - expect(names).toContain(`sym${i}`); - } - // sym11+ may or may not be present depending on exact depth counting, - // but the key guarantee is no infinite recursion and finite symbols - expect(result.symbols.length).toBeLessThanOrEqual(15); - expect(result.symbols.length).toBeGreaterThanOrEqual(11); - }); - - it('should handle export * as ns from without resolving inner symbols', async () => { - // export * as ns should NOT resolve and re-export the inner module's individual symbols - const indexContent = ` - export * as ns from './utils'; - export * from './other'; - `; - const utilsContent = ` - export function utilFunc(): void {} - export const utilConst = 42; - `; - const otherContent = ` - export function otherFunc(): void {} - `; - - const resolver: FileResolver = (specifier, _importer) => { - if (specifier === './utils') { - return { content: utilsContent, resolvedPath: 'src/utils.ts' }; - } - if (specifier === './other') { - return { content: otherContent, resolvedPath: 'src/other.ts' }; - } - return null; - }; - - const result = await 
parseExportsAsync(indexContent, filePath, resolver); - - const names = result.symbols.map((s) => s.name); - // ns is the namespace, otherFunc comes from export * - expect(names).toContain('ns'); - expect(names).toContain('otherFunc'); - // utilFunc and utilConst should NOT be individually re-exported - expect(names).not.toContain('utilFunc'); - expect(names).not.toContain('utilConst'); - }); - - it('should parse export * as ns from without a resolver (sync)', () => { - const content = `export * as helpers from './helpers';`; - const result = parseExports(content, filePath); - - expect(result.symbols).toHaveLength(1); - expect(result.symbols[0]).toEqual({ - name: 'helpers', - kind: 'variable', - isDefault: false, - }); - }); -}); - -describe('diffExportsAsync — barrel re-export diffing', () => { - it('should detect removed re-exported symbols when a barrel source is dropped', async () => { - const baseContent = ` - export * from './utils'; - export function main(): void {} - `; - const headContent = ` - export function main(): void {} - `; - // Base resolves ./utils, head does not (barrel removed) - const utilsContent = ` - export function helper(): void {} - export const VERSION = '1.0'; - `; - - const resolver: FileResolver = (specifier, _importer) => { - if (specifier === './utils') { - return { content: utilsContent, resolvedPath: 'src/utils.ts' }; - } - return null; - }; - - const result = await diffExportsAsync('src/index.ts', baseContent, headContent, resolver); - - // helper and VERSION should be detected as removed - expect(result.removed).toHaveLength(2); - const removedNames = result.removed.map((s) => s.name).sort(); - expect(removedNames).toEqual(['VERSION', 'helper']); - expect(result.added).toHaveLength(0); - }); - - it('should detect added re-exported symbols when a new barrel is added', async () => { - const baseContent = `export function main(): void {}`; - const headContent = ` - export * from './utils'; - export function main(): void {} - `; - 
const utilsContent = `export function helper(): void {}`; - - const resolver: FileResolver = (specifier, _importer) => { - if (specifier === './utils') { - return { content: utilsContent, resolvedPath: 'src/utils.ts' }; - } - return null; - }; - - const result = await diffExportsAsync('src/index.ts', baseContent, headContent, resolver); - - expect(result.added).toHaveLength(1); - expect(result.added[0].name).toBe('helper'); - expect(result.removed).toHaveLength(0); - }); - - it('should work without a resolver (falls back to sync behavior)', async () => { - const base = 'export function foo(): void {}'; - const head = 'export function foo(): void {}\nexport function bar(): void {}'; - - const result = await diffExportsAsync('file.ts', base, head); - - expect(result.added).toHaveLength(1); - expect(result.added[0].name).toBe('bar'); - }); -}); diff --git a/packages/core/__tests__/file-categorizer.test.ts b/packages/core/__tests__/file-categorizer.test.ts deleted file mode 100644 index 9c4ce2b..0000000 --- a/packages/core/__tests__/file-categorizer.test.ts +++ /dev/null @@ -1,330 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { categorizeFile } from '../src/diff/file-categorizer.js'; - -describe('categorizeFile', () => { - // ── Test files ────────────────────────────────────────────────────────────── - - describe('test files', () => { - it('should categorize .test.ts files as test', () => { - expect(categorizeFile('src/utils/parser.test.ts')).toBe('test'); - }); - - it('should categorize .test.js files as test', () => { - expect(categorizeFile('lib/helpers.test.js')).toBe('test'); - }); - - it('should categorize .spec.ts files as test', () => { - expect(categorizeFile('src/components/Button.spec.ts')).toBe('test'); - }); - - it('should categorize .spec.js files as test', () => { - expect(categorizeFile('src/components/Button.spec.js')).toBe('test'); - }); - - it('should categorize .test.tsx files as test', () => { - 
expect(categorizeFile('src/App.test.tsx')).toBe('test'); - }); - - it('should categorize .spec.jsx files as test', () => { - expect(categorizeFile('src/App.spec.jsx')).toBe('test'); - }); - - it('should categorize files in __tests__/ directory as test', () => { - expect(categorizeFile('src/__tests__/parser.ts')).toBe('test'); - }); - - it('should categorize files in __tests__ with nested path as test', () => { - expect(categorizeFile('packages/core/__tests__/utils/helper.ts')).toBe('test'); - }); - - it('should categorize files in /test/ directory as test', () => { - expect(categorizeFile('src/test/parser.ts')).toBe('test'); - }); - - it('should categorize files in /tests/ directory as test', () => { - expect(categorizeFile('src/tests/parser.ts')).toBe('test'); - }); - - it('should categorize files starting with "test" as test', () => { - expect(categorizeFile('src/testHelper.ts')).toBe('test'); - }); - - it('should categorize files in __tests__ using backslash paths as test', () => { - expect(categorizeFile('src\\__tests__\\parser.ts')).toBe('test'); - }); - }); - - // ── Doc files ─────────────────────────────────────────────────────────────── - - describe('doc files', () => { - it('should categorize .md files as doc', () => { - expect(categorizeFile('README.md')).toBe('doc'); - }); - - it('should categorize .mdx files as doc', () => { - expect(categorizeFile('docs/guide.mdx')).toBe('doc'); - }); - - it('should categorize .txt files as doc', () => { - expect(categorizeFile('CHANGELOG.txt')).toBe('doc'); - }); - - it('should categorize .rst files as doc', () => { - expect(categorizeFile('docs/index.rst')).toBe('doc'); - }); - - it('should categorize files in docs/ directory as doc', () => { - expect(categorizeFile('docs/api/reference.html')).toBe('doc'); - }); - - it('should categorize files in doc/ directory as doc', () => { - expect(categorizeFile('doc/usage.html')).toBe('doc'); - }); - }); - - // ── Config files 
──────────────────────────────────────────────────────────── - - describe('config files', () => { - it('should categorize package.json as config', () => { - expect(categorizeFile('package.json')).toBe('config'); - }); - - it('should categorize tsconfig.json as config', () => { - expect(categorizeFile('tsconfig.json')).toBe('config'); - }); - - it('should categorize turbo.json as config', () => { - expect(categorizeFile('turbo.json')).toBe('config'); - }); - - it('should categorize .gitignore as config', () => { - expect(categorizeFile('.gitignore')).toBe('config'); - }); - - it('should categorize .npmrc as config', () => { - expect(categorizeFile('.npmrc')).toBe('config'); - }); - - it('should categorize pnpm-workspace.yaml as config', () => { - expect(categorizeFile('pnpm-workspace.yaml')).toBe('config'); - }); - - it('should categorize pnpm-lock.yaml as config', () => { - expect(categorizeFile('pnpm-lock.yaml')).toBe('config'); - }); - - it('should categorize yarn.lock as config', () => { - expect(categorizeFile('yarn.lock')).toBe('config'); - }); - - it('should categorize package-lock.json as config', () => { - expect(categorizeFile('package-lock.json')).toBe('config'); - }); - - it('should categorize Dockerfile as config', () => { - expect(categorizeFile('dockerfile')).toBe('config'); - }); - - it('should categorize Makefile as config', () => { - expect(categorizeFile('makefile')).toBe('config'); - }); - - it('should categorize .github/ files as config', () => { - expect(categorizeFile('.github/workflows/ci.yml')).toBe('config'); - }); - - it('should categorize .github/CODEOWNERS as config', () => { - expect(categorizeFile('.github/CODEOWNERS')).toBe('config'); - }); - - it('should categorize .eslintrc prefixed files as config', () => { - expect(categorizeFile('.eslintrc.json')).toBe('config'); - }); - - it('should categorize .prettierrc prefixed files as config', () => { - expect(categorizeFile('.prettierrc.yml')).toBe('config'); - }); - - it('should 
categorize webpack.config.* as config', () => { - expect(categorizeFile('webpack.config.js')).toBe('config'); - }); - - it('should categorize vite.config.* as config', () => { - expect(categorizeFile('vite.config.ts')).toBe('config'); - }); - - it('should categorize jest.config.* as config', () => { - expect(categorizeFile('jest.config.ts')).toBe('config'); - }); - - it('should categorize vitest.config.* as config', () => { - expect(categorizeFile('vitest.config.ts')).toBe('config'); - }); - - it('should categorize docker-compose.* as config', () => { - expect(categorizeFile('docker-compose.yml')).toBe('config'); - }); - - it('should categorize .env files as config', () => { - expect(categorizeFile('.env')).toBe('config'); - }); - - it('should categorize .env.local as config', () => { - expect(categorizeFile('.env.local')).toBe('config'); - }); - - it('should categorize nested config files as config', () => { - expect(categorizeFile('packages/core/package.json')).toBe('config'); - }); - }); - - // ── Source files ──────────────────────────────────────────────────────────── - - describe('source files', () => { - it('should categorize .ts files as source', () => { - expect(categorizeFile('src/index.ts')).toBe('source'); - }); - - it('should categorize .tsx files as source', () => { - expect(categorizeFile('src/App.tsx')).toBe('source'); - }); - - it('should categorize .js files as source', () => { - expect(categorizeFile('src/utils.js')).toBe('source'); - }); - - it('should categorize .jsx files as source', () => { - expect(categorizeFile('src/Component.jsx')).toBe('source'); - }); - - it('should categorize .py files as source', () => { - expect(categorizeFile('scripts/deploy.py')).toBe('source'); - }); - - it('should categorize .go files as source', () => { - expect(categorizeFile('cmd/main.go')).toBe('source'); - }); - - it('should categorize .rs files as source', () => { - expect(categorizeFile('src/lib.rs')).toBe('source'); - }); - - it('should categorize .java 
files as source', () => { - expect(categorizeFile('src/Main.java')).toBe('source'); - }); - - it('should categorize .c files as source', () => { - expect(categorizeFile('src/main.c')).toBe('source'); - }); - - it('should categorize .cpp files as source', () => { - expect(categorizeFile('src/main.cpp')).toBe('source'); - }); - - it('should categorize .h files as source', () => { - expect(categorizeFile('include/header.h')).toBe('source'); - }); - - it('should categorize .rb files as source', () => { - expect(categorizeFile('lib/app.rb')).toBe('source'); - }); - - it('should categorize .php files as source', () => { - expect(categorizeFile('src/index.php')).toBe('source'); - }); - - it('should categorize .swift files as source', () => { - expect(categorizeFile('Sources/App.swift')).toBe('source'); - }); - - it('should categorize .kt files as source', () => { - expect(categorizeFile('src/main.kt')).toBe('source'); - }); - - it('should categorize .scala files as source', () => { - expect(categorizeFile('src/Main.scala')).toBe('source'); - }); - - it('should categorize .cs files as source', () => { - expect(categorizeFile('src/Program.cs')).toBe('source'); - }); - - it('should categorize .vue files as source', () => { - expect(categorizeFile('src/App.vue')).toBe('source'); - }); - - it('should categorize .svelte files as source', () => { - expect(categorizeFile('src/App.svelte')).toBe('source'); - }); - }); - - // ── Other files ───────────────────────────────────────────────────────────── - - describe('other files (fallback)', () => { - it('should categorize .png files as other', () => { - expect(categorizeFile('assets/logo.png')).toBe('other'); - }); - - it('should categorize .svg files as other', () => { - expect(categorizeFile('icons/arrow.svg')).toBe('other'); - }); - - it('should categorize .jpg files as other', () => { - expect(categorizeFile('images/photo.jpg')).toBe('other'); - }); - - it('should categorize .woff files as other', () => { - 
expect(categorizeFile('fonts/inter.woff')).toBe('other'); - }); - - it('should categorize unknown extensions as other', () => { - expect(categorizeFile('data/something.xyz')).toBe('other'); - }); - - it('should categorize files with no extension as other', () => { - expect(categorizeFile('LICENSE')).toBe('other'); - }); - - it('should categorize .css files as other', () => { - expect(categorizeFile('styles/main.css')).toBe('other'); - }); - - it('should categorize .json files (non-config) as other', () => { - expect(categorizeFile('data/fixtures.json')).toBe('other'); - }); - }); - - // ── Priority / precedence ────────────────────────────────────────────────── - - describe('priority: test > doc > config > source > other', () => { - it('should prioritize test over source (.test.ts is test, not source)', () => { - expect(categorizeFile('src/utils.test.ts')).toBe('test'); - }); - - it('should prioritize test over doc (test file in docs dir with .spec.ts)', () => { - // A .spec.ts file even inside docs/ should be categorized as test - // because isTestFile is checked first - expect(categorizeFile('docs/api.spec.ts')).toBe('test'); - }); - - it('should prioritize test over config (__tests__/package.json is test because __tests__ dir)', () => { - expect(categorizeFile('__tests__/package.json')).toBe('test'); - }); - - it('should prioritize doc over source (README.md is doc)', () => { - expect(categorizeFile('README.md')).toBe('doc'); - }); - - it('should prioritize doc over config (docs/ directory takes priority over config patterns)', () => { - // A file in docs/ should be doc even if it has no doc extension - expect(categorizeFile('docs/setup.html')).toBe('doc'); - }); - - it('should prioritize config over source (.github/workflows/build.ts is config, not source)', () => { - expect(categorizeFile('.github/workflows/build.ts')).toBe('config'); - // Wait - .github/ is config, but isTestFile runs first. 
- // Actually .github/workflows/build.ts is not a test file, and .github/ makes it config. - // But it also has .ts extension making it source. Config is checked before source, so config wins. - }); - }); -}); diff --git a/packages/core/__tests__/impact-graph.test.ts b/packages/core/__tests__/impact-graph.test.ts deleted file mode 100644 index fc69722..0000000 --- a/packages/core/__tests__/impact-graph.test.ts +++ /dev/null @@ -1,616 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// ── Mock setup ────────────────────────────────────────────────────────────── - -const { mockFg, mockReadFile } = vi.hoisted(() => ({ - mockFg: vi.fn(), - mockReadFile: vi.fn(), -})); - -vi.mock('fast-glob', () => ({ - default: mockFg, -})); - -vi.mock('fs/promises', () => ({ - readFile: mockReadFile, -})); - -import { buildImpactGraph } from '../src/impact/impact-graph.js'; -import type { ChangedFile } from '../src/types.js'; - -// ── Helpers ───────────────────────────────────────────────────────────────── - -function makeChangedFile( - overrides: Partial & Pick, -): ChangedFile { - return { - status: 'modified', - additions: 0, - deletions: 0, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -/** - * Set up fast-glob to return a list of absolute paths and readFile to return - * file contents. - * - * @param fileMap Map of repo-relative path -> file content. - * @param repoPath The repo path used to construct absolute paths. - */ -function setupFiles( - fileMap: Record, - repoPath: string = '/repo', -): void { - const absolutePaths = Object.keys(fileMap).map((rel) => `${repoPath}/${rel}`); - mockFg.mockResolvedValue(absolutePaths); - - mockReadFile.mockImplementation(async (absPath: string) => { - // Convert absolute path back to relative - const prefix = repoPath + '/'; - const rel = absPath.startsWith(prefix) ? 
absPath.slice(prefix.length) : absPath; - if (rel in fileMap) { - return fileMap[rel]; - } - throw new Error(`ENOENT: no such file: ${absPath}`); - }); -} - -// ── Reset mocks ───────────────────────────────────────────────────────────── - -beforeEach(() => { - mockFg.mockReset(); - mockReadFile.mockReset(); - - mockFg.mockResolvedValue([]); -}); - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('buildImpactGraph', () => { - const repoPath = '/repo'; - - // ── Direct change only ────────────────────────────────────────────── - - describe('direct change only', () => { - it('should list changed source files as directlyChanged', async () => { - setupFiles({ - 'src/a.ts': '', - 'src/b.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/a.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - expect(result.indirectlyAffected).toEqual([]); - expect(result.edges).toEqual([]); - }); - - it('should only include source-category files in directlyChanged', async () => { - setupFiles({ - 'src/a.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/a.ts', category: 'source' }), - makeChangedFile({ path: 'README.md', category: 'doc' }), - makeChangedFile({ path: 'package.json', category: 'config' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - }); - }); - - // ── Single-level import ───────────────────────────────────────────── - - describe('single-level import', () => { - it('should detect a file that directly imports a changed file', async () => { - setupFiles({ - 'src/a.ts': "import { foo } from './b';", - 'src/b.ts': 'export function foo() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/b.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - 
expect(result.directlyChanged).toEqual(['src/b.ts']); - expect(result.indirectlyAffected).toContain('src/a.ts'); - expect(result.edges).toContainEqual({ - from: 'src/a.ts', - to: 'src/b.ts', - type: 'imports', - }); - }); - - it('should detect multiple files importing the same changed file', async () => { - setupFiles({ - 'src/a.ts': "import { foo } from './c';", - 'src/b.ts': "import { bar } from './c';", - 'src/c.ts': 'export function foo() {}\nexport function bar() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/c.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/c.ts']); - expect(result.indirectlyAffected).toContain('src/a.ts'); - expect(result.indirectlyAffected).toContain('src/b.ts'); - expect(result.edges).toHaveLength(2); - }); - }); - - // ── Transitive imports ────────────────────────────────────────────── - - describe('transitive imports', () => { - it('should detect transitive dependencies: A -> B -> C where C is changed', async () => { - setupFiles({ - 'src/a.ts': "import { b } from './b';", - 'src/b.ts': "import { c } from './c';", - 'src/c.ts': 'export const c = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/c.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/c.ts']); - expect(result.indirectlyAffected).toContain('src/b.ts'); - expect(result.indirectlyAffected).toContain('src/a.ts'); - - // Edges: b imports c, a imports b - expect(result.edges).toContainEqual({ - from: 'src/b.ts', - to: 'src/c.ts', - type: 'imports', - }); - expect(result.edges).toContainEqual({ - from: 'src/a.ts', - to: 'src/b.ts', - type: 'imports', - }); - }); - }); - - // ── maxDepth limiting ─────────────────────────────────────────────── - - describe('maxDepth limiting', () => { - it('should stop BFS at depth=1 (only direct importers)', async () => { - setupFiles({ - 'src/a.ts': 
"import { b } from './b';", - 'src/b.ts': "import { c } from './c';", - 'src/c.ts': 'export const c = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/c.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles, 1); - - expect(result.directlyChanged).toEqual(['src/c.ts']); - // Only b.ts is at depth 1 - expect(result.indirectlyAffected).toContain('src/b.ts'); - // a.ts is at depth 2, should NOT be included - expect(result.indirectlyAffected).not.toContain('src/a.ts'); - }); - - it('should include all levels when maxDepth is large enough', async () => { - setupFiles({ - 'src/a.ts': "import { b } from './b';", - 'src/b.ts': "import { c } from './c';", - 'src/c.ts': "import { d } from './d';", - 'src/d.ts': 'export const d = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/d.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles, 10); - - expect(result.indirectlyAffected).toContain('src/c.ts'); - expect(result.indirectlyAffected).toContain('src/b.ts'); - expect(result.indirectlyAffected).toContain('src/a.ts'); - }); - - it('should return only directly changed with maxDepth=0', async () => { - setupFiles({ - 'src/a.ts': "import { b } from './b';", - 'src/b.ts': 'export const b = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/b.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles, 0); - - expect(result.directlyChanged).toEqual(['src/b.ts']); - expect(result.indirectlyAffected).toEqual([]); - expect(result.edges).toEqual([]); - }); - }); - - // ── Circular dependencies ─────────────────────────────────────────── - - describe('circular dependencies', () => { - it('should not infinite loop on A -> B -> A when A is changed', async () => { - setupFiles({ - 'src/a.ts': "import { b } from './b';\nexport const a = 1;", - 'src/b.ts': "import { a } from './a';\nexport const b = 2;", - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/a.ts' 
}), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - expect(result.indirectlyAffected).toContain('src/b.ts'); - // Should complete without hanging - }); - - it('should handle three-way circular dependency: A -> B -> C -> A', async () => { - setupFiles({ - 'src/a.ts': "import { c } from './c';\nexport const a = 1;", - 'src/b.ts': "import { a } from './a';\nexport const b = 2;", - 'src/c.ts': "import { b } from './b';\nexport const c = 3;", - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/a.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - // b imports a (directly), c imports b (transitively) - expect(result.indirectlyAffected).toContain('src/b.ts'); - expect(result.indirectlyAffected).toContain('src/c.ts'); - }); - }); - - // ── Import resolution ─────────────────────────────────────────────── - - describe('import resolution', () => { - it('should resolve imports without extension by trying .ts', async () => { - setupFiles({ - 'src/a.ts': "import { helper } from './utils';", - 'src/utils.ts': 'export function helper() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/utils.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/a.ts'); - }); - - it('should resolve imports with .js extension to .ts files', async () => { - // In ESM with .js extensions, the import './utils.js' should resolve - // to utils.js if it exists, or to utils.ts via extension resolution - setupFiles({ - 'src/a.ts': "import { helper } from './utils.js';", - 'src/utils.js': 'export function helper() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/utils.js' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/a.ts'); - }); 
- - it('should resolve /index.ts imports for directory-style imports', async () => { - setupFiles({ - 'src/app.ts': "import { create } from './lib';", - 'src/lib/index.ts': 'export function create() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/lib/index.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/app.ts'); - }); - - it('should resolve .tsx extension', async () => { - setupFiles({ - 'src/app.tsx': "import { Button } from './components/Button';", - 'src/components/Button.tsx': 'export function Button() {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/components/Button.tsx' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/app.tsx'); - }); - }); - - // ── Only relative imports tracked ─────────────────────────────────── - - describe('bare specifier / non-relative imports', () => { - it('should skip bare specifiers (node_modules imports)', async () => { - setupFiles({ - 'src/a.ts': "import express from 'express';\nimport lodash from 'lodash';", - 'src/b.ts': 'export const b = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/b.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - // a.ts does not import b.ts, only external modules - expect(result.indirectlyAffected).not.toContain('src/a.ts'); - expect(result.edges).toEqual([]); - }); - - it('should only track relative imports starting with ./ or ../', async () => { - setupFiles({ - 'src/a.ts': "import { foo } from './b';\nimport pkg from 'some-package';", - 'src/b.ts': 'export const foo = 1;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/b.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - // Only the relative import should create an edge - expect(result.edges).toHaveLength(1); - 
expect(result.edges[0]).toEqual({ - from: 'src/a.ts', - to: 'src/b.ts', - type: 'imports', - }); - }); - }); - - // ── Import patterns: static, dynamic, require ─────────────────────── - - describe('import pattern detection', () => { - it('should detect static import statements', async () => { - setupFiles({ - 'src/a.ts': "import { helper } from './b';", - 'src/b.ts': 'export function helper() {}', - }); - - const changedFiles = [makeChangedFile({ path: 'src/b.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/a.ts'); - }); - - it('should detect dynamic import() calls', async () => { - setupFiles({ - 'src/a.ts': "const mod = await import('./b');", - 'src/b.ts': 'export function helper() {}', - }); - - const changedFiles = [makeChangedFile({ path: 'src/b.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/a.ts'); - }); - - it('should detect require() calls', async () => { - setupFiles({ - 'src/a.js': "const mod = require('./b');", - 'src/b.js': 'module.exports = {};', - }); - - const changedFiles = [makeChangedFile({ path: 'src/b.js' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/a.js'); - }); - - it('should detect export ... 
from statements', async () => { - setupFiles({ - 'src/barrel.ts': "export { foo } from './b';", - 'src/b.ts': 'export const foo = 1;', - }); - - const changedFiles = [makeChangedFile({ path: 'src/b.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/barrel.ts'); - }); - }); - - // ── No imports ────────────────────────────────────────────────────── - - describe('no imports', () => { - it('should return only directlyChanged with no edges when files have no imports', async () => { - setupFiles({ - 'src/a.ts': 'export const a = 1;', - 'src/b.ts': 'export const b = 2;', - 'src/c.ts': 'export const c = 3;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/a.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - expect(result.indirectlyAffected).toEqual([]); - expect(result.edges).toEqual([]); - }); - }); - - // ── Multiple changed files ────────────────────────────────────────── - - describe('multiple changed files', () => { - it('should handle multiple directly changed files with separate dependents', async () => { - setupFiles({ - 'src/a.ts': "import { x } from './x';", - 'src/b.ts': "import { y } from './y';", - 'src/x.ts': 'export const x = 1;', - 'src/y.ts': 'export const y = 2;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/x.ts' }), - makeChangedFile({ path: 'src/y.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toContain('src/x.ts'); - expect(result.directlyChanged).toContain('src/y.ts'); - expect(result.indirectlyAffected).toContain('src/a.ts'); - expect(result.indirectlyAffected).toContain('src/b.ts'); - expect(result.edges).toHaveLength(2); - }); - - it('should deduplicate indirectly affected files that import multiple changed files', async () => { - setupFiles({ - 'src/consumer.ts': "import { x } from 
'./x';\nimport { y } from './y';", - 'src/x.ts': 'export const x = 1;', - 'src/y.ts': 'export const y = 2;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/x.ts' }), - makeChangedFile({ path: 'src/y.ts' }), - ]; - - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toContain('src/x.ts'); - expect(result.directlyChanged).toContain('src/y.ts'); - // consumer.ts should appear only once in indirectlyAffected - const consumerCount = result.indirectlyAffected.filter( - (f) => f === 'src/consumer.ts', - ).length; - expect(consumerCount).toBe(1); - - // But there should be two edges (consumer -> x, consumer -> y) - expect(result.edges).toContainEqual({ - from: 'src/consumer.ts', - to: 'src/x.ts', - type: 'imports', - }); - expect(result.edges).toContainEqual({ - from: 'src/consumer.ts', - to: 'src/y.ts', - type: 'imports', - }); - }); - }); - - // ── Unresolvable imports ──────────────────────────────────────────── - - describe('unresolvable imports', () => { - it('should gracefully skip imports that cannot be resolved to any file', async () => { - setupFiles({ - 'src/a.ts': "import { missing } from './nonexistent';", - 'src/b.ts': 'export const b = 1;', - }); - - const changedFiles = [makeChangedFile({ path: 'src/b.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - // a.ts imports a file that doesn't exist, so no edge is created - expect(result.indirectlyAffected).not.toContain('src/a.ts'); - expect(result.edges).toEqual([]); - }); - }); - - // ── Unreadable files ──────────────────────────────────────────────── - - describe('unreadable files', () => { - it('should skip files that cannot be read', async () => { - // fast-glob returns the file, but readFile throws - mockFg.mockResolvedValue(['/repo/src/a.ts', '/repo/src/b.ts']); - mockReadFile.mockImplementation(async (absPath: string) => { - if (absPath === '/repo/src/a.ts') { - throw new Error('EACCES: permission denied'); - } - 
return "import { a } from './a';"; - }); - - const changedFiles = [makeChangedFile({ path: 'src/a.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - // b.ts imports a.ts, but a.ts itself is unreadable (still processed as changed) - expect(result.directlyChanged).toEqual(['src/a.ts']); - expect(result.indirectlyAffected).toContain('src/b.ts'); - }); - }); - - // ── Relative path with ../ ────────────────────────────────────────── - - describe('parent directory imports', () => { - it('should resolve ../ imports correctly', async () => { - setupFiles({ - 'src/utils/helper.ts': "import { config } from '../config';", - 'src/config.ts': 'export const config = {};', - }); - - const changedFiles = [makeChangedFile({ path: 'src/config.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.indirectlyAffected).toContain('src/utils/helper.ts'); - }); - }); - - // ── Empty repo ────────────────────────────────────────────────────── - - describe('empty scenarios', () => { - it('should handle empty changed files list', async () => { - setupFiles({ - 'src/a.ts': "import { b } from './b';", - 'src/b.ts': 'export const b = 1;', - }); - - const result = await buildImpactGraph(repoPath, []); - - expect(result.directlyChanged).toEqual([]); - expect(result.indirectlyAffected).toEqual([]); - expect(result.edges).toEqual([]); - }); - - it('should handle no source files in repo', async () => { - mockFg.mockResolvedValue([]); - - const changedFiles = [makeChangedFile({ path: 'src/a.ts' })]; - const result = await buildImpactGraph(repoPath, changedFiles); - - expect(result.directlyChanged).toEqual(['src/a.ts']); - expect(result.indirectlyAffected).toEqual([]); - expect(result.edges).toEqual([]); - }); - }); -}); diff --git a/packages/core/__tests__/import-resolver.test.ts b/packages/core/__tests__/import-resolver.test.ts deleted file mode 100644 index 2600f1b..0000000 --- a/packages/core/__tests__/import-resolver.test.ts +++ 
/dev/null @@ -1,223 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { extractImportPaths, isRelativeImport, resolveImport } from '../src/imports/import-resolver.js'; - -describe('extractImportPaths', () => { - it('should extract static import paths', () => { - const content = ` - import { foo } from './foo'; - import bar from '../bar'; - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('./foo'); - expect(paths).toContain('../bar'); - }); - - it('should extract dynamic import paths', () => { - const content = ` - const mod = import('./dynamic-module'); - const other = import('../lazy/component'); - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('./dynamic-module'); - expect(paths).toContain('../lazy/component'); - }); - - it('should extract require paths', () => { - const content = ` - const fs = require('fs'); - const helper = require('./helper'); - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('fs'); - expect(paths).toContain('./helper'); - }); - - it('should extract paths from mixed import styles', () => { - const content = ` - import { alpha } from './alpha'; - const beta = import('./beta'); - const gamma = require('./gamma'); - export { delta } from './delta'; - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('./alpha'); - expect(paths).toContain('./beta'); - expect(paths).toContain('./gamma'); - expect(paths).toContain('./delta'); - expect(paths).toHaveLength(4); - }); - - it('should return an empty array for content with no imports', () => { - const content = ` - const x = 42; - function hello() { return 'world'; } - `; - const paths = extractImportPaths(content); - - expect(paths).toHaveLength(0); - }); - - it('should return an empty array for empty content', () => { - const paths = extractImportPaths(''); - expect(paths).toHaveLength(0); - }); - - it('should extract export-from paths', () => { - const content = ` - export { foo } 
from './foo'; - export * from './bar'; - export type { Baz } from './baz'; - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('./foo'); - expect(paths).toContain('./bar'); - expect(paths).toContain('./baz'); - }); - - it('should extract bare specifier imports', () => { - const content = ` - import express from 'express'; - import { resolve } from 'path'; - `; - const paths = extractImportPaths(content); - - expect(paths).toContain('express'); - expect(paths).toContain('path'); - }); -}); - -describe('isRelativeImport', () => { - it('should return true for ./ imports', () => { - expect(isRelativeImport('./foo')).toBe(true); - expect(isRelativeImport('./deeply/nested/module')).toBe(true); - }); - - it('should return true for ../ imports', () => { - expect(isRelativeImport('../foo')).toBe(true); - expect(isRelativeImport('../../bar/baz')).toBe(true); - }); - - it('should return false for bare specifier imports', () => { - expect(isRelativeImport('express')).toBe(false); - expect(isRelativeImport('fs')).toBe(false); - expect(isRelativeImport('@scope/package')).toBe(false); - }); - - it('should return false for absolute paths', () => { - expect(isRelativeImport('/absolute/path')).toBe(false); - }); -}); - -describe('resolveImport', () => { - it('should resolve an exact file match', () => { - const allFiles = new Set(['src/utils/helper.ts', 'src/index.ts']); - const result = resolveImport('./helper.ts', 'src/utils/consumer.ts', allFiles); - - expect(result).toBe('src/utils/helper.ts'); - }); - - it('should resolve by appending .ts extension', () => { - const allFiles = new Set(['src/utils/helper.ts', 'src/index.ts']); - const result = resolveImport('./helper', 'src/utils/consumer.ts', allFiles); - - expect(result).toBe('src/utils/helper.ts'); - }); - - it('should resolve by appending .tsx extension', () => { - const allFiles = new Set(['src/components/Button.tsx']); - const result = resolveImport('./Button', 'src/components/App.tsx', allFiles); 
- - expect(result).toBe('src/components/Button.tsx'); - }); - - it('should resolve by appending .js extension', () => { - const allFiles = new Set(['lib/utils.js']); - const result = resolveImport('./utils', 'lib/main.ts', allFiles); - - expect(result).toBe('lib/utils.js'); - }); - - it('should resolve by appending .jsx extension', () => { - const allFiles = new Set(['src/Widget.jsx']); - const result = resolveImport('./Widget', 'src/App.tsx', allFiles); - - expect(result).toBe('src/Widget.jsx'); - }); - - it('should resolve directory with index.ts', () => { - const allFiles = new Set(['src/utils/index.ts']); - const result = resolveImport('./utils', 'src/main.ts', allFiles); - - expect(result).toBe('src/utils/index.ts'); - }); - - it('should resolve directory with index.tsx', () => { - const allFiles = new Set(['src/components/index.tsx']); - const result = resolveImport('./components', 'src/app.ts', allFiles); - - expect(result).toBe('src/components/index.tsx'); - }); - - it('should resolve directory with index.js', () => { - const allFiles = new Set(['lib/helpers/index.js']); - const result = resolveImport('./helpers', 'lib/main.ts', allFiles); - - expect(result).toBe('lib/helpers/index.js'); - }); - - it('should resolve directory with index.jsx', () => { - const allFiles = new Set(['src/views/index.jsx']); - const result = resolveImport('./views', 'src/app.ts', allFiles); - - expect(result).toBe('src/views/index.jsx'); - }); - - it('should return null for unresolvable imports', () => { - const allFiles = new Set(['src/other.ts']); - const result = resolveImport('./nonexistent', 'src/main.ts', allFiles); - - expect(result).toBeNull(); - }); - - it('should resolve ../ relative imports', () => { - const allFiles = new Set(['src/shared/types.ts']); - const result = resolveImport('../shared/types', 'src/utils/helper.ts', allFiles); - - expect(result).toBe('src/shared/types.ts'); - }); - - it('should prioritize exact match over extension resolution', () => { - const 
allFiles = new Set(['src/utils.js', 'src/utils.ts']); - const result = resolveImport('./utils.js', 'src/main.ts', allFiles); - - expect(result).toBe('src/utils.js'); - }); - - it('should prioritize .ts extension over .tsx when both exist', () => { - const allFiles = new Set(['src/mod.ts', 'src/mod.tsx']); - const result = resolveImport('./mod', 'src/main.ts', allFiles); - - // RESOLVE_EXTENSIONS order: ['.ts', '.tsx', '.js', '.jsx'] - expect(result).toBe('src/mod.ts'); - }); - - it('should prioritize extension resolution over index file resolution', () => { - const allFiles = new Set(['src/utils.ts', 'src/utils/index.ts']); - const result = resolveImport('./utils', 'src/main.ts', allFiles); - - // Extension resolution (.ts) is tried before index file resolution - expect(result).toBe('src/utils.ts'); - }); - - it('should handle deeply nested relative imports', () => { - const allFiles = new Set(['lib/core/engine.ts']); - const result = resolveImport('../../../lib/core/engine', 'src/deep/nested/file.ts', allFiles); - - expect(result).toBe('lib/core/engine.ts'); - }); -}); diff --git a/packages/core/__tests__/json-reporter.test.ts b/packages/core/__tests__/json-reporter.test.ts deleted file mode 100644 index 28793ac..0000000 --- a/packages/core/__tests__/json-reporter.test.ts +++ /dev/null @@ -1,345 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { formatJSON } from '../src/output/json-reporter.js'; -import type { PRAnalysis } from '../src/types.js'; - -// ── Helper ────────────────────────────────────────────────────────────────── - -function makeAnalysis(overrides: Partial = {}): PRAnalysis { - return { - repoPath: '/path/to/repo', - baseBranch: 'main', - headBranch: 'feature/test', - changedFiles: [], - breakingChanges: [], - testCoverage: { - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 1, - gaps: [], - }, - docStaleness: { - staleReferences: [], - checkedFiles: [], - }, - impactGraph: { - directlyChanged: [], - 
indirectlyAffected: [], - edges: [], - }, - riskScore: { - score: 0, - level: 'low', - factors: [], - }, - summary: 'No significant changes detected.', - ...overrides, - }; -} - -describe('formatJSON', () => { - // ── Valid JSON ──────────────────────────────────────────────────────────── - - describe('valid JSON output', () => { - it('should produce valid JSON', () => { - const output = formatJSON(makeAnalysis()); - expect(() => JSON.parse(output)).not.toThrow(); - }); - - it('should produce pretty-printed JSON (indented with 2 spaces)', () => { - const output = formatJSON(makeAnalysis()); - // Pretty-printed JSON starts with "{\n " (object with 2-space indent) - expect(output).toMatch(/^\{\n {2}/); - }); - - it('should produce valid JSON for complex analysis', () => { - const analysis = makeAnalysis({ - changedFiles: [ - { - path: 'src/index.ts', - status: 'modified', - additions: 10, - deletions: 5, - language: 'typescript', - category: 'source', - }, - ], - breakingChanges: [ - { - filePath: 'src/api.ts', - type: 'removed_export', - symbolName: 'foo', - before: 'export function foo(): void', - after: null, - severity: 'high', - consumers: ['src/bar.ts'], - }, - ], - riskScore: { - score: 75, - level: 'high', - factors: [ - { - name: 'Breaking changes', - score: 100, - weight: 0.30, - description: '1 breaking change(s) detected.', - details: ['removed_export of "foo" in src/api.ts (high)'], - }, - ], - }, - }); - - const output = formatJSON(analysis); - expect(() => JSON.parse(output)).not.toThrow(); - }); - }); - - // ── Roundtrip ───────────────────────────────────────────────────────────── - - describe('roundtrip (serialize then deserialize)', () => { - it('should roundtrip a minimal analysis correctly', () => { - const analysis = makeAnalysis(); - const output = formatJSON(analysis); - const parsed = JSON.parse(output); - - expect(parsed.repoPath).toBe(analysis.repoPath); - expect(parsed.baseBranch).toBe(analysis.baseBranch); - 
expect(parsed.headBranch).toBe(analysis.headBranch); - expect(parsed.changedFiles).toEqual(analysis.changedFiles); - expect(parsed.breakingChanges).toEqual(analysis.breakingChanges); - expect(parsed.testCoverage).toEqual(analysis.testCoverage); - expect(parsed.docStaleness).toEqual(analysis.docStaleness); - expect(parsed.impactGraph).toEqual(analysis.impactGraph); - expect(parsed.riskScore).toEqual(analysis.riskScore); - expect(parsed.summary).toBe(analysis.summary); - }); - - it('should roundtrip a full analysis with all fields populated', () => { - const analysis = makeAnalysis({ - repoPath: '/workspace/my-project', - baseBranch: 'develop', - headBranch: 'feature/new-api', - changedFiles: [ - { - path: 'src/index.ts', - status: 'modified', - additions: 42, - deletions: 13, - language: 'typescript', - category: 'source', - }, - { - path: 'src/index.test.ts', - status: 'modified', - additions: 20, - deletions: 5, - language: 'typescript', - category: 'test', - }, - { - path: 'README.md', - status: 'modified', - additions: 3, - deletions: 1, - language: 'markdown', - category: 'doc', - }, - ], - breakingChanges: [ - { - filePath: 'src/api.ts', - type: 'removed_export', - symbolName: 'legacyHandler', - before: 'export function legacyHandler(): void', - after: null, - severity: 'high', - consumers: ['src/app.ts', 'src/routes.ts'], - }, - { - filePath: 'src/types.ts', - type: 'changed_signature', - symbolName: 'processData', - before: '(data: string): void', - after: '(data: Buffer): Promise', - severity: 'medium', - consumers: [], - }, - ], - testCoverage: { - changedSourceFiles: 3, - sourceFilesWithTestChanges: 2, - coverageRatio: 0.67, - gaps: [ - { - sourceFile: 'src/new-module.ts', - expectedTestFiles: ['src/new-module.test.ts'], - testFileExists: false, - testFileChanged: false, - }, - ], - }, - docStaleness: { - staleReferences: [ - { - docFile: 'docs/api.md', - line: 15, - reference: 'legacyHandler', - reason: 'function was removed', - }, - ], - checkedFiles: 
['docs/api.md', 'README.md'], - }, - impactGraph: { - directlyChanged: ['src/index.ts', 'src/api.ts'], - indirectlyAffected: ['src/app.ts', 'src/routes.ts'], - edges: [ - { from: 'src/app.ts', to: 'src/api.ts', type: 'imports' }, - { from: 'src/routes.ts', to: 'src/api.ts', type: 'imports' }, - ], - }, - riskScore: { - score: 62, - level: 'high', - factors: [ - { - name: 'Breaking changes', - score: 100, - weight: 0.30, - description: '2 breaking change(s) detected.', - }, - { - name: 'Untested changes', - score: 33, - weight: 0.25, - description: '2/3 changed source files have corresponding test changes.', - }, - ], - }, - summary: - 'This PR introduces breaking API changes and has moderate test coverage gaps.', - }); - - const output = formatJSON(analysis); - const parsed = JSON.parse(output) as PRAnalysis; - - // Deep equality check for the entire object - expect(parsed).toEqual(analysis); - }); - - it('should preserve null values (e.g. after field in breaking changes)', () => { - const analysis = makeAnalysis({ - breakingChanges: [ - { - filePath: 'src/api.ts', - type: 'removed_export', - symbolName: 'gone', - before: 'export function gone(): void', - after: null, - severity: 'high', - consumers: [], - }, - ], - }); - - const output = formatJSON(analysis); - const parsed = JSON.parse(output); - - expect(parsed.breakingChanges[0].after).toBeNull(); - }); - - it('should preserve empty arrays', () => { - const analysis = makeAnalysis({ - changedFiles: [], - breakingChanges: [], - }); - - const output = formatJSON(analysis); - const parsed = JSON.parse(output); - - expect(parsed.changedFiles).toEqual([]); - expect(parsed.breakingChanges).toEqual([]); - }); - - it('should preserve numeric values accurately', () => { - const analysis = makeAnalysis({ - testCoverage: { - changedSourceFiles: 42, - sourceFilesWithTestChanges: 33, - coverageRatio: 0.785714285714, - gaps: [], - }, - riskScore: { - score: 57, - level: 'high', - factors: [ - { - name: 'Test', - score: 21.5, 
- weight: 0.25, - description: 'test', - }, - ], - }, - }); - - const output = formatJSON(analysis); - const parsed = JSON.parse(output); - - expect(parsed.testCoverage.changedSourceFiles).toBe(42); - expect(parsed.testCoverage.coverageRatio).toBe(0.785714285714); - expect(parsed.riskScore.score).toBe(57); - expect(parsed.riskScore.factors[0].score).toBe(21.5); - }); - }); - - // ── Structure ───────────────────────────────────────────────────────────── - - describe('output structure', () => { - it('should contain all top-level keys', () => { - const output = formatJSON(makeAnalysis()); - const parsed = JSON.parse(output); - - expect(parsed).toHaveProperty('repoPath'); - expect(parsed).toHaveProperty('baseBranch'); - expect(parsed).toHaveProperty('headBranch'); - expect(parsed).toHaveProperty('changedFiles'); - expect(parsed).toHaveProperty('breakingChanges'); - expect(parsed).toHaveProperty('testCoverage'); - expect(parsed).toHaveProperty('docStaleness'); - expect(parsed).toHaveProperty('impactGraph'); - expect(parsed).toHaveProperty('riskScore'); - expect(parsed).toHaveProperty('summary'); - }); - - it('should return a string', () => { - const output = formatJSON(makeAnalysis()); - expect(typeof output).toBe('string'); - }); - }); - - // ── Special characters ──────────────────────────────────────────────────── - - describe('special characters', () => { - it('should handle special characters in strings', () => { - const analysis = makeAnalysis({ - summary: 'Changes include "quoted text" and backslashes \\ and newlines\n.', - }); - - const output = formatJSON(analysis); - expect(() => JSON.parse(output)).not.toThrow(); - - const parsed = JSON.parse(output); - expect(parsed.summary).toBe(analysis.summary); - }); - - it('should handle unicode characters', () => { - const analysis = makeAnalysis({ - summary: 'Unicode: emoji test, CJK characters, accents: cafe', - }); - - const output = formatJSON(analysis); - const parsed = JSON.parse(output); - 
expect(parsed.summary).toBe(analysis.summary); - }); - }); -}); diff --git a/packages/core/__tests__/markdown-reporter.test.ts b/packages/core/__tests__/markdown-reporter.test.ts deleted file mode 100644 index 00edb4a..0000000 --- a/packages/core/__tests__/markdown-reporter.test.ts +++ /dev/null @@ -1,556 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { formatMarkdown } from '../src/output/markdown-reporter.js'; -import type { PRAnalysis } from '../src/types.js'; - -// ── Helper to build a full PRAnalysis fixture ─────────────────────────────── - -function makeAnalysis(overrides: Partial = {}): PRAnalysis { - return { - repoPath: '/path/to/repo', - baseBranch: 'main', - headBranch: 'feature/test', - changedFiles: [], - breakingChanges: [], - testCoverage: { - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 1, - gaps: [], - }, - docStaleness: { - staleReferences: [], - checkedFiles: [], - }, - impactGraph: { - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }, - riskScore: { - score: 0, - level: 'low', - factors: [], - }, - summary: 'No significant changes detected.', - ...overrides, - }; -} - -describe('formatMarkdown', () => { - // ── Section presence ────────────────────────────────────────────────────── - - describe('expected sections', () => { - it('should contain the header "PR Impact Analysis"', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('# PR Impact Analysis'); - }); - - it('should contain the repository path', () => { - const output = formatMarkdown( - makeAnalysis({ repoPath: '/my/repo' }), - ); - expect(output).toContain('**Repository:** /my/repo'); - }); - - it('should contain the branch comparison', () => { - const output = formatMarkdown( - makeAnalysis({ baseBranch: 'main', headBranch: 'feat/abc' }), - ); - expect(output).toContain('`main`'); - expect(output).toContain('`feat/abc`'); - }); - - it('should contain the Risk Score section', () => { - const 
output = formatMarkdown( - makeAnalysis({ - riskScore: { - score: 42, - level: 'medium', - factors: [], - }, - }), - ); - expect(output).toContain('## Risk Score: 42/100 (medium)'); - }); - - it('should contain the Summary section', () => { - const output = formatMarkdown( - makeAnalysis({ summary: 'Test summary text.' }), - ); - expect(output).toContain('## Summary'); - expect(output).toContain('Test summary text.'); - }); - - it('should contain the Changed Files section', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('## Changed Files'); - }); - - it('should contain the Breaking Changes section', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('## Breaking Changes'); - }); - - it('should contain the Test Coverage section', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('## Test Coverage'); - }); - - it('should contain the Documentation Staleness section', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('## Documentation Staleness'); - }); - - it('should contain the Impact Graph section', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('## Impact Graph'); - }); - }); - - // ── Empty analysis ──────────────────────────────────────────────────────── - - describe('empty analysis (no changes)', () => { - it('should show "No files changed." when there are no changed files', () => { - const output = formatMarkdown(makeAnalysis({ changedFiles: [] })); - expect(output).toContain('No files changed.'); - }); - - it('should show "No breaking changes detected." when there are none', () => { - const output = formatMarkdown(makeAnalysis({ breakingChanges: [] })); - expect(output).toContain('No breaking changes detected.'); - }); - - it('should show "No stale references found." 
when there are none', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output).toContain('No stale references found.'); - }); - - it('should show "No risk factors identified." when factors list is empty', () => { - const output = formatMarkdown( - makeAnalysis({ - riskScore: { score: 0, level: 'low', factors: [] }, - }), - ); - expect(output).toContain('No risk factors identified.'); - }); - - it('should display 0 changed files count', () => { - const output = formatMarkdown(makeAnalysis({ changedFiles: [] })); - expect(output).toContain('## Changed Files (0)'); - }); - }); - - // ── With changed files ──────────────────────────────────────────────────── - - describe('with changed files', () => { - it('should display changed file details in a table', () => { - const output = formatMarkdown( - makeAnalysis({ - changedFiles: [ - { - path: 'src/index.ts', - status: 'modified', - additions: 10, - deletions: 5, - language: 'typescript', - category: 'source', - }, - ], - }), - ); - - expect(output).toContain('## Changed Files (1)'); - expect(output).toContain('| File | Status | +/- | Category |'); - expect(output).toContain('src/index.ts'); - expect(output).toContain('modified'); - expect(output).toContain('+10/-5'); - expect(output).toContain('source'); - }); - - it('should display multiple changed files', () => { - const output = formatMarkdown( - makeAnalysis({ - changedFiles: [ - { - path: 'src/a.ts', - status: 'added', - additions: 100, - deletions: 0, - language: 'typescript', - category: 'source', - }, - { - path: 'src/b.ts', - status: 'deleted', - additions: 0, - deletions: 50, - language: 'typescript', - category: 'source', - }, - ], - }), - ); - - expect(output).toContain('## Changed Files (2)'); - expect(output).toContain('src/a.ts'); - expect(output).toContain('src/b.ts'); - expect(output).toContain('added'); - expect(output).toContain('deleted'); - }); - }); - - // ── With breaking changes ───────────────────────────────────────────────── - 
- describe('with breaking changes', () => { - it('should display breaking changes in a table', () => { - const output = formatMarkdown( - makeAnalysis({ - breakingChanges: [ - { - filePath: 'src/api.ts', - type: 'removed_export', - symbolName: 'fetchData', - before: 'export function fetchData(): void', - after: null, - severity: 'high', - consumers: [], - }, - ], - }), - ); - - expect(output).toContain('## Breaking Changes (1)'); - expect(output).toContain('| Symbol | Type | Severity | File |'); - expect(output).toContain('fetchData'); - expect(output).toContain('removed export'); - expect(output).toContain('high'); - expect(output).toContain('src/api.ts'); - }); - - it('should format "changed_signature" type correctly', () => { - const output = formatMarkdown( - makeAnalysis({ - breakingChanges: [ - { - filePath: 'src/utils.ts', - type: 'changed_signature', - symbolName: 'parse', - before: '(a: string): void', - after: '(a: string, b: number): void', - severity: 'medium', - consumers: [], - }, - ], - }), - ); - - expect(output).toContain('changed signature'); - }); - - it('should format "changed_type" type correctly', () => { - const output = formatMarkdown( - makeAnalysis({ - breakingChanges: [ - { - filePath: 'src/types.ts', - type: 'changed_type', - symbolName: 'Config', - before: 'type Config = { a: string }', - after: 'type Config = { a: number }', - severity: 'medium', - consumers: [], - }, - ], - }), - ); - - expect(output).toContain('changed type'); - }); - - it('should format "renamed_export" type correctly', () => { - const output = formatMarkdown( - makeAnalysis({ - breakingChanges: [ - { - filePath: 'src/api.ts', - type: 'renamed_export', - symbolName: 'oldName', - before: 'oldName', - after: 'newName', - severity: 'low', - consumers: [], - }, - ], - }), - ); - - expect(output).toContain('renamed export'); - }); - - it('should display multiple breaking changes', () => { - const output = formatMarkdown( - makeAnalysis({ - breakingChanges: [ - { - 
filePath: 'src/a.ts', - type: 'removed_export', - symbolName: 'foo', - before: '', - after: null, - severity: 'high', - consumers: [], - }, - { - filePath: 'src/b.ts', - type: 'changed_signature', - symbolName: 'bar', - before: '', - after: '', - severity: 'medium', - consumers: [], - }, - ], - }), - ); - - expect(output).toContain('## Breaking Changes (2)'); - }); - }); - - // ── Test Coverage section ───────────────────────────────────────────────── - - describe('test coverage section', () => { - it('should display coverage statistics', () => { - const output = formatMarkdown( - makeAnalysis({ - testCoverage: { - changedSourceFiles: 10, - sourceFilesWithTestChanges: 7, - coverageRatio: 0.7, - gaps: [], - }, - }), - ); - - expect(output).toContain('**Changed source files:** 10'); - expect(output).toContain('**Files with test changes:** 7'); - expect(output).toContain('**Coverage ratio:** 70%'); - }); - - it('should display test coverage gaps', () => { - const output = formatMarkdown( - makeAnalysis({ - testCoverage: { - changedSourceFiles: 2, - sourceFilesWithTestChanges: 1, - coverageRatio: 0.5, - gaps: [ - { - sourceFile: 'src/utils.ts', - expectedTestFiles: ['src/utils.test.ts'], - testFileExists: true, - testFileChanged: false, - }, - ], - }, - }), - ); - - expect(output).toContain('### Gaps'); - expect(output).toContain('**src/utils.ts**'); - expect(output).toContain('test file exists but was not changed'); - }); - - it('should say "no test file found" for gaps without test files', () => { - const output = formatMarkdown( - makeAnalysis({ - testCoverage: { - changedSourceFiles: 1, - sourceFilesWithTestChanges: 0, - coverageRatio: 0, - gaps: [ - { - sourceFile: 'src/new-module.ts', - expectedTestFiles: [], - testFileExists: false, - testFileChanged: false, - }, - ], - }, - }), - ); - - expect(output).toContain('no test file found'); - }); - - it('should list expected test files for each gap', () => { - const output = formatMarkdown( - makeAnalysis({ - 
testCoverage: { - changedSourceFiles: 1, - sourceFilesWithTestChanges: 0, - coverageRatio: 0, - gaps: [ - { - sourceFile: 'src/parser.ts', - expectedTestFiles: [ - 'src/parser.test.ts', - 'src/__tests__/parser.ts', - ], - testFileExists: true, - testFileChanged: false, - }, - ], - }, - }), - ); - - expect(output).toContain('src/parser.test.ts'); - expect(output).toContain('src/__tests__/parser.ts'); - }); - }); - - // ── Documentation staleness ─────────────────────────────────────────────── - - describe('documentation staleness section', () => { - it('should display stale references', () => { - const output = formatMarkdown( - makeAnalysis({ - docStaleness: { - staleReferences: [ - { - docFile: 'docs/api.md', - line: 42, - reference: 'oldFunction', - reason: 'function was removed', - }, - ], - checkedFiles: ['docs/api.md'], - }, - }), - ); - - expect(output).toContain('**docs/api.md** (line 42)'); - expect(output).toContain('`oldFunction`'); - expect(output).toContain('function was removed'); - }); - }); - - // ── Impact Graph section ────────────────────────────────────────────────── - - describe('impact graph section', () => { - it('should display directly changed and indirectly affected counts', () => { - const output = formatMarkdown( - makeAnalysis({ - impactGraph: { - directlyChanged: ['src/a.ts', 'src/b.ts'], - indirectlyAffected: ['src/c.ts'], - edges: [], - }, - }), - ); - - expect(output).toContain('**Directly changed:** 2 files'); - expect(output).toContain('**Indirectly affected:** 1 file'); - }); - - it('should use singular "file" for single items', () => { - const output = formatMarkdown( - makeAnalysis({ - impactGraph: { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [], - }, - }), - ); - - expect(output).toContain('1 file'); - expect(output).not.toContain('1 files'); - }); - - it('should display dependency edges', () => { - const output = formatMarkdown( - makeAnalysis({ - impactGraph: { - directlyChanged: 
['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { - from: 'src/b.ts', - to: 'src/a.ts', - type: 'imports', - }, - ], - }, - }), - ); - - expect(output).toContain('### Dependency Edges'); - expect(output).toContain('src/b.ts'); - expect(output).toContain('src/a.ts'); - expect(output).toContain('`imports`'); - }); - - it('should not display dependency edges section when there are no edges', () => { - const output = formatMarkdown( - makeAnalysis({ - impactGraph: { - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }, - }), - ); - - expect(output).not.toContain('### Dependency Edges'); - }); - }); - - // ── Risk factors table ──────────────────────────────────────────────────── - - describe('risk factors table', () => { - it('should render risk factors in a table', () => { - const output = formatMarkdown( - makeAnalysis({ - riskScore: { - score: 55, - level: 'high', - factors: [ - { - name: 'Breaking changes', - score: 100, - weight: 0.30, - description: '1 breaking change(s) detected.', - }, - { - name: 'Untested changes', - score: 50, - weight: 0.25, - description: '2/4 files covered.', - }, - ], - }, - }), - ); - - expect(output).toContain('| Factor | Score | Weight |'); - expect(output).toContain('| Breaking changes | 100 | 0.3 |'); - expect(output).toContain('| Untested changes | 50 | 0.25 |'); - }); - }); - - // ── Output format ───────────────────────────────────────────────────────── - - describe('output format', () => { - it('should end with a trailing newline', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output.endsWith('\n')).toBe(true); - }); - - it('should return a non-empty string', () => { - const output = formatMarkdown(makeAnalysis()); - expect(output.length).toBeGreaterThan(0); - }); - }); -}); diff --git a/packages/core/__tests__/risk-calculator.test.ts b/packages/core/__tests__/risk-calculator.test.ts deleted file mode 100644 index 8627c42..0000000 --- 
a/packages/core/__tests__/risk-calculator.test.ts +++ /dev/null @@ -1,764 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { calculateRisk } from '../src/risk/risk-calculator.js'; -import { - evaluateBreakingChangesFactor, - evaluateUntestedChangesFactor, - evaluateDiffSizeFactor, - evaluateDocStalenessFactor, - evaluateConfigChangesFactor, - evaluateImpactBreadthFactor, -} from '../src/risk/factors.js'; -import type { - ChangedFile, - BreakingChange, - TestCoverageReport, - DocStalenessReport, - ImpactGraph, -} from '../src/types.js'; - -// ── Test helpers ──────────────────────────────────────────────────────────── - -function makeChangedFile(overrides: Partial = {}): ChangedFile { - return { - path: 'src/index.ts', - status: 'modified', - additions: 0, - deletions: 0, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -function makeBreakingChange( - overrides: Partial = {}, -): BreakingChange { - return { - filePath: 'src/api.ts', - type: 'removed_export', - symbolName: 'foo', - before: 'export function foo(): void', - after: null, - severity: 'high', - consumers: [], - ...overrides, - }; -} - -function makeTestCoverage( - overrides: Partial = {}, -): TestCoverageReport { - return { - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 1, - gaps: [], - ...overrides, - }; -} - -function makeDocStaleness( - overrides: Partial = {}, -): DocStalenessReport { - return { - staleReferences: [], - checkedFiles: [], - ...overrides, - }; -} - -function makeImpactGraph( - overrides: Partial = {}, -): ImpactGraph { - return { - directlyChanged: [], - indirectlyAffected: [], - edges: [], - ...overrides, - }; -} - -// ── Factor tests ──────────────────────────────────────────────────────────── - -describe('evaluateBreakingChangesFactor', () => { - it('should return score 0 with no breaking changes', () => { - const factor = evaluateBreakingChangesFactor([]); - expect(factor.score).toBe(0); - 
expect(factor.weight).toBe(0.30); - expect(factor.name).toBe('Breaking changes'); - }); - - it('should return score 100 for high severity breaking changes', () => { - const factor = evaluateBreakingChangesFactor([ - makeBreakingChange({ severity: 'high' }), - ]); - expect(factor.score).toBe(100); - }); - - it('should return score 60 for medium severity breaking changes', () => { - const factor = evaluateBreakingChangesFactor([ - makeBreakingChange({ severity: 'medium' }), - ]); - expect(factor.score).toBe(60); - }); - - it('should return score 30 for low severity only', () => { - const factor = evaluateBreakingChangesFactor([ - makeBreakingChange({ severity: 'low' }), - ]); - expect(factor.score).toBe(30); - }); - - it('should prioritize high over medium severity', () => { - const factor = evaluateBreakingChangesFactor([ - makeBreakingChange({ severity: 'medium' }), - makeBreakingChange({ severity: 'high' }), - ]); - expect(factor.score).toBe(100); - }); - - it('should include details about each breaking change', () => { - const factor = evaluateBreakingChangesFactor([ - makeBreakingChange({ - symbolName: 'myFunc', - filePath: 'src/api.ts', - severity: 'high', - type: 'removed_export', - }), - ]); - expect(factor.details).toBeDefined(); - expect(factor.details!.length).toBe(1); - expect(factor.details![0]).toContain('myFunc'); - expect(factor.details![0]).toContain('src/api.ts'); - }); -}); - -describe('evaluateUntestedChangesFactor', () => { - it('should return score 0 with full coverage', () => { - const factor = evaluateUntestedChangesFactor( - makeTestCoverage({ coverageRatio: 1 }), - ); - expect(factor.score).toBe(0); - expect(factor.weight).toBe(0.25); - }); - - it('should return score 100 with zero coverage', () => { - const factor = evaluateUntestedChangesFactor( - makeTestCoverage({ coverageRatio: 0, changedSourceFiles: 5 }), - ); - expect(factor.score).toBe(100); - }); - - it('should return score 50 with 50% coverage', () => { - const factor = 
evaluateUntestedChangesFactor( - makeTestCoverage({ coverageRatio: 0.5, changedSourceFiles: 4 }), - ); - expect(factor.score).toBe(50); - }); - - it('should include details for coverage gaps', () => { - const factor = evaluateUntestedChangesFactor( - makeTestCoverage({ - coverageRatio: 0.5, - changedSourceFiles: 2, - gaps: [ - { - sourceFile: 'src/utils.ts', - expectedTestFiles: [], - testFileExists: false, - testFileChanged: false, - }, - ], - }), - ); - expect(factor.details).toBeDefined(); - expect(factor.details![0]).toContain('src/utils.ts'); - expect(factor.details![0]).toContain('no test file found'); - }); - - it('should note "test exists but not updated" when test file exists', () => { - const factor = evaluateUntestedChangesFactor( - makeTestCoverage({ - coverageRatio: 0.5, - changedSourceFiles: 2, - gaps: [ - { - sourceFile: 'src/utils.ts', - expectedTestFiles: ['src/utils.test.ts'], - testFileExists: true, - testFileChanged: false, - }, - ], - }), - ); - expect(factor.details![0]).toContain('test exists but not updated'); - }); - - it('should say "No source files changed" when none changed', () => { - const factor = evaluateUntestedChangesFactor( - makeTestCoverage({ changedSourceFiles: 0, coverageRatio: 1 }), - ); - expect(factor.description).toBe('No source files changed.'); - }); -}); - -describe('evaluateDiffSizeFactor', () => { - it('should return score 0 for small diffs (< 100 lines)', () => { - const files = [makeChangedFile({ additions: 30, deletions: 20 })]; - const factor = evaluateDiffSizeFactor(files); - expect(factor.score).toBe(0); - expect(factor.weight).toBe(0.15); - }); - - it('should return score 50 for 100-499 lines', () => { - const files = [makeChangedFile({ additions: 100, deletions: 50 })]; - const factor = evaluateDiffSizeFactor(files); - expect(factor.score).toBe(50); - }); - - it('should return score 80 for 500-1000 lines', () => { - const files = [makeChangedFile({ additions: 400, deletions: 200 })]; - const factor = 
evaluateDiffSizeFactor(files); - expect(factor.score).toBe(80); - }); - - it('should return score 100 for > 1000 lines', () => { - const files = [makeChangedFile({ additions: 800, deletions: 500 })]; - const factor = evaluateDiffSizeFactor(files); - expect(factor.score).toBe(100); - }); - - it('should sum lines across multiple files', () => { - const files = [ - makeChangedFile({ additions: 300, deletions: 100 }), - makeChangedFile({ additions: 200, deletions: 100 }), - ]; - const factor = evaluateDiffSizeFactor(files); - // 300 + 100 + 200 + 100 = 700 -> score 80 - expect(factor.score).toBe(80); - }); - - it('should return score 0 for no files', () => { - const factor = evaluateDiffSizeFactor([]); - expect(factor.score).toBe(0); - }); - - it('should include file count in description', () => { - const files = [ - makeChangedFile({ additions: 10, deletions: 5 }), - makeChangedFile({ additions: 20, deletions: 10 }), - ]; - const factor = evaluateDiffSizeFactor(files); - expect(factor.description).toContain('2 file(s)'); - }); -}); - -describe('evaluateDocStalenessFactor', () => { - it('should return score 0 with no stale references', () => { - const factor = evaluateDocStalenessFactor(makeDocStaleness()); - expect(factor.score).toBe(0); - expect(factor.weight).toBe(0.10); - }); - - it('should return score 20 per stale reference', () => { - const factor = evaluateDocStalenessFactor( - makeDocStaleness({ - staleReferences: [ - { docFile: 'docs/api.md', line: 10, reference: 'foo', reason: 'symbol removed' }, - ], - }), - ); - expect(factor.score).toBe(20); - }); - - it('should cap score at 100', () => { - const refs = Array.from({ length: 10 }, (_, i) => ({ - docFile: `docs/doc${i}.md`, - line: i + 1, - reference: `sym${i}`, - reason: 'symbol removed', - })); - const factor = evaluateDocStalenessFactor( - makeDocStaleness({ staleReferences: refs }), - ); - expect(factor.score).toBe(100); - }); - - it('should include details with stale references', () => { - const factor 
= evaluateDocStalenessFactor( - makeDocStaleness({ - staleReferences: [ - { - docFile: 'README.md', - line: 42, - reference: 'oldFunc', - reason: 'function removed', - }, - ], - }), - ); - expect(factor.details).toBeDefined(); - expect(factor.details![0]).toContain('README.md:42'); - expect(factor.details![0]).toContain('oldFunc'); - }); - - it('should return score exactly 100 with exactly 5 stale references', () => { - const refs = Array.from({ length: 5 }, (_, i) => ({ - docFile: `docs/doc${i}.md`, - line: i + 1, - reference: `sym${i}`, - reason: 'symbol removed', - })); - const factor = evaluateDocStalenessFactor( - makeDocStaleness({ staleReferences: refs }), - ); - // 5 * 20 = 100, which is exactly the cap - expect(factor.score).toBe(100); - }); -}); - -describe('evaluateConfigChangesFactor', () => { - it('should return score 0 with no config files', () => { - const files = [makeChangedFile({ category: 'source' })]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(0); - expect(factor.weight).toBe(0.10); - }); - - it('should return score 100 for CI/build config changes', () => { - const files = [ - makeChangedFile({ - path: '.github/workflows/ci.yml', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for Dockerfile changes', () => { - const files = [ - makeChangedFile({ - path: 'Dockerfile', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for docker-compose changes', () => { - const files = [ - makeChangedFile({ - path: 'docker-compose.yml', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for vite.config changes', () => { - const files = [ - makeChangedFile({ - path: 'vite.config.ts', - category: 'config', - 
}), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for turbo.json changes', () => { - const files = [ - makeChangedFile({ - path: 'turbo.json', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 50 for non-CI config changes', () => { - const files = [ - makeChangedFile({ - path: 'package.json', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(50); - }); - - it('should include config file paths in details', () => { - const files = [ - makeChangedFile({ - path: '.github/workflows/ci.yml', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.details).toEqual(['.github/workflows/ci.yml']); - }); - - it('should return score 100 for webpack.config changes', () => { - const files = [ - makeChangedFile({ - path: 'webpack.config.js', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - expect(factor.description).toContain('CI/build configuration changed'); - }); - - it('should return score 100 for rollup.config changes', () => { - const files = [ - makeChangedFile({ - path: 'rollup.config.mjs', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for .gitlab-ci.yml changes', () => { - const files = [ - makeChangedFile({ - path: '.gitlab-ci.yml', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); - - it('should return score 100 for Jenkinsfile changes', () => { - const files = [ - makeChangedFile({ - path: 'Jenkinsfile', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - 
expect(factor.score).toBe(100); - }); - - it('should return score 100 for Jenkinsfile (case-insensitive) changes', () => { - const files = [ - makeChangedFile({ - path: 'jenkinsfile', - category: 'config', - }), - ]; - const factor = evaluateConfigChangesFactor(files); - expect(factor.score).toBe(100); - }); -}); - -describe('evaluateImpactBreadthFactor', () => { - it('should return score 0 with no indirectly affected files', () => { - const factor = evaluateImpactBreadthFactor(makeImpactGraph()); - expect(factor.score).toBe(0); - expect(factor.weight).toBe(0.10); - }); - - it('should return 10 per indirectly affected file', () => { - const factor = evaluateImpactBreadthFactor( - makeImpactGraph({ - indirectlyAffected: ['a.ts', 'b.ts', 'c.ts'], - }), - ); - expect(factor.score).toBe(30); - }); - - it('should cap score at 100', () => { - const affected = Array.from({ length: 20 }, (_, i) => `file${i}.ts`); - const factor = evaluateImpactBreadthFactor( - makeImpactGraph({ indirectlyAffected: affected }), - ); - expect(factor.score).toBe(100); - }); - - it('should include up to 20 affected files in details', () => { - const affected = Array.from({ length: 25 }, (_, i) => `file${i}.ts`); - const factor = evaluateImpactBreadthFactor( - makeImpactGraph({ indirectlyAffected: affected }), - ); - expect(factor.details).toBeDefined(); - expect(factor.details!.length).toBe(20); - }); - - it('should not include details when no files are affected', () => { - const factor = evaluateImpactBreadthFactor(makeImpactGraph()); - expect(factor.details).toBeUndefined(); - }); - - it('should return score exactly 100 with exactly 10 indirectly affected files', () => { - const affected = Array.from({ length: 10 }, (_, i) => `file${i}.ts`); - const factor = evaluateImpactBreadthFactor( - makeImpactGraph({ indirectlyAffected: affected }), - ); - // 10 * 10 = 100, which is exactly the cap - expect(factor.score).toBe(100); - }); - - it('should slice details to max 20 items when there are 21 
affected files', () => { - const affected = Array.from({ length: 21 }, (_, i) => `module${i}.ts`); - const factor = evaluateImpactBreadthFactor( - makeImpactGraph({ indirectlyAffected: affected }), - ); - // Score is capped at 100 (21 * 10 = 210, min(210, 100) = 100) - expect(factor.score).toBe(100); - // Details should be sliced to the first 20 - expect(factor.details).toBeDefined(); - expect(factor.details!.length).toBe(20); - // Verify the 21st element is not included - expect(factor.details).not.toContain('module20.ts'); - // Verify the 20th element (index 19) is included - expect(factor.details).toContain('module19.ts'); - }); -}); - -// ── calculateRisk tests ───────────────────────────────────────────────────── - -describe('calculateRisk', () => { - describe('zero-risk inputs', () => { - it('should return low risk with all zero-risk inputs', () => { - const result = calculateRisk( - [], // no changed files - [], // no breaking changes - makeTestCoverage(), // full coverage - makeDocStaleness(), // no stale docs - makeImpactGraph(), // no impact - ); - - expect(result.score).toBe(0); - expect(result.level).toBe('low'); - expect(result.factors).toHaveLength(6); - }); - - it('should have all factor scores at 0 for zero-risk inputs', () => { - const result = calculateRisk( - [], - [], - makeTestCoverage(), - makeDocStaleness(), - makeImpactGraph(), - ); - - for (const factor of result.factors) { - expect(factor.score).toBe(0); - } - }); - }); - - describe('high risk due to breaking changes', () => { - it('should be high/critical with high-severity breaking changes', () => { - const result = calculateRisk( - [makeChangedFile({ additions: 10, deletions: 5 })], - [makeBreakingChange({ severity: 'high' })], - makeTestCoverage({ coverageRatio: 1 }), - makeDocStaleness(), - makeImpactGraph(), - ); - - // Breaking changes factor: 100 * 0.30 = 30 - // All others ~0 - // Weighted average: 30 / 1.0 = 30 - expect(result.score).toBeGreaterThanOrEqual(25); - 
expect(['medium', 'high', 'critical']).toContain(result.level); - }); - }); - - describe('weighted score calculation', () => { - it('should compute weighted average correctly', () => { - // Set up known conditions: - // - Breaking changes: high severity -> score 100, weight 0.30 - // - Untested: 0% coverage -> score 100, weight 0.25 - // - Diff size: > 1000 lines -> score 100, weight 0.15 - // - Doc staleness: 5+ stale refs -> score 100, weight 0.10 - // - Config: CI config changed -> score 100, weight 0.10 - // - Impact: 10+ files -> score 100, weight 0.10 - // - // All scores 100 -> weighted average = 100 - - const changedFiles = [ - makeChangedFile({ - additions: 600, - deletions: 600, - category: 'source', - }), - makeChangedFile({ - path: '.github/workflows/ci.yml', - category: 'config', - additions: 10, - deletions: 5, - }), - ]; - - const result = calculateRisk( - changedFiles, - [makeBreakingChange({ severity: 'high' })], - makeTestCoverage({ - coverageRatio: 0, - changedSourceFiles: 5, - }), - makeDocStaleness({ - staleReferences: Array.from({ length: 6 }, (_, i) => ({ - docFile: `doc${i}.md`, - line: i, - reference: `ref${i}`, - reason: 'removed', - })), - }), - makeImpactGraph({ - indirectlyAffected: Array.from( - { length: 15 }, - (_, i) => `affected${i}.ts`, - ), - }), - ); - - // All scores are 100, so the weighted average should be 100 - expect(result.score).toBe(100); - expect(result.level).toBe('critical'); - }); - }); - - describe('level thresholds', () => { - // To get precise scores, we can use the fact that breaking changes alone - // have weight 0.30. 
If only breaking changes have a non-zero score: - // score = (breakingScore * 0.30) / totalWeight where totalWeight = 1.0 - // So breaking score of 100 -> final score 30 -> medium - // Need to combine factors to hit the exact thresholds - - it('should return "low" for score 0-25', () => { - // All zeros -> score 0 - const result = calculateRisk( - [], - [], - makeTestCoverage(), - makeDocStaleness(), - makeImpactGraph(), - ); - expect(result.level).toBe('low'); - expect(result.score).toBeLessThanOrEqual(25); - }); - - it('should return "medium" for score 26-50', () => { - // Breaking changes (high) alone: 100 * 0.30 / 1.0 = 30 -> medium - const result = calculateRisk( - [], - [makeBreakingChange({ severity: 'high' })], - makeTestCoverage(), - makeDocStaleness(), - makeImpactGraph(), - ); - expect(result.score).toBe(30); - expect(result.level).toBe('medium'); - }); - - it('should return "high" for score 51-75', () => { - // Breaking changes high: 100 * 0.30 = 30 - // Untested (0% coverage): 100 * 0.25 = 25 - // Total = 55 / 1.0 = 55 -> high - const result = calculateRisk( - [], - [makeBreakingChange({ severity: 'high' })], - makeTestCoverage({ - coverageRatio: 0, - changedSourceFiles: 5, - }), - makeDocStaleness(), - makeImpactGraph(), - ); - expect(result.score).toBe(55); - expect(result.level).toBe('high'); - }); - - it('should return "critical" for score 76+', () => { - // Breaking: 100 * 0.30 = 30 - // Untested: 100 * 0.25 = 25 - // Diff size >1000: 100 * 0.15 = 15 - // Doc staleness (5 refs = 100): 100 * 0.10 = 10 - // Config (CI): 100 * 0.10 = 10 - // Impact (10 files): 100 * 0.10 = 10 - // Total = 100 / 1.0 = 100 -> critical - - const result = calculateRisk( - [ - makeChangedFile({ - additions: 600, - deletions: 600, - category: 'source', - }), - makeChangedFile({ - path: '.github/workflows/ci.yml', - category: 'config', - additions: 5, - deletions: 5, - }), - ], - [makeBreakingChange({ severity: 'high' })], - makeTestCoverage({ - coverageRatio: 0, - 
changedSourceFiles: 5, - }), - makeDocStaleness({ - staleReferences: Array.from({ length: 5 }, (_, i) => ({ - docFile: `doc${i}.md`, - line: i, - reference: `ref${i}`, - reason: 'removed', - })), - }), - makeImpactGraph({ - indirectlyAffected: Array.from( - { length: 10 }, - (_, i) => `f${i}.ts`, - ), - }), - ); - - expect(result.score).toBeGreaterThanOrEqual(76); - expect(result.level).toBe('critical'); - }); - }); - - describe('result structure', () => { - it('should return all 6 factors', () => { - const result = calculateRisk( - [], - [], - makeTestCoverage(), - makeDocStaleness(), - makeImpactGraph(), - ); - - expect(result.factors).toHaveLength(6); - const names = result.factors.map((f) => f.name); - expect(names).toContain('Breaking changes'); - expect(names).toContain('Untested changes'); - expect(names).toContain('Diff size'); - expect(names).toContain('Stale documentation'); - expect(names).toContain('Config file changes'); - expect(names).toContain('Impact breadth'); - }); - - it('should have score as a rounded integer', () => { - const result = calculateRisk( - [], - [makeBreakingChange({ severity: 'low' })], - makeTestCoverage({ coverageRatio: 0.7 }), - makeDocStaleness(), - makeImpactGraph(), - ); - - expect(Number.isInteger(result.score)).toBe(true); - }); - }); -}); diff --git a/packages/core/__tests__/signature-differ.test.ts b/packages/core/__tests__/signature-differ.test.ts deleted file mode 100644 index 5879520..0000000 --- a/packages/core/__tests__/signature-differ.test.ts +++ /dev/null @@ -1,263 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { diffSignatures, SignatureDiffResult } from '../src/breaking/signature-differ.js'; - -describe('diffSignatures', () => { - // ── Identical signatures ────────────────────────────────────────────────── - - describe('identical signatures', () => { - it('should report no change for identical simple signatures', () => { - const result = diffSignatures('(a: string): void', '(a: string): 
void'); - expect(result.changed).toBe(false); - expect(result.details).toBe('signatures are identical'); - }); - - it('should report no change when only extra inner whitespace differs', () => { - const result = diffSignatures( - '(a: string, b: number): void', - '(a: string, b: number): void', - ); - expect(result.changed).toBe(false); - expect(result.details).toBe('signatures are identical'); - }); - - it('should report no change for empty parameter lists', () => { - const result = diffSignatures('(): void', '(): void'); - expect(result.changed).toBe(false); - }); - }); - - // ── Parameter count changes ─────────────────────────────────────────────── - - describe('different parameter count', () => { - it('should detect added parameter', () => { - const result = diffSignatures( - '(a: string): void', - '(a: string, b: number): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain('parameter count changed from 1 to 2'); - }); - - it('should detect removed parameter', () => { - const result = diffSignatures( - '(a: string, b: number): void', - '(a: string): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain('parameter count changed from 2 to 1'); - }); - - it('should detect going from no parameters to some', () => { - const result = diffSignatures('(): void', '(x: number): void'); - expect(result.changed).toBe(true); - expect(result.details).toContain('parameter count changed from 0 to 1'); - }); - - it('should detect going from some parameters to none', () => { - const result = diffSignatures('(x: number): void', '(): void'); - expect(result.changed).toBe(true); - expect(result.details).toContain('parameter count changed from 1 to 0'); - }); - }); - - // ── Parameter type changes ──────────────────────────────────────────────── - - describe('different parameter types', () => { - it('should detect a changed parameter type', () => { - const result = diffSignatures( - '(name: string): void', - '(name: 
number): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("parameter 'name' type changed"); - expect(result.details).toContain("'string'"); - expect(result.details).toContain("'number'"); - }); - - it('should detect multiple parameter type changes', () => { - const result = diffSignatures( - '(a: string, b: number): void', - '(a: boolean, b: string): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("parameter 'a' type changed"); - expect(result.details).toContain("parameter 'b' type changed"); - }); - - it('should not report unchanged parameters', () => { - const result = diffSignatures( - '(a: string, b: number): void', - '(a: string, b: boolean): void', - ); - expect(result.changed).toBe(true); - // Only b changed - expect(result.details).toContain("parameter 'b' type changed"); - expect(result.details).not.toContain("parameter 'a' type changed"); - }); - }); - - // ── Return type changes ─────────────────────────────────────────────────── - - describe('different return types', () => { - it('should detect a changed return type', () => { - const result = diffSignatures( - '(a: string): string', - '(a: string): number', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("return type changed from 'string' to 'number'"); - }); - - it('should detect return type added', () => { - const result = diffSignatures( - '(a: string)', - '(a: string): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("return type added: 'void'"); - }); - - it('should detect return type removed', () => { - const result = diffSignatures( - '(a: string): void', - '(a: string)', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("return type removed (was 'void')"); - }); - }); - - // ── Undefined signatures ────────────────────────────────────────────────── - - describe('undefined signatures', () => { - it('should report no change when both are 
undefined', () => { - const result = diffSignatures(undefined, undefined); - expect(result.changed).toBe(false); - expect(result.details).toBe('no signatures to compare'); - }); - - it('should report change when base is undefined and head is defined', () => { - const result = diffSignatures(undefined, '(x: number): void'); - expect(result.changed).toBe(true); - expect(result.details).toBe('signature added'); - }); - - it('should report change when base is defined and head is undefined', () => { - const result = diffSignatures('(x: number): void', undefined); - expect(result.changed).toBe(true); - expect(result.details).toBe('signature removed'); - }); - }); - - // ── Complex signatures ──────────────────────────────────────────────────── - - describe('complex signatures', () => { - it('should handle generic types correctly (no false split on inner commas)', () => { - const result = diffSignatures( - '(map: Map): void', - '(map: Map): void', - ); - expect(result.changed).toBe(false); - }); - - it('should detect changes in generic type parameters', () => { - const result = diffSignatures( - '(items: Array): void', - '(items: Array): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("parameter 'items' type changed"); - }); - - it('should handle optional parameters', () => { - const result = diffSignatures( - '(a: string, b?: number): void', - '(a: string, b?: number): void', - ); - expect(result.changed).toBe(false); - }); - - it('should handle rest parameters', () => { - const result = diffSignatures( - '(...args: string[]): void', - '(...args: number[]): void', - ); - expect(result.changed).toBe(true); - }); - - it('should handle Promise return types', () => { - const result = diffSignatures( - '(url: string): Promise', - '(url: string): Promise', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain('return type changed'); - }); - - it('should handle combined parameter and return type changes', () => { - 
const result = diffSignatures( - '(a: string): number', - '(a: boolean): string', - ); - expect(result.changed).toBe(true); - expect(result.details).toContain("parameter 'a' type changed"); - expect(result.details).toContain('return type changed'); - }); - }); - - // ── Untyped parameters ────────────────────────────────────────────────── - - describe('untyped parameters', () => { - it('should compare untyped params by raw parameter string', () => { - // a and b are untyped - extractParamType returns the raw name as the "type" - const result = diffSignatures( - '(a, b): void', - '(a, c): void', - ); - expect(result.changed).toBe(true); - // b -> c changed (param at index 1) - expect(result.details).toContain("parameter 'b' type changed"); - }); - - it('should report no change for identical untyped params', () => { - const result = diffSignatures( - '(a, b): void', - '(a, b): void', - ); - expect(result.changed).toBe(false); - }); - }); - - // ── Generic signature change fallback ────────────────────────────────── - - describe('generic signature change', () => { - it('should report generic signature changed when structural comparison finds no specific differences', () => { - // These signatures differ textually after normalization but the - // structural comparison (param types, return type) finds the same values. - // This triggers the "signature changed" fallback at line 210-211. 
- const result = diffSignatures( - '(a: string):void', - '(a : string): void', - ); - expect(result.changed).toBe(true); - expect(result.details).toBe('signature changed'); - }); - - it('should handle malformed signature without opening paren', () => { - const result = diffSignatures('noParens', '(a: string): void'); - expect(result.changed).toBe(true); - }); - }); - - // ── Return type interface ───────────────────────────────────────────────── - - describe('SignatureDiffResult interface', () => { - it('should always return an object with changed and details', () => { - const result: SignatureDiffResult = diffSignatures('(): void', '(): void'); - expect(result).toHaveProperty('changed'); - expect(result).toHaveProperty('details'); - expect(typeof result.changed).toBe('boolean'); - expect(typeof result.details).toBe('string'); - }); - }); -}); diff --git a/packages/core/__tests__/staleness-checker.test.ts b/packages/core/__tests__/staleness-checker.test.ts deleted file mode 100644 index 0b29f19..0000000 --- a/packages/core/__tests__/staleness-checker.test.ts +++ /dev/null @@ -1,969 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// ── Mock setup ────────────────────────────────────────────────────────────── -// vi.hoisted() ensures the mock fns exist before vi.mock factories run. 
- -const { mockShow, mockFg, mockReadFile } = vi.hoisted(() => ({ - mockShow: vi.fn(), - mockFg: vi.fn(), - mockReadFile: vi.fn(), -})); - -vi.mock('simple-git', () => ({ - default: () => ({ - show: mockShow, - }), -})); - -vi.mock('fast-glob', () => ({ - default: mockFg, -})); - -vi.mock('node:fs/promises', () => ({ - readFile: mockReadFile, -})); - -import { checkDocStaleness } from '../src/docs/staleness-checker.js'; -import type { ChangedFile } from '../src/types.js'; - -// ── Helpers ───────────────────────────────────────────────────────────────── - -function makeChangedFile( - overrides: Partial & Pick, -): ChangedFile { - return { - status: 'modified', - additions: 0, - deletions: 0, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -/** - * Set up `git.show()` to return specific content based on the "ref:path" argument. - * Accepts a map of `"ref:path"` -> content string. - */ -function setupGitShow(fileContents: Record): void { - mockShow.mockImplementation(async (ref: string) => { - if (ref in fileContents) { - return fileContents[ref]; - } - throw new Error(`fatal: path '${ref}' does not exist`); - }); -} - -// ── Reset mocks ───────────────────────────────────────────────────────────── - -beforeEach(() => { - mockShow.mockReset(); - mockFg.mockReset(); - mockReadFile.mockReset(); - - // By default, fast-glob returns no doc files - mockFg.mockResolvedValue([]); -}); - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('checkDocStaleness', () => { - const repoPath = '/repo'; - const base = 'main'; - const head = 'feature'; - - // ── No doc files ───────────────────────────────────────────────────── - - describe('no doc files', () => { - it('should return empty staleReferences and checkedFiles when no doc files exist', async () => { - mockFg.mockResolvedValue([]); - - const changedFiles = [ - makeChangedFile({ path: 'src/lib.ts', status: 'deleted' }), - ]; - - const result = await 
checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.staleReferences).toEqual([]); - expect(result.checkedFiles).toEqual([]); - }); - }); - - // ── No changed source files ────────────────────────────────────────── - - describe('no changed source files', () => { - it('should return no staleReferences when no source files are changed', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - mockReadFile.mockResolvedValue('Some documentation content'); - - // Only doc files changed, no source/deleted/renamed - const changedFiles = [ - makeChangedFile({ path: 'docs/guide.md', category: 'doc', status: 'modified' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.staleReferences).toEqual([]); - expect(result.checkedFiles).toEqual(['docs/guide.md']); - }); - - it('should return no staleReferences when only added files are present (no deletions or removals)', async () => { - mockFg.mockResolvedValue(['README.md']); - mockReadFile.mockResolvedValue('Some text'); - - const changedFiles = [ - makeChangedFile({ path: 'src/new-module.ts', status: 'added', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.staleReferences).toEqual([]); - expect(result.checkedFiles).toEqual(['README.md']); - }); - }); - - // ── Deleted source file referenced in docs ─────────────────────────── - - describe('deleted source file referenced in docs', () => { - it('should detect a stale reference when a deleted file path appears in a doc', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'See the implementation in src/old-module.ts for details.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/old-module.ts': 'export function oldFunc(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/old-module.ts', status: 'deleted' }), - ]; - - const result = await 
checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.staleReferences.length).toBeGreaterThanOrEqual(1); - - // Check that the deleted file path reference is found - const pathRef = result.staleReferences.find( - (r) => r.reference === 'src/old-module.ts', - ); - expect(pathRef).toBeDefined(); - expect(pathRef!.reason).toBe('referenced file was deleted'); - expect(pathRef!.docFile).toBe('docs/api.md'); - expect(pathRef!.line).toBe(1); - }); - - it('should also detect removed exported symbols from a deleted file', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Use the oldFunc function to process data.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/old-module.ts': 'export function oldFunc(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/old-module.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const symbolRef = result.staleReferences.find( - (r) => r.reference === 'oldFunc', - ); - expect(symbolRef).toBeDefined(); - expect(symbolRef!.reason).toContain('referenced symbol was removed from'); - expect(symbolRef!.reason).toContain('src/old-module.ts'); - }); - }); - - // ── Renamed file ───────────────────────────────────────────────────── - - describe('renamed file', () => { - it('should detect stale reference to old path when a file is renamed', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'Import from src/old-name.ts to get the helper.'; - mockReadFile.mockResolvedValue(docContent); - - const changedFiles = [ - makeChangedFile({ - path: 'src/new-name.ts', - oldPath: 'src/old-name.ts', - status: 'renamed', - }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.staleReferences).toHaveLength(1); - expect(result.staleReferences[0].reference).toBe('src/old-name.ts'); - 
expect(result.staleReferences[0].reason).toContain('renamed to src/new-name.ts'); - expect(result.staleReferences[0].docFile).toBe('docs/guide.md'); - }); - }); - - // ── Removed exports ────────────────────────────────────────────────── - - describe('removed exports from modified file', () => { - it('should detect stale reference when an exported symbol is removed', async () => { - mockFg.mockResolvedValue(['README.md']); - - const docContent = 'Call processData to handle incoming requests.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/api.ts': ` - export function processData(): void {} - export function keepThis(): void {} - `, - 'feature:src/api.ts': ` - export function keepThis(): void {} - `, - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/api.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'processData'); - expect(ref).toBeDefined(); - expect(ref!.reason).toContain('referenced symbol was removed from'); - expect(ref!.reason).toContain('src/api.ts'); - }); - - it('should not flag symbols that still exist in head', async () => { - mockFg.mockResolvedValue(['README.md']); - - const docContent = 'Use keepThis for stable functionality.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/api.ts': ` - export function processData(): void {} - export function keepThis(): void {} - `, - 'feature:src/api.ts': ` - export function keepThis(): void {} - `, - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/api.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // keepThis should NOT be flagged as stale since it still exists - const keepRef = result.staleReferences.find((r) => r.reference === 'keepThis'); - expect(keepRef).toBeUndefined(); - }); - }); - 
- // ── Generic name handling ──────────────────────────────────────────── - - describe('generic name handling', () => { - it('should NOT flag standalone prose "types" as stale', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - // "types" in ordinary prose — should not be flagged - const docContent = 'There are several types of configuration available.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/types.ts': 'export interface Config { key: string; }', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/types.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // The deleted path "src/types.ts" should still be found if the doc mentions it - // but the generic stem "types" should NOT match in plain prose - const genericRef = result.staleReferences.find( - (r) => r.reference === 'types' && r.reason.includes('referenced symbol was removed'), - ); - // "types" as a standalone word in prose should not be flagged via stem - // (it IS flagged if the doc doesn't contain it in a path-like context) - expect(genericRef).toBeUndefined(); - }); - - it('should flag "./types" as stale (path context)', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'Import from ./types to get the Config interface.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/types.ts': 'export interface Config { key: string; }', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/types.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // The stem "types" should be flagged because it appears in a path context: ./types - const stemRef = result.staleReferences.find( - (r) => r.reference === 'types' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - - it('should flag "types.ts" as stale (file 
extension context)', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'See types.ts for all interfaces.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/types.ts': 'export interface Config { key: string; }', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/types.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const stemRef = result.staleReferences.find( - (r) => r.reference === 'types' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - - it('should flag backtick-quoted `types` as stale (code context)', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'The `types` module exports all shared interfaces.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/types.ts': 'export interface Config { key: string; }', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/types.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const stemRef = result.staleReferences.find( - (r) => r.reference === 'types' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - - it('should flag non-generic names even in plain prose', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - // "processData" is not a generic name, so it should match via word-boundary - const docContent = 'Call processData to transform the payload.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/processor.ts': 'export function processData(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/processor.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 
'processData'); - expect(ref).toBeDefined(); - }); - }); - - // ── Multiple stale references in one doc ────────────────────────────── - - describe('multiple stale references in one doc', () => { - it('should report all stale references found in the same doc file', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = [ - 'Line 1: See src/old-module.ts for the old implementation.', - 'Line 2: The function processData handles input.', - 'Line 3: Import from src/renamed.ts for helpers.', - ].join('\n'); - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/old-module.ts': 'export function processData(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/old-module.ts', status: 'deleted' }), - makeChangedFile({ - path: 'src/new-name.ts', - oldPath: 'src/renamed.ts', - status: 'renamed', - }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // Expect at least 3 references: - // 1. deleted path "src/old-module.ts" (line 1) - // 2. removed symbol "processData" (line 2) - // 3. 
renamed old path "src/renamed.ts" (line 3) - expect(result.staleReferences.length).toBeGreaterThanOrEqual(3); - - const pathRef = result.staleReferences.find( - (r) => r.reference === 'src/old-module.ts', - ); - expect(pathRef).toBeDefined(); - expect(pathRef!.line).toBe(1); - - const symbolRef = result.staleReferences.find( - (r) => r.reference === 'processData', - ); - expect(symbolRef).toBeDefined(); - expect(symbolRef!.line).toBe(2); - - const renamedRef = result.staleReferences.find( - (r) => r.reference === 'src/renamed.ts', - ); - expect(renamedRef).toBeDefined(); - expect(renamedRef!.line).toBe(3); - }); - }); - - // ── Doc files checked list ──────────────────────────────────────────── - - describe('checkedFiles list', () => { - it('should populate checkedFiles with all discovered doc files', async () => { - mockFg.mockResolvedValue(['README.md', 'docs/api.md', 'docs/guide.mdx']); - mockReadFile.mockResolvedValue('No references here.'); - - const changedFiles = [ - makeChangedFile({ path: 'src/module.ts', status: 'deleted' }), - ]; - - setupGitShow({ - 'main:src/module.ts': 'export function unused(): void {}', - }); - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - expect(result.checkedFiles).toEqual(['README.md', 'docs/api.md', 'docs/guide.mdx']); - }); - - it('should return checkedFiles even when nothing is stale', async () => { - mockFg.mockResolvedValue(['README.md']); - mockReadFile.mockResolvedValue('No stale content at all.'); - - // Only added files, no deletions/renames/removals to check for - const changedFiles = [ - makeChangedFile({ path: 'src/brand-new.ts', status: 'added', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // checkedFiles returned because there was nothing to search for (short-circuit) - expect(result.checkedFiles).toEqual(['README.md']); - expect(result.staleReferences).toEqual([]); - }); - }); - - // ── File reading fallback 
───────────────────────────────────────────── - - describe('file reading', () => { - it('should fall back to git show when readFile fails', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - // readFile throws (file not on disk) - mockReadFile.mockRejectedValue(new Error('ENOENT')); - - // git show returns the doc content instead - setupGitShow({ - 'feature:docs/api.md': 'Reference to src/deleted.ts here.', - 'main:src/deleted.ts': 'export function gone(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/deleted.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find( - (r) => r.reference === 'src/deleted.ts', - ); - expect(ref).toBeDefined(); - }); - - it('should skip a doc file when both readFile and git show fail', async () => { - mockFg.mockResolvedValue(['docs/broken.md']); - - // Both reading methods fail - mockReadFile.mockRejectedValue(new Error('ENOENT')); - mockShow.mockRejectedValue(new Error('fatal: not found')); - - const changedFiles = [ - makeChangedFile({ path: 'src/deleted.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // No stale references since the doc couldn't be read - expect(result.staleReferences).toEqual([]); - }); - }); - - // ── filenameStem() edge cases ────────────────────────────────────────── - // These test the internal filenameStem() helper indirectly through the - // public checkDocStaleness() function. The stem is used as a symbol - // reference for deleted source files. - - describe('filenameStem() edge cases', () => { - it('should use the full filename as stem when there is no extension (e.g. "Makefile")', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - // The doc references "Makefile" as a word — since it's not a generic name, - // it should be matched via word-boundary regex. 
- const docContent = 'The Makefile handles the build process.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/Makefile': 'export const BUILD = true;', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/Makefile', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // The stem "Makefile" (no dot, so the whole name is the stem) should - // match via word-boundary regex. - const stemRef = result.staleReferences.find( - (r) => r.reference === 'Makefile' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - - it('should use only the part before the first dot for files with multiple dots (e.g. "file.test.ts")', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - // The doc references "file" as a word — this tests that filenameStem - // returns "file" not "file.test" for "file.test.ts" - const docContent = 'The file module provides utility functions.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/file.test.ts': 'export function testHelper(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/file.test.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // The stem should be "file" (before the first dot), not "file.test". - // "file" is not a generic name, so it matches via word-boundary. - const stemRef = result.staleReferences.find( - (r) => r.reference === 'file' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - - it('should handle empty path gracefully (no stem produced)', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - mockReadFile.mockResolvedValue('Nothing here.'); - - // A file with an empty-ish path should not crash the system. - // We simulate the deleted file scenario but the stem will be empty. 
- setupGitShow({}); - - const changedFiles = [ - makeChangedFile({ path: '.ts', status: 'deleted' }), - ]; - - // The stem of ".ts" is "" (empty before the first dot), so it should - // not be pushed as a symbol reference. This should not crash. - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // No stale references should be generated from an empty stem - const stemRefs = result.staleReferences.filter( - (r) => r.reason.includes('referenced symbol was removed'), - ); - expect(stemRefs).toEqual([]); - }); - }); - - // ── escapeRegex() edge cases ────────────────────────────────────────── - // These test the internal escapeRegex() helper indirectly. The function - // is used when building regex patterns for symbol name matching. - - describe('escapeRegex() edge cases', () => { - it('should correctly escape special regex characters in symbol names used for matching', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - // The symbol name "processData" doesn't have special chars, but - // this test ensures that escapeRegex is invoked correctly when building - // the generic name path context regex. The generic stem "config" with - // special chars in the doc path should still work correctly. - // Here we test a non-generic symbol that appears in the doc. 
- const docContent = 'Use getValue_v2 from the settings module.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/settings.ts': 'export const getValue_v2 = {};', - 'feature:src/settings.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/settings.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'getValue_v2'); - expect(ref).toBeDefined(); - }); - - it('should correctly match generic names in path context where escapeRegex is used for buildPathContextRegex', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - // The generic stem "utils" with a path context should still work - // even though the buildPathContextRegex uses escapeRegex internally. - const docContent = 'Import from ./utils to get helpers.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/utils.ts': 'export function helper(): void {}', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/utils.ts', status: 'deleted' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const stemRef = result.staleReferences.find( - (r) => r.reference === 'utils' && r.reason.includes('referenced symbol was removed'), - ); - expect(stemRef).toBeDefined(); - }); - }); - - // ── extractExportedSymbolNames() edge cases ───────────────────────── - // These test the internal extractExportedSymbolNames() helper indirectly - // through the public API by providing various export syntaxes in the base - // file content. 
- - describe('extractExportedSymbolNames() edge cases', () => { - it('should detect async function exports', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Call fetchData to retrieve records.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/fetcher.ts': 'export async function fetchData(): Promise {}', - 'feature:src/fetcher.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/fetcher.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'fetchData'); - expect(ref).toBeDefined(); - expect(ref!.reason).toContain('referenced symbol was removed from'); - }); - - it('should detect generator function exports', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Use generateItems for lazy iteration.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/gen.ts': 'export function* generateItems(): Generator { yield 1; }', - 'feature:src/gen.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/gen.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'generateItems'); - expect(ref).toBeDefined(); - }); - - it('should detect default exported function', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Call mainHandler to start the app.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/handler.ts': 'export default function mainHandler(): void {}', - 'feature:src/handler.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/handler.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, 
base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'mainHandler'); - expect(ref).toBeDefined(); - }); - - it('should detect default exported class', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Instantiate AppServer to start.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/server.ts': 'export default class AppServer {}', - 'feature:src/server.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/server.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'AppServer'); - expect(ref).toBeDefined(); - }); - - it('should detect enum exports', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Use the Status enum for state management.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/enums.ts': 'export enum Status { Active, Inactive }', - 'feature:src/enums.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/enums.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = result.staleReferences.find((r) => r.reference === 'Status'); - expect(ref).toBeDefined(); - }); - - it('should detect async default exported function', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Use bootstrap to initialize the app.'; - mockReadFile.mockResolvedValue(docContent); - - setupGitShow({ - 'main:src/boot.ts': 'export default async function bootstrap(): Promise {}', - 'feature:src/boot.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/boot.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const ref = 
result.staleReferences.find((r) => r.reference === 'bootstrap'); - expect(ref).toBeDefined(); - }); - - it('should deduplicate symbols that appear multiple times in the source', async () => { - mockFg.mockResolvedValue(['docs/api.md']); - - const docContent = 'Use doStuff for processing.'; - mockReadFile.mockResolvedValue(docContent); - - // The same symbol name exported in different forms — but since - // extractExportedSymbolNames deduplicates, only one reference - // should be tracked. However, note that "export function doStuff" and - // "export const doStuff" are both parsed. Since the function uses - // `new Set()` for dedup, only one entry should result. - setupGitShow({ - 'main:src/dupe.ts': [ - 'export function doStuff(): void {}', - // In practice a single file wouldn't have duplicate names, but - // for testing the Set dedup, let's just put two export lines that - // regex would match the same name. - ].join('\n'), - 'feature:src/dupe.ts': '', - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/dupe.ts', status: 'modified', category: 'source' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // Should have exactly one reference for "doStuff" - const refs = result.staleReferences.filter((r) => r.reference === 'doStuff'); - expect(refs).toHaveLength(1); - }); - }); - - // ── Renamed file with falsy oldPath ─────────────────────────────────── - - describe('renamed file with falsy oldPath', () => { - it('should not produce stale references for a renamed file when oldPath is undefined', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'Import from src/old-path.ts for helpers.'; - mockReadFile.mockResolvedValue(docContent); - - // The file is marked as renamed but oldPath is undefined (falsy). - // buildRenamedPaths filters with `f.status === 'renamed' && f.oldPath`, - // so this should be excluded. 
- const changedFiles = [ - makeChangedFile({ - path: 'src/new-path.ts', - oldPath: undefined, - status: 'renamed', - }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // No stale references from rename since oldPath is falsy - const renamedRefs = result.staleReferences.filter( - (r) => r.reason.includes('renamed'), - ); - expect(renamedRefs).toEqual([]); - }); - - it('should not produce stale references for a renamed file when oldPath is empty string', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = 'See the API guide.'; - mockReadFile.mockResolvedValue(docContent); - - const changedFiles = [ - makeChangedFile({ - path: 'src/new-path.ts', - oldPath: '', - status: 'renamed', - }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - const renamedRefs = result.staleReferences.filter( - (r) => r.reason.includes('renamed'), - ); - expect(renamedRefs).toEqual([]); - }); - }); - - // ── Non-source files are skipped for symbol extraction ──────────────── - - describe('non-source file filtering', () => { - it('should not extract symbols from non-source category files', async () => { - mockFg.mockResolvedValue(['README.md']); - mockReadFile.mockResolvedValue('Reference to someFunc here.'); - - const changedFiles = [ - makeChangedFile({ path: 'test/helper.ts', status: 'modified', category: 'test' }), - ]; - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // No symbols should be extracted from test files - expect(result.staleReferences).toEqual([]); - }); - }); - - // ── Multiple doc files ──────────────────────────────────────────────── - - describe('multiple doc files', () => { - it('should scan all doc files for stale references', async () => { - mockFg.mockResolvedValue(['README.md', 'docs/api.md']); - - mockReadFile.mockImplementation(async (filePath: string) => { - if (filePath === '/repo/README.md') { - return 'This 
project uses src/deleted.ts for core logic.'; - } - if (filePath === '/repo/docs/api.md') { - return 'The src/deleted.ts module provides key APIs.'; - } - throw new Error('ENOENT'); - }); - - const changedFiles = [ - makeChangedFile({ path: 'src/deleted.ts', status: 'deleted' }), - ]; - - setupGitShow({ - 'main:src/deleted.ts': 'export function api(): void {}', - }); - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // Both doc files reference the deleted path - const readmeRefs = result.staleReferences.filter( - (r) => r.docFile === 'README.md' && r.reference === 'src/deleted.ts', - ); - const apiRefs = result.staleReferences.filter( - (r) => r.docFile === 'docs/api.md' && r.reference === 'src/deleted.ts', - ); - - expect(readmeRefs).toHaveLength(1); - expect(apiRefs).toHaveLength(1); - }); - }); - - // ── Line numbers ────────────────────────────────────────────────────── - - describe('line number tracking', () => { - it('should report the correct line number for stale references', async () => { - mockFg.mockResolvedValue(['docs/guide.md']); - - const docContent = [ - 'This is line 1.', - 'This is line 2.', - 'See src/legacy-module.ts for details.', - 'This is line 4.', - ].join('\n'); - mockReadFile.mockResolvedValue(docContent); - - const changedFiles = [ - makeChangedFile({ path: 'src/legacy-module.ts', status: 'deleted' }), - ]; - - // The deleted file has no exports, so only the path reference triggers - setupGitShow({ - 'main:src/legacy-module.ts': 'const internal = 1;', - }); - - const result = await checkDocStaleness(repoPath, changedFiles, base, head); - - // The stem "legacy-module" is not a generic name and does not appear in the doc, - // only the full path does, but the stem may also match via word-boundary regex. - // We check that the path reference on line 3 is reported. 
- const pathRef = result.staleReferences.find( - (r) => r.reference === 'src/legacy-module.ts', - ); - expect(pathRef).toBeDefined(); - expect(pathRef!.line).toBe(3); - }); - }); -}); diff --git a/packages/core/__tests__/test-mapper.test.ts b/packages/core/__tests__/test-mapper.test.ts deleted file mode 100644 index c2e9b41..0000000 --- a/packages/core/__tests__/test-mapper.test.ts +++ /dev/null @@ -1,210 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { mapTestFiles } from '../src/coverage/test-mapper.js'; - -// Mock fast-glob -vi.mock('fast-glob', () => ({ - default: vi.fn(), -})); - -import fg from 'fast-glob'; - -const mockedFg = vi.mocked(fg); - -beforeEach(() => { - vi.clearAllMocks(); -}); - -// ── Helper ────────────────────────────────────────────────────────────────── - -/** - * Calls mapTestFiles and captures the glob patterns that were passed to - * fast-glob, so we can assert on candidate path generation without needing - * the filesystem. - */ -async function capturePatterns( - sourceFile: string, -): Promise { - mockedFg.mockResolvedValue([]); - await mapTestFiles('/repo', sourceFile); - if (mockedFg.mock.calls.length === 0) return []; - return mockedFg.mock.calls[0][0] as string[]; -} - -// ── Tests ─────────────────────────────────────────────────────────────────── - -describe('mapTestFiles', () => { - // ── Candidate path generation ─────────────────────────────────────────── - - describe('candidate path generation', () => { - it('should generate same-directory .test and .spec candidates', async () => { - const patterns = await capturePatterns('src/utils/parser.ts'); - - expect(patterns).toContain('src/utils/parser.test.ts'); - expect(patterns).toContain('src/utils/parser.spec.ts'); - expect(patterns).toContain('src/utils/parser.test.js'); - expect(patterns).toContain('src/utils/parser.spec.js'); - }); - - it('should generate __tests__ sibling directory candidates', async () => { - const patterns = await 
capturePatterns('src/utils/parser.ts'); - - expect(patterns).toContain('src/utils/__tests__/parser.ts'); - expect(patterns).toContain('src/utils/__tests__/parser.test.ts'); - expect(patterns).toContain('src/utils/__tests__/parser.spec.ts'); - expect(patterns).toContain('src/utils/__tests__/parser.js'); - expect(patterns).toContain('src/utils/__tests__/parser.test.js'); - }); - - it('should generate top-level test/ and tests/ directory candidates', async () => { - const patterns = await capturePatterns('src/utils/parser.ts'); - - // After stripping src/, subPath = utils/parser.ts, subDir = utils - expect(patterns).toContain('test/utils/parser.ts'); - expect(patterns).toContain('test/utils/parser.test.ts'); - expect(patterns).toContain('test/utils/parser.spec.ts'); - expect(patterns).toContain('tests/utils/parser.ts'); - expect(patterns).toContain('tests/utils/parser.test.ts'); - expect(patterns).toContain('tests/utils/parser.spec.ts'); - }); - - it('should check all four extensions (.ts, .tsx, .js, .jsx)', async () => { - const patterns = await capturePatterns('src/utils/parser.ts'); - - // Same-dir .test variants for all four extensions - expect(patterns).toContain('src/utils/parser.test.ts'); - expect(patterns).toContain('src/utils/parser.test.tsx'); - expect(patterns).toContain('src/utils/parser.test.js'); - expect(patterns).toContain('src/utils/parser.test.jsx'); - }); - - it('should strip src/ prefix for top-level test dir mapping', async () => { - const patterns = await capturePatterns('src/core/index.ts'); - - // src/ stripped -> core/index.ts, subDir = core - expect(patterns).toContain('test/core/index.ts'); - expect(patterns).toContain('tests/core/index.ts'); - expect(patterns).toContain('test/core/index.test.ts'); - expect(patterns).toContain('tests/core/index.test.ts'); - }); - - it('should strip lib/ prefix for top-level test dir mapping', async () => { - const patterns = await capturePatterns('lib/helpers/format.ts'); - - // lib/ stripped -> 
helpers/format.ts, subDir = helpers - expect(patterns).toContain('test/helpers/format.ts'); - expect(patterns).toContain('tests/helpers/format.ts'); - expect(patterns).toContain('test/helpers/format.test.ts'); - }); - - it('should strip the last src/ when path has nested src/', async () => { - const patterns = await capturePatterns('packages/foo/src/utils.ts'); - - // lastIndexOf('src/') finds the src/ in packages/foo/src/ - // Strips to utils.ts, subDir = . - expect(patterns).toContain('test/utils.ts'); - expect(patterns).toContain('test/utils.test.ts'); - expect(patterns).toContain('tests/utils.ts'); - }); - - it('should keep the full path when there is no src/ or lib/ prefix', async () => { - const patterns = await capturePatterns('utils/parser.ts'); - - // No src/ or lib/ to strip, so subDir = utils - expect(patterns).toContain('test/utils/parser.ts'); - expect(patterns).toContain('test/utils/parser.test.ts'); - expect(patterns).toContain('tests/utils/parser.ts'); - }); - - it('should handle a file in the root directory', async () => { - const patterns = await capturePatterns('index.ts'); - - expect(patterns).toContain('index.test.ts'); - expect(patterns).toContain('index.spec.ts'); - expect(patterns).toContain('__tests__/index.ts'); - expect(patterns).toContain('__tests__/index.test.ts'); - expect(patterns).toContain('test/index.ts'); - expect(patterns).toContain('tests/index.ts'); - }); - - it('should handle .tsx source files', async () => { - const patterns = await capturePatterns('src/components/Button.tsx'); - - // Base name is Button (extension stripped) - expect(patterns).toContain('src/components/Button.test.tsx'); - expect(patterns).toContain('src/components/Button.spec.tsx'); - expect(patterns).toContain('src/components/Button.test.ts'); - expect(patterns).toContain('src/components/__tests__/Button.tsx'); - }); - - it('should handle .js source files', async () => { - const patterns = await capturePatterns('src/utils/helpers.js'); - - 
expect(patterns).toContain('src/utils/helpers.test.js'); - expect(patterns).toContain('src/utils/helpers.spec.js'); - expect(patterns).toContain('src/utils/helpers.test.ts'); - }); - - it('should normalize backslash paths to forward slashes', async () => { - const patterns = await capturePatterns('src\\utils\\parser.ts'); - - // After normalization, should produce forward-slash paths - expect(patterns).toContain('src/utils/parser.test.ts'); - expect(patterns).toContain('src/utils/__tests__/parser.ts'); - }); - - it('should not produce duplicate candidate paths', async () => { - const patterns = await capturePatterns('src/utils/parser.ts'); - const unique = new Set(patterns); - expect(unique.size).toBe(patterns.length); - }); - }); - - // ── fast-glob integration ─────────────────────────────────────────────── - - describe('fast-glob integration', () => { - it('should call fast-glob with candidate patterns and cwd', async () => { - mockedFg.mockResolvedValue([]); - await mapTestFiles('/my/repo', 'src/utils/parser.ts'); - - expect(mockedFg).toHaveBeenCalledTimes(1); - const [patterns, options] = mockedFg.mock.calls[0]; - expect(Array.isArray(patterns)).toBe(true); - expect((patterns as string[]).length).toBeGreaterThan(0); - expect(options).toEqual({ - cwd: '/my/repo', - dot: false, - onlyFiles: true, - }); - }); - - it('should return matching test files found by fast-glob', async () => { - mockedFg.mockResolvedValue([ - 'src/utils/parser.test.ts', - 'src/utils/__tests__/parser.ts', - ] as never); - - const result = await mapTestFiles('/repo', 'src/utils/parser.ts'); - - expect(result).toEqual([ - 'src/utils/parser.test.ts', - 'src/utils/__tests__/parser.ts', - ]); - }); - - it('should return empty array when fast-glob finds no matches', async () => { - mockedFg.mockResolvedValue([] as never); - - const result = await mapTestFiles('/repo', 'src/utils/parser.ts'); - - expect(result).toEqual([]); - }); - - it('should return a single match when only one test file 
exists', async () => { - mockedFg.mockResolvedValue(['test/utils/parser.test.ts'] as never); - - const result = await mapTestFiles('/repo', 'src/utils/parser.ts'); - - expect(result).toEqual(['test/utils/parser.test.ts']); - }); - }); -}); diff --git a/packages/core/node_modules/.bin/tsc b/packages/core/node_modules/.bin/tsc deleted file mode 100755 index e556d4f..0000000 --- a/packages/core/node_modules/.bin/tsc +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsc" "$@" -else - exec node "$basedir/../typescript/bin/tsc" "$@" -fi diff --git a/packages/core/node_modules/.bin/tsserver b/packages/core/node_modules/.bin/tsserver deleted file mode 100755 index db2401b..0000000 --- a/packages/core/node_modules/.bin/tsserver +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case 
`uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@" -else - exec node "$basedir/../typescript/bin/tsserver" "$@" -fi diff --git a/packages/core/node_modules/.bin/tsup b/packages/core/node_modules/.bin/tsup deleted file mode 100755 index 4df1053..0000000 --- a/packages/core/node_modules/.bin/tsup +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-default.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-default.js" "$@" -fi diff --git a/packages/core/node_modules/.bin/tsup-node b/packages/core/node_modules/.bin/tsup-node deleted file mode 100755 index 689ae97..0000000 --- a/packages/core/node_modules/.bin/tsup-node +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-node.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-node.js" "$@" -fi diff --git a/packages/core/node_modules/.bin/vitest b/packages/core/node_modules/.bin/vitest deleted file mode 100755 index 011f1ea..0000000 --- a/packages/core/node_modules/.bin/vitest +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/vitest@3.2.4_@types+node@22.19.10/node_modules/vitest/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/vitest@3.2.4_@types+node@22.19.10/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/vitest@3.2.4_@types+node@22.19.10/node_modules/vitest/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/vitest@3.2.4_@types+node@22.19.10/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../vitest/vitest.mjs" "$@" -else - exec node "$basedir/../vitest/vitest.mjs" "$@" -fi diff --git a/packages/core/node_modules/@types/node b/packages/core/node_modules/@types/node deleted file mode 120000 index 129d921..0000000 --- a/packages/core/node_modules/@types/node +++ /dev/null @@ -1 +0,0 @@ -../../../../node_modules/.pnpm/@types+node@22.19.10/node_modules/@types/node \ No newline at end of file diff --git a/packages/core/node_modules/fast-glob b/packages/core/node_modules/fast-glob deleted file mode 120000 index aeeba4e..0000000 --- a/packages/core/node_modules/fast-glob +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/fast-glob@3.3.3/node_modules/fast-glob \ No newline at end of file diff --git a/packages/core/node_modules/simple-git b/packages/core/node_modules/simple-git deleted file mode 120000 index bd871f0..0000000 --- a/packages/core/node_modules/simple-git +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/simple-git@3.30.0/node_modules/simple-git \ No newline at end of file diff --git a/packages/core/node_modules/tsup b/packages/core/node_modules/tsup deleted file mode 120000 index 547982a..0000000 --- a/packages/core/node_modules/tsup +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup \ No newline at end of file diff --git a/packages/core/node_modules/typescript b/packages/core/node_modules/typescript deleted file mode 120000 index d6c42d5..0000000 --- a/packages/core/node_modules/typescript +++ /dev/null @@ -1 +0,0 @@ 
-../../../node_modules/.pnpm/typescript@5.7.3/node_modules/typescript \ No newline at end of file diff --git a/packages/core/node_modules/vitest b/packages/core/node_modules/vitest deleted file mode 120000 index 74f0cca..0000000 --- a/packages/core/node_modules/vitest +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/vitest@3.2.4_@types+node@22.19.10/node_modules/vitest \ No newline at end of file diff --git a/packages/core/src/analyzer.ts b/packages/core/src/analyzer.ts deleted file mode 100644 index bdf7411..0000000 --- a/packages/core/src/analyzer.ts +++ /dev/null @@ -1,164 +0,0 @@ -import simpleGit from 'simple-git'; -import { PRAnalysis, AnalysisOptions, ChangedFile, BreakingChange, TestCoverageReport, DocStalenessReport, RiskAssessment } from './types.js'; -import { parseDiff } from './diff/diff-parser.js'; -import { detectBreakingChanges } from './breaking/detector.js'; -import { checkTestCoverage } from './coverage/coverage-checker.js'; -import { checkDocStaleness } from './docs/staleness-checker.js'; -import { buildImpactGraph } from './impact/impact-graph.js'; -import { calculateRisk } from './risk/risk-calculator.js'; -import { buildReverseDependencyMap } from './imports/import-resolver.js'; - -/** - * Resolve the default base branch for the repository by checking whether - * 'main' or 'master' exists in the local branch list. - */ -export async function resolveDefaultBaseBranch(repoPath: string): Promise { - const git = simpleGit(repoPath); - const branchSummary = await git.branch(); - - if (branchSummary.all.includes('main')) { - return 'main'; - } - - if (branchSummary.all.includes('master')) { - return 'master'; - } - - // If neither 'main' nor 'master' is found, fall back to 'main' and let - // the caller deal with any resulting git error. - return 'main'; -} - -/** - * Build a human-readable summary of the PR analysis results. 
- */ -function generateSummary( - changedFiles: ChangedFile[], - breakingChanges: BreakingChange[], - testCoverage: TestCoverageReport, - riskScore: RiskAssessment, -): string { - const totalAdditions = changedFiles.reduce((sum, f) => sum + f.additions, 0); - const totalDeletions = changedFiles.reduce((sum, f) => sum + f.deletions, 0); - - const parts: string[] = []; - - parts.push( - `This PR changes ${changedFiles.length} file${changedFiles.length === 1 ? '' : 's'} ` + - `(+${totalAdditions}/-${totalDeletions}) with a ${riskScore.level} risk score of ${riskScore.score}/100.`, - ); - - if (breakingChanges.length > 0) { - parts.push( - `Found ${breakingChanges.length} breaking change${breakingChanges.length === 1 ? '' : 's'} affecting exported APIs.`, - ); - } - - if (testCoverage.gaps.length > 0) { - parts.push( - `${testCoverage.gaps.length} source file${testCoverage.gaps.length === 1 ? '' : 's'} lack${testCoverage.gaps.length === 1 ? 's' : ''} corresponding test changes.`, - ); - } - - return parts.join(' '); -} - -/** - * Run all analysis steps on a pull request and produce a comprehensive report. - * - * Steps: - * 1. Resolve base and head branches - * 2. Verify the repository and branches - * 3. Parse the diff to get changed files - * 4. Build reverse dependency map once (shared by breaking + impact) - * 5. Run breaking-change detection, test-coverage checking, doc-staleness - * checking, and impact-graph building in parallel - * 6. Calculate the overall risk score - * 7. Generate a human-readable summary - */ -export async function analyzePR(options: AnalysisOptions): Promise { - const { repoPath, skipBreaking, skipCoverage, skipDocs } = options; - - // --- 1. Resolve branches -------------------------------------------------- - const baseBranch = options.baseBranch ?? await resolveDefaultBaseBranch(repoPath); - const headBranch = options.headBranch ?? 'HEAD'; - - // --- 2. 
Verify the repo exists and branches are valid --------------------- - const git = simpleGit(repoPath); - - // This will throw if the path is not a git repository. - await git.checkIsRepo(); - - // Verify that the base branch ref is valid. - await git.revparse([baseBranch]); - - // Verify that the head branch ref is valid. - await git.revparse([headBranch]); - - // --- 3. Parse the diff ---------------------------------------------------- - const changedFiles = await parseDiff(repoPath, baseBranch, headBranch); - - // --- 4. Build reverse dependency map once (shared by breaking + impact) --- - const reverseDeps = await buildReverseDependencyMap(repoPath); - - // --- 5. Run parallel analysis steps --------------------------------------- - const [breakingChanges, testCoverage, docStaleness, impactGraph] = - await Promise.all([ - // Breaking change detection - skipBreaking - ? Promise.resolve([]) - : detectBreakingChanges(repoPath, baseBranch, headBranch, changedFiles, reverseDeps), - - // Test coverage analysis - skipCoverage - ? Promise.resolve({ - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 0, - gaps: [], - }) - : checkTestCoverage(repoPath, changedFiles), - - // Documentation staleness checking - skipDocs - ? Promise.resolve({ - staleReferences: [], - checkedFiles: [], - }) - : checkDocStaleness(repoPath, changedFiles, baseBranch, headBranch), - - // Impact graph building - buildImpactGraph(repoPath, changedFiles, 3, reverseDeps), - ]); - - // --- 6. Calculate risk score ---------------------------------------------- - const riskScore = calculateRisk( - changedFiles, - breakingChanges, - testCoverage, - docStaleness, - impactGraph, - ); - - // --- 7. Generate summary -------------------------------------------------- - const summary = generateSummary( - changedFiles, - breakingChanges, - testCoverage, - riskScore, - ); - - // --- 8. 
Assemble and return the full analysis ----------------------------- - return { - repoPath, - baseBranch, - headBranch, - changedFiles, - breakingChanges, - testCoverage, - docStaleness, - impactGraph, - riskScore, - summary, - }; -} diff --git a/packages/core/src/breaking/detector.ts b/packages/core/src/breaking/detector.ts deleted file mode 100644 index 073f80b..0000000 --- a/packages/core/src/breaking/detector.ts +++ /dev/null @@ -1,277 +0,0 @@ -import simpleGit from 'simple-git'; -import { BreakingChange, ChangedFile } from '../types.js'; -import { diffExports, parseExports } from './export-differ.js'; -import { diffSignatures } from './signature-differ.js'; -import { findConsumers, ReverseDependencyMap } from '../imports/import-resolver.js'; - -/** File extensions that we analyze for breaking changes. */ -const ANALYZABLE_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx']); - -/** - * Get the file extension (lowercased) from a file path. - */ -function getExtension(filePath: string): string { - const lastDot = filePath.lastIndexOf('.'); - if (lastDot === -1) return ''; - return filePath.slice(lastDot).toLowerCase(); -} - -/** - * Safely retrieve file content at a specific git ref. - * Returns `null` if the file doesn't exist at that ref. - */ -async function getFileAtRef( - git: ReturnType, - ref: string, - filePath: string, -): Promise { - try { - return await git.show([`${ref}:${filePath}`]); - } catch { - // File doesn't exist at this ref (new file, or path changed) - return null; - } -} - -/** - * Detect breaking changes between two branches by analyzing export differences - * in changed source files. - * - * @param repoPath - Absolute path to the git repository - * @param baseBranch - The base branch/ref (e.g. "main", "origin/main") - * @param headBranch - The head branch/ref (e.g. 
"feature/xyz", "HEAD") - * @param changedFiles - List of files changed between the two branches - * @param reverseDependencyMap - Optional pre-built reverse dependency map to avoid a redundant repo scan - * @returns Array of detected breaking changes - */ -export async function detectBreakingChanges( - repoPath: string, - baseBranch: string, - headBranch: string, - changedFiles: ChangedFile[], - reverseDependencyMap?: ReverseDependencyMap, -): Promise { - const git = simpleGit(repoPath); - const breakingChanges: BreakingChange[] = []; - - // Only analyze source files that were modified, deleted, or renamed - const filesToAnalyze = changedFiles.filter((f) => { - const ext = getExtension(f.path); - return ( - ANALYZABLE_EXTENSIONS.has(ext) && - (f.status === 'modified' || f.status === 'deleted' || f.status === 'renamed') - ); - }); - - for (const file of filesToAnalyze) { - try { - if (file.status === 'renamed' && file.oldPath) { - // For renamed files, the old path's consumers will break - const oldBaseContent = await getFileAtRef(git, baseBranch, file.oldPath); - if (oldBaseContent === null) { - continue; - } - - const oldExports = parseExports(oldBaseContent, file.oldPath); - const headContent = await getFileAtRef(git, headBranch, file.path); - const newExports = headContent ? 
parseExports(headContent, file.path) : { filePath: file.path, symbols: [] }; - - // Every export from the old path is effectively removed from that path - for (const sym of oldExports.symbols) { - // Check if the symbol still exists in the new file with the same signature - const stillExists = newExports.symbols.some( - (s) => s.name === sym.name && s.kind === sym.kind, - ); - - if (stillExists) { - // Symbol exists in new location — it's a path rename, low severity - breakingChanges.push({ - filePath: file.oldPath, - type: 'renamed_export', - symbolName: sym.name, - before: `${formatSymbolDescription(sym)} (at ${file.oldPath})`, - after: `${formatSymbolDescription(sym)} (at ${file.path})`, - severity: 'low', - consumers: [], - }); - } else { - // Symbol was removed during the rename — high severity - breakingChanges.push({ - filePath: file.oldPath, - type: 'removed_export', - symbolName: sym.name, - before: formatSymbolDescription(sym), - after: null, - severity: 'high', - consumers: [], - }); - } - } - - continue; - } - - const baseContent = await getFileAtRef(git, baseBranch, file.path); - - // If we can't get the base content, we can't detect breaking changes - if (baseContent === null) { - continue; - } - - if (file.status === 'deleted') { - // Every export in a deleted file is a breaking change - const baseExports = parseExports(baseContent, file.path); - - for (const sym of baseExports.symbols) { - breakingChanges.push({ - filePath: file.path, - type: 'removed_export', - symbolName: sym.name, - before: formatSymbolDescription(sym), - after: null, - severity: 'high', - consumers: [], - }); - } - } else { - // File was modified — compare exports - const headContent = await getFileAtRef(git, headBranch, file.path); - - if (headContent === null) { - // Shouldn't happen for a 'modified' file, but handle gracefully - continue; - } - - const diff = diffExports(file.path, baseContent, headContent); - - // ── Detect renames 
────────────────────────────────────────────── - // A rename is when a symbol was removed and a new symbol with the - // same kind and a similar (or identical) signature was added in - // the same file. We pair them up and flag as 'renamed_export' - // with low severity, removing them from removed/added so they - // don't also appear as separate removed_export entries. - const remainingRemoved: typeof diff.removed = []; - const matchedAddedIndices = new Set(); - - for (const removedSym of diff.removed) { - let matchIndex = -1; - - for (let i = 0; i < diff.added.length; i++) { - if (matchedAddedIndices.has(i)) continue; - - const addedSym = diff.added[i]; - - // Must be the same kind (function → function, class → class, etc.) - if (removedSym.kind !== addedSym.kind) continue; - - // Compare signatures — if diffSignatures reports no change, - // they have the same signature shape, indicating a likely rename - const sigResult = diffSignatures( - removedSym.signature, - addedSym.signature, - ); - - if (!sigResult.changed) { - matchIndex = i; - break; - } - } - - if (matchIndex !== -1) { - const addedSym = diff.added[matchIndex]; - matchedAddedIndices.add(matchIndex); - - breakingChanges.push({ - filePath: file.path, - type: 'renamed_export', - symbolName: removedSym.name, - before: formatSymbolDescription(removedSym), - after: formatSymbolDescription(addedSym), - severity: 'low', - consumers: [], - }); - } else { - remainingRemoved.push(removedSym); - } - } - - // Removed exports (not matched as renames) → high severity - for (const sym of remainingRemoved) { - breakingChanges.push({ - filePath: file.path, - type: 'removed_export', - symbolName: sym.name, - before: formatSymbolDescription(sym), - after: null, - severity: 'high', - consumers: [], - }); - } - - // Modified signatures → medium severity - for (const { before, after } of diff.modified) { - const sigDiff = diffSignatures(before.signature, after.signature); - - // Only report if there's an actual signature 
change - // (kind changes are also caught here since diffExports flags them) - if (sigDiff.changed || before.kind !== after.kind) { - breakingChanges.push({ - filePath: file.path, - type: before.kind !== after.kind ? 'changed_type' : 'changed_signature', - symbolName: before.name, - before: formatSymbolDescription(before), - after: formatSymbolDescription(after), - severity: 'medium', - consumers: [], - }); - } - } - } - } catch (error) { - // If we can't analyze a file (e.g. binary, encoding issues), skip it - // but warn on stderr so failures aren't completely silent. - const msg = error instanceof Error ? error.message : String(error); - console.warn(`[pr-impact] Skipping ${file.path}: ${msg}`); - continue; - } - } - - // ── Populate consumers ─────────────────────────────────────────────────── - // Collect the set of files that have at least one breaking change, then - // scan repo source files to find which ones import from those files. - if (breakingChanges.length > 0) { - const affectedFiles = new Set(breakingChanges.map((bc) => bc.filePath)); - const consumersMap = await findConsumers(repoPath, affectedFiles, reverseDependencyMap); - - for (const bc of breakingChanges) { - bc.consumers = consumersMap.get(bc.filePath) ?? []; - } - } - - return breakingChanges; -} - -/** - * Format a symbol into a human-readable description string. 
- */ -function formatSymbolDescription(sym: { - name: string; - kind: string; - signature?: string; - isDefault: boolean; -}): string { - const parts: string[] = []; - - if (sym.isDefault) { - parts.push('default'); - } - - parts.push(sym.kind); - parts.push(sym.name); - - if (sym.signature) { - parts.push(sym.signature); - } - - return parts.join(' '); -} diff --git a/packages/core/src/breaking/export-differ.ts b/packages/core/src/breaking/export-differ.ts deleted file mode 100644 index f02563a..0000000 --- a/packages/core/src/breaking/export-differ.ts +++ /dev/null @@ -1,625 +0,0 @@ -import { ExportedSymbol, FileExports } from '../types.js'; - -/** - * Callback type for resolving barrel re-exports. - * - * Given a module specifier (e.g. `'./utils'`) and the path of the file that - * contains the `export * from` statement, the resolver should return the - * content of the target module as a string, or `null` if it cannot be resolved. - * - * The second return value is the resolved file path (repo-relative) for the - * target module, used for cycle detection. - */ -export type FileResolver = ( - moduleSpecifier: string, - importerFilePath: string, -) => Promise<{ content: string; resolvedPath: string } | null> | { content: string; resolvedPath: string } | null; - -/** - * Regex patterns for extracting exported symbols from TypeScript/JavaScript. - * - * Each pattern captures: - * - The symbol name - * - Optionally the kind (function, class, etc.) - * - Optionally the signature (parameter list + return type for functions) - */ - -// export * from './module' -const EXPORT_STAR_RE = /export\s+\*\s+from\s+['"]([^'"]+)['"]/g; - -// export * as ns from './module' -const EXPORT_STAR_AS_RE = /export\s+\*\s+as\s+(\w+)\s+from\s+['"]([^'"]+)['"]/g; - -// export [declare] async? function[*] NAME(...) -const EXPORT_FUNCTION_RE = - /export\s+(?:declare\s+)?(?:async\s+)?function\s*\*?\s*(\w+)\s*(\([^)]*\)(?:\s*:\s*[^{;]+)?)/g; - -// export default [declare] async? 
function[*] NAME(...) -const EXPORT_DEFAULT_FUNCTION_RE = - /export\s+default\s+(?:declare\s+)?(?:async\s+)?function\s*\*?\s*(\w+)\s*(\([^)]*\)(?:\s*:\s*[^{;]+)?)/g; - -// export default [declare] async? function[*](...) — unnamed default -const EXPORT_DEFAULT_ANON_FUNCTION_RE = - /export\s+default\s+(?:declare\s+)?(?:async\s+)?function\s*\*?\s*(\([^)]*\)(?:\s*:\s*[^{;]+)?)/g; - -// export [declare] [abstract] class NAME -const EXPORT_CLASS_RE = /export\s+(?:declare\s+)?(?:abstract\s+)?class\s+(\w+)/g; - -// export default [declare] [abstract] class NAME -const EXPORT_DEFAULT_CLASS_RE = /export\s+default\s+(?:declare\s+)?(?:abstract\s+)?class\s+(\w+)/g; - -// export [declare] const enum NAME (must be checked before variable regex) -const EXPORT_CONST_ENUM_RE = /export\s+(?:declare\s+)?const\s+enum\s+(\w+)/g; - -// export [declare] const NAME / export let NAME / export var NAME -// Also handles: export const NAME: Type = ... -const EXPORT_VARIABLE_RE = - /export\s+(?:declare\s+)?(const|let|var)\s+(\w+)\s*(?::\s*([^=;]+?))?(?:\s*=|;)/g; - -// export [declare] const { a, b } = ... (destructured object) -const EXPORT_DESTRUCTURED_OBJ_RE = - /export\s+(?:declare\s+)?(?:const|let|var)\s+\{([^}]+)\}/g; - -// export [declare] const [ a, b ] = ... 
(destructured array) -const EXPORT_DESTRUCTURED_ARR_RE = - /export\s+(?:declare\s+)?(?:const|let|var)\s+\[([^\]]+)\]/g; - -// export [declare] interface NAME -const EXPORT_INTERFACE_RE = /export\s+(?:declare\s+)?interface\s+(\w+)/g; - -// export [declare] type NAME -const EXPORT_TYPE_RE = /export\s+(?:declare\s+)?type\s+(\w+)/g; - -// export [declare] enum NAME -const EXPORT_ENUM_RE = /export\s+(?:declare\s+)?enum\s+(\w+)/g; - -// export { a, b, c } or export { a as b, c as default } -const EXPORT_NAMED_RE = /export\s*\{([^}]+)\}/g; - -// export default (catch-all for default exports not matched above) -const EXPORT_DEFAULT_EXPR_RE = /export\s+default\s+(?!function|class|interface|type|enum|abstract|async|declare)(\w+)/g; - -/** - * Strip single-line and multi-line comments from source code to avoid - * matching exports inside comments. - */ -function stripComments(content: string): string { - // Remove single-line comments but preserve strings - // Remove block comments - return content - .replace(/\/\*[\s\S]*?\*\//g, '') - .replace(/\/\/.*$/gm, ''); -} - -/** - * Normalize a signature string by collapsing whitespace. - */ -function normalizeSignature(sig: string): string { - return sig.replace(/\s+/g, ' ').trim(); -} - -/** Maximum depth for recursively resolving barrel re-exports. */ -const MAX_BARREL_DEPTH = 10; - -/** - * Parse a TypeScript/JavaScript file's content to extract all exported symbols. - * - * When a `fileResolver` is provided, `export * from '...'` barrel re-exports - * are resolved by reading the target module and recursively parsing its exports. - * The `export *` syntax re-exports all named exports but NOT the default export - * (standard ES module behavior). - * - * For `export * as ns from '...'`, a single namespace symbol is created. 
- */ -export function parseExports( - content: string, - filePath: string, - fileResolver?: FileResolver, -): FileExports { - // Delegate to the internal async implementation and unwrap if synchronous - const result = parseExportsInternal( - content, - filePath, - fileResolver ?? null, - new Set(), - 0, - ); - - // If no resolver is provided, the result is always synchronous - if (result instanceof Promise) { - // Cannot await in a sync function — wrap in a sync-compatible pattern. - // In practice, if callers use a fileResolver they should use parseExportsAsync. - // For backward compatibility parseExports stays sync when no resolver is given. - throw new Error( - 'parseExports returned a Promise unexpectedly. Use parseExportsAsync for barrel re-export resolution.', - ); - } - - return result; -} - -/** - * Async version of parseExports that supports barrel re-export resolution. - * - * When `fileResolver` is provided, `export * from '...'` statements are - * recursively resolved. Without a resolver, behaves identically to `parseExports`. - */ -export async function parseExportsAsync( - content: string, - filePath: string, - fileResolver?: FileResolver | null, -): Promise { - return parseExportsInternal( - content, - filePath, - fileResolver ?? null, - new Set(), - 0, - ); -} - -/** - * Internal implementation that returns a Promise when barrel resolution is needed - * and a plain value when it is not. - */ -function parseExportsInternal( - content: string, - filePath: string, - fileResolver: FileResolver | null, - visited: Set, - depth: number, -): FileExports | Promise { - const symbols: ExportedSymbol[] = []; - const seen = new Set(); - - const stripped = stripComments(content); - - function addSymbol(sym: ExportedSymbol): void { - // Use a compound key to differentiate default vs named - const key = sym.isDefault ? `default::${sym.name}` : sym.name; - if (!seen.has(key)) { - seen.add(key); - symbols.push(sym); - } - } - - // 1. 
export default function NAME(...) - { - const re = new RegExp(EXPORT_DEFAULT_FUNCTION_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - addSymbol({ - name: m[1], - kind: 'function', - signature: normalizeSignature(m[2]), - isDefault: true, - }); - } - } - - // 2. export default anonymous function(...) - { - const re = new RegExp(EXPORT_DEFAULT_ANON_FUNCTION_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - // Only match if this is truly anonymous (no name captured by the named variant) - // The named variant regex already matched named ones, so check if the char before '(' is not a word char - const beforeParen = stripped.substring(0, m.index + m[0].indexOf('(')); - if (/function\s*$/.test(beforeParen)) { - addSymbol({ - name: 'default', - kind: 'function', - signature: normalizeSignature(m[1]), - isDefault: true, - }); - } - } - } - - // 3. export function NAME(...) - { - const re = new RegExp(EXPORT_FUNCTION_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - // Ensure this is not a "default" export (already handled above) - const prefix = stripped.substring(Math.max(0, m.index - 10), m.index + 7); - if (prefix.includes('default')) continue; - - addSymbol({ - name: m[1], - kind: 'function', - signature: normalizeSignature(m[2]), - isDefault: false, - }); - } - } - - // 4. export default class NAME - { - const re = new RegExp(EXPORT_DEFAULT_CLASS_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - addSymbol({ - name: m[1], - kind: 'class', - isDefault: true, - }); - } - } - - // 5. 
export class NAME - { - const re = new RegExp(EXPORT_CLASS_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const prefix = stripped.substring(Math.max(0, m.index - 10), m.index + 7); - if (prefix.includes('default')) continue; - - addSymbol({ - name: m[1], - kind: 'class', - isDefault: false, - }); - } - } - - // 6a. export const enum NAME (before variable regex to avoid false matches) - const constEnumNames = new Set(); - { - const re = new RegExp(EXPORT_CONST_ENUM_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - constEnumNames.add(m[1]); - addSymbol({ - name: m[1], - kind: 'enum', - isDefault: false, - }); - } - } - - // 6b. export const { a, b } = ... (destructured object) - const destructuredNames = new Set(); - { - const re = new RegExp(EXPORT_DESTRUCTURED_OBJ_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const items = m[1].split(','); - for (const item of items) { - // Handle "original as renamed" pattern - const asMatch = item.trim().match(/^(\w+)\s+as\s+(\w+)$/); - const name = asMatch ? asMatch[2] : item.trim().match(/^(\w+)/)?.[1]; - if (name) { - destructuredNames.add(name); - addSymbol({ - name, - kind: 'const', - isDefault: false, - }); - } - } - } - } - - // 6c. export const [ a, b ] = ... (destructured array) - { - const re = new RegExp(EXPORT_DESTRUCTURED_ARR_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const items = m[1].split(','); - for (const item of items) { - const name = item.trim().match(/^(\w+)/)?.[1]; - if (name) { - destructuredNames.add(name); - addSymbol({ - name, - kind: 'const', - isDefault: false, - }); - } - } - } - } - - // 6d. 
export const/let/var NAME - { - const re = new RegExp(EXPORT_VARIABLE_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const varKeyword = m[1]; // const, let, var - const name = m[2]; - - // Skip if this was already captured as a const enum or destructured binding - if (constEnumNames.has(name) || destructuredNames.has(name)) continue; - // Skip "export const enum Foo" — the "enum" would be captured as a variable name - if (name === 'enum') continue; - - const typeAnnotation = m[3] ? normalizeSignature(m[3]) : undefined; - - addSymbol({ - name, - kind: varKeyword === 'const' ? 'const' : 'variable', - signature: typeAnnotation, - isDefault: false, - }); - } - } - - // 7. export interface NAME - { - const re = new RegExp(EXPORT_INTERFACE_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - addSymbol({ - name: m[1], - kind: 'interface', - isDefault: false, - }); - } - } - - // 8. export type NAME (but not "export type {" which is a re-export) - { - const re = new RegExp(EXPORT_TYPE_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - // Skip "export type {" — that's a type-only re-export block, not a type alias - const afterMatch = stripped.substring(m.index + m[0].length).trimStart(); - if (afterMatch.startsWith('{')) continue; - - addSymbol({ - name: m[1], - kind: 'type', - isDefault: false, - }); - } - } - - // 9. export enum NAME - { - const re = new RegExp(EXPORT_ENUM_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - addSymbol({ - name: m[1], - kind: 'enum', - isDefault: false, - }); - } - } - - // 10. export { a, b, c } and export { a as b } - { - const re = new RegExp(EXPORT_NAMED_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - // Check if this is preceded by "type" → export type { ... 
} - const preceding = stripped.substring(Math.max(0, m.index - 6), m.index); - const isTypeOnly = /type\s*$/.test(preceding); - - const inner = m[1]; - const items = inner.split(','); - - for (const item of items) { - const trimmed = item.trim(); - if (!trimmed) continue; - - // Handle "name as alias" patterns - const asMatch = trimmed.match(/^(\w+)\s+as\s+(\w+)$/); - let exportedName: string; - let isDefault = false; - - if (asMatch) { - exportedName = asMatch[2]; - if (exportedName === 'default') { - isDefault = true; - exportedName = asMatch[1]; // Use original name for tracking - } - } else { - exportedName = trimmed; - } - - // Skip if not a valid identifier - if (!/^\w+$/.test(exportedName)) continue; - - addSymbol({ - name: exportedName, - kind: isTypeOnly ? 'type' : 'variable', - isDefault, - }); - } - } - } - - // 11. export default (identifier) - { - const re = new RegExp(EXPORT_DEFAULT_EXPR_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - addSymbol({ - name: m[1], - kind: 'variable', - isDefault: true, - }); - } - } - - // 12. export * as ns from '...' (namespace re-export — must be checked BEFORE export *) - const starAsSpecifiers = new Set(); - { - const re = new RegExp(EXPORT_STAR_AS_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const nsName = m[1]; - const specifier = m[2]; - starAsSpecifiers.add(specifier); - addSymbol({ - name: nsName, - kind: 'variable', - isDefault: false, - }); - } - } - - // 13. export * from '...' (barrel re-export) - // Collect the specifiers. If a resolver is provided, we resolve them - // recursively. Otherwise we just skip them (backward-compatible). - const barrelSpecifiers: string[] = []; - { - const re = new RegExp(EXPORT_STAR_RE.source, 'g'); - let m: RegExpExecArray | null; - while ((m = re.exec(stripped)) !== null) { - const specifier = m[1]; - // Skip if this specifier was already captured by export * as ns from '...' 
- if (!starAsSpecifiers.has(specifier)) { - barrelSpecifiers.push(specifier); - } - } - } - - // If there are no barrel specifiers or no resolver, return synchronously - if (barrelSpecifiers.length === 0 || fileResolver === null || depth >= MAX_BARREL_DEPTH) { - return { filePath, symbols }; - } - - // Mark current file as visited to prevent circular re-exports - const normalizedPath = filePath.replace(/\\/g, '/'); - if (visited.has(normalizedPath)) { - return { filePath, symbols }; - } - visited.add(normalizedPath); - - // Resolve barrel re-exports (potentially async) - const resolveBarrels = async (): Promise => { - for (const specifier of barrelSpecifiers) { - const resolved = await fileResolver(specifier, filePath); - if (resolved === null) { - continue; - } - - const { content: targetContent, resolvedPath: targetPath } = resolved; - const normalizedTargetPath = targetPath.replace(/\\/g, '/'); - - // Skip if we've already visited this file (circular re-export) - if (visited.has(normalizedTargetPath)) { - continue; - } - - // Recursively parse the target file's exports - const targetExports = await parseExportsInternal( - targetContent, - targetPath, - fileResolver, - visited, - depth + 1, - ); - - // Add all non-default symbols from the target - // (export * does NOT re-export default) - for (const sym of targetExports.symbols) { - if (!sym.isDefault) { - addSymbol(sym); - } - } - } - - return { filePath, symbols }; - }; - - return resolveBarrels(); -} - -/** Return type for diffExports / diffExportsAsync. */ -export interface ExportDiffResult { - removed: ExportedSymbol[]; - added: ExportedSymbol[]; - modified: Array<{ before: ExportedSymbol; after: ExportedSymbol }>; -} - -/** - * Compare exports between the base and head version of a file. 
- * - * Returns: - * - removed: symbols present in base but missing from head - * - added: symbols present in head but missing from base - * - modified: symbols present in both but whose signature changed - */ -export function diffExports( - basePath: string, - baseContent: string, - headContent: string, -): ExportDiffResult { - const baseExports = parseExports(baseContent, basePath); - const headExports = parseExports(headContent, basePath); - - return computeDiff(baseExports, headExports); -} - -/** - * Async version of diffExports that supports barrel re-export resolution. - */ -export async function diffExportsAsync( - basePath: string, - baseContent: string, - headContent: string, - fileResolver?: FileResolver | null, -): Promise { - const [baseExports, headExports] = await Promise.all([ - parseExportsAsync(baseContent, basePath, fileResolver), - parseExportsAsync(headContent, basePath, fileResolver), - ]); - - return computeDiff(baseExports, headExports); -} - -/** - * Compute the diff between two sets of file exports. - */ -function computeDiff( - baseExports: FileExports, - headExports: FileExports, -): ExportDiffResult { - // Build lookup maps keyed by (name + isDefault) for accurate matching - const baseMap = new Map(); - for (const sym of baseExports.symbols) { - const key = sym.isDefault ? `default::${sym.name}` : sym.name; - baseMap.set(key, sym); - } - - const headMap = new Map(); - for (const sym of headExports.symbols) { - const key = sym.isDefault ? `default::${sym.name}` : sym.name; - headMap.set(key, sym); - } - - const removed: ExportedSymbol[] = []; - const added: ExportedSymbol[] = []; - const modified: Array<{ before: ExportedSymbol; after: ExportedSymbol }> = []; - - // Find removed and modified symbols - for (const [key, baseSym] of baseMap) { - const headSym = headMap.get(key); - if (!headSym) { - removed.push(baseSym); - } else { - // Check if signature or kind changed - const baseSig = baseSym.signature ?? 
''; - const headSig = headSym.signature ?? ''; - const kindChanged = baseSym.kind !== headSym.kind; - const sigChanged = baseSig !== headSig; - - if (kindChanged || sigChanged) { - modified.push({ before: baseSym, after: headSym }); - } - } - } - - // Find added symbols - for (const [key, headSym] of headMap) { - if (!baseMap.has(key)) { - added.push(headSym); - } - } - - return { removed, added, modified }; -} diff --git a/packages/core/src/breaking/signature-differ.ts b/packages/core/src/breaking/signature-differ.ts deleted file mode 100644 index 25c475b..0000000 --- a/packages/core/src/breaking/signature-differ.ts +++ /dev/null @@ -1,214 +0,0 @@ -/** - * Compare function/method signatures between two versions of a symbol. - * - * Signatures are expected in the form: `(param1: Type1, param2: Type2): ReturnType` - * This module performs structural comparison by splitting parameters and return types. - */ - -/** - * Normalize whitespace in a signature fragment for consistent comparison. - */ -function normalize(s: string): string { - return s.replace(/\s+/g, ' ').trim(); -} - -/** - * Split a parameter list string into individual parameters, respecting - * nested angle brackets, parentheses, and square brackets so that - * generics like `Map` are not split on the inner comma. - */ -function splitParameters(paramStr: string): string[] { - const params: string[] = []; - let depth = 0; - let current = ''; - - for (const ch of paramStr) { - if (ch === '<' || ch === '(' || ch === '[' || ch === '{') { - depth++; - current += ch; - } else if (ch === '>' || ch === ')' || ch === ']' || ch === '}') { - depth--; - current += ch; - } else if (ch === ',' && depth === 0) { - const trimmed = current.trim(); - if (trimmed) params.push(trimmed); - current = ''; - } else { - current += ch; - } - } - - const trimmed = current.trim(); - if (trimmed) params.push(trimmed); - - return params; -} - -/** - * Extract the parameter list and return type from a signature string. 
- * - * Input: `(a: string, b: number): boolean` - * Output: { params: ['a: string', 'b: number'], returnType: 'boolean' } - */ -function parseSignature(sig: string): { - params: string[]; - returnType: string | null; -} { - const trimmed = normalize(sig); - - // Find the matching closing paren for the opening paren - if (!trimmed.startsWith('(')) { - return { params: [], returnType: null }; - } - - let depth = 0; - let closeIndex = -1; - - for (let i = 0; i < trimmed.length; i++) { - const ch = trimmed[i]; - if (ch === '(') depth++; - else if (ch === ')') { - depth--; - if (depth === 0) { - closeIndex = i; - break; - } - } - } - - if (closeIndex === -1) { - // Malformed signature — treat entire string as params - return { params: splitParameters(trimmed.slice(1)), returnType: null }; - } - - const paramStr = trimmed.slice(1, closeIndex); - const params = paramStr.length > 0 ? splitParameters(paramStr) : []; - - // Everything after `)` should be `: ReturnType` - const rest = trimmed.slice(closeIndex + 1).trim(); - let returnType: string | null = null; - - if (rest.startsWith(':')) { - returnType = normalize(rest.slice(1)); - } - - return { params, returnType }; -} - -/** - * Extract just the type portion from a parameter declaration. - * `name: Type` → `Type` - * `name?: Type` → `Type` - * `...name: Type` → `Type` - * If there's no `:`, returns the raw parameter string. - */ -function extractParamType(param: string): string { - // Handle rest parameters - const cleaned = param.replace(/^\.\.\./, '').trim(); - - // Find the colon that separates name from type (not inside angle brackets etc.) 
- let depth = 0; - for (let i = 0; i < cleaned.length; i++) { - const ch = cleaned[i]; - if (ch === '<' || ch === '(' || ch === '[' || ch === '{') depth++; - else if (ch === '>' || ch === ')' || ch === ']' || ch === '}') depth--; - else if (ch === ':' && depth === 0) { - return normalize(cleaned.slice(i + 1)); - } - } - - return normalize(cleaned); -} - -export interface SignatureDiffResult { - changed: boolean; - details: string; -} - -/** - * Compare two function/method signatures and produce a human-readable - * description of what changed. - * - * @param baseSig - The signature from the base (old) version, e.g. `(a: string): void` - * @param headSig - The signature from the head (new) version - * @returns An object with `changed` (boolean) and `details` (string describing the change) - */ -export function diffSignatures( - baseSig: string | undefined, - headSig: string | undefined, -): SignatureDiffResult { - // Both undefined → no signature to compare - if (baseSig === undefined && headSig === undefined) { - return { changed: false, details: 'no signatures to compare' }; - } - - // One exists, other doesn't - if (baseSig === undefined) { - return { changed: true, details: 'signature added' }; - } - if (headSig === undefined) { - return { changed: true, details: 'signature removed' }; - } - - // Quick equality check after normalization - const normalizedBase = normalize(baseSig); - const normalizedHead = normalize(headSig); - - if (normalizedBase === normalizedHead) { - return { changed: false, details: 'signatures are identical' }; - } - - // Parse both signatures for structural comparison - const baseParsed = parseSignature(normalizedBase); - const headParsed = parseSignature(normalizedHead); - - const differences: string[] = []; - - // Compare parameter counts - const baseCount = baseParsed.params.length; - const headCount = headParsed.params.length; - - if (baseCount !== headCount) { - differences.push( - `parameter count changed from ${baseCount} to 
${headCount}`, - ); - } - - // Compare individual parameter types (up to the smaller count) - const minCount = Math.min(baseCount, headCount); - for (let i = 0; i < minCount; i++) { - const baseType = extractParamType(baseParsed.params[i]); - const headType = extractParamType(headParsed.params[i]); - - if (baseType !== headType) { - const baseName = baseParsed.params[i].split(':')[0].replace(/[?.]/g, '').trim(); - differences.push( - `parameter '${baseName}' type changed from '${baseType}' to '${headType}'`, - ); - } - } - - // Compare return types - const baseReturn = baseParsed.returnType; - const headReturn = headParsed.returnType; - - if (baseReturn !== headReturn) { - if (baseReturn === null) { - differences.push(`return type added: '${headReturn}'`); - } else if (headReturn === null) { - differences.push(`return type removed (was '${baseReturn}')`); - } else { - differences.push( - `return type changed from '${baseReturn}' to '${headReturn}'`, - ); - } - } - - if (differences.length === 0) { - // The normalized strings differ but our structural comparison didn't catch it; - // report a generic change. - return { changed: true, details: 'signature changed' }; - } - - return { changed: true, details: differences.join('; ') }; -} diff --git a/packages/core/src/coverage/coverage-checker.ts b/packages/core/src/coverage/coverage-checker.ts deleted file mode 100644 index 8bf9941..0000000 --- a/packages/core/src/coverage/coverage-checker.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { ChangedFile, TestCoverageReport, TestCoverageGap } from '../types.js'; -import { mapTestFiles } from './test-mapper.js'; - -/** - * Checks whether changed source files have corresponding test changes in the - * same PR. Returns a report with a coverage ratio and a list of "gaps" -- - * source files whose tests were not updated. 
- */ -export async function checkTestCoverage( - repoPath: string, - changedFiles: ChangedFile[], -): Promise { - const sourceFiles = changedFiles.filter((f) => f.category === 'source'); - const changedTestPaths = new Set( - changedFiles.filter((f) => f.category === 'test').map((f) => f.path), - ); - - if (sourceFiles.length === 0) { - return { - changedSourceFiles: 0, - sourceFilesWithTestChanges: 0, - coverageRatio: 1, - gaps: [], - }; - } - - const gaps: TestCoverageGap[] = []; - let sourceFilesWithTestChanges = 0; - - for (const source of sourceFiles) { - const expectedTestFiles = await mapTestFiles(repoPath, source.path); - const testFileExists = expectedTestFiles.length > 0; - const testFileChanged = expectedTestFiles.some((t) => - changedTestPaths.has(t), - ); - - if (testFileChanged) { - sourceFilesWithTestChanges++; - } else { - gaps.push({ - sourceFile: source.path, - expectedTestFiles, - testFileExists, - testFileChanged: false, - }); - } - } - - const coverageRatio = - sourceFiles.length > 0 - ? sourceFilesWithTestChanges / sourceFiles.length - : 0; - - return { - changedSourceFiles: sourceFiles.length, - sourceFilesWithTestChanges, - coverageRatio, - gaps, - }; -} diff --git a/packages/core/src/coverage/test-mapper.ts b/packages/core/src/coverage/test-mapper.ts deleted file mode 100644 index 60afe1a..0000000 --- a/packages/core/src/coverage/test-mapper.ts +++ /dev/null @@ -1,103 +0,0 @@ -import fg from 'fast-glob'; -import { posix as path } from 'node:path'; - -/** - * Maps a source file to its expected test file paths using common naming - * conventions, then returns only those that actually exist on disk. - * - * Conventions checked (given e.g. `src/utils/parser.ts`): - * 1. Same directory: src/utils/parser.test.ts, src/utils/parser.spec.ts - * 2. __tests__ sibling dir: src/utils/__tests__/parser.ts, src/utils/__tests__/parser.test.ts - * 3. Top-level test dirs: test/utils/parser.ts, tests/utils/parser.test.ts - * 4. 
All of the above with .js/.jsx/.tsx variants as well. - */ -export async function mapTestFiles( - repoPath: string, - sourceFile: string, -): Promise { - const candidates = buildCandidatePaths(sourceFile); - - if (candidates.length === 0) { - return []; - } - - // fast-glob expects forward-slash patterns and a cwd - const existing = await fg(candidates, { - cwd: repoPath, - dot: false, - onlyFiles: true, - }); - - return existing; -} - -// --------------------------------------------------------------------------- -// Internal helpers -// --------------------------------------------------------------------------- - -/** Extensions we consider valid for test files. */ -const TEST_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx'] as const; - -/** - * Build all candidate test file paths for a given source file. - * Paths are returned as repo-relative with forward slashes. - */ -function buildCandidatePaths(sourceFile: string): string[] { - const normalized = sourceFile.replace(/\\/g, '/'); - const dir = path.dirname(normalized); - const ext = path.extname(normalized); - const base = path.basename(normalized, ext); - - // Derive the relative part after the first conventional source directory - // (e.g. `src/`). This is used for top-level test directories. - const subPath = stripLeadingSourceDir(normalized); - const subDir = path.dirname(subPath); - const candidates: string[] = []; - - for (const testExt of TEST_EXTENSIONS) { - // --- 1. Same directory with .test / .spec suffix ---------------------- - candidates.push(path.join(dir, `${base}.test${testExt}`)); - candidates.push(path.join(dir, `${base}.spec${testExt}`)); - - // --- 2. __tests__ sibling directory ----------------------------------- - const testsDir = path.join(dir, '__tests__'); - candidates.push(path.join(testsDir, `${base}${testExt}`)); - candidates.push(path.join(testsDir, `${base}.test${testExt}`)); - candidates.push(path.join(testsDir, `${base}.spec${testExt}`)); - - // --- 3. 
Top-level test / tests directories ---------------------------- - for (const topDir of ['test', 'tests']) { - candidates.push(path.join(topDir, subDir, `${base}${testExt}`)); - candidates.push(path.join(topDir, subDir, `${base}.test${testExt}`)); - candidates.push(path.join(topDir, subDir, `${base}.spec${testExt}`)); - } - } - - // Deduplicate (some paths may overlap when dir === '.') - return [...new Set(candidates)]; -} - -/** - * Strips a leading conventional source directory prefix such as `src/` so that - * we can reconstruct paths relative to a top-level `test/` directory. - * - * Examples: - * `src/utils/parser.ts` -> `utils/parser.ts` - * `lib/core/index.ts` -> `core/index.ts` - * `packages/foo/src/a.ts` -> `a.ts` (strips up to and including src/) - * `utils/parser.ts` -> `utils/parser.ts` (no prefix to strip) - */ -function stripLeadingSourceDir(filePath: string): string { - // Look for the last occurrence of a conventional source dir segment. - const srcIndex = filePath.lastIndexOf('src/'); - if (srcIndex !== -1) { - return filePath.slice(srcIndex + 'src/'.length); - } - - const libIndex = filePath.lastIndexOf('lib/'); - if (libIndex !== -1) { - return filePath.slice(libIndex + 'lib/'.length); - } - - return filePath; -} diff --git a/packages/core/src/diff/diff-parser.ts b/packages/core/src/diff/diff-parser.ts deleted file mode 100644 index 7ca5f60..0000000 --- a/packages/core/src/diff/diff-parser.ts +++ /dev/null @@ -1,157 +0,0 @@ -import simpleGit from 'simple-git'; -import { ChangedFile } from '../types.js'; -import { categorizeFile } from './file-categorizer.js'; - -/** Extended diff result that includes categorized file arrays present at runtime. 
*/ -interface DiffResultWithCategories { - created?: string[]; - deleted?: string[]; - renamed?: string[]; -} - -const EXTENSION_LANGUAGE_MAP: Record = { - '.ts': 'typescript', - '.tsx': 'typescript', - '.js': 'javascript', - '.jsx': 'javascript', - '.mjs': 'javascript', - '.cjs': 'javascript', - '.py': 'python', - '.go': 'go', - '.rs': 'rust', - '.java': 'java', - '.c': 'c', - '.cpp': 'cpp', - '.h': 'c', - '.hpp': 'cpp', - '.rb': 'ruby', - '.php': 'php', - '.swift': 'swift', - '.kt': 'kotlin', - '.scala': 'scala', - '.cs': 'csharp', - '.vue': 'vue', - '.svelte': 'svelte', - '.md': 'markdown', - '.mdx': 'markdown', - '.json': 'json', - '.yaml': 'yaml', - '.yml': 'yaml', - '.toml': 'toml', - '.xml': 'xml', - '.html': 'html', - '.css': 'css', - '.scss': 'scss', - '.less': 'less', - '.sql': 'sql', - '.sh': 'shell', - '.bash': 'shell', - '.zsh': 'shell', - '.dockerfile': 'dockerfile', - '.graphql': 'graphql', - '.gql': 'graphql', - '.proto': 'protobuf', - '.txt': 'text', - '.rst': 'restructuredtext', -}; - -export function detectLanguage(filePath: string): string { - const fileName = filePath.split('/').pop() ?? ''; - const lowerName = fileName.toLowerCase(); - - if (lowerName === 'dockerfile') return 'dockerfile'; - if (lowerName === 'makefile') return 'makefile'; - - const lastDot = filePath.lastIndexOf('.'); - if (lastDot === -1) return 'unknown'; - - const ext = filePath.slice(lastDot).toLowerCase(); - return EXTENSION_LANGUAGE_MAP[ext] ?? 'unknown'; -} - -/** - * Resolves a file path from a diff entry, handling renames. - * simple-git may report renames as "old => new" or "{prefix/old => prefix/new}/suffix". - * Returns { newPath, oldPath } where oldPath is set only for renames. - */ -function resolveFilePath(raw: string): { newPath: string; oldPath?: string } { - // Handle brace-style renames: "dir/{old.ts => new.ts}" or "{old => new}/file.ts" - const braceMatch = raw.match(/^(.*?)\{(.+?) 
=> (.+?)\}(.*)$/); - if (braceMatch) { - const [, prefix, oldPart, newPart, suffix] = braceMatch; - const oldPath = `${prefix}${oldPart}${suffix}`.replace(/\/\//g, '/'); - const newPath = `${prefix}${newPart}${suffix}`.replace(/\/\//g, '/'); - return { newPath, oldPath }; - } - - // Handle simple renames: "old.ts => new.ts" - const simpleMatch = raw.match(/^(.+?) => (.+?)$/); - if (simpleMatch) { - return { newPath: simpleMatch[2], oldPath: simpleMatch[1] }; - } - - return { newPath: raw }; -} - -function determineStatus( - filePath: string, - created: string[], - deleted: string[], - renamed: string[], -): ChangedFile['status'] { - if (created.includes(filePath)) return 'added'; - if (deleted.includes(filePath)) return 'deleted'; - if (renamed.includes(filePath)) return 'renamed'; - return 'modified'; -} - -export async function parseDiff( - repoPath: string, - base: string, - head: string, -): Promise { - const git = simpleGit(repoPath); - const diffSummary = await git.diffSummary([`${base}..${head}`]); - - // Build lookup sets from the categorized arrays in the diff summary. - // simple-git provides .created, .deleted, .renamed as arrays of file paths. - const { created, deleted, renamed } = diffSummary as typeof diffSummary & DiffResultWithCategories; - const createdFiles: string[] = created ?? []; - const deletedFiles: string[] = deleted ?? []; - const renamedFiles: string[] = renamed ?? []; - - const changedFiles: ChangedFile[] = []; - - for (const file of diffSummary.files) { - const { newPath, oldPath } = resolveFilePath(file.file); - - const status = determineStatus( - file.file, - createdFiles, - deletedFiles, - renamedFiles, - ); - - // If we detected a rename from the path pattern but simple-git didn't flag it, - // treat it as renamed when oldPath is present. - const finalStatus: ChangedFile['status'] = - status === 'modified' && oldPath ? 
'renamed' : status; - - const changedFile: ChangedFile = { - path: newPath, - status: finalStatus, - additions: 'insertions' in file ? file.insertions : 0, - deletions: 'deletions' in file ? file.deletions : 0, - language: detectLanguage(newPath), - category: categorizeFile(newPath), - }; - - if (oldPath) { - changedFile.oldPath = oldPath; - } - - changedFiles.push(changedFile); - } - - return changedFiles; -} diff --git a/packages/core/src/diff/file-categorizer.ts b/packages/core/src/diff/file-categorizer.ts deleted file mode 100644 index b962c0d..0000000 --- a/packages/core/src/diff/file-categorizer.ts +++ /dev/null @@ -1,103 +0,0 @@ -import { ChangedFile } from '../types.js'; - -const SOURCE_EXTENSIONS = new Set([ - '.ts', '.tsx', '.js', '.jsx', - '.py', '.go', '.rs', '.java', - '.c', '.cpp', '.h', - '.rb', '.php', '.swift', - '.kt', '.scala', '.cs', - '.vue', '.svelte', -]); - -const DOC_EXTENSIONS = new Set(['.md', '.mdx', '.txt', '.rst']); - -const CONFIG_FILENAMES = new Set([ - 'package.json', - 'tsconfig.json', - 'turbo.json', - 'dockerfile', - 'makefile', - '.gitignore', - '.npmrc', - 'pnpm-workspace.yaml', - 'pnpm-lock.yaml', - 'yarn.lock', - 'package-lock.json', -]); - -const CONFIG_PREFIXES = [ - '.eslintrc', - '.prettierrc', - 'webpack.config.', - 'vite.config.', - 'jest.config.', - 'vitest.config.', - 'docker-compose.', - '.env', -]; - -function isTestFile(filePath: string): boolean { - const normalized = filePath.replace(/\\/g, '/'); - const fileName = normalized.split('/').pop() ?? 
''; - - return ( - normalized.includes('__tests__/') || - normalized.includes('__tests__\\') || - normalized.includes('/test/') || - normalized.includes('/tests/') || - fileName.includes('.test.') || - fileName.includes('.spec.') || - fileName.startsWith('test') - ); -} - -function isDocFile(filePath: string): boolean { - const normalized = filePath.replace(/\\/g, '/'); - const ext = getExtension(filePath); - - return ( - DOC_EXTENSIONS.has(ext) || - normalized.startsWith('docs/') || - normalized.startsWith('doc/') - ); -} - -function isConfigFile(filePath: string): boolean { - const normalized = filePath.replace(/\\/g, '/'); - const fileName = (normalized.split('/').pop() ?? '').toLowerCase(); - - if (normalized.startsWith('.github/')) { - return true; - } - - if (CONFIG_FILENAMES.has(fileName)) { - return true; - } - - for (const prefix of CONFIG_PREFIXES) { - if (fileName.startsWith(prefix)) { - return true; - } - } - - return false; -} - -function isSourceFile(filePath: string): boolean { - const ext = getExtension(filePath); - return SOURCE_EXTENSIONS.has(ext); -} - -function getExtension(filePath: string): string { - const lastDot = filePath.lastIndexOf('.'); - if (lastDot === -1) return ''; - return filePath.slice(lastDot).toLowerCase(); -} - -export function categorizeFile(filePath: string): ChangedFile['category'] { - if (isTestFile(filePath)) return 'test'; - if (isDocFile(filePath)) return 'doc'; - if (isConfigFile(filePath)) return 'config'; - if (isSourceFile(filePath)) return 'source'; - return 'other'; -} diff --git a/packages/core/src/docs/staleness-checker.ts b/packages/core/src/docs/staleness-checker.ts deleted file mode 100644 index f0289ef..0000000 --- a/packages/core/src/docs/staleness-checker.ts +++ /dev/null @@ -1,345 +0,0 @@ -import simpleGit from 'simple-git'; -import fg from 'fast-glob'; -import { readFile } from 'node:fs/promises'; -import { join as joinPath } from 'node:path'; -import { ChangedFile, DocStalenessReport, StaleReference } 
from '../types.js'; - -/** - * Checks whether documentation files in the repository reference symbols or - * file paths that were deleted, removed, or renamed in the current change set. - */ -export async function checkDocStaleness( - repoPath: string, - changedFiles: ChangedFile[], - baseBranch: string, - headBranch: string, -): Promise { - const git = simpleGit(repoPath); - - // ---- 1. Discover all doc files in the repo at HEAD --------------------- - const docPatterns = ['**/*.md', '**/*.mdx']; - const docFiles = await fg(docPatterns, { - cwd: repoPath, - ignore: ['**/node_modules/**'], - dot: false, - onlyFiles: true, - }); - - if (docFiles.length === 0) { - return { staleReferences: [], checkedFiles: [] }; - } - - // ---- 2. Collect references we want to search for ---------------------- - const deletedPaths = buildDeletedPaths(changedFiles); - const renamedPaths = buildRenamedPaths(changedFiles); - const removedSymbols = await collectRemovedSymbols( - git, - changedFiles, - baseBranch, - headBranch, - ); - - // If there is nothing to look for, short-circuit. - if ( - deletedPaths.length === 0 && - renamedPaths.length === 0 && - removedSymbols.length === 0 - ) { - return { staleReferences: [], checkedFiles: docFiles }; - } - - // ---- 3. Pre-compile symbol regexes for efficient scanning ---------------- - // For generic names (index, types, utils, etc.) use a stricter regex that - // only matches when the name appears in a file-path or import context, - // avoiding false positives from ordinary English prose. - const symbolPatterns = removedSymbols.map((sym) => ({ - ...sym, - regex: sym.isGeneric - ? buildPathContextRegex(sym.name) - : new RegExp(`\\b${escapeRegex(sym.name)}\\b`), - })); - - // ---- 4. 
Scan doc files for stale references --------------------------- - const staleReferences: StaleReference[] = []; - - for (const docFile of docFiles) { - const content = await safeReadFile(repoPath, docFile, git, headBranch); - if (content === null) { - continue; - } - - const lines = content.split('\n'); - - for (let i = 0; i < lines.length; i++) { - const line = lines[i]; - const lineNumber = i + 1; - - // Check deleted file paths - for (const dp of deletedPaths) { - if (line.includes(dp)) { - staleReferences.push({ - docFile, - line: lineNumber, - reference: dp, - reason: 'referenced file was deleted', - }); - } - } - - // Check renamed file paths (old path) - for (const rp of renamedPaths) { - if (line.includes(rp.oldPath)) { - staleReferences.push({ - docFile, - line: lineNumber, - reference: rp.oldPath, - reason: `referenced file was renamed to ${rp.newPath}`, - }); - } - } - - // Check removed symbols (word-boundary match, pre-compiled) - for (const sym of symbolPatterns) { - if (sym.regex.test(line)) { - staleReferences.push({ - docFile, - line: lineNumber, - reference: sym.name, - reason: `referenced symbol was removed from ${sym.sourceFile}`, - }); - } - } - } - } - - return { staleReferences, checkedFiles: docFiles }; -} - -// --------------------------------------------------------------------------- -// Internal helpers -// --------------------------------------------------------------------------- - -interface RemovedSymbol { - name: string; - sourceFile: string; - /** When true, only match in file-path or import contexts (not standalone prose). */ - isGeneric?: boolean; -} - -interface RenamedPath { - oldPath: string; - newPath: string; -} - -/** Collect file paths that were deleted. */ -function buildDeletedPaths(changedFiles: ChangedFile[]): string[] { - return changedFiles - .filter((f) => f.status === 'deleted') - .map((f) => f.path); -} - -/** Collect old paths from renames. 
*/ -function buildRenamedPaths(changedFiles: ChangedFile[]): RenamedPath[] { - return changedFiles - .filter((f) => f.status === 'renamed' && f.oldPath) - .map((f) => ({ oldPath: f.oldPath!, newPath: f.path })); -} - -/** - * For deleted source files, derive the filename stem as a potential reference. - * For modified source files, diff the exported symbols between base and head - * to find removed exports. - */ -async function collectRemovedSymbols( - git: ReturnType, - changedFiles: ChangedFile[], - baseBranch: string, - headBranch: string, -): Promise { - const removed: RemovedSymbol[] = []; - - for (const file of changedFiles) { - if (file.category !== 'source') { - continue; - } - - if (file.status === 'deleted') { - // Use the filename stem as a symbol reference. - // Generic names (e.g. "types", "utils") are still tracked but flagged - // so the scanner uses a stricter path-context-only regex for them. - const stem = filenameStem(file.path); - if (stem) { - removed.push({ - name: stem, - sourceFile: file.path, - isGeneric: isGenericName(stem), - }); - } - - // Also extract exported symbols from the base version - const baseContent = await safeShowFile(git, baseBranch, file.path); - if (baseContent) { - for (const sym of extractExportedSymbolNames(baseContent)) { - removed.push({ name: sym, sourceFile: file.path }); - } - } - } else if (file.status === 'modified') { - const baseContent = await safeShowFile(git, baseBranch, file.path); - const headContent = await safeShowFile(git, headBranch, file.path); - - if (baseContent) { - const baseSymbols = extractExportedSymbolNames(baseContent); - const headSymbols = new Set( - headContent ? extractExportedSymbolNames(headContent) : [], - ); - - for (const sym of baseSymbols) { - if (!headSymbols.has(sym)) { - removed.push({ name: sym, sourceFile: file.path }); - } - } - } - } - } - - return removed; -} - -/** - * Regex-based extraction of exported symbol names from TypeScript/JavaScript - * source code. 
Matches patterns like: - * export function foo( - * export async function* bar( - * export class Baz - * export abstract class Baz - * export declare class Baz - * export const qux - * export let quux - * export var quuz - * export type Foo - * export interface Bar - * export enum Status - * export const enum Direction - * export default function foo( - * export default class Bar - */ -const EXPORT_REGEX = - /export\s+(?:default\s+)?(?:declare\s+)?(?:async\s+)?(?:abstract\s+)?(?:function\s*\*?\s*|class\s+|const\s+enum\s+|const\s+|let\s+|var\s+|type\s+|interface\s+|enum\s+)([A-Za-z_$][A-Za-z0-9_$]*)/g; - -function extractExportedSymbolNames(content: string): string[] { - const names: string[] = []; - let match: RegExpExecArray | null; - - const regex = new RegExp(EXPORT_REGEX.source, EXPORT_REGEX.flags); - - while ((match = regex.exec(content)) !== null) { - const name = match[1]; - if (name) { - names.push(name); - } - } - - return [...new Set(names)]; -} - -/** Get the filename without extension. */ -function filenameStem(filePath: string): string { - const name = filePath.replace(/\\/g, '/').split('/').pop() ?? ''; - const dotIndex = name.indexOf('.'); - return dotIndex === -1 ? name : name.slice(0, dotIndex); -} - -/** Filter out overly generic file stems that would cause false positives. */ -function isGenericName(name: string): boolean { - const GENERIC = new Set([ - 'index', - 'main', - 'app', - 'mod', - 'lib', - 'utils', - 'helpers', - 'types', - 'constants', - 'config', - ]); - return GENERIC.has(name.toLowerCase()); -} - -/** - * Build a regex that matches a generic name only when it appears in a - * file-path or import/require context inside documentation. This avoids - * false positives where common English words like "index" or "main" appear - * naturally in prose. 
- * - * Matched patterns (examples for name = "types"): - * File path contexts: - * ./types ../types src/types path/to/types - * types.ts types.js types.mjs types.d.ts - * types/ /types - * Import / require contexts: - * from './types' from "../types" from 'types' - * import types import { x } from "types" - * require('./types') require("types") - * Code-fenced / backtick references: - * `types` `types.ts` - */ -function buildPathContextRegex(name: string): RegExp { - const n = escapeRegex(name); - - // Each alternative captures a distinct context where a generic name is - // actually a code / file reference rather than plain English. - const alternatives = [ - // 1. Preceded by a path separator: ./types, ../types, foo/types - `(?:[./\\\\])${n}\\b`, - // 2. Followed by a file extension: types.ts, types.js, types.mjs, types.d.ts - `\\b${n}(?:\\.d)?\\.(?:ts|js|mjs|cjs|tsx|jsx|mts|cts)\\b`, - // 3. Followed by a directory separator: types/ - `\\b${n}/`, - // 4. Inside an import/require statement: - // from 'types' | from './types' | import types | require('types') - `(?:from\\s+['"\`][^'"\`]*\\b${n}\\b[^'"\`]*['"\`])`, - `(?:import\\s+${n}\\b)`, - `(?:require\\s*\\(\\s*['"\`][^'"\`]*\\b${n}\\b[^'"\`]*['"\`]\\s*\\))`, - // 5. Inside backticks in Markdown: `types` or `types.ts` - `\`[^\`]*\\b${n}\\b[^\`]*\``, - ]; - - return new RegExp(alternatives.join('|'), 'i'); -} - -/** Safely read a file from a git branch, returning null on failure. */ -async function safeShowFile( - git: ReturnType, - branch: string, - filePath: string, -): Promise { - try { - return await git.show(`${branch}:${filePath}`); - } catch { - return null; - } -} - -/** - * Read a doc file -- first try the working tree (fs), fall back to git show - * at headBranch if the file is not on disk (e.g. running in a detached state). 
- */ -async function safeReadFile( - repoPath: string, - relPath: string, - git: ReturnType, - headBranch: string, -): Promise { - try { - return await readFile(joinPath(repoPath, relPath), 'utf-8'); - } catch { - // File might not be on disk if we're on a different branch; try git show - return safeShowFile(git, headBranch, relPath); - } -} - -/** Escape special regex characters in a string. */ -function escapeRegex(str: string): string { - return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); -} diff --git a/packages/core/src/impact/impact-graph.ts b/packages/core/src/impact/impact-graph.ts deleted file mode 100644 index 7f50d4b..0000000 --- a/packages/core/src/impact/impact-graph.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { ChangedFile, ImpactGraph, ImpactEdge } from '../types.js'; -import { buildReverseDependencyMap, ReverseDependencyMap } from '../imports/import-resolver.js'; - -/** - * Build an impact graph showing which files are directly changed and which - * are indirectly affected through import dependencies. - * - * Uses BFS over a reverse dependency map (dependents) to find files that - * transitively depend on the changed files, up to `maxDepth` levels. - * - * When a pre-built `reverseDependencyMap` is provided, the expensive repo scan - * is skipped and the map is used directly. Otherwise the scan is performed - * internally (backward-compatible). - */ -export async function buildImpactGraph( - repoPath: string, - changedFiles: ChangedFile[], - maxDepth: number = 3, - reverseDependencyMap?: ReverseDependencyMap, -): Promise { - // 1. Build or reuse the reverse dependency map - const reverseDeps = reverseDependencyMap ?? await buildReverseDependencyMap(repoPath); - - // 2. Identify directly changed source files - const directlyChanged = changedFiles - .filter((f) => f.category === 'source') - .map((f) => f.path); - - const directlyChangedSet = new Set(directlyChanged); - - // 3. 
BFS traversal over reverse dependencies to find indirectly affected files - const visited = new Set(directlyChanged); - const edges: ImpactEdge[] = []; - let frontier = [...directlyChanged]; - - for (let depth = 0; depth < maxDepth && frontier.length > 0; depth++) { - const nextFrontier: string[] = []; - - for (const file of frontier) { - const dependents = reverseDeps.get(file); - if (!dependents) { - continue; - } - - for (const dependent of dependents) { - edges.push({ - from: dependent, - to: file, - type: 'imports', - }); - - if (!visited.has(dependent)) { - visited.add(dependent); - nextFrontier.push(dependent); - } - } - } - - frontier = nextFrontier; - } - - // 4. Indirectly affected = visited files minus the directly changed ones - const indirectlyAffected = [...visited].filter( - (f) => !directlyChangedSet.has(f), - ); - - return { - directlyChanged, - indirectlyAffected, - edges, - }; -} diff --git a/packages/core/src/imports/import-resolver.ts b/packages/core/src/imports/import-resolver.ts deleted file mode 100644 index 9c7057c..0000000 --- a/packages/core/src/imports/import-resolver.ts +++ /dev/null @@ -1,182 +0,0 @@ -import fg from 'fast-glob'; -import { readFile } from 'fs/promises'; -import { resolve, relative, dirname } from 'path'; - -/** - * Regex patterns for extracting import paths from TypeScript/JavaScript files. - */ -export const STATIC_IMPORT_RE = /(?:import|export)\s+(?:[\s\S]*?\s+from\s+)?['"]([^'"]+)['"]/g; -export const DYNAMIC_IMPORT_RE = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g; -export const REQUIRE_RE = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g; - -export const RESOLVE_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx']; -export const INDEX_FILES = ['index.ts', 'index.tsx', 'index.js', 'index.jsx']; - -/** - * Extract all import paths from a file's content. 
- */ -export function extractImportPaths(content: string): string[] { - const paths: string[] = []; - - for (const re of [STATIC_IMPORT_RE, DYNAMIC_IMPORT_RE, REQUIRE_RE]) { - const pattern = new RegExp(re.source, re.flags); - let match: RegExpExecArray | null; - while ((match = pattern.exec(content)) !== null) { - paths.push(match[1]); - } - } - - return paths; -} - -/** - * Check if an import path is relative (starts with . or ..). - */ -export function isRelativeImport(importPath: string): boolean { - return importPath.startsWith('./') || importPath.startsWith('../'); -} - -/** - * Resolve a relative import to a repo-relative path by trying various - * extensions and index file patterns. - * - * Returns the repo-relative path if a matching file exists in the file set, - * or null if the import cannot be resolved. - */ -export function resolveImport( - importPath: string, - importerRepoRelPath: string, - allFiles: Set, -): string | null { - const importerDir = dirname(importerRepoRelPath); - const resolved = resolve('/', importerDir, importPath).slice(1); - - // Normalize: remove leading slash if present - const normalized = resolved.startsWith('/') ? resolved.slice(1) : resolved; - - // 1. Exact match (already has extension) - if (allFiles.has(normalized)) { - return normalized; - } - - // 2. Try appending each extension - for (const ext of RESOLVE_EXTENSIONS) { - const candidate = normalized + ext; - if (allFiles.has(candidate)) { - return candidate; - } - } - - // 3. Try as directory with index file - for (const indexFile of INDEX_FILES) { - const candidate = normalized + '/' + indexFile; - if (allFiles.has(candidate)) { - return candidate; - } - } - - return null; -} - -/** - * A reverse dependency map: key = file that is imported, - * value = list of files that import it. - */ -export type ReverseDependencyMap = Map; - -/** - * Scan the entire repo and build a reverse dependency map. 
- * - * The map keys are repo-relative file paths; each value is the list of - * repo-relative paths of files that import that key. - * - * This is the expensive I/O step (fast-glob + batch file reads) that both - * `findConsumers()` and `buildImpactGraph()` need. By running it once in - * `analyzePR()` and passing the result to both consumers, the repo scan - * is not duplicated. - */ -export async function buildReverseDependencyMap( - repoPath: string, -): Promise { - // Discover all source files in the repo - const absolutePaths = await fg('**/*.{ts,tsx,js,jsx}', { - cwd: repoPath, - ignore: ['**/node_modules/**', '**/dist/**', '**/.git/**'], - absolute: true, - }); - - const repoRelativePaths = absolutePaths.map((abs) => relative(repoPath, abs)); - const allFilesSet = new Set(repoRelativePaths); - - const reverseDeps: ReverseDependencyMap = new Map(); - - // Scan files in batches to avoid EMFILE - const BATCH_SIZE = 50; - for (let i = 0; i < repoRelativePaths.length; i += BATCH_SIZE) { - const batch = repoRelativePaths.slice(i, i + BATCH_SIZE); - await Promise.all( - batch.map(async (relPath) => { - const absPath = resolve(repoPath, relPath); - let content: string; - try { - content = await readFile(absPath, 'utf-8'); - } catch { - return; // skip unreadable files - } - - const importPaths = extractImportPaths(content); - - for (const importPath of importPaths) { - if (!isRelativeImport(importPath)) { - continue; - } - - const resolved = resolveImport(importPath, relPath, allFilesSet); - if (resolved === null) { - continue; - } - - let dependents = reverseDeps.get(resolved); - if (!dependents) { - dependents = []; - reverseDeps.set(resolved, dependents); - } - dependents.push(relPath); - } - }), - ); - } - - return reverseDeps; -} - -/** - * Find all source files that import from the given set of file paths. - * - * Returns a map: target file path -> list of consumer file paths that import it. 
- * - * When a pre-built `reverseDependencyMap` is provided, the expensive repo scan - * is skipped and the map is used directly. Otherwise the scan is performed - * internally (backward-compatible). - */ -export async function findConsumers( - repoPath: string, - targetFiles: Set, - reverseDependencyMap?: ReverseDependencyMap, -): Promise> { - const consumers = new Map(); - for (const target of targetFiles) { - consumers.set(target, []); - } - - const reverseDeps = reverseDependencyMap ?? await buildReverseDependencyMap(repoPath); - - for (const target of targetFiles) { - const dependents = reverseDeps.get(target); - if (dependents) { - consumers.set(target, [...dependents]); - } - } - - return consumers; -} diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts deleted file mode 100644 index 751cfe7..0000000 --- a/packages/core/src/index.ts +++ /dev/null @@ -1,33 +0,0 @@ -export type { - PRAnalysis, - AnalysisOptions, - ChangedFile, - BreakingChange, - TestCoverageReport, - TestCoverageGap, - DocStalenessReport, - StaleReference, - ImpactGraph, - ImpactEdge, - RiskAssessment, - RiskFactor, - ExportedSymbol, - FileExports, -} from './types.js'; - -export { analyzePR, resolveDefaultBaseBranch } from './analyzer.js'; -export { parseDiff, detectLanguage } from './diff/diff-parser.js'; -export { categorizeFile } from './diff/file-categorizer.js'; -export { detectBreakingChanges } from './breaking/detector.js'; -export { diffExports, diffExportsAsync, parseExports, parseExportsAsync } from './breaking/export-differ.js'; -export type { FileResolver, ExportDiffResult } from './breaking/export-differ.js'; -export { diffSignatures } from './breaking/signature-differ.js'; -export { mapTestFiles } from './coverage/test-mapper.js'; -export { checkTestCoverage } from './coverage/coverage-checker.js'; -export { checkDocStaleness } from './docs/staleness-checker.js'; -export { buildImpactGraph } from './impact/impact-graph.js'; -export { calculateRisk } from 
'./risk/risk-calculator.js'; -export { formatMarkdown } from './output/markdown-reporter.js'; -export { formatJSON } from './output/json-reporter.js'; -export { extractImportPaths, isRelativeImport, resolveImport, findConsumers, buildReverseDependencyMap } from './imports/import-resolver.js'; -export type { ReverseDependencyMap } from './imports/import-resolver.js'; diff --git a/packages/core/src/output/json-reporter.ts b/packages/core/src/output/json-reporter.ts deleted file mode 100644 index d12aceb..0000000 --- a/packages/core/src/output/json-reporter.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { PRAnalysis } from '../types.js'; - -/** - * Format a PRAnalysis result as a pretty-printed JSON string. - */ -export function formatJSON(analysis: PRAnalysis): string { - return JSON.stringify(analysis, null, 2); -} diff --git a/packages/core/src/output/markdown-reporter.ts b/packages/core/src/output/markdown-reporter.ts deleted file mode 100644 index 2ce7a27..0000000 --- a/packages/core/src/output/markdown-reporter.ts +++ /dev/null @@ -1,153 +0,0 @@ -import { PRAnalysis } from '../types.js'; - -/** - * Format a PRAnalysis result as a readable Markdown report suitable for - * posting as a PR comment or writing to a file. 
- */ -export function formatMarkdown(analysis: PRAnalysis): string { - const sections: string[] = []; - - // ── Header ────────────────────────────────────────────────────────────────── - sections.push('# PR Impact Analysis'); - sections.push(''); - sections.push(`**Repository:** ${analysis.repoPath}`); - sections.push(`**Comparing:** \`${analysis.baseBranch}\` ← \`${analysis.headBranch}\``); - - // ── Risk Score ────────────────────────────────────────────────────────────── - sections.push(''); - sections.push(`## Risk Score: ${analysis.riskScore.score}/100 (${analysis.riskScore.level})`); - sections.push(''); - - if (analysis.riskScore.factors.length > 0) { - sections.push('| Factor | Score | Weight |'); - sections.push('|--------|------:|-------:|'); - - for (const factor of analysis.riskScore.factors) { - sections.push(`| ${factor.name} | ${factor.score} | ${factor.weight} |`); - } - } else { - sections.push('No risk factors identified.'); - } - - // ── Summary ───────────────────────────────────────────────────────────────── - sections.push(''); - sections.push('## Summary'); - sections.push(''); - sections.push(analysis.summary); - - // ── Changed Files ─────────────────────────────────────────────────────────── - sections.push(''); - sections.push(`## Changed Files (${analysis.changedFiles.length})`); - sections.push(''); - - if (analysis.changedFiles.length > 0) { - sections.push('| File | Status | +/- | Category |'); - sections.push('|------|--------|-----|----------|'); - - for (const file of analysis.changedFiles) { - const change = `+${file.additions}/-${file.deletions}`; - sections.push(`| ${file.path} | ${file.status} | ${change} | ${file.category} |`); - } - } else { - sections.push('No files changed.'); - } - - // ── Breaking Changes ──────────────────────────────────────────────────────── - sections.push(''); - sections.push(`## Breaking Changes (${analysis.breakingChanges.length})`); - sections.push(''); - - if (analysis.breakingChanges.length > 
0) { - sections.push('| Symbol | Type | Severity | File |'); - sections.push('|--------|------|----------|------|'); - - for (const bc of analysis.breakingChanges) { - const typeLabel = formatBreakingChangeType(bc.type); - sections.push(`| ${bc.symbolName} | ${typeLabel} | ${bc.severity} | ${bc.filePath} |`); - } - } else { - sections.push('No breaking changes detected.'); - } - - // ── Test Coverage ─────────────────────────────────────────────────────────── - sections.push(''); - sections.push('## Test Coverage'); - sections.push(''); - - const coveragePercent = Math.round(analysis.testCoverage.coverageRatio * 100); - sections.push(`- **Changed source files:** ${analysis.testCoverage.changedSourceFiles}`); - sections.push(`- **Files with test changes:** ${analysis.testCoverage.sourceFilesWithTestChanges}`); - sections.push(`- **Coverage ratio:** ${coveragePercent}%`); - - if (analysis.testCoverage.gaps.length > 0) { - sections.push(''); - sections.push('### Gaps'); - sections.push(''); - - for (const gap of analysis.testCoverage.gaps) { - const testStatus = gap.testFileExists - ? 
'test file exists but was not changed' - : 'no test file found'; - sections.push(`- **${gap.sourceFile}** — ${testStatus}`); - - if (gap.expectedTestFiles.length > 0) { - for (const tf of gap.expectedTestFiles) { - sections.push(` - ${tf}`); - } - } - } - } - - // ── Documentation Staleness ───────────────────────────────────────────────── - sections.push(''); - sections.push('## Documentation Staleness'); - sections.push(''); - - if (analysis.docStaleness.staleReferences.length > 0) { - for (const ref of analysis.docStaleness.staleReferences) { - sections.push(`- **${ref.docFile}** (line ${ref.line}): \`${ref.reference}\` — ${ref.reason}`); - } - } else { - sections.push('No stale references found.'); - } - - // ── Impact Graph ──────────────────────────────────────────────────────────── - sections.push(''); - sections.push('## Impact Graph'); - sections.push(''); - sections.push(`- **Directly changed:** ${analysis.impactGraph.directlyChanged.length} file${analysis.impactGraph.directlyChanged.length === 1 ? '' : 's'}`); - sections.push(`- **Indirectly affected:** ${analysis.impactGraph.indirectlyAffected.length} file${analysis.impactGraph.indirectlyAffected.length === 1 ? '' : 's'}`); - - if (analysis.impactGraph.edges.length > 0) { - sections.push(''); - sections.push('### Dependency Edges'); - sections.push(''); - - for (const edge of analysis.impactGraph.edges) { - sections.push(`- ${edge.from} → ${edge.to} (\`${edge.type}\`)`); - } - } - - // Final newline - sections.push(''); - - return sections.join('\n'); -} - -/** - * Convert a breaking change type enum value into a human-readable label. 
- */ -function formatBreakingChangeType( - type: 'removed_export' | 'changed_signature' | 'changed_type' | 'renamed_export', -): string { - switch (type) { - case 'removed_export': - return 'removed export'; - case 'changed_signature': - return 'changed signature'; - case 'changed_type': - return 'changed type'; - case 'renamed_export': - return 'renamed export'; - } -} diff --git a/packages/core/src/risk/factors.ts b/packages/core/src/risk/factors.ts deleted file mode 100644 index 9c480de..0000000 --- a/packages/core/src/risk/factors.ts +++ /dev/null @@ -1,239 +0,0 @@ -import { - RiskFactor, - ChangedFile, - BreakingChange, - TestCoverageReport, - DocStalenessReport, - ImpactGraph, -} from '../types.js'; - -/** - * CI/build config file patterns that represent high-risk configuration changes. - */ -const CI_BUILD_CONFIG_PATTERNS = [ - /^\.github\//, - /Dockerfile/i, - /docker-compose/i, - /webpack\.config/, - /vite\.config/, - /rollup\.config/, - /esbuild\.config/, - /turbo\.json$/, - /\.gitlab-ci/, - /Jenkinsfile/i, - /\.circleci\//, -]; - -/** - * Evaluate the risk factor for breaking API changes. - * - * Weight: 0.30 - * Score: 100 if any high-severity, 60 if medium, 30 if low-only, 0 if none. 
- */ -export function evaluateBreakingChangesFactor( - breakingChanges: BreakingChange[], -): RiskFactor { - if (breakingChanges.length === 0) { - return { - name: 'Breaking changes', - score: 0, - weight: 0.30, - description: 'No breaking API changes detected.', - }; - } - - const hasHigh = breakingChanges.some((bc) => bc.severity === 'high'); - const hasMedium = breakingChanges.some((bc) => bc.severity === 'medium'); - - let score: number; - if (hasHigh) { - score = 100; - } else if (hasMedium) { - score = 60; - } else { - score = 30; - } - - const details = breakingChanges.map( - (bc) => `${bc.type} of "${bc.symbolName}" in ${bc.filePath} (${bc.severity})`, - ); - - return { - name: 'Breaking changes', - score, - weight: 0.30, - description: `${breakingChanges.length} breaking change(s) detected.`, - details, - }; -} - -/** - * Evaluate the risk factor for untested source changes. - * - * Weight: 0.25 - * Score: (1 - coverageRatio) * 100 - */ -export function evaluateUntestedChangesFactor( - coverage: TestCoverageReport, -): RiskFactor { - const score = coverage.changedSourceFiles === 0 - ? 0 - : (1 - coverage.coverageRatio) * 100; - - const details: string[] = []; - if (coverage.gaps.length > 0) { - for (const gap of coverage.gaps) { - const testStatus = gap.testFileExists - ? 'test exists but not updated' - : 'no test file found'; - details.push(`${gap.sourceFile}: ${testStatus}`); - } - } - - const description = - coverage.changedSourceFiles === 0 - ? 'No source files changed.' - : `${coverage.sourceFilesWithTestChanges}/${coverage.changedSourceFiles} changed source files have corresponding test changes.`; - - return { - name: 'Untested changes', - score, - weight: 0.25, - description, - ...(details.length > 0 ? { details } : {}), - }; -} - -/** - * Evaluate the risk factor based on the overall diff size. 
- * - * Weight: 0.15 - * Score: 0 if <100 lines, 50 if 100-500, 80 if 500-1000, 100 if >1000 - */ -export function evaluateDiffSizeFactor(changedFiles: ChangedFile[]): RiskFactor { - const totalLines = changedFiles.reduce( - (sum, f) => sum + f.additions + f.deletions, - 0, - ); - - let score: number; - if (totalLines > 1000) { - score = 100; - } else if (totalLines >= 500) { - score = 80; - } else if (totalLines >= 100) { - score = 50; - } else { - score = 0; - } - - return { - name: 'Diff size', - score, - weight: 0.15, - description: `${totalLines} total lines changed across ${changedFiles.length} file(s).`, - }; -} - -/** - * Evaluate the risk factor for stale documentation references. - * - * Weight: 0.10 - * Score: min(staleReferences.length * 20, 100) - */ -export function evaluateDocStalenessFactor( - staleness: DocStalenessReport, -): RiskFactor { - const score = Math.min(staleness.staleReferences.length * 20, 100); - - const details = - staleness.staleReferences.length > 0 - ? staleness.staleReferences.map( - (ref) => `${ref.docFile}:${ref.line} - "${ref.reference}" (${ref.reason})`, - ) - : undefined; - - const description = - staleness.staleReferences.length === 0 - ? 'No stale documentation references found.' - : `${staleness.staleReferences.length} stale documentation reference(s) found.`; - - return { - name: 'Stale documentation', - score, - weight: 0.10, - description, - ...(details ? { details } : {}), - }; -} - -/** - * Evaluate the risk factor for configuration file changes. - * - * Weight: 0.10 - * Score: 100 if CI/build config changed, 50 if other config, 0 if none. 
- */ -export function evaluateConfigChangesFactor( - changedFiles: ChangedFile[], -): RiskFactor { - const configFiles = changedFiles.filter((f) => f.category === 'config'); - - if (configFiles.length === 0) { - return { - name: 'Config file changes', - score: 0, - weight: 0.10, - description: 'No configuration files changed.', - }; - } - - const hasCiBuildConfig = configFiles.some((f) => - CI_BUILD_CONFIG_PATTERNS.some((pattern) => pattern.test(f.path)), - ); - - const score = hasCiBuildConfig ? 100 : 50; - - const details = configFiles.map((f) => f.path); - - const description = hasCiBuildConfig - ? `CI/build configuration changed (${configFiles.length} config file(s)).` - : `${configFiles.length} configuration file(s) changed.`; - - return { - name: 'Config file changes', - score, - weight: 0.10, - description, - details, - }; -} - -/** - * Evaluate the risk factor based on how many files are indirectly affected - * through the import dependency graph. - * - * Weight: 0.10 - * Score: min(indirectlyAffected.length * 10, 100) - */ -export function evaluateImpactBreadthFactor( - impact: ImpactGraph, -): RiskFactor { - const count = impact.indirectlyAffected.length; - const score = Math.min(count * 10, 100); - - const description = - count === 0 - ? 'No indirectly affected files detected.' - : `${count} file(s) indirectly affected through import dependencies.`; - - const details = - count > 0 ? impact.indirectlyAffected.slice(0, 20) : undefined; - - return { - name: 'Impact breadth', - score, - weight: 0.10, - description, - ...(details ? 
{ details } : {}), - }; -} diff --git a/packages/core/src/risk/risk-calculator.ts b/packages/core/src/risk/risk-calculator.ts deleted file mode 100644 index 5cee91e..0000000 --- a/packages/core/src/risk/risk-calculator.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { - RiskAssessment, - ChangedFile, - BreakingChange, - TestCoverageReport, - DocStalenessReport, - ImpactGraph, -} from '../types.js'; -import { - evaluateBreakingChangesFactor, - evaluateUntestedChangesFactor, - evaluateDiffSizeFactor, - evaluateDocStalenessFactor, - evaluateConfigChangesFactor, - evaluateImpactBreadthFactor, -} from './factors.js'; - -/** - * Determine the risk level label from a numeric score. - * - * 0-25 -> low - * 26-50 -> medium - * 51-75 -> high - * 76+ -> critical - */ -function scoreToLevel(score: number): RiskAssessment['level'] { - if (score <= 25) return 'low'; - if (score <= 50) return 'medium'; - if (score <= 75) return 'high'; - return 'critical'; -} - -/** - * Calculate a weighted risk assessment from all individual risk factors. - * - * Formula: total_score = sum(factor_score * factor_weight) / sum(factor_weight) - * - * The final score is rounded to the nearest integer. 
- */ -export function calculateRisk( - changedFiles: ChangedFile[], - breakingChanges: BreakingChange[], - testCoverage: TestCoverageReport, - docStaleness: DocStalenessReport, - impactGraph: ImpactGraph, -): RiskAssessment { - const factors = [ - evaluateBreakingChangesFactor(breakingChanges), - evaluateUntestedChangesFactor(testCoverage), - evaluateDiffSizeFactor(changedFiles), - evaluateDocStalenessFactor(docStaleness), - evaluateConfigChangesFactor(changedFiles), - evaluateImpactBreadthFactor(impactGraph), - ]; - - const weightedSum = factors.reduce( - (sum, factor) => sum + factor.score * factor.weight, - 0, - ); - - const totalWeight = factors.reduce( - (sum, factor) => sum + factor.weight, - 0, - ); - - const score = Math.round(weightedSum / totalWeight); - const level = scoreToLevel(score); - - return { - score, - level, - factors, - }; -} diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts deleted file mode 100644 index 05b5de1..0000000 --- a/packages/core/src/types.ts +++ /dev/null @@ -1,114 +0,0 @@ -// ── Top-level analysis result ── -export interface PRAnalysis { - repoPath: string; - baseBranch: string; - headBranch: string; - changedFiles: ChangedFile[]; - breakingChanges: BreakingChange[]; - testCoverage: TestCoverageReport; - docStaleness: DocStalenessReport; - impactGraph: ImpactGraph; - riskScore: RiskAssessment; - summary: string; -} - -// ── Analysis options ── -export interface AnalysisOptions { - repoPath: string; - baseBranch?: string; - headBranch?: string; - skipBreaking?: boolean; - skipCoverage?: boolean; - skipDocs?: boolean; -} - -// ── Diff layer ── -export interface ChangedFile { - path: string; - status: 'added' | 'modified' | 'deleted' | 'renamed'; - oldPath?: string; - additions: number; - deletions: number; - language: string; - category: 'source' | 'test' | 'doc' | 'config' | 'other'; -} - -// ── Breaking changes layer ── -export interface BreakingChange { - filePath: string; - type: 'removed_export' | 
'changed_signature' | 'changed_type' | 'renamed_export'; - symbolName: string; - before: string; - after: string | null; - severity: 'high' | 'medium' | 'low'; - consumers: string[]; -} - -// ── Test coverage layer ── -export interface TestCoverageReport { - changedSourceFiles: number; - sourceFilesWithTestChanges: number; - coverageRatio: number; - gaps: TestCoverageGap[]; -} - -export interface TestCoverageGap { - sourceFile: string; - expectedTestFiles: string[]; - testFileExists: boolean; - testFileChanged: boolean; -} - -// ── Doc staleness layer ── -export interface DocStalenessReport { - staleReferences: StaleReference[]; - checkedFiles: string[]; -} - -export interface StaleReference { - docFile: string; - line: number; - reference: string; - reason: string; -} - -// ── Impact graph layer ── -export interface ImpactGraph { - directlyChanged: string[]; - indirectlyAffected: string[]; - edges: ImpactEdge[]; -} - -export interface ImpactEdge { - from: string; - to: string; - type: 'imports'; -} - -// ── Risk assessment layer ── -export interface RiskAssessment { - score: number; - level: 'low' | 'medium' | 'high' | 'critical'; - factors: RiskFactor[]; -} - -export interface RiskFactor { - name: string; - score: number; - weight: number; - description: string; - details?: string[]; -} - -// ── Internal types ── -export interface ExportedSymbol { - name: string; - kind: 'function' | 'class' | 'variable' | 'type' | 'interface' | 'enum' | 'const'; - signature?: string; - isDefault: boolean; -} - -export interface FileExports { - filePath: string; - symbols: ExportedSymbol[]; -} diff --git a/packages/mcp-server/CHANGELOG.md b/packages/mcp-server/CHANGELOG.md deleted file mode 100644 index 365a6b3..0000000 --- a/packages/mcp-server/CHANGELOG.md +++ /dev/null @@ -1,28 +0,0 @@ -# @pr-impact/mcp-server - -## 0.2.1 - -### Patch Changes - -- 047e429: Add consumer-facing adoption guides: getting-started, programmatic API, configuration guide, troubleshooting, and 
CONTRIBUTING.md. Expand CI integration docs with GitLab CI, CircleCI, and Jenkins examples. -- Updated dependencies [047e429] - - @pr-impact/core@0.2.1 - -## 0.2.0 - -### Minor Changes - -- b31721c: Initial release of pr-impact — static analysis for pull requests. - - - Breaking change detection (removed exports, changed signatures, renamed exports) - - Import-dependency impact graph with blast radius mapping - - Test coverage gap analysis - - Documentation staleness checking - - Weighted risk scoring (6 factors, 0-100 scale) - - CLI with analyze, breaking, risk, impact, and comment commands - - MCP server exposing all analysis tools to AI assistants - -### Patch Changes - -- Updated dependencies [b31721c] - - @pr-impact/core@0.2.0 diff --git a/packages/mcp-server/CLAUDE.md b/packages/mcp-server/CLAUDE.md deleted file mode 100644 index caf29a9..0000000 --- a/packages/mcp-server/CLAUDE.md +++ /dev/null @@ -1,39 +0,0 @@ -# CLAUDE.md -- @pr-impact/mcp-server - -## What this package does - -MCP server exposing pr-impact analysis as tools for AI assistants. Uses stdio transport. Binary is `pr-impact-mcp`. Depends on `@pr-impact/core` for all analysis logic. - -## Quick commands - -```bash -pnpm build # Build with tsup -node dist/index.js # Run server (reads stdin, writes stdout) -``` - -## Source layout - -``` -src/ - index.ts Server entry point (McpServer + StdioServerTransport) - tools/ - analyze-diff.ts analyze_diff -- full PR analysis - get-breaking-changes.ts get_breaking_changes -- breaking changes with severity filter - get-risk-score.ts get_risk_score -- risk score with factor breakdown - get-impact-graph.ts get_impact_graph -- import dependency graph -``` - -## Key conventions - -- ESM only. Use `.js` extensions in all import paths. -- All analysis logic comes from `@pr-impact/core` -- tools only handle input validation and output formatting. -- Tool input schemas use `zod`. All parameters are optional with sensible defaults. 
-- Each tool returns `{ content: [{ type: 'text', text }] }` on success, adds `isError: true` on failure. -- Server handles SIGINT/SIGTERM for graceful shutdown via `server.close()`. -- Mock `McpServer` in tests must include a `close()` method to avoid unhandled rejection during teardown. - -## Dependencies - -- `@modelcontextprotocol/sdk` -- MCP protocol server implementation -- `zod` -- input schema validation -- `@pr-impact/core` -- analysis engine diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md deleted file mode 100644 index 3407916..0000000 --- a/packages/mcp-server/README.md +++ /dev/null @@ -1,87 +0,0 @@ -# @pr-impact/mcp-server - -[Model Context Protocol](https://modelcontextprotocol.io/) server that exposes pr-impact analysis tools to AI assistants like Claude Code, Cursor, and other MCP-compatible clients. - -## Install - -```bash -npm install -g @pr-impact/mcp-server -``` - -## Setup - -### Claude Code - -Add to `.claude/mcp.json`: - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### Claude Desktop - -Add to your Claude Desktop config: - -```json -{ - "mcpServers": { - "pr-impact": { - "command": "npx", - "args": ["-y", "@pr-impact/mcp-server"] - } - } -} -``` - -### Cursor / VS Code - -Follow the editor's MCP server configuration to add: - -``` -command: npx -args: -y @pr-impact/mcp-server -``` - -## Available Tools - -| Tool | Description | -|---|---| -| `analyze_diff` | Full PR analysis -- breaking changes, test coverage, doc staleness, impact graph, and risk score | -| `get_breaking_changes` | Detect breaking API changes with optional severity filtering | -| `get_risk_score` | Calculate risk score with full factor breakdown | -| `get_impact_graph` | Build import-dependency graph showing directly changed and indirectly affected files | - -### Parameters - -All tools accept these optional parameters: - -| Parameter | Type | Description | 
-|---|---|---| -| `repoPath` | `string` | Path to git repo (defaults to cwd) | -| `baseBranch` | `string` | Base branch (defaults to main/master) | -| `headBranch` | `string` | Head branch (defaults to HEAD) | - -Additional tool-specific parameters: - -- **`get_breaking_changes`**: `minSeverity` (`"low"` | `"medium"` | `"high"`) -- filter by minimum severity -- **`get_impact_graph`**: `filePath` (`string`) -- focus on a specific file; `depth` (`number`) -- max graph traversal depth - -## Transport - -Uses **stdio** transport. The server reads from stdin and writes to stdout, which is the standard transport for local MCP servers. - -## Requirements - -- Node.js >= 20 -- Must be run inside (or pointed at) a git repository - -## License - -[MIT](../../LICENSE) diff --git a/packages/mcp-server/__tests__/formatting.test.ts b/packages/mcp-server/__tests__/formatting.test.ts deleted file mode 100644 index 9674e10..0000000 --- a/packages/mcp-server/__tests__/formatting.test.ts +++ /dev/null @@ -1,191 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { formatBreakingChange } from '../src/tools/get-breaking-changes.js'; -import { formatImpactGraph } from '../src/tools/get-impact-graph.js'; -import type { BreakingChange, ImpactGraph } from '@pr-impact/core'; - -describe('MCP server formatting', () => { - describe('formatBreakingChange', () => { - it('formats a breaking change with consumers', () => { - const bc: BreakingChange = { - filePath: 'src/utils.ts', - type: 'removed_export', - symbolName: 'helper', - before: 'function helper(x: number)', - after: null, - severity: 'high', - consumers: ['src/app.ts', 'src/main.ts'], - }; - - const result = formatBreakingChange(bc); - expect(result).toContain('**helper**'); - expect(result).toContain('`src/utils.ts`'); - expect(result).toContain('Severity: high'); - expect(result).toContain('Before: `function helper(x: number)`'); - expect(result).toContain('After: (removed)'); - expect(result).toContain('`src/app.ts`'); 
- expect(result).toContain('`src/main.ts`'); - }); - - it('formats a breaking change without consumers', () => { - const bc: BreakingChange = { - filePath: 'src/lib.ts', - type: 'changed_signature', - symbolName: 'compute', - before: 'function compute(a: number)', - after: 'function compute(a: string)', - severity: 'medium', - consumers: [], - }; - - const result = formatBreakingChange(bc); - expect(result).toContain('**compute**'); - expect(result).toContain('After: `function compute(a: string)`'); - expect(result).not.toContain('Consumers:'); - }); - - it('includes the type and severity in the output', () => { - const bc: BreakingChange = { - filePath: 'src/types.ts', - type: 'changed_type', - symbolName: 'Config', - before: 'type Config = { port: number }', - after: 'type Config = { port: string }', - severity: 'low', - consumers: [], - }; - - const result = formatBreakingChange(bc); - expect(result).toContain('Type: changed_type'); - expect(result).toContain('Severity: low'); - }); - - it('formats the before value in backticks', () => { - const bc: BreakingChange = { - filePath: 'src/api.ts', - type: 'renamed_export', - symbolName: 'getData', - before: 'function getData()', - after: 'function fetchData()', - severity: 'medium', - consumers: ['src/client.ts'], - }; - - const result = formatBreakingChange(bc); - expect(result).toContain('Before: `function getData()`'); - expect(result).toContain('After: `function fetchData()`'); - }); - - it('wraps each consumer in backticks', () => { - const bc: BreakingChange = { - filePath: 'src/core.ts', - type: 'removed_export', - symbolName: 'init', - before: 'function init()', - after: null, - severity: 'high', - consumers: ['src/a.ts', 'src/b.ts'], - }; - - const result = formatBreakingChange(bc); - expect(result).toContain('Consumers: `src/a.ts`, `src/b.ts`'); - }); - }); - - describe('formatImpactGraph', () => { - it('formats a full impact graph', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts', 
'src/b.ts'], - indirectlyAffected: ['src/c.ts'], - edges: [ - { from: 'src/c.ts', to: 'src/a.ts', type: 'imports' }, - ], - }; - - const result = formatImpactGraph(graph); - expect(result).toContain('## Impact Graph'); - expect(result).toContain('Directly Changed (2)'); - expect(result).toContain('`src/a.ts`'); - expect(result).toContain('`src/b.ts`'); - expect(result).toContain('Indirectly Affected (1)'); - expect(result).toContain('`src/c.ts`'); - expect(result).toContain('Dependency Edges (1)'); - }); - - it('formats graph focused on a directly changed file', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { from: 'src/b.ts', to: 'src/a.ts', type: 'imports' }, - ], - }; - - const result = formatImpactGraph(graph, 'src/a.ts'); - expect(result).toContain('## Impact Graph for `src/a.ts`'); - expect(result).toContain('**directly changed**'); - }); - - it('formats graph focused on an indirectly affected file', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { from: 'src/b.ts', to: 'src/a.ts', type: 'imports' }, - ], - }; - - const result = formatImpactGraph(graph, 'src/b.ts'); - expect(result).toContain('## Impact Graph for `src/b.ts`'); - expect(result).toContain('**indirectly affected**'); - }); - - it('reports when file is not affected', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: [], - edges: [], - }; - - const result = formatImpactGraph(graph, 'src/z.ts'); - expect(result).toContain('not affected'); - }); - - it('handles empty graph', () => { - const graph: ImpactGraph = { - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }; - - const result = formatImpactGraph(graph); - expect(result).toContain('No files directly changed'); - expect(result).toContain('No files indirectly affected'); - }); - - it('shows relevant edges for a focused file', () => { - const 
graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [ - { from: 'src/b.ts', to: 'src/a.ts', type: 'imports' }, - { from: 'src/a.ts', to: 'src/b.ts', type: 'imports' }, - ], - }; - - const result = formatImpactGraph(graph, 'src/a.ts'); - expect(result).toContain('Dependencies'); - expect(result).toContain('`src/b.ts`'); - }); - - it('does not show dependency edges section when edges list is empty', () => { - const graph: ImpactGraph = { - directlyChanged: ['src/a.ts'], - indirectlyAffected: [], - edges: [], - }; - - const result = formatImpactGraph(graph); - expect(result).not.toContain('Dependency Edges'); - }); - }); -}); diff --git a/packages/mcp-server/__tests__/server-registration.test.ts b/packages/mcp-server/__tests__/server-registration.test.ts deleted file mode 100644 index cc581ab..0000000 --- a/packages/mcp-server/__tests__/server-registration.test.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; - -// ── Track tool registrations ── -const registeredTools: string[] = []; - -// ── Mock all tool registration modules ── -vi.mock('../src/tools/analyze-diff.js', () => ({ - registerAnalyzeDiffTool: () => { registeredTools.push('analyze_diff'); }, -})); -vi.mock('../src/tools/get-breaking-changes.js', () => ({ - registerGetBreakingChangesTool: () => { registeredTools.push('get_breaking_changes'); }, -})); -vi.mock('../src/tools/get-risk-score.js', () => ({ - registerGetRiskScoreTool: () => { registeredTools.push('get_risk_score'); }, -})); -vi.mock('../src/tools/get-impact-graph.js', () => ({ - registerGetImpactGraphTool: () => { registeredTools.push('get_impact_graph'); }, -})); - -// ── Mock McpServer ── -vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => ({ - McpServer: vi.fn().mockImplementation(() => ({ - tool: vi.fn(), - connect: vi.fn().mockResolvedValue(undefined), - close: vi.fn().mockResolvedValue(undefined), - })), -})); - -// ── Mock 
StdioServerTransport ── -vi.mock('@modelcontextprotocol/sdk/server/stdio.js', () => ({ - StdioServerTransport: vi.fn().mockImplementation(() => ({})), -})); - -// ── Mock createRequire for version reading ── -vi.mock('module', () => ({ - createRequire: () => () => ({ version: '0.1.0' }), -})); - -describe('MCP server registration', () => { - beforeEach(() => { - registeredTools.length = 0; - }); - - it('registers all four tools', async () => { - // Dynamically import to trigger module-level code - await import('../src/index.js'); - - expect(registeredTools).toContain('analyze_diff'); - expect(registeredTools).toContain('get_breaking_changes'); - expect(registeredTools).toContain('get_risk_score'); - expect(registeredTools).toContain('get_impact_graph'); - expect(registeredTools).toHaveLength(4); - }); -}); diff --git a/packages/mcp-server/__tests__/tools/analyze-diff.test.ts b/packages/mcp-server/__tests__/tools/analyze-diff.test.ts deleted file mode 100644 index 68d551b..0000000 --- a/packages/mcp-server/__tests__/tools/analyze-diff.test.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { registerAnalyzeDiffTool } from '../../src/tools/analyze-diff.js'; -import type { PRAnalysis } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockAnalyzePR = vi.fn(); -const mockFormatMarkdown = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - analyzePR: (...args: unknown[]) => mockAnalyzePR(...args), - formatMarkdown: (...args: unknown[]) => mockFormatMarkdown(...args), -})); - -// ── Mock McpServer ── -type ToolHandler = (params: Record) => Promise; - -function createMockServer() { - const tools: Map = new Map(); - return { - tool: (name: string, description: string, schema: unknown, handler: ToolHandler) => { - tools.set(name, { description, schema, handler }); - }, - getRegisteredTool: (name: string) => tools.get(name), - getRegisteredTools: () => tools, - }; -} - -// ── Helpers ── -function 
makePRAnalysis(): PRAnalysis { - return { - repoPath: '/repo', - baseBranch: 'main', - headBranch: 'HEAD', - changedFiles: [], - breakingChanges: [], - testCoverage: { changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 1, gaps: [] }, - docStaleness: { staleReferences: [], checkedFiles: [] }, - impactGraph: { directlyChanged: [], indirectlyAffected: [], edges: [] }, - riskScore: { score: 10, level: 'low', factors: [] }, - summary: 'Test summary', - }; -} - -describe('analyze_diff tool', () => { - let server: ReturnType; - - beforeEach(() => { - vi.clearAllMocks(); - server = createMockServer(); - registerAnalyzeDiffTool(server as never); - mockAnalyzePR.mockResolvedValue(makePRAnalysis()); - mockFormatMarkdown.mockReturnValue('# Analysis Report'); - }); - - it('registers the tool with correct name and description', () => { - const tool = server.getRegisteredTool('analyze_diff'); - expect(tool).toBeDefined(); - expect(tool!.description).toContain('full PR impact analysis'); - }); - - it('defines schema with repoPath, baseBranch, headBranch as optional', () => { - const tool = server.getRegisteredTool('analyze_diff'); - expect(tool!.schema).toHaveProperty('repoPath'); - expect(tool!.schema).toHaveProperty('baseBranch'); - expect(tool!.schema).toHaveProperty('headBranch'); - }); - - it('calls analyzePR with provided parameters', async () => { - const tool = server.getRegisteredTool('analyze_diff')!; - await tool.handler({ - repoPath: '/custom/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - - expect(mockAnalyzePR).toHaveBeenCalledWith({ - repoPath: '/custom/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - }); - - it('defaults repoPath to process.cwd() when not provided', async () => { - const tool = server.getRegisteredTool('analyze_diff')!; - await tool.handler({}); - - expect(mockAnalyzePR).toHaveBeenCalledWith({ - repoPath: process.cwd(), - baseBranch: undefined, - headBranch: undefined, - }); - }); - - it('returns 
formatted markdown in content', async () => { - const tool = server.getRegisteredTool('analyze_diff')!; - const result = await tool.handler({ repoPath: '/repo' }) as { content: Array<{ type: string; text: string }> }; - - expect(mockFormatMarkdown).toHaveBeenCalledWith(makePRAnalysis()); - expect(result.content).toHaveLength(1); - expect(result.content[0]).toEqual({ type: 'text', text: '# Analysis Report' }); - }); - - it('returns error content when analyzePR throws an Error', async () => { - mockAnalyzePR.mockRejectedValue(new Error('git not found')); - - const tool = server.getRegisteredTool('analyze_diff')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('Error analyzing diff: git not found'); - }); - - it('returns error content when analyzePR throws a non-Error value', async () => { - mockAnalyzePR.mockRejectedValue('string error'); - - const tool = server.getRegisteredTool('analyze_diff')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('string error'); - }); -}); diff --git a/packages/mcp-server/__tests__/tools/get-breaking-changes.test.ts b/packages/mcp-server/__tests__/tools/get-breaking-changes.test.ts deleted file mode 100644 index df4a245..0000000 --- a/packages/mcp-server/__tests__/tools/get-breaking-changes.test.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { registerGetBreakingChangesTool } from '../../src/tools/get-breaking-changes.js'; -import type { BreakingChange, ChangedFile } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockParseDiff = vi.fn(); -const mockDetectBreakingChanges = vi.fn(); -const mockResolveDefaultBaseBranch = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - 
parseDiff: (...args: unknown[]) => mockParseDiff(...args), - detectBreakingChanges: (...args: unknown[]) => mockDetectBreakingChanges(...args), - resolveDefaultBaseBranch: (...args: unknown[]) => mockResolveDefaultBaseBranch(...args), -})); - -// ── Mock McpServer ── -type ToolHandler = (params: Record) => Promise; - -function createMockServer() { - const tools: Map = new Map(); - return { - tool: (name: string, description: string, schema: unknown, handler: ToolHandler) => { - tools.set(name, { description, schema, handler }); - }, - getRegisteredTool: (name: string) => tools.get(name), - getRegisteredTools: () => tools, - }; -} - -// ── Helpers ── -function makeChangedFile(overrides: Partial = {}): ChangedFile { - return { - path: 'src/utils.ts', - status: 'modified', - additions: 10, - deletions: 2, - language: 'typescript', - category: 'source', - ...overrides, - }; -} - -function makeBreakingChange(overrides: Partial = {}): BreakingChange { - return { - filePath: 'src/utils.ts', - type: 'removed_export', - symbolName: 'helper', - before: 'function helper()', - after: null, - severity: 'high', - consumers: ['src/app.ts'], - ...overrides, - }; -} - -describe('get_breaking_changes tool', () => { - let server: ReturnType; - - beforeEach(() => { - vi.clearAllMocks(); - server = createMockServer(); - registerGetBreakingChangesTool(server as never); - mockResolveDefaultBaseBranch.mockResolvedValue('main'); - mockParseDiff.mockResolvedValue([makeChangedFile()]); - mockDetectBreakingChanges.mockResolvedValue([]); - }); - - it('registers the tool with correct name and description', () => { - const tool = server.getRegisteredTool('get_breaking_changes'); - expect(tool).toBeDefined(); - expect(tool!.description).toContain('breaking changes'); - }); - - it('defines schema with repoPath, baseBranch, headBranch, and minSeverity', () => { - const tool = server.getRegisteredTool('get_breaking_changes'); - expect(tool!.schema).toHaveProperty('repoPath'); - 
expect(tool!.schema).toHaveProperty('baseBranch'); - expect(tool!.schema).toHaveProperty('headBranch'); - expect(tool!.schema).toHaveProperty('minSeverity'); - }); - - it('resolves default base branch when not provided', async () => { - const tool = server.getRegisteredTool('get_breaking_changes')!; - await tool.handler({}); - - expect(mockResolveDefaultBaseBranch).toHaveBeenCalledWith(process.cwd()); - expect(mockParseDiff).toHaveBeenCalledWith(process.cwd(), 'main', 'HEAD'); - }); - - it('uses provided baseBranch and headBranch', async () => { - const tool = server.getRegisteredTool('get_breaking_changes')!; - await tool.handler({ - repoPath: '/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - - expect(mockResolveDefaultBaseBranch).not.toHaveBeenCalled(); - expect(mockParseDiff).toHaveBeenCalledWith('/repo', 'develop', 'feature'); - }); - - it('calls detectBreakingChanges with parsed changed files', async () => { - const changedFiles = [makeChangedFile()]; - mockParseDiff.mockResolvedValue(changedFiles); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - await tool.handler({ repoPath: '/repo', baseBranch: 'main', headBranch: 'HEAD' }); - - expect(mockDetectBreakingChanges).toHaveBeenCalledWith('/repo', 'main', 'HEAD', changedFiles); - }); - - it('returns "no breaking changes" message when none found', async () => { - mockDetectBreakingChanges.mockResolvedValue([]); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('No breaking changes detected'); - }); - - it('returns formatted breaking changes when found', async () => { - mockDetectBreakingChanges.mockResolvedValue([makeBreakingChange()]); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - 
expect(result.content[0].text).toContain('Found 1 breaking change:'); - expect(result.content[0].text).toContain('**helper**'); - expect(result.content[0].text).toContain('`src/utils.ts`'); - }); - - it('filters by minSeverity when provided', async () => { - const changes = [ - makeBreakingChange({ severity: 'low', symbolName: 'lowFn' }), - makeBreakingChange({ severity: 'medium', symbolName: 'medFn' }), - makeBreakingChange({ severity: 'high', symbolName: 'highFn' }), - ]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({ minSeverity: 'high' }) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('highFn'); - expect(result.content[0].text).not.toContain('lowFn'); - expect(result.content[0].text).not.toContain('medFn'); - }); - - it('includes medium and high when minSeverity is medium', async () => { - const changes = [ - makeBreakingChange({ severity: 'low', symbolName: 'lowFn' }), - makeBreakingChange({ severity: 'medium', symbolName: 'medFn' }), - makeBreakingChange({ severity: 'high', symbolName: 'highFn' }), - ]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({ minSeverity: 'medium' }) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('medFn'); - expect(result.content[0].text).toContain('highFn'); - expect(result.content[0].text).not.toContain('lowFn'); - }); - - it('includes severity qualifier in no-results message when minSeverity is set', async () => { - const changes = [ - makeBreakingChange({ severity: 'low', symbolName: 'lowFn' }), - ]; - mockDetectBreakingChanges.mockResolvedValue(changes); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({ minSeverity: 'high' }) as { content: 
Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('at or above high severity'); - }); - - it('returns correct plural for multiple breaking changes', async () => { - mockDetectBreakingChanges.mockResolvedValue([ - makeBreakingChange({ symbolName: 'a' }), - makeBreakingChange({ symbolName: 'b' }), - ]); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('Found 2 breaking changes:'); - }); - - it('returns singular form for one breaking change', async () => { - mockDetectBreakingChanges.mockResolvedValue([makeBreakingChange()]); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('Found 1 breaking change:'); - expect(result.content[0].text).not.toContain('changes:'); - }); - - it('returns error content when an error occurs', async () => { - mockParseDiff.mockRejectedValue(new Error('diff failed')); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('Error detecting breaking changes: diff failed'); - }); - - it('handles non-Error thrown values', async () => { - mockParseDiff.mockRejectedValue('unexpected'); - - const tool = server.getRegisteredTool('get_breaking_changes')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('unexpected'); - }); -}); diff --git a/packages/mcp-server/__tests__/tools/get-impact-graph.test.ts 
b/packages/mcp-server/__tests__/tools/get-impact-graph.test.ts deleted file mode 100644 index f6c07b1..0000000 --- a/packages/mcp-server/__tests__/tools/get-impact-graph.test.ts +++ /dev/null @@ -1,193 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { registerGetImpactGraphTool } from '../../src/tools/get-impact-graph.js'; -import type { ImpactGraph } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockParseDiff = vi.fn(); -const mockBuildImpactGraph = vi.fn(); -const mockResolveDefaultBaseBranch = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - parseDiff: (...args: unknown[]) => mockParseDiff(...args), - buildImpactGraph: (...args: unknown[]) => mockBuildImpactGraph(...args), - resolveDefaultBaseBranch: (...args: unknown[]) => mockResolveDefaultBaseBranch(...args), -})); - -// ── Mock McpServer ── -type ToolHandler = (params: Record) => Promise; - -function createMockServer() { - const tools: Map = new Map(); - return { - tool: (name: string, description: string, schema: unknown, handler: ToolHandler) => { - tools.set(name, { description, schema, handler }); - }, - getRegisteredTool: (name: string) => tools.get(name), - getRegisteredTools: () => tools, - }; -} - -// ── Helpers ── -function makeGraph(overrides: Partial = {}): ImpactGraph { - return { - directlyChanged: ['src/a.ts'], - indirectlyAffected: ['src/b.ts'], - edges: [{ from: 'src/b.ts', to: 'src/a.ts', type: 'imports' as const }], - ...overrides, - }; -} - -describe('get_impact_graph tool', () => { - let server: ReturnType; - - beforeEach(() => { - vi.clearAllMocks(); - server = createMockServer(); - registerGetImpactGraphTool(server as never); - mockResolveDefaultBaseBranch.mockResolvedValue('main'); - mockParseDiff.mockResolvedValue([ - { - path: 'src/a.ts', - status: 'modified', - additions: 5, - deletions: 2, - language: 'typescript', - category: 'source', - }, - ]); - mockBuildImpactGraph.mockResolvedValue(makeGraph()); - }); - - it('registers the tool 
with correct name and description', () => { - const tool = server.getRegisteredTool('get_impact_graph'); - expect(tool).toBeDefined(); - expect(tool!.description).toContain('import dependency graph'); - }); - - it('defines schema with repoPath, baseBranch, headBranch, filePath, and depth', () => { - const tool = server.getRegisteredTool('get_impact_graph'); - expect(tool!.schema).toHaveProperty('repoPath'); - expect(tool!.schema).toHaveProperty('baseBranch'); - expect(tool!.schema).toHaveProperty('headBranch'); - expect(tool!.schema).toHaveProperty('filePath'); - expect(tool!.schema).toHaveProperty('depth'); - }); - - it('resolves default base branch when not provided', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - await tool.handler({}); - - expect(mockResolveDefaultBaseBranch).toHaveBeenCalledWith(process.cwd()); - expect(mockParseDiff).toHaveBeenCalledWith(process.cwd(), 'main', 'HEAD'); - }); - - it('uses provided baseBranch and headBranch', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - await tool.handler({ - repoPath: '/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - - expect(mockResolveDefaultBaseBranch).not.toHaveBeenCalled(); - expect(mockParseDiff).toHaveBeenCalledWith('/repo', 'develop', 'feature'); - }); - - it('calls buildImpactGraph with default depth of 3', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - await tool.handler({}); - - expect(mockBuildImpactGraph).toHaveBeenCalledWith( - process.cwd(), - expect.any(Array), - 3, - ); - }); - - it('passes custom depth to buildImpactGraph', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - await tool.handler({ depth: 5 }); - - expect(mockBuildImpactGraph).toHaveBeenCalledWith( - expect.any(String), - expect.any(Array), - 5, - ); - }); - - it('returns full graph formatted output when no filePath is provided', async () => { - const tool = 
server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('## Impact Graph'); - expect(result.content[0].text).toContain('Directly Changed (1)'); - expect(result.content[0].text).toContain('`src/a.ts`'); - expect(result.content[0].text).toContain('Indirectly Affected (1)'); - expect(result.content[0].text).toContain('`src/b.ts`'); - }); - - it('returns file-focused output when filePath is a directly changed file', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({ filePath: 'src/a.ts' }) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('## Impact Graph for `src/a.ts`'); - expect(result.content[0].text).toContain('**directly changed**'); - }); - - it('returns file-focused output when filePath is indirectly affected', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({ filePath: 'src/b.ts' }) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('## Impact Graph for `src/b.ts`'); - expect(result.content[0].text).toContain('**indirectly affected**'); - }); - - it('reports file not affected when filePath is not in graph', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({ filePath: 'src/z.ts' }) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('not affected'); - }); - - it('returns error content when an error occurs', async () => { - mockBuildImpactGraph.mockRejectedValue(new Error('graph failed')); - - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - 
expect(result.content[0].text).toContain('Error building impact graph: graph failed'); - }); - - it('handles non-Error thrown values', async () => { - mockBuildImpactGraph.mockRejectedValue(42); - - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('42'); - }); - - it('handles empty graph', async () => { - mockBuildImpactGraph.mockResolvedValue({ - directlyChanged: [], - indirectlyAffected: [], - edges: [], - }); - - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('No files directly changed'); - expect(result.content[0].text).toContain('No files indirectly affected'); - }); - - it('includes dependency edges in the full graph output', async () => { - const tool = server.getRegisteredTool('get_impact_graph')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('Dependency Edges (1)'); - expect(result.content[0].text).toContain('`src/b.ts`'); - expect(result.content[0].text).toContain('`src/a.ts`'); - }); -}); diff --git a/packages/mcp-server/__tests__/tools/get-risk-score.test.ts b/packages/mcp-server/__tests__/tools/get-risk-score.test.ts deleted file mode 100644 index 7e200fe..0000000 --- a/packages/mcp-server/__tests__/tools/get-risk-score.test.ts +++ /dev/null @@ -1,221 +0,0 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import { registerGetRiskScoreTool } from '../../src/tools/get-risk-score.js'; -import type { PRAnalysis, RiskAssessment, RiskFactor } from '@pr-impact/core'; - -// ── Mock @pr-impact/core ── -const mockAnalyzePR = vi.fn(); -vi.mock('@pr-impact/core', () => ({ - analyzePR: (...args: 
unknown[]) => mockAnalyzePR(...args), -})); - -// ── Mock McpServer ── -type ToolHandler = (params: Record) => Promise; - -function createMockServer() { - const tools: Map = new Map(); - return { - tool: (name: string, description: string, schema: unknown, handler: ToolHandler) => { - tools.set(name, { description, schema, handler }); - }, - getRegisteredTool: (name: string) => tools.get(name), - getRegisteredTools: () => tools, - }; -} - -// ── Helpers ── -function makeRiskFactor(overrides: Partial = {}): RiskFactor { - return { - name: 'Breaking Changes', - score: 50, - weight: 0.3, - description: 'Breaking changes detected', - details: ['removed export helper'], - ...overrides, - }; -} - -function makeRiskAssessment(overrides: Partial = {}): RiskAssessment { - return { - score: 42, - level: 'medium', - factors: [makeRiskFactor()], - ...overrides, - }; -} - -function makePRAnalysis(riskOverrides: Partial = {}): PRAnalysis { - return { - repoPath: '/repo', - baseBranch: 'main', - headBranch: 'HEAD', - changedFiles: [], - breakingChanges: [], - testCoverage: { changedSourceFiles: 0, sourceFilesWithTestChanges: 0, coverageRatio: 1, gaps: [] }, - docStaleness: { staleReferences: [], checkedFiles: [] }, - impactGraph: { directlyChanged: [], indirectlyAffected: [], edges: [] }, - riskScore: makeRiskAssessment(riskOverrides), - summary: 'Test summary', - }; -} - -describe('get_risk_score tool', () => { - let server: ReturnType; - - beforeEach(() => { - vi.clearAllMocks(); - server = createMockServer(); - registerGetRiskScoreTool(server as never); - mockAnalyzePR.mockResolvedValue(makePRAnalysis()); - }); - - it('registers the tool with correct name and description', () => { - const tool = server.getRegisteredTool('get_risk_score'); - expect(tool).toBeDefined(); - expect(tool!.description).toContain('risk score'); - }); - - it('defines schema with repoPath, baseBranch, and headBranch', () => { - const tool = server.getRegisteredTool('get_risk_score'); - 
expect(tool!.schema).toHaveProperty('repoPath'); - expect(tool!.schema).toHaveProperty('baseBranch'); - expect(tool!.schema).toHaveProperty('headBranch'); - }); - - it('calls analyzePR with provided parameters', async () => { - const tool = server.getRegisteredTool('get_risk_score')!; - await tool.handler({ - repoPath: '/custom/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - - expect(mockAnalyzePR).toHaveBeenCalledWith({ - repoPath: '/custom/repo', - baseBranch: 'develop', - headBranch: 'feature', - }); - }); - - it('defaults repoPath to process.cwd() when not provided', async () => { - const tool = server.getRegisteredTool('get_risk_score')!; - await tool.handler({}); - - expect(mockAnalyzePR).toHaveBeenCalledWith({ - repoPath: process.cwd(), - baseBranch: undefined, - headBranch: undefined, - }); - }); - - it('returns formatted risk assessment with score and level', async () => { - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content).toHaveLength(1); - expect(result.content[0].type).toBe('text'); - expect(result.content[0].text).toContain('## Risk Assessment'); - expect(result.content[0].text).toContain('42/100'); - expect(result.content[0].text).toContain('MEDIUM'); - }); - - it('includes contributing factors in the output', async () => { - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('Contributing Factors'); - expect(result.content[0].text).toContain('**Breaking Changes**'); - expect(result.content[0].text).toContain('score: 50'); - expect(result.content[0].text).toContain('weight: 0.3'); - expect(result.content[0].text).toContain('weighted: 15.0'); - expect(result.content[0].text).toContain('Breaking changes detected'); - 
expect(result.content[0].text).toContain('removed export helper'); - }); - - it('handles risk assessment with no factors', async () => { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ factors: [] })); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('No risk factors identified'); - }); - - it('handles factors without details', async () => { - mockAnalyzePR.mockResolvedValue( - makePRAnalysis({ - factors: [makeRiskFactor({ details: undefined })], - }), - ); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('**Breaking Changes**'); - expect(result.content[0].text).not.toContain('- removed export helper'); - }); - - it('handles factors with empty details array', async () => { - mockAnalyzePR.mockResolvedValue( - makePRAnalysis({ - factors: [makeRiskFactor({ details: [] })], - }), - ); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('**Breaking Changes**'); - }); - - it('formats different risk levels correctly', async () => { - for (const level of ['low', 'medium', 'high', 'critical'] as const) { - mockAnalyzePR.mockResolvedValue(makePRAnalysis({ level, score: level === 'low' ? 10 : level === 'medium' ? 42 : level === 'high' ? 
70 : 90 })); - - const freshServer = createMockServer(); - registerGetRiskScoreTool(freshServer as never); - const tool = freshServer.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain(level.toUpperCase()); - } - }); - - it('returns error content when analyzePR throws an Error', async () => { - mockAnalyzePR.mockRejectedValue(new Error('analysis failed')); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('Error calculating risk score: analysis failed'); - }); - - it('returns error content when analyzePR throws a non-Error value', async () => { - mockAnalyzePR.mockRejectedValue('string error'); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }>; isError: boolean }; - - expect(result.isError).toBe(true); - expect(result.content[0].text).toContain('string error'); - }); - - it('includes multiple factors in the output', async () => { - mockAnalyzePR.mockResolvedValue( - makePRAnalysis({ - factors: [ - makeRiskFactor({ name: 'Breaking Changes', score: 50, weight: 0.3 }), - makeRiskFactor({ name: 'Untested Changes', score: 80, weight: 0.25, description: 'Tests missing', details: ['src/foo.ts untested'] }), - ], - }), - ); - - const tool = server.getRegisteredTool('get_risk_score')!; - const result = await tool.handler({}) as { content: Array<{ type: string; text: string }> }; - - expect(result.content[0].text).toContain('**Breaking Changes**'); - expect(result.content[0].text).toContain('**Untested Changes**'); - expect(result.content[0].text).toContain('Tests missing'); - expect(result.content[0].text).toContain('src/foo.ts untested'); - 
}); -}); diff --git a/packages/mcp-server/node_modules/.bin/tsc b/packages/mcp-server/node_modules/.bin/tsc deleted file mode 100755 index e556d4f..0000000 --- a/packages/mcp-server/node_modules/.bin/tsc +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsc" "$@" -else - exec node "$basedir/../typescript/bin/tsc" "$@" -fi diff --git a/packages/mcp-server/node_modules/.bin/tsserver b/packages/mcp-server/node_modules/.bin/tsserver deleted file mode 100755 index db2401b..0000000 --- a/packages/mcp-server/node_modules/.bin/tsserver +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/bin/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules/typescript/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../typescript/bin/tsserver" "$@" -else - exec node "$basedir/../typescript/bin/tsserver" "$@" -fi diff --git a/packages/mcp-server/node_modules/.bin/tsup b/packages/mcp-server/node_modules/.bin/tsup deleted file mode 100755 index 4df1053..0000000 --- a/packages/mcp-server/node_modules/.bin/tsup +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else 
- export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-default.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-default.js" "$@" -fi diff --git a/packages/mcp-server/node_modules/.bin/tsup-node b/packages/mcp-server/node_modules/.bin/tsup-node deleted file mode 100755 index 689ae97..0000000 --- a/packages/mcp-server/node_modules/.bin/tsup-node +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -z "$NODE_PATH" ]; then - export NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules" -else - export 
NODE_PATH="/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/dist/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules:/Users/duc.do/Downloads/Documents/ducdm/pr-impact/node_modules/.pnpm/node_modules:$NODE_PATH" -fi -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../tsup/dist/cli-node.js" "$@" -else - exec node "$basedir/../tsup/dist/cli-node.js" "$@" -fi diff --git a/packages/mcp-server/node_modules/@modelcontextprotocol/sdk b/packages/mcp-server/node_modules/@modelcontextprotocol/sdk deleted file mode 120000 index fe45350..0000000 --- a/packages/mcp-server/node_modules/@modelcontextprotocol/sdk +++ /dev/null @@ -1 +0,0 @@ -../../../../node_modules/.pnpm/@modelcontextprotocol+sdk@1.26.0_zod@3.25.76/node_modules/@modelcontextprotocol/sdk \ No newline at end of file diff --git a/packages/mcp-server/node_modules/@pr-impact/core b/packages/mcp-server/node_modules/@pr-impact/core deleted file mode 120000 index 5e990a8..0000000 --- a/packages/mcp-server/node_modules/@pr-impact/core +++ /dev/null @@ -1 +0,0 @@ -../../../core \ No newline at end of file diff --git a/packages/mcp-server/node_modules/@types/node b/packages/mcp-server/node_modules/@types/node deleted file mode 120000 index 129d921..0000000 --- a/packages/mcp-server/node_modules/@types/node +++ /dev/null @@ -1 +0,0 @@ -../../../../node_modules/.pnpm/@types+node@22.19.10/node_modules/@types/node \ No newline at end of file diff --git a/packages/mcp-server/node_modules/tsup b/packages/mcp-server/node_modules/tsup deleted file mode 120000 index 547982a..0000000 --- a/packages/mcp-server/node_modules/tsup +++ /dev/null @@ -1 +0,0 @@ 
-../../../node_modules/.pnpm/tsup@8.5.1_postcss@8.5.6_typescript@5.7.3/node_modules/tsup \ No newline at end of file diff --git a/packages/mcp-server/node_modules/typescript b/packages/mcp-server/node_modules/typescript deleted file mode 120000 index d6c42d5..0000000 --- a/packages/mcp-server/node_modules/typescript +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/typescript@5.7.3/node_modules/typescript \ No newline at end of file diff --git a/packages/mcp-server/node_modules/zod b/packages/mcp-server/node_modules/zod deleted file mode 120000 index 03463ed..0000000 --- a/packages/mcp-server/node_modules/zod +++ /dev/null @@ -1 +0,0 @@ -../../../node_modules/.pnpm/zod@3.25.76/node_modules/zod \ No newline at end of file diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts deleted file mode 100644 index d863e3f..0000000 --- a/packages/mcp-server/src/index.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; -import { createRequire } from 'module'; -import { registerAnalyzeDiffTool } from './tools/analyze-diff.js'; -import { registerGetBreakingChangesTool } from './tools/get-breaking-changes.js'; -import { registerGetRiskScoreTool } from './tools/get-risk-score.js'; -import { registerGetImpactGraphTool } from './tools/get-impact-graph.js'; - -const require = createRequire(import.meta.url); -const { version } = require('../package.json') as { version: string }; - -const server = new McpServer({ - name: 'pr-impact', - version, -}); - -registerAnalyzeDiffTool(server); -registerGetBreakingChangesTool(server); -registerGetRiskScoreTool(server); -registerGetImpactGraphTool(server); - -async function main() { - const transport = new StdioServerTransport(); - await server.connect(transport); - - const shutdown = async () => { - await server.close(); - process.exit(0); - }; - - process.on('SIGINT', () => void 
shutdown()); - process.on('SIGTERM', () => void shutdown()); -} - -main().catch(console.error); diff --git a/packages/mcp-server/src/tools/analyze-diff.ts b/packages/mcp-server/src/tools/analyze-diff.ts deleted file mode 100644 index 910cc6f..0000000 --- a/packages/mcp-server/src/tools/analyze-diff.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { z } from 'zod'; -import { analyzePR, formatMarkdown } from '@pr-impact/core'; - -export function registerAnalyzeDiffTool(server: McpServer): void { - server.tool( - 'analyze_diff', - 'Run full PR impact analysis including breaking changes, test coverage, doc staleness, and risk scoring', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - baseBranch: z.string().optional().describe('Base branch, defaults to main'), - headBranch: z.string().optional().describe('Head branch, defaults to HEAD'), - }, - async ({ repoPath, baseBranch, headBranch }) => { - try { - const analysis = await analyzePR({ - repoPath: repoPath || process.cwd(), - baseBranch, - headBranch, - }); - return { - content: [{ type: 'text' as const, text: formatMarkdown(analysis) }], - }; - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - return { - content: [{ type: 'text' as const, text: `Error analyzing diff: ${message}` }], - isError: true, - }; - } - }, - ); -} diff --git a/packages/mcp-server/src/tools/get-breaking-changes.ts b/packages/mcp-server/src/tools/get-breaking-changes.ts deleted file mode 100644 index c7b3ec7..0000000 --- a/packages/mcp-server/src/tools/get-breaking-changes.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { z } from 'zod'; -import { parseDiff, detectBreakingChanges, resolveDefaultBaseBranch } from '@pr-impact/core'; -import type { BreakingChange } from '@pr-impact/core'; - -const SEVERITY_ORDER: Record = { - low: 0, - medium: 1, - high: 2, -}; - -export function formatBreakingChange(bc: BreakingChange): string { - const lines: string[] = []; - lines.push(`- **${bc.symbolName}** in \`${bc.filePath}\``); - lines.push(` Type: ${bc.type} | Severity: ${bc.severity}`); - lines.push(` Before: \`${bc.before}\``); - if (bc.after !== null) { - lines.push(` After: \`${bc.after}\``); - } else { - lines.push(' After: (removed)'); - } - if (bc.consumers.length > 0) { - lines.push(` Consumers: ${bc.consumers.map((c) => `\`${c}\``).join(', ')}`); - } - return lines.join('\n'); -} - -export function registerGetBreakingChangesTool(server: McpServer): void { - server.tool( - 'get_breaking_changes', - 'Detect breaking changes between two branches with optional severity filtering', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - baseBranch: z.string().optional().describe('Base branch, defaults to main'), - headBranch: z.string().optional().describe('Head branch, defaults to HEAD'), - minSeverity: z - .enum(['low', 'medium', 'high']) - .optional() - .describe('Minimum severity to include, defaults to low (show all)'), - }, - async ({ repoPath, baseBranch, headBranch, minSeverity }) => { - try { - const repo = repoPath || process.cwd(); - const base = 
baseBranch || await resolveDefaultBaseBranch(repo); - const head = headBranch || 'HEAD'; - - const changedFiles = await parseDiff(repo, base, head); - const breakingChanges = await detectBreakingChanges(repo, base, head, changedFiles); - - const minLevel = SEVERITY_ORDER[minSeverity || 'low'] ?? 0; - const filtered = breakingChanges.filter( - (bc) => (SEVERITY_ORDER[bc.severity] ?? 0) >= minLevel, - ); - - if (filtered.length === 0) { - const qualifier = minSeverity ? ` at or above ${minSeverity} severity` : ''; - return { - content: [ - { - type: 'text' as const, - text: `No breaking changes detected${qualifier}.`, - }, - ], - }; - } - - const header = `Found ${filtered.length} breaking change${filtered.length === 1 ? '' : 's'}:\n`; - const body = filtered.map(formatBreakingChange).join('\n\n'); - - return { - content: [{ type: 'text' as const, text: header + '\n' + body }], - }; - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - return { - content: [ - { type: 'text' as const, text: `Error detecting breaking changes: ${message}` }, - ], - isError: true, - }; - } - }, - ); -} diff --git a/packages/mcp-server/src/tools/get-impact-graph.ts b/packages/mcp-server/src/tools/get-impact-graph.ts deleted file mode 100644 index 00e7c21..0000000 --- a/packages/mcp-server/src/tools/get-impact-graph.ts +++ /dev/null @@ -1,117 +0,0 @@ -import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { z } from 'zod'; -import { parseDiff, buildImpactGraph, resolveDefaultBaseBranch } from '@pr-impact/core'; -import type { ImpactGraph } from '@pr-impact/core'; - -export function formatImpactGraph(graph: ImpactGraph, filePath?: string): string { - const lines: string[] = []; - - if (filePath) { - const isDirectlyChanged = graph.directlyChanged.includes(filePath); - const isIndirectlyAffected = graph.indirectlyAffected.includes(filePath); - - lines.push(`## Impact Graph for \`${filePath}\``); - lines.push(''); - - if 
(isDirectlyChanged) { - lines.push('This file is **directly changed** in the PR.'); - } else if (isIndirectlyAffected) { - lines.push('This file is **indirectly affected** by the PR changes.'); - } else { - lines.push('This file is not affected by the PR changes.'); - return lines.join('\n'); - } - - lines.push(''); - - const relevantEdges = graph.edges.filter( - (edge) => edge.from === filePath || edge.to === filePath, - ); - - if (relevantEdges.length > 0) { - lines.push('### Dependencies'); - lines.push(''); - for (const edge of relevantEdges) { - if (edge.from === filePath) { - lines.push(`- \`${filePath}\` ${edge.type} \`${edge.to}\``); - } else { - lines.push(`- \`${edge.from}\` ${edge.type} \`${filePath}\``); - } - } - } - - return lines.join('\n'); - } - - lines.push('## Impact Graph'); - lines.push(''); - - lines.push(`### Directly Changed (${graph.directlyChanged.length})`); - lines.push(''); - if (graph.directlyChanged.length > 0) { - for (const file of graph.directlyChanged) { - lines.push(`- \`${file}\``); - } - } else { - lines.push('No files directly changed.'); - } - - lines.push(''); - lines.push(`### Indirectly Affected (${graph.indirectlyAffected.length})`); - lines.push(''); - if (graph.indirectlyAffected.length > 0) { - for (const file of graph.indirectlyAffected) { - lines.push(`- \`${file}\``); - } - } else { - lines.push('No files indirectly affected.'); - } - - if (graph.edges.length > 0) { - lines.push(''); - lines.push(`### Dependency Edges (${graph.edges.length})`); - lines.push(''); - for (const edge of graph.edges) { - lines.push(`- \`${edge.from}\` ${edge.type} \`${edge.to}\``); - } - } - - return lines.join('\n'); -} - -export function registerGetImpactGraphTool(server: McpServer): void { - server.tool( - 'get_impact_graph', - 'Build an import dependency graph showing directly changed and indirectly affected files', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - baseBranch: 
z.string().optional().describe('Base branch, defaults to main'), - headBranch: z.string().optional().describe('Head branch, defaults to HEAD'), - filePath: z.string().optional().describe('Focus on a specific file in the graph'), - depth: z.number().optional().describe('Max depth for graph traversal, defaults to 3'), - }, - async ({ repoPath, baseBranch, headBranch, filePath, depth }) => { - try { - const repo = repoPath || process.cwd(); - const base = baseBranch || await resolveDefaultBaseBranch(repo); - const head = headBranch || 'HEAD'; - - const changedFiles = await parseDiff(repo, base, head); - const graph = await buildImpactGraph(repo, changedFiles, depth ?? 3); - - const text = formatImpactGraph(graph, filePath); - return { - content: [{ type: 'text' as const, text }], - }; - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - return { - content: [ - { type: 'text' as const, text: `Error building impact graph: ${message}` }, - ], - isError: true, - }; - } - }, - ); -} diff --git a/packages/mcp-server/src/tools/get-risk-score.ts b/packages/mcp-server/src/tools/get-risk-score.ts deleted file mode 100644 index cddb537..0000000 --- a/packages/mcp-server/src/tools/get-risk-score.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { z } from 'zod'; -import { analyzePR } from '@pr-impact/core'; -import type { RiskAssessment, RiskFactor } from '@pr-impact/core'; - -function formatRiskFactor(factor: RiskFactor): string { - const weighted = (factor.score * factor.weight).toFixed(1); - let line = `- **${factor.name}** (score: ${factor.score}, weight: ${factor.weight}, weighted: ${weighted})`; - line += `\n ${factor.description}`; - if (factor.details && factor.details.length > 0) { - for (const detail of factor.details) { - line += `\n - ${detail}`; - } - } - return line; -} - -function formatRiskAssessment(risk: RiskAssessment): string { - const lines: string[] = []; 
- lines.push('## Risk Assessment'); - lines.push(''); - lines.push(`**Overall Score:** ${risk.score}/100`); - lines.push(`**Risk Level:** ${risk.level.toUpperCase()}`); - lines.push(''); - - if (risk.factors.length > 0) { - lines.push('### Contributing Factors'); - lines.push(''); - for (const factor of risk.factors) { - lines.push(formatRiskFactor(factor)); - } - } else { - lines.push('No risk factors identified.'); - } - - return lines.join('\n'); -} - -export function registerGetRiskScoreTool(server: McpServer): void { - server.tool( - 'get_risk_score', - 'Calculate risk score and breakdown for a PR, showing overall score, level, and contributing factors', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - baseBranch: z.string().optional().describe('Base branch, defaults to main'), - headBranch: z.string().optional().describe('Head branch, defaults to HEAD'), - }, - async ({ repoPath, baseBranch, headBranch }) => { - try { - const analysis = await analyzePR({ - repoPath: repoPath || process.cwd(), - baseBranch, - headBranch, - }); - - const text = formatRiskAssessment(analysis.riskScore); - return { - content: [{ type: 'text' as const, text }], - }; - } catch (error) { - const message = error instanceof Error ? 
error.message : String(error); - return { - content: [ - { type: 'text' as const, text: `Error calculating risk score: ${message}` }, - ], - isError: true, - }; - } - }, - ); -} diff --git a/packages/skill/.claude-plugin/config.json b/packages/skill/.claude-plugin/config.json new file mode 100644 index 0000000..8abc18d --- /dev/null +++ b/packages/skill/.claude-plugin/config.json @@ -0,0 +1,6 @@ +{ + "name": "@pr-impact/skill", + "version": "1.0.0", + "description": "AI-powered PR impact analysis — detect breaking changes, map blast radius, score risk", + "skills": ["skill.md"] +} diff --git a/packages/skill/CLAUDE.md b/packages/skill/CLAUDE.md new file mode 100644 index 0000000..32b67c2 --- /dev/null +++ b/packages/skill/CLAUDE.md @@ -0,0 +1,28 @@ +# CLAUDE.md -- @pr-impact/skill + +## What this package does + +Claude Code plugin that provides the `/pr-impact` slash command. No runtime dependencies -- everything is assembled at build time from shared templates. + +## Quick commands + +```bash +pnpm build --filter=@pr-impact/skill # Build: assemble skill.md from templates +``` + +## File layout + +``` +.claude-plugin/ + config.json -- Plugin metadata (name, description, skill references) +mcp.json -- MCP server reference (points to @pr-impact/tools via npx) +skill.md -- GENERATED: assembled skill prompt (do not edit) +package.json -- Build script only +``` + +## Key patterns + +- **Generated file**: `skill.md` is assembled by `scripts/build-skill.ts` from `templates/system-prompt.md` and `templates/report-template.md`. Do not edit it manually. +- **MCP integration**: `mcp.json` tells Claude Code to start the `@pr-impact/tools` MCP server, making all 6 tools available during analysis. +- **No runtime deps**: the published package contains only static files (`.claude-plugin/`, `skill.md`, `mcp.json`). +- **Published files**: controlled by the `files` array in `package.json` -- only `.claude-plugin`, `skill.md`, and `mcp.json` are included. 
diff --git a/packages/skill/README.md b/packages/skill/README.md new file mode 100644 index 0000000..eb40604 --- /dev/null +++ b/packages/skill/README.md @@ -0,0 +1,32 @@ +# @pr-impact/skill + +Claude Code plugin that provides the `/pr-impact` slash command for AI-powered PR impact analysis. + +## Installation + +```bash +claude plugin add @pr-impact/skill +``` + +## Usage + +In Claude Code, run: + +``` +/pr-impact +``` + +This starts an AI-driven analysis of your current branch against `main`, using MCP tools to gather evidence and produce a structured risk report. + +## How It Works + +The plugin bundles: +- **skill.md** -- System prompt with analysis methodology and report template +- **mcp.json** -- References `@pr-impact/tools` MCP server for git/repo tools +- **.claude-plugin/config.json** -- Plugin metadata + +At build time, `skill.md` is assembled from shared templates (`templates/system-prompt.md` and `templates/report-template.md`). + +## License + +[MIT](../../LICENSE) diff --git a/packages/skill/mcp.json b/packages/skill/mcp.json new file mode 100644 index 0000000..dadec72 --- /dev/null +++ b/packages/skill/mcp.json @@ -0,0 +1,8 @@ +{ + "mcpServers": { + "pr-impact-tools": { + "command": "npx", + "args": ["-y", "@pr-impact/tools"] + } + } +} diff --git a/packages/skill/package.json b/packages/skill/package.json new file mode 100644 index 0000000..e42a611 --- /dev/null +++ b/packages/skill/package.json @@ -0,0 +1,25 @@ +{ + "name": "@pr-impact/skill", + "version": "1.0.0", + "description": "Claude Code skill for AI-powered PR impact analysis", + "license": "MIT", + "files": [ + ".claude-plugin", + "skill.md", + "mcp.json" + ], + "scripts": { + "build": "tsx ../../scripts/build-skill.ts" + }, + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/ducdmdev/pr-impact.git", + "directory": "packages/skill" + }, + "devDependencies": { + "tsx": "^4.0.0" + } +} diff --git a/packages/skill/skill.md 
b/packages/skill/skill.md new file mode 100644 index 0000000..0785288 --- /dev/null +++ b/packages/skill/skill.md @@ -0,0 +1,173 @@ +--- +name: pr-impact +description: Analyze PR impact — breaking changes, test coverage gaps, doc staleness, impact graph, and risk score +arguments: + - name: base + description: "Base branch to compare against (default: main)" + required: false + - name: head + description: "Head branch to analyze (default: HEAD)" + required: false +--- + +You are a PR impact analyzer. Given access to a git repository via MCP tools, analyze a pull request and produce a structured impact report. + +## Available Tools + +- `git_diff` — Get the raw diff between two branches (optionally for a single file) +- `read_file_at_ref` — Read a file's content at a specific git ref (branch/commit) +- `list_changed_files` — List all files changed between two branches with stats and status +- `search_code` — Search for a regex pattern across the codebase +- `find_importers` — Find all files that import a given module path +- `list_test_files` — Find test files associated with a given source file + +## Analysis Steps + +Follow these steps in order. Use the tools to gather evidence — never guess about file contents or imports. + +### Step 1: Diff Overview + +Call `list_changed_files` to get all changed files. Categorize each file: +- **source**: `.ts`, `.tsx`, `.js`, `.jsx` files that are not tests +- **test**: files in `__tests__/`, `test/`, `tests/` directories, or files matching `*.test.*`, `*.spec.*` +- **doc**: `.md`, `.mdx`, `.rst`, `.txt` files +- **config**: `package.json`, `tsconfig.json`, `.eslintrc.*`, `Dockerfile`, CI/CD files, bundler configs +- **other**: everything else + +### Step 2: Breaking Change Detection + +For each changed **source** file that likely exports public API symbols: +1. Call `read_file_at_ref` with the base branch ref to get the old version +2. Call `read_file_at_ref` with the head branch ref to get the new version +3. 
Compare exported functions, classes, types, interfaces, enums, and variables +4. Identify breaking changes: + - **Removed export**: a symbol that existed in base but is gone in head + - **Changed signature**: function parameters changed (added required params, removed params, changed types) + - **Changed type**: interface/type fields changed in incompatible ways + - **Renamed export**: a symbol was renamed (removed + similar new one added) +5. For each breaking change, call `find_importers` to find downstream consumers +6. Assign severity: + - **high**: removed or renamed exports, removed required interface fields + - **medium**: changed function signatures, changed return types + - **low**: changed optional fields, added required fields to interfaces + +### Step 3: Test Coverage Gaps + +For each changed source file: +1. Call `list_test_files` to find associated test files +2. Check if any of those test files appear in the changed file list from Step 1 +3. Calculate coverage ratio: `sourceFilesWithTestChanges / changedSourceFiles` +4. Flag each source file that changed without corresponding test updates + +### Step 4: Documentation Staleness + +For each changed **doc** file AND for each doc file that references changed source files: +1. Call `read_file_at_ref` (head ref) to read the doc content +2. Look for references to symbols, file paths, or function names that were modified or removed +3. Flag stale references with the line number and reason + +If no doc files are in the diff, call `search_code` with pattern matching changed symbol names in `*.md` files to find docs that reference them. + +### Step 5: Impact Graph + +For each changed source file: +1. Call `find_importers` to find direct consumers +2. For each direct consumer, call `find_importers` again to find indirect consumers (up to 2 levels deep) +3. 
Classify files as **directly changed** (in the diff) or **indirectly affected** (consumers not in the diff) + +### Step 6: Risk Assessment + +Score each factor from 0 to 100, then compute the weighted average: + +| Factor | Weight | Scoring | +|--------|--------|---------| +| Breaking changes | 0.30 | `100` if any high-severity, `60` if medium-only, `30` if low-only, `0` if none | +| Untested changes | 0.25 | `(1 - coverageRatio) * 100` | +| Diff size | 0.15 | `0` if <100 total lines, `50` if 100-500, `80` if 500-1000, `100` if >1000 | +| Stale documentation | 0.10 | `min(staleReferences * 20, 100)` | +| Config file changes | 0.10 | `100` if CI/build config, `50` if other config, `0` if none | +| Impact breadth | 0.10 | `min(indirectlyAffectedFiles * 10, 100)` | + +**Formula:** `score = sum(factor_score * weight)` (weights sum to 1.0) + +**Risk levels:** 0-25 = low, 26-50 = medium, 51-75 = high, 76-100 = critical + +## Rules + +- Always call tools to verify — never guess about file contents, imports, or test file existence. +- Always use `git_diff` with the `file` parameter to inspect files individually. Never load the full diff at once. +- If >30 changed files, only call `read_file_at_ref` for files with >50 lines changed. +- If >50 changed files, skip the documentation staleness check (Step 4). +- Call `find_importers` only for directly changed source files, not for indirect consumers. +- Focus on exported/public symbols for breaking change detection. Internal/private changes are lower priority. +- Categorize every finding with severity and cite evidence (file path, line, before/after). +- Be precise with the risk score calculation — show your math in the factor breakdown. + + +## Your Task + +Analyze the PR comparing branch `$ARGUMENTS` in the current repository. If no arguments provided, compare `main` to `HEAD`. + +Parse the arguments: first argument is `base` branch, second is `head` branch. + +Use the pr-impact MCP tools to inspect the repository. 
Follow all 6 analysis steps. Produce the report using this exact template: + +Output your analysis using exactly this structure. Fill in all sections. If a section has no findings, write "None" under it. + +# PR Impact Report + +## Summary +- **Risk Score**: {score}/100 ({level}) +- **Files Changed**: {total} ({source} source, {test} test, {doc} doc, {config} config, {other} other) +- **Total Lines Changed**: {additions} additions, {deletions} deletions +- **Breaking Changes**: {count} ({high} high, {medium} medium, {low} low) +- **Test Coverage**: {ratio}% of changed source files have corresponding test updates +- **Stale Doc References**: {count} +- **Impact Breadth**: {direct} directly changed, {indirect} indirectly affected + +## Breaking Changes + +| File | Type | Symbol | Before | After | Severity | Consumers | +|------|------|--------|--------|-------|----------|-----------| +| {filePath} | {removed_export/changed_signature/changed_type/renamed_export} | {symbolName} | {before signature/definition} | {after signature/definition or "removed"} | {high/medium/low} | {comma-separated consumer file paths} | + +## Test Coverage Gaps + +| Source File | Expected Test File | Test Exists | Test Updated | +|-------------|-------------------|-------------|--------------| +| {sourceFile} | {testFile} | {yes/no} | {yes/no} | + +## Stale Documentation + +| Doc File | Line | Reference | Reason | +|----------|------|-----------|--------| +| {docFile} | {lineNumber} | {reference text} | {why it's stale} | + +## Impact Graph + +### Directly Changed Files +- {filePath} ({additions}+, {deletions}-) + +### Indirectly Affected Files +- {filePath} — imported by {consumer}, which is directly changed + +## Risk Factor Breakdown + +| Factor | Score | Weight | Weighted | Details | +|--------|-------|--------|----------|---------| +| Breaking changes | {0-100} | 0.30 | {score*0.30} | {description} | +| Untested changes | {0-100} | 0.25 | {score*0.25} | {coverageRatio}% coverage | +| 
Diff size | {0-100} | 0.15 | {score*0.15} | {totalLines} total lines changed | +| Stale documentation | {0-100} | 0.10 | {score*0.10} | {count} stale references | +| Config file changes | {0-100} | 0.10 | {score*0.10} | {description} | +| Impact breadth | {0-100} | 0.10 | {score*0.10} | {count} indirectly affected files | +| **Total** | | **1.00** | **{total}** | | + +## Recommendations + +Based on the analysis above, here are the recommended actions before merging: + +1. {actionable recommendation with specific file/symbol references} +2. {actionable recommendation} +3. {actionable recommendation} + diff --git a/packages/tools-core/CLAUDE.md b/packages/tools-core/CLAUDE.md new file mode 100644 index 0000000..2455f4d --- /dev/null +++ b/packages/tools-core/CLAUDE.md @@ -0,0 +1,38 @@ +# CLAUDE.md -- @pr-impact/tools-core + +## What this package does + +Pure tool handler functions for git/repo operations. No framework dependency -- both the MCP server and GitHub Action import from here. + +## Quick commands + +```bash +pnpm build --filter=@pr-impact/tools-core # Build with tsup (ESM + dts) +npx vitest run packages/tools-core # Run tests +``` + +## Source layout + +``` +src/ + index.ts -- Barrel exports for all handlers and types + tools/ + git-diff.ts -- gitDiff(): raw diff between two refs + read-file.ts -- readFileAtRef(): file content at a git ref + list-files.ts -- listChangedFiles(): changed files with status/stats + search-code.ts -- searchCode(): regex search via git grep + find-imports.ts -- findImporters(): reverse dependency map (session-cached) + list-tests.ts -- listTestFiles(): test file discovery by naming convention +``` + +## Key patterns + +- All functions accept an optional `repoPath` (defaults to `process.cwd()`). +- `findImporters` builds a reverse dependency map on first call and caches it. Call `clearImporterCache()` to reset. +- `searchCode` uses `git.raw(['grep', ...])` because simple-git's `grep()` doesn't reliably pass glob specs. 
Exit code 1 from git grep means "no matches", not an error. +- `listTestFiles` generates candidate paths for sibling, `__tests__/`, `test/`, and `tests/` directories, plus `__tests__/` at the package root (sibling to `src/`). +- `listChangedFiles` handles binary files via a type guard (`'insertions' in f`). + +## Testing + +Tests in `__tests__/` mock `simple-git` and `fast-glob`. No real git repos needed. diff --git a/packages/tools-core/README.md b/packages/tools-core/README.md new file mode 100644 index 0000000..bdc6114 --- /dev/null +++ b/packages/tools-core/README.md @@ -0,0 +1,30 @@ +# @pr-impact/tools-core + +Pure tool handler functions for git and repository operations. This is the shared foundation used by both the MCP server (`@pr-impact/tools`) and the GitHub Action (`@pr-impact/action`). + +## Tools + +| Function | Description | +|---|---| +| `gitDiff` | Get raw git diff between two refs, optionally for a single file | +| `readFileAtRef` | Read a file's content at a specific git ref (branch/commit/tag) | +| `listChangedFiles` | List changed files between two refs with status and addition/deletion stats | +| `searchCode` | Search for a regex pattern across the codebase via `git grep` | +| `findImporters` | Find all source files that import a given module (session-cached) | +| `listTestFiles` | Find test files associated with a source file using naming conventions | + +## Usage + +```typescript +import { gitDiff, listChangedFiles, findImporters } from '@pr-impact/tools-core'; + +const diff = await gitDiff({ base: 'main', head: 'feature-branch', file: 'src/index.ts' }); +const files = await listChangedFiles({ base: 'main', head: 'feature-branch' }); +const consumers = await findImporters({ modulePath: 'src/utils/parser.ts' }); +``` + +All functions accept an optional `repoPath` parameter (defaults to `process.cwd()`). 
+ +## License + +[MIT](../../LICENSE) diff --git a/packages/tools-core/__tests__/find-imports.test.ts b/packages/tools-core/__tests__/find-imports.test.ts new file mode 100644 index 0000000..7256a44 --- /dev/null +++ b/packages/tools-core/__tests__/find-imports.test.ts @@ -0,0 +1,96 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { readFile } from 'fs/promises'; + +vi.mock('fast-glob', () => ({ + default: vi.fn(), +})); + +vi.mock('fs/promises', () => ({ + readFile: vi.fn(), +})); + +import fg from 'fast-glob'; +import { findImporters, clearImporterCache } from '../src/tools/find-imports.js'; + +beforeEach(() => { + vi.clearAllMocks(); + clearImporterCache(); +}); + +describe('findImporters', () => { + it('finds files that import a given module', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/bar.ts', + '/repo/src/baz.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('bar.ts')) { + return 'import { doStuff } from "./foo.js";\nconsole.log(doStuff());' as never; + } + if (String(path).endsWith('baz.ts')) { + return 'import { other } from "./utils.js";\nconsole.log(other());' as never; + } + if (String(path).endsWith('foo.ts')) { + return 'export function doStuff() { return 1; }' as never; + } + return '' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toContain('src/bar.ts'); + expect(result.importers).not.toContain('src/baz.ts'); + }); + + it('returns empty array when no importers found', async () => { + vi.mocked(fg).mockResolvedValue(['/repo/src/bar.ts']); + vi.mocked(readFile).mockResolvedValue('const x = 1;' as never); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toHaveLength(0); + }); + + it('caches the reverse dependency map across calls', async () => { + 
vi.mocked(fg).mockResolvedValue([ + '/repo/src/bar.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('bar.ts')) { + return 'import { doStuff } from "./foo.js";' as never; + } + return 'export function doStuff() {}' as never; + }); + + // First call builds the cache + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + + // Second call should reuse the cache — fg should NOT be called again + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + }); + + it('clearImporterCache forces rebuild on next call', async () => { + vi.mocked(fg).mockResolvedValue(['/repo/src/bar.ts']); + vi.mocked(readFile).mockResolvedValue('const x = 1;' as never); + + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(1); + + clearImporterCache(); + + await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); + expect(fg).toHaveBeenCalledTimes(2); + }); +}); diff --git a/packages/tools-core/__tests__/git-diff.test.ts b/packages/tools-core/__tests__/git-diff.test.ts new file mode 100644 index 0000000..c6535db --- /dev/null +++ b/packages/tools-core/__tests__/git-diff.test.ts @@ -0,0 +1,61 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { gitDiff } from '../src/tools/git-diff.js'; + +const mockGit = { + diff: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('gitDiff', () => { + it('returns full diff between two branches', async () => { + mockGit.diff.mockResolvedValue('diff --git a/src/foo.ts b/src/foo.ts\n--- a/src/foo.ts\n+++ b/src/foo.ts\n@@ -1 +1 @@\n-old\n+new'); + + const result = await gitDiff({ + repoPath: '/repo', + base: 
'main', + head: 'HEAD', + }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.diff).toHaveBeenCalledWith(['main...HEAD']); + expect(result.diff).toContain('diff --git'); + }); + + it('returns diff for a single file when file parameter is provided', async () => { + mockGit.diff.mockResolvedValue('diff for single file'); + + const result = await gitDiff({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + file: 'src/foo.ts', + }); + + expect(mockGit.diff).toHaveBeenCalledWith(['main...HEAD', '--', 'src/foo.ts']); + expect(result.diff).toBe('diff for single file'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.diff.mockResolvedValue('some diff'); + + await gitDiff({ base: 'main', head: 'HEAD' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws on failure', async () => { + mockGit.diff.mockRejectedValue(new Error('not a git repo')); + + await expect(gitDiff({ base: 'main', head: 'HEAD' })).rejects.toThrow('not a git repo'); + }); +}); diff --git a/packages/tools-core/__tests__/list-files.test.ts b/packages/tools-core/__tests__/list-files.test.ts new file mode 100644 index 0000000..b287d55 --- /dev/null +++ b/packages/tools-core/__tests__/list-files.test.ts @@ -0,0 +1,110 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { listChangedFiles } from '../src/tools/list-files.js'; + +const mockGit = { + diff: vi.fn(), + diffSummary: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('listChangedFiles', () => { + it('returns list of changed files with status and stats', async () => { + mockGit.diff.mockResolvedValue('M\tsrc/foo.ts\nA\tsrc/bar.ts\nD\told.ts\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'src/foo.ts', insertions: 10, deletions: 3, binary: 
false }, + { file: 'src/bar.ts', insertions: 20, deletions: 0, binary: false }, + { file: 'old.ts', insertions: 0, deletions: 15, binary: false }, + ], + insertions: 30, + deletions: 18, + }); + + const result = await listChangedFiles({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.diff).toHaveBeenCalledWith(['--name-status', 'main...HEAD']); + expect(mockGit.diffSummary).toHaveBeenCalledWith(['main...HEAD']); + expect(result.files).toHaveLength(3); + expect(result.files[0]).toEqual({ + path: 'src/foo.ts', + status: 'modified', + additions: 10, + deletions: 3, + }); + expect(result.files[1]).toEqual({ + path: 'src/bar.ts', + status: 'added', + additions: 20, + deletions: 0, + }); + expect(result.files[2]).toEqual({ + path: 'old.ts', + status: 'deleted', + additions: 0, + deletions: 15, + }); + expect(result.totalAdditions).toBe(30); + expect(result.totalDeletions).toBe(18); + }); + + it('handles renamed files (R status with score)', async () => { + mockGit.diff.mockResolvedValue('R100\told-name.ts\tnew-name.ts\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'new-name.ts', insertions: 0, deletions: 0, binary: false }, + ], + insertions: 0, + deletions: 0, + }); + + const result = await listChangedFiles({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(result.files).toHaveLength(1); + expect(result.files[0]).toEqual({ + path: 'new-name.ts', + status: 'renamed', + additions: 0, + deletions: 0, + }); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.diff.mockResolvedValue(''); + mockGit.diffSummary.mockResolvedValue({ + files: [], + insertions: 0, + deletions: 0, + }); + + await listChangedFiles({ base: 'main', head: 'HEAD' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws on failure', async () => { + mockGit.diff.mockRejectedValue(new Error('bad revision')); + + await expect( + 
listChangedFiles({ base: 'main', head: 'HEAD' }), + ).rejects.toThrow('bad revision'); + }); +}); diff --git a/packages/tools-core/__tests__/list-tests.test.ts b/packages/tools-core/__tests__/list-tests.test.ts new file mode 100644 index 0000000..9f7541b --- /dev/null +++ b/packages/tools-core/__tests__/list-tests.test.ts @@ -0,0 +1,78 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('fast-glob', () => ({ + default: vi.fn(), +})); + +import fg from 'fast-glob'; +import { listTestFiles } from '../src/tools/list-tests.js'; + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe('listTestFiles', () => { + it('finds test files for a given source file', async () => { + vi.mocked(fg).mockResolvedValue(['src/utils/__tests__/parser.test.ts']); + + const result = await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/parser.ts', + }); + + expect(result.testFiles).toContain('src/utils/__tests__/parser.test.ts'); + }); + + it('returns empty array when no test files found', async () => { + vi.mocked(fg).mockResolvedValue([]); + + const result = await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/obscure.ts', + }); + + expect(result.testFiles).toHaveLength(0); + }); + + it('generates candidates for sibling, __tests__, test, and tests directories', async () => { + vi.mocked(fg).mockResolvedValue([]); + + await listTestFiles({ + repoPath: '/repo', + sourceFile: 'src/utils/parser.ts', + }); + + // Verify that fg was called with candidate patterns + const candidates = vi.mocked(fg).mock.calls[0][0] as string[]; + expect(candidates).toContain('src/utils/parser.test.ts'); + expect(candidates).toContain('src/utils/parser.spec.ts'); + expect(candidates).toContain('src/utils/__tests__/parser.ts'); + expect(candidates).toContain('src/utils/__tests__/parser.test.ts'); + }); + + it('generates candidates for __tests__ at package root (sibling to src/)', async () => { + vi.mocked(fg).mockResolvedValue([]); + + await 
listTestFiles({ + repoPath: '/repo', + sourceFile: 'packages/action/src/client.ts', + }); + + const candidates = vi.mocked(fg).mock.calls[0][0] as string[]; + // Should check __tests__ at package root, not just inside src/ + expect(candidates).toContain('packages/action/__tests__/client.test.ts'); + expect(candidates).toContain('packages/action/__tests__/client.ts'); + // Should also check inside src/ + expect(candidates).toContain('packages/action/src/__tests__/client.test.ts'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + vi.mocked(fg).mockResolvedValue([]); + + await listTestFiles({ sourceFile: 'src/foo.ts' }); + + expect(vi.mocked(fg).mock.calls[0][1]).toEqual( + expect.objectContaining({ cwd: process.cwd() }), + ); + }); +}); diff --git a/packages/tools-core/__tests__/read-file.test.ts b/packages/tools-core/__tests__/read-file.test.ts new file mode 100644 index 0000000..787508c --- /dev/null +++ b/packages/tools-core/__tests__/read-file.test.ts @@ -0,0 +1,49 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { readFileAtRef } from '../src/tools/read-file.js'; + +const mockGit = { + show: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('readFileAtRef', () => { + it('reads a file at a specific git ref', async () => { + mockGit.show.mockResolvedValue('export function foo() {}'); + + const result = await readFileAtRef({ + repoPath: '/repo', + ref: 'main', + filePath: 'src/foo.ts', + }); + + expect(simpleGit).toHaveBeenCalledWith('/repo'); + expect(mockGit.show).toHaveBeenCalledWith(['main:src/foo.ts']); + expect(result.content).toBe('export function foo() {}'); + }); + + it('defaults repoPath to cwd when not provided', async () => { + mockGit.show.mockResolvedValue('content'); + + await readFileAtRef({ ref: 'main', filePath: 
'src/foo.ts' }); + + expect(simpleGit).toHaveBeenCalledWith(process.cwd()); + }); + + it('throws when file does not exist at ref', async () => { + mockGit.show.mockRejectedValue(new Error('path not found')); + + await expect( + readFileAtRef({ repoPath: '/repo', ref: 'main', filePath: 'src/missing.ts' }), + ).rejects.toThrow('path not found'); + }); +}); diff --git a/packages/tools-core/__tests__/regression.test.ts b/packages/tools-core/__tests__/regression.test.ts new file mode 100644 index 0000000..3fb740c --- /dev/null +++ b/packages/tools-core/__tests__/regression.test.ts @@ -0,0 +1,159 @@ +/** + * Regression tests for tool output structure. + * + * These tests run the actual tool functions against a known git state + * (the test fixtures committed in this repo) to validate output shape + * and catch unexpected regressions in parsing or formatting. + * + * Unlike the unit tests (which mock simple-git), these use the real + * git repo. They are scoped to the repo's own committed history so + * they remain deterministic. 
+ */ +import { describe, it, expect } from 'vitest'; +import { simpleGit } from 'simple-git'; +import { + gitDiff, + readFileAtRef, + listChangedFiles, + searchCode, + findImporters, + listTestFiles, +} from '../src/index.js'; + +import { beforeAll } from 'vitest'; + +// Use the repo root as repoPath +const repoPath = process.cwd(); + +// Find two consecutive commits to test against +let baseRef: string; +let headRef: string; + +describe('regression: tool output structure', () => { + beforeAll(async () => { + const git = simpleGit(repoPath); + const log = await git.log({ maxCount: 3 }); + if (log.all.length < 2) { + throw new Error('Need at least 2 commits for regression tests'); + } + headRef = log.all[0].hash; + baseRef = log.all[1].hash; + }); + + describe('gitDiff', () => { + it('returns a string diff property', async () => { + const result = await gitDiff({ repoPath, base: baseRef, head: headRef }); + expect(result).toHaveProperty('diff'); + expect(typeof result.diff).toBe('string'); + }); + }); + + describe('readFileAtRef', () => { + it('returns file content for a known file', async () => { + const result = await readFileAtRef({ repoPath, ref: headRef, filePath: 'package.json' }); + expect(result).toHaveProperty('content'); + expect(typeof result.content).toBe('string'); + expect(result.content).toContain('"name"'); + }); + + it('throws for a nonexistent file', async () => { + await expect( + readFileAtRef({ repoPath, ref: headRef, filePath: 'nonexistent-file-xyz.ts' }), + ).rejects.toThrow(); + }); + }); + + describe('listChangedFiles', () => { + it('returns files array with required fields', async () => { + const result = await listChangedFiles({ repoPath, base: baseRef, head: headRef }); + expect(result).toHaveProperty('files'); + expect(Array.isArray(result.files)).toBe(true); + + if (result.files.length > 0) { + const file = result.files[0]; + expect(file).toHaveProperty('path'); + expect(file).toHaveProperty('status'); + 
expect(file).toHaveProperty('additions'); + expect(file).toHaveProperty('deletions'); + expect(typeof file.path).toBe('string'); + expect(typeof file.additions).toBe('number'); + expect(typeof file.deletions).toBe('number'); + } + }); + }); + + describe('searchCode', () => { + it('finds matches for a known pattern', async () => { + const result = await searchCode({ repoPath, pattern: 'pr-impact', glob: 'package.json' }); + expect(result).toHaveProperty('matches'); + expect(Array.isArray(result.matches)).toBe(true); + expect(result.matches.length).toBeGreaterThan(0); + + const match = result.matches[0]; + expect(match).toHaveProperty('file'); + expect(match).toHaveProperty('line'); + expect(match).toHaveProperty('match'); + expect(typeof match.file).toBe('string'); + expect(typeof match.line).toBe('number'); + expect(typeof match.match).toBe('string'); + }); + + it('returns empty matches for impossible pattern', async () => { + // Build the pattern dynamically so the literal string doesn't appear in this file + const pattern = ['zzz', 'nonexistent', 'xyzzy', '99'].join('_'); + const result = await searchCode({ repoPath, pattern }); + expect(result.matches).toHaveLength(0); + }); + }); + + describe('findImporters', () => { + it('returns importers array', async () => { + const result = await findImporters({ + repoPath, + modulePath: 'packages/tools-core/src/tools/git-diff.js', + }); + expect(result).toHaveProperty('importers'); + expect(Array.isArray(result.importers)).toBe(true); + // git-diff.ts should be imported by at least index.ts barrel + expect(result.importers.length).toBeGreaterThan(0); + expect(result.importers).toContain('packages/tools-core/src/index.ts'); + }); + + it('returns empty for unknown module', async () => { + const result = await findImporters({ + repoPath, + modulePath: 'nonexistent/module.js', + }); + expect(result.importers).toHaveLength(0); + }); + }); + + describe('listTestFiles', () => { + it('finds test files for a known source file', 
async () => { + const result = await listTestFiles({ + repoPath, + sourceFile: 'packages/tools-core/src/tools/git-diff.ts', + }); + expect(result).toHaveProperty('testFiles'); + expect(Array.isArray(result.testFiles)).toBe(true); + expect(result.testFiles.length).toBeGreaterThan(0); + expect(result.testFiles).toContain('packages/tools-core/__tests__/git-diff.test.ts'); + }); + + it('finds test at package root __tests__/', async () => { + const result = await listTestFiles({ + repoPath, + sourceFile: 'packages/action/src/client.ts', + }); + expect(result.testFiles).toContain('packages/action/__tests__/client.test.ts'); + }); + + it('returns empty for a file with no tests', async () => { + const result = await listTestFiles({ + repoPath, + sourceFile: 'scripts/embed-templates.ts', + }); + expect(result.testFiles).toHaveLength(0); + }); + }); +}); diff --git a/packages/tools-core/__tests__/search-code.test.ts b/packages/tools-core/__tests__/search-code.test.ts new file mode 100644 index 0000000..e7824de --- /dev/null +++ b/packages/tools-core/__tests__/search-code.test.ts @@ -0,0 +1,79 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; + +vi.mock('simple-git', () => ({ + simpleGit: vi.fn(), +})); + +import { simpleGit } from 'simple-git'; +import { searchCode } from '../src/tools/search-code.js'; + +const mockGit = { + raw: vi.fn(), +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.mocked(simpleGit).mockReturnValue(mockGit as never); +}); + +describe('searchCode', () => { + it('searches for a pattern and returns matches', async () => { + mockGit.raw.mockResolvedValue( + 'src/foo.ts:5:export function doStuff() {\n' + + 'src/bar.ts:12:import { doStuff } from "./foo"\n', + ); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'doStuff', + }); + + expect(mockGit.raw).toHaveBeenCalledWith(['grep', '-n', '--', 'doStuff']); + expect(result.matches).toHaveLength(2); + expect(result.matches[0]).toEqual({ + file: 'src/foo.ts', + line: 5, + 
match: 'export function doStuff() {', + }); + expect(result.matches[1]).toEqual({ + file: 'src/bar.ts', + line: 12, + match: 'import { doStuff } from "./foo"', + }); + }); + + it('passes glob parameter to filter files', async () => { + mockGit.raw.mockResolvedValue('docs/api.md:3:doStuff reference\n'); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'doStuff', + glob: '*.md', + }); + + expect(mockGit.raw).toHaveBeenCalledWith(['grep', '-n', '--', 'doStuff', '*.md']); + expect(result.matches).toHaveLength(1); + expect(result.matches[0].file).toBe('docs/api.md'); + }); + + it('returns empty matches when git grep finds nothing (exit code 1)', async () => { + const error = new Error('process exited with code 1'); + mockGit.raw.mockRejectedValue(error); + + const result = await searchCode({ + repoPath: '/repo', + pattern: 'nonexistent', + }); + + expect(result.matches).toHaveLength(0); + }); + + it('throws on real errors (not exit code 1)', async () => { + const error = new Error('fatal: not a git repository'); + mockGit.raw.mockRejectedValue(error); + + await expect( + searchCode({ repoPath: '/repo', pattern: 'anything' }), + ).rejects.toThrow('fatal: not a git repository'); + }); +}); diff --git a/packages/core/package.json b/packages/tools-core/package.json similarity index 60% rename from packages/core/package.json rename to packages/tools-core/package.json index 1479ded..bcd14d0 100644 --- a/packages/core/package.json +++ b/packages/tools-core/package.json @@ -1,7 +1,7 @@ { - "name": "@pr-impact/core", - "version": "0.2.1", - "description": "PR analysis engine — breaking changes, test coverage, doc staleness, impact graphs, and risk scoring", + "name": "@pr-impact/tools-core", + "version": "1.0.0", + "description": "Pure tool handler functions for git/repo operations — no framework dependency", "type": "module", "main": "./dist/index.js", "module": "./dist/index.js", @@ -16,31 +16,19 @@ "dist" ], "license": "MIT", - "publishConfig": { - 
"access": "public" - }, - "keywords": [ - "pr-impact", - "pull-request", - "breaking-changes", - "risk-score", - "code-analysis", - "static-analysis", - "impact-graph", - "test-coverage" - ], "engines": { "node": ">=20.0.0" }, + "publishConfig": { + "access": "public" + }, "repository": { "type": "git", "url": "https://github.com/ducdmdev/pr-impact.git", - "directory": "packages/core" + "directory": "packages/tools-core" }, "scripts": { "build": "tsup", - "test": "vitest run", - "test:watch": "vitest", "clean": "rm -rf dist" }, "dependencies": { @@ -50,7 +38,6 @@ "devDependencies": { "tsup": "^8.0.0", "typescript": "~5.7.0", - "vitest": "^3.0.0", "@types/node": "^22.0.0" } } diff --git a/packages/tools-core/src/index.ts b/packages/tools-core/src/index.ts new file mode 100644 index 0000000..67a6639 --- /dev/null +++ b/packages/tools-core/src/index.ts @@ -0,0 +1,22 @@ +export { gitDiff } from './tools/git-diff.js'; +export type { GitDiffParams, GitDiffResult } from './tools/git-diff.js'; + +export { readFileAtRef } from './tools/read-file.js'; +export type { ReadFileAtRefParams, ReadFileAtRefResult } from './tools/read-file.js'; + +export { listChangedFiles } from './tools/list-files.js'; +export type { + ListChangedFilesParams, + ListChangedFilesResult, + ChangedFileEntry, + FileStatus, +} from './tools/list-files.js'; + +export { searchCode } from './tools/search-code.js'; +export type { SearchCodeParams, SearchCodeResult, SearchMatch } from './tools/search-code.js'; + +export { findImporters, clearImporterCache } from './tools/find-imports.js'; +export type { FindImportersParams, FindImportersResult } from './tools/find-imports.js'; + +export { listTestFiles } from './tools/list-tests.js'; +export type { ListTestFilesParams, ListTestFilesResult } from './tools/list-tests.js'; diff --git a/packages/tools-core/src/tools/find-imports.ts b/packages/tools-core/src/tools/find-imports.ts new file mode 100644 index 0000000..cb6169d --- /dev/null +++ 
b/packages/tools-core/src/tools/find-imports.ts
@@ -0,0 +1,114 @@
+import fg from 'fast-glob';
+import { readFile } from 'fs/promises';
+import { relative, resolve, dirname } from 'path';
+
+export interface FindImportersParams {
+  repoPath?: string;
+  modulePath: string;
+}
+
+export interface FindImportersResult {
+  importers: string[];
+}
+
+const IMPORT_RE = /(?:import|export)\s+(?:[\s\S]*?\s+from\s+)?['"]([^'"]+)['"]/g;
+const DYNAMIC_IMPORT_RE = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
+const REQUIRE_RE = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
+const EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx'];
+
+// Session-level cache: maps repoPath -> reverse dependency map.
+// The reverse dep map maps a normalized module base -> list of importer relative paths.
+let cachedRepoPath: string | null = null;
+let cachedReverseMap: Map<string, string[]> | null = null;
+
+export function clearImporterCache(): void {
+  cachedRepoPath = null;
+  cachedReverseMap = null;
+}
+
+export async function findImporters(params: FindImportersParams): Promise<FindImportersResult> {
+  const repoPath = params.repoPath ?? process.cwd();
+  const targetModule = params.modulePath;
+
+  // Build or reuse cached reverse dependency map
+  if (cachedRepoPath !== repoPath || cachedReverseMap === null) {
+    cachedReverseMap = await buildReverseMap(repoPath);
+    cachedRepoPath = repoPath;
+  }
+
+  // Look up importers from the reverse map
+  const targetBase = normalizeModulePath(targetModule);
+  const importers = cachedReverseMap.get(targetBase) ??
[];
+
+  return { importers: [...importers] };
+}
+
+async function buildReverseMap(repoPath: string): Promise<Map<string, string[]>> {
+  const reverseMap = new Map<string, string[]>();
+
+  const absolutePaths = await fg('**/*.{ts,tsx,js,jsx}', {
+    cwd: repoPath,
+    ignore: ['**/node_modules/**', '**/dist/**', '**/.git/**'],
+    absolute: true,
+  });
+
+  for (const absPath of absolutePaths) {
+    const relPath = relative(repoPath, absPath);
+    let content: string;
+    try {
+      content = await readFile(absPath, 'utf-8');
+    } catch {
+      continue;
+    }
+
+    const importPaths = extractImports(content);
+    for (const importPath of importPaths) {
+      if (!importPath.startsWith('./') && !importPath.startsWith('../')) continue;
+
+      const resolvedBase = resolveAndNormalize(importPath, relPath);
+      if (resolvedBase === null) continue;
+
+      const existing = reverseMap.get(resolvedBase);
+      if (existing) {
+        if (!existing.includes(relPath)) {
+          existing.push(relPath);
+        }
+      } else {
+        reverseMap.set(resolvedBase, [relPath]);
+      }
+    }
+  }
+
+  return reverseMap;
+}
+
+function extractImports(content: string): string[] {
+  const paths: string[] = [];
+  for (const re of [IMPORT_RE, DYNAMIC_IMPORT_RE, REQUIRE_RE]) {
+    const pattern = new RegExp(re.source, re.flags);
+    let match: RegExpExecArray | null;
+    while ((match = pattern.exec(content)) !== null) {
+      paths.push(match[1]);
+    }
+  }
+  return paths;
+}
+
+function resolveAndNormalize(importPath: string, importerRelPath: string): string | null {
+  const importerDir = dirname(importerRelPath);
+  const resolved = resolve('/', importerDir, importPath).slice(1);
+  return normalizeModulePath(resolved);
+}
+
+function normalizeModulePath(modulePath: string): string {
+  // Strip leading slash if present
+  let normalized = modulePath.startsWith('/') ?
modulePath.slice(1) : modulePath;
+  // Strip known extensions for consistent lookup
+  for (const ext of EXTENSIONS) {
+    if (normalized.endsWith(ext)) {
+      normalized = normalized.slice(0, -ext.length);
+      break;
+    }
+  }
+  return normalized;
+}
diff --git a/packages/tools-core/src/tools/git-diff.ts b/packages/tools-core/src/tools/git-diff.ts
new file mode 100644
index 0000000..3698434
--- /dev/null
+++ b/packages/tools-core/src/tools/git-diff.ts
@@ -0,0 +1,22 @@
+import { simpleGit } from 'simple-git';
+
+export interface GitDiffParams {
+  repoPath?: string;
+  base: string;
+  head: string;
+  file?: string;
+}
+
+export interface GitDiffResult {
+  diff: string;
+}
+
+export async function gitDiff(params: GitDiffParams): Promise<GitDiffResult> {
+  const git = simpleGit(params.repoPath ?? process.cwd());
+  const args = [`${params.base}...${params.head}`];
+  if (params.file) {
+    args.push('--', params.file);
+  }
+  const diff = await git.diff(args);
+  return { diff };
+}
diff --git a/packages/tools-core/src/tools/list-files.ts b/packages/tools-core/src/tools/list-files.ts
new file mode 100644
index 0000000..db570c9
--- /dev/null
+++ b/packages/tools-core/src/tools/list-files.ts
@@ -0,0 +1,83 @@
+import { simpleGit } from 'simple-git';
+
+export interface ListChangedFilesParams {
+  repoPath?: string;
+  base: string;
+  head: string;
+}
+
+export type FileStatus = 'added' | 'modified' | 'deleted' | 'renamed' | 'copied';
+
+export interface ChangedFileEntry {
+  path: string;
+  status: FileStatus;
+  additions: number;
+  deletions: number;
+}
+
+export interface ListChangedFilesResult {
+  files: ChangedFileEntry[];
+  totalAdditions: number;
+  totalDeletions: number;
+}
+
+export async function listChangedFiles(params: ListChangedFilesParams): Promise<ListChangedFilesResult> {
+  const git = simpleGit(params.repoPath ??
process.cwd());
+  const range = `${params.base}...${params.head}`;
+
+  // Get file status (A/M/D/R/C) from --name-status
+  const nameStatusOutput = await git.diff(['--name-status', range]);
+  const statusMap = parseNameStatus(nameStatusOutput);
+
+  // Get line counts from diffSummary
+  const summary = await git.diffSummary([range]);
+
+  const files: ChangedFileEntry[] = summary.files.map((f) => ({
+    path: f.file,
+    status: statusMap.get(f.file) ?? 'modified',
+    additions: 'insertions' in f ? f.insertions : 0,
+    deletions: 'deletions' in f ? f.deletions : 0,
+  }));
+
+  return {
+    files,
+    totalAdditions: summary.insertions,
+    totalDeletions: summary.deletions,
+  };
+}
+
+function parseNameStatus(output: string): Map<string, FileStatus> {
+  const map = new Map<string, FileStatus>();
+  const lines = output.trim().split('\n').filter(Boolean);
+
+  for (const line of lines) {
+    const parts = line.split('\t');
+    if (parts.length < 2) continue;
+
+    const statusCode = parts[0].charAt(0);
+    let filePath: string;
+
+    if (statusCode === 'R' || statusCode === 'C') {
+      // Renamed/Copied: status\told-path\tnew-path
+      filePath = parts[2] ??
parts[1];
+    } else {
+      filePath = parts[1];
+    }
+
+    map.set(filePath, mapStatusCode(statusCode));
+  }
+
+  return map;
+}
+
+function mapStatusCode(code: string): FileStatus {
+  switch (code) {
+    case 'A': return 'added';
+    case 'D': return 'deleted';
+    case 'R': return 'renamed';
+    case 'C': return 'copied';
+    case 'M':
+    default:
+      return 'modified';
+  }
+}
diff --git a/packages/tools-core/src/tools/list-tests.ts b/packages/tools-core/src/tools/list-tests.ts
new file mode 100644
index 0000000..0c35041
--- /dev/null
+++ b/packages/tools-core/src/tools/list-tests.ts
@@ -0,0 +1,88 @@
+import fg from 'fast-glob';
+import { posix as path } from 'node:path';
+
+export interface ListTestFilesParams {
+  repoPath?: string;
+  sourceFile: string;
+}
+
+export interface ListTestFilesResult {
+  testFiles: string[];
+}
+
+const TEST_EXTENSIONS = ['.ts', '.tsx', '.js', '.jsx'] as const;
+
+export async function listTestFiles(params: ListTestFilesParams): Promise<ListTestFilesResult> {
+  const repoPath = params.repoPath ??
process.cwd(); + const candidates = buildCandidatePaths(params.sourceFile); + + if (candidates.length === 0) { + return { testFiles: [] }; + } + + const existing = await fg(candidates, { + cwd: repoPath, + dot: false, + onlyFiles: true, + }); + + return { testFiles: existing }; +} + +function buildCandidatePaths(sourceFile: string): string[] { + const normalized = sourceFile.replace(/\\/g, '/'); + const dir = path.dirname(normalized); + const ext = path.extname(normalized); + const base = path.basename(normalized, ext); + const subPath = stripLeadingSourceDir(normalized); + const subDir = path.dirname(subPath); + const candidates: string[] = []; + + // Find the package root by looking for the parent of src/ or lib/ + const packageRoot = getPackageRoot(normalized); + + for (const testExt of TEST_EXTENSIONS) { + // Sibling patterns + candidates.push(path.join(dir, `${base}.test${testExt}`)); + candidates.push(path.join(dir, `${base}.spec${testExt}`)); + + // __tests__ directory under source dir + const testsDir = path.join(dir, '__tests__'); + candidates.push(path.join(testsDir, `${base}${testExt}`)); + candidates.push(path.join(testsDir, `${base}.test${testExt}`)); + candidates.push(path.join(testsDir, `${base}.spec${testExt}`)); + + // __tests__ directory at package root (sibling to src/) + if (packageRoot && packageRoot !== dir) { + const rootTestsDir = path.join(packageRoot, '__tests__'); + candidates.push(path.join(rootTestsDir, `${base}${testExt}`)); + candidates.push(path.join(rootTestsDir, `${base}.test${testExt}`)); + candidates.push(path.join(rootTestsDir, `${base}.spec${testExt}`)); + } + + // Top-level test/tests directories + for (const topDir of ['test', 'tests']) { + candidates.push(path.join(topDir, subDir, `${base}${testExt}`)); + candidates.push(path.join(topDir, subDir, `${base}.test${testExt}`)); + candidates.push(path.join(topDir, subDir, `${base}.spec${testExt}`)); + } + } + + return [...new Set(candidates)]; +} + +function 
stripLeadingSourceDir(filePath: string): string {
+  const srcIndex = filePath.lastIndexOf('src/');
+  if (srcIndex !== -1) return filePath.slice(srcIndex + 4);
+  const libIndex = filePath.lastIndexOf('lib/');
+  if (libIndex !== -1) return filePath.slice(libIndex + 4);
+  return filePath;
+}
+
+function getPackageRoot(filePath: string): string | null {
+  const srcIndex = filePath.lastIndexOf('src/');
+  if (srcIndex !== -1) return filePath.slice(0, srcIndex).replace(/\/$/, '') || null;
+  const libIndex = filePath.lastIndexOf('lib/');
+  if (libIndex !== -1) return filePath.slice(0, libIndex).replace(/\/$/, '') || null;
+  return null;
+}
diff --git a/packages/tools-core/src/tools/read-file.ts b/packages/tools-core/src/tools/read-file.ts
new file mode 100644
index 0000000..cc1b386
--- /dev/null
+++ b/packages/tools-core/src/tools/read-file.ts
@@ -0,0 +1,17 @@
+import { simpleGit } from 'simple-git';
+
+export interface ReadFileAtRefParams {
+  repoPath?: string;
+  ref: string;
+  filePath: string;
+}
+
+export interface ReadFileAtRefResult {
+  content: string;
+}
+
+export async function readFileAtRef(params: ReadFileAtRefParams): Promise<ReadFileAtRefResult> {
+  const git = simpleGit(params.repoPath ?? process.cwd());
+  const content = await git.show([`${params.ref}:${params.filePath}`]);
+  return { content };
+}
diff --git a/packages/tools-core/src/tools/search-code.ts b/packages/tools-core/src/tools/search-code.ts
new file mode 100644
index 0000000..898cc23
--- /dev/null
+++ b/packages/tools-core/src/tools/search-code.ts
@@ -0,0 +1,64 @@
+import { simpleGit } from 'simple-git';
+
+export interface SearchCodeParams {
+  repoPath?: string;
+  pattern: string;
+  glob?: string;
+}
+
+export interface SearchMatch {
+  file: string;
+  line: number;
+  match: string;
+}
+
+export interface SearchCodeResult {
+  matches: SearchMatch[];
+}
+
+export async function searchCode(params: SearchCodeParams): Promise<SearchCodeResult> {
+  const git = simpleGit(params.repoPath ??
process.cwd()); + + // Build raw git grep command to properly support glob filtering. + // Using git.raw() instead of git.grep() because simple-git's grep() + // does not reliably pass glob path specs. + const args = ['grep', '-n', '--', params.pattern]; + if (params.glob) { + args.push(params.glob); + } + + let output: string; + try { + output = await git.raw(args); + } catch (error: unknown) { + // git grep exits with code 1 when no matches are found. + // simple-git wraps this as an error containing "exited with code 1". + // Other errors (e.g. not a git repo) should propagate. + const message = error instanceof Error ? error.message : String(error); + if (message.includes('exited with code 1')) { + return { matches: [] }; + } + throw error; + } + + const matches: SearchMatch[] = []; + const lines = output.trim().split('\n').filter(Boolean); + + for (const line of lines) { + // Format: file:line:content + const firstColon = line.indexOf(':'); + if (firstColon === -1) continue; + const secondColon = line.indexOf(':', firstColon + 1); + if (secondColon === -1) continue; + + const file = line.slice(0, firstColon); + const lineNum = parseInt(line.slice(firstColon + 1, secondColon), 10); + const matchText = line.slice(secondColon + 1); + + if (!isNaN(lineNum)) { + matches.push({ file, line: lineNum, match: matchText }); + } + } + + return { matches }; +} diff --git a/packages/mcp-server/tsconfig.json b/packages/tools-core/tsconfig.json similarity index 74% rename from packages/mcp-server/tsconfig.json rename to packages/tools-core/tsconfig.json index 2def9e0..57749d0 100644 --- a/packages/mcp-server/tsconfig.json +++ b/packages/tools-core/tsconfig.json @@ -5,5 +5,5 @@ "rootDir": "./src" }, "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] + "exclude": ["node_modules", "dist", "__tests__"] } diff --git a/packages/core/tsup.config.ts b/packages/tools-core/tsup.config.ts similarity index 100% rename from packages/core/tsup.config.ts rename to 
packages/tools-core/tsup.config.ts diff --git a/packages/mcp-server/vitest.config.ts b/packages/tools-core/vitest.config.ts similarity index 100% rename from packages/mcp-server/vitest.config.ts rename to packages/tools-core/vitest.config.ts diff --git a/packages/tools/CLAUDE.md b/packages/tools/CLAUDE.md new file mode 100644 index 0000000..77cdfed --- /dev/null +++ b/packages/tools/CLAUDE.md @@ -0,0 +1,30 @@ +# CLAUDE.md -- @pr-impact/tools + +## What this package does + +MCP server that wraps `@pr-impact/tools-core` handlers with zod input schemas and exposes them via the Model Context Protocol (stdio transport). + +## Quick commands + +```bash +pnpm build --filter=@pr-impact/tools # Build with tsup (ESM) +npx vitest run packages/tools # Run tests +``` + +## Source layout + +``` +src/ + index.ts -- Server entry point (StdioServerTransport, SIGINT/SIGTERM handling) + register.ts -- registerAllTools(): registers 6 tools with zod schemas on McpServer +``` + +## Key patterns + +- Tool names use snake_case (`git_diff`, `read_file_at_ref`, etc.) to match MCP conventions. +- Each tool wraps the corresponding tools-core function in a try/catch that returns `{ isError: true }` on failure. +- The server responds to JSON-RPC over stdin/stdout (newline-delimited). + +## Testing + +Tests in `__tests__/` mock `McpServer` (including `close()` method) and verify tool registration and SIGINT/SIGTERM cleanup. diff --git a/packages/tools/README.md b/packages/tools/README.md new file mode 100644 index 0000000..e265d5d --- /dev/null +++ b/packages/tools/README.md @@ -0,0 +1,40 @@ +# @pr-impact/tools + +MCP server that exposes git/repo tools for AI-powered PR impact analysis. Wraps `@pr-impact/tools-core` handlers with zod input schemas and the MCP protocol. 
+ +## Usage + +### With any MCP client + +```json +{ + "mcpServers": { + "pr-impact": { + "command": "npx", + "args": ["-y", "@pr-impact/tools"] + } + } +} +``` + +### Available Tools + +| Tool | Description | +|---|---| +| `git_diff` | Get raw git diff between two branches, optionally for a single file | +| `read_file_at_ref` | Read a file's content at a specific git ref | +| `list_changed_files` | List files changed between two branches with status and stats | +| `search_code` | Search for a regex pattern in the codebase | +| `find_importers` | Find files that import a given module | +| `list_test_files` | Find test files associated with a source file | + +## Architecture + +- `src/index.ts` -- MCP server entry point using stdio transport +- `src/register.ts` -- Tool registration with zod schemas + +The server uses `@modelcontextprotocol/sdk` for the MCP protocol and `zod` for input validation. + +## License + +[MIT](../../LICENSE) diff --git a/packages/tools/__tests__/build-scripts.test.ts b/packages/tools/__tests__/build-scripts.test.ts new file mode 100644 index 0000000..a069e03 --- /dev/null +++ b/packages/tools/__tests__/build-scripts.test.ts @@ -0,0 +1,68 @@ +import { describe, it, expect } from 'vitest'; +import { readFileSync } from 'fs'; +import { resolve, dirname } from 'path'; +import { fileURLToPath } from 'url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const rootDir = resolve(__dirname, '../../..'); + +describe('embed-templates.ts output', () => { + const generated = readFileSync( + resolve(rootDir, 'packages/action/src/generated/templates.ts'), + 'utf-8', + ); + + it('contains auto-generated header', () => { + expect(generated).toContain('// AUTO-GENERATED'); + expect(generated).toContain('do not edit manually'); + }); + + it('exports SYSTEM_PROMPT as a string', () => { + expect(generated).toMatch(/^export const SYSTEM_PROMPT = "/m); + }); + + it('exports REPORT_TEMPLATE as a string', () => { + 
expect(generated).toMatch(/^export const REPORT_TEMPLATE = "/m); + }); + + it('SYSTEM_PROMPT contains the 6 analysis steps', () => { + expect(generated).toContain('Step 1'); + expect(generated).toContain('Step 6'); + expect(generated).toContain('Risk Assessment'); + }); + + it('REPORT_TEMPLATE contains the report sections', () => { + expect(generated).toContain('PR Impact Report'); + expect(generated).toContain('Breaking Changes'); + expect(generated).toContain('Test Coverage Gaps'); + expect(generated).toContain('Risk Factor Breakdown'); + }); +}); + +describe('build-skill.ts output', () => { + const skillMd = readFileSync( + resolve(rootDir, 'packages/skill/skill.md'), + 'utf-8', + ); + + it('contains YAML frontmatter with skill metadata', () => { + expect(skillMd).toMatch(/^---\nname: pr-impact/); + expect(skillMd).toContain('description:'); + expect(skillMd).toContain('arguments:'); + }); + + it('contains the system prompt content', () => { + expect(skillMd).toContain('## Available Tools'); + expect(skillMd).toContain('## Analysis Steps'); + }); + + it('contains the task instruction', () => { + expect(skillMd).toContain('## Your Task'); + expect(skillMd).toContain('$ARGUMENTS'); + }); + + it('contains the report template', () => { + expect(skillMd).toContain('PR Impact Report'); + expect(skillMd).toContain('Risk Factor Breakdown'); + }); +}); diff --git a/packages/tools/__tests__/index.test.ts b/packages/tools/__tests__/index.test.ts new file mode 100644 index 0000000..1c4bfdb --- /dev/null +++ b/packages/tools/__tests__/index.test.ts @@ -0,0 +1,101 @@ +import { describe, it, expect, vi } from 'vitest'; + +vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => ({ + McpServer: vi.fn().mockImplementation(() => ({ + tool: vi.fn(), + connect: vi.fn(), + close: vi.fn(), + })), +})); + +vi.mock('@modelcontextprotocol/sdk/server/stdio.js', () => ({ + StdioServerTransport: vi.fn(), +})); + +// Mock all tools-core handlers +vi.mock('@pr-impact/tools-core', () => ({ + 
gitDiff: vi.fn().mockResolvedValue({ diff: 'mock diff' }),
+  readFileAtRef: vi.fn().mockResolvedValue({ content: 'mock content' }),
+  listChangedFiles: vi.fn().mockResolvedValue({ files: [], totalAdditions: 0, totalDeletions: 0 }),
+  searchCode: vi.fn().mockResolvedValue({ matches: [] }),
+  findImporters: vi.fn().mockResolvedValue({ importers: [] }),
+  listTestFiles: vi.fn().mockResolvedValue({ testFiles: [] }),
+  clearImporterCache: vi.fn(),
+}));
+
+import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
+
+describe('MCP server setup', () => {
+  it('registers all 6 tools on the server', async () => {
+    const mockInstance = {
+      tool: vi.fn(),
+      connect: vi.fn(),
+      close: vi.fn(),
+    };
+    vi.mocked(McpServer).mockImplementation(() => mockInstance as never);
+
+    const { registerAllTools } = await import('../src/register.js');
+    registerAllTools(mockInstance as never);
+
+    expect(mockInstance.tool).toHaveBeenCalledTimes(6);
+    const toolNames = mockInstance.tool.mock.calls.map((call: unknown[]) => call[0]);
+    expect(toolNames).toContain('git_diff');
+    expect(toolNames).toContain('read_file_at_ref');
+    expect(toolNames).toContain('list_changed_files');
+    expect(toolNames).toContain('search_code');
+    expect(toolNames).toContain('find_importers');
+    expect(toolNames).toContain('list_test_files');
+  });
+
+  it('tool handlers format results as MCP ToolResult', async () => {
+    const mockInstance = {
+      tool: vi.fn(),
+      connect: vi.fn(),
+      close: vi.fn(),
+    };
+    vi.mocked(McpServer).mockImplementation(() => mockInstance as never);
+
+    const { registerAllTools } = await import('../src/register.js');
+    registerAllTools(mockInstance as never);
+
+    // Find the git_diff handler and call it
+    const gitDiffCall = mockInstance.tool.mock.calls.find(
+      (call: unknown[]) => call[0] === 'git_diff',
+    );
+    expect(gitDiffCall).toBeDefined();
+
+    // The handler is the last argument (index 3)
+    const handler = gitDiffCall![3] as (params: Record<string, unknown>) => Promise<unknown>;
+    const result = await
handler({ base: 'main', head: 'HEAD' });
+
+    expect(result).toEqual({
+      content: [{ type: 'text', text: expect.stringContaining('mock diff') }],
+    });
+  });
+
+  it('tool handlers return isError on failure', async () => {
+    const { gitDiff } = await import('@pr-impact/tools-core');
+    vi.mocked(gitDiff).mockRejectedValueOnce(new Error('repo not found'));
+
+    const mockInstance = {
+      tool: vi.fn(),
+      connect: vi.fn(),
+      close: vi.fn(),
+    };
+    vi.mocked(McpServer).mockImplementation(() => mockInstance as never);
+
+    const { registerAllTools } = await import('../src/register.js');
+    registerAllTools(mockInstance as never);
+
+    const gitDiffCall = mockInstance.tool.mock.calls.find(
+      (call: unknown[]) => call[0] === 'git_diff',
+    );
+    const handler = gitDiffCall![3] as (params: Record<string, unknown>) => Promise<unknown>;
+    const result = await handler({ base: 'main', head: 'HEAD' });
+
+    expect(result).toEqual({
+      content: [{ type: 'text', text: expect.stringContaining('repo not found') }],
+      isError: true,
+    });
+  });
+});
diff --git a/packages/tools/__tests__/register.test.ts b/packages/tools/__tests__/register.test.ts
new file mode 100644
index 0000000..5d79594
--- /dev/null
+++ b/packages/tools/__tests__/register.test.ts
@@ -0,0 +1,163 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+
+vi.mock('@pr-impact/tools-core', () => ({
+  gitDiff: vi.fn(),
+  readFileAtRef: vi.fn(),
+  listChangedFiles: vi.fn(),
+  searchCode: vi.fn(),
+  findImporters: vi.fn(),
+  listTestFiles: vi.fn(),
+  clearImporterCache: vi.fn(),
+}));
+
+vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => ({
+  McpServer: vi.fn(),
+}));
+
+import {
+  gitDiff,
+  readFileAtRef,
+  listChangedFiles,
+  searchCode,
+  findImporters,
+  listTestFiles,
+} from '@pr-impact/tools-core';
+import { registerAllTools } from '../src/register.js';
+
+function createMockServer() {
+  return { tool: vi.fn(), connect: vi.fn(), close: vi.fn() };
+}
+
+function getHandler(mock: ReturnType<typeof createMockServer>, toolName: string) {
+  const call =
mock.tool.mock.calls.find((c: unknown[]) => c[0] === toolName);
+  if (!call) throw new Error(`Tool ${toolName} not registered`);
+  return call[3] as (params: Record<string, unknown>) => Promise<unknown>;
+}
+
+beforeEach(() => {
+  vi.clearAllMocks();
+});
+
+describe('registerAllTools', () => {
+  describe('git_diff handler', () => {
+    it('returns diff text on success', async () => {
+      vi.mocked(gitDiff).mockResolvedValue({ diff: 'diff --git a/file.ts' });
+      const server = createMockServer();
+      registerAllTools(server as never);
+
+      const result = await getHandler(server, 'git_diff')({ base: 'main', head: 'HEAD' });
+      expect(result).toEqual({
+        content: [{ type: 'text', text: 'diff --git a/file.ts' }],
+      });
+    });
+
+    it('returns isError on failure', async () => {
+      vi.mocked(gitDiff).mockRejectedValue(new Error('not a git repo'));
+      const server = createMockServer();
+      registerAllTools(server as never);
+
+      const result = await getHandler(server, 'git_diff')({ base: 'main', head: 'HEAD' });
+      expect(result).toEqual({
+        content: [{ type: 'text', text: 'Error: not a git repo' }],
+        isError: true,
+      });
+    });
+  });
+
+  describe('read_file_at_ref handler', () => {
+    it('returns file content on success', async () => {
+      vi.mocked(readFileAtRef).mockResolvedValue({ content: 'file content here' });
+      const server = createMockServer();
+      registerAllTools(server as never);
+
+      const result = await getHandler(server, 'read_file_at_ref')({
+        ref: 'main',
+        filePath: 'src/index.ts',
+      });
+      expect(result).toEqual({
+        content: [{ type: 'text', text: 'file content here' }],
+      });
+    });
+
+    it('returns isError when file not found', async () => {
+      vi.mocked(readFileAtRef).mockRejectedValue(new Error("path 'missing.ts' does not exist"));
+      const server = createMockServer();
+      registerAllTools(server as never);
+
+      const result = await getHandler(server, 'read_file_at_ref')({
+        ref: 'main',
+        filePath: 'missing.ts',
+      });
+      expect(result).toEqual({
+        content: [{ type: 'text', text:
expect.stringContaining('does not exist') }], + isError: true, + }); + }); + }); + + describe('list_changed_files handler', () => { + it('returns JSON-stringified result', async () => { + const mockFiles = { files: [{ path: 'a.ts', status: 'modified', additions: 5, deletions: 2 }] }; + vi.mocked(listChangedFiles).mockResolvedValue(mockFiles as never); + const server = createMockServer(); + registerAllTools(server as never); + + const result = await getHandler(server, 'list_changed_files')({ base: 'main', head: 'HEAD' }); + const text = (result as { content: Array<{ text: string }> }).content[0].text; + expect(JSON.parse(text)).toEqual(mockFiles); + }); + }); + + describe('search_code handler', () => { + it('returns JSON-stringified matches', async () => { + const mockResult = { matches: [{ file: 'a.ts', line: 10, match: 'hello' }] }; + vi.mocked(searchCode).mockResolvedValue(mockResult); + const server = createMockServer(); + registerAllTools(server as never); + + const result = await getHandler(server, 'search_code')({ pattern: 'hello' }); + const text = (result as { content: Array<{ text: string }> }).content[0].text; + expect(JSON.parse(text)).toEqual(mockResult); + }); + }); + + describe('find_importers handler', () => { + it('returns JSON-stringified importers', async () => { + const mockResult = { importers: ['src/index.ts', 'src/tools.ts'] }; + vi.mocked(findImporters).mockResolvedValue(mockResult); + const server = createMockServer(); + registerAllTools(server as never); + + const result = await getHandler(server, 'find_importers')({ modulePath: 'src/utils.ts' }); + const text = (result as { content: Array<{ text: string }> }).content[0].text; + expect(JSON.parse(text)).toEqual(mockResult); + }); + }); + + describe('list_test_files handler', () => { + it('returns JSON-stringified test files', async () => { + const mockResult = { testFiles: ['__tests__/foo.test.ts'] }; + vi.mocked(listTestFiles).mockResolvedValue(mockResult); + const server = 
createMockServer(); + registerAllTools(server as never); + + const result = await getHandler(server, 'list_test_files')({ sourceFile: 'src/foo.ts' }); + const text = (result as { content: Array<{ text: string }> }).content[0].text; + expect(JSON.parse(text)).toEqual(mockResult); + }); + }); + + describe('error handler edge cases', () => { + it('handles non-Error thrown values', async () => { + vi.mocked(gitDiff).mockRejectedValue('string error'); + const server = createMockServer(); + registerAllTools(server as never); + + const result = await getHandler(server, 'git_diff')({ base: 'main', head: 'HEAD' }); + expect(result).toEqual({ + content: [{ type: 'text', text: 'Error: string error' }], + isError: true, + }); + }); + }); +}); diff --git a/packages/mcp-server/package.json b/packages/tools/package.json similarity index 60% rename from packages/mcp-server/package.json rename to packages/tools/package.json index a8b06c7..57ec758 100644 --- a/packages/mcp-server/package.json +++ b/packages/tools/package.json @@ -1,7 +1,7 @@ { - "name": "@pr-impact/mcp-server", - "version": "0.2.1", - "description": "MCP server exposing pr-impact analysis tools to AI assistants", + "name": "@pr-impact/tools", + "version": "1.0.0", + "description": "MCP server providing git/repo tools for AI-powered PR impact analysis", "type": "module", "main": "./dist/index.js", "exports": { @@ -10,21 +10,12 @@ } }, "bin": { - "pr-impact-mcp": "./dist/index.js" + "pr-impact-tools": "./dist/index.js" }, "files": [ "dist" ], "license": "MIT", - "keywords": [ - "mcp", - "model-context-protocol", - "pr-impact", - "pull-request", - "breaking-changes", - "risk-score", - "code-analysis" - ], "engines": { "node": ">=20.0.0" }, @@ -34,14 +25,14 @@ "repository": { "type": "git", "url": "https://github.com/ducdmdev/pr-impact.git", - "directory": "packages/mcp-server" + "directory": "packages/tools" }, "scripts": { "build": "tsup", "clean": "rm -rf dist" }, "dependencies": { - "@pr-impact/core": "workspace:*", 
+ "@pr-impact/tools-core": "workspace:*", "@modelcontextprotocol/sdk": "^1.12.0", "zod": "^3.24.0" }, diff --git a/packages/tools/src/index.ts b/packages/tools/src/index.ts new file mode 100644 index 0000000..db5db75 --- /dev/null +++ b/packages/tools/src/index.ts @@ -0,0 +1,25 @@ +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; +import { registerAllTools } from './register.js'; + +const server = new McpServer({ + name: 'pr-impact-tools', + version: '1.0.0', +}); + +registerAllTools(server); + +async function main() { + const transport = new StdioServerTransport(); + await server.connect(transport); + + const shutdown = async () => { + await server.close(); + process.exit(0); + }; + + process.on('SIGINT', () => void shutdown()); + process.on('SIGTERM', () => void shutdown()); +} + +main().catch(console.error); diff --git a/packages/tools/src/register.ts b/packages/tools/src/register.ts new file mode 100644 index 0000000..30344d7 --- /dev/null +++ b/packages/tools/src/register.ts @@ -0,0 +1,134 @@ +import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; +import { z } from 'zod'; +import { + gitDiff, + readFileAtRef, + listChangedFiles, + searchCode, + findImporters, + listTestFiles, +} from '@pr-impact/tools-core'; + +interface ToolResult { + [key: string]: unknown; + content: Array<{ type: 'text'; text: string }>; + isError?: boolean; +} + +function success(text: string): ToolResult { + return { content: [{ type: 'text', text }] }; +} + +function error(err: unknown): ToolResult { + const message = err instanceof Error ? 
err.message : String(err); + return { content: [{ type: 'text', text: `Error: ${message}` }], isError: true }; +} + +export function registerAllTools(server: McpServer): void { + server.tool( + 'git_diff', + 'Get the raw git diff between two branches, optionally for a single file', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + base: z.string().describe('Base branch or ref'), + head: z.string().describe('Head branch or ref'), + file: z.string().optional().describe('Optional file path to get diff for a single file'), + }, + async (params) => { + try { + const result = await gitDiff(params); + return success(result.diff); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'read_file_at_ref', + 'Read a file content at a specific git ref (branch or commit)', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + ref: z.string().describe('Git ref (branch name, commit SHA, or tag)'), + filePath: z.string().describe('Repo-relative file path'), + }, + async (params) => { + try { + const result = await readFileAtRef(params); + return success(result.content); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'list_changed_files', + 'List all files changed between two branches with status and addition/deletion stats', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + base: z.string().describe('Base branch or ref'), + head: z.string().describe('Head branch or ref'), + }, + async (params) => { + try { + const result = await listChangedFiles(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'search_code', + 'Search for a regex pattern across the codebase using git grep', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + pattern: z.string().describe('Regex pattern to search for'), + glob: 
z.string().optional().describe('File glob to limit search scope (e.g. "*.md")'), + }, + async (params) => { + try { + const result = await searchCode(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'find_importers', + 'Find all source files that import a given module path', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + modulePath: z.string().describe('Repo-relative path of the module to find importers for'), + }, + async (params) => { + try { + const result = await findImporters(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); + + server.tool( + 'list_test_files', + 'Find test files associated with a source file using naming conventions', + { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + sourceFile: z.string().describe('Repo-relative path of the source file'), + }, + async (params) => { + try { + const result = await listTestFiles(params); + return success(JSON.stringify(result, null, 2)); + } catch (err) { + return error(err); + } + }, + ); +} diff --git a/packages/core/tsconfig.json b/packages/tools/tsconfig.json similarity index 69% rename from packages/core/tsconfig.json rename to packages/tools/tsconfig.json index bf551b9..57749d0 100644 --- a/packages/core/tsconfig.json +++ b/packages/tools/tsconfig.json @@ -5,5 +5,5 @@ "rootDir": "./src" }, "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist", "__tests__", "__fixtures__"] + "exclude": ["node_modules", "dist", "__tests__"] } diff --git a/packages/cli/tsup.config.ts b/packages/tools/tsup.config.ts similarity index 100% rename from packages/cli/tsup.config.ts rename to packages/tools/tsup.config.ts diff --git a/packages/tools/vitest.config.ts b/packages/tools/vitest.config.ts new file mode 100644 index 0000000..168aefc --- /dev/null +++ b/packages/tools/vitest.config.ts @@ -0,0 
+1,7 @@ +import { defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + include: ['__tests__/**/*.test.ts'], + }, +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0cea8ad..cc8e4db 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -22,13 +22,13 @@ importers: version: 22.19.10 '@vitest/coverage-v8': specifier: ^3.0.0 - version: 3.2.4(vitest@3.2.4(@types/node@22.19.10)) + version: 3.2.4(vitest@3.2.4(@types/node@22.19.10)(tsx@4.21.0)) eslint: specifier: ^9.39.2 version: 9.39.2 eslint-plugin-vitest: specifier: ^0.5.4 - version: 0.5.4(eslint@9.39.2)(typescript@5.7.3)(vitest@3.2.4(@types/node@22.19.10)) + version: 0.5.4(eslint@9.39.2)(typescript@5.7.3)(vitest@3.2.4(@types/node@22.19.10)(tsx@4.21.0)) turbo: specifier: ^2.0.0 version: 2.8.3 @@ -40,83 +40,107 @@ importers: version: 8.55.0(eslint@9.39.2)(typescript@5.7.3) vitest: specifier: ^3.0.0 - version: 3.2.4(@types/node@22.19.10) - - packages/cli: - dependencies: - '@pr-impact/core': + version: 3.2.4(@types/node@22.19.10)(tsx@4.21.0) + + packages/action: + dependencies: + '@actions/core': + specifier: ^1.11.0 + version: 1.11.1 + '@actions/github': + specifier: ^6.0.0 + version: 6.0.1 + '@anthropic-ai/sdk': + specifier: ^0.39.0 + version: 0.39.0 + '@pr-impact/tools-core': specifier: workspace:* - version: link:../core - chalk: - specifier: ^5.4.0 - version: 5.6.2 - commander: - specifier: ^13.0.0 - version: 13.1.0 - ora: - specifier: ^8.0.0 - version: 8.2.0 + version: link:../tools-core devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.10 tsup: specifier: ^8.0.0 - version: 8.5.1(postcss@8.5.6)(typescript@5.7.3) + version: 8.5.1(postcss@8.5.6)(tsx@4.21.0)(typescript@5.7.3) + tsx: + specifier: ^4.0.0 + version: 4.21.0 typescript: specifier: ~5.7.0 version: 5.7.3 - packages/core: + packages/skill: + devDependencies: + tsx: + specifier: ^4.0.0 + version: 4.21.0 + + packages/tools: dependencies: - fast-glob: - specifier: ^3.3.0 - version: 3.3.3 - simple-git: - specifier: 
^3.27.0 - version: 3.30.0 + '@modelcontextprotocol/sdk': + specifier: ^1.12.0 + version: 1.26.0(zod@3.25.76) + '@pr-impact/tools-core': + specifier: workspace:* + version: link:../tools-core + zod: + specifier: ^3.24.0 + version: 3.25.76 devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.10 tsup: specifier: ^8.0.0 - version: 8.5.1(postcss@8.5.6)(typescript@5.7.3) + version: 8.5.1(postcss@8.5.6)(tsx@4.21.0)(typescript@5.7.3) typescript: specifier: ~5.7.0 version: 5.7.3 - vitest: - specifier: ^3.0.0 - version: 3.2.4(@types/node@22.19.10) - packages/mcp-server: + packages/tools-core: dependencies: - '@modelcontextprotocol/sdk': - specifier: ^1.12.0 - version: 1.26.0(zod@3.25.76) - '@pr-impact/core': - specifier: workspace:* - version: link:../core - zod: - specifier: ^3.24.0 - version: 3.25.76 + fast-glob: + specifier: ^3.3.0 + version: 3.3.3 + simple-git: + specifier: ^3.27.0 + version: 3.30.0 devDependencies: '@types/node': specifier: ^22.0.0 version: 22.19.10 tsup: specifier: ^8.0.0 - version: 8.5.1(postcss@8.5.6)(typescript@5.7.3) + version: 8.5.1(postcss@8.5.6)(tsx@4.21.0)(typescript@5.7.3) typescript: specifier: ~5.7.0 version: 5.7.3 packages: + '@actions/core@1.11.1': + resolution: {integrity: sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==} + + '@actions/exec@1.1.1': + resolution: {integrity: sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==} + + '@actions/github@6.0.1': + resolution: {integrity: sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw==} + + '@actions/http-client@2.2.3': + resolution: {integrity: sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==} + + '@actions/io@1.1.3': + resolution: {integrity: sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==} + '@ampproject/remapping@2.3.0': resolution: {integrity: 
sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} + '@anthropic-ai/sdk@0.39.0': + resolution: {integrity: sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==} + '@babel/helper-string-parser@7.27.1': resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} engines: {node: '>=6.9.0'} @@ -391,6 +415,10 @@ packages: resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@fastify/busboy@2.1.1': + resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} + engines: {node: '>=14'} + '@hono/node-server@1.19.9': resolution: {integrity: sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==} engines: {node: '>=18.14.1'} @@ -477,6 +505,54 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} + '@octokit/auth-token@4.0.0': + resolution: {integrity: sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==} + engines: {node: '>= 18'} + + '@octokit/core@5.2.2': + resolution: {integrity: sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==} + engines: {node: '>= 18'} + + '@octokit/endpoint@9.0.6': + resolution: {integrity: sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==} + engines: {node: '>= 18'} + + '@octokit/graphql@7.1.1': + resolution: {integrity: sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==} + engines: {node: '>= 18'} + + '@octokit/openapi-types@20.0.0': + resolution: {integrity: 
sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==} + + '@octokit/openapi-types@24.2.0': + resolution: {integrity: sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==} + + '@octokit/plugin-paginate-rest@9.2.2': + resolution: {integrity: sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '5' + + '@octokit/plugin-rest-endpoint-methods@10.4.1': + resolution: {integrity: sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '5' + + '@octokit/request-error@5.1.1': + resolution: {integrity: sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==} + engines: {node: '>= 18'} + + '@octokit/request@8.4.1': + resolution: {integrity: sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==} + engines: {node: '>= 18'} + + '@octokit/types@12.6.0': + resolution: {integrity: sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==} + + '@octokit/types@13.10.0': + resolution: {integrity: sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==} + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -624,9 +700,15 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + '@types/node-fetch@2.6.13': + resolution: {integrity: sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==} + '@types/node@12.20.55': resolution: {integrity: 
sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + '@types/node@18.19.130': + resolution: {integrity: sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==} + '@types/node@22.19.10': resolution: {integrity: sha512-tF5VOugLS/EuDlTBijk0MqABfP8UxgYazTLo3uIn3b4yJgg26QRbVYJYsDtHrjdDUIRfP70+VfhTTc+CE1yskw==} @@ -754,6 +836,10 @@ packages: '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + accepts@2.0.0: resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} engines: {node: '>= 0.6'} @@ -768,6 +854,10 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + agentkeepalive@4.6.0: + resolution: {integrity: sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==} + engines: {node: '>= 8.0.0'} + ajv-formats@3.0.1: resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==} peerDependencies: @@ -822,9 +912,15 @@ packages: ast-v8-to-istanbul@0.3.11: resolution: {integrity: sha512-Qya9fkoofMjCBNVdWINMjB5KZvkYfaO9/anwkWnjxibpWUxo5iHl2sOdP7/uAqaRuUYuoo8rDwnbaaKVFxoUvw==} + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + before-after-hook@2.2.3: + resolution: {integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==} + better-path-resolve@1.0.0: resolution: {integrity: 
sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} engines: {node: '>=4'} @@ -877,10 +973,6 @@ packages: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} - chalk@5.6.2: - resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - chardet@2.1.1: resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} @@ -896,14 +988,6 @@ packages: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} - cli-cursor@5.0.0: - resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} - engines: {node: '>=18'} - - cli-spinners@2.9.2: - resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} - engines: {node: '>=6'} - color-convert@2.0.1: resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} engines: {node: '>=7.0.0'} @@ -911,9 +995,9 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - commander@13.1.0: - resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==} - engines: {node: '>=18'} + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} commander@4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} @@ -969,10 +1053,17 @@ packages: 
deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + depd@2.0.0: resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} engines: {node: '>= 0.8'} + deprecation@2.3.1: + resolution: {integrity: sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==} + detect-indent@6.1.0: resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} engines: {node: '>=8'} @@ -991,9 +1082,6 @@ packages: ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - emoji-regex@10.6.0: - resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==} - emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -1023,6 +1111,10 @@ packages: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + esbuild@0.27.3: resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} engines: {node: '>=18'} @@ -1102,6 +1194,10 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} + event-target-shim@5.0.1: + resolution: {integrity: 
sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + eventsource-parser@3.0.6: resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} engines: {node: '>=18.0.0'} @@ -1189,6 +1285,17 @@ packages: resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} engines: {node: '>=14'} + form-data-encoder@1.7.2: + resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==} + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + formdata-node@4.4.1: + resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==} + engines: {node: '>= 12.20'} + forwarded@0.2.0: resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} engines: {node: '>= 0.6'} @@ -1213,10 +1320,6 @@ packages: function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} - get-east-asian-width@1.4.0: - resolution: {integrity: sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==} - engines: {node: '>=18'} - get-intrinsic@1.3.0: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} engines: {node: '>= 0.4'} @@ -1225,6 +1328,9 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} + get-tsconfig@4.13.6: + resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==} + 
glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} @@ -1261,6 +1367,10 @@ packages: resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} engines: {node: '>= 0.4'} + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} @@ -1280,6 +1390,9 @@ packages: resolution: {integrity: sha512-tsYlhAYpjCKa//8rXZ9DqKEawhPoSytweBC2eNvcaDK+57RZLHGqNs3PZTQO6yekLFSuvA6AlnAfrw1uBvtb+Q==} hasBin: true + humanize-ms@1.2.1: + resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + iconv-lite@0.7.2: resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} engines: {node: '>=0.10.0'} @@ -1323,10 +1436,6 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} - is-interactive@2.0.0: - resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} - engines: {node: '>=12'} - is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} @@ -1338,14 +1447,6 @@ packages: resolution: {integrity: sha512-2AT6j+gXe/1ueqbW6fLZJiIw3F8iXGJtt0yDrZaBhAZEG1raiTxKWU+IPqMCzQAXOUCKdA4UDMgacKH25XG2Cw==} engines: {node: '>=4'} - is-unicode-supported@1.3.0: - resolution: {integrity: 
sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} - - is-unicode-supported@2.1.0: - resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} - engines: {node: '>=18'} - is-windows@1.0.2: resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} engines: {node: '>=0.10.0'} @@ -1443,10 +1544,6 @@ packages: lodash.startcase@4.4.0: resolution: {integrity: sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==} - log-symbols@6.0.0: - resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} - engines: {node: '>=18'} - loupe@3.2.1: resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==} @@ -1483,18 +1580,22 @@ packages: resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + mime-db@1.54.0: resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} engines: {node: '>= 0.6'} + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + mime-types@3.0.2: resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} engines: {node: '>=18'} - mimic-function@5.0.1: - resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} - engines: {node: '>=18'} - minimatch@3.1.2: 
resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} @@ -1531,6 +1632,20 @@ packages: resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} engines: {node: '>= 0.6'} + node-domexception@1.0.0: + resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} + engines: {node: '>=10.5.0'} + deprecated: Use your platform's native DOMException instead + + node-fetch@2.7.0: + resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + object-assign@4.1.1: resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} engines: {node: '>=0.10.0'} @@ -1546,18 +1661,10 @@ packages: once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} - onetime@7.0.0: - resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} - engines: {node: '>=18'} - optionator@0.9.4: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} - ora@8.2.0: - resolution: {integrity: sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==} - engines: {node: '>=18'} - outdent@0.5.0: resolution: {integrity: sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q==} @@ -1732,9 +1839,8 @@ packages: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} - restore-cursor@5.1.0: - 
resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} - engines: {node: '>=18'} + resolve-pkg-maps@1.0.0: + resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} @@ -1833,10 +1939,6 @@ packages: std-env@3.10.0: resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} - stdin-discarder@0.2.2: - resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==} - engines: {node: '>=18'} - string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -1845,10 +1947,6 @@ packages: resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} engines: {node: '>=12'} - string-width@7.2.0: - resolution: {integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==} - engines: {node: '>=18'} - strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -1922,6 +2020,9 @@ packages: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} + tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + tree-kill@1.2.2: resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} hasBin: true @@ -1960,6 +2061,15 @@ packages: typescript: optional: true + tsx@4.21.0: + 
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==} + engines: {node: '>=18.0.0'} + hasBin: true + + tunnel@0.0.6: + resolution: {integrity: sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==} + engines: {node: '>=0.6.11 <=0.7.0 || >=0.7.3'} + turbo-darwin-64@2.8.3: resolution: {integrity: sha512-4kXRLfcygLOeNcP6JquqRLmGB/ATjjfehiojL2dJkL7GFm3SPSXbq7oNj8UbD8XriYQ5hPaSuz59iF1ijPHkTw==} cpu: [x64] @@ -2017,9 +2127,19 @@ packages: ufo@1.6.3: resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==} + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici@5.29.0: + resolution: {integrity: sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==} + engines: {node: '>=14.0'} + + universal-user-agent@6.0.1: + resolution: {integrity: sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==} + universalify@0.1.2: resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} engines: {node: '>= 4.0.0'} @@ -2108,6 +2228,16 @@ packages: jsdom: optional: true + web-streams-polyfill@4.0.0-beta.3: + resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} + engines: {node: '>= 14'} + + webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + + whatwg-url@5.0.0: + resolution: {integrity: 
sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} @@ -2147,11 +2277,49 @@ packages: snapshots: + '@actions/core@1.11.1': + dependencies: + '@actions/exec': 1.1.1 + '@actions/http-client': 2.2.3 + + '@actions/exec@1.1.1': + dependencies: + '@actions/io': 1.1.3 + + '@actions/github@6.0.1': + dependencies: + '@actions/http-client': 2.2.3 + '@octokit/core': 5.2.2 + '@octokit/plugin-paginate-rest': 9.2.2(@octokit/core@5.2.2) + '@octokit/plugin-rest-endpoint-methods': 10.4.1(@octokit/core@5.2.2) + '@octokit/request': 8.4.1 + '@octokit/request-error': 5.1.1 + undici: 5.29.0 + + '@actions/http-client@2.2.3': + dependencies: + tunnel: 0.0.6 + undici: 5.29.0 + + '@actions/io@1.1.3': {} + '@ampproject/remapping@2.3.0': dependencies: '@jridgewell/gen-mapping': 0.3.13 '@jridgewell/trace-mapping': 0.3.31 + '@anthropic-ai/sdk@0.39.0': + dependencies: + '@types/node': 18.19.130 + '@types/node-fetch': 2.6.13 + abort-controller: 3.0.0 + agentkeepalive: 4.6.0 + form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.7.0 + transitivePeerDependencies: + - encoding + '@babel/helper-string-parser@7.27.1': {} '@babel/helper-validator-identifier@7.28.5': {} @@ -2437,6 +2605,8 @@ snapshots: '@eslint/core': 0.17.0 levn: 0.4.1 + '@fastify/busboy@2.1.1': {} + '@hono/node-server@1.19.9(hono@4.11.9)': dependencies: hono: 4.11.9 @@ -2542,6 +2712,64 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.20.1 + '@octokit/auth-token@4.0.0': {} + + '@octokit/core@5.2.2': + dependencies: + '@octokit/auth-token': 4.0.0 + '@octokit/graphql': 7.1.1 + '@octokit/request': 8.4.1 + '@octokit/request-error': 5.1.1 + '@octokit/types': 13.10.0 + before-after-hook: 2.2.3 + universal-user-agent: 6.0.1 + + '@octokit/endpoint@9.0.6': + dependencies: + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 
+ + '@octokit/graphql@7.1.1': + dependencies: + '@octokit/request': 8.4.1 + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 + + '@octokit/openapi-types@20.0.0': {} + + '@octokit/openapi-types@24.2.0': {} + + '@octokit/plugin-paginate-rest@9.2.2(@octokit/core@5.2.2)': + dependencies: + '@octokit/core': 5.2.2 + '@octokit/types': 12.6.0 + + '@octokit/plugin-rest-endpoint-methods@10.4.1(@octokit/core@5.2.2)': + dependencies: + '@octokit/core': 5.2.2 + '@octokit/types': 12.6.0 + + '@octokit/request-error@5.1.1': + dependencies: + '@octokit/types': 13.10.0 + deprecation: 2.3.1 + once: 1.4.0 + + '@octokit/request@8.4.1': + dependencies: + '@octokit/endpoint': 9.0.6 + '@octokit/request-error': 5.1.1 + '@octokit/types': 13.10.0 + universal-user-agent: 6.0.1 + + '@octokit/types@12.6.0': + dependencies: + '@octokit/openapi-types': 20.0.0 + + '@octokit/types@13.10.0': + dependencies: + '@octokit/openapi-types': 24.2.0 + '@pkgjs/parseargs@0.11.0': optional: true @@ -2641,8 +2869,17 @@ snapshots: '@types/json-schema@7.0.15': {} + '@types/node-fetch@2.6.13': + dependencies: + '@types/node': 22.19.10 + form-data: 4.0.5 + '@types/node@12.20.55': {} + '@types/node@18.19.130': + dependencies: + undici-types: 5.26.5 + '@types/node@22.19.10': dependencies: undici-types: 6.21.0 @@ -2776,7 +3013,7 @@ snapshots: '@typescript-eslint/types': 8.55.0 eslint-visitor-keys: 4.2.1 - '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/node@22.19.10))': + '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/node@22.19.10)(tsx@4.21.0))': dependencies: '@ampproject/remapping': 2.3.0 '@bcoe/v8-coverage': 1.0.2 @@ -2791,7 +3028,7 @@ snapshots: std-env: 3.10.0 test-exclude: 7.0.1 tinyrainbow: 2.0.0 - vitest: 3.2.4(@types/node@22.19.10) + vitest: 3.2.4(@types/node@22.19.10)(tsx@4.21.0) transitivePeerDependencies: - supports-color @@ -2803,13 +3040,13 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@22.19.10))': + 
'@vitest/mocker@3.2.4(vite@7.3.1(@types/node@22.19.10)(tsx@4.21.0))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.3.1(@types/node@22.19.10) + vite: 7.3.1(@types/node@22.19.10)(tsx@4.21.0) '@vitest/pretty-format@3.2.4': dependencies: @@ -2837,6 +3074,10 @@ snapshots: loupe: 3.2.1 tinyrainbow: 2.0.0 + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + accepts@2.0.0: dependencies: mime-types: 3.0.2 @@ -2848,6 +3089,10 @@ snapshots: acorn@8.15.0: {} + agentkeepalive@4.6.0: + dependencies: + humanize-ms: 1.2.1 + ajv-formats@3.0.1(ajv@8.17.1): optionalDependencies: ajv: 8.17.1 @@ -2896,8 +3141,12 @@ snapshots: estree-walker: 3.0.3 js-tokens: 10.0.0 + asynckit@0.4.0: {} + balanced-match@1.0.2: {} + before-after-hook@2.2.3: {} + better-path-resolve@1.0.0: dependencies: is-windows: 1.0.2 @@ -2963,8 +3212,6 @@ snapshots: ansi-styles: 4.3.0 supports-color: 7.2.0 - chalk@5.6.2: {} - chardet@2.1.1: {} check-error@2.1.3: {} @@ -2975,19 +3222,15 @@ snapshots: ci-info@3.9.0: {} - cli-cursor@5.0.0: - dependencies: - restore-cursor: 5.1.0 - - cli-spinners@2.9.2: {} - color-convert@2.0.1: dependencies: color-name: 1.1.4 color-name@1.1.4: {} - commander@13.1.0: {} + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 commander@4.1.1: {} @@ -3024,8 +3267,12 @@ snapshots: deep-is@0.1.4: {} + delayed-stream@1.0.0: {} + depd@2.0.0: {} + deprecation@2.3.1: {} + detect-indent@6.1.0: {} dir-glob@3.0.1: @@ -3042,8 +3289,6 @@ snapshots: ee-first@1.1.1: {} - emoji-regex@10.6.0: {} - emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} @@ -3065,6 +3310,13 @@ snapshots: dependencies: es-errors: 1.3.0 + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + esbuild@0.27.3: optionalDependencies: '@esbuild/aix-ppc64': 0.27.3 @@ -3098,12 +3350,12 @@ snapshots: escape-string-regexp@4.0.0: {} - 
eslint-plugin-vitest@0.5.4(eslint@9.39.2)(typescript@5.7.3)(vitest@3.2.4(@types/node@22.19.10)): + eslint-plugin-vitest@0.5.4(eslint@9.39.2)(typescript@5.7.3)(vitest@3.2.4(@types/node@22.19.10)(tsx@4.21.0)): dependencies: '@typescript-eslint/utils': 7.18.0(eslint@9.39.2)(typescript@5.7.3) eslint: 9.39.2 optionalDependencies: - vitest: 3.2.4(@types/node@22.19.10) + vitest: 3.2.4(@types/node@22.19.10)(tsx@4.21.0) transitivePeerDependencies: - supports-color - typescript @@ -3182,6 +3434,8 @@ snapshots: etag@1.8.1: {} + event-target-shim@5.0.1: {} + eventsource-parser@3.0.6: {} eventsource@3.0.7: @@ -3301,6 +3555,21 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 + form-data-encoder@1.7.2: {} + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + formdata-node@4.4.1: + dependencies: + node-domexception: 1.0.0 + web-streams-polyfill: 4.0.0-beta.3 + forwarded@0.2.0: {} fresh@2.0.0: {} @@ -3322,8 +3591,6 @@ snapshots: function-bind@1.1.2: {} - get-east-asian-width@1.4.0: {} - get-intrinsic@1.3.0: dependencies: call-bind-apply-helpers: 1.0.2 @@ -3342,6 +3609,10 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 + get-tsconfig@4.13.6: + dependencies: + resolve-pkg-maps: 1.0.0 + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -3378,6 +3649,10 @@ snapshots: has-symbols@1.1.0: {} + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + hasown@2.0.2: dependencies: function-bind: 1.1.2 @@ -3396,6 +3671,10 @@ snapshots: human-id@4.1.3: {} + humanize-ms@1.2.1: + dependencies: + ms: 2.1.3 + iconv-lite@0.7.2: dependencies: safer-buffer: 2.1.2 @@ -3425,8 +3704,6 @@ snapshots: dependencies: is-extglob: 2.1.1 - is-interactive@2.0.0: {} - is-number@7.0.0: {} is-promise@4.0.0: {} @@ -3435,10 +3712,6 @@ snapshots: dependencies: better-path-resolve: 1.0.0 - is-unicode-supported@1.3.0: {} - - is-unicode-supported@2.1.0: {} - is-windows@1.0.2: {} isexe@2.0.0: {} @@ -3528,11 
+3801,6 @@ snapshots: lodash.startcase@4.4.0: {} - log-symbols@6.0.0: - dependencies: - chalk: 5.6.2 - is-unicode-supported: 1.3.0 - loupe@3.2.1: {} lru-cache@10.4.3: {} @@ -3564,14 +3832,18 @@ snapshots: braces: 3.0.3 picomatch: 2.3.1 + mime-db@1.52.0: {} + mime-db@1.54.0: {} + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + mime-types@3.0.2: dependencies: mime-db: 1.54.0 - mimic-function@5.0.1: {} - minimatch@3.1.2: dependencies: brace-expansion: 1.1.12 @@ -3605,6 +3877,12 @@ snapshots: negotiator@1.0.0: {} + node-domexception@1.0.0: {} + + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + object-assign@4.1.1: {} object-inspect@1.13.4: {} @@ -3617,10 +3895,6 @@ snapshots: dependencies: wrappy: 1.0.2 - onetime@7.0.0: - dependencies: - mimic-function: 5.0.1 - optionator@0.9.4: dependencies: deep-is: 0.1.4 @@ -3630,18 +3904,6 @@ snapshots: type-check: 0.4.0 word-wrap: 1.2.5 - ora@8.2.0: - dependencies: - chalk: 5.6.2 - cli-cursor: 5.0.0 - cli-spinners: 2.9.2 - is-interactive: 2.0.0 - is-unicode-supported: 2.1.0 - log-symbols: 6.0.0 - stdin-discarder: 0.2.2 - string-width: 7.2.0 - strip-ansi: 7.1.2 - outdent@0.5.0: {} p-filter@2.1.0: @@ -3715,11 +3977,12 @@ snapshots: mlly: 1.8.0 pathe: 2.0.3 - postcss-load-config@6.0.1(postcss@8.5.6): + postcss-load-config@6.0.1(postcss@8.5.6)(tsx@4.21.0): dependencies: lilconfig: 3.1.3 optionalDependencies: postcss: 8.5.6 + tsx: 4.21.0 postcss@8.5.6: dependencies: @@ -3770,10 +4033,7 @@ snapshots: resolve-from@5.0.0: {} - restore-cursor@5.1.0: - dependencies: - onetime: 7.0.0 - signal-exit: 4.1.0 + resolve-pkg-maps@1.0.0: {} reusify@1.1.0: {} @@ -3918,8 +4178,6 @@ snapshots: std-env@3.10.0: {} - stdin-discarder@0.2.2: {} - string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -3932,12 +4190,6 @@ snapshots: emoji-regex: 9.2.2 strip-ansi: 7.1.2 - string-width@7.2.0: - dependencies: - emoji-regex: 10.6.0 - get-east-asian-width: 1.4.0 - strip-ansi: 7.1.2 - strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -4005,6 
+4257,8 @@ snapshots: toidentifier@1.0.1: {} + tr46@0.0.3: {} + tree-kill@1.2.2: {} ts-api-utils@1.4.3(typescript@5.7.3): @@ -4017,7 +4271,7 @@ snapshots: ts-interface-checker@0.1.13: {} - tsup@8.5.1(postcss@8.5.6)(typescript@5.7.3): + tsup@8.5.1(postcss@8.5.6)(tsx@4.21.0)(typescript@5.7.3): dependencies: bundle-require: 5.1.0(esbuild@0.27.3) cac: 6.7.14 @@ -4028,7 +4282,7 @@ snapshots: fix-dts-default-cjs-exports: 1.0.1 joycon: 3.1.1 picocolors: 1.1.1 - postcss-load-config: 6.0.1(postcss@8.5.6) + postcss-load-config: 6.0.1(postcss@8.5.6)(tsx@4.21.0) resolve-from: 5.0.0 rollup: 4.57.1 source-map: 0.7.6 @@ -4045,6 +4299,15 @@ snapshots: - tsx - yaml + tsx@4.21.0: + dependencies: + esbuild: 0.27.3 + get-tsconfig: 4.13.6 + optionalDependencies: + fsevents: 2.3.3 + + tunnel@0.0.6: {} + turbo-darwin-64@2.8.3: optional: true @@ -4097,8 +4360,16 @@ snapshots: ufo@1.6.3: {} + undici-types@5.26.5: {} + undici-types@6.21.0: {} + undici@5.29.0: + dependencies: + '@fastify/busboy': 2.1.1 + + universal-user-agent@6.0.1: {} + universalify@0.1.2: {} unpipe@1.0.0: {} @@ -4109,13 +4380,13 @@ snapshots: vary@1.1.2: {} - vite-node@3.2.4(@types/node@22.19.10): + vite-node@3.2.4(@types/node@22.19.10)(tsx@4.21.0): dependencies: cac: 6.7.14 debug: 4.4.3 es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 7.3.1(@types/node@22.19.10) + vite: 7.3.1(@types/node@22.19.10)(tsx@4.21.0) transitivePeerDependencies: - '@types/node' - jiti @@ -4130,7 +4401,7 @@ snapshots: - tsx - yaml - vite@7.3.1(@types/node@22.19.10): + vite@7.3.1(@types/node@22.19.10)(tsx@4.21.0): dependencies: esbuild: 0.27.3 fdir: 6.5.0(picomatch@4.0.3) @@ -4141,12 +4412,13 @@ snapshots: optionalDependencies: '@types/node': 22.19.10 fsevents: 2.3.3 + tsx: 4.21.0 - vitest@3.2.4(@types/node@22.19.10): + vitest@3.2.4(@types/node@22.19.10)(tsx@4.21.0): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@22.19.10)) + '@vitest/mocker': 
3.2.4(vite@7.3.1(@types/node@22.19.10)(tsx@4.21.0)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -4164,8 +4436,8 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 7.3.1(@types/node@22.19.10) - vite-node: 3.2.4(@types/node@22.19.10) + vite: 7.3.1(@types/node@22.19.10)(tsx@4.21.0) + vite-node: 3.2.4(@types/node@22.19.10)(tsx@4.21.0) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 22.19.10 @@ -4183,6 +4455,15 @@ snapshots: - tsx - yaml + web-streams-polyfill@4.0.0-beta.3: {} + + webidl-conversions@3.0.1: {} + + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + which@2.0.2: dependencies: isexe: 2.0.0 diff --git a/scripts/build-skill.ts b/scripts/build-skill.ts new file mode 100644 index 0000000..3f3b797 --- /dev/null +++ b/scripts/build-skill.ts @@ -0,0 +1,38 @@ +import { readFileSync, writeFileSync } from 'fs'; +import { resolve, dirname } from 'path'; +import { fileURLToPath } from 'url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const rootDir = resolve(__dirname, '..'); + +const systemPrompt = readFileSync(resolve(rootDir, 'templates/system-prompt.md'), 'utf-8'); +const reportTemplate = readFileSync(resolve(rootDir, 'templates/report-template.md'), 'utf-8'); + +const skillMd = `--- +name: pr-impact +description: Analyze PR impact — breaking changes, test coverage gaps, doc staleness, impact graph, and risk score +arguments: + - name: base + description: Base branch to compare against (default: main) + required: false + - name: head + description: Head branch to analyze (default: HEAD) + required: false +--- + +${systemPrompt} + +## Your Task + +Analyze the PR comparing branch \`$ARGUMENTS\` in the current repository. If no arguments provided, compare \`main\` to \`HEAD\`. + +Parse the arguments: first argument is \`base\` branch, second is \`head\` branch. + +Use the pr-impact MCP tools to inspect the repository. 
Follow all 6 analysis steps. Produce the report using this exact template: + +${reportTemplate} +`; + +writeFileSync(resolve(rootDir, 'packages/skill/skill.md'), skillMd, 'utf-8'); + +console.log('Generated packages/skill/skill.md'); diff --git a/scripts/embed-templates.ts b/scripts/embed-templates.ts new file mode 100644 index 0000000..fe105c4 --- /dev/null +++ b/scripts/embed-templates.ts @@ -0,0 +1,26 @@ +import { readFileSync, writeFileSync, mkdirSync } from 'fs'; +import { resolve, dirname } from 'path'; +import { fileURLToPath } from 'url'; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const rootDir = resolve(__dirname, '..'); + +const systemPrompt = readFileSync(resolve(rootDir, 'templates/system-prompt.md'), 'utf-8'); +const reportTemplate = readFileSync(resolve(rootDir, 'templates/report-template.md'), 'utf-8'); + +const outputDir = resolve(rootDir, 'packages/action/src/generated'); +mkdirSync(outputDir, { recursive: true }); + +const outputContent = [ + '// AUTO-GENERATED — do not edit manually.', + '// Generated by scripts/embed-templates.ts from templates/*.md', + '', + 'export const SYSTEM_PROMPT = ' + JSON.stringify(systemPrompt) + ';', + '', + 'export const REPORT_TEMPLATE = ' + JSON.stringify(reportTemplate) + ';', + '', +].join('\n'); + +writeFileSync(resolve(outputDir, 'templates.ts'), outputContent, 'utf-8'); + +console.log('Generated packages/action/src/generated/templates.ts'); diff --git a/scripts/tsconfig.json b/scripts/tsconfig.json new file mode 100644 index 0000000..fa5dfee --- /dev/null +++ b/scripts/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "types": ["node"], + "noEmit": true + }, + "include": ["*.ts"] +} diff --git a/templates/report-template.md b/templates/report-template.md new file mode 100644 index 0000000..fa26bca --- /dev/null +++ b/templates/report-template.md @@ -0,0 +1,58 @@ +Output your analysis using exactly this 
structure. Fill in all sections. If a section has no findings, write "None" under it. + +# PR Impact Report + +## Summary +- **Risk Score**: {score}/100 ({level}) +- **Files Changed**: {total} ({source} source, {test} test, {doc} doc, {config} config, {other} other) +- **Total Lines Changed**: {additions} additions, {deletions} deletions +- **Breaking Changes**: {count} ({high} high, {medium} medium, {low} low) +- **Test Coverage**: {ratio}% of changed source files have corresponding test updates +- **Stale Doc References**: {count} +- **Impact Breadth**: {direct} directly changed, {indirect} indirectly affected + +## Breaking Changes + +| File | Type | Symbol | Before | After | Severity | Consumers | +|------|------|--------|--------|-------|----------|-----------| +| {filePath} | {removed_export/changed_signature/changed_type/renamed_export} | {symbolName} | {before signature/definition} | {after signature/definition or "removed"} | {high/medium/low} | {comma-separated consumer file paths} | + +## Test Coverage Gaps + +| Source File | Expected Test File | Test Exists | Test Updated | +|-------------|-------------------|-------------|--------------| +| {sourceFile} | {testFile} | {yes/no} | {yes/no} | + +## Stale Documentation + +| Doc File | Line | Reference | Reason | +|----------|------|-----------|--------| +| {docFile} | {lineNumber} | {reference text} | {why it's stale} | + +## Impact Graph + +### Directly Changed Files +- {filePath} ({additions}+, {deletions}-) + +### Indirectly Affected Files +- {filePath} — imported by {consumer}, which is directly changed + +## Risk Factor Breakdown + +| Factor | Score | Weight | Weighted | Details | +|--------|-------|--------|----------|---------| +| Breaking changes | {0-100} | 0.30 | {score*0.30} | {description} | +| Untested changes | {0-100} | 0.25 | {score*0.25} | {coverageRatio}% coverage | +| Diff size | {0-100} | 0.15 | {score*0.15} | {totalLines} total lines changed | +| Stale documentation | {0-100} | 0.10 | 
{score*0.10} | {count} stale references | +| Config file changes | {0-100} | 0.10 | {score*0.10} | {description} | +| Impact breadth | {0-100} | 0.10 | {score*0.10} | {count} indirectly affected files | +| **Total** | | **1.00** | **{total}** | | + +## Recommendations + +Based on the analysis above, here are the recommended actions before merging: + +1. {actionable recommendation with specific file/symbol references} +2. {actionable recommendation} +3. {actionable recommendation} diff --git a/templates/system-prompt.md b/templates/system-prompt.md new file mode 100644 index 0000000..b565060 --- /dev/null +++ b/templates/system-prompt.md @@ -0,0 +1,92 @@ +You are a PR impact analyzer. Given access to a git repository via MCP tools, analyze a pull request and produce a structured impact report. + +## Available Tools + +- `git_diff` — Get the raw diff between two branches (optionally for a single file) +- `read_file_at_ref` — Read a file's content at a specific git ref (branch/commit) +- `list_changed_files` — List all files changed between two branches with stats and status +- `search_code` — Search for a regex pattern across the codebase +- `find_importers` — Find all files that import a given module path +- `list_test_files` — Find test files associated with a given source file + +## Analysis Steps + +Follow these steps in order. Use the tools to gather evidence — never guess about file contents or imports. + +### Step 1: Diff Overview + +Call `list_changed_files` to get all changed files. 
Categorize each file: +- **source**: `.ts`, `.tsx`, `.js`, `.jsx` files that are not tests +- **test**: files in `__tests__/`, `test/`, `tests/` directories, or files matching `*.test.*`, `*.spec.*` +- **doc**: `.md`, `.mdx`, `.rst`, `.txt` files +- **config**: `package.json`, `tsconfig.json`, `.eslintrc.*`, `Dockerfile`, CI/CD files, bundler configs +- **other**: everything else + +### Step 2: Breaking Change Detection + +For each changed **source** file that likely exports public API symbols: +1. Call `read_file_at_ref` with the base branch ref to get the old version +2. Call `read_file_at_ref` with the head branch ref to get the new version +3. Compare exported functions, classes, types, interfaces, enums, and variables +4. Identify breaking changes: + - **Removed export**: a symbol that existed in base but is gone in head + - **Changed signature**: function parameters changed (added required params, removed params, changed types) + - **Changed type**: interface/type fields changed in incompatible ways + - **Renamed export**: a symbol was renamed (removed + similar new one added) +5. For each breaking change, call `find_importers` to find downstream consumers +6. Assign severity: + - **high**: removed or renamed exports, removed required interface fields + - **medium**: changed function signatures, changed return types + - **low**: changed optional fields, added required fields to interfaces + +### Step 3: Test Coverage Gaps + +For each changed source file: +1. Call `list_test_files` to find associated test files +2. Check if any of those test files appear in the changed file list from Step 1 +3. Calculate coverage ratio: `sourceFilesWithTestChanges / changedSourceFiles` +4. Flag each source file that changed without corresponding test updates + +### Step 4: Documentation Staleness + +For each changed **doc** file AND for each doc file that references changed source files: +1. Call `read_file_at_ref` (head ref) to read the doc content +2. 
Look for references to symbols, file paths, or function names that were modified or removed +3. Flag stale references with the line number and reason + +If no doc files are in the diff, call `search_code` with pattern matching changed symbol names in `*.md` files to find docs that reference them. + +### Step 5: Impact Graph + +For each changed source file: +1. Call `find_importers` to find direct consumers +2. For each direct consumer, call `find_importers` again to find indirect consumers (up to 2 levels deep) +3. Classify files as **directly changed** (in the diff) or **indirectly affected** (consumers not in the diff) + +### Step 6: Risk Assessment + +Score each factor from 0 to 100, then compute the weighted average: + +| Factor | Weight | Scoring | +|--------|--------|---------| +| Breaking changes | 0.30 | `100` if any high-severity, `60` if medium-only, `30` if low-only, `0` if none | +| Untested changes | 0.25 | `(1 - coverageRatio) * 100` | +| Diff size | 0.15 | `0` if <100 total lines, `50` if 100-500, `80` if 500-1000, `100` if >1000 | +| Stale documentation | 0.10 | `min(staleReferences * 20, 100)` | +| Config file changes | 0.10 | `100` if CI/build config, `50` if other config, `0` if none | +| Impact breadth | 0.10 | `min(indirectlyAffectedFiles * 10, 100)` | + +**Formula:** `score = sum(factor_score * weight)` (weights sum to 1.0) + +**Risk levels:** 0-25 = low, 26-50 = medium, 51-75 = high, 76-100 = critical + +## Rules + +- Always call tools to verify — never guess about file contents, imports, or test file existence. +- Always use `git_diff` with the `file` parameter to inspect files individually. Never load the full diff at once. +- If >30 changed files, only call `read_file_at_ref` for files with >50 lines changed. +- If >50 changed files, skip the documentation staleness check (Step 4). +- Call `find_importers` only for directly changed source files, not for indirect consumers. +- Focus on exported/public symbols for breaking change detection. 
Internal/private changes are lower priority. +- Categorize every finding with severity and cite evidence (file path, line, before/after). +- Be precise with the risk score calculation — show your math in the factor breakdown. diff --git a/vitest.config.ts b/vitest.config.ts index 410542e..a02e969 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { - projects: ['packages/core', 'packages/cli', 'packages/mcp-server'], + projects: ['packages/tools-core', 'packages/tools', 'packages/action'], coverage: { provider: 'v8', include: ['packages/*/src/**/*.ts'], @@ -11,16 +11,11 @@ export default defineConfig({ '**/*.test.ts', '**/*.spec.ts', '**/types.ts', - 'packages/core/src/index.ts', + '**/generated/**', + '**/index.ts', ], reporter: ['text', 'lcov'], reportsDirectory: './coverage', - thresholds: { - lines: 80, - functions: 80, - branches: 70, - statements: 80, - }, }, }, }); From 8bc61c38ba6440003289a97dde195d38368cd58d Mon Sep 17 00:00:00 2001 From: ducdmdev Date: Wed, 11 Feb 2026 18:15:52 +0700 Subject: [PATCH 2/5] fix: cast through unknown for strict typecheck in tools.ts Record cannot be directly asserted to specific param types with required properties under strict mode. Cast through unknown first. 
--- packages/action/src/tools.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/action/src/tools.ts b/packages/action/src/tools.ts index 8cde8bc..e70cd3d 100644 --- a/packages/action/src/tools.ts +++ b/packages/action/src/tools.ts @@ -10,27 +10,27 @@ import { export async function executeTool(name: string, input: Record): Promise { switch (name) { case 'git_diff': { - const result = await gitDiff(input as Parameters[0]); + const result = await gitDiff(input as unknown as Parameters[0]); return result.diff; } case 'read_file_at_ref': { - const result = await readFileAtRef(input as Parameters[0]); + const result = await readFileAtRef(input as unknown as Parameters[0]); return result.content; } case 'list_changed_files': { - const result = await listChangedFiles(input as Parameters[0]); + const result = await listChangedFiles(input as unknown as Parameters[0]); return JSON.stringify(result, null, 2); } case 'search_code': { - const result = await searchCode(input as Parameters[0]); + const result = await searchCode(input as unknown as Parameters[0]); return JSON.stringify(result, null, 2); } case 'find_importers': { - const result = await findImporters(input as Parameters[0]); + const result = await findImporters(input as unknown as Parameters[0]); return JSON.stringify(result, null, 2); } case 'list_test_files': { - const result = await listTestFiles(input as Parameters[0]); + const result = await listTestFiles(input as unknown as Parameters[0]); return JSON.stringify(result, null, 2); } default: From abb142c8fdff75c6eab4968f517a909c70139a40 Mon Sep 17 00:00:00 2001 From: ducdmdev Date: Thu, 12 Feb 2026 09:41:46 +0700 Subject: [PATCH 3/5] refactor: extract shared tool definitions and add missing test cases 1. 
Shared tool definitions (DRY): - Create tools-core/src/tool-defs.ts with canonical TOOL_DEFS - action/client.ts builds Anthropic tools from shared defs - tools/register.ts builds zod schemas from shared defs - Eliminates duplicated tool names, descriptions, and parameters 2. Client test cases (error handling, timeouts, iteration limits): - Tool execution error sends is_error back to Claude - Parallel tool_use blocks execute via Promise.all - Wall-clock timeout returns partial text or throws - Max iterations (30) returns partial text or throws 3. Risk score parsing edge cases: - Boundary values 0/100 and 100/100 - Unparseable report skips threshold check - Score equal to threshold triggers failure (>= comparison) - Non-Error rejection handled in main() --- CLAUDE.md | 2 +- packages/action/__tests__/client.test.ts | 212 ++++++++++++++++++++++ packages/action/__tests__/index.test.ts | 50 +++++ packages/action/src/client.ts | 83 ++------- packages/tools-core/src/index.ts | 3 + packages/tools-core/src/tool-defs.ts | 80 ++++++++ packages/tools/__tests__/index.test.ts | 24 ++- packages/tools/__tests__/register.test.ts | 22 ++- packages/tools/src/register.ts | 105 ++++++----- 9 files changed, 442 insertions(+), 139 deletions(-) create mode 100644 packages/tools-core/src/tool-defs.ts diff --git a/CLAUDE.md b/CLAUDE.md index 67f9eca..5740dae 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -150,7 +150,7 @@ scripts/ - Vitest projects are configured in `vitest.config.ts` (root) with `packages/tools-core`, `packages/tools`, and `packages/action`. - Write **unit tests only** -- do not write integration tests that require a real git repository. - **Mock git operations** (simple-git calls) and external dependencies where needed; tests should not depend on filesystem or git state. 
-- Test files per package: +- Test files per package (14 files, 94 tests): - `packages/tools-core/__tests__/`: git-diff, read-file, list-files, search-code, find-imports, list-tests, regression (7 files) - `packages/tools/__tests__/`: index, register, build-scripts (3 files) - `packages/action/__tests__/`: tools, client, comment, index (4 files) diff --git a/packages/action/__tests__/client.test.ts b/packages/action/__tests__/client.test.ts index 5c1945d..7c1e423 100644 --- a/packages/action/__tests__/client.test.ts +++ b/packages/action/__tests__/client.test.ts @@ -147,4 +147,216 @@ describe('runAnalysis', () => { expect(createArgs.system).toBe('You are a test prompt.'); expect(createArgs.messages[0].content).toContain('# Test Report Template'); }); + + it('sends is_error tool result when executeTool throws', async () => { + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: 'recovered' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockRejectedValue(new Error('git not found')); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(result).toBe('recovered'); + // Verify the error was sent back as a tool_result with is_error + const secondCallMessages = mockCreate.mock.calls[1][0].messages; + const toolResultMsg = secondCallMessages[secondCallMessages.length - 1]; + expect(toolResultMsg.content[0]).toMatchObject({ + type: 'tool_result', + tool_use_id: 'call_1', + is_error: true, + content: 'Error: git not found', + }); + }); + + it('executes multiple tool_use blocks in parallel', async () => { 
+ const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + { type: 'tool_use', id: 'call_2', name: 'list_changed_files', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + .mockResolvedValueOnce({ + content: [{ type: 'text', text: 'done' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('tool output'); + + vi.useRealTimers(); + await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + // Both tools should have been called + expect(executeTool).toHaveBeenCalledTimes(2); + expect(executeTool).toHaveBeenCalledWith('git_diff', expect.objectContaining({ base: 'main' })); + expect(executeTool).toHaveBeenCalledWith('list_changed_files', expect.objectContaining({ base: 'main' })); + + // Both tool results should be in the second API call + const secondCallMessages = mockCreate.mock.calls[1][0].messages; + const toolResults = secondCallMessages[secondCallMessages.length - 1].content; + expect(toolResults).toHaveLength(2); + expect(toolResults[0].tool_use_id).toBe('call_1'); + expect(toolResults[1].tool_use_id).toBe('call_2'); + }); + + it('returns partial text when wall-clock timeout is exceeded', async () => { + vi.useRealTimers(); + const realNow = Date.now(); + const dateNowSpy = vi.spyOn(Date, 'now'); + // startTime capture + dateNowSpy.mockReturnValueOnce(realNow); + // First iteration check — within timeout + dateNowSpy.mockReturnValueOnce(realNow); + // Second iteration check — past timeout (200s > 180s) + dateNowSpy.mockReturnValueOnce(realNow + 200_000); + + const mockCreate = vi.fn() + .mockResolvedValueOnce({ + content: [ + { type: 'text', text: 'partial report so far' }, + { type: 'tool_use', id: 
'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }) + // Should never reach this — timeout triggers first + .mockResolvedValueOnce({ + content: [{ type: 'text', text: 'should not reach' }], + stop_reason: 'end_turn', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('diff'); + + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(result).toBe('partial report so far'); + // Only one API call — timeout prevents the second + expect(mockCreate).toHaveBeenCalledTimes(1); + dateNowSpy.mockRestore(); + }); + + it('throws when timeout is exceeded with no text output', async () => { + vi.useRealTimers(); + const realNow = Date.now(); + const dateNowSpy = vi.spyOn(Date, 'now'); + // startTime capture + dateNowSpy.mockReturnValueOnce(realNow); + // First iteration — immediately past timeout + dateNowSpy.mockReturnValueOnce(realNow + 200_000); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: vi.fn() }, + }) as never); + + await expect( + runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }), + ).rejects.toThrow('Analysis timed out'); + + dateNowSpy.mockRestore(); + }); + + it('returns partial text when max iterations are exhausted', async () => { + // Mock create to always return tool_use with some text + const mockCreate = vi.fn().mockResolvedValue({ + content: [ + { type: 'text', text: 'iteration text' }, + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + 
vi.mocked(executeTool).mockResolvedValue('result'); + + vi.useRealTimers(); + const result = await runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }); + + expect(result).toBe('iteration text'); + // Should have been called exactly 30 times (MAX_ITERATIONS) + expect(mockCreate).toHaveBeenCalledTimes(30); + }); + + it('throws when max iterations exhausted with no text output', async () => { + // Mock create to always return only tool_use (no text blocks) + const mockCreate = vi.fn().mockResolvedValue({ + content: [ + { type: 'tool_use', id: 'call_1', name: 'git_diff', input: { base: 'main', head: 'HEAD' } }, + ], + stop_reason: 'tool_use', + }); + + vi.mocked(Anthropic).mockImplementation(() => ({ + messages: { create: mockCreate }, + }) as never); + + vi.mocked(executeTool).mockResolvedValue('result'); + + vi.useRealTimers(); + await expect( + runAnalysis({ + apiKey: 'test-key', + repoPath: '/repo', + baseBranch: 'main', + headBranch: 'HEAD', + model: 'claude-sonnet-4-5-20250929', + }), + ).rejects.toThrow('maximum iterations'); + + expect(mockCreate).toHaveBeenCalledTimes(30); + }); }); diff --git a/packages/action/__tests__/index.test.ts b/packages/action/__tests__/index.test.ts index 9d56081..03b5ea0 100644 --- a/packages/action/__tests__/index.test.ts +++ b/packages/action/__tests__/index.test.ts @@ -173,4 +173,54 @@ describe('action entry point', () => { expect(core.setFailed).toHaveBeenCalledWith('API connection failed'); }); + + describe('risk score parsing edge cases', () => { + it('parses score at boundary 0/100', async () => { + setupInputs(); + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 0/100 (low)'); + + await loadIndex(); + + expect(core.setOutput).toHaveBeenCalledWith('risk-score', '0'); + expect(core.setOutput).toHaveBeenCalledWith('risk-level', 'low'); + }); + + it('parses score at boundary 100/100', async () => { + setupInputs(); + 
vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 100/100 (critical)'); + + await loadIndex(); + + expect(core.setOutput).toHaveBeenCalledWith('risk-score', '100'); + expect(core.setOutput).toHaveBeenCalledWith('risk-level', 'critical'); + }); + + it('does not fail threshold check when score is -1 (unparseable)', async () => { + setupInputs({ threshold: '50' }); + vi.mocked(runAnalysis).mockResolvedValue('Report without risk score format'); + + await loadIndex(); + + expect(core.setOutput).toHaveBeenCalledWith('risk-score', '-1'); + expect(core.setFailed).not.toHaveBeenCalled(); + }); + + it('handles score equal to threshold (>= comparison)', async () => { + setupInputs({ threshold: '42' }); + vi.mocked(runAnalysis).mockResolvedValue('**Risk Score**: 42/100 (medium)'); + + await loadIndex(); + + expect(core.setFailed).toHaveBeenCalledWith('Risk score 42 exceeds threshold 42'); + }); + + it('handles non-Error rejection in main()', async () => { + setupInputs(); + vi.mocked(runAnalysis).mockRejectedValue('string error'); + + await loadIndex(); + + expect(core.setFailed).toHaveBeenCalledWith('string error'); + }); + }); }); diff --git a/packages/action/src/client.ts b/packages/action/src/client.ts index f39e156..5fdeabe 100644 --- a/packages/action/src/client.ts +++ b/packages/action/src/client.ts @@ -1,4 +1,5 @@ import Anthropic from '@anthropic-ai/sdk'; +import { TOOL_DEFS } from '@pr-impact/tools-core'; import { executeTool } from './tools.js'; import { SYSTEM_PROMPT, REPORT_TEMPLATE } from './generated/templates.js'; @@ -13,79 +14,17 @@ export interface AnalysisOptions { const MAX_ITERATIONS = 30; const TIMEOUT_MS = 180_000; // 180 seconds -const TOOL_DEFINITIONS: Anthropic.Tool[] = [ - { - name: 'git_diff', - description: 'Get the raw git diff between two branches, optionally for a single file', - input_schema: { - type: 'object' as const, - properties: { - base: { type: 'string', description: 'Base branch or ref' }, - head: { type: 'string', description: 
'Head branch or ref' }, - file: { type: 'string', description: 'Optional file path for single-file diff' }, - }, - required: ['base', 'head'], - }, +// Build Anthropic tool definitions from the shared canonical definitions. +// repoPath is omitted here — it is injected at runtime in the tool execution loop. +const TOOL_DEFINITIONS: Anthropic.Tool[] = TOOL_DEFS.map((def) => ({ + name: def.name, + description: def.description, + input_schema: { + type: 'object' as const, + properties: def.properties, + required: def.required, }, - { - name: 'read_file_at_ref', - description: 'Read a file content at a specific git ref', - input_schema: { - type: 'object' as const, - properties: { - ref: { type: 'string', description: 'Git ref (branch, commit, tag)' }, - filePath: { type: 'string', description: 'Repo-relative file path' }, - }, - required: ['ref', 'filePath'], - }, - }, - { - name: 'list_changed_files', - description: 'List files changed between two branches with status and stats', - input_schema: { - type: 'object' as const, - properties: { - base: { type: 'string', description: 'Base branch or ref' }, - head: { type: 'string', description: 'Head branch or ref' }, - }, - required: ['base', 'head'], - }, - }, - { - name: 'search_code', - description: 'Search for a regex pattern in the codebase', - input_schema: { - type: 'object' as const, - properties: { - pattern: { type: 'string', description: 'Regex pattern' }, - glob: { type: 'string', description: 'File glob to limit scope (e.g. 
"*.md")' }, - }, - required: ['pattern'], - }, - }, - { - name: 'find_importers', - description: 'Find files that import a given module', - input_schema: { - type: 'object' as const, - properties: { - modulePath: { type: 'string', description: 'Repo-relative module path' }, - }, - required: ['modulePath'], - }, - }, - { - name: 'list_test_files', - description: 'Find test files associated with a source file', - input_schema: { - type: 'object' as const, - properties: { - sourceFile: { type: 'string', description: 'Repo-relative source file path' }, - }, - required: ['sourceFile'], - }, - }, -]; +})); export async function runAnalysis(options: AnalysisOptions): Promise { const client = new Anthropic({ apiKey: options.apiKey }); diff --git a/packages/tools-core/src/index.ts b/packages/tools-core/src/index.ts index 67a6639..74c21aa 100644 --- a/packages/tools-core/src/index.ts +++ b/packages/tools-core/src/index.ts @@ -20,3 +20,6 @@ export type { FindImportersParams, FindImportersResult } from './tools/find-impo export { listTestFiles } from './tools/list-tests.js'; export type { ListTestFilesParams, ListTestFilesResult } from './tools/list-tests.js'; + +export { TOOL_DEFS } from './tool-defs.js'; +export type { ToolDef, ToolParamDef } from './tool-defs.js'; diff --git a/packages/tools-core/src/tool-defs.ts b/packages/tools-core/src/tool-defs.ts new file mode 100644 index 0000000..40a4624 --- /dev/null +++ b/packages/tools-core/src/tool-defs.ts @@ -0,0 +1,80 @@ +/** + * Canonical tool definitions shared between the MCP server and GitHub Action. + * + * Each definition describes a tool's name, description, and parameter schema + * (JSON-Schema-style). The MCP server (`@pr-impact/tools`) converts these to + * zod schemas; the GitHub Action (`@pr-impact/action`) maps them to + * Anthropic API tool definitions. 
+ * + * `repoPath` is intentionally omitted here — it is added by each consumer: + * the MCP server exposes it as an optional parameter; the action injects it + * at runtime from the working directory. + */ + +export interface ToolParamDef { + type: 'string'; + description: string; +} + +export interface ToolDef { + name: string; + description: string; + properties: Record; + required: string[]; +} + +export const TOOL_DEFS: readonly ToolDef[] = [ + { + name: 'git_diff', + description: 'Get the raw git diff between two branches, optionally for a single file', + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + file: { type: 'string', description: 'Optional file path to get diff for a single file' }, + }, + required: ['base', 'head'], + }, + { + name: 'read_file_at_ref', + description: 'Read a file content at a specific git ref (branch or commit)', + properties: { + ref: { type: 'string', description: 'Git ref (branch name, commit SHA, or tag)' }, + filePath: { type: 'string', description: 'Repo-relative file path' }, + }, + required: ['ref', 'filePath'], + }, + { + name: 'list_changed_files', + description: 'List all files changed between two branches with status and addition/deletion stats', + properties: { + base: { type: 'string', description: 'Base branch or ref' }, + head: { type: 'string', description: 'Head branch or ref' }, + }, + required: ['base', 'head'], + }, + { + name: 'search_code', + description: 'Search for a regex pattern across the codebase using git grep', + properties: { + pattern: { type: 'string', description: 'Regex pattern to search for' }, + glob: { type: 'string', description: 'File glob to limit search scope (e.g. 
"*.md")' }, + }, + required: ['pattern'], + }, + { + name: 'find_importers', + description: 'Find all source files that import a given module path', + properties: { + modulePath: { type: 'string', description: 'Repo-relative path of the module to find importers for' }, + }, + required: ['modulePath'], + }, + { + name: 'list_test_files', + description: 'Find test files associated with a source file using naming conventions', + properties: { + sourceFile: { type: 'string', description: 'Repo-relative path of the source file' }, + }, + required: ['sourceFile'], + }, +]; diff --git a/packages/tools/__tests__/index.test.ts b/packages/tools/__tests__/index.test.ts index 1c4bfdb..e6b8d42 100644 --- a/packages/tools/__tests__/index.test.ts +++ b/packages/tools/__tests__/index.test.ts @@ -12,16 +12,20 @@ vi.mock('@modelcontextprotocol/sdk/server/stdio.js', () => ({ StdioServerTransport: vi.fn(), })); -// Mock all tools-core handlers -vi.mock('@pr-impact/tools-core', () => ({ - gitDiff: vi.fn().mockResolvedValue({ diff: 'mock diff' }), - readFileAtRef: vi.fn().mockResolvedValue({ content: 'mock content' }), - listChangedFiles: vi.fn().mockResolvedValue({ files: [], totalAdditions: 0, totalDeletions: 0 }), - searchCode: vi.fn().mockResolvedValue({ matches: [] }), - findImporters: vi.fn().mockResolvedValue({ importers: [] }), - listTestFiles: vi.fn().mockResolvedValue({ testFiles: [] }), - clearImporterCache: vi.fn(), -})); +// Mock all tools-core handlers but keep TOOL_DEFS from the real module +vi.mock('@pr-impact/tools-core', async (importOriginal) => { + const original = await importOriginal(); + return { + TOOL_DEFS: original.TOOL_DEFS, + gitDiff: vi.fn().mockResolvedValue({ diff: 'mock diff' }), + readFileAtRef: vi.fn().mockResolvedValue({ content: 'mock content' }), + listChangedFiles: vi.fn().mockResolvedValue({ files: [], totalAdditions: 0, totalDeletions: 0 }), + searchCode: vi.fn().mockResolvedValue({ matches: [] }), + findImporters: vi.fn().mockResolvedValue({ 
importers: [] }), + listTestFiles: vi.fn().mockResolvedValue({ testFiles: [] }), + clearImporterCache: vi.fn(), + }; +}); import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; diff --git a/packages/tools/__tests__/register.test.ts b/packages/tools/__tests__/register.test.ts index 5d79594..c73986a 100644 --- a/packages/tools/__tests__/register.test.ts +++ b/packages/tools/__tests__/register.test.ts @@ -1,14 +1,18 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; -vi.mock('@pr-impact/tools-core', () => ({ - gitDiff: vi.fn(), - readFileAtRef: vi.fn(), - listChangedFiles: vi.fn(), - searchCode: vi.fn(), - findImporters: vi.fn(), - listTestFiles: vi.fn(), - clearImporterCache: vi.fn(), -})); +vi.mock('@pr-impact/tools-core', async (importOriginal) => { + const original = await importOriginal(); + return { + TOOL_DEFS: original.TOOL_DEFS, + gitDiff: vi.fn(), + readFileAtRef: vi.fn(), + listChangedFiles: vi.fn(), + searchCode: vi.fn(), + findImporters: vi.fn(), + listTestFiles: vi.fn(), + clearImporterCache: vi.fn(), + }; +}); vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => ({ McpServer: vi.fn(), diff --git a/packages/tools/src/register.ts b/packages/tools/src/register.ts index 30344d7..5b38fd2 100644 --- a/packages/tools/src/register.ts +++ b/packages/tools/src/register.ts @@ -7,6 +7,16 @@ import { searchCode, findImporters, listTestFiles, + TOOL_DEFS, +} from '@pr-impact/tools-core'; +import type { + ToolDef, + GitDiffParams, + ReadFileAtRefParams, + ListChangedFilesParams, + SearchCodeParams, + FindImportersParams, + ListTestFilesParams, } from '@pr-impact/tools-core'; interface ToolResult { @@ -24,19 +34,33 @@ function error(err: unknown): ToolResult { return { content: [{ type: 'text', text: `Error: ${message}` }], isError: true }; } +/** Convert a ToolDef to a zod schema, adding the MCP-specific repoPath param. 
*/ +function defToZod(def: ToolDef): Record { + const shape: Record = { + repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), + }; + for (const [key, param] of Object.entries(def.properties)) { + const base = z.string().describe(param.description); + shape[key] = def.required.includes(key) ? base : base.optional(); + } + return shape; +} + +function getDef(name: string): ToolDef { + const def = TOOL_DEFS.find((d) => d.name === name); + if (!def) throw new Error(`Unknown tool definition: ${name}`); + return def; +} + export function registerAllTools(server: McpServer): void { + const gitDiffDef = getDef('git_diff'); server.tool( - 'git_diff', - 'Get the raw git diff between two branches, optionally for a single file', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - base: z.string().describe('Base branch or ref'), - head: z.string().describe('Head branch or ref'), - file: z.string().optional().describe('Optional file path to get diff for a single file'), - }, + gitDiffDef.name, + gitDiffDef.description, + defToZod(gitDiffDef), async (params) => { try { - const result = await gitDiff(params); + const result = await gitDiff(params as unknown as GitDiffParams); return success(result.diff); } catch (err) { return error(err); @@ -44,17 +68,14 @@ export function registerAllTools(server: McpServer): void { }, ); + const readFileDef = getDef('read_file_at_ref'); server.tool( - 'read_file_at_ref', - 'Read a file content at a specific git ref (branch or commit)', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - ref: z.string().describe('Git ref (branch name, commit SHA, or tag)'), - filePath: z.string().describe('Repo-relative file path'), - }, + readFileDef.name, + readFileDef.description, + defToZod(readFileDef), async (params) => { try { - const result = await readFileAtRef(params); + const result = await readFileAtRef(params as unknown as ReadFileAtRefParams); return 
success(result.content); } catch (err) { return error(err); @@ -62,17 +83,14 @@ export function registerAllTools(server: McpServer): void { }, ); + const listFilesDef = getDef('list_changed_files'); server.tool( - 'list_changed_files', - 'List all files changed between two branches with status and addition/deletion stats', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - base: z.string().describe('Base branch or ref'), - head: z.string().describe('Head branch or ref'), - }, + listFilesDef.name, + listFilesDef.description, + defToZod(listFilesDef), async (params) => { try { - const result = await listChangedFiles(params); + const result = await listChangedFiles(params as unknown as ListChangedFilesParams); return success(JSON.stringify(result, null, 2)); } catch (err) { return error(err); @@ -80,17 +98,14 @@ export function registerAllTools(server: McpServer): void { }, ); + const searchDef = getDef('search_code'); server.tool( - 'search_code', - 'Search for a regex pattern across the codebase using git grep', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - pattern: z.string().describe('Regex pattern to search for'), - glob: z.string().optional().describe('File glob to limit search scope (e.g. 
"*.md")'), - }, + searchDef.name, + searchDef.description, + defToZod(searchDef), async (params) => { try { - const result = await searchCode(params); + const result = await searchCode(params as unknown as SearchCodeParams); return success(JSON.stringify(result, null, 2)); } catch (err) { return error(err); @@ -98,16 +113,14 @@ export function registerAllTools(server: McpServer): void { }, ); + const importersDef = getDef('find_importers'); server.tool( - 'find_importers', - 'Find all source files that import a given module path', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - modulePath: z.string().describe('Repo-relative path of the module to find importers for'), - }, + importersDef.name, + importersDef.description, + defToZod(importersDef), async (params) => { try { - const result = await findImporters(params); + const result = await findImporters(params as unknown as FindImportersParams); return success(JSON.stringify(result, null, 2)); } catch (err) { return error(err); @@ -115,16 +128,14 @@ export function registerAllTools(server: McpServer): void { }, ); + const testsDef = getDef('list_test_files'); server.tool( - 'list_test_files', - 'Find test files associated with a source file using naming conventions', - { - repoPath: z.string().optional().describe('Path to git repo, defaults to cwd'), - sourceFile: z.string().describe('Repo-relative path of the source file'), - }, + testsDef.name, + testsDef.description, + defToZod(testsDef), async (params) => { try { - const result = await listTestFiles(params); + const result = await listTestFiles(params as unknown as ListTestFilesParams); return success(JSON.stringify(result, null, 2)); } catch (err) { return error(err); From e4aa114ac8bd93b219f665661711d106e91a2be6 Mon Sep 17 00:00:00 2001 From: ducdmdev Date: Thu, 12 Feb 2026 10:45:39 +0700 Subject: [PATCH 4/5] docs: add C3 architecture documentation Create C3v4 architecture docs covering all 4 containers, 16 components, 
and 3 cross-cutting refs. Documents the system context, container boundaries, component responsibilities, and shared patterns (git operations, ESM conventions, build pipeline). --- .c3/README.md | 68 +++++ .c3/adr/adr-00000000-c3-adoption.md | 274 ++++++++++++++++++ .c3/c3-1-tools-core/README.md | 63 ++++ .../c3-101-tool-definitions.md | 39 +++ .c3/c3-1-tools-core/c3-110-git-diff.md | 39 +++ .c3/c3-1-tools-core/c3-111-read-file.md | 39 +++ .c3/c3-1-tools-core/c3-112-list-files.md | 46 +++ .c3/c3-1-tools-core/c3-113-search-code.md | 44 +++ .c3/c3-1-tools-core/c3-114-find-importers.md | 47 +++ .c3/c3-1-tools-core/c3-115-list-tests.md | 44 +++ .c3/c3-2-tools/README.md | 35 +++ .c3/c3-2-tools/c3-201-mcp-server.md | 34 +++ .c3/c3-2-tools/c3-210-tool-registration.md | 48 +++ .c3/c3-3-action/README.md | 52 ++++ .c3/c3-3-action/c3-301-template-embedding.md | 40 +++ .c3/c3-3-action/c3-310-agentic-client.md | 46 +++ .c3/c3-3-action/c3-311-tool-dispatcher.md | 39 +++ .c3/c3-3-action/c3-312-comment-poster.md | 41 +++ .c3/c3-3-action/c3-313-action-entrypoint.md | 45 +++ .c3/c3-4-skill/README.md | 35 +++ .c3/c3-4-skill/c3-401-plugin-config.md | 34 +++ .c3/c3-4-skill/c3-410-skill-prompt.md | 40 +++ .c3/refs/ref-build-pipeline.md | 56 ++++ .c3/refs/ref-esm-conventions.md | 55 ++++ .c3/refs/ref-git-operations.md | 59 ++++ 25 files changed, 1362 insertions(+) create mode 100644 .c3/README.md create mode 100644 .c3/adr/adr-00000000-c3-adoption.md create mode 100644 .c3/c3-1-tools-core/README.md create mode 100644 .c3/c3-1-tools-core/c3-101-tool-definitions.md create mode 100644 .c3/c3-1-tools-core/c3-110-git-diff.md create mode 100644 .c3/c3-1-tools-core/c3-111-read-file.md create mode 100644 .c3/c3-1-tools-core/c3-112-list-files.md create mode 100644 .c3/c3-1-tools-core/c3-113-search-code.md create mode 100644 .c3/c3-1-tools-core/c3-114-find-importers.md create mode 100644 .c3/c3-1-tools-core/c3-115-list-tests.md create mode 100644 .c3/c3-2-tools/README.md create mode 100644 
.c3/c3-2-tools/c3-201-mcp-server.md create mode 100644 .c3/c3-2-tools/c3-210-tool-registration.md create mode 100644 .c3/c3-3-action/README.md create mode 100644 .c3/c3-3-action/c3-301-template-embedding.md create mode 100644 .c3/c3-3-action/c3-310-agentic-client.md create mode 100644 .c3/c3-3-action/c3-311-tool-dispatcher.md create mode 100644 .c3/c3-3-action/c3-312-comment-poster.md create mode 100644 .c3/c3-3-action/c3-313-action-entrypoint.md create mode 100644 .c3/c3-4-skill/README.md create mode 100644 .c3/c3-4-skill/c3-401-plugin-config.md create mode 100644 .c3/c3-4-skill/c3-410-skill-prompt.md create mode 100644 .c3/refs/ref-build-pipeline.md create mode 100644 .c3/refs/ref-esm-conventions.md create mode 100644 .c3/refs/ref-git-operations.md diff --git a/.c3/README.md b/.c3/README.md new file mode 100644 index 0000000..6cd6b70 --- /dev/null +++ b/.c3/README.md @@ -0,0 +1,68 @@ +--- +id: c3-0 +c3-version: 4 +title: pr-impact +goal: AI-powered PR impact analysis — detect breaking changes, map blast radius, and score risk before merge +summary: TypeScript monorepo providing Claude-driven PR analysis via GitHub Action, Claude Code plugin, and MCP tools +--- + +# pr-impact + +## Goal + +AI-powered PR impact analysis — detect breaking changes, map blast radius, and score risk before merge. + +## Overview + +```mermaid +graph TD + subgraph Actors + DEV[Developer] + CI[GitHub Actions CI] + AI[MCP AI Client] + end + + subgraph "pr-impact" + SKILL["c3-4 skill
Claude Code Plugin"] + ACTION["c3-3 action
GitHub Action"] + TOOLS["c3-2 tools
MCP Server"] + CORE["c3-1 tools-core
Pure Tool Functions"] + end + + subgraph External + ANTHROPIC[Anthropic API] + GITHUB[GitHub API] + GIT[Git Repository] + end + + DEV -->|/pr-impact| SKILL + CI -->|on: pull_request| ACTION + AI -->|MCP stdio| TOOLS + + SKILL -->|registers MCP| TOOLS + ACTION -->|imports| CORE + TOOLS -->|imports| CORE + + ACTION -->|Claude API| ANTHROPIC + ACTION -->|PR comments| GITHUB + CORE -->|simple-git| GIT +``` + +## Abstract Constraints + +| Constraint | Rationale | Affected Containers | +|------------|-----------|---------------------| +| All git operations via simple-git | Testability and safety — no raw child_process | c3-1 | +| Templates are single source of truth | Prevent prompt/report drift between action and skill | c3-3, c3-4 | +| Tool definitions must be canonical and shared | Prevent tool schema drift between MCP server and action | c3-1, c3-2, c3-3 | +| ESM-only, CJS exception for action | GitHub Actions requires CommonJS entry point | All | +| Agentic loop safety limits (30 iters, 180s) | Prevent runaway API costs | c3-3 | + +## Containers + +| ID | Name | Boundary | Status | Responsibilities | Goal Contribution | +|----|------|----------|--------|------------------|-------------------| +| c3-1 | tools-core | library | implemented | Pure git/repo tool functions; canonical tool definitions | Shared foundation — all analysis evidence gathering | +| c3-2 | tools | service | implemented | MCP protocol server wrapping tools-core | Exposes tools to any MCP-compatible AI client | +| c3-3 | action | worker | implemented | Agentic Claude loop, tool dispatch, PR comment posting | Automated CI analysis with threshold gating | +| c3-4 | skill | app | implemented | Claude Code plugin config, assembled skill prompt | Interactive developer analysis via /pr-impact | diff --git a/.c3/adr/adr-00000000-c3-adoption.md b/.c3/adr/adr-00000000-c3-adoption.md new file mode 100644 index 0000000..9c2dba3 --- /dev/null +++ b/.c3/adr/adr-00000000-c3-adoption.md @@ -0,0 +1,274 @@ +--- 
+id: adr-00000000-c3-adoption +c3-version: 4 +title: C3 Architecture Documentation Adoption +type: adr +status: implemented +date: 2026-02-12 +affects: [c3-0] +--- + +# C3 Architecture Documentation Adoption + +## Goal + +Adopt C3 methodology for pr-impact. + + + +## Workflow + +```mermaid +flowchart TD + GOAL([Goal]) --> S0 + + subgraph S0["Stage 0: Inventory"] + S0_DISCOVER[Discover codebase] --> S0_ASK{Gaps?} + S0_ASK -->|Yes| S0_SOCRATIC[Socratic] --> S0_DISCOVER + S0_ASK -->|No| S0_LIST[List items + diagram] + end + + S0_LIST --> G0{Inventory complete?} + G0 -->|No| S0_DISCOVER + G0 -->|Yes| S1 + + subgraph S1["Stage 1: Details"] + S1_CONTAINER[Per container] --> S1_INT[Internal comp] + S1_CONTAINER --> S1_LINK[Linkage comp] + S1_INT --> S1_REF[Extract refs] + S1_LINK --> S1_REF + S1_REF --> S1_ASK{Questions?} + S1_ASK -->|Yes| S1_SOCRATIC[Socratic] --> S1_CONTAINER + S1_ASK -->|No| S1_NEXT{More?} + S1_NEXT -->|Yes| S1_CONTAINER + end + + S1_NEXT -->|No| G1{Fix inventory?} + G1 -->|Yes| S0_DISCOVER + G1 -->|No| S2 + + subgraph S2["Stage 2: Finalize"] + S2_CHECK[Integrity checks] + end + + S2_CHECK --> G2{Issues?} + G2 -->|Inventory| S0_DISCOVER + G2 -->|Detail| S1_CONTAINER + G2 -->|None| DONE([Implemented]) +``` + +--- + +## Stage 0: Inventory + +### Context Discovery + +| Arg | Value | +|-----|-------| +| PROJECT | pr-impact | +| GOAL | AI-powered PR impact analysis — detect breaking changes, map blast radius, and score risk before merge | +| SUMMARY | TypeScript monorepo providing Claude-driven PR analysis via GitHub Action, Claude Code plugin, and MCP tools | + +### Abstract Constraints + +| Constraint | Rationale | Affected Containers | +|------------|-----------|---------------------| +| All git operations must go through simple-git | Avoid raw child_process for testability and safety | c3-1-tools-core | +| Templates are the single source of truth for analysis methodology | Prevent prompt/report drift between action and skill | c3-3-action, c3-4-skill | 
+| Tool definitions must be canonical and shared | Prevent tool schema drift between MCP server and action | c3-1-tools-core, c3-2-tools, c3-3-action | +| ESM-only with CJS exception for action output | GitHub Actions requires CommonJS entry point | c3-1-tools-core, c3-2-tools, c3-3-action | +| Agentic loop has safety limits (30 iterations, 180s timeout) | Prevent runaway API costs and unbounded execution | c3-3-action | + +### Container Discovery + +| N | CONTAINER_NAME | BOUNDARY | GOAL | SUMMARY | +|---|----------------|----------|------|---------| +| 1 | tools-core | library | Provide pure git/repo tool functions with no framework dependency | 6 handler functions + shared tool definitions; the shared foundation both tools and action import | +| 2 | tools | service | Expose tools-core as MCP protocol tools for AI clients | Thin MCP server wrapping tools-core with zod schemas via stdio transport | +| 3 | action | worker | Run agentic Claude analysis loop in CI and post PR comments | GitHub Action that calls Anthropic API, dispatches tool calls to tools-core, posts reports | +| 4 | skill | app | Provide the /pr-impact slash command for interactive Claude Code analysis | Claude Code plugin assembled from shared templates; no runtime deps | + +### Component Discovery (Brief) + +| N | NN | COMPONENT_NAME | CATEGORY | GOAL | SUMMARY | +|---|----|-------------- |----------|------|---------| +| 1 | 01 | tool-definitions | foundation | Canonical tool schemas shared by MCP server and action | TOOL_DEFS array with name, description, properties, required fields | +| 1 | 10 | git-diff | feature | Get raw git diff between two refs | Wraps simple-git diff with optional per-file filtering | +| 1 | 11 | read-file | feature | Read file content at a specific git ref | Wraps simple-git show for ref:path lookups | +| 1 | 12 | list-files | feature | List changed files with status and line stats | Combines --name-status with diffSummary for full file info | +| 1 | 13 | search-code | 
feature | Search for regex patterns via git grep | Uses git.raw() for reliable glob filtering; handles exit code 1 | +| 1 | 14 | find-importers | feature | Build cached reverse dependency map and find importers | Scans all source files with fast-glob; session-level cache | +| 1 | 15 | list-tests | feature | Find test files associated with source files | Generates candidate paths across sibling/__tests__/test/ dirs | +| 2 | 01 | mcp-server | foundation | MCP stdio transport with lifecycle management | Creates McpServer, connects StdioServerTransport, handles SIGINT/SIGTERM | +| 2 | 10 | tool-registration | feature | Convert tool-defs to zod schemas and register on MCP server | defToZod() + registerAllTools() wrapping each tools-core handler | +| 3 | 01 | template-embedding | foundation | Build-time template generation for runtime access | scripts/embed-templates.ts generates src/generated/templates.ts | +| 3 | 10 | agentic-client | feature | Anthropic API agentic loop with safety limits | 30-iteration max, 180s timeout, temperature 0, parallel tool execution | +| 3 | 11 | tool-dispatcher | feature | Route tool_use calls to tools-core functions | Switch-based dispatch with repoPath injection | +| 3 | 12 | comment-poster | feature | Upsert PR comments via GitHub API with HTML markers | Paginated search for existing marker, PATCH or POST | +| 3 | 13 | action-entrypoint | feature | Read inputs, run analysis, parse score, gate threshold | Orchestrates analysis flow, sets outputs, applies threshold | +| 4 | 01 | plugin-config | foundation | Claude Code plugin metadata and MCP server registration | .claude-plugin/config.json + mcp.json | +| 4 | 10 | skill-prompt | feature | Assembled analysis prompt from shared templates | skill.md generated by scripts/build-skill.ts | + +### Ref Discovery + +| SLUG | TITLE | GOAL | Scope | Applies To | +|------|-------|------|-------|------------| +| build-pipeline | Build Pipeline & Template Embedding | Ensure templates are single source 
of truth consumed at build time | Build-time | c3-3-action, c3-4-skill | +| esm-conventions | ESM Module Conventions | Enforce ESM-only with .js extensions and CJS exception for action | All packages | c3-1-tools-core, c3-2-tools, c3-3-action, c3-4-skill | +| git-operations | Git Operation Patterns | Standardize all git access through simple-git | Runtime git calls | c3-1-tools-core | + +### Overview Diagram + +```mermaid +graph TD + subgraph Actors + DEV[Developer] + CI[GitHub Actions CI] + AI[MCP AI Client] + end + + subgraph "pr-impact" + SKILL["c3-4 skill
Claude Code Plugin"] + ACTION["c3-3 action
GitHub Action"] + TOOLS["c3-2 tools
MCP Server"] + CORE["c3-1 tools-core
Pure Tool Functions"] + end + + subgraph External + ANTHROPIC[Anthropic API] + GITHUB[GitHub API] + GIT[Git Repository] + end + + DEV -->|/pr-impact| SKILL + CI -->|on: pull_request| ACTION + AI -->|MCP stdio| TOOLS + + SKILL -->|registers MCP| TOOLS + ACTION -->|imports| CORE + TOOLS -->|imports| CORE + + ACTION -->|Claude API| ANTHROPIC + ACTION -->|PR comments| GITHUB + CORE -->|simple-git| GIT +``` + +### Gate 0 + +- [x] Context args filled (PROJECT, GOAL, SUMMARY) +- [x] Abstract Constraints identified +- [x] All containers identified with args (including BOUNDARY) +- [x] All components identified (brief) with args and category +- [x] Cross-cutting refs identified +- [x] Overview diagram generated + +--- + +## Stage 1: Details + +### Container: c3-1-tools-core + +**Created:** [x] `.c3/c3-1-tools-core/README.md` + +| Type | Component ID | Name | Category | Doc Created | +|------|--------------|------|----------|-------------| +| Internal | c3-101 | tool-definitions | foundation | [x] | +| Internal | c3-110 | git-diff | feature | [x] | +| Internal | c3-111 | read-file | feature | [x] | +| Internal | c3-112 | list-files | feature | [x] | +| Internal | c3-113 | search-code | feature | [x] | +| Internal | c3-114 | find-importers | feature | [x] | +| Internal | c3-115 | list-tests | feature | [x] | + +### Container: c3-2-tools + +**Created:** [x] `.c3/c3-2-tools/README.md` + +| Type | Component ID | Name | Category | Doc Created | +|------|--------------|------|----------|-------------| +| Internal | c3-201 | mcp-server | foundation | [x] | +| Linkage | c3-210 | tool-registration | feature | [x] | + +### Container: c3-3-action + +**Created:** [x] `.c3/c3-3-action/README.md` + +| Type | Component ID | Name | Category | Doc Created | +|------|--------------|------|----------|-------------| +| Internal | c3-301 | template-embedding | foundation | [x] | +| Internal | c3-310 | agentic-client | feature | [x] | +| Linkage | c3-311 | tool-dispatcher | feature | [x] | +| 
Internal | c3-312 | comment-poster | feature | [x] | +| Internal | c3-313 | action-entrypoint | feature | [x] | + +### Container: c3-4-skill + +**Created:** [x] `.c3/c3-4-skill/README.md` + +| Type | Component ID | Name | Category | Doc Created | +|------|--------------|------|----------|-------------| +| Internal | c3-401 | plugin-config | foundation | [x] | +| Internal | c3-410 | skill-prompt | feature | [x] | + +### Refs Created + +| Ref ID | Pattern | Doc Created | +|--------|---------|-------------| +| ref-build-pipeline | Build Pipeline & Template Embedding | [x] | +| ref-esm-conventions | ESM Module Conventions | [x] | +| ref-git-operations | Git Operation Patterns | [x] | + +### Gate 1 + +- [x] All container README.md created +- [x] All component docs created +- [x] All refs documented +- [x] No new items discovered + +--- + +## Stage 2: Finalize + +### Integrity Checks + +| Check | Status | +|-------|--------| +| Context <-> Container (all containers listed in c3-0) | [x] | +| Container <-> Component (all components listed in container README) | [x] | +| Component <-> Component (linkages documented) | [x] | +| * <-> Refs (refs cited correctly, Cited By updated) | [x] | + +### Gate 2 + +- [x] All integrity checks pass +- [x] Run audit (9 PASS, 1 WARN expected on fresh onboard) + +--- + +## Conflict Resolution + +If later stage reveals earlier errors: + +| Conflict | Found In | Affects | Resolution | +|----------|----------|---------|------------| +| | | | | + +--- + +## Exit + +When Gate 2 complete -> change frontmatter status to `implemented` + +## Audit Record + +| Phase | Date | Notes | +|-------|------|-------| +| Adopted | 2026-02-12 | Initial C3 structure created | +| Implemented | 2026-02-12 | All gates passed, audit clean (9 PASS, 1 expected WARN) | diff --git a/.c3/c3-1-tools-core/README.md b/.c3/c3-1-tools-core/README.md new file mode 100644 index 0000000..75ec572 --- /dev/null +++ b/.c3/c3-1-tools-core/README.md @@ -0,0 +1,63 @@ +--- +id: c3-1 
+c3-version: 4 +title: tools-core +type: container +boundary: library +parent: c3-0 +goal: Provide pure git/repo tool functions with no framework dependency +summary: 6 handler functions + shared tool definitions; the shared foundation both tools and action import +--- + +# tools-core + +## Goal + +Provide pure git/repo tool functions with no framework dependency. Both the MCP server (c3-2) and GitHub Action (c3-3) import from here, eliminating duplication. + +## Responsibilities + +- Own all git/filesystem interaction logic (simple-git, fast-glob) +- Define canonical tool schemas (TOOL_DEFS) consumed by MCP and action +- Export typed handler functions with consistent `repoPath` parameter pattern +- Cache expensive operations (reverse dependency map) at session level + +## Complexity Assessment + +**Level:** moderate +**Why:** Multiple independent tools with different git/fs operations; session-level caching in find-importers; regex-based import parsing with 3 patterns; file status mapping from git output format; candidate path generation across multiple directory conventions. 
+ +## Components + +| ID | Name | Category | Status | Goal Contribution | +|----|------|----------|--------|-------------------| +| c3-101 | tool-definitions | foundation | implemented | Canonical schemas shared across consumers | +| c3-110 | git-diff | feature | implemented | Raw diff evidence for breaking change detection | +| c3-111 | read-file | feature | implemented | File content at any ref for before/after comparison | +| c3-112 | list-files | feature | implemented | Changed file inventory with status and stats | +| c3-113 | search-code | feature | implemented | Pattern search for doc staleness and symbol references | +| c3-114 | find-importers | feature | implemented | Reverse dependency map for impact graph | +| c3-115 | list-tests | feature | implemented | Test file discovery for coverage gap analysis | + +## Internal Dependencies + +```mermaid +graph LR + DEFS[c3-101 tool-definitions] + + GD[c3-110 git-diff] + RF[c3-111 read-file] + LF[c3-112 list-files] + SC[c3-113 search-code] + FI[c3-114 find-importers] + LT[c3-115 list-tests] + + GD -.->|uses| SG[simple-git] + RF -.->|uses| SG + LF -.->|uses| SG + SC -.->|uses| SG + FI -.->|uses| FG[fast-glob] + LT -.->|uses| FG +``` + +All 6 tool handlers are independent of each other. TOOL_DEFS (c3-101) is a pure data structure with no code dependencies. diff --git a/.c3/c3-1-tools-core/c3-101-tool-definitions.md b/.c3/c3-1-tools-core/c3-101-tool-definitions.md new file mode 100644 index 0000000..7b2c8a7 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-101-tool-definitions.md @@ -0,0 +1,39 @@ +--- +id: c3-101 +c3-version: 4 +title: Tool Definitions +type: component +category: foundation +parent: c3-1 +goal: Canonical tool schemas shared by MCP server and action +summary: TOOL_DEFS array with name, description, properties, required fields — single source of truth for tool shape +--- + +# Tool Definitions + +## Goal + +Canonical tool schemas shared by MCP server and action. 
Both c3-2 (tools) and c3-3 (action) derive their tool registrations from this single definition, preventing schema drift. + +## Container Connection + +Without tool-definitions, both consumers would independently define tool schemas, leading to inevitable drift. This foundation component enforces a single source of truth. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| OUT (provides) | `TOOL_DEFS` array, `ToolDef` / `ToolParamDef` types | c3-210 (tool-registration), c3-310 (agentic-client) | + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tool-defs.ts` | TOOL_DEFS constant and ToolDef/ToolParamDef interfaces | +| `packages/tools-core/src/index.ts` | Re-exports TOOL_DEFS and types | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-esm-conventions | Barrel export pattern with .js extensions | diff --git a/.c3/c3-1-tools-core/c3-110-git-diff.md b/.c3/c3-1-tools-core/c3-110-git-diff.md new file mode 100644 index 0000000..77e766c --- /dev/null +++ b/.c3/c3-1-tools-core/c3-110-git-diff.md @@ -0,0 +1,39 @@ +--- +id: c3-110 +c3-version: 4 +title: git-diff +type: component +category: feature +parent: c3-1 +goal: Get raw git diff between two refs +summary: Wraps simple-git diff with optional per-file filtering via three-dot range +--- + +# git-diff + +## Goal + +Get raw git diff between two refs. Supports optional `file` parameter for per-file diffs, which the system prompt requires to avoid loading the full diff at once. + +## Container Connection + +Provides the primary diff evidence that the AI agent uses for breaking change detection and diff size scoring. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `simple-git` | External: simple-git | +| OUT (provides) | `gitDiff()`, `GitDiffParams`, `GitDiffResult` | c3-210, c3-311 | + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/git-diff.ts` | `gitDiff()` implementation (22 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-git-operations | Uses simple-git three-dot range convention | diff --git a/.c3/c3-1-tools-core/c3-111-read-file.md b/.c3/c3-1-tools-core/c3-111-read-file.md new file mode 100644 index 0000000..90b30c9 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-111-read-file.md @@ -0,0 +1,39 @@ +--- +id: c3-111 +c3-version: 4 +title: read-file +type: component +category: feature +parent: c3-1 +goal: Read file content at a specific git ref +summary: Wraps simple-git show for ref:path lookups to read files at any branch or commit +--- + +# read-file + +## Goal + +Read file content at a specific git ref. Enables the AI agent to compare base and head versions of files for breaking change detection. + +## Container Connection + +The agent calls this to read both the old (base) and new (head) versions of source files during breaking change analysis (Step 2 in the system prompt). 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `simple-git` | External: simple-git | +| OUT (provides) | `readFileAtRef()`, `ReadFileAtRefParams`, `ReadFileAtRefResult` | c3-210, c3-311 | + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/read-file.ts` | `readFileAtRef()` implementation (17 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-git-operations | Uses `git show ref:path` via simple-git | diff --git a/.c3/c3-1-tools-core/c3-112-list-files.md b/.c3/c3-1-tools-core/c3-112-list-files.md new file mode 100644 index 0000000..515da15 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-112-list-files.md @@ -0,0 +1,46 @@ +--- +id: c3-112 +c3-version: 4 +title: list-files +type: component +category: feature +parent: c3-1 +goal: List changed files with status and line stats +summary: Combines git --name-status with diffSummary for full file change inventory +--- + +# list-files + +## Goal + +List changed files with status and line stats. Provides the initial file inventory (Step 1) that the AI agent uses to categorize changes and plan its analysis. + +## Container Connection + +This is always the first tool called in analysis. It provides the file-level overview that drives all subsequent analysis steps. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `simple-git` | External: simple-git | +| OUT (provides) | `listChangedFiles()`, types (`ChangedFileEntry`, `FileStatus`, etc.) 
| c3-210, c3-311 | + +## Behavior + +- Runs two git commands: `--name-status` for file status (A/M/D/R/C) and `diffSummary` for line counts +- Merges results into `ChangedFileEntry[]` with path, status, additions, deletions +- Handles renamed/copied files by using the new path from the third column +- Binary files are handled via type guard (`'insertions' in f`) + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/list-files.ts` | `listChangedFiles()` + `parseNameStatus()` + `mapStatusCode()` (83 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-git-operations | Uses simple-git diff and diffSummary | diff --git a/.c3/c3-1-tools-core/c3-113-search-code.md b/.c3/c3-1-tools-core/c3-113-search-code.md new file mode 100644 index 0000000..14c2223 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-113-search-code.md @@ -0,0 +1,44 @@ +--- +id: c3-113 +c3-version: 4 +title: search-code +type: component +category: feature +parent: c3-1 +goal: Search for regex patterns via git grep +summary: Uses git.raw() for reliable glob filtering; handles exit code 1 as "no matches" +--- + +# search-code + +## Goal + +Search for regex patterns via git grep. Used by the AI agent for documentation staleness detection (Step 4) — finding docs that reference changed symbols. + +## Container Connection + +Enables doc staleness analysis by searching for references to modified symbol names across `*.md` files. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `simple-git` (raw mode) | External: simple-git | +| OUT (provides) | `searchCode()`, `SearchCodeParams`, `SearchCodeResult`, `SearchMatch` | c3-210, c3-311 | + +## Edge Cases + +- **Exit code 1**: git grep returns code 1 when no matches found. simple-git wraps this as an error. The handler checks for `"exited with code 1"` in the error message and returns `{ matches: [] }` instead of throwing. 
+- **Uses `git.raw()`** instead of `git.grep()` because simple-git's grep method doesn't reliably pass glob path specs. + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/search-code.ts` | `searchCode()` with git grep exit code handling (64 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-git-operations | Uses git.raw() for direct git grep access | diff --git a/.c3/c3-1-tools-core/c3-114-find-importers.md b/.c3/c3-1-tools-core/c3-114-find-importers.md new file mode 100644 index 0000000..6797793 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-114-find-importers.md @@ -0,0 +1,47 @@ +--- +id: c3-114 +c3-version: 4 +title: find-importers +type: component +category: feature +parent: c3-1 +goal: Build cached reverse dependency map and find importers +summary: Scans all source files with fast-glob, extracts imports via regex, caches reverse map per session +--- + +# find-importers + +## Goal + +Build cached reverse dependency map and find importers. Provides the impact graph data (Step 5) showing which files are affected by changes to a given module. + +## Container Connection + +Critical for impact breadth scoring. The AI agent calls this once per changed source file to map the blast radius. Session caching ensures the expensive full-repo scan only happens once. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `fast-glob`, `fs/promises` | External: fast-glob, Node.js | +| OUT (provides) | `findImporters()`, `clearImporterCache()`, types | c3-210, c3-311 | + +## Behavior + +- **First call**: Scans all `*.{ts,tsx,js,jsx}` files (excluding node_modules/dist/.git) via fast-glob +- **Import extraction**: 3 regex patterns — static import/export, dynamic import(), require() +- **Normalization**: Strips extensions (.ts/.tsx/.js/.jsx) and `/index` suffix for consistent matching; resolves relative paths (bare directory imports match index files) +- **Cache**: Module-level variables `cachedRepoPath` / `cachedReverseMap` — reused within same session, invalidated if repoPath changes +- **clearImporterCache()**: Exported for test cleanup + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/find-imports.ts` | `findImporters()`, `buildReverseMap()`, `extractImports()`, `normalizeModulePath()` (118 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-git-operations | Complements git-based tools with filesystem-based import analysis | diff --git a/.c3/c3-1-tools-core/c3-115-list-tests.md b/.c3/c3-1-tools-core/c3-115-list-tests.md new file mode 100644 index 0000000..7847ba3 --- /dev/null +++ b/.c3/c3-1-tools-core/c3-115-list-tests.md @@ -0,0 +1,44 @@ +--- +id: c3-115 +c3-version: 4 +title: list-tests +type: component +category: feature +parent: c3-1 +goal: Find test files associated with source files +summary: Generates candidate paths across sibling/__tests__/test/tests dirs, verifies existence with fast-glob +--- + +# list-tests + +## Goal + +Find test files associated with source files. Enables the AI agent to check test coverage gaps (Step 3) by identifying which source files have corresponding tests and whether those tests were updated. 
+ +## Container Connection + +The coverage ratio from this tool directly feeds the "untested changes" risk factor (weight 0.25). + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `fast-glob` | External: fast-glob | +| OUT (provides) | `listTestFiles()`, `ListTestFilesParams`, `ListTestFilesResult` | c3-210, c3-311 | + +## Behavior + +- Generates candidate test paths using source file name: + - Sibling: `dir/foo.test.ts`, `dir/foo.spec.ts` + - `__tests__/` under source dir: `dir/__tests__/foo.ts`, `dir/__tests__/foo.test.ts` + - `__tests__/` at package root (sibling to `src/`) + - Top-level `test/` and `tests/` directories +- Covers all 4 extensions: `.ts`, `.tsx`, `.js`, `.jsx` +- Uses `fast-glob` to verify which candidates actually exist +- `getPackageRoot()` finds the parent of `src/` or `lib/` for package-root `__tests__/` + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools-core/src/tools/list-tests.ts` | `listTestFiles()`, `buildCandidatePaths()`, helpers (88 lines) | diff --git a/.c3/c3-2-tools/README.md b/.c3/c3-2-tools/README.md new file mode 100644 index 0000000..a7fc988 --- /dev/null +++ b/.c3/c3-2-tools/README.md @@ -0,0 +1,35 @@ +--- +id: c3-2 +c3-version: 4 +title: tools +type: container +boundary: service +parent: c3-0 +goal: Expose tools-core as MCP protocol tools for AI clients +summary: Thin MCP server wrapping tools-core with zod schemas via stdio transport +--- + +# tools + +## Goal + +Expose tools-core as MCP protocol tools for AI clients. Provides the MCP stdio server that the Claude Code skill (c3-4) registers and that any MCP-compatible client can connect to. 
+ +## Responsibilities + +- Run MCP server on stdio transport (JSON-RPC) +- Convert canonical tool definitions to zod input schemas +- Wrap each tools-core handler in MCP try/catch error formatting +- Handle graceful shutdown on SIGINT/SIGTERM + +## Complexity Assessment + +**Level:** simple +**Why:** Thin wrapper layer with no business logic. Each tool registration follows the same pattern: defToZod + try/catch + JSON.stringify. The only non-trivial aspect is lifecycle management (signal handling). + +## Components + +| ID | Name | Category | Status | Goal Contribution | +|----|------|----------|--------|-------------------| +| c3-201 | mcp-server | foundation | implemented | Stdio transport and process lifecycle | +| c3-210 | tool-registration | feature | implemented | Converts tool-defs to zod schemas and registers handlers | diff --git a/.c3/c3-2-tools/c3-201-mcp-server.md b/.c3/c3-2-tools/c3-201-mcp-server.md new file mode 100644 index 0000000..f2a9b8a --- /dev/null +++ b/.c3/c3-2-tools/c3-201-mcp-server.md @@ -0,0 +1,34 @@ +--- +id: c3-201 +c3-version: 4 +title: MCP Server +type: component +category: foundation +parent: c3-2 +goal: MCP stdio transport with lifecycle management +summary: Creates McpServer, connects StdioServerTransport, handles SIGINT/SIGTERM graceful shutdown +--- + +# MCP Server + +## Goal + +MCP stdio transport with lifecycle management. Provides the runtime process that hosts all registered tools and communicates via JSON-RPC over stdin/stdout. + +## Container Connection + +Without this foundation, no MCP client can connect to the tools. It owns the process lifecycle that the skill (c3-4) and external AI clients depend on. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `@modelcontextprotocol/sdk` (McpServer, StdioServerTransport) | External | +| IN (uses) | `registerAllTools()` | c3-210 (tool-registration) | +| OUT (provides) | Running MCP server process | c3-4 (skill via npx), external AI clients | + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools/src/index.ts` | Server creation, transport connection, signal handling (25 lines) | diff --git a/.c3/c3-2-tools/c3-210-tool-registration.md b/.c3/c3-2-tools/c3-210-tool-registration.md new file mode 100644 index 0000000..5c681e8 --- /dev/null +++ b/.c3/c3-2-tools/c3-210-tool-registration.md @@ -0,0 +1,48 @@ +--- +id: c3-210 +c3-version: 4 +title: Tool Registration +type: component +category: feature +parent: c3-2 +goal: Convert tool-defs to zod schemas and register on MCP server +summary: defToZod() converts canonical definitions to zod; registerAllTools() wires each handler with try/catch +--- + +# Tool Registration + +## Goal + +Convert tool-defs to zod schemas and register on MCP server. Bridges the canonical tool definitions from c3-101 into the MCP SDK's expected format with schema validation. + +## Container Connection + +This is the sole feature component — it translates the shared tool contract into MCP-specific registrations, making tools-core handlers accessible via the MCP protocol. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `TOOL_DEFS` | c3-101 (tool-definitions) | +| IN (uses) | All 6 handler functions | c3-110 through c3-115 | +| IN (uses) | `zod`, `McpServer` | External | +| OUT (provides) | `registerAllTools()` | c3-201 (mcp-server) | + +## Behavior + +- `defToZod()`: Converts ToolDef to a `Record` of zod schemas keyed by param name, adding MCP-specific `repoPath` optional param +- Each tool is registered with `server.tool(name, description, schema, handler)` +- Handlers wrap tools-core calls in try/catch: success returns `{ content: [{type: 'text', text}] }`, error returns `{ isError: true }` +- Types (`ToolDef`, param interfaces) imported from `@pr-impact/tools-core` via `import type` + +## Code References + +| File | Purpose | +|------|---------| +| `packages/tools/src/register.ts` | `registerAllTools()`, `defToZod()` (145 lines) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-esm-conventions | Uses workspace:* dependency with .js import extensions | diff --git a/.c3/c3-3-action/README.md b/.c3/c3-3-action/README.md new file mode 100644 index 0000000..21e28bf --- /dev/null +++ b/.c3/c3-3-action/README.md @@ -0,0 +1,52 @@ +--- +id: c3-3 +c3-version: 4 +title: action +type: container +boundary: worker +parent: c3-0 +goal: Run agentic Claude analysis loop in CI and post PR comments +summary: GitHub Action that calls Anthropic API, dispatches tool calls to tools-core, posts structured risk reports +--- + +# action + +## Goal + +Run agentic Claude analysis loop in CI and post PR comments. Orchestrates the full analysis pipeline: read inputs, call Claude with tools, gather evidence, produce report, post comment, gate on threshold. 
+ +## Responsibilities + +- Embed prompt/report templates at build time (no runtime filesystem reads) +- Drive the Anthropic API agentic loop within safety limits +- Dispatch tool_use calls to tools-core functions with repoPath injection +- Parse risk score from generated report +- Post/update PR comment via GitHub API with idempotent markers +- Fail CI if risk score exceeds threshold + +## Complexity Assessment + +**Level:** moderate +**Why:** Agentic loop with multiple stop conditions (iteration limit, wall-clock timeout, end_turn); parallel tool execution via Promise.all; risk score regex parsing with graceful fallback; comment upsert with pagination; CJS bundling with all dependencies inlined. + +## Components + +| ID | Name | Category | Status | Goal Contribution | +|----|------|----------|--------|-------------------| +| c3-301 | template-embedding | foundation | implemented | Build-time template access without filesystem reads | +| c3-310 | agentic-client | feature | implemented | Claude API loop producing the analysis report | +| c3-311 | tool-dispatcher | feature | implemented | Routes tool_use to tools-core with repoPath injection | +| c3-312 | comment-poster | feature | implemented | Idempotent PR comment upsert via GitHub API | +| c3-313 | action-entrypoint | feature | implemented | Orchestrates inputs → analysis → outputs → gating | + +## Internal Flow + +```mermaid +flowchart LR + ENTRY[c3-313 entrypoint] -->|inputs| CLIENT[c3-310 agentic-client] + CLIENT -->|tool_use| DISPATCH[c3-311 tool-dispatcher] + DISPATCH -->|calls| CORE["c3-1 tools-core"] + CLIENT -->|report| ENTRY + ENTRY -->|post| COMMENT[c3-312 comment-poster] + CLIENT -.->|uses| TMPL[c3-301 templates] +``` diff --git a/.c3/c3-3-action/c3-301-template-embedding.md b/.c3/c3-3-action/c3-301-template-embedding.md new file mode 100644 index 0000000..147ac6e --- /dev/null +++ b/.c3/c3-3-action/c3-301-template-embedding.md @@ -0,0 +1,40 @@ +--- +id: c3-301 +c3-version: 4 +title: Template 
Embedding +type: component +category: foundation +parent: c3-3 +goal: Build-time template generation for runtime access +summary: scripts/embed-templates.ts reads templates/*.md and generates src/generated/templates.ts as string constants +--- + +# Template Embedding + +## Goal + +Build-time template generation for runtime access. The action runs as a single bundled CJS file with no access to the source repo's templates/ directory, so templates must be embedded as string constants. + +## Container Connection + +Without this foundation, the agentic client (c3-310) would have no system prompt or report template at runtime. This enables the "templates as single source of truth" abstract constraint. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `templates/system-prompt.md`, `templates/report-template.md` | Shared templates | +| OUT (provides) | `SYSTEM_PROMPT`, `REPORT_TEMPLATE` constants | c3-310 (agentic-client) | + +## Code References + +| File | Purpose | +|------|---------| +| `scripts/embed-templates.ts` | Build script that generates templates.ts (26 lines) | +| `packages/action/src/generated/templates.ts` | Generated output (auto-generated, do not edit) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-build-pipeline | Defines the template embedding convention | diff --git a/.c3/c3-3-action/c3-310-agentic-client.md b/.c3/c3-3-action/c3-310-agentic-client.md new file mode 100644 index 0000000..ce422c5 --- /dev/null +++ b/.c3/c3-3-action/c3-310-agentic-client.md @@ -0,0 +1,46 @@ +--- +id: c3-310 +c3-version: 4 +title: Agentic Client +type: component +category: feature +parent: c3-3 +goal: Anthropic API agentic loop with safety limits +summary: 30-iteration max, 180s timeout, temperature 0, parallel tool execution via Promise.all +--- + +# Agentic Client + +## Goal + +Anthropic API agentic loop with safety limits. 
Drives the Claude conversation that performs the 6-step analysis methodology, executing tool calls as Claude requests them. + +## Container Connection + +Core analysis engine. Without this, there is no AI-driven analysis — it orchestrates the Claude API conversation that produces the final report. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `SYSTEM_PROMPT`, `REPORT_TEMPLATE` | c3-301 (template-embedding) | +| IN (uses) | `TOOL_DEFS` | c3-101 (tool-definitions) | +| IN (uses) | `executeTool()` | c3-311 (tool-dispatcher) | +| IN (uses) | `@anthropic-ai/sdk` | External: Anthropic API | +| OUT (provides) | `runAnalysis()` returning final report string | c3-313 (action-entrypoint) | + +## Behavior + +- Builds Anthropic tool definitions from TOOL_DEFS (maps to `input_schema` format) +- Runs iterative message loop: send messages → get response → execute tools → append results → repeat +- **Stop conditions**: `end_turn` stop reason, no tool_use blocks, iteration limit (30), wall-clock timeout (180s) +- **Parallel execution**: All tool_use blocks in a single response executed concurrently via `Promise.all` +- **repoPath injection**: Clones tool input via spread to add repoPath without mutating conversation history +- **Graceful degradation**: On timeout/iteration limit, returns whatever text output is available; throws if no text was ever produced +- **Empty-output guard**: Throws `'Analysis completed without producing a report'` if Claude finishes without generating any text, preventing empty PR comments + +## Code References + +| File | Purpose | +|------|---------| +| `packages/action/src/client.ts` | `runAnalysis()`, `AnalysisOptions`, `TOOL_DEFINITIONS` (119 lines) | diff --git a/.c3/c3-3-action/c3-311-tool-dispatcher.md b/.c3/c3-3-action/c3-311-tool-dispatcher.md new file mode 100644 index 0000000..20f8a5b --- /dev/null +++ b/.c3/c3-3-action/c3-311-tool-dispatcher.md @@ -0,0 +1,39 @@ +--- +id: c3-311 +c3-version: 
4 +title: Tool Dispatcher +type: component +category: feature +parent: c3-3 +goal: Route tool_use calls to tools-core functions +summary: Switch-based dispatch with repoPath injection and JSON stringification +--- + +# Tool Dispatcher + +## Goal + +Route tool_use calls to tools-core functions. Translates the Anthropic API's tool_use blocks into direct calls to the tools-core handler functions. + +## Container Connection + +Bridge between the agentic client's API layer and the shared tool implementations. Without this, tool_use blocks would have no execution target. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | All 6 handler functions | c3-1 tools-core (c3-110 through c3-115) | +| OUT (provides) | `executeTool(name, input)` | c3-310 (agentic-client) | + +## Behavior + +- Switch on tool name, cast input to handler parameter types +- Returns string: raw text for git_diff/read_file, JSON.stringify for structured results +- Throws for unknown tool names + +## Code References + +| File | Purpose | +|------|---------| +| `packages/action/src/tools.ts` | `executeTool()` switch dispatcher (39 lines) | diff --git a/.c3/c3-3-action/c3-312-comment-poster.md b/.c3/c3-3-action/c3-312-comment-poster.md new file mode 100644 index 0000000..09a96e1 --- /dev/null +++ b/.c3/c3-3-action/c3-312-comment-poster.md @@ -0,0 +1,41 @@ +--- +id: c3-312 +c3-version: 4 +title: Comment Poster +type: component +category: feature +parent: c3-3 +goal: Upsert PR comments via GitHub API with HTML markers +summary: Searches for existing pr-impact comment by HTML markers, PATCHes or POSTs accordingly +--- + +# Comment Poster + +## Goal + +Upsert PR comments via GitHub API with HTML markers. Ensures each PR has exactly one pr-impact report comment that gets updated on re-runs rather than creating duplicates. + +## Container Connection + +Delivers the analysis report to the PR where developers review it. 
Without this, reports would only be available as action outputs. + +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `fetch` (global) | Node.js built-in | +| OUT (provides) | `postOrUpdateComment()` returning comment URL | c3-313 (action-entrypoint) | + +## Behavior + +- Wraps report body in hidden HTML comment start/end markers that identify it as the pr-impact comment (invisible when rendered) +- `findExistingComment()`: Paginates through PR comments (100 per page) searching for the start marker; logs warning on API failure instead of silently returning null +- If existing comment found: PATCH to update +- If no existing comment: POST to create +- Uses GitHub REST API v3 with `X-GitHub-Api-Version: 2022-11-28` + +## Code References + +| File | Purpose | +|------|---------| +| `packages/action/src/comment.ts` | `postOrUpdateComment()`, `findExistingComment()` (66 lines) | diff --git a/.c3/c3-3-action/c3-313-action-entrypoint.md b/.c3/c3-3-action/c3-313-action-entrypoint.md new file mode 100644 index 0000000..f9ea3c1 --- /dev/null +++ b/.c3/c3-3-action/c3-313-action-entrypoint.md @@ -0,0 +1,45 @@ +--- +id: c3-313 +c3-version: 4 +title: Action Entrypoint +type: component +category: feature +parent: c3-3 +goal: Orchestrate inputs, analysis, outputs, and threshold gating +summary: Reads GitHub Action inputs, runs analysis, parses risk score, posts comment, applies threshold gate +--- + +# Action Entrypoint + +## Goal + +Orchestrate inputs, analysis, outputs, and threshold gating. This is the GitHub Action's main() that ties everything together. + +## Container Connection + +The entry point that makes the action a complete GitHub Action. Without it, the other components are libraries without a consumer. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `runAnalysis()` | c3-310 (agentic-client) | +| IN (uses) | `postOrUpdateComment()` | c3-312 (comment-poster) | +| IN (uses) | `@actions/core`, `@actions/github` | External | +| OUT (provides) | Action outputs: `risk-score`, `risk-level`, `report` | GitHub Actions runtime | + +## Behavior + +1. Read inputs: `anthropic-api-key`, `base-branch`, `model`, `threshold`, `github-token` +2. Call `runAnalysis()` with inputs +3. Parse risk score via regex: `**Risk Score**: {N}/100 ({level})` +4. Set action outputs +5. If in PR context with token: post comment +6. If threshold set and score >= threshold: `core.setFailed()` +7. Score parse failure: set score to -1, skip threshold check (no false-fail) + +## Code References + +| File | Purpose | +|------|---------| +| `packages/action/src/index.ts` | `main()` orchestration (63 lines) | diff --git a/.c3/c3-4-skill/README.md b/.c3/c3-4-skill/README.md new file mode 100644 index 0000000..df43bce --- /dev/null +++ b/.c3/c3-4-skill/README.md @@ -0,0 +1,35 @@ +--- +id: c3-4 +c3-version: 4 +title: skill +type: container +boundary: app +parent: c3-0 +goal: Provide the /pr-impact slash command for interactive Claude Code analysis +summary: Claude Code plugin assembled from shared templates; no runtime dependencies +--- + +# skill + +## Goal + +Provide the `/pr-impact` slash command for interactive Claude Code analysis. Users invoke the skill directly in Claude Code for branch-level PR impact analysis with conversational follow-up. + +## Responsibilities + +- Define Claude Code plugin metadata (name, description, skills) +- Register the MCP tools server for tool access during analysis +- Assemble the skill prompt from shared templates at build time +- Accept optional base/head branch arguments + +## Complexity Assessment + +**Level:** trivial +**Why:** No runtime code — entirely static files assembled at build time. 
The skill prompt is a concatenation of system-prompt.md and report-template.md with a YAML frontmatter header. + +## Components + +| ID | Name | Category | Status | Goal Contribution | +|----|------|----------|--------|-------------------| +| c3-401 | plugin-config | foundation | implemented | Plugin metadata and MCP server registration | +| c3-410 | skill-prompt | feature | implemented | Assembled analysis prompt defining the /pr-impact experience | diff --git a/.c3/c3-4-skill/c3-401-plugin-config.md b/.c3/c3-4-skill/c3-401-plugin-config.md new file mode 100644 index 0000000..b926de3 --- /dev/null +++ b/.c3/c3-4-skill/c3-401-plugin-config.md @@ -0,0 +1,34 @@ +--- +id: c3-401 +c3-version: 4 +title: Plugin Config +type: component +category: foundation +parent: c3-4 +goal: Claude Code plugin metadata and MCP server registration +summary: .claude-plugin/config.json defines plugin identity; mcp.json registers the tools MCP server +--- + +# Plugin Config + +## Goal + +Claude Code plugin metadata and MCP server registration. Tells Claude Code what this plugin provides and which MCP server to start for tool access. + +## Container Connection + +Without this config, Claude Code wouldn't recognize the package as a plugin or know to start the MCP tools server. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| OUT (provides) | Plugin identity, MCP server registration | Claude Code runtime | +| OUT (provides) | MCP server reference to `@pr-impact/tools` | c3-2 (tools, via npx) | + +## Code References + +| File | Purpose | +|------|---------| +| `packages/skill/.claude-plugin/config.json` | Plugin name, version, description, skills array | +| `packages/skill/mcp.json` | MCP server command: `npx -y @pr-impact/tools` | diff --git a/.c3/c3-4-skill/c3-410-skill-prompt.md b/.c3/c3-4-skill/c3-410-skill-prompt.md new file mode 100644 index 0000000..604783c --- /dev/null +++ b/.c3/c3-4-skill/c3-410-skill-prompt.md @@ -0,0 +1,40 @@ +--- +id: c3-410 +c3-version: 4 +title: Skill Prompt +type: component +category: feature +parent: c3-4 +goal: Assembled analysis prompt from shared templates +summary: skill.md generated by scripts/build-skill.ts — concatenates system prompt + report template with YAML frontmatter +--- + +# Skill Prompt + +## Goal + +Assembled analysis prompt from shared templates. Defines the `/pr-impact` slash command experience — the same analysis methodology as the GitHub Action but delivered interactively. + +## Container Connection + +This is the entire user-facing product of the skill package. Without it, the `/pr-impact` command would have no instructions for Claude. 
+ +## Dependencies + +| Direction | What | From/To | +|-----------|------|---------| +| IN (uses) | `templates/system-prompt.md`, `templates/report-template.md` | Shared templates | +| OUT (provides) | `/pr-impact` slash command prompt | Claude Code runtime | + +## Code References + +| File | Purpose | +|------|---------| +| `scripts/build-skill.ts` | Build script that assembles skill.md (38 lines) | +| `packages/skill/skill.md` | Generated output (auto-generated, do not edit) | + +## Related Refs + +| Ref | How It Serves Goal | +|-----|-------------------| +| ref-build-pipeline | Defines the template assembly convention | diff --git a/.c3/refs/ref-build-pipeline.md b/.c3/refs/ref-build-pipeline.md new file mode 100644 index 0000000..670db84 --- /dev/null +++ b/.c3/refs/ref-build-pipeline.md @@ -0,0 +1,56 @@ +--- +id: ref-build-pipeline +c3-version: 4 +title: Build Pipeline & Template Embedding +goal: Ensure templates are single source of truth consumed at build time by action and skill +scope: [c3-3, c3-4] +--- + +# Build Pipeline & Template Embedding + +## Goal + +Ensure templates are single source of truth consumed at build time by action and skill. Prevents prompt/report drift between the two consumers. + +## Choice + +Shared markdown templates in `templates/` are consumed by two build scripts that generate package-specific outputs: + +| Consumer | Script | Output | Format | +|----------|--------|--------|--------| +| action (c3-3) | `scripts/embed-templates.ts` | `src/generated/templates.ts` | TypeScript string constants | +| skill (c3-4) | `scripts/build-skill.ts` | `skill.md` | Markdown with YAML frontmatter | + +Both generated files are committed to their respective packages. 
+ +## Why + +- **Action** runs as a single bundled CJS file in GitHub Actions — no filesystem access to the source repo's `templates/` directory at runtime +- **Skill** needs a self-contained `skill.md` for npm publishing — can't reference files outside the package +- **Deduplication**: A single edit to `templates/system-prompt.md` updates both consumers via `pnpm build` +- **Alternatives rejected**: Runtime file reads (fragile, CJS incompatible), copy-paste (drift), git submodules (overhead) + +## How + +| Guideline | Example | +|-----------|---------| +| Never edit generated files directly | `src/generated/templates.ts` and `skill.md` are auto-generated | +| Run `pnpm build` after template changes | Turborepo dependency graph ensures correct order | +| Prebuild hook in action | `package.json` `prebuild` script runs embed-templates before tsup | +| Build script in skill | `package.json` `build` script runs build-skill.ts | + +## Scope + +**Applies to:** +- `packages/action` — template embedding via prebuild +- `packages/skill` — skill prompt assembly via build +- `templates/` — source of truth for analysis methodology + +**Does NOT apply to:** +- `packages/tools-core` — no template dependency +- `packages/tools` — no template dependency + +## Cited By + +- c3-301 (template-embedding) +- c3-410 (skill-prompt) diff --git a/.c3/refs/ref-esm-conventions.md b/.c3/refs/ref-esm-conventions.md new file mode 100644 index 0000000..0982ed2 --- /dev/null +++ b/.c3/refs/ref-esm-conventions.md @@ -0,0 +1,55 @@ +--- +id: ref-esm-conventions +c3-version: 4 +title: ESM Module Conventions +goal: Enforce ESM-only with .js extensions and CJS exception for action +scope: [c3-1, c3-2, c3-3, c3-4] +--- + +# ESM Module Conventions + +## Goal + +Enforce ESM-only with .js extensions and CJS exception for action. Ensures consistent module resolution across the monorepo. + +## Choice + +All packages use `"type": "module"` in package.json. 
Import paths use `.js` extensions even for `.ts` source files (TypeScript's `moduleResolution: "bundler"` resolves these). The single exception is the action package which outputs CJS. + +## Why + +- **ESM is the standard**: Modern Node.js, TypeScript, and tooling assume ESM +- **.js extensions**: Required for correct ESM resolution — TypeScript doesn't rewrite import extensions +- **CJS exception**: GitHub Actions runner expects a CommonJS entry point at `dist/index.cjs`; tsup handles the ESM→CJS conversion at build time with `noExternal: [/.*/]` to bundle all dependencies + +## How + +| Guideline | Example | +|-----------|---------| +| Always use .js extensions in imports | `import { gitDiff } from './tools/git-diff.js'` | +| Set `"type": "module"` in all package.json | Already configured | +| Action builds to CJS via tsup | `format: ['cjs']` in tsup.config | +| Barrel exports from index.ts | `export { gitDiff } from './tools/git-diff.js'` | + +## Not This + +| Alternative | Rejected Because | +|-------------|------------------| +| Extension-less imports | Breaks ESM resolution without custom loaders | +| Dual CJS+ESM builds | Unnecessary complexity; only action needs CJS | +| `moduleResolution: "node"` | Doesn't support .js→.ts resolution | + +## Scope + +**Applies to:** +- All 4 packages (source code conventions) +- `tsconfig.base.json` settings + +**Does NOT apply to:** +- Build output format decisions (that's per-package) +- Test files (vitest handles resolution) + +## Cited By + +- c3-101 (tool-definitions) +- c3-210 (tool-registration) diff --git a/.c3/refs/ref-git-operations.md b/.c3/refs/ref-git-operations.md new file mode 100644 index 0000000..a306f01 --- /dev/null +++ b/.c3/refs/ref-git-operations.md @@ -0,0 +1,59 @@ +--- +id: ref-git-operations +c3-version: 4 +title: Git Operation Patterns +goal: Standardize all git access through simple-git for testability and safety +scope: [c3-1] +--- + +# Git Operation Patterns + +## Goal + +Standardize all git 
access through simple-git for testability and safety. All 6 tool handlers in tools-core use simple-git rather than raw `child_process.exec`. + +## Choice + +Use the `simple-git` library for all git operations. Create a new `simpleGit(repoPath)` instance per call. Use `git.raw()` when the high-level API is insufficient. + +## Why + +- **Testability**: simple-git can be mocked cleanly in vitest (mock the module, return fake responses) +- **Safety**: No shell injection risk from user-provided parameters +- **Ergonomics**: Typed API with promise support, error handling for common git exit codes +- **Consistency**: All 6 tools follow the same pattern — `const git = simpleGit(params.repoPath ?? process.cwd())` + +## How + +| Guideline | Example | +|-----------|---------| +| Initialize with repoPath or cwd | `const git = simpleGit(params.repoPath ?? process.cwd())` | +| Use three-dot range for branch diffs | `git.diff([`${base}...${head}`])` | +| Use `git.show()` for file-at-ref | `git.show([`${ref}:${filePath}`])` | +| Use `git.raw()` when high-level API is limited | `git.raw(['grep', '-n', '--', pattern])` for search-code | +| Handle git grep exit code 1 | Check error message for "exited with code 1" — means no matches | + +## Not This + +| Alternative | Rejected Because | +|-------------|------------------| +| `child_process.exec('git ...')` | Shell injection risk, no typed API, harder to mock | +| `isomorphic-git` | Heavier, less mature, doesn't support all git commands | +| `nodegit` | Native bindings, installation issues, project maintenance concerns | + +## Scope + +**Applies to:** +- All tool handlers in `packages/tools-core/src/tools/` + +**Does NOT apply to:** +- Build scripts (no git operations) +- Action/skill packages (they don't call git directly — they go through tools-core) + +## Cited By + +- c3-110 (git-diff) +- c3-111 (read-file) +- c3-112 (list-files) +- c3-113 (search-code) +- c3-114 (find-importers) From 974205a78c7e5a12b29226e6da57f4ab478a0824 Mon 
Sep 17 00:00:00 2001 From: ducdmdev Date: Thu, 12 Feb 2026 10:45:52 +0700 Subject: [PATCH 5/5] fix: audit fixes for docs, tests, and code quality Doc fixes: - Add tool-defs.ts to source layouts in CLAUDE.md and README.md - Fix comment marker text in action/CLAUDE.md - Add c3-generated blocks to all package CLAUDE.md files - Update test count to 100 Code fixes: - find-imports: strip /index suffix in normalizeModulePath so bare directory imports match index files - client: throw on empty output instead of returning empty string - comment: log warning on API failure instead of silent null return Test additions: - find-imports: dynamic import(), require(), directory-to-index resolution, unreadable file handling - list-files: copied (C) status, binary files --- CLAUDE.md | 3 +- README.md | 1 + packages/action/CLAUDE.md | 18 +++- packages/action/src/client.ts | 3 + packages/action/src/comment.ts | 5 +- packages/skill/CLAUDE.md | 13 +++ packages/tools-core/CLAUDE.md | 19 ++++ .../tools-core/__tests__/find-imports.test.ts | 89 +++++++++++++++++++ .../tools-core/__tests__/list-files.test.ts | 50 +++++++++++ packages/tools-core/src/tools/find-imports.ts | 4 + packages/tools/CLAUDE.md | 13 +++ 11 files changed, 215 insertions(+), 3 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 5740dae..01fe16f 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -44,6 +44,7 @@ Pure handler functions for git/repo operations. Both `tools` (MCP) and `action` ``` src/ index.ts -- Barrel exports for all handlers and types + tool-defs.ts -- Canonical tool definitions (TOOL_DEFS, ToolDef, ToolParamDef) tools/ git-diff.ts -- Get raw git diff between two refs read-file.ts -- Read file content at a specific git ref @@ -150,7 +151,7 @@ scripts/ - Vitest projects are configured in `vitest.config.ts` (root) with `packages/tools-core`, `packages/tools`, and `packages/action`. - Write **unit tests only** -- do not write integration tests that require a real git repository. 
- **Mock git operations** (simple-git calls) and external dependencies where needed; tests should not depend on filesystem or git state. -- Test files per package (14 files, 94 tests): +- Test files per package (14 files, 100 tests): - `packages/tools-core/__tests__/`: git-diff, read-file, list-files, search-code, find-imports, list-tests, regression (7 files) - `packages/tools/__tests__/`: index, register, build-scripts (3 files) - `packages/action/__tests__/`: tools, client, comment, index (4 files) diff --git a/README.md b/README.md index 782b57c..b653b13 100644 --- a/README.md +++ b/README.md @@ -167,6 +167,7 @@ pr-impact/ │ ├── tools-core/ @pr-impact/tools-core │ │ └── src/ │ │ ├── index.ts Barrel exports +│ │ ├── tool-defs.ts Canonical tool definitions (TOOL_DEFS) │ │ └── tools/ 6 pure handler functions (git-diff, read-file, │ │ list-files, search-code, find-imports, list-tests) │ │ diff --git a/packages/action/CLAUDE.md b/packages/action/CLAUDE.md index 65da689..a57488b 100644 --- a/packages/action/CLAUDE.md +++ b/packages/action/CLAUDE.md @@ -30,8 +30,24 @@ src/ - **Tool input cloning**: `client.ts` clones `toolUse.input` via spread to avoid mutating the conversation history. - **Parallel tool execution**: multiple tool_use blocks in a single response are executed concurrently via `Promise.all`. - **Risk score parsing**: regex extracts score from report. If parsing fails, sets score to `-1` and level to `unknown` instead of failing. -- **Comment upsert**: uses a single HTML comment marker to find and update existing comments. +- **Comment upsert**: uses paired start/end HTML comment markers to find and update existing comments. ## Testing Tests in `__tests__/` mock `@actions/core`, `@actions/github`, the Anthropic SDK, and tools-core functions. The entry point test uses `vi.resetModules()` + `vi.doMock()` to re-trigger the top-level `main()` call on each import.
+ + +## Architecture docs + +Before modifying this code, read: +- Container: `.c3/c3-3-action/README.md` +- Components: + - `.c3/c3-3-action/c3-301-template-embedding.md` + - `.c3/c3-3-action/c3-310-agentic-client.md` + - `.c3/c3-3-action/c3-311-tool-dispatcher.md` + - `.c3/c3-3-action/c3-312-comment-poster.md` + - `.c3/c3-3-action/c3-313-action-entrypoint.md` +- Patterns: `ref-build-pipeline` + +Full refs: `.c3/refs/ref-{name}.md` + diff --git a/packages/action/src/client.ts b/packages/action/src/client.ts index 5fdeabe..ca92ff8 100644 --- a/packages/action/src/client.ts +++ b/packages/action/src/client.ts @@ -77,6 +77,9 @@ export async function runAnalysis(options: AnalysisOptions): Promise { ); if (toolUseBlocks.length === 0 || response.stop_reason === 'end_turn') { + if (!lastTextOutput) { + throw new Error('Analysis completed without producing a report'); + } return lastTextOutput; } diff --git a/packages/action/src/comment.ts b/packages/action/src/comment.ts index 4daef31..9e0c971 100644 --- a/packages/action/src/comment.ts +++ b/packages/action/src/comment.ts @@ -50,7 +50,10 @@ async function findExistingComment( let page = 1; while (true) { const res = await fetch(`${baseUrl}?per_page=100&page=${page}`, { headers }); - if (!res.ok) return null; + if (!res.ok) { + console.warn(`Failed to list PR comments (page ${page}): HTTP ${res.status}`); + return null; + } const comments = (await res.json()) as Array<{ id: number; body?: string }>; if (comments.length === 0) break; for (const c of comments) { diff --git a/packages/skill/CLAUDE.md b/packages/skill/CLAUDE.md index 32b67c2..02c98b2 100644 --- a/packages/skill/CLAUDE.md +++ b/packages/skill/CLAUDE.md @@ -26,3 +26,16 @@ package.json -- Build script only - **MCP integration**: `mcp.json` tells Claude Code to start the `@pr-impact/tools` MCP server, making all 6 tools available during analysis. - **No runtime deps**: the published package contains only static files (`.claude-plugin/`, `skill.md`, `mcp.json`). 
- **Published files**: controlled by the `files` array in `package.json` -- only `.claude-plugin`, `skill.md`, and `mcp.json` are included. + + +## Architecture docs + +Before modifying this code, read: +- Container: `.c3/c3-4-skill/README.md` +- Components: + - `.c3/c3-4-skill/c3-401-plugin-config.md` + - `.c3/c3-4-skill/c3-410-skill-prompt.md` +- Patterns: `ref-build-pipeline` + +Full refs: `.c3/refs/ref-{name}.md` + diff --git a/packages/tools-core/CLAUDE.md b/packages/tools-core/CLAUDE.md index 2455f4d..31294d7 100644 --- a/packages/tools-core/CLAUDE.md +++ b/packages/tools-core/CLAUDE.md @@ -16,6 +16,7 @@ npx vitest run packages/tools-core # Run tests ``` src/ index.ts -- Barrel exports for all handlers and types + tool-defs.ts -- Canonical tool definitions (TOOL_DEFS, ToolDef, ToolParamDef) tools/ git-diff.ts -- gitDiff(): raw diff between two refs read-file.ts -- readFileAtRef(): file content at a git ref @@ -36,3 +37,21 @@ src/ ## Testing Tests in `__tests__/` mock `simple-git` and `fast-glob`. No real git repos needed. 
+ + +## Architecture docs + +Before modifying this code, read: +- Container: `.c3/c3-1-tools-core/README.md` +- Components: + - `.c3/c3-1-tools-core/c3-101-tool-definitions.md` (tool-defs.ts) + - `.c3/c3-1-tools-core/c3-110-git-diff.md` + - `.c3/c3-1-tools-core/c3-111-read-file.md` + - `.c3/c3-1-tools-core/c3-112-list-files.md` + - `.c3/c3-1-tools-core/c3-113-search-code.md` + - `.c3/c3-1-tools-core/c3-114-find-importers.md` + - `.c3/c3-1-tools-core/c3-115-list-tests.md` +- Patterns: `ref-git-operations`, `ref-esm-conventions` + +Full refs: `.c3/refs/ref-{name}.md` + diff --git a/packages/tools-core/__tests__/find-imports.test.ts b/packages/tools-core/__tests__/find-imports.test.ts index 7256a44..4ba2a3d 100644 --- a/packages/tools-core/__tests__/find-imports.test.ts +++ b/packages/tools-core/__tests__/find-imports.test.ts @@ -93,4 +93,93 @@ describe('findImporters', () => { await findImporters({ repoPath: '/repo', modulePath: 'src/foo.ts' }); expect(fg).toHaveBeenCalledTimes(2); }); + + it('finds files using dynamic import()', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/loader.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('loader.ts')) { + return 'const mod = await import("./foo.js");' as never; + } + return 'export const x = 1;' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toContain('src/loader.ts'); + }); + + it('finds files using require()', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/legacy.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('legacy.ts')) { + return 'const mod = require("./foo.js");' as never; + } + return 'export const x = 1;' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + 
expect(result.importers).toContain('src/legacy.ts'); + }); + + it('resolves bare directory imports to index files', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/app.ts', + '/repo/src/utils/index.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('app.ts')) { + return 'import { helper } from "./utils";' as never; + } + return 'export function helper() {}' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/utils/index.ts', + }); + + expect(result.importers).toContain('src/app.ts'); + }); + + it('skips files that cannot be read', async () => { + vi.mocked(fg).mockResolvedValue([ + '/repo/src/bad.ts', + '/repo/src/good.ts', + '/repo/src/foo.ts', + ]); + + vi.mocked(readFile).mockImplementation(async (path) => { + if (String(path).endsWith('bad.ts')) { + throw new Error('EACCES: permission denied'); + } + if (String(path).endsWith('good.ts')) { + return 'import { x } from "./foo.js";' as never; + } + return 'export const x = 1;' as never; + }); + + const result = await findImporters({ + repoPath: '/repo', + modulePath: 'src/foo.ts', + }); + + expect(result.importers).toContain('src/good.ts'); + expect(result.importers).not.toContain('src/bad.ts'); + }); }); diff --git a/packages/tools-core/__tests__/list-files.test.ts b/packages/tools-core/__tests__/list-files.test.ts index b287d55..5372ce1 100644 --- a/packages/tools-core/__tests__/list-files.test.ts +++ b/packages/tools-core/__tests__/list-files.test.ts @@ -100,6 +100,56 @@ describe('listChangedFiles', () => { expect(simpleGit).toHaveBeenCalledWith(process.cwd()); }); + it('handles copied files (C status)', async () => { + mockGit.diff.mockResolvedValue('C100\tsrc/original.ts\tsrc/copy.ts\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'src/copy.ts', insertions: 5, deletions: 0, binary: false }, + ], + insertions: 5, + deletions: 0, + }); + + const result = await listChangedFiles({ + 
repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(result.files).toHaveLength(1); + expect(result.files[0]).toEqual({ + path: 'src/copy.ts', + status: 'copied', + additions: 5, + deletions: 0, + }); + }); + + it('handles binary files with zero additions/deletions', async () => { + mockGit.diff.mockResolvedValue('A\timage.png\n'); + mockGit.diffSummary.mockResolvedValue({ + files: [ + { file: 'image.png', binary: true }, + ], + insertions: 0, + deletions: 0, + }); + + const result = await listChangedFiles({ + repoPath: '/repo', + base: 'main', + head: 'HEAD', + }); + + expect(result.files).toHaveLength(1); + expect(result.files[0]).toEqual({ + path: 'image.png', + status: 'added', + additions: 0, + deletions: 0, + }); + }); + it('throws on failure', async () => { mockGit.diff.mockRejectedValue(new Error('bad revision')); diff --git a/packages/tools-core/src/tools/find-imports.ts b/packages/tools-core/src/tools/find-imports.ts index cb6169d..32aa441 100644 --- a/packages/tools-core/src/tools/find-imports.ts +++ b/packages/tools-core/src/tools/find-imports.ts @@ -110,5 +110,9 @@ function normalizeModulePath(modulePath: string): string { break; } } + // Strip /index suffix so bare directory imports match index files + if (normalized.endsWith('/index')) { + normalized = normalized.slice(0, -'/index'.length); + } return normalized; } diff --git a/packages/tools/CLAUDE.md b/packages/tools/CLAUDE.md index 77cdfed..47d099e 100644 --- a/packages/tools/CLAUDE.md +++ b/packages/tools/CLAUDE.md @@ -28,3 +28,16 @@ src/ ## Testing Tests in `__tests__/` mock `McpServer` (including `close()` method) and verify tool registration and SIGINT/SIGTERM cleanup. + + +## Architecture docs + +Before modifying this code, read: +- Container: `.c3/c3-2-tools/README.md` +- Components: + - `.c3/c3-2-tools/c3-201-mcp-server.md` + - `.c3/c3-2-tools/c3-210-tool-registration.md` +- Patterns: `ref-esm-conventions` + +Full refs: `.c3/refs/ref-{name}.md` +