From 33b171042431d1902a348359e27c892665a853b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oliver=20J=C3=A4gle?= Date: Wed, 18 Feb 2026 16:24:28 +0100 Subject: [PATCH] Add zip file source type for local and remote zip archives Support extracting documentation from zip files, either from a local path or a remote URL. Zip sources use the same smart documentation filtering as git repos. Single-root zip archives are automatically flattened. Also extracts shared file-filtering logic into a reusable module used by both GitRepoLoader and ZipLoader. Co-Authored-By: Claude Opus 4.6 --- packages/cli/src/commands/init.ts | 56 +++ packages/cli/src/commands/refresh.ts | 104 +++++ packages/content-loader/package.json | 2 + .../src/__tests__/git-repo-loader.test.ts | 17 +- .../src/__tests__/smart-filtering.test.ts | 81 ++-- .../src/__tests__/zip-loader.test.ts | 286 +++++++++++++ .../content-loader/src/content/file-filter.ts | 92 ++++ .../src/content/git-repo-loader.ts | 93 +---- packages/content-loader/src/content/index.ts | 5 + .../content-loader/src/content/zip-loader.ts | 393 ++++++++++++++++++ packages/content-loader/src/types.ts | 20 +- packages/core/src/__tests__/loader.test.ts | 101 +++++ packages/core/src/config/loader.ts | 31 ++ packages/core/src/paths/calculator.ts | 10 + packages/core/src/types.ts | 18 +- pnpm-lock.yaml | 44 +- 16 files changed, 1188 insertions(+), 165 deletions(-) create mode 100644 packages/content-loader/src/__tests__/zip-loader.test.ts create mode 100644 packages/content-loader/src/content/file-filter.ts create mode 100644 packages/content-loader/src/content/zip-loader.ts diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts index 13f5240..ef4a725 100644 --- a/packages/cli/src/commands/init.ts +++ b/packages/cli/src/commands/init.ts @@ -16,6 +16,7 @@ import { } from "@codemcp/knowledge-core"; import { GitRepoLoader, + ZipLoader, WebSourceType, } from "@codemcp/knowledge-content-loader"; @@ -262,6 +263,61 @@ export const 
initCommand = new Command("init") docset_id: docsetId, }; + await fs.writeFile( + path.join(localPath, `.agentic-source-${index}.json`), + JSON.stringify(metadata, null, 2), + ); + } else if (source.type === "zip") { + // Handle zip file initialization + const loader = new ZipLoader(); + const sourceUrl = source.url || source.path || ""; + + console.log(chalk.gray(` Using ZipLoader for zip extraction`)); + + const webSourceConfig = { + url: sourceUrl, + type: WebSourceType.ZIP, + options: { + paths: source.paths || [], + }, + }; + + // Validate configuration + const validation = loader.validateConfig(webSourceConfig); + if (validation !== true) { + throw new Error( + `Invalid zip source configuration: ${validation}`, + ); + } + + // Load content using ZipLoader + const result = await loader.load(webSourceConfig, localPath); + + if (!result.success) { + throw new Error(`Zip loading failed: ${result.error}`); + } + + // Collect discovered paths for config update + allDiscoveredPaths.push(...result.files); + + totalFiles += result.files.length; + console.log( + chalk.green( + ` ✅ Extracted ${result.files.length} files from zip`, + ), + ); + + // Create source metadata + const metadata = { + source_url: sourceUrl, + source_type: source.type, + downloaded_at: new Date().toISOString(), + files_count: result.files.length, + files: result.files, + docset_id: docsetId, + content_hash: result.contentHash, + }; + await fs.writeFile( path.join(localPath, `.agentic-source-${index}.json`), JSON.stringify(metadata, null, 2), diff --git a/packages/cli/src/commands/refresh.ts b/packages/cli/src/commands/refresh.ts index 62a71f9..7ec6736 100644 --- a/packages/cli/src/commands/refresh.ts +++ b/packages/cli/src/commands/refresh.ts @@ -14,6 +14,7 @@ import { calculateLocalPath, ensureKnowledgeGitignoreSync, } from "@codemcp/knowledge-core"; +import { ZipLoader, WebSourceType } from "@codemcp/knowledge-content-loader"; interface DocsetMetadata { docset_id: string; @@ -168,6 +169,16 @@ 
async function refreshDocset( ); totalFiles += sourceFiles.files_count; refreshedSources.push(sourceFiles); + } else if (source.type === "zip") { + const sourceFiles = await refreshZipSource( + source, + localPath, + index, + docset.id, + force, + ); + totalFiles += sourceFiles.files_count; + refreshedSources.push(sourceFiles); } else { console.log( chalk.yellow( @@ -361,6 +372,99 @@ async function refreshGitSource( } } +async function refreshZipSource( + source: any, + localPath: string, + index: number, + docsetId: string, + force: boolean, +): Promise<SourceMetadata> { + const sourceMetadataPath = path.join( + localPath, + `.agentic-source-${index}.json`, + ); + let existingSourceMetadata: SourceMetadata | null = null; + + try { + const content = await fs.readFile(sourceMetadataPath, "utf8"); + existingSourceMetadata = JSON.parse(content); + } catch { + // No existing metadata, will do full refresh + } + + const sourceUrl = source.url || source.path || ""; + const loader = new ZipLoader(); + const webSourceConfig = { + url: sourceUrl, + type: WebSourceType.ZIP, + options: { + paths: source.paths || [], + }, + }; + + // Check if content has changed + if (!force && existingSourceMetadata) { + try { + const currentId = await loader.getContentId(webSourceConfig); + const lastHash = (existingSourceMetadata as any).content_hash; + if (lastHash === currentId) { + const updatedMetadata: SourceMetadata = { + ...existingSourceMetadata, + downloaded_at: new Date().toISOString(), + }; + await fs.writeFile( + sourceMetadataPath, + JSON.stringify(updatedMetadata, null, 2), + ); + return updatedMetadata; + } + } catch { + // Could not check, proceed with full refresh + } + } + + // Remove old files from this source (if we have metadata) + if (existingSourceMetadata) { + for (const file of existingSourceMetadata.files) { + const filePath = path.join(localPath, file); + try { + await fs.unlink(filePath); + } catch { + // File might already be deleted, ignore + } + } + } + + // Load content + 
const result = await loader.load(webSourceConfig, localPath); + + if (!result.success) { + throw new Error(`Zip refresh failed: ${result.error}`); + } + + const metadata: SourceMetadata = { + source_url: sourceUrl, + source_type: "zip", + downloaded_at: new Date().toISOString(), + files_count: result.files.length, + files: result.files, + docset_id: docsetId, + }; + + // Store content hash for future change detection + const metadataWithHash = { + ...metadata, + content_hash: result.contentHash, + }; + + await fs.writeFile( + sourceMetadataPath, + JSON.stringify(metadataWithHash, null, 2), + ); + + return metadata; +} + // Reuse utility functions from init.ts async function findMarkdownFiles(dir: string): Promise<string[]> { const files: string[] = []; diff --git a/packages/content-loader/package.json b/packages/content-loader/package.json index c8891eb..87f5ee6 100644 --- a/packages/content-loader/package.json +++ b/packages/content-loader/package.json @@ -29,10 +29,12 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "adm-zip": "0.5.16", "simple-git": "^3.22.0" }, "devDependencies": { "@eslint/js": "^9.34.0", + "@types/adm-zip": "0.5.7", "@types/node": "^24.3.0", "eslint": "^9.34.0", "rimraf": "^6.0.1", diff --git a/packages/content-loader/src/__tests__/git-repo-loader.test.ts b/packages/content-loader/src/__tests__/git-repo-loader.test.ts index b8ac762..2b13594 100644 --- a/packages/content-loader/src/__tests__/git-repo-loader.test.ts +++ b/packages/content-loader/src/__tests__/git-repo-loader.test.ts @@ -11,6 +11,10 @@ import { join } from "path"; import { tmpdir } from "os"; import { GitRepoLoader } from "../content/git-repo-loader.js"; import { WebSourceType } from "../types.js"; +import { + isDocumentationFile, + filterDocumentationFiles, +} from "../content/file-filter.js"; describe("Git Repository Loading - User Workflows", () => { let tempDir: string; @@ -191,7 +195,7 @@ describe("Git Repository Loading - User Workflows", () => { ]; // Test the filtering method 
directly - const filtered = (loader as any).filterDocumentationFiles(mockFiles); + const filtered = filterDocumentationFiles(mockFiles); expect(filtered).toEqual([ "README.md", @@ -274,7 +278,7 @@ describe("Git Repository Loading - User Workflows", () => { // Test each case for (const testCase of testCases) { - const result = (loader as any).isDocumentationFile(testCase.file); + const result = isDocumentationFile(testCase.file); expect(result).toBe(testCase.expected); } }); @@ -301,7 +305,7 @@ describe("Git Repository Loading - User Workflows", () => { "examples/demo.js", // Should be included ]; - const filtered = (loader as any).filterDocumentationFiles(testFiles); + const filtered = filterDocumentationFiles(testFiles); // Verify smart filtering is working correctly expect(filtered).toContain("README.md"); @@ -346,7 +350,7 @@ describe("Git Repository Loading - User Workflows", () => { ]; // Test direct filtering method - const filtered = (loader as any).filterDocumentationFiles(mockFiles); + const filtered = filterDocumentationFiles(mockFiles); expect(filtered).toEqual([ "README.md", @@ -423,8 +427,9 @@ describe("Git Repository Loading - User Workflows", () => { // but we can test it indirectly through the architecture expect(typeof (loader as any).scanAllFiles).toBe("function"); expect(typeof (loader as any).extractDocumentationFiles).toBe("function"); - expect(typeof (loader as any).filterDocumentationFiles).toBe("function"); - expect(typeof (loader as any).isDocumentationFile).toBe("function"); + // filterDocumentationFiles and isDocumentationFile are shared utilities in file-filter.ts + expect(typeof filterDocumentationFiles).toBe("function"); + expect(typeof isDocumentationFile).toBe("function"); // These methods form the centralized architecture for content filtering }); diff --git a/packages/content-loader/src/__tests__/smart-filtering.test.ts b/packages/content-loader/src/__tests__/smart-filtering.test.ts index 5f1ba0c..75464da 100644 --- 
a/packages/content-loader/src/__tests__/smart-filtering.test.ts +++ b/packages/content-loader/src/__tests__/smart-filtering.test.ts @@ -2,74 +2,57 @@ * Smart Filtering Tests - Test the key filtering behaviors for REQ-18 */ -import { describe, test, expect, beforeEach } from "vitest"; -import { GitRepoLoader } from "../content/git-repo-loader.js"; +import { describe, test, expect } from "vitest"; +import { + isDocumentationFile, + filterDocumentationFiles, +} from "../content/file-filter.js"; describe("Smart Content Filtering - REQ-18", () => { - let loader: GitRepoLoader; - - beforeEach(() => { - loader = new GitRepoLoader(); - }); - test("should include markdown files anywhere in repository", () => { - expect((loader as any).isDocumentationFile("README.md")).toBe(true); - expect((loader as any).isDocumentationFile("docs/guide.md")).toBe(true); - expect((loader as any).isDocumentationFile("deep/nested/api.mdx")).toBe( - true, - ); - expect((loader as any).isDocumentationFile("tutorial.rst")).toBe(true); - expect((loader as any).isDocumentationFile("notes.txt")).toBe(true); + expect(isDocumentationFile("README.md")).toBe(true); + expect(isDocumentationFile("docs/guide.md")).toBe(true); + expect(isDocumentationFile("deep/nested/api.mdx")).toBe(true); + expect(isDocumentationFile("tutorial.rst")).toBe(true); + expect(isDocumentationFile("notes.txt")).toBe(true); }); test("should exclude .github directory files even if they are markdown", () => { - expect( - (loader as any).isDocumentationFile(".github/issue_template.md"), - ).toBe(false); - expect( - (loader as any).isDocumentationFile(".github/pull_request_template.md"), - ).toBe(false); - expect( - (loader as any).isDocumentationFile(".github/workflows/ci.yml"), - ).toBe(false); + expect(isDocumentationFile(".github/issue_template.md")).toBe(false); + expect(isDocumentationFile(".github/pull_request_template.md")).toBe(false); + expect(isDocumentationFile(".github/workflows/ci.yml")).toBe(false); }); test("should 
exclude project metadata files", () => { - expect((loader as any).isDocumentationFile("CHANGELOG.md")).toBe(false); - expect((loader as any).isDocumentationFile("LICENSE.md")).toBe(false); - expect((loader as any).isDocumentationFile("CONTRIBUTING.md")).toBe(false); - expect((loader as any).isDocumentationFile("CODE_OF_CONDUCT.md")).toBe( - false, - ); + expect(isDocumentationFile("CHANGELOG.md")).toBe(false); + expect(isDocumentationFile("LICENSE.md")).toBe(false); + expect(isDocumentationFile("CONTRIBUTING.md")).toBe(false); + expect(isDocumentationFile("CODE_OF_CONDUCT.md")).toBe(false); }); test("should exclude config and source files", () => { // Config files should be excluded - expect((loader as any).isDocumentationFile("package.json")).toBe(false); - expect((loader as any).isDocumentationFile(".postcssrc.json")).toBe(false); - expect((loader as any).isDocumentationFile("config.ts")).toBe(false); - expect((loader as any).isDocumentationFile("styles.css")).toBe(false); + expect(isDocumentationFile("package.json")).toBe(false); + expect(isDocumentationFile(".postcssrc.json")).toBe(false); + expect(isDocumentationFile("config.ts")).toBe(false); + expect(isDocumentationFile("styles.css")).toBe(false); // Source files should be excluded - expect((loader as any).isDocumentationFile("index.ts")).toBe(false); - expect((loader as any).isDocumentationFile("src/index.ts")).toBe(false); - expect((loader as any).isDocumentationFile("src/utils.ts")).toBe(false); - expect((loader as any).isDocumentationFile("src/helpers.ts")).toBe(false); + expect(isDocumentationFile("index.ts")).toBe(false); + expect(isDocumentationFile("src/index.ts")).toBe(false); + expect(isDocumentationFile("src/utils.ts")).toBe(false); + expect(isDocumentationFile("src/helpers.ts")).toBe(false); }); test("should include files in examples directory", () => { - expect((loader as any).isDocumentationFile("examples/config.json")).toBe( - true, - ); - expect((loader as 
any).isDocumentationFile("examples/demo.js")).toBe(true); - expect((loader as any).isDocumentationFile("examples/style.css")).toBe( - true, - ); + expect(isDocumentationFile("examples/config.json")).toBe(true); + expect(isDocumentationFile("examples/demo.js")).toBe(true); + expect(isDocumentationFile("examples/style.css")).toBe(true); }); test("should exclude binary files even in examples", () => { - expect((loader as any).isDocumentationFile("examples/app.exe")).toBe(false); - expect((loader as any).isDocumentationFile("examples/lib.so")).toBe(false); + expect(isDocumentationFile("examples/app.exe")).toBe(false); + expect(isDocumentationFile("examples/lib.so")).toBe(false); }); test("should filter mixed file list correctly", () => { @@ -82,7 +65,7 @@ describe("Smart Content Filtering - REQ-18", () => { "examples/demo.js", // Include ]; - const filtered = (loader as any).filterDocumentationFiles(mixedFiles); + const filtered = filterDocumentationFiles(mixedFiles); expect(filtered).toEqual(["README.md", "docs/api.md", "examples/demo.js"]); }); @@ -127,7 +110,7 @@ describe("Smart Content Filtering - REQ-18", () => { ".github/workflows/ci.yml", ]; - const filtered = (loader as any).filterDocumentationFiles(repositoryFiles); + const filtered = filterDocumentationFiles(repositoryFiles); // Expected: Documentation files + all files from examples/samples const expectedIncludes = [ diff --git a/packages/content-loader/src/__tests__/zip-loader.test.ts b/packages/content-loader/src/__tests__/zip-loader.test.ts new file mode 100644 index 0000000..53a53bd --- /dev/null +++ b/packages/content-loader/src/__tests__/zip-loader.test.ts @@ -0,0 +1,286 @@ +/** + * Tests for Zip file content loader + */ + +import { describe, test, expect, beforeEach, afterEach } from "vitest"; +import { promises as fs } from "node:fs"; +import * as path from "node:path"; +import AdmZip from "adm-zip"; +import { ZipLoader } from "../content/zip-loader.js"; +import { WebSourceType } from "../types.js"; 
+ + describe("Zip Loader", () => { + let loader: ZipLoader; + let tempDir: string; + + beforeEach(async () => { + loader = new ZipLoader(); + tempDir = path.join( + process.cwd(), + ".tmp", + `zip-test-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); + await fs.mkdir(tempDir, { recursive: true }); + }); + + afterEach(async () => { + await fs.rm(tempDir, { recursive: true, force: true }); + }); + + /** + * Helper to create a zip file with given entries + */ + async function createTestZip( + entries: Record<string, string>, + zipName = "test.zip", + ): Promise<string> { + const zip = new AdmZip(); + for (const [entryPath, content] of Object.entries(entries)) { + zip.addFile(entryPath, Buffer.from(content)); + } + const zipPath = path.join(tempDir, zipName); + zip.writeZip(zipPath); + return zipPath; + } + + describe("canHandle", () => { + test("should handle ZIP type", () => { + expect( + loader.canHandle({ url: "test.zip", type: WebSourceType.ZIP }), + ).toBe(true); + }); + + test("should not handle GIT_REPO type", () => { + expect( + loader.canHandle({ + url: "https://github.com/test/repo.git", + type: WebSourceType.GIT_REPO, + }), + ).toBe(false); + }); + }); + + describe("validateConfig", () => { + test("should accept valid config with url", () => { + expect( + loader.validateConfig({ + url: "https://example.com/docs.zip", + type: WebSourceType.ZIP, + }), + ).toBe(true); + }); + + test("should accept valid config with local path", () => { + expect( + loader.validateConfig({ + url: "/path/to/local.zip", + type: WebSourceType.ZIP, + }), + ).toBe(true); + }); + + test("should reject config without url", () => { + expect( + loader.validateConfig({ + url: "", + type: WebSourceType.ZIP, + }), + ).not.toBe(true); + }); + }); + + describe("load - local zip files", () => { + test("should extract documentation files from zip", async () => { + const zipPath = await createTestZip({ + "README.md": "# Hello", + "docs/guide.md": "# Guide", + "src/index.ts": "export {}", + "package.json": 
"{}", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.files).toContain("README.md"); + expect(result.files).toContain("docs/guide.md"); + expect(result.files).not.toContain("src/index.ts"); + expect(result.files).not.toContain("package.json"); + }); + + test("should flatten single root directory", async () => { + const zipPath = await createTestZip({ + "my-project/README.md": "# Hello", + "my-project/docs/guide.md": "# Guide", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + // Files should be at root level, not under my-project/ + expect(result.files).toContain("README.md"); + expect(result.files).toContain("docs/guide.md"); + expect(result.files).not.toContain("my-project/README.md"); + }); + + test("should NOT flatten when multiple root entries exist", async () => { + const zipPath = await createTestZip({ + "dir-a/README.md": "# A", + "dir-b/README.md": "# B", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.files).toContain("dir-a/README.md"); + expect(result.files).toContain("dir-b/README.md"); + }); + + test("should extract only specified paths when provided", async () => { + const zipPath = await createTestZip({ + "docs/api.md": "# API", + "docs/guide.md": "# Guide", + "other/notes.md": "# Notes", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { + url: zipPath, + type: WebSourceType.ZIP, + options: { paths: ["docs/"] }, + }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.files.some((f) => 
f.includes("api.md"))).toBe(true); + expect(result.files.some((f) => f.includes("guide.md"))).toBe(true); + }); + + test("should return error for non-existent local file", async () => { + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: "/nonexistent/file.zip", type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + }); + + test("should generate content hash", async () => { + const zipPath = await createTestZip({ + "README.md": "# Hello", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.contentHash).toBeTruthy(); + expect(result.contentHash.length).toBe(64); // SHA-256 hex + }); + }); + + describe("getContentId", () => { + test("should generate hash from local file content", async () => { + const zipPath = await createTestZip({ + "README.md": "# Hello", + }); + + const contentId = await loader.getContentId({ + url: zipPath, + type: WebSourceType.ZIP, + }); + + expect(contentId).toBeTruthy(); + expect(contentId.length).toBe(64); + }); + + test("should generate different hashes for different files", async () => { + const zip1 = await createTestZip({ "a.md": "content-1" }, "test1.zip"); + const zip2 = await createTestZip({ "b.md": "content-2" }, "test2.zip"); + + const id1 = await loader.getContentId({ + url: zip1, + type: WebSourceType.ZIP, + }); + const id2 = await loader.getContentId({ + url: zip2, + type: WebSourceType.ZIP, + }); + + expect(id1).not.toBe(id2); + }); + + test("should fallback gracefully for non-existent file", async () => { + const contentId = await loader.getContentId({ + url: "/nonexistent.zip", + type: WebSourceType.ZIP, + }); + + // Should fallback to URL-based hash + expect(contentId).toBeTruthy(); + expect(contentId.length).toBe(64); + }); + }); + + describe("smart 
filtering", () => { + test("should include examples directory files", async () => { + const zipPath = await createTestZip({ + "examples/demo.js": "console.log('hello')", + "examples/config.json": "{}", + "src/index.ts": "export {}", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.files).toContain("examples/demo.js"); + expect(result.files).toContain("examples/config.json"); + expect(result.files).not.toContain("src/index.ts"); + }); + + test("should exclude build artifacts and metadata", async () => { + const zipPath = await createTestZip({ + "README.md": "# Docs", + "CHANGELOG.md": "# Changes", + "LICENSE.md": "MIT", + "dist/bundle.js": "var x;", + "node_modules/lib/index.js": "module.exports = {};", + }); + + const targetDir = path.join(tempDir, "output"); + const result = await loader.load( + { url: zipPath, type: WebSourceType.ZIP }, + targetDir, + ); + + expect(result.success).toBe(true); + expect(result.files).toContain("README.md"); + expect(result.files).not.toContain("CHANGELOG.md"); + expect(result.files).not.toContain("LICENSE.md"); + expect(result.files).not.toContain("dist/bundle.js"); + expect(result.files).not.toContain("node_modules/lib/index.js"); + }); + }); +}); diff --git a/packages/content-loader/src/content/file-filter.ts b/packages/content-loader/src/content/file-filter.ts new file mode 100644 index 0000000..921ceea --- /dev/null +++ b/packages/content-loader/src/content/file-filter.ts @@ -0,0 +1,92 @@ +/** + * Shared file filtering utilities for documentation content extraction (REQ-18) + */ + +import * as path from "node:path"; + +/** + * Determine if a file is considered documentation content (REQ-18) + * @param filePath - Path to the file to check + * @returns True if file should be included as documentation + */ +export function isDocumentationFile(filePath: string): boolean { 
+ const filename = path.basename(filePath); + const extension = path.extname(filePath).toLowerCase(); + const directory = path.dirname(filePath); + + // Exclude project metadata files (REQ-18) + const metadataFiles = + /^(CHANGELOG|LICENSE|CONTRIBUTING|AUTHORS|CODE_OF_CONDUCT)/i; + if (metadataFiles.test(filename)) { + return false; + } + + // Normalize directory path for consistent matching (use forward slashes) + const normalizedDir = directory.split(path.sep).join("/"); + const pathParts = normalizedDir.split("/"); + + // Exclude build, dependency, and development directories (REQ-18) + // Use exact directory name matching, not substring matching + const excludedDirs = [ + "node_modules", + "vendor", + ".git", + "build", + "dist", + "target", + ".cache", + "__tests__", + "test", + "tests", + ".github", + ".vscode", + ".idea", + ]; + + // Check if any path segment matches excluded directories + for (const excludedDir of excludedDirs) { + if (pathParts.includes(excludedDir)) { + return false; + } + } + + // Include README files anywhere (REQ-18) + if (/^README/i.test(filename)) { + return true; + } + + // Include documentation file extensions anywhere, regardless of directory (REQ-18) + const docExtensions = [".md", ".mdx", ".rst", ".txt", ".adoc", ".asciidoc"]; + if (docExtensions.includes(extension)) { + return true; + } + + // Special case: examples/samples directory - include ALL file types (Issue #12) + // These directories contain code that demonstrates usage patterns + const isInExamples = /\b(examples?|samples?)\b/i.test(directory); + if (isInExamples) { + // In examples/samples, exclude only binary files + const excludedInExamples = [ + ".exe", + ".bin", + ".so", + ".dll", + ".dylib", + ".a", + ".o", + ".obj", + ]; + return !excludedInExamples.includes(extension); + } + + return false; +} + +/** + * Filter list of files to only include documentation-relevant files (REQ-18) + * @param files - Array of file paths to filter + * @returns Array of file paths 
that are considered documentation + */ +export function filterDocumentationFiles(files: string[]): string[] { + return files.filter((file) => isDocumentationFile(file)); +} diff --git a/packages/content-loader/src/content/git-repo-loader.ts b/packages/content-loader/src/content/git-repo-loader.ts index 8b27136..508355d 100644 --- a/packages/content-loader/src/content/git-repo-loader.ts +++ b/packages/content-loader/src/content/git-repo-loader.ts @@ -14,6 +14,10 @@ import { WebSourceErrorType, } from "../types.js"; import * as crypto from "node:crypto"; +import { + isDocumentationFile, + filterDocumentationFiles, +} from "./file-filter.js"; /** * Content loader for Git repositories (GitHub, GitLab, any Git repo) @@ -325,93 +329,6 @@ export class GitRepoLoader extends ContentLoader { } } - /** - * Filter list of files to only include documentation-relevant files (REQ-18) - * @param files - Array of file paths to filter - * @returns Array of file paths that are considered documentation - */ - private filterDocumentationFiles(files: string[]): string[] { - return files.filter((file) => this.isDocumentationFile(file)); - } - - /** - * Determine if a file is considered documentation content (REQ-18) - * @param filePath - Path to the file to check - * @returns True if file should be included as documentation - */ - private isDocumentationFile(filePath: string): boolean { - const filename = path.basename(filePath); - const extension = path.extname(filePath).toLowerCase(); - const directory = path.dirname(filePath); - - // Exclude project metadata files (REQ-18) - const metadataFiles = - /^(CHANGELOG|LICENSE|CONTRIBUTING|AUTHORS|CODE_OF_CONDUCT)/i; - if (metadataFiles.test(filename)) { - return false; - } - - // Normalize directory path for consistent matching (use forward slashes) - const normalizedDir = directory.split(path.sep).join("/"); - const pathParts = normalizedDir.split("/"); - - // Exclude build, dependency, and development directories (REQ-18) - // Use exact 
directory name matching, not substring matching - const excludedDirs = [ - "node_modules", - "vendor", - ".git", - "build", - "dist", - "target", - ".cache", - "__tests__", - "test", - "tests", - ".github", - ".vscode", - ".idea", - ]; - - // Check if any path segment matches excluded directories - for (const excludedDir of excludedDirs) { - if (pathParts.includes(excludedDir)) { - return false; - } - } - - // Include README files anywhere (REQ-18) - if (/^README/i.test(filename)) { - return true; - } - - // Include documentation file extensions anywhere, regardless of directory (REQ-18) - const docExtensions = [".md", ".mdx", ".rst", ".txt", ".adoc", ".asciidoc"]; - if (docExtensions.includes(extension)) { - return true; - } - - // Special case: examples/samples directory - include ALL file types (Issue #12) - // These directories contain code that demonstrates usage patterns - const isInExamples = /\b(examples?|samples?)\b/i.test(directory); - if (isInExamples) { - // In examples/samples, exclude only binary files - const excludedInExamples = [ - ".exe", - ".bin", - ".so", - ".dll", - ".dylib", - ".a", - ".o", - ".obj", - ]; - return !excludedInExamples.includes(extension); - } - - return false; - } - /** * Extract only documentation files from source directory (REQ-18) * @param sourceDir - Source directory to scan @@ -427,7 +344,7 @@ export class GitRepoLoader extends ContentLoader { const allFiles = await this.scanAllFiles(sourceDir); // Filter to only documentation files - const docFiles = this.filterDocumentationFiles(allFiles); + const docFiles = filterDocumentationFiles(allFiles); // Copy the filtered files for (const filePath of docFiles) { diff --git a/packages/content-loader/src/content/index.ts b/packages/content-loader/src/content/index.ts index adc5a2e..428d5fe 100644 --- a/packages/content-loader/src/content/index.ts +++ b/packages/content-loader/src/content/index.ts @@ -4,7 +4,12 @@ export { ContentLoader } from "./loader.js"; export { GitRepoLoader 
} from "./git-repo-loader.js"; +export { ZipLoader } from "./zip-loader.js"; export { DocumentationSiteLoader } from "./documentation-site-loader.js"; export { ApiDocumentationLoader } from "./api-documentation-loader.js"; export { ContentProcessor } from "./content-processor.js"; export { MetadataManager } from "./metadata-manager.js"; +export { + isDocumentationFile, + filterDocumentationFiles, +} from "./file-filter.js"; diff --git a/packages/content-loader/src/content/zip-loader.ts b/packages/content-loader/src/content/zip-loader.ts new file mode 100644 index 0000000..013f9fa --- /dev/null +++ b/packages/content-loader/src/content/zip-loader.ts @@ -0,0 +1,393 @@ +/** + * Zip file content loader + */ + +import { promises as fs } from "node:fs"; +import * as path from "node:path"; +import * as crypto from "node:crypto"; +import AdmZip from "adm-zip"; +import { ContentLoader, type LoadResult } from "./loader.js"; +import { + WebSourceType, + WebSourceConfig, + ZipOptions, + WebSourceError, + WebSourceErrorType, +} from "../types.js"; +import { filterDocumentationFiles } from "./file-filter.js"; + +/** + * Content loader for zip files (local or remote) + */ +export class ZipLoader extends ContentLoader { + /** + * Check if this loader can handle the given web source type + */ + canHandle(webSource: WebSourceConfig): boolean { + return webSource.type === WebSourceType.ZIP; + } + + /** + * Validate the web source configuration + */ + validateConfig(webSource: WebSourceConfig): true | string { + if (!webSource.url) { + return "Zip source must have a URL (remote) or local path"; + } + + return true; + } + + /** + * Load content from a zip file + */ + async load( + webSource: WebSourceConfig, + targetPath: string, + ): Promise<LoadResult> { + try { + const options = webSource.options as ZipOptions | undefined; + const tempDir = await this.createTempDirectory(); + + try { + // Get the zip file (download if remote, or use local path) + const zipFilePath = await 
this.resolveZipFile(webSource.url, tempDir); + + // Extract to temp directory + const extractDir = path.join(tempDir, "extracted"); + await fs.mkdir(extractDir, { recursive: true }); + this.extractZip(zipFilePath, extractDir); + + // Flatten single root directory + await this.flattenSingleRoot(extractDir); + + // Extract specified paths or all documentation content + const extractedFiles = await this.extractContent( + extractDir, + targetPath, + options?.paths, + ); + + // Generate content hash + const contentHash = await this.generateContentHash( + targetPath, + extractedFiles, + ); + + return { + success: true, + files: extractedFiles, + contentHash, + }; + } finally { + await this.cleanupTempDirectory(tempDir); + } + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + return { + success: false, + files: [], + contentHash: "", + error: `Zip loading failed: ${errorMessage}`, + }; + } + } + + /** + * Get content identifier for change detection + */ + async getContentId(webSource: WebSourceConfig): Promise<string> { + try { + if (this.isRemoteUrl(webSource.url)) { + // For remote URLs, try HEAD request for ETag/Last-Modified + const response = await fetch(webSource.url, { method: "HEAD" }); + const etag = response.headers.get("etag") || ""; + const lastModified = response.headers.get("last-modified") || ""; + const identifier = etag || lastModified || webSource.url; + + return crypto + .createHash("sha256") + .update(`${webSource.url}:${identifier}`) + .digest("hex"); + } else { + // For local files, hash the file content + const content = await fs.readFile(webSource.url); + return crypto.createHash("sha256").update(content).digest("hex"); + } + } catch { + // Fallback to URL-based hash + return crypto.createHash("sha256").update(webSource.url).digest("hex"); + } + } + + /** + * Determine if the source is a remote URL or local path + */ + private isRemoteUrl(url: string): boolean { + return url.startsWith("http://") || 
url.startsWith("https://"); + } + + /** + * Resolve the zip file path - download if remote, return as-is if local + */ + private async resolveZipFile(url: string, tempDir: string): Promise { + if (this.isRemoteUrl(url)) { + return this.downloadZip(url, tempDir); + } + + // Local file - verify it exists + try { + await fs.access(url); + return url; + } catch { + throw new WebSourceError( + WebSourceErrorType.ZIP_ERROR, + `Local zip file not found: ${url}`, + { url }, + ); + } + } + + /** + * Download a zip file from a remote URL + */ + private async downloadZip(url: string, tempDir: string): Promise { + const zipPath = path.join(tempDir, "download.zip"); + + try { + const response = await fetch(url); + if (!response.ok) { + throw new Error(`HTTP ${response.status}: ${response.statusText}`); + } + + const buffer = Buffer.from(await response.arrayBuffer()); + await fs.writeFile(zipPath, buffer); + return zipPath; + } catch (error) { + throw new WebSourceError( + WebSourceErrorType.ZIP_ERROR, + `Failed to download zip from ${url}: ${error instanceof Error ? error.message : String(error)}`, + { url }, + ); + } + } + + /** + * Extract a zip file to a directory using adm-zip + */ + private extractZip(zipPath: string, targetDir: string): void { + try { + const zip = new AdmZip(zipPath); + zip.extractAllTo(targetDir, true); + } catch (error) { + throw new WebSourceError( + WebSourceErrorType.ZIP_ERROR, + `Failed to extract zip: ${error instanceof Error ? error.message : String(error)}`, + { zipPath }, + ); + } + } + + /** + * If the extracted contents have a single root directory and no files at root, + * move that directory's contents one level up. 
+ */ + private async flattenSingleRoot(extractDir: string): Promise { + const entries = await fs.readdir(extractDir, { withFileTypes: true }); + + const directories = entries.filter((e) => e.isDirectory()); + const files = entries.filter((e) => e.isFile()); + + if (directories.length === 1 && files.length === 0) { + const singleDir = path.join(extractDir, directories[0]!.name); + const innerEntries = await fs.readdir(singleDir); + + // Move all contents up one level + for (const entry of innerEntries) { + const src = path.join(singleDir, entry); + const dest = path.join(extractDir, entry); + await fs.rename(src, dest); + } + + // Remove the now-empty directory + await fs.rmdir(singleDir); + } + } + + /** + * Extract content from extracted zip to target directory + */ + private async extractContent( + sourceDir: string, + targetDir: string, + paths?: string[], + ): Promise { + await fs.mkdir(targetDir, { recursive: true }); + const extractedFiles: string[] = []; + + if (paths && paths.length > 0) { + // Extract only specified paths + for (const relPath of paths) { + const sourcePath = path.join(sourceDir, relPath); + const targetPath = path.join(targetDir, relPath); + + try { + const stats = await fs.stat(sourcePath); + if (stats.isDirectory()) { + await this.copyDirectory(sourcePath, targetPath, extractedFiles); + } else if (stats.isFile()) { + await fs.mkdir(path.dirname(targetPath), { recursive: true }); + await fs.copyFile(sourcePath, targetPath); + extractedFiles.push(relPath); + } + } catch (error) { + console.warn( + `Warning: Could not extract ${relPath}: ${error instanceof Error ? 
error.message : String(error)}`, + ); + } + } + } else { + // Use smart filtering to extract only documentation files + await this.extractDocumentationFiles( + sourceDir, + targetDir, + extractedFiles, + ); + } + + return extractedFiles; + } + + /** + * Extract only documentation files from source directory + */ + private async extractDocumentationFiles( + sourceDir: string, + targetDir: string, + extractedFiles: string[], + ): Promise { + const allFiles = await this.scanAllFiles(sourceDir); + const docFiles = filterDocumentationFiles(allFiles); + + for (const filePath of docFiles) { + const relativePath = path.relative(sourceDir, filePath); + const targetPath = path.join(targetDir, relativePath); + + try { + await fs.mkdir(path.dirname(targetPath), { recursive: true }); + await fs.copyFile(filePath, targetPath); + extractedFiles.push(relativePath); + } catch (error) { + console.warn( + `Warning: Could not copy ${relativePath}: ${error instanceof Error ? error.message : String(error)}`, + ); + } + } + } + + /** + * Copy directory recursively + */ + private async copyDirectory( + source: string, + target: string, + fileList: string[], + ): Promise { + await fs.mkdir(target, { recursive: true }); + const items = await fs.readdir(source); + + for (const item of items) { + const sourcePath = path.join(source, item); + const targetPath = path.join(target, item); + const stats = await fs.stat(sourcePath); + + if (stats.isDirectory()) { + await this.copyDirectory(sourcePath, targetPath, fileList); + } else { + await fs.copyFile(sourcePath, targetPath); + const relativePath = path.relative(target, targetPath); + fileList.push(relativePath); + } + } + } + + /** + * Recursively scan all files in a directory + */ + private async scanAllFiles(dir: string): Promise { + const files: string[] = []; + + async function scan(currentDir: string) { + const items = await fs.readdir(currentDir); + + for (const item of items) { + if (item === ".git") continue; + + const fullPath = 
path.join(currentDir, item); + const stat = await fs.stat(fullPath); + + if (stat.isDirectory()) { + await scan(fullPath); + } else if (stat.isFile()) { + files.push(fullPath); + } + } + } + + await scan(dir); + return files; + } + + /** + * Generate content hash for change detection + */ + private async generateContentHash( + targetDir: string, + files: string[], + ): Promise { + const hash = crypto.createHash("sha256"); + const sortedFiles = files.slice().sort(); + + for (const file of sortedFiles) { + const filePath = path.join(targetDir, file); + try { + const content = await fs.readFile(filePath); + hash.update(file); + hash.update(content); + } catch (error) { + console.warn( + `Warning: Could not hash ${file}: ${error instanceof Error ? error.message : String(error)}`, + ); + } + } + + return hash.digest("hex"); + } + + /** + * Create a temporary directory + */ + private async createTempDirectory(): Promise { + const tempDir = path.join( + process.cwd(), + ".tmp", + `zip-extract-${Date.now()}-${Math.random().toString(36).slice(2)}`, + ); + await fs.mkdir(tempDir, { recursive: true }); + return tempDir; + } + + /** + * Clean up temporary directory + */ + private async cleanupTempDirectory(tempDir: string): Promise { + try { + await fs.rm(tempDir, { recursive: true, force: true }); + } catch (error) { + console.warn( + `Warning: Could not clean up temp directory ${tempDir}: ${error instanceof Error ? 
error.message : String(error)}`, + ); + } + } +} diff --git a/packages/content-loader/src/types.ts b/packages/content-loader/src/types.ts index fe6df80..6958b80 100644 --- a/packages/content-loader/src/types.ts +++ b/packages/content-loader/src/types.ts @@ -27,6 +27,8 @@ export enum WebSourceType { DOCUMENTATION_SITE = "documentation_site", API_DOCUMENTATION = "api_documentation", + + ZIP = "zip", } /** @@ -63,16 +65,28 @@ export interface ApiDocumentationOptions { include_packages?: string[]; } +/** + * Configuration for zip file web sources + */ +export interface ZipOptions { + /** Specific paths to extract from the zip */ + paths?: string[]; +} + /** * Configuration for a single web source */ export interface WebSourceConfig { - /** URL of the web source */ + /** URL of the web source (or local path for zip sources) */ url: string; /** Type of web source */ type: WebSourceType; /** Type-specific options */ - options?: GitRepoOptions | DocumentationSiteOptions | ApiDocumentationOptions; + options?: + | GitRepoOptions + | DocumentationSiteOptions + | ApiDocumentationOptions + | ZipOptions; } /** @@ -122,6 +136,8 @@ export enum WebSourceErrorType { GIT_REPO_ERROR = "GIT_REPO_ERROR", + ZIP_ERROR = "ZIP_ERROR", + NOT_IMPLEMENTED = "NOT_IMPLEMENTED", } diff --git a/packages/core/src/__tests__/loader.test.ts b/packages/core/src/__tests__/loader.test.ts index cb3b210..0869878 100644 --- a/packages/core/src/__tests__/loader.test.ts +++ b/packages/core/src/__tests__/loader.test.ts @@ -430,5 +430,106 @@ template: "Global: {{keywords}} in {{local_path}}"`; expect(validateConfig(config)).toBe(false); }); + + test("should accept zip source with url", () => { + const config = { + version: "1.0", + docsets: [ + { + id: "zip-docs", + name: "Zip Docs", + sources: [ + { + type: "zip", + url: "https://example.com/docs.zip", + }, + ], + }, + ], + }; + + expect(validateConfig(config)).toBe(true); + }); + + test("should accept zip source with path", () => { + const config = { + 
version: "1.0", + docsets: [ + { + id: "zip-docs", + name: "Zip Docs", + sources: [ + { + type: "zip", + path: "./archives/docs.zip", + }, + ], + }, + ], + }; + + expect(validateConfig(config)).toBe(true); + }); + + test("should reject zip source with both url and path", () => { + const config = { + version: "1.0", + docsets: [ + { + id: "zip-docs", + name: "Zip Docs", + sources: [ + { + type: "zip", + url: "https://example.com/docs.zip", + path: "./docs.zip", + }, + ], + }, + ], + }; + + expect(validateConfig(config)).toBe(false); + }); + + test("should reject zip source with neither url nor path", () => { + const config = { + version: "1.0", + docsets: [ + { + id: "zip-docs", + name: "Zip Docs", + sources: [ + { + type: "zip", + }, + ], + }, + ], + }; + + expect(validateConfig(config)).toBe(false); + }); + + test("should accept zip source with optional paths filter", () => { + const config = { + version: "1.0", + docsets: [ + { + id: "zip-docs", + name: "Zip Docs", + sources: [ + { + type: "zip", + url: "https://example.com/docs.zip", + paths: ["docs/", "README.md"], + }, + ], + }, + ], + }; + + expect(validateConfig(config)).toBe(true); + }); }); }); diff --git a/packages/core/src/config/loader.ts b/packages/core/src/config/loader.ts index 9eec497..c8e1752 100644 --- a/packages/core/src/config/loader.ts +++ b/packages/core/src/config/loader.ts @@ -285,6 +285,37 @@ function validateSource(source: unknown): source is SourceConfig { return true; } + if (type === "zip") { + const hasPath = + obj["path"] !== undefined && + typeof obj["path"] === "string" && + obj["path"].trim() !== ""; + const hasUrl = + obj["url"] !== undefined && + typeof obj["url"] === "string" && + obj["url"].trim() !== ""; + + // Must have exactly one of path or url + if (hasPath === hasUrl) { + return false; + } + + // Optional paths field + if (obj["paths"] !== undefined) { + if (!Array.isArray(obj["paths"])) { + return false; + } + + for (const path of obj["paths"]) { + if (typeof path !== 
"string" || path.trim() === "") { + return false; + } + } + } + + return true; + } + // Unknown source type return false; } diff --git a/packages/core/src/paths/calculator.ts b/packages/core/src/paths/calculator.ts index b24d7aa..e511364 100644 --- a/packages/core/src/paths/calculator.ts +++ b/packages/core/src/paths/calculator.ts @@ -70,6 +70,11 @@ export function calculateLocalPath( return join(configDir, "docsets", docset.id); } + if (primarySource.type === "zip") { + // For zip sources, use standardized path: .knowledge/docsets/{id} + return join(configDir, "docsets", docset.id); + } + throw new Error(`Unsupported source type: ${(primarySource as any).type}`); } catch (error) { throw new KnowledgeError( @@ -131,6 +136,11 @@ export async function calculateLocalPathWithSymlinks( return join(configDir, "docsets", docset.id); } + if (primarySource.type === "zip") { + // For zip sources, use standardized path: .knowledge/docsets/{id} + return join(configDir, "docsets", docset.id); + } + throw new Error(`Unsupported source type: ${(primarySource as any).type}`); } diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index e6dc344..9a77eef 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -34,10 +34,26 @@ export interface GitRepoSourceConfig extends BaseSourceConfig { paths?: string[]; } +/** + * Zip file source configuration + */ +export interface ZipSourceConfig extends BaseSourceConfig { + type: "zip"; + /** Local path to zip file (mutually exclusive with url) */ + path?: string; + /** Remote URL to download zip from (mutually exclusive with path) */ + url?: string; + /** Specific paths to extract (optional) */ + paths?: string[]; +} + /** * Union type for all source configurations */ -export type SourceConfig = LocalFolderSourceConfig | GitRepoSourceConfig; +export type SourceConfig = + | LocalFolderSourceConfig + | GitRepoSourceConfig + | ZipSourceConfig; /** * Configuration for a single docset diff --git a/pnpm-lock.yaml 
b/pnpm-lock.yaml index 5a7bd67..f34938b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -132,6 +132,9 @@ importers: packages/content-loader: dependencies: + adm-zip: + specifier: 0.5.16 + version: 0.5.16 simple-git: specifier: ^3.22.0 version: 3.28.0 @@ -139,6 +142,9 @@ importers: "@eslint/js": specifier: ^9.34.0 version: 9.39.2 + "@types/adm-zip": + specifier: 0.5.7 + version: 0.5.7 "@types/node": specifier: ^24.3.0 version: 24.3.0 @@ -746,7 +752,6 @@ packages: } cpu: [arm64] os: [linux] - libc: [glibc] "@oxlint/linux-arm64-musl@1.14.0": resolution: @@ -755,7 +760,6 @@ packages: } cpu: [arm64] os: [linux] - libc: [musl] "@oxlint/linux-x64-gnu@1.14.0": resolution: @@ -764,7 +768,6 @@ packages: } cpu: [x64] os: [linux] - libc: [glibc] "@oxlint/linux-x64-musl@1.14.0": resolution: @@ -773,7 +776,6 @@ packages: } cpu: [x64] os: [linux] - libc: [musl] "@oxlint/win32-arm64@1.14.0": resolution: @@ -1351,7 +1353,6 @@ packages: } cpu: [arm] os: [linux] - libc: [glibc] "@rollup/rollup-linux-arm-musleabihf@4.50.0": resolution: @@ -1360,7 +1361,6 @@ packages: } cpu: [arm] os: [linux] - libc: [musl] "@rollup/rollup-linux-arm64-gnu@4.50.0": resolution: @@ -1369,7 +1369,6 @@ packages: } cpu: [arm64] os: [linux] - libc: [glibc] "@rollup/rollup-linux-arm64-musl@4.50.0": resolution: @@ -1378,7 +1377,6 @@ packages: } cpu: [arm64] os: [linux] - libc: [musl] "@rollup/rollup-linux-loongarch64-gnu@4.50.0": resolution: @@ -1387,7 +1385,6 @@ packages: } cpu: [loong64] os: [linux] - libc: [glibc] "@rollup/rollup-linux-ppc64-gnu@4.50.0": resolution: @@ -1396,7 +1393,6 @@ packages: } cpu: [ppc64] os: [linux] - libc: [glibc] "@rollup/rollup-linux-riscv64-gnu@4.50.0": resolution: @@ -1405,7 +1401,6 @@ packages: } cpu: [riscv64] os: [linux] - libc: [glibc] "@rollup/rollup-linux-riscv64-musl@4.50.0": resolution: @@ -1414,7 +1409,6 @@ packages: } cpu: [riscv64] os: [linux] - libc: [musl] "@rollup/rollup-linux-s390x-gnu@4.50.0": resolution: @@ -1423,7 +1417,6 @@ packages: } cpu: [s390x] os: 
[linux] - libc: [glibc] "@rollup/rollup-linux-x64-gnu@4.50.0": resolution: @@ -1432,7 +1425,6 @@ packages: } cpu: [x64] os: [linux] - libc: [glibc] "@rollup/rollup-linux-x64-musl@4.50.0": resolution: @@ -1441,7 +1433,6 @@ packages: } cpu: [x64] os: [linux] - libc: [musl] "@rollup/rollup-openharmony-arm64@4.50.0": resolution: @@ -1510,7 +1501,6 @@ packages: engines: { node: ">=10" } cpu: [arm64] os: [linux] - libc: [glibc] "@swc/core-linux-arm64-musl@1.13.5": resolution: @@ -1520,7 +1510,6 @@ packages: engines: { node: ">=10" } cpu: [arm64] os: [linux] - libc: [musl] "@swc/core-linux-x64-gnu@1.13.5": resolution: @@ -1530,7 +1519,6 @@ packages: engines: { node: ">=10" } cpu: [x64] os: [linux] - libc: [glibc] "@swc/core-linux-x64-musl@1.13.5": resolution: @@ -1540,7 +1528,6 @@ packages: engines: { node: ">=10" } cpu: [x64] os: [linux] - libc: [musl] "@swc/core-win32-arm64-msvc@1.13.5": resolution: @@ -1629,6 +1616,12 @@ packages: integrity: sha512-ec4tjL2Rr0pkZ5hww65c+EEPYwxOi4Ryv+0MtjeaSQRJyq322Q27eOQiFbuNgw2hpL4hB1/W/HBGk3VKS43osg==, } + "@types/adm-zip@0.5.7": + resolution: + { + integrity: sha512-DNEs/QvmyRLurdQPChqq0Md4zGvPwHerAJYWk9l2jCbD1VPpnzRJorOdiq4zsw09NFbYnhfsoEhWtxIzXpn2yw==, + } + "@types/chai@5.2.2": resolution: { @@ -1846,6 +1839,13 @@ packages: engines: { node: ">=0.4.0" } hasBin: true + adm-zip@0.5.16: + resolution: + { + integrity: sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==, + } + engines: { node: ">=12.0" } + ajv@6.12.6: resolution: { @@ -5144,6 +5144,10 @@ snapshots: "@tsconfig/strictest@2.0.5": {} + "@types/adm-zip@0.5.7": + dependencies: + "@types/node": 24.3.0 + "@types/chai@5.2.2": dependencies: "@types/deep-eql": 4.0.2 @@ -5327,6 +5331,8 @@ snapshots: acorn@8.15.0: {} + adm-zip@0.5.16: {} + ajv@6.12.6: dependencies: fast-deep-equal: 3.1.3