From 0310f2a4620d6b5309eed296467680316d22a71b Mon Sep 17 00:00:00 2001 From: shwetank-dev Date: Sun, 15 Mar 2026 16:04:06 -0400 Subject: [PATCH 1/5] Extract shared cache helpers into utils/cache.ts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Move getCacheDir, getCacheMetadata, writeCacheMetadata, and CacheMetadata interface from run.ts into a shared utils/cache.ts module. This prepares for the new `outdated` and `update` commands (#37) which need the same cache utilities. No behavior change — run.ts imports from the new location and all existing tests pass with updated imports. Co-Authored-By: Claude Opus 4.6 --- .../cli/src/commands/packages/run.test.ts | 2 +- packages/cli/src/commands/packages/run.ts | 48 ++----------------- packages/cli/src/utils/cache.ts | 46 ++++++++++++++++++ 3 files changed, 52 insertions(+), 44 deletions(-) create mode 100644 packages/cli/src/utils/cache.ts diff --git a/packages/cli/src/commands/packages/run.test.ts b/packages/cli/src/commands/packages/run.test.ts index a78a260..6038059 100644 --- a/packages/cli/src/commands/packages/run.test.ts +++ b/packages/cli/src/commands/packages/run.test.ts @@ -3,7 +3,6 @@ import { homedir } from "os"; import { join } from "path"; import { parsePackageSpec, - getCacheDir, resolveArgs, resolveWorkspace, substituteUserConfig, @@ -11,6 +10,7 @@ import { getLocalCacheDir, localBundleNeedsExtract, } from "./run.js"; +import { getCacheDir } from "../../utils/cache.js"; describe("parsePackageSpec", () => { describe("scoped packages", () => { diff --git a/packages/cli/src/commands/packages/run.ts b/packages/cli/src/commands/packages/run.ts index 64077a8..779ec17 100644 --- a/packages/cli/src/commands/packages/run.ts +++ b/packages/cli/src/commands/packages/run.ts @@ -14,6 +14,11 @@ import { homedir } from "os"; import { join, dirname, resolve, basename } from "path"; import { MpakClient } from "@nimblebrain/mpak-sdk"; import { createClient } from 
"../../utils/client.js"; +import { + getCacheDir, + getCacheMetadata, + writeCacheMetadata, +} from "../../utils/cache.js"; import { ConfigManager } from "../../utils/config-manager.js"; export interface RunOptions { @@ -52,12 +57,6 @@ interface McpbManifest { }; } -interface CacheMetadata { - version: string; - pulledAt: string; - platform: { os: string; arch: string }; -} - /** * Parse package specification into name and version * @example parsePackageSpec('@scope/name') => { name: '@scope/name' } @@ -83,43 +82,6 @@ export function parsePackageSpec(spec: string): { return { name, version }; } -/** - * Get cache directory for a package - * @example getCacheDir('@scope/name') => '~/.mpak/cache/scope-name' - */ -export function getCacheDir(packageName: string): string { - const cacheBase = join(homedir(), ".mpak", "cache"); - // @scope/name -> scope/name - const safeName = packageName.replace("@", "").replace("/", "-"); - return join(cacheBase, safeName); -} - -/** - * Read cache metadata - */ -function getCacheMetadata(cacheDir: string): CacheMetadata | null { - const metaPath = join(cacheDir, ".mpak-meta.json"); - if (!existsSync(metaPath)) { - return null; - } - try { - return JSON.parse(readFileSync(metaPath, "utf8")); - } catch { - return null; - } -} - -/** - * Write cache metadata - */ -function writeCacheMetadata( - cacheDir: string, - metadata: CacheMetadata, -): void { - const metaPath = join(cacheDir, ".mpak-meta.json"); - writeFileSync(metaPath, JSON.stringify(metadata, null, 2)); -} - /** * Maximum allowed uncompressed size for a bundle (500MB). * Protects against zip bombs that could exhaust disk space. 
diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts new file mode 100644 index 0000000..84458d7 --- /dev/null +++ b/packages/cli/src/utils/cache.ts @@ -0,0 +1,46 @@ +import { existsSync, readFileSync, writeFileSync } from "fs"; +import { homedir } from "os"; +import { join } from "path"; + +export interface CacheMetadata { + version: string; + pulledAt: string; + platform: { os: string; arch: string }; +} + +/** + * Get cache directory for a package + * @example getCacheDir('@scope/name') => '~/.mpak/cache/scope-name' + */ +export function getCacheDir(packageName: string): string { + const cacheBase = join(homedir(), ".mpak", "cache"); + // @scope/name -> scope/name + const safeName = packageName.replace("@", "").replace("/", "-"); + return join(cacheBase, safeName); +} + +/** + * Read cache metadata + */ +export function getCacheMetadata(cacheDir: string): CacheMetadata | null { + const metaPath = join(cacheDir, ".mpak-meta.json"); + if (!existsSync(metaPath)) { + return null; + } + try { + return JSON.parse(readFileSync(metaPath, "utf8")); + } catch { + return null; + } +} + +/** + * Write cache metadata + */ +export function writeCacheMetadata( + cacheDir: string, + metadata: CacheMetadata, +): void { + const metaPath = join(cacheDir, ".mpak-meta.json"); + writeFileSync(metaPath, JSON.stringify(metadata, null, 2)); +} From 35d38eaafa94c281a1fef1e588ee7ad7360f1523 Mon Sep 17 00:00:00 2001 From: shwetank-dev Date: Sun, 15 Mar 2026 18:58:12 -0400 Subject: [PATCH 2/5] Add async update check during mpak run for registry bundles After spawning the MCP server, fire a non-blocking background check against the registry. If the cached version differs from latest, print an update notice to stderr. Uses a 1-hour TTL (lastCheckedAt field in .mpak-meta.json) to avoid hitting the registry on every run. Errors are silently swallowed. Local bundles skip the check entirely. 
The exit handler waits up to 3s for the check to complete before calling process.exit(), so the notice and metadata write aren't lost. Refs #37 Co-Authored-By: Claude Opus 4.6 --- packages/cli/src/commands/packages/run.ts | 22 +++++++++--- packages/cli/src/utils/cache.ts | 43 +++++++++++++++++++++++ 2 files changed, 61 insertions(+), 4 deletions(-) diff --git a/packages/cli/src/commands/packages/run.ts b/packages/cli/src/commands/packages/run.ts index 779ec17..e218882 100644 --- a/packages/cli/src/commands/packages/run.ts +++ b/packages/cli/src/commands/packages/run.ts @@ -18,7 +18,9 @@ import { getCacheDir, getCacheMetadata, writeCacheMetadata, + checkForUpdateAsync, } from "../../utils/cache.js"; +import type { CacheMetadata } from "../../utils/cache.js"; import { ConfigManager } from "../../utils/config-manager.js"; export interface RunOptions { @@ -411,6 +413,8 @@ export async function handleRun( let cacheDir: string; let packageName: string; + let registryClient: ReturnType<typeof createClient> | null = null; + let cachedMeta: CacheMetadata | null = null; if (options.local) { // === LOCAL BUNDLE MODE === @@ -468,12 +472,12 @@ export async function handleRun( const { name, version: requestedVersion } = parsePackageSpec(packageSpec); packageName = name; - const client = createClient(); + registryClient = createClient(); const platform = MpakClient.detectPlatform(); cacheDir = getCacheDir(name); let needsPull = true; - const cachedMeta = getCacheMetadata(cacheDir); + cachedMeta = getCacheMetadata(cacheDir); // Check if we have a cached version if (cachedMeta && !options.update) { @@ -488,7 +492,7 @@ export async function handleRun( if (needsPull) { // Fetch download info - const downloadInfo = await client.getBundleDownload( + const downloadInfo = await registryClient.getBundleDownload( name, requestedVersion || "latest", platform, @@ -640,12 +644,22 @@ export async function handleRun( cwd: cacheDir, }); + // Fire-and-forget update check for registry bundles + let updateCheckPromise: 
Promise<void> | null = null; + if (!options.local && registryClient && cachedMeta) { + updateCheckPromise = checkForUpdateAsync(packageName, cachedMeta, cacheDir, registryClient); + } + // Forward signals process.on("SIGINT", () => child.kill("SIGINT")); process.on("SIGTERM", () => child.kill("SIGTERM")); // Wait for exit - child.on("exit", (code) => { + child.on("exit", async (code) => { + // Let the update check finish before exiting (but don't block indefinitely) + if (updateCheckPromise) { + await Promise.race([updateCheckPromise, new Promise((r) => setTimeout(r, 3000))]); + } process.exit(code ?? 0); }); diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts index 84458d7..37e9f6d 100644 --- a/packages/cli/src/utils/cache.ts +++ b/packages/cli/src/utils/cache.ts @@ -1,10 +1,12 @@ import { existsSync, readFileSync, writeFileSync } from "fs"; import { homedir } from "os"; import { join } from "path"; +import type { MpakClient } from "@nimblebrain/mpak-sdk"; export interface CacheMetadata { version: string; pulledAt: string; + lastCheckedAt?: string; platform: { os: string; arch: string }; } @@ -44,3 +46,44 @@ export function writeCacheMetadata( const metaPath = join(cacheDir, ".mpak-meta.json"); writeFileSync(metaPath, JSON.stringify(metadata, null, 2)); } + +const UPDATE_CHECK_TTL_MS = 60 * 60 * 1000; // 1 hour + +/** + * Fire-and-forget background check for bundle updates. + * Prints a notice to stderr if a newer version exists. + * Silently swallows all errors. 
+ */ +export async function checkForUpdateAsync( + packageName: string, + cachedMeta: CacheMetadata, + cacheDir: string, + client: MpakClient, +): Promise<void> { + try { + // Skip if checked within the TTL + if (cachedMeta.lastCheckedAt) { + const elapsed = Date.now() - new Date(cachedMeta.lastCheckedAt).getTime(); + if (elapsed < UPDATE_CHECK_TTL_MS) { + return; + } + } + + const detail = await client.getBundle(packageName); + + // Update lastCheckedAt regardless of whether there's an update + writeCacheMetadata(cacheDir, { + ...cachedMeta, + lastCheckedAt: new Date().toISOString(), + }); + + if (detail.latest_version !== cachedMeta.version) { + process.stderr.write( + `\n=> Update available: ${packageName} ${cachedMeta.version} -> ${detail.latest_version}\n` + ` Run 'mpak run ${packageName} --update' to update\n`, + ); + } + } catch { + // Silently swallow all errors (network down, registry unreachable, etc.) + } +} From d70ddcd968d279b14dbf4aa25eded006226b40bd Mon Sep 17 00:00:00 2001 From: shwetank-dev Date: Sun, 15 Mar 2026 19:56:02 -0400 Subject: [PATCH 3/5] Add mpak outdated and mpak update commands (#37) Adds two new CLI commands for bundle update awareness: - `mpak outdated` / `mpak bundle outdated`: scans ~/.mpak/cache/, checks each cached registry bundle against the registry, and prints a table of bundles with newer versions available. Supports --json. - `mpak update [@scope/name]` / `mpak bundle update [@scope/name]`: with a name, downloads the latest version and replaces the cache. Without args, runs the outdated check first and updates all stale bundles sequentially. 
New shared helpers in cache.ts: - listCachedBundles(): scans cache dir, skips _local/, reads manifest.json + .mpak-meta.json from each entry - downloadAndExtract(): reusable download+extract+write-metadata flow with zip bomb protection Co-Authored-By: Claude Opus 4.6 --- .../cli/src/commands/packages/outdated.ts | 75 +++++++++ packages/cli/src/commands/packages/update.ts | 68 ++++++++ packages/cli/src/program.ts | 38 +++++ packages/cli/src/utils/cache.ts | 145 +++++++++++++++++- 4 files changed, 323 insertions(+), 3 deletions(-) create mode 100644 packages/cli/src/commands/packages/outdated.ts create mode 100644 packages/cli/src/commands/packages/update.ts diff --git a/packages/cli/src/commands/packages/outdated.ts b/packages/cli/src/commands/packages/outdated.ts new file mode 100644 index 0000000..8c0f5f1 --- /dev/null +++ b/packages/cli/src/commands/packages/outdated.ts @@ -0,0 +1,75 @@ +import { listCachedBundles } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; +import { fmtError, table } from "../../utils/format.js"; + +export interface OutdatedEntry { + name: string; + current: string; + latest: string; + pulledAt: string; +} + +export interface OutdatedOptions { + json?: boolean; +} + +/** + * Check all cached registry bundles against the registry and return those + * that have a newer version available. + */ +export async function getOutdatedBundles(): Promise<OutdatedEntry[]> { + const cached = listCachedBundles(); + if (cached.length === 0) return []; + + const client = createClient(); + const results: OutdatedEntry[] = []; + + await Promise.all( + cached.map(async (bundle) => { + try { + const detail = await client.getBundle(bundle.name); + if (detail.latest_version !== bundle.version) { + results.push({ + name: bundle.name, + current: bundle.version, + latest: detail.latest_version, + pulledAt: bundle.pulledAt, + }); + } + } catch { + // Skip bundles that fail to resolve (e.g. 
deleted from registry) + } + }), + ); + + return results.sort((a, b) => a.name.localeCompare(b.name)); +} + +export async function handleOutdated(options: OutdatedOptions = {}): Promise<void> { + const cached = listCachedBundles(); + if (cached.length === 0) { + fmtError("No cached bundles found. Run 'mpak run <package>' first."); + } + + process.stderr.write("=> Checking for updates...\n"); + + const outdated = await getOutdatedBundles(); + + if (options.json) { + console.log(JSON.stringify(outdated, null, 2)); + return; + } + + if (outdated.length === 0) { + console.log("All cached bundles are up to date."); + return; + } + + console.log( + table( + ["Bundle", "Current", "Latest", "Pulled"], + outdated.map((e) => [e.name, e.current, e.latest, e.pulledAt]), + ), + ); + console.log(`\n${outdated.length} bundle(s) can be updated. Run 'mpak update' to update all.`); +} diff --git a/packages/cli/src/commands/packages/update.ts b/packages/cli/src/commands/packages/update.ts new file mode 100644 index 0000000..0f7a6b7 --- /dev/null +++ b/packages/cli/src/commands/packages/update.ts @@ -0,0 +1,68 @@ +import { downloadAndExtract } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; +import { fmtError } from "../../utils/format.js"; +import { getOutdatedBundles } from "./outdated.js"; + +export interface UpdateOptions { + json?: boolean; +} + +export async function handleUpdate( + packageName: string | undefined, + options: UpdateOptions = {}, +): Promise<void> { + const client = createClient(); + + if (packageName) { + // Update a single bundle + const { version } = await downloadAndExtract(packageName, client); + if (options.json) { + console.log(JSON.stringify({ name: packageName, version }, null, 2)); + } else { + console.log(`Updated ${packageName} to ${version}`); + } + return; + } + + // No name given — find and update all outdated bundles + process.stderr.write("=> Checking for updates...\n"); + const outdated = await getOutdatedBundles(); + + if 
(outdated.length === 0) { + if (options.json) { + console.log(JSON.stringify([], null, 2)); + } else { + console.log("All cached bundles are up to date."); + } + return; + } + + process.stderr.write( + `=> ${outdated.length} bundle(s) to update\n`, + ); + + const updated: Array<{ name: string; from: string; to: string }> = []; + + for (const entry of outdated) { + try { + const { version } = await downloadAndExtract(entry.name, client); + updated.push({ name: entry.name, from: entry.current, to: version }); + } catch (error: unknown) { + const message = error instanceof Error ? error.message : String(error); + process.stderr.write(`=> Failed to update ${entry.name}: ${message}\n`); + } + } + + if (options.json) { + console.log(JSON.stringify(updated, null, 2)); + return; + } + + if (updated.length === 0) { + fmtError("All updates failed."); + } + + for (const u of updated) { + console.log(`Updated ${u.name}: ${u.from} -> ${u.to}`); + } +} diff --git a/packages/cli/src/program.ts b/packages/cli/src/program.ts index 4daaef9..1b3661e 100644 --- a/packages/cli/src/program.ts +++ b/packages/cli/src/program.ts @@ -5,6 +5,8 @@ import { handleSearch } from "./commands/packages/search.js"; import { handleShow } from "./commands/packages/show.js"; import { handlePull } from "./commands/packages/pull.js"; import { handleRun } from "./commands/packages/run.js"; +import { handleOutdated } from "./commands/packages/outdated.js"; +import { handleUpdate } from "./commands/packages/update.js"; import { handleConfigSet, handleConfigGet, @@ -68,6 +70,26 @@ export function createProgram(): Command { await handleRun(packageSpec || "", options); }); + // ========================================================================== + // Top-level outdated / update aliases + // ========================================================================== + + program + .command("outdated") + .description('Check cached bundles for updates (alias for "bundle outdated")') + .option("--json", 
"Output as JSON") + .action(async (options) => { + await handleOutdated(options); + }); + + program + .command("update [package]") + .description('Update cached bundles (alias for "bundle update")') + .option("--json", "Output as JSON") + .action(async (packageName, options) => { + await handleUpdate(packageName, options); + }); + // ========================================================================== // Bundle namespace (MCP bundles) // ========================================================================== @@ -114,6 +136,22 @@ export function createProgram(): Command { await handleRun(packageSpec || "", options); }); + bundle + .command("outdated") + .description("Check cached bundles for available updates") + .option("--json", "Output as JSON") + .action(async (options) => { + await handleOutdated(options); + }); + + bundle + .command("update [package]") + .description("Update cached bundles to latest versions") + .option("--json", "Output as JSON") + .action(async (packageName, options) => { + await handleUpdate(packageName, options); + }); + // ========================================================================== // Skill namespace (Agent Skills) // ========================================================================== diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts index 37e9f6d..25537cb 100644 --- a/packages/cli/src/utils/cache.ts +++ b/packages/cli/src/utils/cache.ts @@ -1,7 +1,15 @@ -import { existsSync, readFileSync, writeFileSync } from "fs"; +import { + existsSync, + mkdirSync, + readdirSync, + readFileSync, + rmSync, + writeFileSync, +} from "fs"; +import { execFileSync } from "child_process"; import { homedir } from "os"; -import { join } from "path"; -import type { MpakClient } from "@nimblebrain/mpak-sdk"; +import { dirname, join } from "path"; +import { MpakClient } from "@nimblebrain/mpak-sdk"; export interface CacheMetadata { version: string; @@ -87,3 +95,134 @@ export async function 
checkForUpdateAsync( // Silently swallow all errors (network down, registry unreachable, etc.) } } + +export interface CachedBundle { + name: string; + version: string; + pulledAt: string; + cacheDir: string; +} + +/** + * Scan ~/.mpak/cache/ and return metadata for every cached registry bundle. + * Skips the _local/ directory (local dev bundles). + */ +export function listCachedBundles(): CachedBundle[] { + const cacheBase = join(homedir(), ".mpak", "cache"); + if (!existsSync(cacheBase)) return []; + + const entries = readdirSync(cacheBase, { withFileTypes: true }); + const bundles: CachedBundle[] = []; + + for (const entry of entries) { + if (!entry.isDirectory() || entry.name === "_local") continue; + + const dir = join(cacheBase, entry.name); + const meta = getCacheMetadata(dir); + if (!meta) continue; + + const manifestPath = join(dir, "manifest.json"); + if (!existsSync(manifestPath)) continue; + + try { + const manifest = JSON.parse(readFileSync(manifestPath, "utf8")); + bundles.push({ + name: manifest.name, + version: meta.version, + pulledAt: meta.pulledAt, + cacheDir: dir, + }); + } catch { + // Skip corrupt bundles + } + } + + return bundles; +} + +/** + * Maximum allowed uncompressed size for a bundle (500MB). + */ +const MAX_UNCOMPRESSED_SIZE = 500 * 1024 * 1024; + +/** + * Download a bundle from the registry, extract it into the cache, and write metadata. + * Returns the cache directory path. 
+ */ +export async function downloadAndExtract( + name: string, + client: MpakClient, + requestedVersion?: string, +): Promise<{ cacheDir: string; version: string }> { + const platform = MpakClient.detectPlatform(); + const downloadInfo = await client.getBundleDownload( + name, + requestedVersion || "latest", + platform, + ); + const bundle = downloadInfo.bundle; + const cacheDir = getCacheDir(name); + + // Download to temp file + const tempPath = join(homedir(), ".mpak", "tmp", `${Date.now()}.mcpb`); + mkdirSync(dirname(tempPath), { recursive: true }); + + process.stderr.write(`=> Pulling ${name}@${bundle.version}...\n`); + + const response = await fetch(downloadInfo.url); + if (!response.ok) { + throw new Error(`Failed to download bundle: ${response.statusText}`); + } + const arrayBuffer = await response.arrayBuffer(); + writeFileSync(tempPath, Buffer.from(arrayBuffer)); + + // Clear old cache and extract + if (existsSync(cacheDir)) { + rmSync(cacheDir, { recursive: true, force: true }); + } + mkdirSync(cacheDir, { recursive: true }); + + // Check uncompressed size before extraction + try { + const listOutput = execFileSync("unzip", ["-l", tempPath], { + stdio: "pipe", + encoding: "utf8", + }); + const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); + if (totalMatch) { + const totalSize = parseInt(totalMatch[1]!, 10); + if (totalSize > MAX_UNCOMPRESSED_SIZE) { + throw new Error( + `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, + ); + } + } + } catch (error: unknown) { + if ( + error instanceof Error && + error.message.includes("exceeds maximum allowed") + ) { + throw error; + } + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Cannot verify bundle size before extraction: ${message}`); + } + + execFileSync("unzip", ["-o", "-q", tempPath, "-d", cacheDir], { + stdio: "pipe", + }); + + // Write metadata + writeCacheMetadata(cacheDir, { + version: bundle.version, + pulledAt: new Date().toISOString(), + platform: bundle.platform, + }); + + // Cleanup temp file + rmSync(tempPath, { force: true }); + + process.stderr.write(`=> Cached ${name}@${bundle.version}\n`); + + return { cacheDir, version: bundle.version }; +} From 97a7cfe92703f9c74796ad4aaa5df72f184ab8da Mon Sep 17 00:00:00 2001 From: shwetank-dev Date: Sun, 15 Mar 2026 19:59:29 -0400 Subject: [PATCH 4/5] Add tests for listCachedBundles, getOutdatedBundles, and handleUpdate - cache.test.ts (10 tests): listCachedBundles() using real temp dirs. Covers empty/missing cache, multiple bundles, _local/ skipping, missing or corrupt manifest/meta files, non-directory entries, and verifying name is read from manifest not directory name. - outdated.test.ts (6 tests): getOutdatedBundles() with mocked SDK client. Covers empty cache, all-up-to-date, identifying outdated bundles with correct versions, alphabetical sorting, skipping bundles deleted from registry, and parallel checking. - update.test.ts (8 tests): handleUpdate() with mocked downloadAndExtract and getOutdatedBundles. Covers single-bundle update, update-all, JSON output for both modes, continuation on partial failure, and not calling getOutdatedBundles in single mode. 
Co-Authored-By: Claude Opus 4.6 --- .../src/commands/packages/outdated.test.ts | 122 ++++++++++++ .../cli/src/commands/packages/update.test.ts | 124 ++++++++++++ packages/cli/src/utils/cache.test.ts | 185 ++++++++++++++++++ 3 files changed, 431 insertions(+) create mode 100644 packages/cli/src/commands/packages/outdated.test.ts create mode 100644 packages/cli/src/commands/packages/update.test.ts create mode 100644 packages/cli/src/utils/cache.test.ts diff --git a/packages/cli/src/commands/packages/outdated.test.ts b/packages/cli/src/commands/packages/outdated.test.ts new file mode 100644 index 0000000..51e6a0b --- /dev/null +++ b/packages/cli/src/commands/packages/outdated.test.ts @@ -0,0 +1,122 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { getOutdatedBundles } from "./outdated.js"; + +vi.mock("../../utils/cache.js", () => ({ + listCachedBundles: vi.fn(), +})); + +vi.mock("../../utils/client.js", () => ({ + createClient: vi.fn(), +})); + +import { listCachedBundles } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; + +const mockListCachedBundles = vi.mocked(listCachedBundles); +const mockCreateClient = vi.mocked(createClient); + +function makeMockClient(registry: Record<string, string>) { + return { + getBundle: vi.fn(async (name: string) => { + const version = registry[name]; + if (!version) throw new Error(`Not found: ${name}`); + return { latest_version: version }; + }), + }; } + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe("getOutdatedBundles", () => { + it("returns empty array when no bundles are cached", async () => { + mockListCachedBundles.mockReturnValue([]); + + const result = await getOutdatedBundles(); + expect(result).toEqual([]); + expect(mockCreateClient).not.toHaveBeenCalled(); + }); + + it("returns empty array when all bundles are up to date", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: 
"/cache/a" }, + { name: "@scope/b", version: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/a": "1.0.0", + "@scope/b": "2.0.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toEqual([]); + }); + + it("returns outdated bundles with current and latest versions", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + { name: "@scope/b", version: "2.0.0", pulledAt: "2025-02-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/a": "1.1.0", + "@scope/b": "2.0.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toEqual([ + { + name: "@scope/a", + current: "1.0.0", + latest: "1.1.0", + pulledAt: "2025-01-01T00:00:00.000Z", + }, + ]); + }); + + it("returns multiple outdated bundles sorted by name", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/zebra", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/z" }, + { name: "@scope/alpha", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/zebra": "2.0.0", + "@scope/alpha": "1.1.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toHaveLength(2); + expect(result[0]!.name).toBe("@scope/alpha"); + expect(result[1]!.name).toBe("@scope/zebra"); + }); + + it("skips bundles that fail to resolve from registry", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/exists", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/e" }, + { name: "@scope/deleted", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/d" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + 
"@scope/exists": "2.0.0", + // @scope/deleted not in registry — getBundle will throw + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@scope/exists"); + }); + + it("checks all bundles in parallel", async () => { + const getBundle = vi.fn(async (name: string) => { + return { latest_version: name === "@scope/a" ? "2.0.0" : "1.0.0" }; + }); + mockListCachedBundles.mockReturnValue([ + { name: "@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + { name: "@scope/b", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue({ getBundle } as never); + + await getOutdatedBundles(); + expect(getBundle).toHaveBeenCalledTimes(2); + expect(getBundle).toHaveBeenCalledWith("@scope/a"); + expect(getBundle).toHaveBeenCalledWith("@scope/b"); + }); +}); diff --git a/packages/cli/src/commands/packages/update.test.ts b/packages/cli/src/commands/packages/update.test.ts new file mode 100644 index 0000000..b2369ee --- /dev/null +++ b/packages/cli/src/commands/packages/update.test.ts @@ -0,0 +1,124 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { handleUpdate } from "./update.js"; + +vi.mock("../../utils/cache.js", () => ({ + downloadAndExtract: vi.fn(), +})); + +vi.mock("../../utils/client.js", () => ({ + createClient: vi.fn(() => ({ getBundle: vi.fn() })), +})); + +vi.mock("./outdated.js", () => ({ + getOutdatedBundles: vi.fn(), +})); + +import { downloadAndExtract } from "../../utils/cache.js"; +import { getOutdatedBundles } from "./outdated.js"; + +const mockDownloadAndExtract = vi.mocked(downloadAndExtract); +const mockGetOutdatedBundles = vi.mocked(getOutdatedBundles); + +beforeEach(() => { + vi.clearAllMocks(); + vi.spyOn(console, "log").mockImplementation(() => {}); + vi.spyOn(process.stderr, "write").mockImplementation(() => true); +}); + +describe("handleUpdate", () => { 
+ describe("single bundle", () => { + it("downloads and reports the updated version", async () => { + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", {}); + + expect(mockDownloadAndExtract).toHaveBeenCalledTimes(1); + expect(mockDownloadAndExtract.mock.calls[0]![0]).toBe("@scope/a"); + expect(console.log).toHaveBeenCalledWith("Updated @scope/a to 2.0.0"); + }); + + it("outputs JSON when --json flag is set", async () => { + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", { json: true }); + + expect(console.log).toHaveBeenCalledWith( + JSON.stringify({ name: "@scope/a", version: "2.0.0" }, null, 2), + ); + }); + + it("does not call getOutdatedBundles", async () => { + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", {}); + + expect(mockGetOutdatedBundles).not.toHaveBeenCalled(); + }); + }); + + describe("update all", () => { + it("reports all up to date when nothing is outdated", async () => { + mockGetOutdatedBundles.mockResolvedValue([]); + + await handleUpdate(undefined, {}); + + expect(console.log).toHaveBeenCalledWith("All cached bundles are up to date."); + expect(mockDownloadAndExtract).not.toHaveBeenCalled(); + }); + + it("updates all outdated bundles", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + mockDownloadAndExtract + .mockResolvedValueOnce({ cacheDir: "/cache/a", version: "2.0.0" }) + .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); + + await handleUpdate(undefined, {}); + + expect(mockDownloadAndExtract).toHaveBeenCalledTimes(2); + expect(console.log).toHaveBeenCalledWith("Updated @scope/a: 1.0.0 -> 2.0.0"); 
+ expect(console.log).toHaveBeenCalledWith("Updated @scope/b: 1.0.0 -> 1.1.0"); + }); + + it("continues updating when one bundle fails", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + mockDownloadAndExtract + .mockRejectedValueOnce(new Error("Network error")) + .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); + + await handleUpdate(undefined, {}); + + expect(mockDownloadAndExtract).toHaveBeenCalledTimes(2); + expect(process.stderr.write).toHaveBeenCalledWith( + expect.stringContaining("Failed to update @scope/a"), + ); + expect(console.log).toHaveBeenCalledWith("Updated @scope/b: 1.0.0 -> 1.1.0"); + }); + + it("outputs JSON when --json flag is set", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate(undefined, { json: true }); + + expect(console.log).toHaveBeenCalledWith( + JSON.stringify([{ name: "@scope/a", from: "1.0.0", to: "2.0.0" }], null, 2), + ); + }); + + it("outputs empty JSON array when nothing is outdated with --json", async () => { + mockGetOutdatedBundles.mockResolvedValue([]); + + await handleUpdate(undefined, { json: true }); + + expect(console.log).toHaveBeenCalledWith(JSON.stringify([], null, 2)); + }); + }); +}); diff --git a/packages/cli/src/utils/cache.test.ts b/packages/cli/src/utils/cache.test.ts new file mode 100644 index 0000000..9fb6e1a --- /dev/null +++ b/packages/cli/src/utils/cache.test.ts @@ -0,0 +1,185 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import { mkdirSync, rmSync, writeFileSync } from "fs"; +import { join } from "path"; +import { tmpdir } from "os"; 
+import { listCachedBundles } from "./cache.js"; + +/** + * Creates a fake cached bundle directory with manifest.json and .mpak-meta.json. + */ +function seedBundle( + cacheBase: string, + dirName: string, + manifest: { name: string; version: string }, + meta: { version: string; pulledAt: string; platform: { os: string; arch: string } }, +): void { + const dir = join(cacheBase, dirName); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), JSON.stringify(manifest)); + writeFileSync(join(dir, ".mpak-meta.json"), JSON.stringify(meta)); +} + +describe("listCachedBundles", () => { + let tempCacheBase: string; + const originalHome = process.env["HOME"]; + + beforeEach(() => { + // Create a temp dir that acts as ~/.mpak/cache/ + const tempHome = join(tmpdir(), `mpak-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + tempCacheBase = join(tempHome, ".mpak", "cache"); + mkdirSync(tempCacheBase, { recursive: true }); + process.env["HOME"] = tempHome; + }); + + afterEach(() => { + process.env["HOME"] = originalHome; + // Clean up temp dir (parent of .mpak) + const tempHome = tempCacheBase.replace("/.mpak/cache", ""); + rmSync(tempHome, { recursive: true, force: true }); + }); + + it("returns empty array when cache dir does not exist", () => { + // Point HOME to a dir with no .mpak/cache + const emptyHome = join(tmpdir(), `mpak-empty-${Date.now()}`); + mkdirSync(emptyHome, { recursive: true }); + process.env["HOME"] = emptyHome; + + expect(listCachedBundles()).toEqual([]); + + rmSync(emptyHome, { recursive: true, force: true }); + }); + + it("returns empty array when cache dir is empty", () => { + expect(listCachedBundles()).toEqual([]); + }); + + it("returns cached bundles with correct metadata", () => { + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const 
result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]).toEqual({ + name: "@nimblebraininc/echo", + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + cacheDir: join(tempCacheBase, "nimblebraininc-echo"), + }); + }); + + it("returns multiple cached bundles", () => { + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + seedBundle(tempCacheBase, "nimblebraininc-todoist", { + name: "@nimblebraininc/todoist", + version: "2.1.0", + }, { + version: "2.1.0", + pulledAt: "2025-03-14T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(2); + expect(result.map((b) => b.name).sort()).toEqual([ + "@nimblebraininc/echo", + "@nimblebraininc/todoist", + ]); + }); + + it("skips _local directory", () => { + // Create a _local dir with bundle-like contents + const localDir = join(tempCacheBase, "_local"); + mkdirSync(localDir, { recursive: true }); + writeFileSync(join(localDir, "manifest.json"), JSON.stringify({ name: "local-dev" })); + writeFileSync(join(localDir, ".mpak-meta.json"), JSON.stringify({ version: "0.0.1" })); + + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@nimblebraininc/echo"); + }); + + it("skips directories without .mpak-meta.json", () => { + const dir = join(tempCacheBase, "no-meta"); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), JSON.stringify({ name: "@scope/no-meta" })); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips directories without 
manifest.json", () => { + const dir = join(tempCacheBase, "no-manifest"); + mkdirSync(dir, { recursive: true }); + writeFileSync( + join(dir, ".mpak-meta.json"), + JSON.stringify({ version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", platform: { os: "darwin", arch: "arm64" } }), + ); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips directories with corrupt manifest.json", () => { + const dir = join(tempCacheBase, "corrupt"); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), "not json{{{"); + writeFileSync( + join(dir, ".mpak-meta.json"), + JSON.stringify({ version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", platform: { os: "darwin", arch: "arm64" } }), + ); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips files in cache dir (only reads directories)", () => { + writeFileSync(join(tempCacheBase, "stray-file.txt"), "hello"); + + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + }); + + it("reads name from manifest.json, not directory name", () => { + seedBundle(tempCacheBase, "weird-dir-name", { + name: "@actual/package-name", + version: "3.0.0", + }, { + version: "3.0.0", + pulledAt: "2025-01-01T00:00:00.000Z", + platform: { os: "linux", arch: "x64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@actual/package-name"); + }); +}); From 9d99b9dcb3b3840bedb9050222d35d93384c1cd2 Mon Sep 17 00:00:00 2001 From: shwetank-dev Date: Mon, 16 Mar 2026 13:56:07 -0400 Subject: [PATCH 5/5] Consolidate download/extract logic into shared cache helpers Split downloadAndExtract into resolveBundle + downloadAndExtract so run.ts can check the resolved version against cache before downloading. 
Extract shared extractZip helper (with zip bomb protection) used by both registry and local bundle paths, eliminating duplicate security-critical code from run.ts. Remove redundant listCachedBundles call in outdated.ts. Co-Authored-By: Claude Opus 4.6 --- .../cli/src/commands/packages/outdated.ts | 7 +- packages/cli/src/commands/packages/run.ts | 132 ++---------------- .../cli/src/commands/packages/update.test.ts | 30 +++- packages/cli/src/commands/packages/update.ts | 8 +- packages/cli/src/utils/cache.ts | 93 +++++++----- 5 files changed, 97 insertions(+), 173 deletions(-) diff --git a/packages/cli/src/commands/packages/outdated.ts b/packages/cli/src/commands/packages/outdated.ts index 8c0f5f1..9771306 100644 --- a/packages/cli/src/commands/packages/outdated.ts +++ b/packages/cli/src/commands/packages/outdated.ts @@ -1,6 +1,6 @@ import { listCachedBundles } from "../../utils/cache.js"; import { createClient } from "../../utils/client.js"; -import { fmtError, table } from "../../utils/format.js"; +import { table } from "../../utils/format.js"; export interface OutdatedEntry { name: string; @@ -46,11 +46,6 @@ export async function getOutdatedBundles(): Promise { } export async function handleOutdated(options: OutdatedOptions = {}): Promise { - const cached = listCachedBundles(); - if (cached.length === 0) { - fmtError("No cached bundles found. 
Run 'mpak run ' first."); - } - process.stderr.write("=> Checking for updates...\n"); const outdated = await getOutdatedBundles(); diff --git a/packages/cli/src/commands/packages/run.ts b/packages/cli/src/commands/packages/run.ts index e218882..93109b6 100644 --- a/packages/cli/src/commands/packages/run.ts +++ b/packages/cli/src/commands/packages/run.ts @@ -1,8 +1,7 @@ -import { execFileSync, spawn, spawnSync } from "child_process"; +import { spawn, spawnSync } from "child_process"; import { createInterface } from "readline"; import { existsSync, - mkdirSync, readFileSync, writeFileSync, chmodSync, @@ -11,14 +10,15 @@ import { } from "fs"; import { createHash } from "crypto"; import { homedir } from "os"; -import { join, dirname, resolve, basename } from "path"; -import { MpakClient } from "@nimblebrain/mpak-sdk"; +import { join, resolve, basename } from "path"; import { createClient } from "../../utils/client.js"; import { getCacheDir, getCacheMetadata, - writeCacheMetadata, checkForUpdateAsync, + extractZip, + resolveBundle, + downloadAndExtract, } from "../../utils/cache.js"; import type { CacheMetadata } from "../../utils/cache.js"; import { ConfigManager } from "../../utils/config-manager.js"; @@ -84,61 +84,6 @@ export function parsePackageSpec(spec: string): { return { name, version }; } -/** - * Maximum allowed uncompressed size for a bundle (500MB). - * Protects against zip bombs that could exhaust disk space. 
- */ -const MAX_UNCOMPRESSED_SIZE = 500 * 1024 * 1024; - -/** - * Extract ZIP file to directory (simple implementation without external deps) - */ -async function extractZip( - zipPath: string, - destDir: string, -): Promise { - // Use native unzip command (available on macOS, Linux, and Windows with WSL) - // Check uncompressed size before extraction to prevent zip bombs - try { - const listOutput = execFileSync('unzip', ['-l', zipPath], { - stdio: "pipe", - encoding: "utf8", - }); - const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); - if (totalMatch) { - const totalSize = parseInt(totalMatch[1]!, 10); - if (totalSize > MAX_UNCOMPRESSED_SIZE) { - throw new Error( - `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, - ); - } - } - } catch (error: unknown) { - if ( - error instanceof Error && - error.message.includes("exceeds maximum allowed") - ) { - throw error; - } - // Fail closed: if we can't verify the size, don't extract - const message = error instanceof Error ? error.message : String(error); - throw new Error(`Cannot verify bundle size before extraction: ${message}`); - } - - // Ensure destination exists - mkdirSync(destDir, { recursive: true }); - - try { - execFileSync('unzip', ['-o', '-q', zipPath, '-d', destDir], { - stdio: "pipe", - }); - } catch (error: unknown) { - const message = - error instanceof Error ? 
error.message : String(error); - throw new Error(`Failed to extract bundle: ${message}`); - } -} - /** * Read manifest from extracted bundle */ @@ -379,23 +324,6 @@ function findPythonCommand(): string { return "python"; } -/** - * Download a bundle to a file path - */ -async function downloadBundle( - downloadUrl: string, - outputPath: string, -): Promise { - const response = await fetch(downloadUrl); - if (!response.ok) { - throw new Error( - `Failed to download bundle: ${response.statusText}`, - ); - } - const arrayBuffer = await response.arrayBuffer(); - writeFileSync(outputPath, Buffer.from(arrayBuffer)); -} - /** * Run a package from the registry or a local bundle file */ @@ -446,12 +374,11 @@ export async function handleRun( if (existsSync(cacheDir)) { rmSync(cacheDir, { recursive: true, force: true }); } - mkdirSync(cacheDir, { recursive: true }); process.stderr.write( `=> Extracting ${basename(bundlePath)}...\n`, ); - await extractZip(bundlePath, cacheDir); + extractZip(bundlePath, cacheDir); // Write local metadata writeFileSync( @@ -473,7 +400,6 @@ export async function handleRun( parsePackageSpec(packageSpec); packageName = name; registryClient = createClient(); - const platform = MpakClient.detectPlatform(); cacheDir = getCacheDir(name); let needsPull = true; @@ -491,59 +417,19 @@ export async function handleRun( } if (needsPull) { - // Fetch download info - const downloadInfo = await registryClient.getBundleDownload( - name, - requestedVersion || "latest", - platform, - ); - const bundle = downloadInfo.bundle; + const downloadInfo = await resolveBundle(name, registryClient, requestedVersion); // Check if cached version is already the latest if ( cachedMeta && - cachedMeta.version === bundle.version && + cachedMeta.version === downloadInfo.bundle.version && !options.update ) { needsPull = false; } if (needsPull) { - // Download to temp file - const tempPath = join( - homedir(), - ".mpak", - "tmp", - `${Date.now()}.mcpb`, - ); - 
mkdirSync(dirname(tempPath), { recursive: true }); - - process.stderr.write( - `=> Pulling ${name}@${bundle.version}...\n`, - ); - await downloadBundle(downloadInfo.url, tempPath); - - // Clear old cache and extract - if (existsSync(cacheDir)) { - rmSync(cacheDir, { recursive: true, force: true }); - } - mkdirSync(cacheDir, { recursive: true }); - - await extractZip(tempPath, cacheDir); - - // Write metadata - writeCacheMetadata(cacheDir, { - version: bundle.version, - pulledAt: new Date().toISOString(), - platform: bundle.platform, - }); - - // Cleanup temp file - rmSync(tempPath, { force: true }); - - process.stderr.write( - `=> Cached ${name}@${bundle.version}\n`, - ); + ({ cacheDir } = await downloadAndExtract(name, downloadInfo)); } } } diff --git a/packages/cli/src/commands/packages/update.test.ts b/packages/cli/src/commands/packages/update.test.ts index b2369ee..e9a2a61 100644 --- a/packages/cli/src/commands/packages/update.test.ts +++ b/packages/cli/src/commands/packages/update.test.ts @@ -2,6 +2,7 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; import { handleUpdate } from "./update.js"; vi.mock("../../utils/cache.js", () => ({ + resolveBundle: vi.fn(), downloadAndExtract: vi.fn(), })); @@ -13,12 +14,18 @@ vi.mock("./outdated.js", () => ({ getOutdatedBundles: vi.fn(), })); -import { downloadAndExtract } from "../../utils/cache.js"; +import { resolveBundle, downloadAndExtract } from "../../utils/cache.js"; import { getOutdatedBundles } from "./outdated.js"; +const mockResolveBundle = vi.mocked(resolveBundle); const mockDownloadAndExtract = vi.mocked(downloadAndExtract); const mockGetOutdatedBundles = vi.mocked(getOutdatedBundles); +const fakeDownloadInfo = { + url: "https://example.com/bundle.mcpb", + bundle: { version: "2.0.0", platform: { os: "darwin", arch: "arm64" } }, +}; + beforeEach(() => { vi.clearAllMocks(); vi.spyOn(console, "log").mockImplementation(() => {}); @@ -27,17 +34,20 @@ beforeEach(() => { describe("handleUpdate", () => 
{ describe("single bundle", () => { - it("downloads and reports the updated version", async () => { + it("resolves then downloads and reports the updated version", async () => { + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); await handleUpdate("@scope/a", {}); - expect(mockDownloadAndExtract).toHaveBeenCalledTimes(1); - expect(mockDownloadAndExtract.mock.calls[0]![0]).toBe("@scope/a"); + expect(mockResolveBundle).toHaveBeenCalledTimes(1); + expect(mockResolveBundle.mock.calls[0]![0]).toBe("@scope/a"); + expect(mockDownloadAndExtract).toHaveBeenCalledWith("@scope/a", fakeDownloadInfo); expect(console.log).toHaveBeenCalledWith("Updated @scope/a to 2.0.0"); }); it("outputs JSON when --json flag is set", async () => { + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); await handleUpdate("@scope/a", { json: true }); @@ -48,6 +58,7 @@ describe("handleUpdate", () => { }); it("does not call getOutdatedBundles", async () => { + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); await handleUpdate("@scope/a", {}); @@ -71,6 +82,11 @@ describe("handleUpdate", () => { { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, ]); + const infoA = { ...fakeDownloadInfo, bundle: { ...fakeDownloadInfo.bundle, version: "2.0.0" } }; + const infoB = { ...fakeDownloadInfo, bundle: { ...fakeDownloadInfo.bundle, version: "1.1.0" } }; + mockResolveBundle + .mockResolvedValueOnce(infoA) + .mockResolvedValueOnce(infoB); mockDownloadAndExtract .mockResolvedValueOnce({ cacheDir: "/cache/a", version: "2.0.0" }) .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); @@ -87,13 
+103,14 @@ describe("handleUpdate", () => { { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, ]); - mockDownloadAndExtract + mockResolveBundle .mockRejectedValueOnce(new Error("Network error")) + .mockResolvedValueOnce(fakeDownloadInfo); + mockDownloadAndExtract .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); await handleUpdate(undefined, {}); - expect(mockDownloadAndExtract).toHaveBeenCalledTimes(2); expect(process.stderr.write).toHaveBeenCalledWith( expect.stringContaining("Failed to update @scope/a"), ); @@ -104,6 +121,7 @@ describe("handleUpdate", () => { mockGetOutdatedBundles.mockResolvedValue([ { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, ]); + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); await handleUpdate(undefined, { json: true }); diff --git a/packages/cli/src/commands/packages/update.ts b/packages/cli/src/commands/packages/update.ts index 0f7a6b7..baed8f7 100644 --- a/packages/cli/src/commands/packages/update.ts +++ b/packages/cli/src/commands/packages/update.ts @@ -1,4 +1,4 @@ -import { downloadAndExtract } from "../../utils/cache.js"; +import { downloadAndExtract, resolveBundle } from "../../utils/cache.js"; import { createClient } from "../../utils/client.js"; import { fmtError } from "../../utils/format.js"; import { getOutdatedBundles } from "./outdated.js"; @@ -15,7 +15,8 @@ export async function handleUpdate( if (packageName) { // Update a single bundle - const { version } = await downloadAndExtract(packageName, client); + const downloadInfo = await resolveBundle(packageName, client); + const { version } = await downloadAndExtract(packageName, downloadInfo); if (options.json) { console.log(JSON.stringify({ name: packageName, version }, null, 2)); 
} else { @@ -45,7 +46,8 @@ export async function handleUpdate( for (const entry of outdated) { try { - const { version } = await downloadAndExtract(entry.name, client); + const downloadInfo = await resolveBundle(entry.name, client); + const { version } = await downloadAndExtract(entry.name, downloadInfo); updated.push({ name: entry.name, from: entry.current, to: version }); } catch (error: unknown) { const message = error instanceof Error ? error.message : String(error); diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts index 25537cb..5b002c1 100644 --- a/packages/cli/src/utils/cache.ts +++ b/packages/cli/src/utils/cache.ts @@ -146,20 +146,72 @@ export function listCachedBundles(): CachedBundle[] { const MAX_UNCOMPRESSED_SIZE = 500 * 1024 * 1024; /** - * Download a bundle from the registry, extract it into the cache, and write metadata. - * Returns the cache directory path. + * Check uncompressed size and extract a ZIP file to a directory. + * Rejects bundles exceeding MAX_UNCOMPRESSED_SIZE (zip bomb protection). */ -export async function downloadAndExtract( +export function extractZip(zipPath: string, destDir: string): void { + // Check uncompressed size before extraction + try { + const listOutput = execFileSync("unzip", ["-l", zipPath], { + stdio: "pipe", + encoding: "utf8", + }); + const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); + if (totalMatch) { + const totalSize = parseInt(totalMatch[1]!, 10); + if (totalSize > MAX_UNCOMPRESSED_SIZE) { + throw new Error( + `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, + ); + } + } + } catch (error: unknown) { + if ( + error instanceof Error && + error.message.includes("exceeds maximum allowed") + ) { + throw error; + } + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Cannot verify bundle size before extraction: ${message}`); + } + + mkdirSync(destDir, { recursive: true }); + execFileSync("unzip", ["-o", "-q", zipPath, "-d", destDir], { + stdio: "pipe", + }); +} + +export interface BundleDownloadInfo { + url: string; + bundle: { version: string; platform: { os: string; arch: string } }; +} + +/** + * Resolve a bundle from the registry without downloading it. + * Returns the download URL and resolved version/platform metadata. + */ +export async function resolveBundle( name: string, client: MpakClient, requestedVersion?: string, -): Promise<{ cacheDir: string; version: string }> { +): Promise { const platform = MpakClient.detectPlatform(); - const downloadInfo = await client.getBundleDownload( + return client.getBundleDownload( name, requestedVersion || "latest", platform, ); +} + +/** + * Download a bundle using pre-resolved download info, extract it into the + * cache, and write metadata. Returns the cache directory path. 
+ */ +export async function downloadAndExtract( + name: string, + downloadInfo: BundleDownloadInfo, +): Promise<{ cacheDir: string; version: string }> { const bundle = downloadInfo.bundle; const cacheDir = getCacheDir(name); @@ -180,37 +232,8 @@ export async function downloadAndExtract( if (existsSync(cacheDir)) { rmSync(cacheDir, { recursive: true, force: true }); } - mkdirSync(cacheDir, { recursive: true }); - - // Check uncompressed size before extraction - try { - const listOutput = execFileSync("unzip", ["-l", tempPath], { - stdio: "pipe", - encoding: "utf8", - }); - const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); - if (totalMatch) { - const totalSize = parseInt(totalMatch[1]!, 10); - if (totalSize > MAX_UNCOMPRESSED_SIZE) { - throw new Error( - `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, - ); - } - } - } catch (error: unknown) { - if ( - error instanceof Error && - error.message.includes("exceeds maximum allowed") - ) { - throw error; - } - const message = error instanceof Error ? error.message : String(error); - throw new Error(`Cannot verify bundle size before extraction: ${message}`); - } - execFileSync("unzip", ["-o", "-q", tempPath, "-d", cacheDir], { - stdio: "pipe", - }); + extractZip(tempPath, cacheDir); // Write metadata writeCacheMetadata(cacheDir, {