diff --git a/packages/cli/src/commands/packages/outdated.test.ts b/packages/cli/src/commands/packages/outdated.test.ts new file mode 100644 index 0000000..51e6a0b --- /dev/null +++ b/packages/cli/src/commands/packages/outdated.test.ts @@ -0,0 +1,122 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { getOutdatedBundles } from "./outdated.js"; + +vi.mock("../../utils/cache.js", () => ({ + listCachedBundles: vi.fn(), +})); + +vi.mock("../../utils/client.js", () => ({ + createClient: vi.fn(), +})); + +import { listCachedBundles } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; + +const mockListCachedBundles = vi.mocked(listCachedBundles); +const mockCreateClient = vi.mocked(createClient); + +function makeMockClient(registry: Record) { + return { + getBundle: vi.fn(async (name: string) => { + const version = registry[name]; + if (!version) throw new Error(`Not found: ${name}`); + return { latest_version: version }; + }), + }; +} + +beforeEach(() => { + vi.clearAllMocks(); +}); + +describe("getOutdatedBundles", () => { + it("returns empty array when no bundles are cached", async () => { + mockListCachedBundles.mockReturnValue([]); + + const result = await getOutdatedBundles(); + expect(result).toEqual([]); + expect(mockCreateClient).not.toHaveBeenCalled(); + }); + + it("returns empty array when all bundles are up to date", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + { name: "@scope/b", version: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/a": "1.0.0", + "@scope/b": "2.0.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toEqual([]); + }); + + it("returns outdated bundles with current and latest versions", async () => { + mockListCachedBundles.mockReturnValue([ + { name: 
"@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + { name: "@scope/b", version: "2.0.0", pulledAt: "2025-02-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/a": "1.1.0", + "@scope/b": "2.0.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toEqual([ + { + name: "@scope/a", + current: "1.0.0", + latest: "1.1.0", + pulledAt: "2025-01-01T00:00:00.000Z", + }, + ]); + }); + + it("returns multiple outdated bundles sorted by name", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/zebra", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/z" }, + { name: "@scope/alpha", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/zebra": "2.0.0", + "@scope/alpha": "1.1.0", + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toHaveLength(2); + expect(result[0]!.name).toBe("@scope/alpha"); + expect(result[1]!.name).toBe("@scope/zebra"); + }); + + it("skips bundles that fail to resolve from registry", async () => { + mockListCachedBundles.mockReturnValue([ + { name: "@scope/exists", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/e" }, + { name: "@scope/deleted", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/d" }, + ]); + mockCreateClient.mockReturnValue(makeMockClient({ + "@scope/exists": "2.0.0", + // @scope/deleted not in registry — getBundle will throw + }) as never); + + const result = await getOutdatedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@scope/exists"); + }); + + it("checks all bundles in parallel", async () => { + const getBundle = vi.fn(async (name: string) => { + return { latest_version: name === "@scope/a" ? 
"2.0.0" : "1.0.0" }; + }); + mockListCachedBundles.mockReturnValue([ + { name: "@scope/a", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/a" }, + { name: "@scope/b", version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", cacheDir: "/cache/b" }, + ]); + mockCreateClient.mockReturnValue({ getBundle } as never); + + await getOutdatedBundles(); + expect(getBundle).toHaveBeenCalledTimes(2); + expect(getBundle).toHaveBeenCalledWith("@scope/a"); + expect(getBundle).toHaveBeenCalledWith("@scope/b"); + }); +}); diff --git a/packages/cli/src/commands/packages/outdated.ts b/packages/cli/src/commands/packages/outdated.ts new file mode 100644 index 0000000..9771306 --- /dev/null +++ b/packages/cli/src/commands/packages/outdated.ts @@ -0,0 +1,70 @@ +import { listCachedBundles } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; +import { table } from "../../utils/format.js"; + +export interface OutdatedEntry { + name: string; + current: string; + latest: string; + pulledAt: string; +} + +export interface OutdatedOptions { + json?: boolean; +} + +/** + * Check all cached registry bundles against the registry and return those + * that have a newer version available. + */ +export async function getOutdatedBundles(): Promise { + const cached = listCachedBundles(); + if (cached.length === 0) return []; + + const client = createClient(); + const results: OutdatedEntry[] = []; + + await Promise.all( + cached.map(async (bundle) => { + try { + const detail = await client.getBundle(bundle.name); + if (detail.latest_version !== bundle.version) { + results.push({ + name: bundle.name, + current: bundle.version, + latest: detail.latest_version, + pulledAt: bundle.pulledAt, + }); + } + } catch { + // Skip bundles that fail to resolve (e.g. 
deleted from registry) + } + }), + ); + + return results.sort((a, b) => a.name.localeCompare(b.name)); +} + +export async function handleOutdated(options: OutdatedOptions = {}): Promise { + process.stderr.write("=> Checking for updates...\n"); + + const outdated = await getOutdatedBundles(); + + if (options.json) { + console.log(JSON.stringify(outdated, null, 2)); + return; + } + + if (outdated.length === 0) { + console.log("All cached bundles are up to date."); + return; + } + + console.log( + table( + ["Bundle", "Current", "Latest", "Pulled"], + outdated.map((e) => [e.name, e.current, e.latest, e.pulledAt]), + ), + ); + console.log(`\n${outdated.length} bundle(s) can be updated. Run 'mpak update' to update all.`); +} diff --git a/packages/cli/src/commands/packages/run.test.ts b/packages/cli/src/commands/packages/run.test.ts index a78a260..6038059 100644 --- a/packages/cli/src/commands/packages/run.test.ts +++ b/packages/cli/src/commands/packages/run.test.ts @@ -3,7 +3,6 @@ import { homedir } from "os"; import { join } from "path"; import { parsePackageSpec, - getCacheDir, resolveArgs, resolveWorkspace, substituteUserConfig, @@ -11,6 +10,7 @@ import { getLocalCacheDir, localBundleNeedsExtract, } from "./run.js"; +import { getCacheDir } from "../../utils/cache.js"; describe("parsePackageSpec", () => { describe("scoped packages", () => { diff --git a/packages/cli/src/commands/packages/run.ts b/packages/cli/src/commands/packages/run.ts index 64077a8..93109b6 100644 --- a/packages/cli/src/commands/packages/run.ts +++ b/packages/cli/src/commands/packages/run.ts @@ -1,8 +1,7 @@ -import { execFileSync, spawn, spawnSync } from "child_process"; +import { spawn, spawnSync } from "child_process"; import { createInterface } from "readline"; import { existsSync, - mkdirSync, readFileSync, writeFileSync, chmodSync, @@ -11,9 +10,17 @@ import { } from "fs"; import { createHash } from "crypto"; import { homedir } from "os"; -import { join, dirname, resolve, basename } from "path"; 
-import { MpakClient } from "@nimblebrain/mpak-sdk"; +import { join, resolve, basename } from "path"; import { createClient } from "../../utils/client.js"; +import { + getCacheDir, + getCacheMetadata, + checkForUpdateAsync, + extractZip, + resolveBundle, + downloadAndExtract, +} from "../../utils/cache.js"; +import type { CacheMetadata } from "../../utils/cache.js"; import { ConfigManager } from "../../utils/config-manager.js"; export interface RunOptions { @@ -52,12 +59,6 @@ interface McpbManifest { }; } -interface CacheMetadata { - version: string; - pulledAt: string; - platform: { os: string; arch: string }; -} - /** * Parse package specification into name and version * @example parsePackageSpec('@scope/name') => { name: '@scope/name' } @@ -83,98 +84,6 @@ export function parsePackageSpec(spec: string): { return { name, version }; } -/** - * Get cache directory for a package - * @example getCacheDir('@scope/name') => '~/.mpak/cache/scope-name' - */ -export function getCacheDir(packageName: string): string { - const cacheBase = join(homedir(), ".mpak", "cache"); - // @scope/name -> scope/name - const safeName = packageName.replace("@", "").replace("/", "-"); - return join(cacheBase, safeName); -} - -/** - * Read cache metadata - */ -function getCacheMetadata(cacheDir: string): CacheMetadata | null { - const metaPath = join(cacheDir, ".mpak-meta.json"); - if (!existsSync(metaPath)) { - return null; - } - try { - return JSON.parse(readFileSync(metaPath, "utf8")); - } catch { - return null; - } -} - -/** - * Write cache metadata - */ -function writeCacheMetadata( - cacheDir: string, - metadata: CacheMetadata, -): void { - const metaPath = join(cacheDir, ".mpak-meta.json"); - writeFileSync(metaPath, JSON.stringify(metadata, null, 2)); -} - -/** - * Maximum allowed uncompressed size for a bundle (500MB). - * Protects against zip bombs that could exhaust disk space. 
- */ -const MAX_UNCOMPRESSED_SIZE = 500 * 1024 * 1024; - -/** - * Extract ZIP file to directory (simple implementation without external deps) - */ -async function extractZip( - zipPath: string, - destDir: string, -): Promise { - // Use native unzip command (available on macOS, Linux, and Windows with WSL) - // Check uncompressed size before extraction to prevent zip bombs - try { - const listOutput = execFileSync('unzip', ['-l', zipPath], { - stdio: "pipe", - encoding: "utf8", - }); - const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); - if (totalMatch) { - const totalSize = parseInt(totalMatch[1]!, 10); - if (totalSize > MAX_UNCOMPRESSED_SIZE) { - throw new Error( - `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, - ); - } - } - } catch (error: unknown) { - if ( - error instanceof Error && - error.message.includes("exceeds maximum allowed") - ) { - throw error; - } - // Fail closed: if we can't verify the size, don't extract - const message = error instanceof Error ? error.message : String(error); - throw new Error(`Cannot verify bundle size before extraction: ${message}`); - } - - // Ensure destination exists - mkdirSync(destDir, { recursive: true }); - - try { - execFileSync('unzip', ['-o', '-q', zipPath, '-d', destDir], { - stdio: "pipe", - }); - } catch (error: unknown) { - const message = - error instanceof Error ? 
error.message : String(error); - throw new Error(`Failed to extract bundle: ${message}`); - } -} - /** * Read manifest from extracted bundle */ @@ -415,23 +324,6 @@ function findPythonCommand(): string { return "python"; } -/** - * Download a bundle to a file path - */ -async function downloadBundle( - downloadUrl: string, - outputPath: string, -): Promise { - const response = await fetch(downloadUrl); - if (!response.ok) { - throw new Error( - `Failed to download bundle: ${response.statusText}`, - ); - } - const arrayBuffer = await response.arrayBuffer(); - writeFileSync(outputPath, Buffer.from(arrayBuffer)); -} - /** * Run a package from the registry or a local bundle file */ @@ -449,6 +341,8 @@ export async function handleRun( let cacheDir: string; let packageName: string; + let registryClient: ReturnType | null = null; + let cachedMeta: CacheMetadata | null = null; if (options.local) { // === LOCAL BUNDLE MODE === @@ -480,12 +374,11 @@ export async function handleRun( if (existsSync(cacheDir)) { rmSync(cacheDir, { recursive: true, force: true }); } - mkdirSync(cacheDir, { recursive: true }); process.stderr.write( `=> Extracting ${basename(bundlePath)}...\n`, ); - await extractZip(bundlePath, cacheDir); + extractZip(bundlePath, cacheDir); // Write local metadata writeFileSync( @@ -506,12 +399,11 @@ export async function handleRun( const { name, version: requestedVersion } = parsePackageSpec(packageSpec); packageName = name; - const client = createClient(); - const platform = MpakClient.detectPlatform(); + registryClient = createClient(); cacheDir = getCacheDir(name); let needsPull = true; - const cachedMeta = getCacheMetadata(cacheDir); + cachedMeta = getCacheMetadata(cacheDir); // Check if we have a cached version if (cachedMeta && !options.update) { @@ -525,59 +417,19 @@ export async function handleRun( } if (needsPull) { - // Fetch download info - const downloadInfo = await client.getBundleDownload( - name, - requestedVersion || "latest", - platform, - ); - 
const bundle = downloadInfo.bundle; + const downloadInfo = await resolveBundle(name, registryClient, requestedVersion); // Check if cached version is already the latest if ( cachedMeta && - cachedMeta.version === bundle.version && + cachedMeta.version === downloadInfo.bundle.version && !options.update ) { needsPull = false; } if (needsPull) { - // Download to temp file - const tempPath = join( - homedir(), - ".mpak", - "tmp", - `${Date.now()}.mcpb`, - ); - mkdirSync(dirname(tempPath), { recursive: true }); - - process.stderr.write( - `=> Pulling ${name}@${bundle.version}...\n`, - ); - await downloadBundle(downloadInfo.url, tempPath); - - // Clear old cache and extract - if (existsSync(cacheDir)) { - rmSync(cacheDir, { recursive: true, force: true }); - } - mkdirSync(cacheDir, { recursive: true }); - - await extractZip(tempPath, cacheDir); - - // Write metadata - writeCacheMetadata(cacheDir, { - version: bundle.version, - pulledAt: new Date().toISOString(), - platform: bundle.platform, - }); - - // Cleanup temp file - rmSync(tempPath, { force: true }); - - process.stderr.write( - `=> Cached ${name}@${bundle.version}\n`, - ); + ({ cacheDir } = await downloadAndExtract(name, downloadInfo)); } } } @@ -678,12 +530,22 @@ export async function handleRun( cwd: cacheDir, }); + // Fire-and-forget update check for registry bundles + let updateCheckPromise: Promise | null = null; + if (!options.local && registryClient && cachedMeta) { + updateCheckPromise = checkForUpdateAsync(packageName, cachedMeta, cacheDir, registryClient); + } + // Forward signals process.on("SIGINT", () => child.kill("SIGINT")); process.on("SIGTERM", () => child.kill("SIGTERM")); // Wait for exit - child.on("exit", (code) => { + child.on("exit", async (code) => { + // Let the update check finish before exiting (but don't block indefinitely) + if (updateCheckPromise) { + await Promise.race([updateCheckPromise, new Promise((r) => setTimeout(r, 3000))]); + } process.exit(code ?? 
0); }); diff --git a/packages/cli/src/commands/packages/update.test.ts b/packages/cli/src/commands/packages/update.test.ts new file mode 100644 index 0000000..e9a2a61 --- /dev/null +++ b/packages/cli/src/commands/packages/update.test.ts @@ -0,0 +1,142 @@ +import { describe, it, expect, vi, beforeEach } from "vitest"; +import { handleUpdate } from "./update.js"; + +vi.mock("../../utils/cache.js", () => ({ + resolveBundle: vi.fn(), + downloadAndExtract: vi.fn(), +})); + +vi.mock("../../utils/client.js", () => ({ + createClient: vi.fn(() => ({ getBundle: vi.fn() })), +})); + +vi.mock("./outdated.js", () => ({ + getOutdatedBundles: vi.fn(), +})); + +import { resolveBundle, downloadAndExtract } from "../../utils/cache.js"; +import { getOutdatedBundles } from "./outdated.js"; + +const mockResolveBundle = vi.mocked(resolveBundle); +const mockDownloadAndExtract = vi.mocked(downloadAndExtract); +const mockGetOutdatedBundles = vi.mocked(getOutdatedBundles); + +const fakeDownloadInfo = { + url: "https://example.com/bundle.mcpb", + bundle: { version: "2.0.0", platform: { os: "darwin", arch: "arm64" } }, +}; + +beforeEach(() => { + vi.clearAllMocks(); + vi.spyOn(console, "log").mockImplementation(() => {}); + vi.spyOn(process.stderr, "write").mockImplementation(() => true); +}); + +describe("handleUpdate", () => { + describe("single bundle", () => { + it("resolves then downloads and reports the updated version", async () => { + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", {}); + + expect(mockResolveBundle).toHaveBeenCalledTimes(1); + expect(mockResolveBundle.mock.calls[0]![0]).toBe("@scope/a"); + expect(mockDownloadAndExtract).toHaveBeenCalledWith("@scope/a", fakeDownloadInfo); + expect(console.log).toHaveBeenCalledWith("Updated @scope/a to 2.0.0"); + }); + + it("outputs JSON when --json flag is set", async () => { + 
mockResolveBundle.mockResolvedValue(fakeDownloadInfo); + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", { json: true }); + + expect(console.log).toHaveBeenCalledWith( + JSON.stringify({ name: "@scope/a", version: "2.0.0" }, null, 2), + ); + }); + + it("does not call getOutdatedBundles", async () => { + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate("@scope/a", {}); + + expect(mockGetOutdatedBundles).not.toHaveBeenCalled(); + }); + }); + + describe("update all", () => { + it("reports all up to date when nothing is outdated", async () => { + mockGetOutdatedBundles.mockResolvedValue([]); + + await handleUpdate(undefined, {}); + + expect(console.log).toHaveBeenCalledWith("All cached bundles are up to date."); + expect(mockDownloadAndExtract).not.toHaveBeenCalled(); + }); + + it("updates all outdated bundles", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + const infoA = { ...fakeDownloadInfo, bundle: { ...fakeDownloadInfo.bundle, version: "2.0.0" } }; + const infoB = { ...fakeDownloadInfo, bundle: { ...fakeDownloadInfo.bundle, version: "1.1.0" } }; + mockResolveBundle + .mockResolvedValueOnce(infoA) + .mockResolvedValueOnce(infoB); + mockDownloadAndExtract + .mockResolvedValueOnce({ cacheDir: "/cache/a", version: "2.0.0" }) + .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); + + await handleUpdate(undefined, {}); + + expect(mockDownloadAndExtract).toHaveBeenCalledTimes(2); + expect(console.log).toHaveBeenCalledWith("Updated @scope/a: 1.0.0 -> 2.0.0"); + expect(console.log).toHaveBeenCalledWith("Updated @scope/b: 1.0.0 -> 1.1.0"); + }); + + it("continues 
updating when one bundle fails", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + { name: "@scope/b", current: "1.0.0", latest: "1.1.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + mockResolveBundle + .mockRejectedValueOnce(new Error("Network error")) + .mockResolvedValueOnce(fakeDownloadInfo); + mockDownloadAndExtract + .mockResolvedValueOnce({ cacheDir: "/cache/b", version: "1.1.0" }); + + await handleUpdate(undefined, {}); + + expect(process.stderr.write).toHaveBeenCalledWith( + expect.stringContaining("Failed to update @scope/a"), + ); + expect(console.log).toHaveBeenCalledWith("Updated @scope/b: 1.0.0 -> 1.1.0"); + }); + + it("outputs JSON when --json flag is set", async () => { + mockGetOutdatedBundles.mockResolvedValue([ + { name: "@scope/a", current: "1.0.0", latest: "2.0.0", pulledAt: "2025-01-01T00:00:00.000Z" }, + ]); + mockResolveBundle.mockResolvedValue(fakeDownloadInfo); + mockDownloadAndExtract.mockResolvedValue({ cacheDir: "/cache/a", version: "2.0.0" }); + + await handleUpdate(undefined, { json: true }); + + expect(console.log).toHaveBeenCalledWith( + JSON.stringify([{ name: "@scope/a", from: "1.0.0", to: "2.0.0" }], null, 2), + ); + }); + + it("outputs empty JSON array when nothing is outdated with --json", async () => { + mockGetOutdatedBundles.mockResolvedValue([]); + + await handleUpdate(undefined, { json: true }); + + expect(console.log).toHaveBeenCalledWith(JSON.stringify([], null, 2)); + }); + }); +}); diff --git a/packages/cli/src/commands/packages/update.ts b/packages/cli/src/commands/packages/update.ts new file mode 100644 index 0000000..baed8f7 --- /dev/null +++ b/packages/cli/src/commands/packages/update.ts @@ -0,0 +1,70 @@ +import { downloadAndExtract, resolveBundle } from "../../utils/cache.js"; +import { createClient } from "../../utils/client.js"; +import { fmtError } from "../../utils/format.js"; +import { 
getOutdatedBundles } from "./outdated.js"; + +export interface UpdateOptions { + json?: boolean; +} + +export async function handleUpdate( + packageName: string | undefined, + options: UpdateOptions = {}, +): Promise { + const client = createClient(); + + if (packageName) { + // Update a single bundle + const downloadInfo = await resolveBundle(packageName, client); + const { version } = await downloadAndExtract(packageName, downloadInfo); + if (options.json) { + console.log(JSON.stringify({ name: packageName, version }, null, 2)); + } else { + console.log(`Updated ${packageName} to ${version}`); + } + return; + } + + // No name given — find and update all outdated bundles + process.stderr.write("=> Checking for updates...\n"); + const outdated = await getOutdatedBundles(); + + if (outdated.length === 0) { + if (options.json) { + console.log(JSON.stringify([], null, 2)); + } else { + console.log("All cached bundles are up to date."); + } + return; + } + + process.stderr.write( + `=> ${outdated.length} bundle(s) to update\n`, + ); + + const updated: Array<{ name: string; from: string; to: string }> = []; + + for (const entry of outdated) { + try { + const downloadInfo = await resolveBundle(entry.name, client); + const { version } = await downloadAndExtract(entry.name, downloadInfo); + updated.push({ name: entry.name, from: entry.current, to: version }); + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : String(error); + process.stderr.write(`=> Failed to update ${entry.name}: ${message}\n`); + } + } + + if (options.json) { + console.log(JSON.stringify(updated, null, 2)); + return; + } + + if (updated.length === 0) { + fmtError("All updates failed."); + } + + for (const u of updated) { + console.log(`Updated ${u.name}: ${u.from} -> ${u.to}`); + } +} diff --git a/packages/cli/src/program.ts b/packages/cli/src/program.ts index 4daaef9..1b3661e 100644 --- a/packages/cli/src/program.ts +++ b/packages/cli/src/program.ts @@ -5,6 +5,8 @@ import { handleSearch } from "./commands/packages/search.js"; import { handleShow } from "./commands/packages/show.js"; import { handlePull } from "./commands/packages/pull.js"; import { handleRun } from "./commands/packages/run.js"; +import { handleOutdated } from "./commands/packages/outdated.js"; +import { handleUpdate } from "./commands/packages/update.js"; import { handleConfigSet, handleConfigGet, @@ -68,6 +70,26 @@ export function createProgram(): Command { await handleRun(packageSpec || "", options); }); + // ========================================================================== + // Top-level outdated / update aliases + // ========================================================================== + + program + .command("outdated") + .description('Check cached bundles for updates (alias for "bundle outdated")') + .option("--json", "Output as JSON") + .action(async (options) => { + await handleOutdated(options); + }); + + program + .command("update [package]") + .description('Update cached bundles (alias for "bundle update")') + .option("--json", "Output as JSON") + .action(async (packageName, options) => { + await handleUpdate(packageName, options); + }); + // ========================================================================== // Bundle namespace (MCP bundles) // ========================================================================== @@ -114,6 +136,22 @@ export function createProgram(): Command { 
await handleRun(packageSpec || "", options); }); + bundle + .command("outdated") + .description("Check cached bundles for available updates") + .option("--json", "Output as JSON") + .action(async (options) => { + await handleOutdated(options); + }); + + bundle + .command("update [package]") + .description("Update cached bundles to latest versions") + .option("--json", "Output as JSON") + .action(async (packageName, options) => { + await handleUpdate(packageName, options); + }); + // ========================================================================== // Skill namespace (Agent Skills) // ========================================================================== diff --git a/packages/cli/src/utils/cache.test.ts b/packages/cli/src/utils/cache.test.ts new file mode 100644 index 0000000..9fb6e1a --- /dev/null +++ b/packages/cli/src/utils/cache.test.ts @@ -0,0 +1,185 @@ +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import { mkdirSync, rmSync, writeFileSync } from "fs"; +import { join } from "path"; +import { tmpdir } from "os"; +import { listCachedBundles } from "./cache.js"; + +/** + * Creates a fake cached bundle directory with manifest.json and .mpak-meta.json. 
+ */ +function seedBundle( + cacheBase: string, + dirName: string, + manifest: { name: string; version: string }, + meta: { version: string; pulledAt: string; platform: { os: string; arch: string } }, +): void { + const dir = join(cacheBase, dirName); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), JSON.stringify(manifest)); + writeFileSync(join(dir, ".mpak-meta.json"), JSON.stringify(meta)); +} + +describe("listCachedBundles", () => { + let tempCacheBase: string; + const originalHome = process.env["HOME"]; + + beforeEach(() => { + // Create a temp dir that acts as ~/.mpak/cache/ + const tempHome = join(tmpdir(), `mpak-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + tempCacheBase = join(tempHome, ".mpak", "cache"); + mkdirSync(tempCacheBase, { recursive: true }); + process.env["HOME"] = tempHome; + }); + + afterEach(() => { + process.env["HOME"] = originalHome; + // Clean up temp dir (parent of .mpak) + const tempHome = tempCacheBase.replace("/.mpak/cache", ""); + rmSync(tempHome, { recursive: true, force: true }); + }); + + it("returns empty array when cache dir does not exist", () => { + // Point HOME to a dir with no .mpak/cache + const emptyHome = join(tmpdir(), `mpak-empty-${Date.now()}`); + mkdirSync(emptyHome, { recursive: true }); + process.env["HOME"] = emptyHome; + + expect(listCachedBundles()).toEqual([]); + + rmSync(emptyHome, { recursive: true, force: true }); + }); + + it("returns empty array when cache dir is empty", () => { + expect(listCachedBundles()).toEqual([]); + }); + + it("returns cached bundles with correct metadata", () => { + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]).toEqual({ + name: "@nimblebraininc/echo", + version: 
"1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + cacheDir: join(tempCacheBase, "nimblebraininc-echo"), + }); + }); + + it("returns multiple cached bundles", () => { + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + seedBundle(tempCacheBase, "nimblebraininc-todoist", { + name: "@nimblebraininc/todoist", + version: "2.1.0", + }, { + version: "2.1.0", + pulledAt: "2025-03-14T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(2); + expect(result.map((b) => b.name).sort()).toEqual([ + "@nimblebraininc/echo", + "@nimblebraininc/todoist", + ]); + }); + + it("skips _local directory", () => { + // Create a _local dir with bundle-like contents + const localDir = join(tempCacheBase, "_local"); + mkdirSync(localDir, { recursive: true }); + writeFileSync(join(localDir, "manifest.json"), JSON.stringify({ name: "local-dev" })); + writeFileSync(join(localDir, ".mpak-meta.json"), JSON.stringify({ version: "0.0.1" })); + + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@nimblebraininc/echo"); + }); + + it("skips directories without .mpak-meta.json", () => { + const dir = join(tempCacheBase, "no-meta"); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), JSON.stringify({ name: "@scope/no-meta" })); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips directories without manifest.json", () => { + const dir = join(tempCacheBase, "no-manifest"); + mkdirSync(dir, { recursive: true }); + writeFileSync( + 
join(dir, ".mpak-meta.json"), + JSON.stringify({ version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", platform: { os: "darwin", arch: "arm64" } }), + ); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips directories with corrupt manifest.json", () => { + const dir = join(tempCacheBase, "corrupt"); + mkdirSync(dir, { recursive: true }); + writeFileSync(join(dir, "manifest.json"), "not json{{{"); + writeFileSync( + join(dir, ".mpak-meta.json"), + JSON.stringify({ version: "1.0.0", pulledAt: "2025-01-01T00:00:00.000Z", platform: { os: "darwin", arch: "arm64" } }), + ); + + expect(listCachedBundles()).toEqual([]); + }); + + it("skips files in cache dir (only reads directories)", () => { + writeFileSync(join(tempCacheBase, "stray-file.txt"), "hello"); + + seedBundle(tempCacheBase, "nimblebraininc-echo", { + name: "@nimblebraininc/echo", + version: "1.0.0", + }, { + version: "1.0.0", + pulledAt: "2025-02-16T00:00:00.000Z", + platform: { os: "darwin", arch: "arm64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + }); + + it("reads name from manifest.json, not directory name", () => { + seedBundle(tempCacheBase, "weird-dir-name", { + name: "@actual/package-name", + version: "3.0.0", + }, { + version: "3.0.0", + pulledAt: "2025-01-01T00:00:00.000Z", + platform: { os: "linux", arch: "x64" }, + }); + + const result = listCachedBundles(); + expect(result).toHaveLength(1); + expect(result[0]!.name).toBe("@actual/package-name"); + }); +}); diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts new file mode 100644 index 0000000..5b002c1 --- /dev/null +++ b/packages/cli/src/utils/cache.ts @@ -0,0 +1,251 @@ +import { + existsSync, + mkdirSync, + readdirSync, + readFileSync, + rmSync, + writeFileSync, +} from "fs"; +import { execFileSync } from "child_process"; +import { homedir } from "os"; +import { dirname, join } from "path"; +import { MpakClient } from "@nimblebrain/mpak-sdk"; + +export interface 
CacheMetadata { + version: string; + pulledAt: string; + lastCheckedAt?: string; + platform: { os: string; arch: string }; +} + +/** + * Get cache directory for a package + * @example getCacheDir('@scope/name') => '~/.mpak/cache/scope-name' + */ +export function getCacheDir(packageName: string): string { + const cacheBase = join(homedir(), ".mpak", "cache"); + // @scope/name -> scope-name + const safeName = packageName.replace("@", "").replace("/", "-"); + return join(cacheBase, safeName); +} + +/** + * Read cache metadata + */ +export function getCacheMetadata(cacheDir: string): CacheMetadata | null { + const metaPath = join(cacheDir, ".mpak-meta.json"); + if (!existsSync(metaPath)) { + return null; + } + try { + return JSON.parse(readFileSync(metaPath, "utf8")); + } catch { + return null; + } +} + +/** + * Write cache metadata + */ +export function writeCacheMetadata( + cacheDir: string, + metadata: CacheMetadata, +): void { + const metaPath = join(cacheDir, ".mpak-meta.json"); + writeFileSync(metaPath, JSON.stringify(metadata, null, 2)); +} + +const UPDATE_CHECK_TTL_MS = 60 * 60 * 1000; // 1 hour + +/** + * Fire-and-forget background check for bundle updates. + * Prints a notice to stderr if a newer version exists. + * Silently swallows all errors. 
+ */ +export async function checkForUpdateAsync( + packageName: string, + cachedMeta: CacheMetadata, + cacheDir: string, + client: MpakClient, +): Promise { + try { + // Skip if checked within the TTL + if (cachedMeta.lastCheckedAt) { + const elapsed = Date.now() - new Date(cachedMeta.lastCheckedAt).getTime(); + if (elapsed < UPDATE_CHECK_TTL_MS) { + return; + } + } + + const detail = await client.getBundle(packageName); + + // Update lastCheckedAt regardless of whether there's an update + writeCacheMetadata(cacheDir, { + ...cachedMeta, + lastCheckedAt: new Date().toISOString(), + }); + + if (detail.latest_version !== cachedMeta.version) { + process.stderr.write( + `\n=> Update available: ${packageName} ${cachedMeta.version} -> ${detail.latest_version}\n` + + ` Run 'mpak run ${packageName} --update' to update\n`, + ); + } + } catch { + // Silently swallow all errors (network down, registry unreachable, etc.) + } +} + +export interface CachedBundle { + name: string; + version: string; + pulledAt: string; + cacheDir: string; +} + +/** + * Scan ~/.mpak/cache/ and return metadata for every cached registry bundle. + * Skips the _local/ directory (local dev bundles). 
+ */ +export function listCachedBundles(): CachedBundle[] { + const cacheBase = join(homedir(), ".mpak", "cache"); + if (!existsSync(cacheBase)) return []; + + const entries = readdirSync(cacheBase, { withFileTypes: true }); + const bundles: CachedBundle[] = []; + + for (const entry of entries) { + if (!entry.isDirectory() || entry.name === "_local") continue; + + const dir = join(cacheBase, entry.name); + const meta = getCacheMetadata(dir); + if (!meta) continue; + + const manifestPath = join(dir, "manifest.json"); + if (!existsSync(manifestPath)) continue; + + try { + const manifest = JSON.parse(readFileSync(manifestPath, "utf8")); + bundles.push({ + name: manifest.name, + version: meta.version, + pulledAt: meta.pulledAt, + cacheDir: dir, + }); + } catch { + // Skip corrupt bundles + } + } + + return bundles; +} + +/** + * Maximum allowed uncompressed size for a bundle (500MB). + */ +const MAX_UNCOMPRESSED_SIZE = 500 * 1024 * 1024; + +/** + * Check uncompressed size and extract a ZIP file to a directory. + * Rejects bundles exceeding MAX_UNCOMPRESSED_SIZE (zip bomb protection). + */ +export function extractZip(zipPath: string, destDir: string): void { + // Check uncompressed size before extraction + try { + const listOutput = execFileSync("unzip", ["-l", zipPath], { + stdio: "pipe", + encoding: "utf8", + }); + const totalMatch = listOutput.match(/^\s*(\d+)\s+\d+\s+files?$/m); + if (totalMatch) { + const totalSize = parseInt(totalMatch[1]!, 10); + if (totalSize > MAX_UNCOMPRESSED_SIZE) { + throw new Error( + `Bundle uncompressed size (${Math.round(totalSize / 1024 / 1024)}MB) exceeds maximum allowed (${MAX_UNCOMPRESSED_SIZE / (1024 * 1024)}MB)`, + ); + } + } + } catch (error: unknown) { + if ( + error instanceof Error && + error.message.includes("exceeds maximum allowed") + ) { + throw error; + } + const message = error instanceof Error ? 
error.message : String(error); + throw new Error(`Cannot verify bundle size before extraction: ${message}`); + } + + mkdirSync(destDir, { recursive: true }); + execFileSync("unzip", ["-o", "-q", zipPath, "-d", destDir], { + stdio: "pipe", + }); +} + +export interface BundleDownloadInfo { + url: string; + bundle: { version: string; platform: { os: string; arch: string } }; +} + +/** + * Resolve a bundle from the registry without downloading it. + * Returns the download URL and resolved version/platform metadata. + */ +export async function resolveBundle( + name: string, + client: MpakClient, + requestedVersion?: string, +): Promise { + const platform = MpakClient.detectPlatform(); + return client.getBundleDownload( + name, + requestedVersion || "latest", + platform, + ); +} + +/** + * Download a bundle using pre-resolved download info, extract it into the + * cache, and write metadata. Returns the cache directory path. + */ +export async function downloadAndExtract( + name: string, + downloadInfo: BundleDownloadInfo, +): Promise<{ cacheDir: string; version: string }> { + const bundle = downloadInfo.bundle; + const cacheDir = getCacheDir(name); + + // Download to temp file + const tempPath = join(homedir(), ".mpak", "tmp", `${Date.now()}.mcpb`); + mkdirSync(dirname(tempPath), { recursive: true }); + + process.stderr.write(`=> Pulling ${name}@${bundle.version}...\n`); + + const response = await fetch(downloadInfo.url); + if (!response.ok) { + throw new Error(`Failed to download bundle: ${response.statusText}`); + } + const arrayBuffer = await response.arrayBuffer(); + writeFileSync(tempPath, Buffer.from(arrayBuffer)); + + // Clear old cache and extract + if (existsSync(cacheDir)) { + rmSync(cacheDir, { recursive: true, force: true }); + } + + extractZip(tempPath, cacheDir); + + // Write metadata + writeCacheMetadata(cacheDir, { + version: bundle.version, + pulledAt: new Date().toISOString(), + platform: bundle.platform, + }); + + // Cleanup temp file + 
rmSync(tempPath, { force: true }); + + process.stderr.write(`=> Cached ${name}@${bundle.version}\n`); + + return { cacheDir, version: bundle.version }; +}