Skip to content

Commit d7ddf59

Browse files
committed
feat(build): add gzip-compressed binary downloads

Reduce download size from ~99 MB to ~37 MB (~60% reduction) for both the
install script and the CLI self-upgrade path.

Build script (CI only):
- Compress each binary with gzip level 6 via node:zlib after build
- Upload .gz files alongside raw binaries for backward compatibility

Upgrade path (src/lib/upgrade.ts):
- Try {url}.gz first with streaming DecompressionStream decompression
- Use manual for-await writer loop to work around Bun.write streaming bug
  (oven-sh/bun#13237)
- Fall back to raw binary download on any failure

Install script:
- Try curl | gunzip for the .gz URL first (gunzip is POSIX-universal)
- Fall back to raw curl download for older releases without .gz assets

No CI or Craft config changes needed — existing globs already match .gz files.
1 parent eb898ac commit d7ddf59

File tree

4 files changed

+145
-25
lines changed

4 files changed

+145
-25
lines changed

install

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,16 @@ tmp_binary="${tmpdir}/sentry-install-$$${suffix}"
104104
trap 'rm -f "$tmp_binary"' EXIT
105105

106106
echo -e "${MUTED}Downloading sentry v${version}...${NC}"
107-
curl -fsSL --progress-bar "$url" -o "$tmp_binary"
107+
108+
# Try gzip-compressed download first (~60% smaller, ~37 MB vs ~99 MB).
109+
# gunzip is POSIX and available on all Unix systems.
110+
# Falls back to raw binary if the .gz asset doesn't exist yet.
111+
if curl -fsSL "${url}.gz" 2>/dev/null | gunzip > "$tmp_binary" 2>/dev/null; then
112+
: # Compressed download succeeded
113+
else
114+
curl -fsSL --progress-bar "$url" -o "$tmp_binary"
115+
fi
116+
108117
chmod +x "$tmp_binary"
109118

110119
# Delegate installation and configuration to the binary itself.

script/build.ts

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
#!/usr/bin/env bun
2+
23
/**
34
* Build script for Sentry CLI
45
*
@@ -19,9 +20,13 @@
1920
* sentry-windows-x64.exe
2021
*/
2122

23+
import { promisify } from "node:util";
24+
import { gzip } from "node:zlib";
2225
import { $ } from "bun";
2326
import pkg from "../package.json";
2427

28+
const gzipAsync = promisify(gzip);
29+
2530
const VERSION = pkg.version;
2631

2732
/** Build-time constants injected into the binary */
@@ -92,6 +97,20 @@ async function buildTarget(target: BuildTarget): Promise<boolean> {
9297
}
9398

9499
console.log(` -> ${outfile}`);
100+
101+
// In CI, create gzip-compressed copies for release downloads.
102+
// Reduces download size by ~60% (99 MB → 37 MB).
103+
if (process.env.CI) {
104+
const binary = await Bun.file(outfile).arrayBuffer();
105+
const compressed = await gzipAsync(Buffer.from(binary), { level: 6 });
106+
await Bun.write(`${outfile}.gz`, compressed);
107+
const ratio = (
108+
(1 - compressed.byteLength / binary.byteLength) *
109+
100
110+
).toFixed(0);
111+
console.log(` -> ${outfile}.gz (${ratio}% smaller)`);
112+
}
113+
95114
return true;
96115
}
97116

src/lib/upgrade.ts

Lines changed: 59 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -341,10 +341,37 @@ export type DownloadResult = {
341341
lockPath: string;
342342
};
343343

344+
/**
345+
* Stream a response body through a decompression transform and write to disk.
346+
*
347+
* Uses a manual `for await` loop with `Bun.file().writer()` instead of
348+
* `Bun.write(path, Response)` to work around a Bun event-loop bug where
349+
* streaming response bodies get GC'd before completing.
350+
* See: https://github.com/oven-sh/bun/issues/13237
351+
*
352+
* @param body - Readable stream from a fetch response
353+
* @param destPath - File path to write the decompressed output
354+
*/
355+
async function streamDecompressToFile(
356+
body: ReadableStream<Uint8Array>,
357+
destPath: string
358+
): Promise<void> {
359+
const stream = body.pipeThrough(new DecompressionStream("gzip"));
360+
const writer = Bun.file(destPath).writer();
361+
for await (const chunk of stream) {
362+
writer.write(chunk);
363+
}
364+
await writer.end();
365+
}
366+
344367
/**
345368
* Download the new binary to a temporary path and return its location.
346369
* Used by the upgrade command to download before spawning setup --install.
347370
*
371+
* Tries the gzip-compressed URL first (`{url}.gz`, ~37 MB vs ~99 MB),
372+
* falling back to the raw binary URL on any failure. The compressed
373+
* download is streamed through DecompressionStream for minimal memory usage.
374+
*
348375
* The lock is held on success so concurrent upgrades are blocked during the
349376
* download→spawn→install pipeline. The caller MUST release the lock after the
350377
* child process exits (the child may use a different install directory and
@@ -374,26 +401,42 @@ export async function downloadBinaryToTemp(
374401
// Ignore if doesn't exist
375402
}
376403

377-
// Download binary
378-
const response = await fetchWithUpgradeError(
379-
url,
380-
{ headers: getGitHubHeaders() },
381-
"GitHub"
382-
);
404+
const headers = getGitHubHeaders();
383405

384-
if (!response.ok) {
385-
throw new UpgradeError(
386-
"execution_failed",
387-
`Failed to download binary: HTTP ${response.status}`
406+
// Try gzip-compressed download first (~60% smaller)
407+
let downloaded = false;
408+
try {
409+
const gzResponse = await fetchWithUpgradeError(
410+
`${url}.gz`,
411+
{ headers },
412+
"GitHub"
388413
);
414+
if (gzResponse.ok && gzResponse.body) {
415+
await streamDecompressToFile(gzResponse.body, tempPath);
416+
downloaded = true;
417+
}
418+
} catch {
419+
// Fall through to raw download
389420
}
390421

391-
// Fully consume the response body before writing to disk.
392-
// Bun.write(path, Response) with a large streaming body can exit the
393-
// process before the download completes (Bun event-loop bug).
394-
// Materialising the body first ensures the await keeps the process alive.
395-
const body = await response.arrayBuffer();
396-
await Bun.write(tempPath, body);
422+
// Fall back to raw (uncompressed) binary
423+
if (!downloaded) {
424+
const response = await fetchWithUpgradeError(url, { headers }, "GitHub");
425+
426+
if (!response.ok) {
427+
throw new UpgradeError(
428+
"execution_failed",
429+
`Failed to download binary: HTTP ${response.status}`
430+
);
431+
}
432+
433+
// Fully consume the response body before writing to disk.
434+
// Bun.write(path, Response) with a large streaming body can exit the
435+
// process before the download completes (Bun event-loop bug).
436+
// See: https://github.com/oven-sh/bun/issues/13237
437+
const body = await response.arrayBuffer();
438+
await Bun.write(tempPath, body);
439+
}
397440

398441
// Set executable permission (Unix only)
399442
if (process.platform !== "win32") {

test/lib/upgrade.test.ts

Lines changed: 57 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -662,20 +662,22 @@ describe("executeUpgrade with curl method", () => {
662662
}
663663
});
664664

665-
test("executeUpgrade curl downloads binary to temp path", async () => {
665+
test("downloads and decompresses gzip binary when .gz URL succeeds", async () => {
666666
const mockBinaryContent = new Uint8Array([0x7f, 0x45, 0x4c, 0x46]); // ELF magic bytes
667667

668-
// Mock fetch to return our fake binary
669-
mockFetch(async () => new Response(mockBinaryContent, { status: 200 }));
668+
// Compress the mock content with gzip
669+
const gzipped = Bun.gzipSync(mockBinaryContent);
670+
671+
// Mock fetch: first call returns gzipped content (.gz URL)
672+
mockFetch(async () => new Response(gzipped, { status: 200 }));
670673

671-
// Run the actual executeUpgrade with curl method — returns DownloadResult
672674
const result = await executeUpgrade("curl", "1.0.0");
673675

674676
expect(result).not.toBeNull();
675677
expect(result).toHaveProperty("tempBinaryPath");
676678
expect(result).toHaveProperty("lockPath");
677679

678-
// Verify the binary was downloaded to the temp path
680+
// Verify the decompressed binary matches the original content
679681
const paths = getTestPaths();
680682
expect(result!.tempBinaryPath).toBe(paths.tempPath);
681683
expect(result!.lockPath).toBe(paths.lockPath);
@@ -684,7 +686,54 @@ describe("executeUpgrade with curl method", () => {
684686
expect(new Uint8Array(content)).toEqual(mockBinaryContent);
685687
});
686688

687-
test("executeUpgrade curl throws on HTTP error", async () => {
689+
test("falls back to raw binary when .gz URL returns 404", async () => {
690+
const mockBinaryContent = new Uint8Array([0x7f, 0x45, 0x4c, 0x46]); // ELF magic bytes
691+
let callCount = 0;
692+
693+
// Mock fetch: first call (for .gz) returns 404, second returns raw binary
694+
mockFetch(async () => {
695+
callCount += 1;
696+
if (callCount === 1) {
697+
return new Response("Not Found", { status: 404 });
698+
}
699+
return new Response(mockBinaryContent, { status: 200 });
700+
});
701+
702+
const result = await executeUpgrade("curl", "1.0.0");
703+
704+
expect(result).not.toBeNull();
705+
expect(callCount).toBe(2); // Both .gz and raw URL were tried
706+
707+
// Verify the binary was downloaded
708+
expect(await Bun.file(result!.tempBinaryPath).exists()).toBe(true);
709+
const content = await Bun.file(result!.tempBinaryPath).arrayBuffer();
710+
expect(new Uint8Array(content)).toEqual(mockBinaryContent);
711+
});
712+
713+
test("falls back to raw binary when .gz fetch throws network error", async () => {
714+
const mockBinaryContent = new Uint8Array([0x7f, 0x45, 0x4c, 0x46]);
715+
let callCount = 0;
716+
717+
// Mock fetch: first call throws, second returns raw binary
718+
mockFetch(async () => {
719+
callCount += 1;
720+
if (callCount === 1) {
721+
throw new TypeError("fetch failed");
722+
}
723+
return new Response(mockBinaryContent, { status: 200 });
724+
});
725+
726+
const result = await executeUpgrade("curl", "1.0.0");
727+
728+
expect(result).not.toBeNull();
729+
expect(callCount).toBe(2);
730+
731+
const content = await Bun.file(result!.tempBinaryPath).arrayBuffer();
732+
expect(new Uint8Array(content)).toEqual(mockBinaryContent);
733+
});
734+
735+
test("throws on HTTP error when both .gz and raw URLs fail", async () => {
736+
// Both .gz and raw return errors
688737
mockFetch(async () => new Response("Not Found", { status: 404 }));
689738

690739
await expect(executeUpgrade("curl", "99.99.99")).rejects.toThrow(
@@ -695,7 +744,7 @@ describe("executeUpgrade with curl method", () => {
695744
);
696745
});
697746

698-
test("executeUpgrade curl throws on network failure", async () => {
747+
test("throws on network failure when both .gz and raw URLs fail", async () => {
699748
mockFetch(async () => {
700749
throw new TypeError("fetch failed");
701750
});
@@ -706,7 +755,7 @@ describe("executeUpgrade with curl method", () => {
706755
);
707756
});
708757

709-
test("executeUpgrade curl releases lock on failure", async () => {
758+
test("releases lock on failure", async () => {
710759
mockFetch(async () => new Response("Server Error", { status: 500 }));
711760

712761
try {

0 commit comments

Comments (0)