Skip to content

Commit 5c9f7f4

Browse files
committed
fix: address review feedback — temp file cleanup and deduplicate hashing
- Add try/finally in uploadSourcemaps() to always clean up the temp ZIP file, even on error. Extracted uploadArtifactBundle() as an inner function to keep the cleanup boundary clean.
- Merge overall checksum computation into hashChunks() — maintain a running SHA-1 hasher alongside per-chunk hashing, eliminating the redundant full-file re-read.
- Use the STORE method for empty files in ZipWriter (DEFLATE of empty input confuses some extractors).
- Use an options object for uploadArtifactBundle() to stay under the 4-parameter lint limit.
1 parent 59902f3 commit 5c9f7f4

File tree

1 file changed

+42
-19
lines changed

1 file changed

+42
-19
lines changed

src/lib/api/sourcemaps.ts

Lines changed: 42 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
*/
1616

1717
import { createHash } from "node:crypto";
18-
import { open, readFile, stat } from "node:fs/promises";
18+
import { open, readFile, stat, unlink } from "node:fs/promises";
1919
import { tmpdir } from "node:os";
2020
import { join } from "node:path";
2121
import { promisify } from "node:util";
@@ -215,26 +215,28 @@ export async function buildArtifactBundle(
215215
*
216216
* @param zipPath - Path to the ZIP file
217217
* @param chunkSize - Size of each chunk in bytes
218-
* @returns Array of chunk metadata (sha1, offset, size)
218+
* @returns Per-chunk metadata and an overall SHA-1 checksum of the entire file
219219
*/
220220
export async function hashChunks(
221221
zipPath: string,
222222
chunkSize: number
223-
): Promise<ChunkInfo[]> {
223+
): Promise<{ chunks: ChunkInfo[]; overallChecksum: string }> {
224224
const fh = await open(zipPath, "r");
225225
const fileSize = (await stat(zipPath)).size;
226226
const chunks: ChunkInfo[] = [];
227+
const overallHasher = createHash("sha1");
227228

228229
for (let offset = 0; offset < fileSize; offset += chunkSize) {
229230
const size = Math.min(chunkSize, fileSize - offset);
230231
const buf = Buffer.alloc(size);
231232
await fh.read(buf, 0, size, offset);
232233
const sha1 = createHash("sha1").update(buf).digest("hex");
234+
overallHasher.update(buf);
233235
chunks.push({ sha1, offset, size });
234236
}
235237

236238
await fh.close();
237-
return chunks;
239+
return { chunks, overallChecksum: overallHasher.digest("hex") };
238240
}
239241

240242
/**
@@ -256,28 +258,49 @@ export async function uploadSourcemaps(options: UploadOptions): Promise<void> {
256258
const tmpZipPath = join(tmpdir(), `sentry-artifact-bundle-${Date.now()}.zip`);
257259
await buildArtifactBundle(tmpZipPath, files, { org, project, release });
258260

259-
// Step 3: Split into chunks and hash
260-
const chunks = await hashChunks(tmpZipPath, serverOptions.chunkSize);
261-
262-
// Compute overall file checksum
263-
const fh = await open(tmpZipPath, "r");
264-
const fileSize = (await stat(tmpZipPath)).size;
265-
const overallHasher = createHash("sha1");
266-
for (let offset = 0; offset < fileSize; offset += serverOptions.chunkSize) {
267-
const size = Math.min(serverOptions.chunkSize, fileSize - offset);
268-
const buf = Buffer.alloc(size);
269-
await fh.read(buf, 0, size, offset);
270-
overallHasher.update(buf);
261+
try {
262+
await uploadArtifactBundle({
263+
tmpZipPath,
264+
org,
265+
project,
266+
release,
267+
serverOptions,
268+
});
269+
} finally {
270+
// Always clean up the temp file
271+
await unlink(tmpZipPath).catch(() => {
272+
// Best-effort cleanup — OS temp directory will eventually purge it
273+
});
271274
}
272-
await fh.close();
273-
const overallChecksum = overallHasher.digest("hex");
275+
}
276+
277+
/**
278+
* Upload an already-built artifact bundle ZIP to Sentry.
279+
*
280+
* Handles steps 3–6 of the upload protocol: chunk + hash → assemble →
281+
* upload missing → poll. Separated from {@link uploadSourcemaps} to keep
282+
* the try/finally cleanup boundary clean.
283+
*/
284+
async function uploadArtifactBundle(opts: {
285+
tmpZipPath: string;
286+
org: string;
287+
project: string;
288+
release: string | undefined;
289+
serverOptions: ChunkServerOptions;
290+
}): Promise<void> {
291+
const { tmpZipPath, org, project, release, serverOptions } = opts;
292+
// Step 3: Split into chunks, hash each chunk + compute overall checksum
293+
const { chunks, overallChecksum } = await hashChunks(
294+
tmpZipPath,
295+
serverOptions.chunkSize
296+
);
274297

275298
const regionUrl = await resolveOrgRegion(org);
276299

277300
// Step 4: Request assembly — server tells us which chunks it needs
278301
const assembleBody = {
279302
checksum: overallChecksum,
280-
chunks: chunks.map((c) => c.sha1),
303+
chunks: chunks.map((c: ChunkInfo) => c.sha1),
281304
projects: [project],
282305
...(release ? { version: release } : {}),
283306
};

0 commit comments

Comments
 (0)