Skip to content

Commit 77a13c3

Browse files
committed
fix: wrap hashChunks file handle in try/finally
Prevents file handle leak if stat(), read(), or hash operations throw during chunk processing.
1 parent 5c9f7f4 commit 77a13c3

File tree

1 file changed

+17
-14
lines changed

1 file changed

+17
-14
lines changed

src/lib/api/sourcemaps.ts

Lines changed: 17 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -222,21 +222,24 @@ export async function hashChunks(
222222
chunkSize: number
223223
): Promise<{ chunks: ChunkInfo[]; overallChecksum: string }> {
224224
const fh = await open(zipPath, "r");
225-
const fileSize = (await stat(zipPath)).size;
226-
const chunks: ChunkInfo[] = [];
227-
const overallHasher = createHash("sha1");
228-
229-
for (let offset = 0; offset < fileSize; offset += chunkSize) {
230-
const size = Math.min(chunkSize, fileSize - offset);
231-
const buf = Buffer.alloc(size);
232-
await fh.read(buf, 0, size, offset);
233-
const sha1 = createHash("sha1").update(buf).digest("hex");
234-
overallHasher.update(buf);
235-
chunks.push({ sha1, offset, size });
236-
}
225+
try {
226+
const fileSize = (await stat(zipPath)).size;
227+
const chunks: ChunkInfo[] = [];
228+
const overallHasher = createHash("sha1");
229+
230+
for (let offset = 0; offset < fileSize; offset += chunkSize) {
231+
const size = Math.min(chunkSize, fileSize - offset);
232+
const buf = Buffer.alloc(size);
233+
await fh.read(buf, 0, size, offset);
234+
const sha1 = createHash("sha1").update(buf).digest("hex");
235+
overallHasher.update(buf);
236+
chunks.push({ sha1, offset, size });
237+
}
237238

238-
await fh.close();
239-
return { chunks, overallChecksum: overallHasher.digest("hex") };
239+
return { chunks, overallChecksum: overallHasher.digest("hex") };
240+
} finally {
241+
await fh.close();
242+
}
240243
}
241244

242245
/**

0 commit comments

Comments
 (0)