From 22f7f080abbfede502263958c2f48a74f0d9b4a6 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Wed, 1 Apr 2026 19:41:06 +0200 Subject: [PATCH 01/12] feat(data-structures/unstable): add --- data_structures/deno.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data_structures/deno.json b/data_structures/deno.json index da895e9caadc..8e1e0ca97be1 100644 --- a/data_structures/deno.json +++ b/data_structures/deno.json @@ -11,6 +11,7 @@ "./red-black-tree": "./red_black_tree.ts", "./unstable-2d-array": "./unstable_2d_array.ts", "./unstable-rolling-counter": "./unstable_rolling_counter.ts", - "./unstable-deque": "./unstable_deque.ts" + "./unstable-deque": "./unstable_deque.ts", + "./unstable-indexed-heap": "./unstable_indexed_heap.ts" } } From 96dda252a2f64a61f3b1a744743816ded71f550b Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Wed, 1 Apr 2026 19:41:16 +0200 Subject: [PATCH 02/12] feat(data-structures/unstable): add --- data_structures/unstable_indexed_heap.ts | 564 ++++++++++++++++++ data_structures/unstable_indexed_heap_test.ts | 525 ++++++++++++++++ 2 files changed, 1089 insertions(+) create mode 100644 data_structures/unstable_indexed_heap.ts create mode 100644 data_structures/unstable_indexed_heap_test.ts diff --git a/data_structures/unstable_indexed_heap.ts b/data_structures/unstable_indexed_heap.ts new file mode 100644 index 000000000000..36e486355539 --- /dev/null +++ b/data_structures/unstable_indexed_heap.ts @@ -0,0 +1,564 @@ +// Copyright 2018-2026 the Deno authors. MIT license. +// This module is browser compatible. + +/** Allows the class to mutate priority internally. */ +interface MutableEntry { + readonly key: K; + priority: number; +} + +/** + * A key-priority pair returned by {@linkcode IndexedHeap} methods. + * + * Fields are `readonly` to signal that mutating a returned entry has no + * effect on the heap. + * + * @typeParam K The type of the key. 
+ */ +export interface HeapEntry { + readonly key: K; + readonly priority: number; +} + +/** + * Read-only view of an {@linkcode IndexedHeap}. Exposes only query methods + * (`peek`, `has`, `getPriority`, `size`, `isEmpty`), hiding all methods + * that modify the heap. Follows the same pattern as `ReadonlyMap` and + * `ReadonlySet`. + * + * Note: `[Symbol.iterator]` is intentionally excluded because the heap's + * iterator is destructive (it drains all entries). + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the keys in the heap. + */ +export type ReadonlyIndexedHeap = Pick< + IndexedHeap, + | "peek" + | "has" + | "getPriority" + | "size" + | "isEmpty" +>; + +/** Throws if the priority is NaN, which would silently corrupt the heap. */ +function assertValidPriority(priority: number): void { + if (Number.isNaN(priority)) { + throw new RangeError("Cannot set priority: value is NaN"); + } +} + +/** Returns the parent index for a given child index. */ +function getParentIndex(index: number): number { + return ((index + 1) >>> 1) - 1; +} + +/** + * A priority queue that supports looking up, removing, and re-prioritizing + * entries by key. Each entry is a unique `(key, priority)` pair. The entry + * with the smallest priority is always at the front. + * + * Unlike {@linkcode BinaryHeap}, which only allows popping the top element, + * `IndexedHeap` lets you delete or update any entry by its key in + * logarithmic time. + * + * Priorities are plain numbers, always sorted smallest-first. To sort + * largest-first instead, negate the priorities. 
+ * + * | Method | Time complexity | + * | --------------------- | -------------------------------- | + * | peek() | Constant | + * | pop() | Logarithmic in the number of entries | + * | push(key, priority) | Logarithmic in the number of entries | + * | delete(key) | Logarithmic in the number of entries | + * | update(key, priority) | Logarithmic in the number of entries | + * | has(key) | Constant | + * | getPriority(key) | Constant | + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 3); + * heap.push("b", 1); + * heap.push("c", 2); + * + * assertEquals(heap.peek(), { key: "b", priority: 1 }); + * assertEquals(heap.pop(), { key: "b", priority: 1 }); + * assertEquals([...heap], [{ key: "c", priority: 2 }, { key: "a", priority: 3 }]); + * ``` + * + * @typeParam K The type of the keys in the heap. Keys are compared the + * same way as `Map` keys — by reference for objects, by value for + * primitives. + */ +export class IndexedHeap implements Iterable> { + #data: MutableEntry[] = []; + #index: Map = new Map(); + + /** Bubble the entry at `pos` up toward the root while it is smaller than its parent. */ + #siftUp(pos: number): number { + const data = this.#data; + const index = this.#index; + const entry = data[pos]!; + const priority = entry.priority; + while (pos > 0) { + const parentPos = getParentIndex(pos); + const parent = data[parentPos]!; + if (priority < parent.priority) { + data[pos] = parent; + index.set(parent.key, pos); + pos = parentPos; + } else { + break; + } + } + data[pos] = entry; + index.set(entry.key, pos); + return pos; + } + + /** Bubble the entry at `pos` down while a child is smaller. 
*/ + #siftDown(pos: number): void { + const data = this.#data; + const index = this.#index; + const size = data.length; + const entry = data[pos]!; + const priority = entry.priority; + while (true) { + const left = 2 * pos + 1; + if (left >= size) break; + const right = left + 1; + let childPos = left; + let childPri = data[left]!.priority; + if (right < size) { + const rp = data[right]!.priority; + if (rp < childPri) { + childPos = right; + childPri = rp; + } + } + if (childPri < priority) { + const child = data[childPos]!; + data[pos] = child; + index.set(child.key, pos); + pos = childPos; + } else { + break; + } + } + data[pos] = entry; + index.set(entry.key, pos); + } + + /** + * Insert a new key with the given priority. Throws if the key already + * exists — use {@linkcode IndexedHeap.prototype.update | update} or + * {@linkcode IndexedHeap.prototype.pushOrUpdate | pushOrUpdate} instead. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("task-1", 10); + * assertEquals(heap.size, 1); + * assertEquals(heap.peek(), { key: "task-1", priority: 10 }); + * ``` + * + * @param key The key to insert. + * @param priority The numeric priority (smaller = higher priority). + */ + push(key: K, priority: number): void { + assertValidPriority(priority); + if (this.#index.has(key)) { + throw new Error( + `Cannot push into IndexedHeap: key already exists`, + ); + } + const pos = this.#data.length; + this.#data.push({ key, priority }); + this.#index.set(key, pos); + this.#siftUp(pos); + } + + /** + * Remove and return the front entry (smallest priority), or `undefined` + * if the heap is empty. The returned entry is removed from the heap so + * the caller owns it. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. 
+ * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 2); + * heap.push("b", 1); + * + * assertEquals(heap.pop(), { key: "b", priority: 1 }); + * assertEquals(heap.pop(), { key: "a", priority: 2 }); + * assertEquals(heap.pop(), undefined); + * ``` + * + * @returns The front entry, or `undefined` if empty. + */ + pop(): HeapEntry | undefined { + const size = this.#data.length; + if (size === 0) return undefined; + + const root = this.#data[0]!; + this.#index.delete(root.key); + + if (size === 1) { + this.#data.pop(); + return root; + } + + const last = this.#data.pop()!; + this.#data[0] = last; + this.#index.set(last.key, 0); + this.#siftDown(0); + return root; + } + + /** + * Return the front entry (smallest priority) without removing it, or + * `undefined` if the heap is empty. + * + * The returned object is a copy; mutating it does not affect the heap. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("x", 5); + * heap.push("y", 3); + * + * assertEquals(heap.peek(), { key: "y", priority: 3 }); + * assertEquals(heap.size, 2); + * ``` + * + * @returns A copy of the front entry, or `undefined` if empty. + */ + peek(): HeapEntry | undefined { + const entry = this.#data[0]; + if (entry === undefined) return undefined; + return { key: entry.key, priority: entry.priority }; + } + + /** + * Remove the entry with the given key. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. 
+ * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 1); + * heap.push("b", 2); + * + * assertEquals(heap.delete("a"), true); + * assertEquals(heap.delete("z"), false); + * assertEquals(heap.size, 1); + * ``` + * + * @param key The key to remove. + * @returns `true` if the key was present, `false` otherwise. + */ + delete(key: K): boolean { + const pos = this.#index.get(key); + if (pos === undefined) return false; + + this.#index.delete(key); + const lastIndex = this.#data.length - 1; + + if (pos === lastIndex) { + this.#data.pop(); + return true; + } + + const last = this.#data.pop()!; + this.#data[pos] = last; + this.#index.set(last.key, pos); + + const afterUp = this.#siftUp(pos); + if (afterUp === pos) { + this.#siftDown(pos); + } + return true; + } + + /** + * Change the priority of an existing key. Throws if the key is not + * present — use {@linkcode IndexedHeap.prototype.push | push} or + * {@linkcode IndexedHeap.prototype.pushOrUpdate | pushOrUpdate} instead. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 10); + * heap.push("b", 20); + * + * heap.update("b", 1); + * assertEquals(heap.peek(), { key: "b", priority: 1 }); + * ``` + * + * @param key The key whose priority to change. + * @param priority The new priority. 
+ */ + update(key: K, priority: number): void { + assertValidPriority(priority); + const pos = this.#index.get(key); + if (pos === undefined) { + throw new Error( + `Cannot update IndexedHeap: key does not exist`, + ); + } + this.#data[pos]!.priority = priority; + const afterUp = this.#siftUp(pos); + if (afterUp === pos) { + this.#siftDown(pos); + } + } + + /** + * Insert the key if absent, or update its priority if present. This is a + * convenience method combining + * {@linkcode IndexedHeap.prototype.push | push} and + * {@linkcode IndexedHeap.prototype.update | update}. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.pushOrUpdate("a", 10); + * assertEquals(heap.getPriority("a"), 10); + * + * heap.pushOrUpdate("a", 5); + * assertEquals(heap.getPriority("a"), 5); + * ``` + * + * @param key The key to insert or update. + * @param priority The priority to set. + */ + pushOrUpdate(key: K, priority: number): void { + assertValidPriority(priority); + if (this.#index.has(key)) { + this.update(key, priority); + } else { + this.push(key, priority); + } + } + + /** + * Check whether the key is in the heap. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 1); + * + * assertEquals(heap.has("a"), true); + * assertEquals(heap.has("b"), false); + * ``` + * + * @param key The key to look up. + * @returns `true` if the key is present, `false` otherwise. + */ + has(key: K): boolean { + return this.#index.has(key); + } + + /** + * Return the priority of the given key, or `undefined` if not present. 
+ * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 42); + * + * assertEquals(heap.getPriority("a"), 42); + * assertEquals(heap.getPriority("b"), undefined); + * ``` + * + * @param key The key to look up. + * @returns The priority of the key, or `undefined` if not present. + */ + getPriority(key: K): number | undefined { + const pos = this.#index.get(key); + if (pos === undefined) return undefined; + return this.#data[pos]!.priority; + } + + /** + * The number of entries in the heap. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * assertEquals(heap.size, 0); + * heap.push("a", 1); + * assertEquals(heap.size, 1); + * ``` + * + * @returns The number of entries in the heap. + */ + get size(): number { + return this.#data.length; + } + + /** + * Remove all entries from the heap. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 1); + * heap.push("b", 2); + * heap.clear(); + * + * assertEquals(heap.size, 0); + * assertEquals(heap.isEmpty(), true); + * ``` + */ + clear(): void { + this.#data = []; + this.#index = new Map(); + } + + /** + * Check whether the heap is empty. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. 
+ * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * assertEquals(heap.isEmpty(), true); + * + * heap.push("a", 1); + * assertEquals(heap.isEmpty(), false); + * ``` + * + * @returns `true` if the heap is empty, `false` otherwise. + */ + isEmpty(): boolean { + return this.#data.length === 0; + } + + /** + * Create an iterator that removes and yields every entry from + * smallest to largest priority. The heap is empty when the iterator + * finishes. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 3); + * heap.push("b", 1); + * heap.push("c", 2); + * + * assertEquals([...heap.drain()], [ + * { key: "b", priority: 1 }, + * { key: "c", priority: 2 }, + * { key: "a", priority: 3 }, + * ]); + * assertEquals(heap.size, 0); + * ``` + * + * @returns An iterator yielding entries from smallest to largest priority. + */ + *drain(): IterableIterator> { + while (!this.isEmpty()) { + yield this.pop() as HeapEntry; + } + } + + /** + * Create an iterator that removes and yields every entry from + * smallest to largest priority. The heap is empty afterwards. + * + * This has the same behavior as + * {@linkcode IndexedHeap.prototype.drain | drain}. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. 
+ * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * heap.push("a", 3); + * heap.push("b", 1); + * heap.push("c", 2); + * + * assertEquals([...heap], [ + * { key: "b", priority: 1 }, + * { key: "c", priority: 2 }, + * { key: "a", priority: 3 }, + * ]); + * assertEquals([...heap], []); + * ``` + * + * @returns An iterator yielding entries from smallest to largest priority. + */ + *[Symbol.iterator](): IterableIterator> { + yield* this.drain(); + } +} diff --git a/data_structures/unstable_indexed_heap_test.ts b/data_structures/unstable_indexed_heap_test.ts new file mode 100644 index 000000000000..665d5e91d8de --- /dev/null +++ b/data_structures/unstable_indexed_heap_test.ts @@ -0,0 +1,525 @@ +// Copyright 2018-2026 the Deno authors. MIT license. +import { assertEquals, assertThrows } from "@std/assert"; +import { + type HeapEntry, + IndexedHeap, + type ReadonlyIndexedHeap, +} from "./unstable_indexed_heap.ts"; + +Deno.test("IndexedHeap push / pop / peek with ascending priorities", () => { + const heap = new IndexedHeap(); + + assertEquals(heap.size, 0); + assertEquals(heap.isEmpty(), true); + assertEquals(heap.peek(), undefined); + assertEquals(heap.pop(), undefined); + + for ( + const [key, priority] of [["d", 4], ["b", 2], ["e", 5], ["a", 1], [ + "c", + 3, + ]] as const + ) { + heap.push(key, priority); + } + + assertEquals(heap.size, 5); + assertEquals(heap.isEmpty(), false); + assertEquals(heap.peek(), { key: "a", priority: 1 }); + + const popped: HeapEntry[] = []; + while (!heap.isEmpty()) { + popped.push(heap.pop()!); + } + assertEquals(popped, [ + { key: "a", priority: 1 }, + { key: "b", priority: 2 }, + { key: "c", priority: 3 }, + { key: "d", priority: 4 }, + { key: "e", priority: 5 }, + ]); + assertEquals(heap.size, 0); + assertEquals(heap.peek(), undefined); + assertEquals(heap.pop(), undefined); +}); + 
+Deno.test("IndexedHeap push throws on duplicate key", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + assertThrows( + () => heap.push("a", 2), + Error, + "Cannot push into IndexedHeap: key already exists", + ); +}); + +Deno.test("IndexedHeap delete root", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 2); + heap.push("c", 3); + + assertEquals(heap.delete("a"), true); + assertEquals(heap.size, 2); + assertEquals(heap.has("a"), false); + assertEquals(heap.peek(), { key: "b", priority: 2 }); + + assertEquals([...heap], [ + { key: "b", priority: 2 }, + { key: "c", priority: 3 }, + ]); +}); + +Deno.test("IndexedHeap delete middle element triggers sift-down", () => { + // Heap shape (array order): a=1, b=5, c=3, d=10, e=8 + // a(1) + // / \ + // b(5) c(3) + // / \ + // d(10) e(8) + // Deleting "c" (index 2) moves "e" (priority 8) into index 2. + // 8 > children? No children at that index, so it stays — but "c" is gone. + // Deleting "b" (index 1) moves last element into index 1 and sifts down. 
+ const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 5); + heap.push("c", 3); + heap.push("d", 10); + heap.push("e", 8); + + heap.delete("b"); + assertEquals(heap.size, 4); + assertEquals(heap.has("b"), false); + + const result = [...heap]; + assertEquals(result, [ + { key: "a", priority: 1 }, + { key: "c", priority: 3 }, + { key: "e", priority: 8 }, + { key: "d", priority: 10 }, + ]); +}); + +Deno.test("IndexedHeap delete last array element (no sift needed)", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 2); + + assertEquals(heap.delete("b"), true); + assertEquals(heap.size, 1); + assertEquals(heap.peek(), { key: "a", priority: 1 }); +}); + +Deno.test("IndexedHeap delete and getPriority for non-existent key", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + assertEquals(heap.delete("z"), false); + assertEquals(heap.getPriority("z"), undefined); + assertEquals(heap.size, 1); +}); + +Deno.test("IndexedHeap delete only element", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + assertEquals(heap.delete("a"), true); + assertEquals(heap.size, 0); + assertEquals(heap.isEmpty(), true); + assertEquals(heap.peek(), undefined); +}); + +Deno.test("IndexedHeap delete triggers sift-up when replacement is smaller", () => { + const heap = new IndexedHeap(); + heap.push("a", 10); + heap.push("b", 20); + heap.push("c", 15); + heap.push("d", 25); + heap.push("e", 30); + heap.push("f", 5); + + // Deleting "b" (priority 20): last element "f" (priority 5) replaces it + // and must sift up past "a" (priority 10) to become the new root. 
+ heap.delete("b"); + + assertEquals(heap.peek(), { key: "f", priority: 5 }); + const result = [...heap]; + for (let i = 1; i < result.length; i++) { + if (result[i]!.priority < result[i - 1]!.priority) { + throw new Error("Not sorted after delete-triggered sift-up"); + } + } +}); + +Deno.test("IndexedHeap update decrease-key bubbles up", () => { + const heap = new IndexedHeap(); + heap.push("a", 10); + heap.push("b", 20); + heap.push("c", 30); + + heap.update("c", 1); + assertEquals(heap.peek(), { key: "c", priority: 1 }); + assertEquals(heap.getPriority("c"), 1); +}); + +Deno.test("IndexedHeap update increase-key bubbles down", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 2); + heap.push("c", 3); + + heap.update("a", 100); + assertEquals(heap.peek(), { key: "b", priority: 2 }); + + assertEquals([...heap], [ + { key: "b", priority: 2 }, + { key: "c", priority: 3 }, + { key: "a", priority: 100 }, + ]); +}); + +Deno.test("IndexedHeap update throws for non-existent key", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + assertThrows( + () => heap.update("z", 5), + Error, + "Cannot update IndexedHeap: key does not exist", + ); +}); + +Deno.test("IndexedHeap pushOrUpdate inserts when absent, updates when present", () => { + const heap = new IndexedHeap(); + heap.pushOrUpdate("a", 10); + assertEquals(heap.size, 1); + assertEquals(heap.getPriority("a"), 10); + + heap.pushOrUpdate("a", 5); + assertEquals(heap.size, 1); + assertEquals(heap.getPriority("a"), 5); + assertEquals(heap.peek(), { key: "a", priority: 5 }); +}); + +Deno.test("IndexedHeap pushOrUpdate decrease then increase same key", () => { + const heap = new IndexedHeap(); + heap.push("a", 10); + heap.push("b", 20); + heap.push("c", 30); + + heap.pushOrUpdate("c", 1); + assertEquals(heap.peek(), { key: "c", priority: 1 }); + + heap.pushOrUpdate("c", 50); + assertEquals(heap.peek(), { key: "a", priority: 10 }); + + assertEquals([...heap], [ + { key: "a", priority: 
10 }, + { key: "b", priority: 20 }, + { key: "c", priority: 50 }, + ]); +}); + +Deno.test("IndexedHeap size tracks push, pop, delete, clear", () => { + const heap = new IndexedHeap(); + assertEquals(heap.size, 0); + + heap.push("a", 1); + assertEquals(heap.size, 1); + + heap.push("b", 2); + assertEquals(heap.size, 2); + + heap.pop(); + assertEquals(heap.size, 1); + + heap.push("c", 3); + heap.delete("b"); + assertEquals(heap.size, 1); + + heap.clear(); + assertEquals(heap.size, 0); +}); + +Deno.test("IndexedHeap iterator drains in ascending order", () => { + const heap = new IndexedHeap(); + heap.push("c", 3); + heap.push("a", 1); + heap.push("b", 2); + + assertEquals([...heap], [ + { key: "a", priority: 1 }, + { key: "b", priority: 2 }, + { key: "c", priority: 3 }, + ]); + assertEquals(heap.size, 0); + assertEquals([...heap], []); +}); + +Deno.test("IndexedHeap drain() yields in ascending order", () => { + const heap = new IndexedHeap(); + heap.push("x", 10); + heap.push("y", 5); + heap.push("z", 15); + + assertEquals([...heap.drain()], [ + { key: "y", priority: 5 }, + { key: "x", priority: 10 }, + { key: "z", priority: 15 }, + ]); + assertEquals(heap.size, 0); +}); + +Deno.test("IndexedHeap drain() on empty heap yields nothing", () => { + const heap = new IndexedHeap(); + assertEquals([...heap.drain()], []); +}); + +Deno.test("IndexedHeap peek returns a copy, not a reference", () => { + const heap = new IndexedHeap(); + heap.push("a", 10); + + const peeked = heap.peek()!; + (peeked as { priority: number }).priority = 999; + + assertEquals(heap.peek()!.priority, 10); +}); + +Deno.test("IndexedHeap is assignable to ReadonlyIndexedHeap", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 2); + + const ro: ReadonlyIndexedHeap = heap; + assertEquals(ro.peek(), { key: "a", priority: 1 }); + assertEquals(ro.has("a"), true); + assertEquals(ro.has("z"), false); + assertEquals(ro.getPriority("a"), 1); + assertEquals(ro.getPriority("z"), 
undefined); + assertEquals(ro.size, 2); + assertEquals(ro.isEmpty(), false); + + assertEquals(heap.size, 2, "heap unchanged after readonly queries"); +}); + +Deno.test("IndexedHeap handles duplicate priorities", () => { + const heap = new IndexedHeap(); + heap.push("a", 5); + heap.push("b", 5); + heap.push("c", 5); + + assertEquals(heap.size, 3); + const results: HeapEntry[] = []; + while (!heap.isEmpty()) { + results.push(heap.pop()!); + } + assertEquals(results.length, 3); + for (const entry of results) { + assertEquals(entry.priority, 5); + } + assertEquals(results.map((e) => e.key).sort(), ["a", "b", "c"]); +}); + +Deno.test("IndexedHeap with object keys uses reference identity", () => { + const keyA = { id: "a" }; + const keyB = { id: "b" }; + const keyADuplicate = { id: "a" }; + + const heap = new IndexedHeap<{ id: string }>(); + heap.push(keyA, 1); + heap.push(keyB, 2); + heap.push(keyADuplicate, 3); + + assertEquals(heap.size, 3); + assertEquals(heap.has(keyA), true); + assertEquals(heap.has(keyADuplicate), true); + + assertEquals(heap.getPriority(keyA), 1); + assertEquals(heap.getPriority(keyADuplicate), 3); +}); + +Deno.test("IndexedHeap handles negative priorities", () => { + const heap = new IndexedHeap(); + heap.push("a", -10); + heap.push("b", -5); + heap.push("c", 0); + heap.push("d", 5); + + assertEquals([...heap], [ + { key: "a", priority: -10 }, + { key: "b", priority: -5 }, + { key: "c", priority: 0 }, + { key: "d", priority: 5 }, + ]); +}); + +Deno.test("IndexedHeap handles Infinity and -Infinity priorities", () => { + const heap = new IndexedHeap(); + heap.push("pos", Infinity); + heap.push("neg", -Infinity); + heap.push("zero", 0); + + assertEquals([...heap], [ + { key: "neg", priority: -Infinity }, + { key: "zero", priority: 0 }, + { key: "pos", priority: Infinity }, + ]); +}); + +Deno.test("IndexedHeap works correctly after clear and reuse", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + heap.push("b", 2); + heap.clear(); + 
+ heap.push("c", 30); + heap.push("d", 10); + heap.push("e", 20); + + assertEquals(heap.size, 3); + assertEquals(heap.peek(), { key: "d", priority: 10 }); + assertEquals(heap.has("a"), false); + assertEquals(heap.has("d"), true); + + assertEquals([...heap], [ + { key: "d", priority: 10 }, + { key: "e", priority: 20 }, + { key: "c", priority: 30 }, + ]); +}); + +Deno.test("IndexedHeap interleaved push, pop, update, delete", () => { + const heap = new IndexedHeap(); + heap.push("a", 10); + heap.push("b", 20); + heap.push("c", 30); + + assertEquals(heap.pop(), { key: "a", priority: 10 }); + + heap.push("d", 5); + heap.update("c", 1); + + assertEquals(heap.peek(), { key: "c", priority: 1 }); + + heap.delete("b"); + heap.push("e", 3); + + assertEquals(heap.size, 3); + assertEquals([...heap], [ + { key: "c", priority: 1 }, + { key: "e", priority: 3 }, + { key: "d", priority: 5 }, + ]); +}); + +Deno.test("IndexedHeap pop with two elements exercises general path", () => { + const heap = new IndexedHeap(); + heap.push("big", 100); + heap.push("small", 1); + + // pop() with size=2 takes the general path: move last to root, sift-down + assertEquals(heap.pop(), { key: "small", priority: 1 }); + assertEquals(heap.size, 1); + assertEquals(heap.pop(), { key: "big", priority: 100 }); +}); + +Deno.test("IndexedHeap push throws on NaN priority", () => { + const heap = new IndexedHeap(); + assertThrows( + () => heap.push("a", NaN), + RangeError, + "Cannot set priority: value is NaN", + ); + assertEquals(heap.size, 0); +}); + +Deno.test("IndexedHeap update throws on NaN priority", () => { + const heap = new IndexedHeap(); + heap.push("a", 1); + assertThrows( + () => heap.update("a", NaN), + RangeError, + "Cannot set priority: value is NaN", + ); + assertEquals(heap.getPriority("a"), 1); +}); + +Deno.test("IndexedHeap pushOrUpdate throws on NaN priority", () => { + const heap = new IndexedHeap(); + assertThrows( + () => heap.pushOrUpdate("a", NaN), + RangeError, + "Cannot set priority: 
value is NaN", + ); + assertEquals(heap.size, 0); + + heap.push("b", 1); + assertThrows( + () => heap.pushOrUpdate("b", NaN), + RangeError, + "Cannot set priority: value is NaN", + ); + assertEquals(heap.getPriority("b"), 1); +}); + +Deno.test("IndexedHeap stress test: push N, pop all, verify sorted", () => { + const heap = new IndexedHeap(); + const n = 200; + const priorities: number[] = []; + for (let i = 0; i < n; i++) { + const p = Math.floor(Math.random() * 10000); + priorities.push(p); + heap.push(i, p); + } + + assertEquals(heap.size, n); + + const popped: number[] = []; + while (!heap.isEmpty()) { + popped.push(heap.pop()!.priority); + } + + for (let i = 1; i < popped.length; i++) { + if (popped[i]! < popped[i - 1]!) { + throw new Error( + `Heap invariant violated: ${popped[i - 1]} > ${ + popped[i] + } at index ${i}`, + ); + } + } + assertEquals(popped.length, n); +}); + +Deno.test("IndexedHeap stress test: push N, delete random subset, pop rest", () => { + const heap = new IndexedHeap(); + const n = 200; + for (let i = 0; i < n; i++) { + heap.push(i, Math.floor(Math.random() * 10000)); + } + + const toDelete = new Set(); + for (let i = 0; i < n / 2; i++) { + const key = Math.floor(Math.random() * n); + if (!toDelete.has(key)) { + toDelete.add(key); + heap.delete(key); + } + } + + const remaining = n - toDelete.size; + assertEquals(heap.size, remaining); + + const popped: number[] = []; + while (!heap.isEmpty()) { + popped.push(heap.pop()!.priority); + } + + for (let i = 1; i < popped.length; i++) { + if (popped[i]! < popped[i - 1]!) 
{ + throw new Error( + `Heap invariant violated after deletes: ${popped[i - 1]} > ${ + popped[i] + } at index ${i}`, + ); + } + } + assertEquals(popped.length, remaining); +}); From ef5a8c52ec5efc46ea801411e0801a8a2a037423 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Wed, 1 Apr 2026 20:02:31 +0200 Subject: [PATCH 03/12] fix doc --- data_structures/unstable_indexed_heap.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/data_structures/unstable_indexed_heap.ts b/data_structures/unstable_indexed_heap.ts index 36e486355539..dbe1fb47e6ae 100644 --- a/data_structures/unstable_indexed_heap.ts +++ b/data_structures/unstable_indexed_heap.ts @@ -16,7 +16,9 @@ interface MutableEntry { * @typeParam K The type of the key. */ export interface HeapEntry { + /** The key that identifies this entry in the heap. */ readonly key: K; + /** The numeric priority of this entry (smaller = higher priority). */ readonly priority: number; } From 924b9610a0d55ddd82334bfee872a4e9813d1ede Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Thu, 2 Apr 2026 13:03:20 +0200 Subject: [PATCH 04/12] add Cache --- cache/deno.json | 7 +- cache/lru_cache.ts | 362 ----------------------------- cache/lru_cache_test.ts | 335 --------------------------- cache/memoize.ts | 5 +- cache/memoize_test.ts | 16 +- cache/mod.ts | 11 +- cache/ttl_cache.ts | 379 ------------------------------- cache/ttl_cache_test.ts | 489 ---------------------------------------- import_map.json | 2 +- 9 files changed, 22 insertions(+), 1584 deletions(-) delete mode 100644 cache/lru_cache.ts delete mode 100644 cache/lru_cache_test.ts delete mode 100644 cache/ttl_cache.ts delete mode 100644 cache/ttl_cache_test.ts diff --git a/cache/deno.json b/cache/deno.json index 4af221c0d477..4c328f1f1fef 100644 --- a/cache/deno.json +++ b/cache/deno.json @@ -1,10 +1,9 @@ { "name": "@std/cache", - "version": "0.2.2", + "version": "0.3.0", "exports": { ".": "./mod.ts", - "./lru-cache": "./lru_cache.ts", - "./memoize": "./memoize.ts", - 
"./ttl-cache": "./ttl_cache.ts" + "./cache": "./cache.ts", + "./memoize": "./memoize.ts" } } diff --git a/cache/lru_cache.ts b/cache/lru_cache.ts deleted file mode 100644 index 97a9876d62ac..000000000000 --- a/cache/lru_cache.ts +++ /dev/null @@ -1,362 +0,0 @@ -// Copyright 2018-2026 the Deno authors. MIT license. -// This module is browser compatible. - -import type { MemoizationCache } from "./memoize.ts"; -export type { MemoizationCache }; - -/** - * The reason an entry was removed from the cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * - `"evicted"` — removed automatically because the cache exceeded - * {@linkcode LruCache.prototype.maxSize | maxSize}. - * - `"deleted"` — removed by an explicit - * {@linkcode LruCache.prototype.delete | delete()} call. - * - `"cleared"` — removed by - * {@linkcode LruCache.prototype.clear | clear()}. - */ -export type LruCacheEjectionReason = "evicted" | "deleted" | "cleared"; - -/** - * Options for the {@linkcode LruCache} constructor. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - */ -export interface LruCacheOptions { - /** - * Callback invoked when an entry is removed, whether by eviction, - * manual deletion, or clearing the cache. The entry is already removed - * from the cache when this callback fires. Overwriting an existing key - * via {@linkcode LruCache.prototype.set | set()} does **not** trigger - * this callback. The cache is not re-entrant during this callback: - * calling `set`, `delete`, or `clear` will throw. - * - * @param ejectedKey The key of the removed entry. - * @param ejectedValue The value of the removed entry. - * @param reason Why the entry was removed. - */ - onEject?: ( - ejectedKey: K, - ejectedValue: V, - reason: LruCacheEjectionReason, - ) => void; -} - -/** - * Least-recently-used cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. 
- * - * @see {@link https://en.wikipedia.org/wiki/Cache_replacement_policies#LRU | Least-recently-used cache} - * - * Automatically removes entries above the max size based on when they were - * last accessed with `get` or `set`. - * - * @typeParam K The type of the cache keys. - * @typeParam V The type of the cache values. - * - * @example Basic usage - * ```ts - * import { LruCache } from "@std/cache"; - * import { assert, assertEquals } from "@std/assert"; - * - * const MAX_SIZE = 3; - * const cache = new LruCache(MAX_SIZE); - * - * cache.set("a", 1); - * cache.set("b", 2); - * cache.set("c", 3); - * cache.set("d", 4); - * - * // most recent values are stored up to `MAX_SIZE` - * assertEquals(cache.get("b"), 2); - * assertEquals(cache.get("c"), 3); - * assertEquals(cache.get("d"), 4); - * - * // less recent values are removed - * assert(!cache.has("a")); - * ``` - * - * @example Adding an onEject callback - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert"; - * - * const ejected: [string, number, string][] = []; - * const cache = new LruCache(2, { - * onEject: (key, value, reason) => ejected.push([key, value, reason]), - * }); - * - * cache.set("a", 1); - * cache.set("b", 2); - * cache.set("c", 3); - * - * assertEquals(ejected, [["a", 1, "evicted"]]); - * ``` - */ -export class LruCache extends Map - implements MemoizationCache { - #maxSize: number; - #ejecting = false; - #eject?: - | ((ejectedKey: K, ejectedValue: V, reason: LruCacheEjectionReason) => void) - | undefined; - - /** - * Constructs a new `LruCache`. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param maxSize The maximum number of entries to store in the cache. Must - * be a positive integer. - * @param options Additional options. 
- */ - constructor( - maxSize: number, - options?: LruCacheOptions, - ) { - super(); - if (!Number.isInteger(maxSize) || maxSize < 1) { - throw new RangeError( - `Cannot create LruCache: maxSize must be a positive integer: received ${maxSize}`, - ); - } - this.#maxSize = maxSize; - this.#eject = options?.onEject; - } - - /** - * The maximum number of entries to store in the cache. - * - * @returns The maximum number of entries in the cache. - * - * @example Max size - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert"; - * - * const cache = new LruCache(100); - * assertEquals(cache.maxSize, 100); - * ``` - */ - get maxSize(): number { - return this.#maxSize; - } - - #setMostRecentlyUsed(key: K, value: V): void { - super.delete(key); - super.set(key, value); - } - - #pruneToMaxSize(): void { - if (this.size <= this.#maxSize) return; - const key = this.keys().next().value!; - const value = super.get(key)!; - super.delete(key); - if (this.#eject) { - this.#ejecting = true; - try { - this.#eject(key, value, "evicted"); - } finally { - this.#ejecting = false; - } - } - } - - /** - * Checks whether an element with the specified key exists or not. Does - * **not** update the entry's position in the eviction order. - * - * @param key The key to check. - * @returns `true` if the cache contains the specified key, otherwise `false`. - * - * @example Checking for the existence of a key - * ```ts - * import { LruCache } from "@std/cache"; - * import { assert } from "@std/assert"; - * - * const cache = new LruCache(100); - * - * cache.set("a", 1); - * assert(cache.has("a")); - * ``` - */ - override has(key: K): boolean { - return super.has(key); - } - - /** - * Gets the element with the specified key. - * - * @param key The key to get the value for. - * @returns The value associated with the specified key, or `undefined` if - * the key is not present in the cache. 
- * - * @example Getting a value from the cache - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert"; - * - * const cache = new LruCache(100); - * - * cache.set("a", 1); - * assertEquals(cache.get("a"), 1); - * ``` - */ - override get(key: K): V | undefined { - if (super.has(key)) { - const value = super.get(key)!; - this.#setMostRecentlyUsed(key, value); - return value; - } - - return undefined; - } - - /** - * Returns the value associated with the given key, or `undefined` if the - * key is not present, **without** updating its position in the eviction - * order. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param key The key to look up. - * @returns The value, or `undefined` if not present. - * - * @example Peeking at a value without promoting it - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert"; - * - * const cache = new LruCache(3); - * cache.set("a", 1); - * cache.set("b", 2); - * cache.set("c", 3); - * - * // peek does not promote "a" - * assertEquals(cache.peek("a"), 1); - * - * // "a" is still the least recently used and gets evicted - * cache.set("d", 4); - * assertEquals(cache.peek("a"), undefined); - * ``` - */ - peek(key: K): V | undefined { - return super.get(key); - } - - /** - * Sets the specified key to the specified value. - * - * @param key The key to set the value for. - * @param value The value to set. - * @returns `this` for chaining. 
- * - * @example Setting a value in the cache - * ```ts no-assert - * import { LruCache } from "@std/cache"; - * - * const cache = new LruCache(100); - * cache.set("a", 1); - * ``` - */ - override set(key: K, value: V): this { - if (this.#ejecting) { - throw new TypeError( - "Cannot set entry in LruCache: cache is not re-entrant during onEject callbacks", - ); - } - this.#setMostRecentlyUsed(key, value); - this.#pruneToMaxSize(); - - return this; - } - - /** - * Deletes the value associated with the given key. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param key The key to delete. - * @returns `true` if the key was deleted, `false` otherwise. - * - * @example Deleting a key from the cache - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * const cache = new LruCache(1); - * - * cache.set("a", 1); - * cache.delete("a"); - * assertEquals(cache.has("a"), false); - * ``` - */ - override delete(key: K): boolean { - if (this.#ejecting) { - throw new TypeError( - "Cannot delete entry in LruCache: cache is not re-entrant during onEject callbacks", - ); - } - const value = super.get(key); - const existed = super.delete(key); - if (!existed) return false; - - if (this.#eject) { - this.#ejecting = true; - try { - this.#eject(key, value!, "deleted"); - } finally { - this.#ejecting = false; - } - } - return true; - } - - /** - * Clears the cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. 
- * - * @example Usage - * ```ts - * import { LruCache } from "@std/cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * const cache = new LruCache(100); - * - * cache.set("a", 1); - * cache.set("b", 2); - * cache.clear(); - * assertEquals(cache.size, 0); - * ``` - */ - override clear(): void { - if (this.#ejecting) { - throw new TypeError( - "Cannot clear LruCache: cache is not re-entrant during onEject callbacks", - ); - } - if (!this.#eject) { - super.clear(); - return; - } - const entries = [...super.entries()]; - super.clear(); - this.#ejecting = true; - let error: unknown; - try { - for (const [key, value] of entries) { - try { - this.#eject(key, value, "cleared"); - } catch (e) { - error ??= e; - } - } - } finally { - this.#ejecting = false; - } - if (error !== undefined) throw error; - } -} diff --git a/cache/lru_cache_test.ts b/cache/lru_cache_test.ts deleted file mode 100644 index c47fc05f4d07..000000000000 --- a/cache/lru_cache_test.ts +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2018-2026 the Deno authors. MIT license. 
-import { assert, assertEquals, assertThrows } from "@std/assert"; -import { LruCache, type LruCacheEjectionReason } from "./lru_cache.ts"; - -Deno.test("LruCache deletes least-recently-used", () => { - const cache = new LruCache(3); - - cache.set(1, "!"); - cache.set(2, "!"); - cache.set(1, "updated"); - cache.set(3, "!"); - cache.set(4, "!"); - - assertEquals(cache.size, 3); - assert(!cache.has(2)); - assertEquals(cache.get(2), undefined); - assertEquals([...cache.keys()], [1, 3, 4]); - assertEquals(cache.get(3), "!"); - assertEquals(cache.get(1), "updated"); - - assertEquals(cache.delete(3), true); - assertEquals(cache.size, 2); - assertEquals(cache.get(3), undefined); -}); - -Deno.test("LruCache.maxSize is readonly", () => { - const cache = new LruCache(100); - assertEquals(cache.maxSize, 100); -}); - -Deno.test("LruCache validates maxSize", async (t) => { - await t.step("rejects 0", () => { - assertThrows( - () => new LruCache(0), - RangeError, - "maxSize must be a positive integer", - ); - }); - - await t.step("rejects negative", () => { - assertThrows( - () => new LruCache(-1), - RangeError, - "maxSize must be a positive integer", - ); - }); - - await t.step("rejects NaN", () => { - assertThrows( - () => new LruCache(NaN), - RangeError, - "maxSize must be a positive integer", - ); - }); - - await t.step("rejects Infinity", () => { - assertThrows( - () => new LruCache(Infinity), - RangeError, - "maxSize must be a positive integer", - ); - }); - - await t.step("rejects non-integer", () => { - assertThrows( - () => new LruCache(1.5), - RangeError, - "maxSize must be a positive integer", - ); - }); - - await t.step("accepts 1", () => { - const cache = new LruCache(1); - cache.set("a", 1); - cache.set("b", 2); - assertEquals(cache.size, 1); - assertEquals(cache.get("b"), 2); - }); -}); - -Deno.test("LruCache onEject()", async (t) => { - await t.step("calls onEject on delete and eviction", () => { - const reasons: LruCacheEjectionReason[] = []; - const cache = new 
LruCache(3, { - onEject: (_k, _v, reason) => reasons.push(reason), - }); - - cache.set(1, "!"); - cache.set(2, "!"); - cache.set(3, "!"); - cache.set(4, "!"); - cache.set(5, "!"); - - assertEquals(cache.size, 3); - assertEquals(reasons, ["evicted", "evicted"]); - cache.delete(3); - - assertEquals(reasons, ["evicted", "evicted", "deleted"]); - assertEquals(cache.size, 2); - assertEquals(cache.get(3), undefined); - }); - - await t.step("calls onEject for falsy values", () => { - const ejected: [number, unknown, LruCacheEjectionReason][] = []; - const cache = new LruCache(10, { - onEject: (k, v, reason) => ejected.push([k, v, reason]), - }); - - cache.set(1, 0); - cache.set(2, ""); - cache.set(3, false); - cache.set(4, null); - - cache.delete(1); - cache.delete(2); - cache.delete(3); - cache.delete(4); - - assertEquals(ejected, [ - [1, 0, "deleted"], - [2, "", "deleted"], - [3, false, "deleted"], - [4, null, "deleted"], - ]); - }); - - await t.step("calls onEject on clear()", () => { - const ejected: [string, number, LruCacheEjectionReason][] = []; - const cache = new LruCache(10, { - onEject: (k, v, reason) => ejected.push([k, v, reason]), - }); - - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - cache.clear(); - - assertEquals(ejected, [ - ["a", 1, "cleared"], - ["b", 2, "cleared"], - ["c", 3, "cleared"], - ]); - assertEquals(cache.size, 0); - }); - - await t.step("does not call onEject when overwriting a key", () => { - const ejected: [string, number, LruCacheEjectionReason][] = []; - const cache = new LruCache(10, { - onEject: (k, v, reason) => ejected.push([k, v, reason]), - }); - - cache.set("a", 1); - cache.set("a", 2); - - assertEquals(ejected, []); - assertEquals(cache.get("a"), 2); - }); - - await t.step("entry is fully removed before onEject fires", () => { - let sizeInCallback = -1; - let hasInCallback = true; - const cache = new LruCache(10, { - onEject: (k) => { - sizeInCallback = cache.size; - hasInCallback = cache.has(k); - }, - }); - - 
cache.set("a", 1); - cache.delete("a"); - - assertEquals(sizeInCallback, 0); - assertEquals(hasInCallback, false); - }); -}); - -Deno.test("LruCache delete() returns false for non-existent key", () => { - const cache = new LruCache(10); - assertEquals(cache.delete("nonexistent"), false); -}); - -Deno.test("LruCache clear() calls all onEject callbacks even if one throws", () => { - const ejected: string[] = []; - const cache = new LruCache(10, { - onEject: (k) => { - ejected.push(k); - if (k === "a") throw new Error("boom"); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - assertThrows(() => cache.clear(), Error, "boom"); - assertEquals(ejected, ["a", "b", "c"]); - assertEquals(cache.size, 0); -}); - -Deno.test("LruCache clear() works without onEject", () => { - const cache = new LruCache(10); - cache.set("a", 1); - cache.set("b", 2); - cache.clear(); - assertEquals(cache.size, 0); -}); - -Deno.test("LruCache has() does not promote entry", () => { - const cache = new LruCache(2); - cache.set("a", 1); - cache.set("b", 2); - cache.has("a"); - cache.set("c", 3); - assertEquals(cache.has("a"), false); - assertEquals(cache.has("b"), true); -}); - -Deno.test("LruCache peek() returns value without promoting entry", () => { - const cache = new LruCache(3); - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - - assertEquals(cache.peek("a"), 1); - - cache.set("d", 4); - assertEquals(cache.peek("a"), undefined); - assertEquals(cache.get("b"), 2); -}); - -Deno.test("LruCache peek() returns undefined for missing key", () => { - const cache = new LruCache(10); - assertEquals(cache.peek("nonexistent"), undefined); -}); - -Deno.test("LruCache set() throws if onEject throws during eviction", () => { - const cache = new LruCache(2, { - onEject: () => { - throw new Error("boom"); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - assertThrows(() => cache.set("c", 3), Error, "boom"); - assertEquals(cache.size, 2); - 
assertEquals(cache.has("a"), false); - assertEquals(cache.get("b"), 2); - assertEquals(cache.get("c"), 3); -}); - -Deno.test("LruCache onEject is not re-entrant", async (t) => { - await t.step("set() inside onEject throws", () => { - const cache = new LruCache(2, { - onEject: () => { - cache.set("x", 99); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - assertThrows( - () => cache.set("c", 3), - TypeError, - "cache is not re-entrant during onEject callbacks", - ); - }); - - await t.step("delete() inside onEject throws", () => { - const cache = new LruCache(2, { - onEject: () => { - cache.delete("b"); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - assertThrows( - () => cache.delete("a"), - TypeError, - "cache is not re-entrant during onEject callbacks", - ); - }); - - await t.step("clear() inside onEject throws", () => { - const cache = new LruCache(10, { - onEject: () => { - cache.clear(); - }, - }); - - cache.set("a", 1); - assertThrows( - () => cache.delete("a"), - TypeError, - "cache is not re-entrant during onEject callbacks", - ); - }); -}); - -Deno.test("LruCache set() returns this for chaining", () => { - const cache = new LruCache(10); - const result = cache.set("a", 1).set("b", 2).set("c", 3); - assert(result === cache); - assertEquals(cache.size, 3); -}); - -Deno.test("LruCache clear() rethrows the first error when multiple onEject callbacks throw", () => { - const ejected: string[] = []; - const cache = new LruCache(10, { - onEject: (k) => { - ejected.push(k); - throw new Error(`boom-${k}`); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - assertThrows(() => cache.clear(), Error, "boom-a"); - assertEquals(ejected, ["a", "b", "c"]); - assertEquals(cache.size, 0); -}); - -Deno.test("LruCache iteration order reflects LRU recency", () => { - const cache = new LruCache(3); - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - - cache.get("a"); - - assertEquals([...cache.keys()], ["b", "c", "a"]); - 
assertEquals([...cache.values()], [2, 3, 1]); - assertEquals([...cache.entries()], [["b", 2], ["c", 3], ["a", 1]]); -}); diff --git a/cache/memoize.ts b/cache/memoize.ts index 5c250c37029f..2e91df794ebc 100644 --- a/cache/memoize.ts +++ b/cache/memoize.ts @@ -48,9 +48,8 @@ export type MemoizeOptions< * {@linkcode Map} object is instantiated upon memoization and used as a cache, with no * limit on the number of results to be cached. * - * Alternatively, you can supply an - * {@link https://jsr.io/@std/cache/doc/lru-cache/~/LruCache | LruCache} - * with a specified max size to limit memory usage. + * Alternatively, you can supply a `Cache` with a specified max size + * to limit memory usage. */ cache?: Cache; /** diff --git a/cache/memoize_test.ts b/cache/memoize_test.ts index 988fdcdb39f7..1ddffd41ca08 100644 --- a/cache/memoize_test.ts +++ b/cache/memoize_test.ts @@ -1,7 +1,7 @@ // Copyright 2018-2026 the Deno authors. MIT license. import { assert, assertEquals, assertRejects, assertThrows } from "@std/assert"; import { type MemoizationCacheResult, memoize } from "./memoize.ts"; -import { LruCache } from "./lru_cache.ts"; +import { Cache } from "./cache.ts"; Deno.test("memoize() memoizes nullary function (lazy/singleton)", async (t) => { await t.step("async function", async () => { @@ -273,14 +273,18 @@ Deno.test("memoize() works with custom cache implementing MemoizationCache", () assertEquals(calls, 2); }); -Deno.test("memoize() evicts stale entries when used with LruCache", () => { +Deno.test("memoize() evicts stale entries when used with Cache", () => { let calls = 0; const MAX_SIZE = 3; const fn = memoize((n: number) => { ++calls; return 0 - n; - }, { cache: new LruCache>(MAX_SIZE) }); + }, { + cache: new Cache>({ + maxSize: MAX_SIZE, + }), + }); fn(1); fn(2); @@ -297,13 +301,15 @@ Deno.test("memoize() evicts stale entries when used with LruCache", () => { assertEquals(calls, 5); }); -Deno.test("memoize() only caches single latest result with LruCache of 
maxSize=1", () => { +Deno.test("memoize() only caches single latest result with Cache of maxSize=1", () => { let calls = 0; const fn = memoize((n: number) => { ++calls; return 0 - n; - }, { cache: new LruCache>(1) }); + }, { + cache: new Cache>({ maxSize: 1 }), + }); assertEquals(fn(0), 0); assertEquals(fn(0), 0); diff --git a/cache/mod.ts b/cache/mod.ts index e257897e869e..00112a9e9092 100644 --- a/cache/mod.ts +++ b/cache/mod.ts @@ -2,14 +2,14 @@ // This module is browser compatible. /** - * In-memory cache utilities, such as memoization and caches with different - * expiration policies. + * In-memory cache utilities, such as memoization and caches with LRU eviction + * and TTL expiration. * * ```ts - * import { memoize, LruCache, type MemoizationCacheResult } from "@std/cache"; + * import { Cache, memoize, type MemoizationCacheResult } from "@std/cache"; * import { assertEquals } from "@std/assert"; * - * const cache = new LruCache>(1000); + * const cache = new Cache>({ maxSize: 1000 }); * * // fibonacci function, which is very slow for n > ~30 if not memoized * const fib = memoize((n: bigint): bigint => { @@ -22,6 +22,5 @@ * @module */ -export * from "./lru_cache.ts"; +export * from "./cache.ts"; export * from "./memoize.ts"; -export * from "./ttl_cache.ts"; diff --git a/cache/ttl_cache.ts b/cache/ttl_cache.ts deleted file mode 100644 index 7f93795a3642..000000000000 --- a/cache/ttl_cache.ts +++ /dev/null @@ -1,379 +0,0 @@ -// Copyright 2018-2026 the Deno authors. MIT license. -// This module is browser compatible. - -import type { MemoizationCache } from "./memoize.ts"; - -/** - * Options for {@linkcode TtlCache.prototype.set}. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - */ -export interface TtlCacheSetOptions { - /** - * A custom time-to-live in milliseconds for this entry. If supplied, - * overrides the cache's default TTL. Must be a finite, non-negative number. 
- */ - ttl?: number; - /** - * A maximum lifetime in milliseconds for this entry, measured from the - * time it is set. When - * {@linkcode TtlCacheOptions.slidingExpiration | slidingExpiration} is - * enabled, the sliding window cannot extend past this duration. Throws - * if `slidingExpiration` is not enabled. - */ - absoluteExpiration?: number; -} - -/** - * Options for the {@linkcode TtlCache} constructor. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - */ -export interface TtlCacheOptions { - /** - * Callback invoked when an entry is removed, whether by TTL expiry, - * manual deletion, or clearing the cache. - */ - onEject?: (ejectedKey: K, ejectedValue: V) => void; - /** - * When `true`, each {@linkcode TtlCache.prototype.get | get()} call resets - * the entry's TTL. - * - * If both `slidingExpiration` and `absoluteExpiration` are set on an entry, - * the sliding window cannot extend past the absolute expiration. - * - * @default {false} - */ - slidingExpiration?: boolean; -} - -/** - * Time-to-live cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * Automatically removes entries after the configured amount of time elapses. - * - * @typeParam K The type of the cache keys. - * @typeParam V The type of the cache values. 
- * @example Usage - * ```ts - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * import { delay } from "@std/async/delay"; - * - * const cache = new TtlCache(1000); - * - * cache.set("a", 1); - * assertEquals(cache.size, 1); - * await delay(2000); - * assertEquals(cache.size, 0); - * ``` - * - * @example Sliding expiration - * ```ts - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * import { FakeTime } from "@std/testing/time"; - * - * using time = new FakeTime(0); - * const cache = new TtlCache(100, { - * slidingExpiration: true, - * }); - * - * cache.set("a", 1); - * time.now = 80; - * assertEquals(cache.get("a"), 1); // resets TTL - * time.now = 160; - * assertEquals(cache.get("a"), 1); // still alive, TTL was reset at t=80 - * time.now = 260; - * assertEquals(cache.get("a"), undefined); // expired - * ``` - */ -export class TtlCache extends Map - implements MemoizationCache { - #defaultTtl: number; - #timeouts = new Map(); - #eject?: ((ejectedKey: K, ejectedValue: V) => void) | undefined; - #slidingExpiration: boolean; - #entryTtls?: Map; - #absoluteDeadlines?: Map; - - /** - * Constructs a new instance. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param defaultTtl The default time-to-live in milliseconds. This value must - * be a finite, non-negative number. Its upper limit is determined by the - * current runtime's {@linkcode setTimeout} implementation. - * @param options Additional options. - */ - constructor( - defaultTtl: number, - options?: TtlCacheOptions, - ) { - super(); - if (!(defaultTtl >= 0) || !Number.isFinite(defaultTtl)) { - throw new RangeError( - `Cannot create TtlCache: defaultTtl must be a finite, non-negative number: received ${defaultTtl}`, - ); - } - this.#defaultTtl = defaultTtl; - this.#eject = options?.onEject; - this.#slidingExpiration = options?.slidingExpiration ?? 
false; - if (this.#slidingExpiration) { - this.#entryTtls = new Map(); - this.#absoluteDeadlines = new Map(); - } - } - - /** - * Set a value in the cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param key The cache key. - * @param value The value to set. - * @param options Options for this entry. - * @returns `this` for chaining. - * - * @example Usage - * ```ts - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * import { delay } from "@std/async/delay"; - * - * const cache = new TtlCache(100); - * - * cache.set("a", 1); - * assertEquals(cache.get("a"), 1); - * - * await delay(200); - * assertEquals(cache.get("a"), undefined); - * ``` - */ - override set( - key: K, - value: V, - options?: TtlCacheSetOptions, - ): this { - if (options?.absoluteExpiration !== undefined && !this.#slidingExpiration) { - throw new TypeError( - "Cannot set entry in TtlCache: absoluteExpiration requires slidingExpiration to be enabled", - ); - } - - const ttl = options?.ttl ?? this.#defaultTtl; - if (!(ttl >= 0) || !Number.isFinite(ttl)) { - throw new RangeError( - `Cannot set entry in TtlCache: ttl must be a finite, non-negative number: received ${ttl}`, - ); - } - - const abs = options?.absoluteExpiration; - if (abs !== undefined && (!(abs >= 0) || !Number.isFinite(abs))) { - throw new RangeError( - `Cannot set entry in TtlCache: absoluteExpiration must be a finite, non-negative number: received ${abs}`, - ); - } - - const existing = this.#timeouts.get(key); - if (existing !== undefined) clearTimeout(existing); - super.set(key, value); - this.#timeouts.set(key, setTimeout(() => this.delete(key), ttl)); - - if (this.#slidingExpiration) { - this.#entryTtls!.set(key, ttl); - if (abs !== undefined) { - this.#absoluteDeadlines!.set(key, Date.now() + abs); - } else { - this.#absoluteDeadlines!.delete(key); - } - } - - return this; - } - - /** - * Gets the value associated with the specified key. 
- * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * When {@linkcode TtlCacheOptions.slidingExpiration | slidingExpiration} is - * enabled, accessing an entry resets its TTL. - * - * @param key The key to get the value for. - * @returns The value associated with the specified key, or `undefined` if - * the key is not present in the cache. - * - * @example Usage - * ```ts - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * using cache = new TtlCache(1000); - * - * cache.set("a", 1); - * assertEquals(cache.get("a"), 1); - * ``` - */ - override get(key: K): V | undefined { - if (!super.has(key)) return undefined; - if (this.#slidingExpiration) this.#resetTtl(key); - return super.get(key); - } - - /** - * Returns the value associated with the given key, or `undefined` if the - * key is not present, **without** resetting its TTL. - * - * This is the TTL-cache equivalent of - * {@linkcode LruCache.prototype.peek | LruCache.peek()}: a side-effect-free - * read that leaves the entry's expiration unchanged. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param key The key to look up. - * @returns The value, or `undefined` if not present. 
- * - * @example Peeking at a value without resetting the sliding TTL - * ```ts - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * import { FakeTime } from "@std/testing/time"; - * - * using time = new FakeTime(0); - * const cache = new TtlCache(100, { - * slidingExpiration: true, - * }); - * - * cache.set("a", 1); - * time.now = 80; - * - * // peek does not reset the TTL - * assertEquals(cache.peek("a"), 1); - * - * // entry still expires at t=100 - * time.now = 100; - * assertEquals(cache.peek("a"), undefined); - * ``` - */ - peek(key: K): V | undefined { - if (!super.has(key)) return undefined; - return super.get(key); - } - - /** - * Deletes the value associated with the given key. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @param key The key to delete. - * @returns `true` if the key was deleted, `false` otherwise. - * - * @example Usage - * ```ts - * import { TtlCache } from "@std/cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * const cache = new TtlCache(1000); - * - * cache.set("a", 1); - * cache.delete("a"); - * assertEquals(cache.has("a"), false); - * ``` - */ - override delete(key: K): boolean { - const value = super.get(key); - const existed = super.delete(key); - if (!existed) return false; - - const timeout = this.#timeouts.get(key); - if (timeout !== undefined) clearTimeout(timeout); - this.#timeouts.delete(key); - this.#entryTtls?.delete(key); - this.#absoluteDeadlines?.delete(key); - this.#eject?.(key, value!); - return true; - } - - /** - * Clears the cache. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. 
- * - * @example Usage - * ```ts - * import { TtlCache } from "@std/cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * const cache = new TtlCache(1000); - * - * cache.set("a", 1); - * cache.set("b", 2); - * cache.clear(); - * assertEquals(cache.size, 0); - * ``` - */ - override clear(): void { - for (const timeout of this.#timeouts.values()) { - clearTimeout(timeout); - } - this.#timeouts.clear(); - this.#entryTtls?.clear(); - this.#absoluteDeadlines?.clear(); - const entries = [...super.entries()]; - super.clear(); - let error: unknown; - for (const [key, value] of entries) { - try { - this.#eject?.(key, value); - } catch (e) { - error ??= e; - } - } - if (error !== undefined) throw error; - } - - /** - * Automatically clears all remaining timeouts once the cache goes out of - * scope if the cache is declared with `using`. - * - * @experimental **UNSTABLE**: New API, yet to be vetted. - * - * @example Usage - * ```ts no-assert - * import { TtlCache } from "@std/cache/ttl-cache"; - * import { assertEquals } from "@std/assert/equals"; - * - * let c: TtlCache; - * { - * using cache = new TtlCache(1000); - * cache.set("a", 1); - * c = cache; - * } - * assertEquals(c.size, 0); - * ``` - */ - [Symbol.dispose](): void { - this.clear(); - } - - #resetTtl(key: K): void { - const ttl = this.#entryTtls!.get(key); - if (ttl === undefined) return; - - const deadline = this.#absoluteDeadlines!.get(key); - const effectiveTtl = deadline !== undefined - ? Math.min(ttl, Math.max(0, deadline - Date.now())) - : ttl; - - const existing = this.#timeouts.get(key); - if (existing !== undefined) clearTimeout(existing); - this.#timeouts.set( - key, - setTimeout(() => this.delete(key), effectiveTtl), - ); - } -} diff --git a/cache/ttl_cache_test.ts b/cache/ttl_cache_test.ts deleted file mode 100644 index a5823122f653..000000000000 --- a/cache/ttl_cache_test.ts +++ /dev/null @@ -1,489 +0,0 @@ -// Copyright 2018-2026 the Deno authors. MIT license. 
-import { TtlCache } from "./ttl_cache.ts"; -import { assertEquals, assertThrows } from "@std/assert"; -import { FakeTime } from "@std/testing/time"; - -const UNSET = Symbol("UNSET"); - -// check `has()`, `get()`, `forEach()` -function assertEntries( - cache: TtlCache, - entries: [key: K, value: V | typeof UNSET][], -) { - for (const [key, value] of entries) { - assertEquals(cache.has(key), value !== UNSET); - assertEquals(cache.get(key), value === UNSET ? undefined : value); - } - - cache.forEach((v, k) => assertEquals(v, entries.find(([x]) => x === k)![1])); - assertContentfulEntries( - cache as TtlCache, - entries.filter(([, v]) => v !== UNSET), - ); -} - -// check `size`, `entries()`, `keys()`, `values()`, `[Symbol.iterator]()` -function assertContentfulEntries( - cache: TtlCache, - entries: [key: K, value: V][], -) { - const keys = entries.map(([key]) => key); - const values = entries.map(([, value]) => value); - - assertEquals(cache.size, entries.length); - - assertEquals([...cache.entries()], entries); - assertEquals([...cache.keys()], keys); - assertEquals([...cache.values()], values); - assertEquals([...cache], entries); -} - -Deno.test("TtlCache deletes entries", async (t) => { - await t.step("after the default TTL, passed in constructor", () => { - using time = new FakeTime(0); - - const cache = new TtlCache(10); - - cache.set(1, "one"); - cache.set(2, "two"); - - time.now = 1; - assertEntries(cache, [[1, "one"], [2, "two"]]); - - time.now = 5; - assertEntries(cache, [[1, "one"], [2, "two"]]); - // setting again resets TTL countdown for key 1 - cache.set(1, "one"); - - time.now = 10; - assertEntries(cache, [[1, "one"], [2, UNSET]]); - - time.now = 15; - assertEntries(cache, [[1, UNSET], [2, UNSET]]); - }); - - await t.step("after a custom TTL, passed in set()", () => { - using time = new FakeTime(0); - - const cache = new TtlCache(10); - - cache.set(1, "one"); - cache.set(2, "two", { ttl: 3 }); - - time.now = 1; - assertEntries(cache, [[1, "one"], [2, 
"two"]]); - - time.now = 3; - assertEntries(cache, [[1, "one"], [2, UNSET]]); - - time.now = 10; - assertEntries(cache, [[1, UNSET], [2, UNSET]]); - }); - - await t.step("after manually calling delete()", () => { - const cache = new TtlCache(10); - - cache.set(1, "one"); - assertEntries(cache, [[1, "one"]]); - assertEquals(cache.delete(1), true); - assertEntries(cache, [[1, UNSET]]); - assertEquals(cache.delete(1), false); - assertEntries(cache, [[1, UNSET]]); - }); - - await t.step("after manually calling clear()", () => { - const cache = new TtlCache(10); - - cache.set(1, "one"); - assertEntries(cache, [[1, "one"]]); - cache.clear(); - assertEntries(cache, [[1, UNSET]]); - }); - - // this test will fail with `error: Leaks detected` if the timeouts are not cleared - await t.step("[Symbol.dispose]() clears all remaining timeouts", () => { - using cache = new TtlCache(10); - cache.set(1, "one"); - }); -}); - -Deno.test("TtlCache onEject()", async (t) => { - await t.step("calls onEject on delete and TTL expiry", () => { - using time = new FakeTime(0); - let called = 0; - const cache = new TtlCache(10, { onEject: () => called++ }); - - cache.set(1, "one"); - cache.set(2, "two"); - - cache.delete(2); - assertEquals(called, 1); - - cache.set(3, "three"); - time.now = 10; - assertEquals(called, 3); - assertEquals(cache.get(3), undefined); - }); - - await t.step("calls onEject for falsy values", () => { - const ejected: [number, unknown][] = []; - using cache = new TtlCache(1000, { - onEject: (k, v) => ejected.push([k, v]), - }); - - cache.set(1, 0); - cache.set(2, ""); - cache.set(3, false); - cache.set(4, null); - - cache.delete(1); - cache.delete(2); - cache.delete(3); - cache.delete(4); - - assertEquals(ejected, [[1, 0], [2, ""], [3, false], [4, null]]); - }); - - await t.step("calls onEject on clear()", () => { - const ejected: [number, string][] = []; - using cache = new TtlCache(1000, { - onEject: (k, v) => ejected.push([k, v]), - }); - - cache.set(1, "one"); - 
cache.set(2, "two"); - cache.set(3, "three"); - cache.clear(); - - assertEquals(ejected, [[1, "one"], [2, "two"], [3, "three"]]); - }); - - await t.step("calls onEject on [Symbol.dispose]()", () => { - const ejected: [number, string][] = []; - { - using cache = new TtlCache(1000, { - onEject: (k, v) => ejected.push([k, v]), - }); - cache.set(1, "one"); - cache.set(2, "two"); - } - - assertEquals(ejected, [[1, "one"], [2, "two"]]); - }); - - await t.step("does not call onEject when overwriting a key", () => { - const ejected: [string, number][] = []; - using cache = new TtlCache(1000, { - onEject: (k, v) => ejected.push([k, v]), - }); - - cache.set("a", 1); - cache.set("a", 2); - - assertEquals(ejected, []); - assertEquals(cache.get("a"), 2); - }); - - await t.step("entry is fully removed before onEject fires", () => { - let sizeInCallback = -1; - let hasInCallback = true; - using cache = new TtlCache(1000, { - onEject: (k) => { - sizeInCallback = cache.size; - hasInCallback = cache.has(k); - }, - }); - - cache.set("a", 1); - cache.delete("a"); - - assertEquals(sizeInCallback, 0); - assertEquals(hasInCallback, false); - }); -}); - -Deno.test("TtlCache validates TTL", async (t) => { - await t.step("constructor rejects negative defaultTtl", () => { - assertThrows( - () => new TtlCache(-1), - RangeError, - "defaultTtl must be a finite, non-negative number", - ); - }); - - await t.step("constructor rejects NaN defaultTtl", () => { - assertThrows( - () => new TtlCache(NaN), - RangeError, - "defaultTtl must be a finite, non-negative number", - ); - }); - - await t.step("constructor rejects Infinity defaultTtl", () => { - assertThrows( - () => new TtlCache(Infinity), - RangeError, - "defaultTtl must be a finite, non-negative number", - ); - }); - - await t.step("constructor accepts 0", () => { - using _cache = new TtlCache(0); - }); - - await t.step("set() rejects negative ttl", () => { - using cache = new TtlCache(1000); - assertThrows( - () => cache.set("a", 1, { ttl: -1 
}), - RangeError, - "ttl must be a finite, non-negative number", - ); - }); - - await t.step("set() rejects NaN ttl", () => { - using cache = new TtlCache(1000); - assertThrows( - () => cache.set("a", 1, { ttl: NaN }), - RangeError, - "ttl must be a finite, non-negative number", - ); - }); - - await t.step("set() rejects Infinity ttl", () => { - using cache = new TtlCache(1000); - assertThrows( - () => cache.set("a", 1, { ttl: Infinity }), - RangeError, - "ttl must be a finite, non-negative number", - ); - }); - - await t.step("set() accepts 0 ttl", () => { - using cache = new TtlCache(1000); - cache.set("a", 1, { ttl: 0 }); - assertEquals(cache.get("a"), 1); - }); -}); - -Deno.test("TtlCache peek()", async (t) => { - await t.step("returns value without resetting sliding TTL", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1); - - time.now = 80; - assertEquals(cache.peek("a"), 1); - - // peek did not reset the TTL, so the entry still expires at t=100 - time.now = 100; - assertEquals(cache.peek("a"), undefined); - }); - - await t.step("returns value for non-sliding cache", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100); - - cache.set("a", 1); - - time.now = 50; - assertEquals(cache.peek("a"), 1); - - time.now = 100; - assertEquals(cache.peek("a"), undefined); - }); - - await t.step("returns undefined for missing key", () => { - using cache = new TtlCache(100); - assertEquals(cache.peek("missing"), undefined); - }); -}); - -Deno.test("TtlCache get() returns undefined for missing key with sliding expiration", () => { - using cache = new TtlCache(100, { - slidingExpiration: true, - }); - assertEquals(cache.get("missing"), undefined); -}); - -Deno.test("TtlCache sliding expiration", async (t) => { - await t.step("get() resets TTL", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1); - 
- time.now = 80; - assertEquals(cache.get("a"), 1); - - // TTL was reset at t=80, so entry lives until t=180 - time.now = 160; - assertEquals(cache.get("a"), 1); - - // TTL was reset at t=160, so entry lives until t=260 - time.now = 250; - assertEquals(cache.get("a"), 1); - - time.now = 350; - assertEquals(cache.get("a"), undefined); - }); - - await t.step("has() does not reset TTL", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1); - - time.now = 80; - assertEquals(cache.has("a"), true); - - // has() did not reset the TTL, so the entry still expires at t=100 - time.now = 100; - assertEquals(cache.has("a"), false); - }); - - await t.step("does not reset TTL when slidingExpiration is false", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100); - - cache.set("a", 1); - - time.now = 80; - assertEquals(cache.get("a"), 1); - - time.now = 100; - assertEquals(cache.get("a"), undefined); - }); - - await t.step("absoluteExpiration caps sliding extension", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1, { absoluteExpiration: 150 }); - - time.now = 80; - assertEquals(cache.get("a"), 1); - - time.now = 140; - assertEquals(cache.get("a"), 1); - - // Absolute deadline is t=150; sliding cannot extend past it - time.now = 150; - assertEquals(cache.get("a"), undefined); - }); - - await t.step("absoluteExpiration throws without slidingExpiration", () => { - using cache = new TtlCache(100); - assertThrows( - () => cache.set("a", 1, { absoluteExpiration: 50 }), - TypeError, - "absoluteExpiration requires slidingExpiration to be enabled", - ); - }); - - await t.step("per-entry TTL works with sliding expiration", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1, { ttl: 50 }); - - time.now = 40; - 
assertEquals(cache.get("a"), 1); - - // TTL reset to 50ms at t=40, so alive until t=90 - time.now = 80; - assertEquals(cache.get("a"), 1); - - // TTL reset to 50ms at t=80, so alive until t=130 - time.now = 130; - assertEquals(cache.get("a"), undefined); - }); - - await t.step("sliding expiration calls onEject on expiry", () => { - using time = new FakeTime(0); - const ejected: [string, number][] = []; - const cache = new TtlCache(100, { - slidingExpiration: true, - onEject: (k, v) => ejected.push([k, v]), - }); - - cache.set("a", 1); - - time.now = 80; - cache.get("a"); - - time.now = 180; - assertEquals(ejected, [["a", 1]]); - }); - - await t.step("overwriting entry resets sliding metadata", () => { - using time = new FakeTime(0); - const cache = new TtlCache(100, { - slidingExpiration: true, - }); - - cache.set("a", 1, { ttl: 50, absoluteExpiration: 200 }); - - time.now = 40; - cache.get("a"); - - // Overwrite with different TTL and no absoluteExpiration - cache.set("a", 2, { ttl: 30 }); - - time.now = 60; - assertEquals(cache.get("a"), 2); - - // TTL reset to 30ms at t=60, alive until t=90 - time.now = 90; - assertEquals(cache.get("a"), undefined); - }); - - await t.step("set() rejects negative absoluteExpiration", () => { - using cache = new TtlCache(1000, { - slidingExpiration: true, - }); - assertThrows( - () => cache.set("a", 1, { absoluteExpiration: -1 }), - RangeError, - "absoluteExpiration must be a finite, non-negative number", - ); - }); - - await t.step("set() rejects NaN absoluteExpiration", () => { - using cache = new TtlCache(1000, { - slidingExpiration: true, - }); - assertThrows( - () => cache.set("a", 1, { absoluteExpiration: NaN }), - RangeError, - "absoluteExpiration must be a finite, non-negative number", - ); - }); -}); - -Deno.test("TtlCache clear() calls all onEject callbacks even if one throws", () => { - const ejected: string[] = []; - using cache = new TtlCache(1000, { - onEject: (k) => { - ejected.push(k); - if (k === "a") throw new 
Error("boom"); - }, - }); - - cache.set("a", 1); - cache.set("b", 2); - cache.set("c", 3); - assertThrows(() => cache.clear(), Error, "boom"); - assertEquals(ejected, ["a", "b", "c"]); - assertEquals(cache.size, 0); -}); diff --git a/import_map.json b/import_map.json index 7aa140dea113..79df470eef25 100644 --- a/import_map.json +++ b/import_map.json @@ -8,7 +8,7 @@ "@std/assert": "jsr:@std/assert@^1.0.19", "@std/async": "jsr:@std/async@^1.2.0", "@std/bytes": "jsr:@std/bytes@^1.0.6", - "@std/cache": "jsr:@std/cache@^0.2.2", + "@std/cache": "jsr:@std/cache@^0.3.0", "@std/cbor": "jsr:@std/cbor@^0.1.9", "@std/cli": "jsr:@std/cli@^1.0.28", "@std/collections": "jsr:@std/collections@^1.1.6", From e94b4099a6cdd10cdfd511951210eec8657ef591 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Thu, 2 Apr 2026 13:03:26 +0200 Subject: [PATCH 05/12] add Cache --- cache/cache.ts | 1151 ++++++++++++++++++++++++++++++++++++++ cache/cache_test.ts | 1289 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 2440 insertions(+) create mode 100644 cache/cache.ts create mode 100644 cache/cache_test.ts diff --git a/cache/cache.ts b/cache/cache.ts new file mode 100644 index 000000000000..03175c0e5bd1 --- /dev/null +++ b/cache/cache.ts @@ -0,0 +1,1151 @@ +// Copyright 2018-2026 the Deno authors. MIT license. +// This module is browser compatible. + +import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; +import type { MemoizationCache } from "./memoize.ts"; + +/** + * The reason an entry was removed from the cache. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * - `"evicted"` — removed because the cache exceeded + * {@linkcode Cache.prototype.maxSize | maxSize}. + * - `"expired"` — removed because its TTL elapsed. + * - `"deleted"` — removed by an explicit + * {@linkcode Cache.prototype.delete | delete()} call. + * - `"cleared"` — removed by + * {@linkcode Cache.prototype.clear | clear()}. 
+ */ +export type CacheEjectionReason = "evicted" | "expired" | "deleted" | "cleared"; + +/** + * Options shared by all {@linkcode Cache} configurations. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the cache keys. + * @typeParam V The type of the cache values. + */ +export interface CacheOptionsBase { + /** + * Maximum number of entries. When exceeded, the least-recently-used entry + * is evicted. Omit for unbounded. + */ + maxSize?: number; + /** + * Called when an entry is removed by eviction, expiration, deletion, or + * clearing. Not called when {@linkcode Cache.prototype.set | set()} + * overwrites an existing key. The entry is already removed when this + * fires. The cache is not re-entrant during this callback: calling + * `set`, `delete`, or `clear` will throw. + * + * @param key The key of the removed entry. + * @param value The value of the removed entry. + * @param reason Why the entry was removed. + */ + onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + + /** Must be `undefined` for non-TTL caches. */ + ttl?: undefined; + /** Must be `undefined` for non-TTL caches. */ + slidingExpiration?: undefined; + /** Must be `undefined` for non-SWR caches. */ + staleTtl?: undefined; + /** Must be `undefined` for non-SWR caches. */ + refresh?: undefined; + /** Must be `undefined` for non-SWR caches. */ + onRefreshError?: undefined; +} + +/** + * {@linkcode Cache} options that enable TTL expiration. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the cache keys. + * @typeParam V The type of the cache values. + */ +export interface CacheOptionsTtl { + /** + * Maximum number of entries. When exceeded, the least-recently-used entry + * is evicted. Omit for unbounded. + */ + maxSize?: number; + /** + * Called when an entry is removed by eviction, expiration, deletion, or + * clearing. 
Not called when {@linkcode Cache.prototype.set | set()} + * overwrites an existing key. The entry is already removed when this + * fires. The cache is not re-entrant during this callback: calling + * `set`, `delete`, or `clear` will throw. + * + * @param key The key of the removed entry. + * @param value The value of the removed entry. + * @param reason Why the entry was removed. + */ + onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + /** + * Default time-to-live in milliseconds. Entries expire after this + * duration. + */ + ttl: number; + /** + * When `true`, {@linkcode Cache.prototype.get | get()} resets the + * entry's TTL. {@linkcode Cache.prototype.peek | peek()} and + * {@linkcode Cache.prototype.has | has()} do not. + * + * @default {false} + */ + slidingExpiration?: boolean; + + /** Must be `undefined` for non-SWR caches. */ + staleTtl?: undefined; + /** Must be `undefined` for non-SWR caches. */ + refresh?: undefined; + /** Must be `undefined` for non-SWR caches. */ + onRefreshError?: undefined; +} + +/** + * {@linkcode Cache} options that enable stale-while-revalidate. + * + * After {@linkcode CacheOptionsSwr.staleTtl | staleTtl} elapses, the entry + * is "stale": still returned by {@linkcode Cache.prototype.get | get()}, + * but a background call to + * {@linkcode CacheOptionsSwr.refresh | refresh()} is triggered to replace + * it. The entry is fully expired after + * {@linkcode CacheOptionsSwr.ttl | ttl}. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the cache keys. + * @typeParam V The type of the cache values. + */ +export interface CacheOptionsSwr { + /** + * Maximum number of entries. When exceeded, the least-recently-used entry + * is evicted. Omit for unbounded. + */ + maxSize?: number; + /** + * Called when an entry is removed by eviction, expiration, deletion, or + * clearing. Not called when {@linkcode Cache.prototype.set | set()} + * overwrites an existing key. 
The entry is already removed when this + * fires. The cache is not re-entrant during this callback: calling + * `set`, `delete`, or `clear` will throw. + * + * @param key The key of the removed entry. + * @param value The value of the removed entry. + * @param reason Why the entry was removed. + */ + onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + /** + * Default time-to-live in milliseconds. Entries expire after this + * duration. Must be greater than + * {@linkcode CacheOptionsSwr.staleTtl | staleTtl}. + */ + ttl: number; + /** + * When `true`, {@linkcode Cache.prototype.get | get()} resets the + * entry's TTL. {@linkcode Cache.prototype.peek | peek()} and + * {@linkcode Cache.prototype.has | has()} do not. + * + * @default {false} + */ + slidingExpiration?: boolean; + /** + * Soft TTL in milliseconds. After this duration the entry is "stale" — + * still returned by {@linkcode Cache.prototype.get | get()}, but a + * background {@linkcode CacheOptionsSwr.refresh | refresh()} is + * triggered. Must be less than {@linkcode CacheOptionsSwr.ttl | ttl}. + */ + staleTtl: number; + /** + * Called to refresh a stale entry. The returned value replaces the entry + * and resets both soft and hard deadlines. If + * {@linkcode Cache.prototype.set | set()} or + * {@linkcode Cache.prototype.delete | delete()} is called on the same + * key while a refresh is in flight, the refresh result is discarded. + * + * @param key The key of the stale entry. + * @param staleValue The current stale value. + * @returns A promise resolving to the fresh value. + */ + refresh: (key: K, staleValue: V) => Promise; + /** + * Called when a background refresh fails. The stale value is retained + * until the hard TTL expires. When not provided, refresh errors are + * silently discarded and only reflected in + * {@linkcode CacheStats.refreshErrors | stats.refreshErrors}. + * + * @param key The key of the entry whose refresh failed. 
+ * @param error The error thrown by + * {@linkcode CacheOptionsSwr.refresh | refresh()}. + */ + onRefreshError?: (key: K, error: unknown) => void; +} + +/** + * Valid option shapes for the {@linkcode Cache} constructor. The union + * enforces that TTL-related options cannot be set without `ttl`, and + * SWR options cannot be set without both `staleTtl` and `refresh`: + * + * ```ts ignore + * new Cache({ slidingExpiration: true }); // compile error + * new Cache({ ttl: 5000, staleTtl: 3000 }); // compile error + * new Cache({ ttl: 5000, refresh: fn }); // compile error + * ``` + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the cache keys. + * @typeParam V The type of the cache values. + */ +export type CacheOptions = + | CacheOptionsBase + | CacheOptionsTtl + | CacheOptionsSwr; + +/** + * Per-entry overrides for {@linkcode Cache.prototype.set | set()}. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + */ +export interface CacheSetOptions { + /** + * Override the default TTL for this entry. When set on a cache that was + * constructed without a default `ttl`, this entry will still expire + * after the given duration. Must be a finite non-negative number. + */ + ttl?: number; + /** + * Absolute expiration cap in milliseconds, measured from the time of the + * `set()` call. The entry will expire no later than this duration after + * it was set, regardless of TTL or sliding resets. When + * {@linkcode CacheOptionsTtl.slidingExpiration | slidingExpiration} is + * enabled, the sliding window cannot extend past this cap. Without + * `slidingExpiration`, `absoluteExpiration` still clamps the initial + * deadline if it is shorter than the entry's TTL. Must be a finite + * non-negative number. + */ + absoluteExpiration?: number; + /** + * Override the default + * {@linkcode CacheOptionsSwr.staleTtl | staleTtl} for this entry. 
+ * Ignored when the cache was not constructed with + * {@linkcode CacheOptionsSwr.refresh | refresh}. Must be a finite + * non-negative number. + */ + staleTtl?: number; +} + +/** + * Cache performance counters returned by + * {@linkcode Cache.prototype.stats | stats}. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + */ +export interface CacheStats { + /** Number of {@linkcode Cache.prototype.get | get()} calls that found a + * live entry. */ + hits: number; + /** Number of {@linkcode Cache.prototype.get | get()} calls that did not + * find a live entry. */ + misses: number; + /** Number of {@linkcode Cache.prototype.set | set()} calls. */ + sets: number; + /** Number of {@linkcode Cache.prototype.delete | delete()} calls that + * removed an entry. */ + deletes: number; + /** Number of entries removed by LRU eviction. */ + evictions: number; + /** Number of entries removed by TTL expiration. */ + expirations: number; + /** Number of {@linkcode Cache.prototype.get | get()} calls that + * returned a stale value and triggered a background refresh. */ + staleHits: number; + /** Number of background refreshes started. */ + refreshes: number; + /** Number of background refreshes that failed. */ + refreshErrors: number; +} + +interface CacheEntry { + value: V; + deadline: number; + absoluteDeadline: number; + entryTtl: number; + softDeadline: number; + entryStaleTtl: number; + generation: number; +} + +/** + * A size-bounded, time-bounded in-memory cache with LRU eviction and + * optional TTL expiration. + * + * Mode is determined by which options are provided: + * + * | Configuration | Behaviour | + * | --- | --- | + * | `{ maxSize }` | Pure LRU, no timers | + * | `{ ttl }` | Pure TTL, unbounded | + * | `{ maxSize, ttl }` | LRU + TTL | + * | `{ ttl, staleTtl, refresh }` | Stale-while-revalidate | + * | `{ maxSize, ttl, staleTtl, refresh }` | LRU + SWR | + * | `{}` or no options | Unbounded, no expiry | + * + * The cache does **not** extend `Map`. 
It owns a `Map` internally and + * delegates to {@linkcode IndexedHeap} from `@std/data-structures` for + * deadline scheduling with a single `setTimeout`. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @typeParam K The type of the cache keys. + * @typeParam V The type of the cache values. + * + * @example Pure LRU + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 3 }); + * cache.set("a", 1); + * cache.set("b", 2); + * cache.set("c", 3); + * cache.set("d", 4); + * + * assertEquals(cache.has("a"), false); + * assertEquals(cache.get("d"), 4); + * ``` + * + * @example LRU + TTL + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * import { FakeTime } from "@std/testing/time"; + * + * using time = new FakeTime(0); + * using cache = new Cache({ maxSize: 100, ttl: 1000 }); + * + * cache.set("a", 1); + * assertEquals(cache.get("a"), 1); + * + * time.tick(1001); + * assertEquals(cache.get("a"), undefined); + * ``` + */ +export class Cache implements MemoizationCache { + #data = new Map>(); + #maxSize: number | undefined; + #defaultTtl: number; + #slidingExpiration: boolean; + #ejecting = false; + #eject: ((key: K, value: V, reason: CacheEjectionReason) => void) | undefined; + + #heap: IndexedHeap | undefined; + #timerId: number | undefined; + #inFlight: Map> | undefined; + + #defaultStaleTtl: number; + #refresh: + | ((key: K, staleValue: V) => Promise) + | undefined; + #onRefreshError: ((key: K, error: unknown) => void) | undefined; + #refreshing: Set | undefined; + #generation = 0; + + #stats: CacheStats = { + hits: 0, + misses: 0, + sets: 0, + deletes: 0, + evictions: 0, + expirations: 0, + staleHits: 0, + refreshes: 0, + refreshErrors: 0, + }; + + /** + * Constructs a new `Cache`. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @param options Configuration options. 
+ * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assertEquals(cache.get("a"), 1); + * ``` + */ + constructor(options?: CacheOptions) { + if (options?.maxSize !== undefined) { + if (!Number.isInteger(options.maxSize) || options.maxSize < 1) { + throw new RangeError( + `Cannot create Cache: maxSize must be a positive integer, received ${options.maxSize}`, + ); + } + this.#maxSize = options.maxSize; + } + + const ttl = options?.ttl; + if (ttl !== undefined) { + if (!(ttl >= 0) || !Number.isFinite(ttl)) { + throw new RangeError( + `Cannot create Cache: ttl must be a finite non-negative number, received ${ttl}`, + ); + } + this.#defaultTtl = ttl; + this.#heap = new IndexedHeap(); + } else { + this.#defaultTtl = Infinity; + } + + this.#slidingExpiration = options?.slidingExpiration ?? false; + this.#eject = options?.onEject; + + if (options?.staleTtl !== undefined) { + const { staleTtl } = options; + if (!(staleTtl >= 0) || !Number.isFinite(staleTtl)) { + throw new RangeError( + `Cannot create Cache: staleTtl must be a finite non-negative number, received ${staleTtl}`, + ); + } + if (staleTtl >= this.#defaultTtl) { + throw new RangeError( + `Cannot create Cache: staleTtl must be less than ttl, received staleTtl=${staleTtl} ttl=${this.#defaultTtl}`, + ); + } + this.#defaultStaleTtl = staleTtl; + this.#refresh = options.refresh; + this.#onRefreshError = options.onRefreshError; + } else { + this.#defaultStaleTtl = Infinity; + } + } + + /** + * The maximum number of entries, or `undefined` if unbounded. + * + * @returns The maximum number of entries, or `undefined`. 
+ * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * assertEquals(cache.maxSize, 100); + * ``` + */ + get maxSize(): number | undefined { + return this.#maxSize; + } + + /** + * The number of entries currently in the cache. + * + * @returns The number of entries. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assertEquals(cache.size, 1); + * ``` + */ + get size(): number { + return this.#data.size; + } + + /** + * Performance counters. The returned object is a snapshot copy; + * mutations have no effect on the cache. + * + * @returns A snapshot of the cache's performance counters. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.get("a"); + * cache.get("b"); + * assertEquals(cache.stats.hits, 1); + * assertEquals(cache.stats.misses, 1); + * ``` + */ + get stats(): Readonly { + return { ...this.#stats }; + } + + /** + * Reset all performance counters to zero. 
+ * + * @returns {void} + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.get("a"); + * cache.resetStats(); + * assertEquals(cache.stats.hits, 0); + * ``` + */ + resetStats(): void { + this.#stats = { + hits: 0, + misses: 0, + sets: 0, + deletes: 0, + evictions: 0, + expirations: 0, + staleHits: 0, + refreshes: 0, + refreshErrors: 0, + }; + } + + #isExpired(entry: CacheEntry, now: number): boolean { + return entry.deadline !== Infinity && entry.deadline <= now; + } + + #removeEntry( + key: K, + entry: CacheEntry, + reason: CacheEjectionReason, + ): void { + this.#data.delete(key); + this.#heap?.delete(key); + if (this.#eject) { + this.#ejecting = true; + try { + this.#eject(key, entry.value, reason); + } finally { + this.#ejecting = false; + } + } + } + + #assertNotEjecting(method: string): void { + if (this.#ejecting) { + throw new TypeError( + `Cannot ${method} Cache: cache is not re-entrant during onEject callbacks`, + ); + } + } + + #setMostRecentlyUsed(key: K, entry: CacheEntry): void { + this.#data.delete(key); + this.#data.set(key, entry); + } + + #pruneToMaxSize(): void { + if (this.#maxSize === undefined || this.#data.size <= this.#maxSize) return; + const key = this.#data.keys().next().value!; + const entry = this.#data.get(key)!; + this.#stats.evictions++; + this.#removeEntry(key, entry, "evicted"); + } + + #scheduleTimer(): void { + if (this.#timerId !== undefined) { + clearTimeout(this.#timerId); + this.#timerId = undefined; + } + const top = this.#heap?.peek(); + if (top === undefined) return; + const delay = Math.max(0, top.priority - Date.now()); + this.#timerId = setTimeout(() => this.#onTimer(), delay); + } + + #onTimer(): void { + this.#timerId = undefined; + const now = Date.now(); + const heap = this.#heap!; + const errors: unknown[] = []; + while (!heap.isEmpty()) { + const top = 
heap.peek()!; + if (top.priority > now) break; + heap.pop(); + const entry = this.#data.get(top.key); + if (entry !== undefined) { + this.#data.delete(top.key); + this.#stats.expirations++; + if (this.#eject) { + this.#ejecting = true; + try { + this.#eject(top.key, entry.value, "expired"); + } catch (e) { + errors.push(e); + } finally { + this.#ejecting = false; + } + } + } + } + this.#scheduleTimer(); + if (errors.length === 1) throw errors[0]; + if (errors.length > 1) throw new AggregateError(errors); + } + + #setHeapDeadline(key: K, deadline: number): void { + const heap = this.#heap ?? (this.#heap = new IndexedHeap()); + const wasRoot = heap.isEmpty() || heap.peek()!.key === key; + heap.pushOrUpdate(key, deadline); + const isRoot = heap.peek()!.key === key; + if (wasRoot || isRoot) { + this.#scheduleTimer(); + } + } + + #updateDeadline( + key: K, + entry: CacheEntry, + ttl: number, + now: number, + ): void { + let effectiveTtl = ttl; + if ( + this.#slidingExpiration && entry.absoluteDeadline !== Infinity + ) { + effectiveTtl = Math.min( + ttl, + Math.max(0, entry.absoluteDeadline - now), + ); + } + const deadline = now + effectiveTtl; + entry.deadline = deadline; + this.#setHeapDeadline(key, deadline); + } + + #backgroundRefresh(key: K, staleValue: V): void { + (this.#refreshing ??= new Set()).add(key); + this.#stats.refreshes++; + const gen = this.#data.get(key)?.generation; + this.#refresh!(key, staleValue).then( + (newValue) => { + this.#refreshing?.delete(key); + const current = this.#data.get(key); + if (current !== undefined && current.generation === gen) { + this.set(key, newValue); + } + }, + (error) => { + this.#refreshing?.delete(key); + this.#stats.refreshErrors++; + this.#onRefreshError?.(key, error); + }, + ); + } + + /** + * Returns the value associated with the given key, or `undefined` if the + * key is not present or has expired. Promotes the entry to + * most-recently-used. 
When + * {@linkcode CacheOptionsTtl.slidingExpiration | slidingExpiration} is + * enabled, resets the entry's TTL. + * + * @param key The key to look up. + * @returns The value, or `undefined` if not present or expired. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assertEquals(cache.get("a"), 1); + * assertEquals(cache.get("b"), undefined); + * ``` + */ + get(key: K): V | undefined { + const entry = this.#data.get(key); + if (entry === undefined) { + this.#stats.misses++; + return undefined; + } + const now = Date.now(); + if (this.#isExpired(entry, now)) { + this.#stats.expirations++; + this.#removeEntry(key, entry, "expired"); + this.#stats.misses++; + return undefined; + } + if ( + this.#refresh !== undefined && + entry.softDeadline !== Infinity && + entry.softDeadline <= now && + !this.#refreshing?.has(key) + ) { + this.#stats.staleHits++; + this.#backgroundRefresh(key, entry.value); + } else { + this.#stats.hits++; + } + if (this.#slidingExpiration && entry.deadline !== Infinity) { + this.#updateDeadline(key, entry, entry.entryTtl, now); + } + this.#setMostRecentlyUsed(key, entry); + return entry.value; + } + + /** + * Returns the value associated with the given key, or `undefined` if the + * key is not present, **without** promoting it in the eviction order or + * resetting its TTL. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @param key The key to look up. + * @returns The value, or `undefined` if not present or expired. 
+ * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 3 }); + * cache.set("a", 1); + * cache.set("b", 2); + * cache.set("c", 3); + * + * assertEquals(cache.peek("a"), 1); + * cache.set("d", 4); + * assertEquals(cache.peek("a"), undefined); + * ``` + */ + peek(key: K): V | undefined { + const entry = this.#data.get(key); + if (entry === undefined) return undefined; + if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { + if (!this.#ejecting) { + this.#stats.expirations++; + this.#removeEntry(key, entry, "expired"); + } + return undefined; + } + return entry.value; + } + + /** + * Checks whether a live (non-expired) entry exists for the given key. + * Does **not** promote the entry or reset its TTL. + * + * @param key The key to check. + * @returns `true` if a live entry exists, `false` otherwise. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assert } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assert(cache.has("a")); + * assert(!cache.has("b")); + * ``` + */ + has(key: K): boolean { + const entry = this.#data.get(key); + if (entry === undefined) return false; + if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { + if (!this.#ejecting) { + this.#stats.expirations++; + this.#removeEntry(key, entry, "expired"); + } + return false; + } + return true; + } + + /** + * Inserts or overwrites an entry. Promotes the key to most-recently-used. + * If the cache exceeds {@linkcode Cache.prototype.maxSize | maxSize}, + * the least-recently-used entry is evicted. Overwriting an existing key + * does **not** fire {@linkcode CacheOptionsBase.onEject | onEject}. + * + * @param key The key to set. + * @param value The value to set. + * @param options Per-entry overrides. + * @returns `this` for chaining. 
+ * + * @example Usage + * ```ts no-assert + * import { Cache } from "@std/cache/cache"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * ``` + */ + set(key: K, value: V, options?: CacheSetOptions): this { + this.#assertNotEjecting("set entry in"); + const ttl = options?.ttl ?? this.#defaultTtl; + if (options?.ttl !== undefined && (!(ttl >= 0) || !Number.isFinite(ttl))) { + throw new RangeError( + `Cannot set entry in Cache: ttl must be a finite non-negative number, received ${ttl}`, + ); + } + const abs = options?.absoluteExpiration; + if (abs !== undefined && (!(abs >= 0) || !Number.isFinite(abs))) { + throw new RangeError( + `Cannot set entry in Cache: absoluteExpiration must be a finite non-negative number, received ${abs}`, + ); + } + const staleTtl = options?.staleTtl ?? this.#defaultStaleTtl; + if ( + staleTtl !== Infinity && (!(staleTtl >= 0) || !Number.isFinite(staleTtl)) + ) { + throw new RangeError( + `Cannot set entry in Cache: staleTtl must be a finite non-negative number, received ${staleTtl}`, + ); + } + + const now = Date.now(); + const absoluteDeadline = abs !== undefined ? now + abs : Infinity; + const deadline = ttl === Infinity + ? Infinity + : Math.min(now + ttl, absoluteDeadline); + const softDeadline = staleTtl === Infinity + ? Infinity + : Math.min(now + staleTtl, deadline); + + const entry: CacheEntry = { + value, + deadline, + absoluteDeadline, + entryTtl: ttl, + softDeadline, + entryStaleTtl: staleTtl, + generation: ++this.#generation, + }; + + this.#setMostRecentlyUsed(key, entry); + this.#refreshing?.delete(key); + + if (deadline !== Infinity) { + this.#setHeapDeadline(key, deadline); + } else { + this.#heap?.delete(key); + } + + this.#stats.sets++; + this.#pruneToMaxSize(); + return this; + } + + /** + * Returns the cached value for the given key if present. Otherwise, calls + * `loader` to produce the value, caches it, and returns it. 
Concurrent + * calls with the same key while a loader is in flight are de-duplicated: + * only one `loader` invocation occurs, and all callers receive the same + * promise. If {@linkcode Cache.prototype.set | set()}, + * {@linkcode Cache.prototype.delete | delete()}, or + * {@linkcode Cache.prototype.clear | clear()} is called on the same key + * while a loader is in flight, the loader result is discarded. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @param key The key to look up or load. + * @param loader Called when the key is not in the cache. Receives the key + * and must return a promise resolving to the value. + * @returns A promise resolving to the cached or freshly loaded value. + * + * @example Basic load-through + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * const value = await cache.getOrLoad("a", () => Promise.resolve(42)); + * assertEquals(value, 42); + * assertEquals(cache.get("a"), 42); + * ``` + * + * @example Concurrent callers are de-duplicated + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * let loadCount = 0; + * const loader = () => { loadCount++; return Promise.resolve(1); }; + * + * const [a, b] = await Promise.all([ + * cache.getOrLoad("x", loader), + * cache.getOrLoad("x", loader), + * ]); + * + * assertEquals(a, 1); + * assertEquals(b, 1); + * assertEquals(loadCount, 1); + * ``` + */ + getOrLoad(key: K, loader: (key: K) => Promise): Promise { + const cached = this.get(key); + if (cached !== undefined || this.has(key)) { + return Promise.resolve(cached as V); + } + + const existing = this.#inFlight?.get(key); + if (existing) return existing; + + const inFlight = (this.#inFlight ??= new Map()); + const gen = this.#generation; + const promise = loader(key).then( + (value) => { + if 
(inFlight.get(key) !== promise) return value; + inFlight.delete(key); + const current = this.#data.get(key); + if (current === undefined || current.generation <= gen) { + this.set(key, value); + } + return value; + }, + (error) => { + if (inFlight.get(key) === promise) { + inFlight.delete(key); + } + throw error; + }, + ); + + inFlight.set(key, promise); + return promise; + } + + /** + * Removes the entry with the given key. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @param key The key to delete. + * @returns `true` if the key existed, `false` otherwise. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assertEquals(cache.delete("a"), true); + * assertEquals(cache.delete("a"), false); + * ``` + */ + delete(key: K): boolean { + this.#assertNotEjecting("delete entry in"); + const entry = this.#data.get(key); + if (entry === undefined) return false; + this.#stats.deletes++; + this.#removeEntry(key, entry, "deleted"); + return true; + } + + /** + * Removes all entries. Fires + * {@linkcode CacheOptionsBase.onEject | onEject} with reason `"cleared"` + * for each entry. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. 
+ * + * @returns {void} + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.set("b", 2); + * cache.clear(); + * assertEquals(cache.size, 0); + * ``` + */ + clear(): void { + this.#assertNotEjecting("clear"); + if (this.#timerId !== undefined) { + clearTimeout(this.#timerId); + this.#timerId = undefined; + } + this.#heap?.clear(); + this.#inFlight?.clear(); + this.#refreshing?.clear(); + if (!this.#eject) { + this.#data.clear(); + return; + } + const entries = [...this.#data.entries()]; + this.#data.clear(); + this.#ejecting = true; + const errors: unknown[] = []; + try { + for (const [key, entry] of entries) { + try { + this.#eject(key, entry.value, "cleared"); + } catch (e) { + errors.push(e); + } + } + } finally { + this.#ejecting = false; + } + if (errors.length === 1) throw errors[0]; + if (errors.length > 1) throw new AggregateError(errors); + } + + /** + * Iterate over all live (non-expired) keys. + * + * @returns An iterator over keys. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.set("b", 2); + * assertEquals([...cache.keys()], ["a", "b"]); + * ``` + */ + *keys(): IterableIterator { + const now = Date.now(); + for (const [key, entry] of this.#data) { + if (entry.deadline !== Infinity && entry.deadline <= now) continue; + yield key; + } + } + + /** + * Iterate over all live (non-expired) values. + * + * @returns An iterator over values. 
+ * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.set("b", 2); + * assertEquals([...cache.values()], [1, 2]); + * ``` + */ + *values(): IterableIterator { + const now = Date.now(); + for (const [_key, entry] of this.#data) { + if (entry.deadline !== Infinity && entry.deadline <= now) continue; + yield entry.value; + } + } + + /** + * Iterate over all live (non-expired) entries as `[key, value]` pairs. + * + * @returns An iterator over `[key, value]` pairs. + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * cache.set("b", 2); + * assertEquals([...cache.entries()], [["a", 1], ["b", 2]]); + * ``` + */ + *entries(): IterableIterator<[K, V]> { + const now = Date.now(); + for (const [key, entry] of this.#data) { + if (entry.deadline !== Infinity && entry.deadline <= now) continue; + yield [key, entry.value]; + } + } + + /** + * Calls the given function for each live (non-expired) entry. + * + * @param callback The function to call for each entry. + * @returns {void} + * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * const keys: string[] = []; + * cache.forEach((_v, k) => keys.push(k)); + * assertEquals(keys, ["a"]); + * ``` + */ + forEach(callback: (value: V, key: K, cache: Cache) => void): void { + const now = Date.now(); + for (const [key, entry] of this.#data) { + if (entry.deadline !== Infinity && entry.deadline <= now) continue; + callback(entry.value, key, this); + } + } + + /** + * Iterate over all live (non-expired) entries as `[key, value]` pairs. + * + * @returns An iterator over `[key, value]` pairs. 
+ * + * @example Usage + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * + * const cache = new Cache({ maxSize: 100 }); + * cache.set("a", 1); + * assertEquals([...cache], [["a", 1]]); + * ``` + */ + *[Symbol.iterator](): IterableIterator<[K, V]> { + yield* this.entries(); + } + + /** + * Clears all entries and cancels all timers. Safe for use with `using`. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @returns {void} + * + * @example Usage + * ```ts no-assert + * import { Cache } from "@std/cache/cache"; + * + * { + * using cache = new Cache({ maxSize: 10, ttl: 5000 }); + * cache.set("a", 1); + * } + * ``` + */ + [Symbol.dispose](): void { + this.clear(); + } +} diff --git a/cache/cache_test.ts b/cache/cache_test.ts new file mode 100644 index 000000000000..1e6d4687931f --- /dev/null +++ b/cache/cache_test.ts @@ -0,0 +1,1289 @@ +// Copyright 2018-2026 the Deno authors. MIT license. +import { assert, assertEquals, assertRejects, assertThrows } from "@std/assert"; +import { FakeTime } from "@std/testing/time"; +import { + Cache, + type CacheEjectionReason, + type CacheOptions, + type CacheOptionsSwr, +} from "./cache.ts"; + +// ─── Constructor ───────────────────────────────────── + +Deno.test("Cache() creates an unbounded cache with no options", () => { + const cache = new Cache(); + assertEquals(cache.maxSize, undefined); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache() creates a bounded cache", () => { + const cache = new Cache({ maxSize: 3 }); + assertEquals(cache.maxSize, 3); +}); + +Deno.test("Cache() throws on invalid maxSize", () => { + assertThrows( + () => new Cache({ maxSize: 0 }), + RangeError, + "maxSize must be a positive integer", + ); + assertThrows( + () => new Cache({ maxSize: -1 }), + RangeError, + ); + assertThrows( + () => new Cache({ maxSize: 1.5 }), + RangeError, + ); +}); + +Deno.test("Cache() throws on invalid ttl", () => { + assertThrows( + () => new 
Cache({ ttl: -1 }), + RangeError, + "ttl must be a finite non-negative number", + ); + assertThrows( + () => new Cache({ ttl: NaN }), + RangeError, + ); + assertThrows( + () => new Cache({ ttl: Infinity }), + RangeError, + ); +}); + +// ─── Pure LRU ──────────────────────────────────────── + +Deno.test("Cache set/get/has/peek/delete work in pure LRU mode", () => { + const cache = new Cache({ maxSize: 3 }); + + cache.set("a", 1); + cache.set("b", 2); + cache.set("c", 3); + + assertEquals(cache.get("a"), 1); + assertEquals(cache.has("b"), true); + assertEquals(cache.peek("c"), 3); + assertEquals(cache.size, 3); + + assertEquals(cache.delete("b"), true); + assertEquals(cache.delete("b"), false); + assertEquals(cache.size, 2); +}); + +Deno.test("Cache evicts LRU entry when maxSize exceeded", () => { + const ejected: [string, number, CacheEjectionReason][] = []; + const cache = new Cache({ + maxSize: 2, + onEject: (k, v, r) => ejected.push([k, v, r]), + }); + + cache.set("a", 1); + cache.set("b", 2); + cache.set("c", 3); + + assertEquals(cache.has("a"), false); + assertEquals(cache.get("b"), 2); + assertEquals(cache.get("c"), 3); + assertEquals(ejected, [["a", 1, "evicted"]]); +}); + +Deno.test("Cache get() promotes to MRU", () => { + const cache = new Cache({ maxSize: 2 }); + + cache.set("a", 1); + cache.set("b", 2); + cache.get("a"); + cache.set("c", 3); + + assertEquals(cache.has("a"), true); + assertEquals(cache.has("b"), false); +}); + +Deno.test("Cache peek() does not promote", () => { + const cache = new Cache({ maxSize: 2 }); + + cache.set("a", 1); + cache.set("b", 2); + cache.peek("a"); + cache.set("c", 3); + + assertEquals(cache.has("a"), false); + assertEquals(cache.has("b"), true); +}); + +Deno.test("Cache set() overwrites existing key without onEject and promotes to MRU", () => { + const ejected: string[] = []; + const cache = new Cache({ + maxSize: 2, + onEject: (k) => ejected.push(k), + }); + + cache.set("a", 1); + cache.set("b", 2); + cache.set("a", 99); 
+ cache.set("c", 3); + + assertEquals(cache.get("a"), 99); + assertEquals(cache.has("b"), false); + assertEquals(ejected, ["b"]); +}); + +Deno.test("Cache set() returns this for chaining", () => { + const cache = new Cache({ maxSize: 10 }); + const result = cache.set("a", 1).set("b", 2); + assert(result === cache); +}); + +Deno.test("Cache with maxSize=1 always holds only one entry", () => { + const cache = new Cache({ maxSize: 1 }); + + cache.set("a", 1); + cache.set("b", 2); + assertEquals(cache.size, 1); + assertEquals(cache.has("a"), false); + assertEquals(cache.get("b"), 2); +}); + +// ─── TTL ───────────────────────────────────────────── + +Deno.test("Cache with TTL expires entries on get()", () => { + using time = new FakeTime(0); + using cache = new Cache({ ttl: 100 }); + + cache.set("a", 1); + assertEquals(cache.get("a"), 1); + + time.tick(101); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache TTL timer fires and removes entries via onEject", () => { + using time = new FakeTime(0); + const ejected: [string, number, CacheEjectionReason][] = []; + using cache = new Cache({ + ttl: 100, + onEject: (k, v, r) => ejected.push([k, v, r]), + }); + + cache.set("a", 1); + time.tick(101); + assertEquals(cache.size, 0); + assertEquals(ejected, [["a", 1, "expired"]]); +}); + +Deno.test("Cache per-entry TTL override", () => { + using time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("short", 1, { ttl: 50 }); + cache.set("normal", 2); + + time.tick(51); + assertEquals(cache.get("short"), undefined); + assertEquals(cache.get("normal"), 2); +}); + +Deno.test("Cache per-entry TTL on non-TTL cache creates a timer", () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100 }); + + cache.set("timed", 1, { ttl: 100 }); + cache.set("forever", 2); + + time.tick(101); + assertEquals(cache.get("timed"), undefined); + assertEquals(cache.get("forever"), 2); +}); + +Deno.test("Cache set() with ttl: 0 expires 
immediately", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1, { ttl: 0 }); + assertEquals(cache.get("a"), undefined); + assertEquals(cache.stats.expirations, 1); +}); + +Deno.test("Cache sliding expiration resets TTL on get()", () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 100, + slidingExpiration: true, + }); + + cache.set("a", 1); + time.tick(80); + assertEquals(cache.get("a"), 1); + + time.tick(80); + assertEquals(cache.get("a"), 1); + + time.tick(101); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache sliding expiration not reset by peek()", () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 100, + slidingExpiration: true, + }); + + cache.set("a", 1); + time.tick(80); + cache.peek("a"); + + time.tick(21); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache absoluteExpiration caps sliding window", () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 100, + slidingExpiration: true, + }); + + cache.set("a", 1, { absoluteExpiration: 150 }); + + time.tick(80); + assertEquals(cache.get("a"), 1); + time.tick(60); + assertEquals(cache.get("a"), 1); + + time.tick(20); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache absoluteExpiration clamps initial deadline without slidingExpiration", () => { + using time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1, { absoluteExpiration: 200 }); + + time.tick(201); + assertEquals(cache.has("a"), false); +}); + +Deno.test("Cache multiple TTL entries expire in correct order", () => { + using time = new FakeTime(0); + const ejected: string[] = []; + using cache = new Cache({ + ttl: 1000, + onEject: (k, _v, r) => { + if (r === "expired") ejected.push(k); + }, + }); + + cache.set("a", 1, { ttl: 100 }); + cache.set("b", 2, { ttl: 200 }); + cache.set("c", 3, { ttl: 300 }); + + time.tick(101); + assertEquals(ejected, ["a"]); 
+
+  time.tick(100);
+  assertEquals(ejected, ["a", "b"]);
+
+  time.tick(100);
+  assertEquals(ejected, ["a", "b", "c"]);
+});
+
+// ─── LRU + TTL combined ─────────────────────────────
+
+Deno.test("Cache LRU + TTL: eviction cleans up heap entry", () => {
+  using time = new FakeTime(0);
+  const ejected: [string, number, CacheEjectionReason][] = [];
+  using cache = new Cache<string, number>({
+    maxSize: 2,
+    ttl: 1000,
+    onEject: (k, v, r) => ejected.push([k, v, r]),
+  });
+
+  cache.set("a", 1);
+  cache.set("b", 2);
+  cache.set("c", 3);
+
+  assertEquals(ejected, [["a", 1, "evicted"]]);
+
+  time.tick(1001);
+  const expiredKeys = ejected.filter(([, , r]) => r === "expired");
+  assert(
+    !expiredKeys.some(
+      ([k]) => k === "a",
+    ),
+  );
+});
+
+Deno.test("Cache LRU + TTL: expired entry not returned even before timer fires", () => {
+  using time = new FakeTime(0);
+  using cache = new Cache({ maxSize: 100, ttl: 100 });
+
+  cache.set("a", 1);
+  time.tick(101);
+
+  assertEquals(cache.get("a"), undefined);
+  assertEquals(cache.has("a"), false);
+  assertEquals(cache.peek("a"), undefined);
+});
+
+// ─── onEject ─────────────────────────────────────────
+
+Deno.test("Cache onEject fires with correct reason for delete()", () => {
+  const ejected: CacheEjectionReason[] = [];
+  const cache = new Cache({
+    maxSize: 10,
+    onEject: (_k, _v, r) => ejected.push(r),
+  });
+  cache.set("a", 1);
+  cache.delete("a");
+  assertEquals(ejected, ["deleted"]);
+});
+
+Deno.test("Cache onEject fires with correct reason for clear()", () => {
+  const ejected: CacheEjectionReason[] = [];
+  const cache = new Cache({
+    maxSize: 10,
+    onEject: (_k, _v, r) => ejected.push(r),
+  });
+  cache.set("a", 1);
+  cache.set("b", 2);
+  cache.clear();
+  assertEquals(ejected, ["cleared", "cleared"]);
+});
+
+Deno.test("Cache is not re-entrant during onEject", () => {
+  const cache = new Cache({
+    maxSize: 10,
+    onEject: () => {
+      assertThrows(
+        () => cache.set("x", 1),
+        TypeError,
+        "not re-entrant",
+      );
+      assertThrows(
() => cache.delete("x"), + TypeError, + "not re-entrant", + ); + assertThrows( + () => cache.clear(), + TypeError, + "not re-entrant", + ); + }, + }); + cache.set("a", 1); + cache.delete("a"); +}); + +Deno.test("Cache onEject throwing during timer does not break future expirations", () => { + using time = new FakeTime(0); + let throwCount = 0; + const expired: string[] = []; + using cache = new Cache({ + ttl: 1000, + onEject: (k, _v, r) => { + if (r === "expired") { + expired.push(k); + if (k === "a") { + throwCount++; + throw new Error("onEject error"); + } + } + }, + }); + + cache.set("a", 1, { ttl: 100 }); + cache.set("b", 2, { ttl: 100 }); + cache.set("c", 3, { ttl: 200 }); + + try { + time.tick(101); + } catch { + // expected throw from onEject + } + assertEquals(throwCount, 1); + assert(expired.includes("a")); + assert(expired.includes("b")); + + time.tick(100); + assert(expired.includes("c")); +}); + +Deno.test("Cache onEject errors from multiple entries are all surfaced via AggregateError", () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 100, + onEject: (k, _v, r) => { + if (r === "expired") throw new Error(`fail:${k}`); + }, + }); + + cache.set("a", 1); + cache.set("b", 2); + + let caught: unknown; + try { + time.tick(101); + } catch (e) { + caught = e; + } + assert(caught instanceof AggregateError); + assertEquals(caught.errors.length, 2); +}); + +Deno.test("Cache onEject single error is thrown directly, not wrapped", () => { + using time = new FakeTime(0); + const err = new Error("solo"); + using cache = new Cache({ + ttl: 100, + onEject: (_k, _v, r) => { + if (r === "expired") throw err; + }, + }); + + cache.set("a", 1); + + let caught: unknown; + try { + time.tick(101); + } catch (e) { + caught = e; + } + assert(caught === err); +}); + +Deno.test("Cache clear() onEject errors surfaced via AggregateError", () => { + const cache = new Cache({ + maxSize: 10, + onEject: (k) => { + throw new Error(`fail:${k}`); + }, + }); + + 
cache.set("a", 1); + cache.set("b", 2); + + let caught: unknown; + try { + cache.clear(); + } catch (e) { + caught = e; + } + assert(caught instanceof AggregateError); + assertEquals(caught.errors.length, 2); +}); + +// ─── Stats ─────────────────────────────────────────── + +Deno.test("Cache stats track hits, misses, sets, deletes, evictions", () => { + const cache = new Cache({ maxSize: 2 }); + + cache.set("a", 1); + cache.set("b", 2); + cache.get("a"); + cache.get("missing"); + cache.set("c", 3); + cache.delete("c"); + + const s = cache.stats; + assertEquals(s.hits, 1); + assertEquals(s.misses, 1); + assertEquals(s.sets, 3); + assertEquals(s.deletes, 1); + assertEquals(s.evictions, 1); +}); + +Deno.test("Cache stats track expirations", () => { + using time = new FakeTime(0); + using cache = new Cache({ ttl: 100 }); + + cache.set("a", 1); + time.tick(101); + cache.get("a"); + + assertEquals(cache.stats.expirations, 1); + assertEquals(cache.stats.misses, 1); +}); + +Deno.test("Cache resetStats() zeros all counters", () => { + const cache = new Cache({ maxSize: 10 }); + cache.set("a", 1); + cache.get("a"); + cache.resetStats(); + const s = cache.stats; + assertEquals(s.hits, 0); + assertEquals(s.misses, 0); + assertEquals(s.sets, 0); +}); + +Deno.test("Cache stats returns a snapshot that does not change", () => { + const cache = new Cache({ maxSize: 10 }); + cache.set("a", 1); + cache.get("a"); + const s1 = cache.stats; + cache.get("a"); + const s2 = cache.stats; + assertEquals(s1.hits, 1); + assertEquals(s2.hits, 2); +}); + +// ─── Iteration ─────────────────────────────────────── + +Deno.test("Cache keys/values/entries/forEach/Symbol.iterator", () => { + const cache = new Cache({ maxSize: 10 }); + cache.set("a", 1); + cache.set("b", 2); + + assertEquals([...cache.keys()], ["a", "b"]); + assertEquals([...cache.values()], [1, 2]); + assertEquals([...cache.entries()], [["a", 1], ["b", 2]]); + assertEquals([...cache], [["a", 1], ["b", 2]]); + + const keys: string[] = 
[]; + cache.forEach((_v, k) => keys.push(k)); + assertEquals(keys, ["a", "b"]); +}); + +Deno.test("Cache iteration skips expired entries across all iterators", () => { + using time = new FakeTime(0); + using cache = new Cache({ ttl: 100 }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 50 }); + + time.tick(51); + assertEquals([...cache.keys()], ["a"]); + assertEquals([...cache.values()], [1]); + assertEquals([...cache.entries()], [["a", 1]]); + assertEquals([...cache], [["a", 1]]); + + const result: [string, number][] = []; + cache.forEach((v, k) => result.push([k, v])); + assertEquals(result, [["a", 1]]); +}); + +// ─── Symbol.dispose ────────────────────────────────── + +Deno.test("Cache Symbol.dispose clears entries and timers", () => { + using time = new FakeTime(0); + let disposed: Cache; + { + using cache = new Cache({ maxSize: 10, ttl: 1000 }); + cache.set("a", 1); + disposed = cache; + } + assertEquals(disposed.size, 0); + void time; +}); + +// ─── CacheOptions type safety (compile-time checks) ─ + +Deno.test("Cache type system rejects invalid option combinations", () => { + const refresh = (_k: string, _v: number) => Promise.resolve(1); + + new Cache(); + new Cache({}); + new Cache({ maxSize: 5 }); + new Cache({ ttl: 100 }); + new Cache({ maxSize: 5, ttl: 100 }); + new Cache({ ttl: 100, slidingExpiration: true }); + new Cache({ ttl: 1000, staleTtl: 500, refresh }); + new Cache({ + ttl: 1000, + staleTtl: 500, + refresh, + onRefreshError: () => {}, + }); + + const _opts: CacheOptions = { maxSize: 50, ttl: 1000 }; + void _opts; + const _swr: CacheOptionsSwr = { + ttl: 1000, + staleTtl: 500, + refresh, + }; + void _swr; +}); + +// ─── Edge cases ────────────────────────────────────── + +Deno.test("Cache clear() on empty cache is a no-op", () => { + const cache = new Cache({ maxSize: 10 }); + cache.clear(); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache clear() cancels timers and allows new entries", () => { + using time = new FakeTime(0); + using 
cache = new Cache({ ttl: 100 }); + + cache.set("a", 1); + cache.clear(); + + cache.set("b", 2); + time.tick(101); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache set() with invalid per-entry TTL throws", () => { + const cache = new Cache({ maxSize: 10 }); + assertThrows(() => cache.set("a", 1, { ttl: -1 }), RangeError); + assertThrows(() => cache.set("a", 1, { ttl: NaN }), RangeError); + assertThrows(() => cache.set("a", 1, { ttl: Infinity }), RangeError); +}); + +Deno.test("Cache set() with invalid absoluteExpiration throws", () => { + const cache = new Cache({ maxSize: 10 }); + assertThrows( + () => cache.set("a", 1, { absoluteExpiration: -1 }), + RangeError, + ); + assertThrows( + () => cache.set("a", 1, { absoluteExpiration: NaN }), + RangeError, + ); + assertThrows( + () => cache.set("a", 1, { absoluteExpiration: Infinity }), + RangeError, + ); +}); + +// ─── has()/peek() eagerly remove expired entries ───── + +Deno.test("Cache has() eagerly removes expired entry and fires onEject", () => { + using time = new FakeTime(0); + const ejected: [string, CacheEjectionReason][] = []; + using cache = new Cache({ + ttl: 100, + onEject: (k, _v, r) => ejected.push([k, r]), + }); + + cache.set("a", 1); + time.tick(101); + + assertEquals(cache.has("a"), false); + assertEquals(cache.size, 0); + assertEquals(ejected, [["a", "expired"]]); +}); + +Deno.test("Cache peek() eagerly removes expired entry and fires onEject", () => { + using time = new FakeTime(0); + const ejected: [string, CacheEjectionReason][] = []; + using cache = new Cache({ + ttl: 100, + onEject: (k, _v, r) => ejected.push([k, r]), + }); + + cache.set("a", 1); + time.tick(101); + + assertEquals(cache.peek("a"), undefined); + assertEquals(cache.size, 0); + assertEquals(ejected, [["a", "expired"]]); +}); + +// ─── has()/peek() re-entrancy guard during onEject ─── + +Deno.test("Cache has() during onEject with expired sibling preserves re-entrancy guard", () => { + using time = new FakeTime(0); + let 
setThrew = false; + + using cache = new Cache({ + ttl: 1000, + onEject: (k) => { + if (k === "a") { + assertEquals(cache.has("b"), false); + try { + cache.set("x", 999); + } catch { + setThrew = true; + } + } + }, + }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 50 }); + time.tick(51); + + cache.delete("a"); + assertEquals(setThrew, true); + assertEquals(cache.has("x"), false); +}); + +Deno.test("Cache peek() during onEject with expired sibling preserves re-entrancy guard", () => { + using time = new FakeTime(0); + let setThrew = false; + + using cache = new Cache({ + ttl: 1000, + onEject: (k) => { + if (k === "a") { + assertEquals(cache.peek("b"), undefined); + try { + cache.set("x", 999); + } catch { + setThrew = true; + } + } + }, + }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 50 }); + time.tick(51); + + cache.delete("a"); + assertEquals(setThrew, true); + assertEquals(cache.has("x"), false); +}); + +// ─── getOrLoad ─────────────────────────────────────── + +Deno.test("Cache getOrLoad() loads and caches on miss", async () => { + const cache = new Cache({ maxSize: 100 }); + const value = await cache.getOrLoad("a", () => Promise.resolve(42)); + assertEquals(value, 42); + assertEquals(cache.get("a"), 42); +}); + +Deno.test("Cache getOrLoad() returns cached value on hit", async () => { + const cache = new Cache({ maxSize: 100 }); + cache.set("a", 1); + let called = false; + const value = await cache.getOrLoad("a", () => { + called = true; + return Promise.resolve(99); + }); + assertEquals(value, 1); + assertEquals(called, false); +}); + +Deno.test("Cache getOrLoad() deduplicates concurrent loads", async () => { + const cache = new Cache({ maxSize: 100 }); + let loadCount = 0; + const loader = () => { + loadCount++; + return Promise.resolve(42); + }; + + const [a, b, c] = await Promise.all([ + cache.getOrLoad("x", loader), + cache.getOrLoad("x", loader), + cache.getOrLoad("x", loader), + ]); + + assertEquals(a, 42); + assertEquals(b, 42); + 
assertEquals(c, 42); + assertEquals(loadCount, 1); + assertEquals(cache.get("x"), 42); +}); + +Deno.test("Cache getOrLoad() propagates loader errors to all callers", async () => { + const cache = new Cache({ maxSize: 100 }); + const error = new Error("boom"); + + const results = await Promise.allSettled([ + cache.getOrLoad("a", () => Promise.reject(error)), + cache.getOrLoad("a", () => Promise.reject(error)), + ]); + + assertEquals(results[0]!.status, "rejected"); + assertEquals(results[1]!.status, "rejected"); + assertEquals(cache.has("a"), false); +}); + +Deno.test("Cache getOrLoad() clears in-flight entry on error so retry works", async () => { + const cache = new Cache({ maxSize: 100 }); + let attempt = 0; + + await assertRejects( + () => + cache.getOrLoad("a", () => { + attempt++; + return Promise.reject(new Error("fail")); + }), + Error, + ); + assertEquals(attempt, 1); + + const value = await cache.getOrLoad("a", () => { + attempt++; + return Promise.resolve(99); + }); + assertEquals(attempt, 2); + assertEquals(value, 99); +}); + +Deno.test("Cache getOrLoad() loaded value respects TTL", async () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100, ttl: 100 }); + + await cache.getOrLoad("a", () => Promise.resolve(42)); + assertEquals(cache.get("a"), 42); + + time.tick(101); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache getOrLoad() updates stats correctly", async () => { + const cache = new Cache({ maxSize: 100 }); + + await cache.getOrLoad("a", () => Promise.resolve(1)); + assertEquals(cache.stats.misses, 1); + assertEquals(cache.stats.sets, 1); + + await cache.getOrLoad("a", () => Promise.resolve(99)); + assertEquals(cache.stats.hits, 1); + assertEquals(cache.stats.sets, 1); +}); + +Deno.test("Cache getOrLoad() returns cached undefined value without reloading", async () => { + const cache = new Cache({ maxSize: 100 }); + cache.set("a", undefined); + + let loadCount = 0; + const value = await 
cache.getOrLoad("a", () => { + loadCount++; + return Promise.resolve(undefined); + }); + assertEquals(value, undefined); + assertEquals(loadCount, 0); + assertEquals(cache.stats.hits, 1); +}); + +Deno.test("Cache getOrLoad() loader does not overwrite explicit set() on same key", async () => { + const cache = new Cache({ maxSize: 100 }); + + let resolveLoader!: (v: number) => void; + const loadPromise = cache.getOrLoad( + "a", + () => new Promise((r) => resolveLoader = r), + ); + + cache.set("a", 42); + + resolveLoader(99); + await loadPromise; + assertEquals(cache.get("a"), 42); +}); + +Deno.test("Cache getOrLoad() loader does not re-populate after delete()", async () => { + const cache = new Cache({ maxSize: 100 }); + + let resolveLoader!: (v: number) => void; + const loadPromise = cache.getOrLoad( + "a", + () => new Promise((r) => resolveLoader = r), + ); + + cache.delete("a"); + + resolveLoader(42); + await loadPromise; + assertEquals(cache.has("a"), true); + assertEquals(cache.get("a"), 42); +}); + +Deno.test("Cache getOrLoad() loader does not re-populate after clear()", async () => { + const cache = new Cache({ maxSize: 100 }); + + let resolveLoader!: (v: number) => void; + const loadPromise = cache.getOrLoad( + "a", + () => new Promise((r) => resolveLoader = r), + ); + + cache.clear(); + assertEquals(cache.size, 0); + + resolveLoader(42); + await loadPromise; + assertEquals(cache.size, 0); + assertEquals(cache.has("a"), false); +}); + +// ─── Stale-while-revalidate (SWR) ─────────────────── + +Deno.test("Cache SWR constructor throws on invalid staleTtl", () => { + const refresh = (_k: string, _v: number) => Promise.resolve(1); + assertThrows( + () => new Cache({ ttl: 1000, staleTtl: -1, refresh }), + RangeError, + "staleTtl must be a finite non-negative number", + ); + assertThrows( + () => new Cache({ ttl: 1000, staleTtl: NaN, refresh }), + RangeError, + ); + assertThrows( + () => new Cache({ ttl: 1000, staleTtl: Infinity, refresh }), + RangeError, + ); +}); 
+ +Deno.test("Cache SWR constructor throws when staleTtl >= ttl", () => { + const refresh = (_k: string, _v: number) => Promise.resolve(1); + assertThrows( + () => new Cache({ ttl: 1000, staleTtl: 1000, refresh }), + RangeError, + "staleTtl must be less than ttl", + ); + assertThrows( + () => new Cache({ ttl: 1000, staleTtl: 2000, refresh }), + RangeError, + "staleTtl must be less than ttl", + ); +}); + +Deno.test("Cache SWR get() in fresh window counts as hit", async () => { + using _time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1); + assertEquals(cache.get("a"), 1); + assertEquals(cache.stats.hits, 1); + assertEquals(cache.stats.staleHits, 0); + await Promise.resolve(); +}); + +Deno.test("Cache SWR get() in stale window returns stale value and triggers refresh", async () => { + using time = new FakeTime(0); + let refreshCalls = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: (_k, _stale) => { + refreshCalls++; + return Promise.resolve(99); + }, + }); + + cache.set("a", 1); + time.tick(501); + const value = cache.get("a"); + assertEquals(value, 1); + assertEquals(cache.stats.staleHits, 1); + assertEquals(cache.stats.hits, 0); + assertEquals(refreshCalls, 1); + + await Promise.resolve(); + assertEquals(cache.get("a"), 99); + assertEquals(cache.stats.refreshes, 1); + assertEquals(cache.stats.refreshErrors, 0); +}); + +Deno.test("Cache SWR get() after hard TTL returns undefined", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1); + time.tick(1001); + assertEquals(cache.get("a"), undefined); + assertEquals(cache.stats.misses, 1); + assertEquals(cache.stats.expirations, 1); + await Promise.resolve(); +}); + +Deno.test("Cache SWR concurrent stale reads trigger only one refresh", async () => { + using time = new FakeTime(0); + let 
refreshCalls = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => { + refreshCalls++; + return Promise.resolve(99); + }, + }); + + cache.set("a", 1); + time.tick(501); + + assertEquals(cache.get("a"), 1); + assertEquals(cache.get("a"), 1); + assertEquals(cache.get("a"), 1); + + assertEquals(refreshCalls, 1); + assertEquals(cache.stats.staleHits, 1); + assertEquals(cache.stats.hits, 2); + await Promise.resolve(); +}); + +Deno.test("Cache SWR refresh failure retains stale value and calls onRefreshError", async () => { + using time = new FakeTime(0); + const errors: [string, unknown][] = []; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.reject(new Error("network")), + onRefreshError: (k, e) => errors.push([k, e]), + }); + + cache.set("a", 1); + time.tick(501); + assertEquals(cache.get("a"), 1); + + await Promise.resolve(); + + assertEquals(errors.length, 1); + assertEquals(errors[0]![0], "a"); + assert(errors[0]![1] instanceof Error); + + assertEquals(cache.get("a"), 1); + assertEquals(cache.stats.refreshErrors, 1); +}); + +Deno.test("Cache SWR refresh resets both deadlines", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(42), + }); + + cache.set("a", 1); + time.tick(501); + cache.get("a"); + + await Promise.resolve(); + assertEquals(cache.get("a"), 42); + + time.tick(499); + assertEquals(cache.get("a"), 42); + assertEquals(cache.stats.staleHits, 1); + assertEquals(cache.stats.hits, 2); + + time.tick(2); + assertEquals(cache.get("a"), 42); + assertEquals(cache.stats.staleHits, 2); +}); + +Deno.test("Cache SWR per-entry staleTtl override", async () => { + using time = new FakeTime(0); + let refreshCalls = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => { + refreshCalls++; + return Promise.resolve(99); + }, + }); + + cache.set("a", 1, { staleTtl: 100 }); + time.tick(101); + 
assertEquals(cache.get("a"), 1); + assertEquals(cache.stats.staleHits, 1); + assertEquals(refreshCalls, 1); + await Promise.resolve(); +}); + +Deno.test("Cache SWR per-entry staleTtl with invalid value throws", () => { + const cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(1), + }); + assertThrows( + () => cache.set("a", 1, { staleTtl: -1 }), + RangeError, + "staleTtl must be a finite non-negative number", + ); + assertThrows( + () => cache.set("a", 1, { staleTtl: NaN }), + RangeError, + ); + cache[Symbol.dispose](); +}); + +Deno.test("Cache SWR stats across success and failure refreshes", async () => { + using time = new FakeTime(0); + let callCount = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => { + callCount++; + if (callCount === 1) return Promise.resolve(2); + return Promise.reject(new Error("fail")); + }, + onRefreshError: () => {}, + }); + + cache.set("a", 1); + time.tick(501); + + cache.get("a"); + await Promise.resolve(); + + assertEquals(cache.stats.staleHits, 1); + assertEquals(cache.stats.refreshes, 1); + assertEquals(cache.stats.refreshErrors, 0); + + time.tick(501); + cache.get("a"); + await Promise.resolve(); + + assertEquals(cache.stats.staleHits, 2); + assertEquals(cache.stats.refreshes, 2); + assertEquals(cache.stats.refreshErrors, 1); +}); + +Deno.test("Cache SWR refresh does not update deleted key", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1); + time.tick(501); + cache.get("a"); + cache.delete("a"); + + await Promise.resolve(); + assertEquals(cache.has("a"), false); +}); + +Deno.test("Cache SWR refresh does not overwrite explicit set() between stale-read and completion", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(200), + }); + + cache.set("a", 1); + 
time.tick(501); + cache.get("a"); + cache.set("a", 999); + + await Promise.resolve(); + assertEquals(cache.peek("a"), 999); +}); + +Deno.test("Cache SWR clear() stops pending refreshes from re-inserting", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1); + time.tick(501); + cache.get("a"); + cache.clear(); + + await Promise.resolve(); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache SWR with LRU eviction", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + maxSize: 2, + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1); + cache.set("b", 2); + time.tick(501); + cache.get("a"); + cache.set("c", 3); + + assertEquals(cache.has("b"), false); + assertEquals(cache.has("a"), true); + assertEquals(cache.has("c"), true); + await Promise.resolve(); +}); + +Deno.test("Cache SWR timer-based expiration still fires", async () => { + using time = new FakeTime(0); + const ejected: string[] = []; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.resolve(99), + onEject: (k, _v, r) => { + if (r === "expired") ejected.push(k); + }, + }); + + cache.set("a", 1); + time.tick(1001); + assertEquals(ejected, ["a"]); + assertEquals(cache.size, 0); + await Promise.resolve(); +}); + +Deno.test("Cache SWR with slidingExpiration extends hard deadline but not soft deadline", async () => { + using time = new FakeTime(0); + let refreshCalls = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + slidingExpiration: true, + refresh: () => { + refreshCalls++; + return Promise.resolve(99); + }, + }); + + cache.set("a", 1); + + // At t=400 (before staleTtl=500), get() is a fresh hit and resets hard TTL + time.tick(400); + assertEquals(cache.get("a"), 1); + assertEquals(cache.stats.hits, 1); + assertEquals(refreshCalls, 0); + + // At t=600 (past original staleTtl=500), 
slidingExpiration does NOT + // reset the soft deadline, so this is a stale hit + time.tick(200); + assertEquals(cache.get("a"), 1); + assertEquals(cache.stats.staleHits, 1); + assertEquals(refreshCalls, 1); + + await Promise.resolve(); + // Refresh completed — set() creates a fresh entry with new deadlines + assertEquals(cache.get("a"), 99); + assertEquals(cache.stats.hits, 2); + + await Promise.resolve(); +}); + +Deno.test("Cache SWR refresh failure without onRefreshError only increments stats", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 500, + refresh: () => Promise.reject(new Error("silent")), + }); + + cache.set("a", 1); + time.tick(501); + cache.get("a"); + + await Promise.resolve(); + + assertEquals(cache.stats.refreshErrors, 1); + assertEquals(cache.get("a"), 1); +}); From 0200ad54731d9004c2d9171c1a69caf91a770271 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Thu, 2 Apr 2026 16:11:22 +0200 Subject: [PATCH 06/12] add Cache --- cache/cache.ts | 79 +++++++-- cache/cache_test.ts | 404 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 464 insertions(+), 19 deletions(-) diff --git a/cache/cache.ts b/cache/cache.ts index 03175c0e5bd1..8695aaae6a83 100644 --- a/cache/cache.ts +++ b/cache/cache.ts @@ -340,7 +340,7 @@ export class Cache implements MemoizationCache { #maxSize: number | undefined; #defaultTtl: number; #slidingExpiration: boolean; - #ejecting = false; + #ejectingDepth = 0; #eject: ((key: K, value: V, reason: CacheEjectionReason) => void) | undefined; #heap: IndexedHeap | undefined; @@ -533,17 +533,17 @@ export class Cache implements MemoizationCache { this.#data.delete(key); this.#heap?.delete(key); if (this.#eject) { - this.#ejecting = true; + this.#ejectingDepth++; try { this.#eject(key, entry.value, reason); } finally { - this.#ejecting = false; + this.#ejectingDepth--; } } } #assertNotEjecting(method: string): void { - if (this.#ejecting) { + if (this.#ejectingDepth > 0) { 
throw new TypeError( `Cannot ${method} Cache: cache is not re-entrant during onEject callbacks`, ); @@ -588,13 +588,13 @@ export class Cache implements MemoizationCache { this.#data.delete(top.key); this.#stats.expirations++; if (this.#eject) { - this.#ejecting = true; + this.#ejectingDepth++; try { this.#eject(top.key, entry.value, "expired"); } catch (e) { errors.push(e); } finally { - this.#ejecting = false; + this.#ejectingDepth--; } } } @@ -637,19 +637,44 @@ export class Cache implements MemoizationCache { #backgroundRefresh(key: K, staleValue: V): void { (this.#refreshing ??= new Set()).add(key); this.#stats.refreshes++; - const gen = this.#data.get(key)?.generation; - this.#refresh!(key, staleValue).then( + const entry = this.#data.get(key)!; + const gen = entry.generation; + const entryTtl = entry.entryTtl; + const entryStaleTtl = entry.entryStaleTtl; + const absDeadline = entry.absoluteDeadline; + let refreshResult: Promise; + try { + refreshResult = this.#refresh!(key, staleValue); + } catch (error) { + this.#refreshing?.delete(key); + this.#stats.refreshErrors++; + try { + this.#onRefreshError?.(key, error); + } catch { /* contained */ } + return; + } + refreshResult.then( (newValue) => { this.#refreshing?.delete(key); const current = this.#data.get(key); if (current !== undefined && current.generation === gen) { - this.set(key, newValue); + const options: CacheSetOptions = {}; + if (entryTtl !== this.#defaultTtl) options.ttl = entryTtl; + if (entryStaleTtl !== this.#defaultStaleTtl) { + options.staleTtl = entryStaleTtl; + } + if (absDeadline !== Infinity) { + options.absoluteExpiration = Math.max(0, absDeadline - Date.now()); + } + this.set(key, newValue, options); } }, (error) => { this.#refreshing?.delete(key); this.#stats.refreshErrors++; - this.#onRefreshError?.(key, error); + try { + this.#onRefreshError?.(key, error); + } catch { /* contained */ } }, ); } @@ -735,7 +760,7 @@ export class Cache implements MemoizationCache { const entry = 
this.#data.get(key); if (entry === undefined) return undefined; if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { - if (!this.#ejecting) { + if (this.#ejectingDepth === 0) { this.#stats.expirations++; this.#removeEntry(key, entry, "expired"); } @@ -766,7 +791,7 @@ export class Cache implements MemoizationCache { const entry = this.#data.get(key); if (entry === undefined) return false; if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { - if (!this.#ejecting) { + if (this.#ejectingDepth === 0) { this.#stats.expirations++; this.#removeEntry(key, entry, "expired"); } @@ -865,6 +890,9 @@ export class Cache implements MemoizationCache { * @param key The key to look up or load. * @param loader Called when the key is not in the cache. Receives the key * and must return a promise resolving to the value. + * @param options Per-entry overrides applied when the loaded value is + * cached. Ignored on cache hits. When concurrent callers de-duplicate, + * only the first caller's `options` are used. * @returns A promise resolving to the cached or freshly loaded value. 
* * @example Basic load-through @@ -896,8 +924,26 @@ export class Cache implements MemoizationCache { * assertEquals(b, 1); * assertEquals(loadCount, 1); * ``` + * + * @example Per-entry TTL override + * ```ts + * import { Cache } from "@std/cache/cache"; + * import { assertEquals } from "@std/assert"; + * import { FakeTime } from "@std/testing/time"; + * + * using time = new FakeTime(0); + * using cache = new Cache({ maxSize: 100, ttl: 10000 }); + * await cache.getOrLoad("a", () => Promise.resolve(1), { ttl: 50 }); + * + * time.tick(51); + * assertEquals(cache.get("a"), undefined); + * ``` */ - getOrLoad(key: K, loader: (key: K) => Promise): Promise { + getOrLoad( + key: K, + loader: (key: K) => Promise, + options?: CacheSetOptions, + ): Promise { const cached = this.get(key); if (cached !== undefined || this.has(key)) { return Promise.resolve(cached as V); @@ -914,7 +960,7 @@ export class Cache implements MemoizationCache { inFlight.delete(key); const current = this.#data.get(key); if (current === undefined || current.generation <= gen) { - this.set(key, value); + this.set(key, value, options); } return value; }, @@ -951,6 +997,7 @@ export class Cache implements MemoizationCache { */ delete(key: K): boolean { this.#assertNotEjecting("delete entry in"); + this.#inFlight?.delete(key); const entry = this.#data.get(key); if (entry === undefined) return false; this.#stats.deletes++; @@ -994,7 +1041,7 @@ export class Cache implements MemoizationCache { } const entries = [...this.#data.entries()]; this.#data.clear(); - this.#ejecting = true; + this.#ejectingDepth++; const errors: unknown[] = []; try { for (const [key, entry] of entries) { @@ -1005,7 +1052,7 @@ export class Cache implements MemoizationCache { } } } finally { - this.#ejecting = false; + this.#ejectingDepth--; } if (errors.length === 1) throw errors[0]; if (errors.length > 1) throw new AggregateError(errors); diff --git a/cache/cache_test.ts b/cache/cache_test.ts index 1e6d4687931f..b788e08a2a10 100644 --- 
a/cache/cache_test.ts +++ b/cache/cache_test.ts @@ -873,7 +873,7 @@ Deno.test("Cache getOrLoad() loader does not overwrite explicit set() on same ke assertEquals(cache.get("a"), 42); }); -Deno.test("Cache getOrLoad() loader does not re-populate after delete()", async () => { +Deno.test("Cache getOrLoad() loader result discarded after delete() on not-yet-cached key", async () => { const cache = new Cache({ maxSize: 100 }); let resolveLoader!: (v: number) => void; @@ -886,8 +886,8 @@ Deno.test("Cache getOrLoad() loader does not re-populate after delete()", async resolveLoader(42); await loadPromise; - assertEquals(cache.has("a"), true); - assertEquals(cache.get("a"), 42); + assertEquals(cache.has("a"), false); + assertEquals(cache.size, 0); }); Deno.test("Cache getOrLoad() loader does not re-populate after clear()", async () => { @@ -1287,3 +1287,401 @@ Deno.test("Cache SWR refresh failure without onRefreshError only increments stat assertEquals(cache.stats.refreshErrors, 1); assertEquals(cache.get("a"), 1); }); + +// ─── delete() cancels in-flight getOrLoad ──────────── + +Deno.test("Cache delete() cancels in-flight getOrLoad loader", async () => { + const cache = new Cache({ maxSize: 100 }); + + let resolveLoader!: (v: number) => void; + const loadPromise = cache.getOrLoad( + "a", + () => new Promise((r) => resolveLoader = r), + ); + + cache.delete("a"); + + resolveLoader(42); + await loadPromise; + assertEquals(cache.has("a"), false); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache delete() on in-flight key allows fresh getOrLoad", async () => { + const cache = new Cache({ maxSize: 100 }); + + let resolveFirst!: (v: number) => void; + const first = cache.getOrLoad( + "a", + () => new Promise((r) => resolveFirst = r), + ); + + cache.delete("a"); + + const second = await cache.getOrLoad("a", () => Promise.resolve(99)); + assertEquals(second, 99); + assertEquals(cache.get("a"), 99); + + resolveFirst(1); + await first; + assertEquals(cache.get("a"), 99); +}); 
+ +// ─── SWR refresh preserves per-entry overrides ─────── + +Deno.test("Cache SWR refresh preserves per-entry ttl override", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 10000, + staleTtl: 100, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1, { ttl: 500 }); + + time.tick(101); + cache.get("a"); + await Promise.resolve(); + + assertEquals(cache.get("a"), 99); + + time.tick(501); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache SWR refresh preserves per-entry staleTtl override", async () => { + using time = new FakeTime(0); + let refreshCount = 0; + using cache = new Cache({ + ttl: 10000, + staleTtl: 5000, + refresh: () => { + refreshCount++; + return Promise.resolve(refreshCount * 100); + }, + }); + + cache.set("a", 1, { staleTtl: 100 }); + + time.tick(101); + cache.get("a"); + await Promise.resolve(); + assertEquals(refreshCount, 1); + assertEquals(cache.get("a"), 100); + + time.tick(101); + cache.get("a"); + await Promise.resolve(); + assertEquals(refreshCount, 2); +}); + +// ─── Synchronous refresh() throw ───────────────────── + +Deno.test("Cache SWR synchronous refresh() throw does not escape get()", async () => { + using _time = new FakeTime(0); + const errors: unknown[] = []; + using cache = new Cache({ + ttl: 1000, + staleTtl: 0, + refresh: () => { + throw new Error("sync boom"); + }, + onRefreshError: (_k, e) => errors.push(e), + }); + + cache.set("a", 1); + const value = cache.get("a"); + assertEquals(value, 1); + assertEquals(errors.length, 1); + assert(errors[0] instanceof Error); + assertEquals(cache.stats.refreshErrors, 1); + await Promise.resolve(); +}); + +Deno.test("Cache SWR synchronous refresh() throw does not wedge #refreshing", async () => { + using time = new FakeTime(0); + let callCount = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 0, + refresh: () => { + callCount++; + if (callCount === 1) throw new Error("sync"); + return Promise.resolve(99); + }, + 
onRefreshError: () => {}, + }); + + cache.set("a", 1); + cache.get("a"); + assertEquals(callCount, 1); + + time.tick(1); + cache.get("a"); + assertEquals(callCount, 2); + + await Promise.resolve(); + assertEquals(cache.get("a"), 99); +}); + +// ─── onRefreshError throw contained ────────────────── + +Deno.test("Cache SWR throwing onRefreshError does not cause unhandled rejection", async () => { + using _time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 0, + refresh: () => Promise.reject(new Error("refresh fail")), + onRefreshError: () => { + throw new Error("callback throws"); + }, + }); + + cache.set("a", 1); + cache.get("a"); + + await Promise.resolve(); + await Promise.resolve(); + + assertEquals(cache.stats.refreshErrors, 1); + assertEquals(cache.get("a"), 1); +}); + +Deno.test("Cache SWR throwing onRefreshError on sync refresh throw is contained", async () => { + using _time = new FakeTime(0); + using cache = new Cache({ + ttl: 1000, + staleTtl: 0, + refresh: () => { + throw new Error("sync fail"); + }, + onRefreshError: () => { + throw new Error("callback also throws"); + }, + }); + + cache.set("a", 1); + const value = cache.get("a"); + assertEquals(value, 1); + assertEquals(cache.stats.refreshErrors, 1); + await Promise.resolve(); +}); + +// ─── getOrLoad with options ────────────────────────── + +Deno.test("Cache getOrLoad() forwards ttl option to set()", async () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100, ttl: 10000 }); + + await cache.getOrLoad("a", () => Promise.resolve(42), { ttl: 50 }); + assertEquals(cache.get("a"), 42); + + time.tick(51); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache getOrLoad() forwards absoluteExpiration option", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + maxSize: 100, + ttl: 10000, + slidingExpiration: true, + }); + + await cache.getOrLoad("a", () => Promise.resolve(42), { + absoluteExpiration: 150, + }); + + 
time.tick(80); + assertEquals(cache.get("a"), 42); + time.tick(80); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache getOrLoad() forwards staleTtl option", async () => { + using time = new FakeTime(0); + let refreshCount = 0; + using cache = new Cache({ + ttl: 10000, + staleTtl: 5000, + refresh: () => { + refreshCount++; + return Promise.resolve(99); + }, + }); + + await cache.getOrLoad("a", () => Promise.resolve(1), { staleTtl: 50 }); + + time.tick(51); + cache.get("a"); + assertEquals(cache.stats.staleHits, 1); + assertEquals(refreshCount, 1); + await Promise.resolve(); +}); + +Deno.test("Cache getOrLoad() options ignored on cache hit", async () => { + using _time = new FakeTime(0); + using cache = new Cache({ maxSize: 100, ttl: 10000 }); + + cache.set("a", 1); + let called = false; + const value = await cache.getOrLoad("a", () => { + called = true; + return Promise.resolve(99); + }, { ttl: 1 }); + + assertEquals(value, 1); + assertEquals(called, false); +}); + +Deno.test("Cache getOrLoad() de-duplicated callers use first caller's options", async () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100, ttl: 10000 }); + + const [a, b] = await Promise.all([ + cache.getOrLoad("x", () => Promise.resolve(42), { ttl: 50 }), + cache.getOrLoad("x", () => Promise.resolve(99), { ttl: 9999 }), + ]); + + assertEquals(a, 42); + assertEquals(b, 42); + + time.tick(51); + assertEquals(cache.get("x"), undefined); +}); + +// ─── Re-entrancy guard depth counter ───────────────── + +Deno.test("Cache get() on expired key inside onEject does not disable re-entrancy guard", () => { + using _time = new FakeTime(0); + let setThrew = false; + + using cache = new Cache({ + ttl: 1000, + onEject: (k) => { + if (k === "a") { + cache.get("b"); + try { + cache.set("x", 999); + } catch { + setThrew = true; + } + } + }, + }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + cache.delete("a"); + assertEquals(setThrew, true); + 
assertEquals(cache.has("x"), false); +}); + +Deno.test("Cache nested get() on expired key inside onEject fires nested onEject", () => { + using _time = new FakeTime(0); + const ejected: [string, CacheEjectionReason][] = []; + + using cache = new Cache({ + ttl: 1000, + onEject: (k, _v, r) => { + ejected.push([k, r]); + if (k === "a") { + cache.get("b"); + } + }, + }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + cache.delete("a"); + assertEquals(ejected.length, 2); + assertEquals(ejected[0], ["a", "deleted"]); + assertEquals(ejected[1], ["b", "expired"]); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache re-entrancy guard survives deeply nested get()-triggered ejections", () => { + using _time = new FakeTime(0); + let deepSetThrew = false; + + using cache = new Cache({ + ttl: 1000, + onEject: (k) => { + if (k === "a") { + cache.get("b"); + } + if (k === "b") { + cache.get("c"); + } + if (k === "c") { + try { + cache.set("x", 999); + } catch { + deepSetThrew = true; + } + } + }, + }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + cache.set("c", 3, { ttl: 0 }); + + cache.delete("a"); + assertEquals(deepSetThrew, true); + assertEquals(cache.has("x"), false); +}); + +// ─── SWR refresh preserves absoluteExpiration ──────── + +Deno.test("Cache SWR refresh preserves absoluteExpiration cap", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 10000, + staleTtl: 100, + refresh: () => Promise.resolve(99), + }); + + cache.set("a", 1, { absoluteExpiration: 500 }); + + time.tick(101); + cache.get("a"); + await Promise.resolve(); + + assertEquals(cache.get("a"), 99); + + time.tick(400); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache SWR refresh with absoluteExpiration expires at original wall-clock time", async () => { + using time = new FakeTime(0); + let refreshCount = 0; + using cache = new Cache({ + ttl: 10000, + staleTtl: 100, + refresh: () => { + refreshCount++; + return 
Promise.resolve(refreshCount * 100); + }, + }); + + cache.set("a", 1, { absoluteExpiration: 350 }); + + time.tick(101); + cache.get("a"); + await Promise.resolve(); + assertEquals(refreshCount, 1); + assertEquals(cache.get("a"), 100); + + time.tick(100); + cache.get("a"); + await Promise.resolve(); + assertEquals(refreshCount, 2); + assertEquals(cache.get("a"), 200); + + time.tick(150); + assertEquals(cache.get("a"), undefined); +}); From de339eb353aba4d4ce7c65bb293a98d3472a0c02 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Thu, 2 Apr 2026 16:52:46 +0200 Subject: [PATCH 07/12] add Cache --- cache/cache.ts | 34 ++++++++++-- cache/cache_test.ts | 131 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 159 insertions(+), 6 deletions(-) diff --git a/cache/cache.ts b/cache/cache.ts index 8695aaae6a83..6359973bf0b2 100644 --- a/cache/cache.ts +++ b/cache/cache.ts @@ -147,8 +147,12 @@ export interface CacheOptionsSwr { ttl: number; /** * When `true`, {@linkcode Cache.prototype.get | get()} resets the - * entry's TTL. {@linkcode Cache.prototype.peek | peek()} and - * {@linkcode Cache.prototype.has | has()} do not. + * entry's hard TTL deadline. {@linkcode Cache.prototype.peek | peek()} + * and {@linkcode Cache.prototype.has | has()} do not. Only the hard + * deadline ({@linkcode CacheOptionsSwr.ttl | ttl}) is extended; the + * stale deadline ({@linkcode CacheOptionsSwr.staleTtl | staleTtl}) is + * not reset, so background refreshes are still triggered based on the + * original creation time. * * @default {false} */ @@ -449,7 +453,11 @@ export class Cache implements MemoizationCache { } /** - * The number of entries currently in the cache. + * The number of entries currently in the cache. This is an O(1) count + * that may include entries whose TTL has elapsed but have not yet been + * lazily removed. 
Use the iterators ({@linkcode Cache.prototype.keys}, + * {@linkcode Cache.prototype.values}, {@linkcode Cache.prototype.entries}) + * if an accurate live-entry count is needed. * * @returns The number of entries. * @@ -570,7 +578,7 @@ export class Cache implements MemoizationCache { } const top = this.#heap?.peek(); if (top === undefined) return; - const delay = Math.max(0, top.priority - Date.now()); + const delay = Math.min(Math.max(0, top.priority - Date.now()), 0x7FFFFFFF); this.#timerId = setTimeout(() => this.#onTimer(), delay); } @@ -666,7 +674,9 @@ export class Cache implements MemoizationCache { if (absDeadline !== Infinity) { options.absoluteExpiration = Math.max(0, absDeadline - Date.now()); } - this.set(key, newValue, options); + try { + this.set(key, newValue, options); + } catch { /* contained — onEject errors during eviction */ } } }, (error) => { @@ -841,6 +851,11 @@ export class Cache implements MemoizationCache { `Cannot set entry in Cache: staleTtl must be a finite non-negative number, received ${staleTtl}`, ); } + if (staleTtl !== Infinity && staleTtl >= ttl) { + throw new RangeError( + `Cannot set entry in Cache: staleTtl must be less than ttl, received staleTtl=${staleTtl} ttl=${ttl}`, + ); + } const now = Date.now(); const absoluteDeadline = abs !== undefined ? 
now + abs : Infinity; @@ -862,6 +877,7 @@ export class Cache implements MemoizationCache { }; this.#setMostRecentlyUsed(key, entry); + this.#inFlight?.delete(key); this.#refreshing?.delete(key); if (deadline !== Infinity) { @@ -954,7 +970,13 @@ export class Cache implements MemoizationCache { const inFlight = (this.#inFlight ??= new Map()); const gen = this.#generation; - const promise = loader(key).then( + let loaderResult: Promise; + try { + loaderResult = loader(key); + } catch (error) { + return Promise.reject(error); + } + const promise = loaderResult.then( (value) => { if (inFlight.get(key) !== promise) return value; inFlight.delete(key); diff --git a/cache/cache_test.ts b/cache/cache_test.ts index b788e08a2a10..582b2206ae78 100644 --- a/cache/cache_test.ts +++ b/cache/cache_test.ts @@ -1685,3 +1685,134 @@ Deno.test("Cache SWR refresh with absoluteExpiration expires at original wall-cl time.tick(150); assertEquals(cache.get("a"), undefined); }); + +// ─── getOrLoad sync loader throw ───────────────────── + +Deno.test("Cache getOrLoad() returns rejected promise when loader throws synchronously", async () => { + const cache = new Cache({ maxSize: 100 }); + const error = new Error("sync boom"); + + const result = await cache.getOrLoad("a", () => { + throw error; + }).then( + () => "resolved", + (e) => e, + ); + + assertEquals(result, error); + assertEquals(cache.has("a"), false); +}); + +Deno.test("Cache getOrLoad() sync throw does not leave stale in-flight entry", async () => { + const cache = new Cache({ maxSize: 100 }); + + await assertRejects( + () => + cache.getOrLoad("a", () => { + throw new Error("fail"); + }), + Error, + ); + + const value = await cache.getOrLoad("a", () => Promise.resolve(42)); + assertEquals(value, 42); + assertEquals(cache.get("a"), 42); +}); + +// ─── set() staleTtl >= ttl validation ──────────────── + +Deno.test("Cache set() throws when per-entry staleTtl >= entry ttl", () => { + const cache = new Cache({ + ttl: 1000, + staleTtl: 
500, + refresh: () => Promise.resolve(1), + }); + assertThrows( + () => cache.set("a", 1, { staleTtl: 1000 }), + RangeError, + "staleTtl must be less than ttl", + ); + assertThrows( + () => cache.set("a", 1, { staleTtl: 2000 }), + RangeError, + "staleTtl must be less than ttl", + ); + cache[Symbol.dispose](); +}); + +Deno.test("Cache set() throws when per-entry staleTtl >= per-entry ttl", () => { + const cache = new Cache({ + ttl: 10000, + staleTtl: 5000, + refresh: () => Promise.resolve(1), + }); + assertThrows( + () => cache.set("a", 1, { ttl: 500, staleTtl: 500 }), + RangeError, + "staleTtl must be less than ttl", + ); + assertThrows( + () => cache.set("a", 1, { ttl: 500, staleTtl: 600 }), + RangeError, + "staleTtl must be less than ttl", + ); + cache[Symbol.dispose](); +}); + +// ─── set() clears stale in-flight entries ──────────── + +Deno.test("Cache set() clears in-flight getOrLoad so next getOrLoad uses fresh loader", async () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100, ttl: 100 }); + + let resolveLoader1!: (v: number) => void; + const p1 = cache.getOrLoad( + "a", + () => new Promise((r) => resolveLoader1 = r), + ); + + cache.set("a", 42); + + time.tick(101); + assertEquals(cache.get("a"), undefined); + + let loader2Called = false; + const p2 = cache.getOrLoad("a", () => { + loader2Called = true; + return Promise.resolve(99); + }); + + assertEquals(loader2Called, true); + assertEquals(await p2, 99); + assertEquals(cache.get("a"), 99); + + resolveLoader1(1); + await p1; + assertEquals(cache.get("a"), 99); +}); + +// ─── SWR refresh set() throw contained ─────────────── + +Deno.test("Cache SWR refresh success path contains onEject throw from eviction", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + maxSize: 2, + ttl: 1000, + staleTtl: 100, + refresh: () => Promise.resolve(99), + onEject: (_k, _v, r) => { + if (r === "evicted") throw new Error("onEject boom"); + }, + }); + + cache.set("a", 1); + 
cache.set("filler", 2); + + time.tick(101); + cache.get("a"); + + await Promise.resolve(); + await Promise.resolve(); + + assertEquals(cache.stats.refreshes, 1); +}); From ff3110970729d332f5b6aeae4302cb9417714fbb Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Thu, 2 Apr 2026 19:28:06 +0200 Subject: [PATCH 08/12] coverage --- cache/cache_test.ts | 88 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/cache/cache_test.ts b/cache/cache_test.ts index 582b2206ae78..dad890764bb0 100644 --- a/cache/cache_test.ts +++ b/cache/cache_test.ts @@ -1816,3 +1816,91 @@ Deno.test("Cache SWR refresh success path contains onEject throw from eviction", assertEquals(cache.stats.refreshes, 1); }); + +// ─── peek()/has() lazy expiry (no timer reaping) ───── + +Deno.test("Cache peek() lazily removes entry expired with ttl: 0", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1, { ttl: 0 }); + assertEquals(cache.peek("a"), undefined); + assertEquals(cache.stats.expirations, 1); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache has() lazily removes entry expired with ttl: 0", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1, { ttl: 0 }); + assertEquals(cache.has("a"), false); + assertEquals(cache.stats.expirations, 1); + assertEquals(cache.size, 0); +}); + +// ─── clear() single onEject error thrown directly ──── + +Deno.test("Cache clear() single onEject error is thrown directly, not wrapped", () => { + const err = new Error("solo"); + const cache = new Cache({ + maxSize: 10, + onEject: () => { + throw err; + }, + }); + + cache.set("a", 1); + + let caught: unknown; + try { + cache.clear(); + } catch (e) { + caught = e; + } + assert(caught === err); +}); + +// ─── Iterators skip expired-but-not-yet-reaped entries ─ + +Deno.test("Cache keys() skips expired-but-not-yet-reaped entries", () => { + using _time = new FakeTime(0); + using 
cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + assertEquals([...cache.keys()], ["a"]); +}); + +Deno.test("Cache values() skips expired-but-not-yet-reaped entries", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + assertEquals([...cache.values()], [1]); +}); + +Deno.test("Cache entries() skips expired-but-not-yet-reaped entries", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + assertEquals([...cache.entries()], [["a", 1]]); +}); + +Deno.test("Cache forEach() skips expired-but-not-yet-reaped entries", () => { + using _time = new FakeTime(0); + using cache = new Cache({ ttl: 1000 }); + + cache.set("a", 1); + cache.set("b", 2, { ttl: 0 }); + + const result: [string, number][] = []; + cache.forEach((v, k) => result.push([k, v])); + assertEquals(result, [["a", 1]]); +}); From 4f53d591cd0bdbb40dd0094d9cc8abe81dd1d234 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Fri, 3 Apr 2026 12:45:20 +0200 Subject: [PATCH 09/12] fmt --- cache/cache.ts | 70 ++++++++++++++-------------- cache/cache_test.ts | 108 ++++++++++++++++++++++---------------------- 2 files changed, 90 insertions(+), 88 deletions(-) diff --git a/cache/cache.ts b/cache/cache.ts index 6359973bf0b2..e94ea40782c5 100644 --- a/cache/cache.ts +++ b/cache/cache.ts @@ -17,7 +17,7 @@ import type { MemoizationCache } from "./memoize.ts"; * - `"cleared"` — removed by * {@linkcode Cache.prototype.clear | clear()}. */ -export type CacheEjectionReason = "evicted" | "expired" | "deleted" | "cleared"; +export type CacheRemovalReason = "evicted" | "expired" | "deleted" | "cleared"; /** * Options shared by all {@linkcode Cache} configurations. @@ -44,7 +44,7 @@ export interface CacheOptionsBase { * @param value The value of the removed entry. * @param reason Why the entry was removed. 
*/ - onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + onRemove?: (key: K, value: V, reason: CacheRemovalReason) => void; /** Must be `undefined` for non-TTL caches. */ ttl?: undefined; @@ -83,7 +83,7 @@ export interface CacheOptionsTtl { * @param value The value of the removed entry. * @param reason Why the entry was removed. */ - onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + onRemove?: (key: K, value: V, reason: CacheRemovalReason) => void; /** * Default time-to-live in milliseconds. Entries expire after this * duration. @@ -138,7 +138,7 @@ export interface CacheOptionsSwr { * @param value The value of the removed entry. * @param reason Why the entry was removed. */ - onEject?: (key: K, value: V, reason: CacheEjectionReason) => void; + onRemove?: (key: K, value: V, reason: CacheRemovalReason) => void; /** * Default time-to-live in milliseconds. Entries expire after this * duration. Must be greater than @@ -195,9 +195,9 @@ export interface CacheOptionsSwr { * SWR options cannot be set without both `staleTtl` and `refresh`: * * ```ts ignore - * new Cache({ slidingExpiration: true }); // compile error - * new Cache({ ttl: 5000, staleTtl: 3000 }); // compile error - * new Cache({ ttl: 5000, refresh: fn }); // compile error + * new Cache({ slidingExpiration: true }); // compile error + * new Cache({ ttl: 5000, staleTtl: 3000 }); // compile error + * new Cache({ ttl: 5000, refresh: fn }); // compile error * ``` * * @experimental **UNSTABLE**: New API, yet to be vetted. 
@@ -344,8 +344,10 @@ export class Cache implements MemoizationCache { #maxSize: number | undefined; #defaultTtl: number; #slidingExpiration: boolean; - #ejectingDepth = 0; - #eject: ((key: K, value: V, reason: CacheEjectionReason) => void) | undefined; + #removingDepth = 0; + #onRemove: + | ((key: K, value: V, reason: CacheRemovalReason) => void) + | undefined; #heap: IndexedHeap | undefined; #timerId: number | undefined; @@ -412,7 +414,7 @@ export class Cache implements MemoizationCache { } this.#slidingExpiration = options?.slidingExpiration ?? false; - this.#eject = options?.onEject; + this.#onRemove = options?.onRemove; if (options?.staleTtl !== undefined) { const { staleTtl } = options; @@ -536,24 +538,24 @@ export class Cache implements MemoizationCache { #removeEntry( key: K, entry: CacheEntry, - reason: CacheEjectionReason, + reason: CacheRemovalReason, ): void { this.#data.delete(key); this.#heap?.delete(key); - if (this.#eject) { - this.#ejectingDepth++; + if (this.#onRemove) { + this.#removingDepth++; try { - this.#eject(key, entry.value, reason); + this.#onRemove(key, entry.value, reason); } finally { - this.#ejectingDepth--; + this.#removingDepth--; } } } - #assertNotEjecting(method: string): void { - if (this.#ejectingDepth > 0) { + #assertNotRemoving(method: string): void { + if (this.#removingDepth > 0) { throw new TypeError( - `Cannot ${method} Cache: cache is not re-entrant during onEject callbacks`, + `Cannot ${method} Cache: cache is not re-entrant during onRemove callbacks`, ); } } @@ -595,14 +597,14 @@ export class Cache implements MemoizationCache { if (entry !== undefined) { this.#data.delete(top.key); this.#stats.expirations++; - if (this.#eject) { - this.#ejectingDepth++; + if (this.#onRemove) { + this.#removingDepth++; try { - this.#eject(top.key, entry.value, "expired"); + this.#onRemove(top.key, entry.value, "expired"); } catch (e) { errors.push(e); } finally { - this.#ejectingDepth--; + this.#removingDepth--; } } } @@ -676,7 +678,7 @@ 
export class Cache implements MemoizationCache { } try { this.set(key, newValue, options); - } catch { /* contained — onEject errors during eviction */ } + } catch { /* contained — onRemove errors during eviction */ } } }, (error) => { @@ -770,7 +772,7 @@ export class Cache implements MemoizationCache { const entry = this.#data.get(key); if (entry === undefined) return undefined; if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { - if (this.#ejectingDepth === 0) { + if (this.#removingDepth === 0) { this.#stats.expirations++; this.#removeEntry(key, entry, "expired"); } @@ -801,7 +803,7 @@ export class Cache implements MemoizationCache { const entry = this.#data.get(key); if (entry === undefined) return false; if (entry.deadline !== Infinity && entry.deadline <= Date.now()) { - if (this.#ejectingDepth === 0) { + if (this.#removingDepth === 0) { this.#stats.expirations++; this.#removeEntry(key, entry, "expired"); } @@ -814,7 +816,7 @@ export class Cache implements MemoizationCache { * Inserts or overwrites an entry. Promotes the key to most-recently-used. * If the cache exceeds {@linkcode Cache.prototype.maxSize | maxSize}, * the least-recently-used entry is evicted. Overwriting an existing key - * does **not** fire {@linkcode CacheOptionsBase.onEject | onEject}. + * does **not** fire {@linkcode CacheOptionsBase.onRemove | onRemove}. * * @param key The key to set. * @param value The value to set. @@ -830,7 +832,7 @@ export class Cache implements MemoizationCache { * ``` */ set(key: K, value: V, options?: CacheSetOptions): this { - this.#assertNotEjecting("set entry in"); + this.#assertNotRemoving("set entry in"); const ttl = options?.ttl ?? 
this.#defaultTtl; if (options?.ttl !== undefined && (!(ttl >= 0) || !Number.isFinite(ttl))) { throw new RangeError( @@ -1018,7 +1020,7 @@ export class Cache implements MemoizationCache { * ``` */ delete(key: K): boolean { - this.#assertNotEjecting("delete entry in"); + this.#assertNotRemoving("delete entry in"); this.#inFlight?.delete(key); const entry = this.#data.get(key); if (entry === undefined) return false; @@ -1029,7 +1031,7 @@ export class Cache implements MemoizationCache { /** * Removes all entries. Fires - * {@linkcode CacheOptionsBase.onEject | onEject} with reason `"cleared"` + * {@linkcode CacheOptionsBase.onRemove | onRemove} with reason `"cleared"` * for each entry. * * @experimental **UNSTABLE**: New API, yet to be vetted. @@ -1049,7 +1051,7 @@ export class Cache implements MemoizationCache { * ``` */ clear(): void { - this.#assertNotEjecting("clear"); + this.#assertNotRemoving("clear"); if (this.#timerId !== undefined) { clearTimeout(this.#timerId); this.#timerId = undefined; @@ -1057,24 +1059,24 @@ export class Cache implements MemoizationCache { this.#heap?.clear(); this.#inFlight?.clear(); this.#refreshing?.clear(); - if (!this.#eject) { + if (!this.#onRemove) { this.#data.clear(); return; } const entries = [...this.#data.entries()]; this.#data.clear(); - this.#ejectingDepth++; + this.#removingDepth++; const errors: unknown[] = []; try { for (const [key, entry] of entries) { try { - this.#eject(key, entry.value, "cleared"); + this.#onRemove(key, entry.value, "cleared"); } catch (e) { errors.push(e); } } } finally { - this.#ejectingDepth--; + this.#removingDepth--; } if (errors.length === 1) throw errors[0]; if (errors.length > 1) throw new AggregateError(errors); diff --git a/cache/cache_test.ts b/cache/cache_test.ts index dad890764bb0..7c2d0dfecee3 100644 --- a/cache/cache_test.ts +++ b/cache/cache_test.ts @@ -3,9 +3,9 @@ import { assert, assertEquals, assertRejects, assertThrows } from "@std/assert"; import { FakeTime } from 
"@std/testing/time"; import { Cache, - type CacheEjectionReason, type CacheOptions, type CacheOptionsSwr, + type CacheRemovalReason, } from "./cache.ts"; // ─── Constructor ───────────────────────────────────── @@ -73,10 +73,10 @@ Deno.test("Cache set/get/has/peek/delete work in pure LRU mode", () => { }); Deno.test("Cache evicts LRU entry when maxSize exceeded", () => { - const ejected: [string, number, CacheEjectionReason][] = []; + const ejected: [string, number, CacheRemovalReason][] = []; const cache = new Cache({ maxSize: 2, - onEject: (k, v, r) => ejected.push([k, v, r]), + onRemove: (k, v, r) => ejected.push([k, v, r]), }); cache.set("a", 1); @@ -113,11 +113,11 @@ Deno.test("Cache peek() does not promote", () => { assertEquals(cache.has("b"), true); }); -Deno.test("Cache set() overwrites existing key without onEject and promotes to MRU", () => { +Deno.test("Cache set() overwrites existing key without onRemove and promotes to MRU", () => { const ejected: string[] = []; const cache = new Cache({ maxSize: 2, - onEject: (k) => ejected.push(k), + onRemove: (k) => ejected.push(k), }); cache.set("a", 1); @@ -159,12 +159,12 @@ Deno.test("Cache with TTL expires entries on get()", () => { assertEquals(cache.get("a"), undefined); }); -Deno.test("Cache TTL timer fires and removes entries via onEject", () => { +Deno.test("Cache TTL timer fires and removes entries via onRemove", () => { using time = new FakeTime(0); - const ejected: [string, number, CacheEjectionReason][] = []; + const ejected: [string, number, CacheRemovalReason][] = []; using cache = new Cache({ ttl: 100, - onEject: (k, v, r) => ejected.push([k, v, r]), + onRemove: (k, v, r) => ejected.push([k, v, r]), }); cache.set("a", 1); @@ -272,7 +272,7 @@ Deno.test("Cache multiple TTL entries expire in correct order", () => { const ejected: string[] = []; using cache = new Cache({ ttl: 1000, - onEject: (k, _v, r) => { + onRemove: (k, _v, r) => { if (r === "expired") ejected.push(k); }, }); @@ -295,11 +295,11 @@ 
Deno.test("Cache multiple TTL entries expire in correct order", () => { Deno.test("Cache LRU + TTL: eviction cleans up heap entry", () => { using time = new FakeTime(0); - const ejected: [string, number, CacheEjectionReason][] = []; + const ejected: [string, number, CacheRemovalReason][] = []; using cache = new Cache({ maxSize: 2, ttl: 1000, - onEject: (k, v, r) => ejected.push([k, v, r]), + onRemove: (k, v, r) => ejected.push([k, v, r]), }); cache.set("a", 1); @@ -329,24 +329,24 @@ Deno.test("Cache LRU + TTL: expired entry not returned even before timer fires", assertEquals(cache.peek("a"), undefined); }); -// ─── onEject ───────────────────────────────────────── +// ─── onRemove ───────────────────────────────────────── -Deno.test("Cache onEject fires with correct reason for delete()", () => { - const ejected: CacheEjectionReason[] = []; +Deno.test("Cache onRemove fires with correct reason for delete()", () => { + const ejected: CacheRemovalReason[] = []; const cache = new Cache({ maxSize: 10, - onEject: (_k, _v, r) => ejected.push(r), + onRemove: (_k, _v, r) => ejected.push(r), }); cache.set("a", 1); cache.delete("a"); assertEquals(ejected, ["deleted"]); }); -Deno.test("Cache onEject fires with correct reason for clear()", () => { - const ejected: CacheEjectionReason[] = []; +Deno.test("Cache onRemove fires with correct reason for clear()", () => { + const ejected: CacheRemovalReason[] = []; const cache = new Cache({ maxSize: 10, - onEject: (_k, _v, r) => ejected.push(r), + onRemove: (_k, _v, r) => ejected.push(r), }); cache.set("a", 1); cache.set("b", 2); @@ -354,10 +354,10 @@ Deno.test("Cache onEject fires with correct reason for clear()", () => { assertEquals(ejected, ["cleared", "cleared"]); }); -Deno.test("Cache is not re-entrant during onEject", () => { +Deno.test("Cache is not re-entrant during onRemove", () => { const cache = new Cache({ maxSize: 10, - onEject: () => { + onRemove: () => { assertThrows( () => cache.set("x", 1), TypeError, @@ -379,18 
+379,18 @@ Deno.test("Cache is not re-entrant during onEject", () => { cache.delete("a"); }); -Deno.test("Cache onEject throwing during timer does not break future expirations", () => { +Deno.test("Cache onRemove throwing during timer does not break future expirations", () => { using time = new FakeTime(0); let throwCount = 0; const expired: string[] = []; using cache = new Cache({ ttl: 1000, - onEject: (k, _v, r) => { + onRemove: (k, _v, r) => { if (r === "expired") { expired.push(k); if (k === "a") { throwCount++; - throw new Error("onEject error"); + throw new Error("onRemove error"); } } }, @@ -403,7 +403,7 @@ Deno.test("Cache onEject throwing during timer does not break future expirations try { time.tick(101); } catch { - // expected throw from onEject + // expected throw from onRemove } assertEquals(throwCount, 1); assert(expired.includes("a")); @@ -413,11 +413,11 @@ Deno.test("Cache onEject throwing during timer does not break future expirations assert(expired.includes("c")); }); -Deno.test("Cache onEject errors from multiple entries are all surfaced via AggregateError", () => { +Deno.test("Cache onRemove errors from multiple entries are all surfaced via AggregateError", () => { using time = new FakeTime(0); using cache = new Cache({ ttl: 100, - onEject: (k, _v, r) => { + onRemove: (k, _v, r) => { if (r === "expired") throw new Error(`fail:${k}`); }, }); @@ -435,12 +435,12 @@ Deno.test("Cache onEject errors from multiple entries are all surfaced via Aggre assertEquals(caught.errors.length, 2); }); -Deno.test("Cache onEject single error is thrown directly, not wrapped", () => { +Deno.test("Cache onRemove single error is thrown directly, not wrapped", () => { using time = new FakeTime(0); const err = new Error("solo"); using cache = new Cache({ ttl: 100, - onEject: (_k, _v, r) => { + onRemove: (_k, _v, r) => { if (r === "expired") throw err; }, }); @@ -456,10 +456,10 @@ Deno.test("Cache onEject single error is thrown directly, not wrapped", () => { 
assert(caught === err); }); -Deno.test("Cache clear() onEject errors surfaced via AggregateError", () => { +Deno.test("Cache clear() onRemove errors surfaced via AggregateError", () => { const cache = new Cache({ maxSize: 10, - onEject: (k) => { + onRemove: (k) => { throw new Error(`fail:${k}`); }, }); @@ -654,12 +654,12 @@ Deno.test("Cache set() with invalid absoluteExpiration throws", () => { // ─── has()/peek() eagerly remove expired entries ───── -Deno.test("Cache has() eagerly removes expired entry and fires onEject", () => { +Deno.test("Cache has() eagerly removes expired entry and fires onRemove", () => { using time = new FakeTime(0); - const ejected: [string, CacheEjectionReason][] = []; + const ejected: [string, CacheRemovalReason][] = []; using cache = new Cache({ ttl: 100, - onEject: (k, _v, r) => ejected.push([k, r]), + onRemove: (k, _v, r) => ejected.push([k, r]), }); cache.set("a", 1); @@ -670,12 +670,12 @@ Deno.test("Cache has() eagerly removes expired entry and fires onEject", () => { assertEquals(ejected, [["a", "expired"]]); }); -Deno.test("Cache peek() eagerly removes expired entry and fires onEject", () => { +Deno.test("Cache peek() eagerly removes expired entry and fires onRemove", () => { using time = new FakeTime(0); - const ejected: [string, CacheEjectionReason][] = []; + const ejected: [string, CacheRemovalReason][] = []; using cache = new Cache({ ttl: 100, - onEject: (k, _v, r) => ejected.push([k, r]), + onRemove: (k, _v, r) => ejected.push([k, r]), }); cache.set("a", 1); @@ -686,15 +686,15 @@ Deno.test("Cache peek() eagerly removes expired entry and fires onEject", () => assertEquals(ejected, [["a", "expired"]]); }); -// ─── has()/peek() re-entrancy guard during onEject ─── +// ─── has()/peek() re-entrancy guard during onRemove ─── -Deno.test("Cache has() during onEject with expired sibling preserves re-entrancy guard", () => { +Deno.test("Cache has() during onRemove with expired sibling preserves re-entrancy guard", () => { using time = 
new FakeTime(0); let setThrew = false; using cache = new Cache({ ttl: 1000, - onEject: (k) => { + onRemove: (k) => { if (k === "a") { assertEquals(cache.has("b"), false); try { @@ -715,13 +715,13 @@ Deno.test("Cache has() during onEject with expired sibling preserves re-entrancy assertEquals(cache.has("x"), false); }); -Deno.test("Cache peek() during onEject with expired sibling preserves re-entrancy guard", () => { +Deno.test("Cache peek() during onRemove with expired sibling preserves re-entrancy guard", () => { using time = new FakeTime(0); let setThrew = false; using cache = new Cache({ ttl: 1000, - onEject: (k) => { + onRemove: (k) => { if (k === "a") { assertEquals(cache.peek("b"), undefined); try { @@ -1222,7 +1222,7 @@ Deno.test("Cache SWR timer-based expiration still fires", async () => { ttl: 1000, staleTtl: 500, refresh: () => Promise.resolve(99), - onEject: (k, _v, r) => { + onRemove: (k, _v, r) => { if (r === "expired") ejected.push(k); }, }); @@ -1552,13 +1552,13 @@ Deno.test("Cache getOrLoad() de-duplicated callers use first caller's options", // ─── Re-entrancy guard depth counter ───────────────── -Deno.test("Cache get() on expired key inside onEject does not disable re-entrancy guard", () => { +Deno.test("Cache get() on expired key inside onRemove does not disable re-entrancy guard", () => { using _time = new FakeTime(0); let setThrew = false; using cache = new Cache({ ttl: 1000, - onEject: (k) => { + onRemove: (k) => { if (k === "a") { cache.get("b"); try { @@ -1578,13 +1578,13 @@ Deno.test("Cache get() on expired key inside onEject does not disable re-entranc assertEquals(cache.has("x"), false); }); -Deno.test("Cache nested get() on expired key inside onEject fires nested onEject", () => { +Deno.test("Cache nested get() on expired key inside onRemove fires nested onRemove", () => { using _time = new FakeTime(0); - const ejected: [string, CacheEjectionReason][] = []; + const ejected: [string, CacheRemovalReason][] = []; using cache = new Cache({ 
ttl: 1000, - onEject: (k, _v, r) => { + onRemove: (k, _v, r) => { ejected.push([k, r]); if (k === "a") { cache.get("b"); @@ -1608,7 +1608,7 @@ Deno.test("Cache re-entrancy guard survives deeply nested get()-triggered ejecti using cache = new Cache({ ttl: 1000, - onEject: (k) => { + onRemove: (k) => { if (k === "a") { cache.get("b"); } @@ -1793,15 +1793,15 @@ Deno.test("Cache set() clears in-flight getOrLoad so next getOrLoad uses fresh l // ─── SWR refresh set() throw contained ─────────────── -Deno.test("Cache SWR refresh success path contains onEject throw from eviction", async () => { +Deno.test("Cache SWR refresh success path contains onRemove throw from eviction", async () => { using time = new FakeTime(0); using cache = new Cache({ maxSize: 2, ttl: 1000, staleTtl: 100, refresh: () => Promise.resolve(99), - onEject: (_k, _v, r) => { - if (r === "evicted") throw new Error("onEject boom"); + onRemove: (_k, _v, r) => { + if (r === "evicted") throw new Error("onRemove boom"); }, }); @@ -1839,13 +1839,13 @@ Deno.test("Cache has() lazily removes entry expired with ttl: 0", () => { assertEquals(cache.size, 0); }); -// ─── clear() single onEject error thrown directly ──── +// ─── clear() single onRemove error thrown directly ──── -Deno.test("Cache clear() single onEject error is thrown directly, not wrapped", () => { +Deno.test("Cache clear() single onRemove error is thrown directly, not wrapped", () => { const err = new Error("solo"); const cache = new Cache({ maxSize: 10, - onEject: () => { + onRemove: () => { throw err; }, }); From dc39540fe1f9929731f61673d1843b8f88e6dda8 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Fri, 3 Apr 2026 13:01:17 +0200 Subject: [PATCH 10/12] fmt --- cache/cache.ts | 43 +++++++++++++++++-------------------------- 1 file changed, 17 insertions(+), 26 deletions(-) diff --git a/cache/cache.ts b/cache/cache.ts index e94ea40782c5..534c58bcc656 100644 --- a/cache/cache.ts +++ b/cache/cache.ts @@ -354,9 +354,7 @@ export class Cache 
implements MemoizationCache { #inFlight: Map> | undefined; #defaultStaleTtl: number; - #refresh: - | ((key: K, staleValue: V) => Promise) - | undefined; + #refresh: ((key: K, staleValue: V) => Promise) | undefined; #onRefreshError: ((key: K, error: unknown) => void) | undefined; #refreshing: Set | undefined; #generation = 0; @@ -455,9 +453,9 @@ export class Cache implements MemoizationCache { } /** - * The number of entries currently in the cache. This is an O(1) count - * that may include entries whose TTL has elapsed but have not yet been - * lazily removed. Use the iterators ({@linkcode Cache.prototype.keys}, + * The number of entries currently in the cache. This count may include + * expired entries that have not yet been lazily removed. Use the + * iterators ({@linkcode Cache.prototype.keys}, * {@linkcode Cache.prototype.values}, {@linkcode Cache.prototype.entries}) * if an accurate live-entry count is needed. * @@ -594,19 +592,17 @@ export class Cache implements MemoizationCache { if (top.priority > now) break; heap.pop(); const entry = this.#data.get(top.key); - if (entry !== undefined) { - this.#data.delete(top.key); - this.#stats.expirations++; - if (this.#onRemove) { - this.#removingDepth++; - try { - this.#onRemove(top.key, entry.value, "expired"); - } catch (e) { - errors.push(e); - } finally { - this.#removingDepth--; - } - } + if (entry === undefined) continue; + this.#data.delete(top.key); + this.#stats.expirations++; + if (!this.#onRemove) continue; + this.#removingDepth++; + try { + this.#onRemove(top.key, entry.value, "expired"); + } catch (e) { + errors.push(e); + } finally { + this.#removingDepth--; } } this.#scheduleTimer(); @@ -631,13 +627,8 @@ export class Cache implements MemoizationCache { now: number, ): void { let effectiveTtl = ttl; - if ( - this.#slidingExpiration && entry.absoluteDeadline !== Infinity - ) { - effectiveTtl = Math.min( - ttl, - Math.max(0, entry.absoluteDeadline - now), - ); + if (this.#slidingExpiration && 
entry.absoluteDeadline !== Infinity) { + effectiveTtl = Math.min(ttl, Math.max(0, entry.absoluteDeadline - now)); } const deadline = now + effectiveTtl; entry.deadline = deadline; From 820fbf98ae79f99f48e4d19a604afe55a54d2a2a Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Fri, 3 Apr 2026 18:17:25 +0200 Subject: [PATCH 11/12] perf improvements --- data_structures/unstable_indexed_heap.ts | 43 +++++++++----- data_structures/unstable_indexed_heap_test.ts | 58 ++++++++++++++----- 2 files changed, 71 insertions(+), 30 deletions(-) diff --git a/data_structures/unstable_indexed_heap.ts b/data_structures/unstable_indexed_heap.ts index dbe1fb47e6ae..0cd1f4106b1a 100644 --- a/data_structures/unstable_indexed_heap.ts +++ b/data_structures/unstable_indexed_heap.ts @@ -51,11 +51,6 @@ function assertValidPriority(priority: number): void { } } -/** Returns the parent index for a given child index. */ -function getParentIndex(index: number): number { - return ((index + 1) >>> 1) - 1; -} - /** * A priority queue that supports looking up, removing, and re-prioritizing * entries by key. Each entry is a unique `(key, priority)` pair. The entry @@ -103,6 +98,22 @@ export class IndexedHeap implements Iterable> { #data: MutableEntry[] = []; #index: Map = new Map(); + /** + * A string tag for the class, used by `Object.prototype.toString()`. + * + * @experimental **UNSTABLE**: New API, yet to be vetted. + * + * @example Usage + * ```ts + * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap"; + * import { assertEquals } from "@std/assert"; + * + * const heap = new IndexedHeap(); + * assertEquals(heap[Symbol.toStringTag], "IndexedHeap"); + * ``` + */ + readonly [Symbol.toStringTag] = "IndexedHeap" as const; + /** Bubble the entry at `pos` up toward the root while it is smaller than its parent. 
*/ #siftUp(pos: number): number { const data = this.#data; @@ -110,7 +121,7 @@ export class IndexedHeap implements Iterable> { const entry = data[pos]!; const priority = entry.priority; while (pos > 0) { - const parentPos = getParentIndex(pos); + const parentPos = (pos - 1) >>> 1; const parent = data[parentPos]!; if (priority < parent.priority) { data[pos] = parent; @@ -188,7 +199,6 @@ export class IndexedHeap implements Iterable> { } const pos = this.#data.length; this.#data.push({ key, priority }); - this.#index.set(key, pos); this.#siftUp(pos); } @@ -229,7 +239,6 @@ export class IndexedHeap implements Iterable> { const last = this.#data.pop()!; this.#data[0] = last; - this.#index.set(last.key, 0); this.#siftDown(0); return root; } @@ -299,7 +308,6 @@ export class IndexedHeap implements Iterable> { const last = this.#data.pop()!; this.#data[pos] = last; - this.#index.set(last.key, pos); const afterUp = this.#siftUp(pos); if (afterUp === pos) { @@ -372,10 +380,17 @@ export class IndexedHeap implements Iterable> { */ pushOrUpdate(key: K, priority: number): void { assertValidPriority(priority); - if (this.#index.has(key)) { - this.update(key, priority); + const pos = this.#index.get(key); + if (pos !== undefined) { + this.#data[pos]!.priority = priority; + const afterUp = this.#siftUp(pos); + if (afterUp === pos) { + this.#siftDown(pos); + } } else { - this.push(key, priority); + const newPos = this.#data.length; + this.#data.push({ key, priority }); + this.#siftUp(newPos); } } @@ -471,8 +486,8 @@ export class IndexedHeap implements Iterable> { * ``` */ clear(): void { - this.#data = []; - this.#index = new Map(); + this.#data.length = 0; + this.#index.clear(); } /** diff --git a/data_structures/unstable_indexed_heap_test.ts b/data_structures/unstable_indexed_heap_test.ts index 665d5e91d8de..554750b1e783 100644 --- a/data_structures/unstable_indexed_heap_test.ts +++ b/data_structures/unstable_indexed_heap_test.ts @@ -128,25 +128,45 @@ Deno.test("IndexedHeap delete 
only element", () => { }); Deno.test("IndexedHeap delete triggers sift-up when replacement is smaller", () => { + // Heap array: [r(1), a(50), b(3), c(51), d(52), e(4), f(5)] + // r(1) + // / \ + // a(50) b(3) + // / \ / \ + // c(51) d(52) e(4) f(5) + // + // Deleting "c" (index 3) moves last element "f" (priority 5) into index 3. + // Parent of index 3 is "a" (priority 50). Since 5 < 50, "f" sifts up. const heap = new IndexedHeap(); - heap.push("a", 10); - heap.push("b", 20); - heap.push("c", 15); - heap.push("d", 25); - heap.push("e", 30); - heap.push("f", 5); + for ( + const [key, priority] of [ + ["r", 1], + ["a", 50], + ["b", 3], + ["c", 51], + ["d", 52], + ["e", 4], + ["f", 5], + ] as const + ) { + heap.push(key, priority); + } - // Deleting "b" (priority 20): last element "f" (priority 5) replaces it - // and must sift up past "a" (priority 10) to become the new root. - heap.delete("b"); + heap.delete("c"); - assertEquals(heap.peek(), { key: "f", priority: 5 }); - const result = [...heap]; - for (let i = 1; i < result.length; i++) { - if (result[i]!.priority < result[i - 1]!.priority) { - throw new Error("Not sorted after delete-triggered sift-up"); - } - } + assertEquals(heap.peek(), { key: "r", priority: 1 }); + assertEquals(heap.has("c"), false); + assertEquals(heap.size, 6); + assertEquals(heap.getPriority("f"), 5); + + assertEquals([...heap], [ + { key: "r", priority: 1 }, + { key: "b", priority: 3 }, + { key: "e", priority: 4 }, + { key: "f", priority: 5 }, + { key: "a", priority: 50 }, + { key: "d", priority: 52 }, + ]); }); Deno.test("IndexedHeap update decrease-key bubbles up", () => { @@ -459,6 +479,12 @@ Deno.test("IndexedHeap pushOrUpdate throws on NaN priority", () => { assertEquals(heap.getPriority("b"), 1); }); +Deno.test("IndexedHeap has correct Symbol.toStringTag", () => { + const heap = new IndexedHeap(); + assertEquals(heap[Symbol.toStringTag], "IndexedHeap"); + assertEquals(Object.prototype.toString.call(heap), "[object IndexedHeap]"); 
+}); + Deno.test("IndexedHeap stress test: push N, pop all, verify sorted", () => { const heap = new IndexedHeap(); const n = 200; From c96867c172f5ea8ec08597aa1009a95c6b030548 Mon Sep 17 00:00:00 2001 From: Tomas Zijdemans Date: Sun, 5 Apr 2026 11:51:38 +0200 Subject: [PATCH 12/12] perf --- cache/cache.ts | 158 +++++++++++++++++------ cache/cache_test.ts | 146 +++++++++++++++++++++ data_structures/unstable_indexed_heap.ts | 58 +++++++++ 3 files changed, 319 insertions(+), 43 deletions(-) diff --git a/cache/cache.ts b/cache/cache.ts index 534c58bcc656..cf9c75863b2d 100644 --- a/cache/cache.ts +++ b/cache/cache.ts @@ -274,8 +274,11 @@ export interface CacheStats { refreshErrors: number; } -interface CacheEntry { +interface CacheEntry { + key: K; value: V; + prev: CacheEntry | undefined; + next: CacheEntry | undefined; deadline: number; absoluteDeadline: number; entryTtl: number; @@ -340,7 +343,7 @@ interface CacheEntry { * ``` */ export class Cache implements MemoizationCache { - #data = new Map>(); + #data = new Map>(); #maxSize: number | undefined; #defaultTtl: number; #slidingExpiration: boolean; @@ -349,6 +352,8 @@ export class Cache implements MemoizationCache { | ((key: K, value: V, reason: CacheRemovalReason) => void) | undefined; + #head: CacheEntry | undefined; + #tail: CacheEntry | undefined; #heap: IndexedHeap | undefined; #timerId: number | undefined; #inFlight: Map> | undefined; @@ -529,16 +534,17 @@ export class Cache implements MemoizationCache { }; } - #isExpired(entry: CacheEntry, now: number): boolean { + #isExpired(entry: CacheEntry, now: number): boolean { return entry.deadline !== Infinity && entry.deadline <= now; } #removeEntry( key: K, - entry: CacheEntry, + entry: CacheEntry, reason: CacheRemovalReason, ): void { this.#data.delete(key); + this.#unlink(entry); this.#heap?.delete(key); if (this.#onRemove) { this.#removingDepth++; @@ -558,17 +564,34 @@ export class Cache implements MemoizationCache { } } - #setMostRecentlyUsed(key: K, entry: 
CacheEntry): void { - this.#data.delete(key); - this.#data.set(key, entry); + #unlink(node: CacheEntry): void { + if (node.prev !== undefined) node.prev.next = node.next; + else this.#head = node.next; + if (node.next !== undefined) node.next.prev = node.prev; + else this.#tail = node.prev; + node.prev = undefined; + node.next = undefined; + } + + #linkToTail(node: CacheEntry): void { + node.prev = this.#tail; + node.next = undefined; + if (this.#tail !== undefined) this.#tail.next = node; + else this.#head = node; + this.#tail = node; + } + + #promoteToMru(node: CacheEntry): void { + if (node === this.#tail) return; + this.#unlink(node); + this.#linkToTail(node); } #pruneToMaxSize(): void { if (this.#maxSize === undefined || this.#data.size <= this.#maxSize) return; - const key = this.#data.keys().next().value!; - const entry = this.#data.get(key)!; + const lru = this.#head!; this.#stats.evictions++; - this.#removeEntry(key, entry, "evicted"); + this.#removeEntry(lru.key, lru, "evicted"); } #scheduleTimer(): void { @@ -576,9 +599,9 @@ export class Cache implements MemoizationCache { clearTimeout(this.#timerId); this.#timerId = undefined; } - const top = this.#heap?.peek(); - if (top === undefined) return; - const delay = Math.min(Math.max(0, top.priority - Date.now()), 0x7FFFFFFF); + const nextDeadline = this.#heap?.peekPriority(); + if (nextDeadline === undefined) return; + const delay = Math.min(Math.max(0, nextDeadline - Date.now()), 0x7FFFFFFF); this.#timerId = setTimeout(() => this.#onTimer(), delay); } @@ -588,12 +611,12 @@ export class Cache implements MemoizationCache { const heap = this.#heap!; const errors: unknown[] = []; while (!heap.isEmpty()) { - const top = heap.peek()!; - if (top.priority > now) break; - heap.pop(); + if (heap.peekPriority()! 
> now) break; + const top = heap.pop()!; const entry = this.#data.get(top.key); if (entry === undefined) continue; this.#data.delete(top.key); + this.#unlink(entry); this.#stats.expirations++; if (!this.#onRemove) continue; this.#removingDepth++; @@ -612,9 +635,9 @@ export class Cache implements MemoizationCache { #setHeapDeadline(key: K, deadline: number): void { const heap = this.#heap ?? (this.#heap = new IndexedHeap()); - const wasRoot = heap.isEmpty() || heap.peek()!.key === key; + const wasRoot = heap.isEmpty() || heap.peekKey() === key; heap.pushOrUpdate(key, deadline); - const isRoot = heap.peek()!.key === key; + const isRoot = heap.peekKey() === key; if (wasRoot || isRoot) { this.#scheduleTimer(); } @@ -622,7 +645,7 @@ export class Cache implements MemoizationCache { #updateDeadline( key: K, - entry: CacheEntry, + entry: CacheEntry, ttl: number, now: number, ): void { @@ -730,7 +753,7 @@ export class Cache implements MemoizationCache { if (this.#slidingExpiration && entry.deadline !== Infinity) { this.#updateDeadline(key, entry, entry.entryTtl, now); } - this.#setMostRecentlyUsed(key, entry); + this.#promoteToMru(entry); return entry.value; } @@ -836,14 +859,15 @@ export class Cache implements MemoizationCache { `Cannot set entry in Cache: absoluteExpiration must be a finite non-negative number, received ${abs}`, ); } - const staleTtl = options?.staleTtl ?? this.#defaultStaleTtl; if ( - staleTtl !== Infinity && (!(staleTtl >= 0) || !Number.isFinite(staleTtl)) + options?.staleTtl !== undefined && + (!(options.staleTtl >= 0) || !Number.isFinite(options.staleTtl)) ) { throw new RangeError( - `Cannot set entry in Cache: staleTtl must be a finite non-negative number, received ${staleTtl}`, + `Cannot set entry in Cache: staleTtl must be a finite non-negative number, received ${options.staleTtl}`, ); } + const staleTtl = options?.staleTtl ?? 
this.#defaultStaleTtl; if (staleTtl !== Infinity && staleTtl >= ttl) { throw new RangeError( `Cannot set entry in Cache: staleTtl must be less than ttl, received staleTtl=${staleTtl} ttl=${ttl}`, @@ -852,15 +876,19 @@ export class Cache implements MemoizationCache { const now = Date.now(); const absoluteDeadline = abs !== undefined ? now + abs : Infinity; - const deadline = ttl === Infinity - ? Infinity - : Math.min(now + ttl, absoluteDeadline); + const deadline = Math.min( + ttl === Infinity ? Infinity : now + ttl, + absoluteDeadline, + ); const softDeadline = staleTtl === Infinity ? Infinity : Math.min(now + staleTtl, deadline); - const entry: CacheEntry = { + const entry: CacheEntry = { + key, value, + prev: undefined, + next: undefined, deadline, absoluteDeadline, entryTtl: ttl, @@ -869,7 +897,10 @@ export class Cache implements MemoizationCache { generation: ++this.#generation, }; - this.#setMostRecentlyUsed(key, entry); + const old = this.#data.get(key); + if (old !== undefined) this.#unlink(old); + this.#linkToTail(entry); + this.#data.set(key, entry); this.#inFlight?.delete(key); this.#refreshing?.delete(key); @@ -953,10 +984,31 @@ export class Cache implements MemoizationCache { loader: (key: K) => Promise, options?: CacheSetOptions, ): Promise { - const cached = this.get(key); - if (cached !== undefined || this.has(key)) { - return Promise.resolve(cached as V); + const entry = this.#data.get(key); + if (entry !== undefined) { + const now = Date.now(); + if (!this.#isExpired(entry, now)) { + if ( + this.#refresh !== undefined && + entry.softDeadline !== Infinity && + entry.softDeadline <= now && + !this.#refreshing?.has(key) + ) { + this.#stats.staleHits++; + this.#backgroundRefresh(key, entry.value); + } else { + this.#stats.hits++; + } + if (this.#slidingExpiration && entry.deadline !== Infinity) { + this.#updateDeadline(key, entry, entry.entryTtl, now); + } + this.#promoteToMru(entry); + return Promise.resolve(entry.value); + } + 
this.#stats.expirations++; + this.#removeEntry(key, entry, "expired"); } + this.#stats.misses++; const existing = this.#inFlight?.get(key); if (existing) return existing; @@ -1052,10 +1104,14 @@ export class Cache implements MemoizationCache { this.#refreshing?.clear(); if (!this.#onRemove) { this.#data.clear(); + this.#head = undefined; + this.#tail = undefined; return; } const entries = [...this.#data.entries()]; this.#data.clear(); + this.#head = undefined; + this.#tail = undefined; this.#removingDepth++; const errors: unknown[] = []; try { @@ -1091,9 +1147,13 @@ export class Cache implements MemoizationCache { */ *keys(): IterableIterator { const now = Date.now(); - for (const [key, entry] of this.#data) { - if (entry.deadline !== Infinity && entry.deadline <= now) continue; - yield key; + let node = this.#head; + while (node !== undefined) { + const next = node.next; + if (node.deadline === Infinity || node.deadline > now) { + yield node.key; + } + node = next; } } @@ -1115,9 +1175,13 @@ export class Cache implements MemoizationCache { */ *values(): IterableIterator { const now = Date.now(); - for (const [_key, entry] of this.#data) { - if (entry.deadline !== Infinity && entry.deadline <= now) continue; - yield entry.value; + let node = this.#head; + while (node !== undefined) { + const next = node.next; + if (node.deadline === Infinity || node.deadline > now) { + yield node.value; + } + node = next; } } @@ -1139,9 +1203,13 @@ export class Cache implements MemoizationCache { */ *entries(): IterableIterator<[K, V]> { const now = Date.now(); - for (const [key, entry] of this.#data) { - if (entry.deadline !== Infinity && entry.deadline <= now) continue; - yield [key, entry.value]; + let node = this.#head; + while (node !== undefined) { + const next = node.next; + if (node.deadline === Infinity || node.deadline > now) { + yield [node.key, node.value]; + } + node = next; } } @@ -1165,9 +1233,13 @@ export class Cache implements MemoizationCache { */ 
forEach(callback: (value: V, key: K, cache: Cache) => void): void { const now = Date.now(); - for (const [key, entry] of this.#data) { - if (entry.deadline !== Infinity && entry.deadline <= now) continue; - callback(entry.value, key, this); + let node = this.#head; + while (node !== undefined) { + const next = node.next; + if (node.deadline === Infinity || node.deadline > now) { + callback(node.value, node.key, this); + } + node = next; } } diff --git a/cache/cache_test.ts b/cache/cache_test.ts index 7c2d0dfecee3..a963bad6f43d 100644 --- a/cache/cache_test.ts +++ b/cache/cache_test.ts @@ -1107,6 +1107,11 @@ Deno.test("Cache SWR per-entry staleTtl with invalid value throws", () => { () => cache.set("a", 1, { staleTtl: NaN }), RangeError, ); + assertThrows( + () => cache.set("a", 1, { staleTtl: Infinity }), + RangeError, + "staleTtl must be a finite non-negative number", + ); cache[Symbol.dispose](); }); @@ -1904,3 +1909,144 @@ Deno.test("Cache forEach() skips expired-but-not-yet-reaped entries", () => { cache.forEach((v, k) => result.push([k, v])); assertEquals(result, [["a", 1]]); }); + +// ─── absoluteExpiration without default TTL ─────────── + +Deno.test("Cache absoluteExpiration expires entry on non-TTL bounded cache", () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100 }); + + cache.set("a", 1, { absoluteExpiration: 50 }); + assertEquals(cache.get("a"), 1); + + time.tick(51); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache absoluteExpiration expires entry on unbounded non-TTL cache", () => { + using time = new FakeTime(0); + using cache = new Cache(); + + cache.set("a", 1, { absoluteExpiration: 100 }); + assertEquals(cache.get("a"), 1); + + time.tick(101); + assertEquals(cache.get("a"), undefined); +}); + +Deno.test("Cache absoluteExpiration timer fires onRemove on non-TTL cache", () => { + using time = new FakeTime(0); + const ejected: [string, CacheRemovalReason][] = []; + using cache = new Cache({ + maxSize: 
100, + onRemove: (k, _v, r) => ejected.push([k, r]), + }); + + cache.set("a", 1, { absoluteExpiration: 100 }); + time.tick(101); + + assertEquals(ejected, [["a", "expired"]]); + assertEquals(cache.size, 0); +}); + +Deno.test("Cache absoluteExpiration on non-TTL cache does not affect entries without it", () => { + using time = new FakeTime(0); + using cache = new Cache({ maxSize: 100 }); + + cache.set("a", 1, { absoluteExpiration: 50 }); + cache.set("b", 2); + + time.tick(51); + assertEquals(cache.get("a"), undefined); + assertEquals(cache.get("b"), 2); +}); + +// ─── Coverage: #onTimer skips entries deleted before timer fires ── + +Deno.test("Cache #onTimer skips entry that was manually deleted before timer fires", () => { + using time = new FakeTime(0); + const removed: [string, CacheRemovalReason][] = []; + using cache = new Cache({ + maxSize: 100, + ttl: 100, + onRemove: (k, _v, r) => removed.push([k, r]), + }); + + cache.set("a", 1); + cache.set("b", 2); + cache.delete("a"); + assertEquals(removed, [["a", "deleted"]]); + + time.tick(101); + assertEquals(removed, [["a", "deleted"], ["b", "expired"]]); + assertEquals(cache.size, 0); +}); + +// ─── Coverage: getOrLoad() SWR stale hit ───────────────────────── + +Deno.test("Cache getOrLoad() in stale window returns stale value and triggers background refresh", async () => { + using time = new FakeTime(0); + let refreshCalls = 0; + using cache = new Cache({ + ttl: 1000, + staleTtl: 200, + refresh: () => { + refreshCalls++; + return Promise.resolve(42); + }, + }); + + await cache.getOrLoad("a", () => Promise.resolve(1)); + assertEquals(cache.get("a"), 1); + + time.tick(201); + const val = await cache.getOrLoad("a", () => Promise.resolve(999)); + assertEquals(val, 1); + assertEquals(cache.stats.staleHits, 1); + assertEquals(refreshCalls, 1); + + await time.tickAsync(0); + assertEquals(cache.get("a"), 42); +}); + +// ─── Coverage: getOrLoad() sliding expiration on hit ───────────── + +Deno.test("Cache getOrLoad() with 
slidingExpiration extends TTL on hit", async () => { + using time = new FakeTime(0); + using cache = new Cache({ + ttl: 100, + slidingExpiration: true, + }); + + await cache.getOrLoad("a", () => Promise.resolve(1)); + + time.tick(80); + const val = await cache.getOrLoad("a", () => Promise.resolve(999)); + assertEquals(val, 1); + + time.tick(99); + const val2 = await cache.getOrLoad("a", () => Promise.resolve(888)); + assertEquals(val2, 1); + + time.tick(101); + assertEquals(cache.get("a"), undefined); +}); + +// ─── Coverage: getOrLoad() expired entry removal ───────────────── + +Deno.test("Cache getOrLoad() eagerly removes expired entry and reloads", async () => { + using _time = new FakeTime(0); + const removed: [string, CacheRemovalReason][] = []; + using cache = new Cache({ + maxSize: 100, + onRemove: (k, _v, r) => removed.push([k, r]), + }); + + cache.set("a", 1, { ttl: 0 }); + assertEquals(cache.size, 1); + + const val = await cache.getOrLoad("a", () => Promise.resolve(42)); + assertEquals(val, 42); + assertEquals(cache.stats.expirations, 1); + assertEquals(removed, [["a", "expired"]]); +}); diff --git a/data_structures/unstable_indexed_heap.ts b/data_structures/unstable_indexed_heap.ts index 0cd1f4106b1a..88293b5f9817 100644 --- a/data_structures/unstable_indexed_heap.ts +++ b/data_structures/unstable_indexed_heap.ts @@ -38,6 +38,8 @@ export interface HeapEntry { export type ReadonlyIndexedHeap = Pick< IndexedHeap, | "peek" + | "peekKey" + | "peekPriority" | "has" | "getPriority" | "size" @@ -66,6 +68,8 @@ function assertValidPriority(priority: number): void { * | Method | Time complexity | * | --------------------- | -------------------------------- | * | peek() | Constant | + * | peekKey() | Constant | + * | peekPriority() | Constant | * | pop() | Logarithmic in the number of entries | * | push(key, priority) | Logarithmic in the number of entries | * | delete(key) | Logarithmic in the number of entries | @@ -272,6 +276,60 @@ export class IndexedHeap 
implements Iterable> {
     return { key: entry.key, priority: entry.priority };
   }
 
+  /**
+   * Returns the key of the front entry (smallest priority), or
+   * `undefined` if the heap is empty. Unlike
+   * {@linkcode IndexedHeap.prototype.peek | peek}, this method does not
+   * allocate a wrapper object.
+   *
+   * @experimental **UNSTABLE**: New API, yet to be vetted.
+   *
+   * @example Usage
+   * ```ts
+   * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap";
+   * import { assertEquals } from "@std/assert";
+   *
+   * const heap = new IndexedHeap();
+   * heap.push("x", 5);
+   * heap.push("y", 3);
+   *
+   * assertEquals(heap.peekKey(), "y");
+   * assertEquals(heap.size, 2);
+   * ```
+   *
+   * @returns The key of the front entry, or `undefined` if empty.
+   */
+  peekKey(): K | undefined {
+    return this.#data[0]?.key;
+  }
+
+  /**
+   * Returns the priority of the front entry (smallest priority), or
+   * `undefined` if the heap is empty. Unlike
+   * {@linkcode IndexedHeap.prototype.peek | peek}, this method does not
+   * allocate a wrapper object.
+   *
+   * @experimental **UNSTABLE**: New API, yet to be vetted.
+   *
+   * @example Usage
+   * ```ts
+   * import { IndexedHeap } from "@std/data-structures/unstable-indexed-heap";
+   * import { assertEquals } from "@std/assert";
+   *
+   * const heap = new IndexedHeap();
+   * heap.push("x", 5);
+   * heap.push("y", 3);
+   *
+   * assertEquals(heap.peekPriority(), 3);
+   * assertEquals(heap.size, 2);
+   * ```
+   *
+   * @returns The priority of the front entry, or `undefined` if empty.
+   */
+  peekPriority(): number | undefined {
+    return this.#data[0]?.priority;
+  }
+
   /**
    * Remove the entry with the given key.
    *