diff --git a/packages/producer/src/services/hdrImageTransferCache.test.ts b/packages/producer/src/services/hdrImageTransferCache.test.ts new file mode 100644 index 000000000..5eff7e7a8 --- /dev/null +++ b/packages/producer/src/services/hdrImageTransferCache.test.ts @@ -0,0 +1,316 @@ +import { describe, expect, test } from "bun:test"; +import { convertTransfer } from "@hyperframes/engine"; +import { createHdrImageTransferCache } from "./hdrImageTransferCache.ts"; + +/** + * Build a deterministic rgb48le buffer for `pixelCount` pixels. + * Each pixel is 3 channels × 2 bytes = 6 bytes. Values vary per pixel/channel + * so the LUT-based `convertTransfer` produces bytes that differ from the + * source. + */ +function makeSourceBuffer(pixelCount: number, seed = 0): Buffer { + const buf = Buffer.alloc(pixelCount * 6); + for (let i = 0; i < pixelCount; i++) { + const off = i * 6; + // Spread values across the 16-bit range so HLG↔PQ LUT lookups land on + // mid-curve entries that are guaranteed to differ from the input. 
+ buf.writeUInt16LE((seed + i * 257) & 0xff_ff, off); + buf.writeUInt16LE((seed + i * 521 + 1) & 0xff_ff, off + 2); + buf.writeUInt16LE((seed + i * 1031 + 2) & 0xff_ff, off + 4); + } + return buf; +} + +function expectedConverted(source: Buffer, from: "hlg" | "pq", to: "hlg" | "pq"): Buffer { + const copy = Buffer.from(source); + convertTransfer(copy, from, to); + return copy; +} + +describe("hdrImageTransferCache", () => { + test("returns source buffer unchanged when sourceTransfer === targetTransfer", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + + const result = cache.getConverted("img1", "pq", "pq", source); + + expect(result).toBe(source); + expect(cache.size()).toBe(0); + expect(cache.bytesUsed()).toBe(0); + }); + + test("first miss converts and caches", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + const expected = expectedConverted(source, "hlg", "pq"); + + const result = cache.getConverted("img1", "hlg", "pq", source); + + expect(result).not.toBe(source); + expect(Buffer.compare(result, expected)).toBe(0); + expect(cache.size()).toBe(1); + expect(cache.bytesUsed()).toBe(source.byteLength); + }); + + test("second hit returns cached buffer reference", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + + const first = cache.getConverted("img1", "hlg", "pq", source); + const second = cache.getConverted("img1", "hlg", "pq", source); + + expect(second).toBe(first); + expect(cache.size()).toBe(1); + }); + + test("does not re-run convertTransfer on cache hit", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + + const first = cache.getConverted("img1", "hlg", "pq", source); + const snapshot = Buffer.from(first); + cache.getConverted("img1", "hlg", "pq", source); + + expect(Buffer.compare(first, snapshot)).toBe(0); + }); + + test("different target transfers for same imageId are 
cached independently", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + + const toPq = cache.getConverted("img1", "hlg", "pq", source); + const toHlg = cache.getConverted("img1", "pq", "hlg", source); + + expect(toPq).not.toBe(toHlg); + expect(Buffer.compare(toPq, expectedConverted(source, "hlg", "pq"))).toBe(0); + expect(Buffer.compare(toHlg, expectedConverted(source, "pq", "hlg"))).toBe(0); + expect(cache.size()).toBe(2); + }); + + test("different imageIds are cached independently", () => { + const cache = createHdrImageTransferCache(); + const a = makeSourceBuffer(4, 100); + const b = makeSourceBuffer(4, 200); + + const convA = cache.getConverted("a", "hlg", "pq", a); + const convB = cache.getConverted("b", "hlg", "pq", b); + + expect(convA).not.toBe(convB); + expect(Buffer.compare(convA, expectedConverted(a, "hlg", "pq"))).toBe(0); + expect(Buffer.compare(convB, expectedConverted(b, "hlg", "pq"))).toBe(0); + expect(cache.size()).toBe(2); + }); + + // ── Byte-budget eviction ────────────────────────────────────────────── + + test("evicts LRU entries when byte budget exceeded", () => { + // Each buffer = 100 pixels × 6 bytes = 600 bytes. + // Budget = 1200 → fits 2 entries. + const cache = createHdrImageTransferCache({ maxBytes: 1200 }); + const a = makeSourceBuffer(100, 1); + const b = makeSourceBuffer(100, 2); + const c = makeSourceBuffer(100, 3); + + const convA1 = cache.getConverted("a", "hlg", "pq", a); + cache.getConverted("b", "hlg", "pq", b); + expect(cache.size()).toBe(2); + expect(cache.bytesUsed()).toBe(1200); + + // Inserting c should evict a (LRU). + cache.getConverted("c", "hlg", "pq", c); + expect(cache.size()).toBe(2); + expect(cache.bytesUsed()).toBe(1200); + + // a was evicted — re-requesting produces a fresh conversion. 
+ const convA2 = cache.getConverted("a", "hlg", "pq", a); + expect(convA2).not.toBe(convA1); + expect(Buffer.compare(convA2, expectedConverted(a, "hlg", "pq"))).toBe(0); + }); + + test("large buffer evicts multiple smaller entries", () => { + // 3 small entries (198 bytes each = 594 total), budget = 800. + // Then one 600-byte entry should evict 2 of the 3 smalls. + const cache = createHdrImageTransferCache({ maxBytes: 800 }); + const small = makeSourceBuffer(33, 1); // 33*6=198 bytes + const small2 = makeSourceBuffer(33, 2); + const small3 = makeSourceBuffer(33, 3); + const big = makeSourceBuffer(100, 4); // 600 bytes + + cache.getConverted("s1", "hlg", "pq", small); + cache.getConverted("s2", "hlg", "pq", small2); + cache.getConverted("s3", "hlg", "pq", small3); + expect(cache.size()).toBe(3); + + // big (600) + existing (594) > 800 → evict until room. + cache.getConverted("big", "hlg", "pq", big); + expect(cache.bytesUsed()).toBeLessThanOrEqual(800); + expect(cache.size()).toBeLessThan(4); + }); + + test("access promotes entry to most-recently-used under byte budget", () => { + const cache = createHdrImageTransferCache({ maxBytes: 1200 }); + const a = makeSourceBuffer(100, 1); // 600 bytes + const b = makeSourceBuffer(100, 2); + const c = makeSourceBuffer(100, 3); + + const convA1 = cache.getConverted("a", "hlg", "pq", a); + cache.getConverted("b", "hlg", "pq", b); + + // Promote a to MRU. + const convA2 = cache.getConverted("a", "hlg", "pq", a); + expect(convA2).toBe(convA1); + + // Insert c — b is now LRU and should be evicted, not a. + cache.getConverted("c", "hlg", "pq", c); + + // a should still be cached (was promoted). + const convA3 = cache.getConverted("a", "hlg", "pq", a); + expect(convA3).toBe(convA1); + + // b was evicted — fresh conversion. 
+ const convB2 = cache.getConverted("b", "hlg", "pq", b); + expect(Buffer.compare(convB2, expectedConverted(b, "hlg", "pq"))).toBe(0); + expect(cache.size()).toBe(2); + }); + + test("maxBytes: 0 disables caching but still returns correct converted bytes", () => { + const cache = createHdrImageTransferCache({ maxBytes: 0 }); + const source = makeSourceBuffer(4); + const expected = expectedConverted(source, "hlg", "pq"); + + const first = cache.getConverted("img1", "hlg", "pq", source); + const second = cache.getConverted("img1", "hlg", "pq", source); + + expect(first).not.toBe(second); + expect(Buffer.compare(first, expected)).toBe(0); + expect(Buffer.compare(second, expected)).toBe(0); + expect(cache.size()).toBe(0); + expect(cache.bytesUsed()).toBe(0); + }); + + test("bytesUsed tracks cumulative size accurately", () => { + const cache = createHdrImageTransferCache({ maxBytes: 10000 }); + const a = makeSourceBuffer(50, 1); // 300 bytes + const b = makeSourceBuffer(100, 2); // 600 bytes + + cache.getConverted("a", "hlg", "pq", a); + expect(cache.bytesUsed()).toBe(300); + + cache.getConverted("b", "hlg", "pq", b); + expect(cache.bytesUsed()).toBe(900); + }); + + test("bytesUsed decreases on eviction", () => { + const cache = createHdrImageTransferCache({ maxBytes: 600 }); + const a = makeSourceBuffer(50, 1); // 300 bytes + const b = makeSourceBuffer(50, 2); // 300 bytes + const c = makeSourceBuffer(50, 3); // 300 bytes + + cache.getConverted("a", "hlg", "pq", a); + cache.getConverted("b", "hlg", "pq", b); + expect(cache.bytesUsed()).toBe(600); + + cache.getConverted("c", "hlg", "pq", c); + expect(cache.bytesUsed()).toBe(600); + expect(cache.size()).toBe(2); + }); + + test("single buffer larger than budget still works (cache-through)", () => { + const cache = createHdrImageTransferCache({ maxBytes: 100 }); + const big = makeSourceBuffer(100, 1); // 600 bytes > 100 budget + const expected = expectedConverted(big, "hlg", "pq"); + + const result = 
cache.getConverted("big", "hlg", "pq", big); + + expect(Buffer.compare(result, expected)).toBe(0); + // Too large to cache — behaves like passthrough. + expect(cache.size()).toBe(0); + expect(cache.bytesUsed()).toBe(0); + }); + + // ── Source-buffer-immutability ──────────────────────────────────────── + + test("cached buffer is independent from the source buffer", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + const sourceSnapshot = Buffer.from(source); + + const cached = cache.getConverted("img1", "hlg", "pq", source); + source.fill(0); + + expect(cache.getConverted("img1", "hlg", "pq", source)).toBe(cached); + expect(Buffer.compare(cached, expectedConverted(sourceSnapshot, "hlg", "pq"))).toBe(0); + }); + + test("does not mutate the source buffer on a convert+cache miss", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + const sourceSnapshot = Buffer.from(source); + + cache.getConverted("img1", "hlg", "pq", source); + + expect(Buffer.compare(source, sourceSnapshot)).toBe(0); + }); + + test("does not mutate the source buffer on a convert+cache miss with maxBytes=0 passthrough", () => { + const cache = createHdrImageTransferCache({ maxBytes: 0 }); + const source = makeSourceBuffer(4); + const sourceSnapshot = Buffer.from(source); + + const result = cache.getConverted("img1", "hlg", "pq", source); + + expect(Buffer.compare(source, sourceSnapshot)).toBe(0); + expect(result).not.toBe(source); + expect(Buffer.compare(result, expectedConverted(sourceSnapshot, "hlg", "pq"))).toBe(0); + expect(cache.size()).toBe(0); + }); + + test("does not mutate the source buffer on a cache hit", () => { + const cache = createHdrImageTransferCache(); + const source = makeSourceBuffer(4); + const sourceSnapshot = Buffer.from(source); + + cache.getConverted("img1", "hlg", "pq", source); + cache.getConverted("img1", "hlg", "pq", source); + + expect(Buffer.compare(source, sourceSnapshot)).toBe(0); + 
}); + + // ── Validation ──────────────────────────────────────────────────────── + + test("rejects invalid maxBytes", () => { + expect(() => createHdrImageTransferCache({ maxBytes: -1 })).toThrow(); + expect(() => createHdrImageTransferCache({ maxBytes: 1.5 })).toThrow(); + expect(() => createHdrImageTransferCache({ maxBytes: Number.NaN })).toThrow(); + }); + + test("default maxBytes accommodates typical 1080p compositions", () => { + const cache = createHdrImageTransferCache(); + // 1080p rgb48le = 1920*1080*6 = ~12.4MB per entry. + const px1080p = 1920 * 1080; + const source = makeSourceBuffer(px1080p); + + for (let i = 0; i < 16; i++) { + cache.getConverted(`img${i}`, "hlg", "pq", source); + } + // Default 200MB budget → fits ~16 entries at 1080p. + expect(cache.size()).toBe(16); + + const first = cache.getConverted("img0", "hlg", "pq", source); + expect(Buffer.compare(first, expectedConverted(source, "hlg", "pq"))).toBe(0); + }); + + test("default maxBytes limits 4K entries to safe count", () => { + const cache = createHdrImageTransferCache(); + // 4K rgb48le = 3840*2160*6 = ~49.8MB per entry. + const px4k = 3840 * 2160; + const source = makeSourceBuffer(px4k); + + for (let i = 0; i < 8; i++) { + cache.getConverted(`img${i}`, "hlg", "pq", source); + } + // 200MB / ~50MB = ~4 entries max. 8 inserts should cap at 4. 
+ expect(cache.size()).toBeLessThanOrEqual(4); + expect(cache.bytesUsed()).toBeLessThanOrEqual(200 * 1024 * 1024); + }); +}); diff --git a/packages/producer/src/services/hdrImageTransferCache.ts b/packages/producer/src/services/hdrImageTransferCache.ts new file mode 100644 index 000000000..eb2307b0c --- /dev/null +++ b/packages/producer/src/services/hdrImageTransferCache.ts @@ -0,0 +1,96 @@ +import { type HdrTransfer, convertTransfer } from "@hyperframes/engine"; + +export interface HdrImageTransferCache { + getConverted( + imageId: string, + sourceTransfer: HdrTransfer, + targetTransfer: HdrTransfer, + source: Buffer, + ): Buffer; + + size(): number; + + bytesUsed(): number; +} + +export interface HdrImageTransferCacheOptions { + /** + * Maximum bytes of converted buffers to retain before evicting the + * least-recently-used entries. Defaults to 200 MB. At 1080p (~12 MB/entry) + * that fits ~16 entries; at 4K (~50 MB/entry) it naturally caps at ~4. + * Set to `0` to disable caching entirely (every call allocates fresh). + */ + maxBytes?: number; +} + +const DEFAULT_MAX_BYTES = 200 * 1024 * 1024; + +export function createHdrImageTransferCache( + options: HdrImageTransferCacheOptions = {}, +): HdrImageTransferCache { + const maxBytes = options.maxBytes ?? 
DEFAULT_MAX_BYTES; + if (!Number.isInteger(maxBytes) || maxBytes < 0) { + throw new Error( + `createHdrImageTransferCache: maxBytes must be a non-negative integer, got ${String(maxBytes)}`, + ); + } + + const entries = new Map(); + let totalBytes = 0; + + function makeKey(imageId: string, targetTransfer: HdrTransfer): string { + return `${imageId}|${targetTransfer}`; + } + + function evictUntilRoom(needed: number): void { + while (totalBytes + needed > maxBytes && entries.size > 0) { + const lruKey = entries.keys().next().value; + if (lruKey === undefined) break; + const evicted = entries.get(lruKey); + if (evicted) totalBytes -= evicted.byteLength; + entries.delete(lruKey); + } + } + + return { + getConverted(imageId, sourceTransfer, targetTransfer, source) { + if (sourceTransfer === targetTransfer) { + return source; + } + + if (maxBytes === 0) { + const fresh = Buffer.from(source); + convertTransfer(fresh, sourceTransfer, targetTransfer); + return fresh; + } + + const key = makeKey(imageId, targetTransfer); + const existing = entries.get(key); + if (existing) { + entries.delete(key); + entries.set(key, existing); + return existing; + } + + const converted = Buffer.from(source); + convertTransfer(converted, sourceTransfer, targetTransfer); + + if (converted.byteLength > maxBytes) { + return converted; + } + + evictUntilRoom(converted.byteLength); + entries.set(key, converted); + totalBytes += converted.byteLength; + return converted; + }, + + size() { + return entries.size; + }, + + bytesUsed() { + return totalBytes; + }, + }; +} diff --git a/packages/producer/src/services/renderOrchestrator.ts b/packages/producer/src/services/renderOrchestrator.ts index 64cd5b028..33afa3200 100644 --- a/packages/producer/src/services/renderOrchestrator.ts +++ b/packages/producer/src/services/renderOrchestrator.ts @@ -101,6 +101,11 @@ import { } from "./htmlCompiler.js"; import { defaultLogger, type ProducerLogger } from "../logger.js"; import { isPathInside } from 
"../utils/paths.js"; +import { clearMaxFrameIndex, getMaxFrameIndex } from "./frameDirCache.js"; +import { + type HdrImageTransferCache, + createHdrImageTransferCache, +} from "./hdrImageTransferCache.js"; /** * Wrap a cleanup operation so it never throws, but logs any failure. @@ -119,45 +124,6 @@ async function safeCleanup( } } -/** - * Cache of the maximum 1-based frame index present in each pre-extracted frame - * directory (e.g. `frame_0001.png … frame_0150.png` → 150). The directory is - * read once on first access and the max is computed by parsing filenames. - * - * Used to bounds-check `videoFrameIndex` against the directory size before - * calling `existsSync` per frame, which avoids redundant filesystem syscalls - * when the requested time falls past the last extracted frame (e.g. a clip - * shorter than the composition's effective video range). - */ -const frameDirMaxIndexCache = new Map(); - -const FRAME_FILENAME_RE = /^frame_(\d+)\.png$/; - -function getMaxFrameIndex(frameDir: string): number { - const cached = frameDirMaxIndexCache.get(frameDir); - if (cached !== undefined) return cached; - let max = 0; - try { - for (const name of readdirSync(frameDir)) { - const m = FRAME_FILENAME_RE.exec(name); - if (!m) continue; - const n = Number(m[1]); - if (Number.isFinite(n) && n > max) max = n; - } - } catch { - // Directory missing or unreadable → max stays 0; downstream existsSync - // check will still produce the right "no frame" outcome. - } - frameDirMaxIndexCache.set(frameDir, max); - return max; -} - -/** - * Sum file sizes under `dir` recursively. Used to report a `tmpPeakBytes` - * proxy in `RenderPerfSummary` right before workDir cleanup. Swallows errors - * because it's purely observational — a missing workDir or symlink loop must - * not fail the render. 
- */ function sampleDirectoryBytes(dir: string): number { let total = 0; const stack: string[] = [dir]; @@ -598,6 +564,7 @@ function blitHdrImageLayer( canvas: Buffer, el: ElementStackingInfo, hdrImageBuffers: Map, + hdrImageTransferCache: HdrImageTransferCache, width: number, height: number, log?: ProducerLogger, @@ -610,13 +577,13 @@ function blitHdrImageLayer( } try { - let hdrRgb = buf.data; - if (sourceTransfer && targetTransfer && sourceTransfer !== targetTransfer) { - // convertTransfer mutates in place; copy first so the cached decode stays - // pristine for subsequent frames. - hdrRgb = Buffer.from(buf.data); - convertTransfer(hdrRgb, sourceTransfer, targetTransfer); - } + // The cache returns `buf.data` unchanged when no conversion is needed, + // and otherwise returns a per-(imageId, targetTransfer) buffer that was + // converted exactly once and reused across every subsequent frame. + const hdrRgb = + sourceTransfer && targetTransfer + ? hdrImageTransferCache.getConverted(el.id, sourceTransfer, targetTransfer, buf.data) + : buf.data; const viewportMatrix = parseTransformMatrix(el.transform); @@ -677,6 +644,7 @@ interface HdrCompositeContext { effectiveHdr: { transfer: HdrTransfer }; nativeHdrImageIds: Set; hdrImageBuffers: Map; + hdrImageTransferCache: HdrImageTransferCache; hdrFrameDirs: Map; hdrVideoStartTimes: Map; imageTransfers: Map; @@ -730,6 +698,7 @@ async function compositeHdrFrame( effectiveHdr, nativeHdrImageIds, hdrImageBuffers, + hdrImageTransferCache, hdrFrameDirs, hdrVideoStartTimes, imageTransfers, @@ -777,6 +746,7 @@ async function compositeHdrFrame( canvas, layer.element, hdrImageBuffers, + hdrImageTransferCache, width, height, log, @@ -1659,7 +1629,7 @@ export async function executeRenderJob( let hdrEncoderClosed = false; let domSessionClosed = false; // Track HDR video frame directories at this scope so the outer finally - // can clear their entries from the module-scoped frameDirMaxIndexCache. 
+ // can clear their entries from the shared frameDirCache module. // Without this, the cache leaks one entry per HDR video per render. const hdrFrameDirs = new Map(); try { @@ -1825,7 +1795,7 @@ export async function executeRenderJob( // ── Pre-extract all HDR video frames in a single FFmpeg pass ────── // hdrFrameDirs is declared above the try block so the outer finally - // can clear matching frameDirMaxIndexCache entries on any exit path. + // can clear matching frameDirCache entries on any exit path. for (const [videoId, srcPath] of hdrVideoSrcPaths) { const video = composition.videos.find((v) => v.id === videoId); if (!video) continue; @@ -1961,6 +1931,18 @@ export async function executeRenderJob( "Internal: HDR render path entered without effectiveHdr — this is a bug.", ); } + // Per-job LRU cache for transfer-converted HDR image buffers. Static HDR + // images that need PQ↔HLG conversion are converted exactly once per + // (imageId, targetTransfer) and then reused for every subsequent frame + // instead of paying a fresh `Buffer.from` + `convertTransfer` on every + // composite. The cache is local to this render job so concurrent renders + // do not share state. + const hdrCacheMaxBytes = process.env.HDR_TRANSFER_CACHE_MAX_BYTES + ? Number(process.env.HDR_TRANSFER_CACHE_MAX_BYTES) + : undefined; + const hdrImageTransferCache = createHdrImageTransferCache( + hdrCacheMaxBytes !== undefined ? 
{ maxBytes: hdrCacheMaxBytes } : {}, + ); const hdrCompositeCtx: HdrCompositeContext = { log, domSession, @@ -1971,6 +1953,7 @@ export async function executeRenderJob( effectiveHdr, nativeHdrImageIds, hdrImageBuffers, + hdrImageTransferCache, hdrFrameDirs, hdrVideoStartTimes, imageTransfers, @@ -2090,6 +2073,7 @@ export async function executeRenderJob( sceneBuf as Buffer, el, hdrImageBuffers, + hdrImageTransferCache, width, height, log, @@ -2207,9 +2191,9 @@ export async function executeRenderJob( } // Drop the matching cache entry so we don't leak a stale // max-frame-index reading for a directory that no longer - // exists. Without this, the module-scoped cache grows + // exists. Without this, the shared cache grows // monotonically across renders. - frameDirMaxIndexCache.delete(frameDir); + clearMaxFrameIndex(frameDir); hdrFrameDirs.delete(videoId); } cleanedUpVideos.add(videoId); @@ -2267,13 +2251,13 @@ export async function executeRenderJob( }); }); } - // Drop frameDirMaxIndexCache entries for any HDR frame directories - // that survived the in-loop cleanup (early failures, KEEP_TEMP=1, - // videos still active when the render exits). The on-disk frames - // themselves are torn down with workDir; we just don't want the - // module-scoped cache to leak entries across renders. + // Drop frameDirCache entries for any HDR frame directories that + // survived the in-loop cleanup (early failures, KEEP_TEMP=1, videos + // still active when the render exits). The on-disk frames themselves + // are torn down with workDir; we just don't want the shared cache to + // leak entries across renders. for (const frameDir of hdrFrameDirs.values()) { - frameDirMaxIndexCache.delete(frameDir); + clearMaxFrameIndex(frameDir); } hdrFrameDirs.clear(); }