fix(image-pipeline): diagnose and stabilize webgl preview path

This commit is contained in:
2026-04-05 11:28:42 +02:00
parent 186a5b9f92
commit 451ab0b986
11 changed files with 401 additions and 25 deletions

View File

@@ -1,6 +1,6 @@
"use client";
import { useMemo } from "react";
import { useEffect, useMemo, useRef } from "react";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
@@ -18,6 +18,30 @@ const PREVIEW_PIPELINE_TYPES = new Set([
"detail-adjust",
]);
// Shape of the latency marker that upstream node editors publish on globalThis.
type PreviewLatencyTrace = {
  sequence: number;
  changedAtMs: number;
  nodeType: string;
  origin: string;
};

// Dev-only globals used for opt-in preview latency tracing.
type PreviewLatencyGlobals = typeof globalThis & {
  __LEMONSPACE_DEBUG_PREVIEW_LATENCY__?: boolean;
  __LEMONSPACE_LAST_PREVIEW_TRACE__?: PreviewLatencyTrace;
};

/**
 * Reads the most recently written preview-latency trace, if tracing is
 * opted in. Returns null in production builds, when the debug flag is not
 * exactly `true`, or when no trace has been written yet.
 */
function readPreviewLatencyTrace(): PreviewLatencyTrace | null {
  if (process.env.NODE_ENV === "production") {
    return null;
  }
  const debugGlobals = globalThis as PreviewLatencyGlobals;
  const tracingEnabled = debugGlobals.__LEMONSPACE_DEBUG_PREVIEW_LATENCY__ === true;
  return tracingEnabled ? debugGlobals.__LEMONSPACE_LAST_PREVIEW_TRACE__ ?? null : null;
}
export default function AdjustmentPreview({
nodeId,
nodeWidth,
@@ -30,6 +54,7 @@ export default function AdjustmentPreview({
currentParams: unknown;
}) {
const graph = useCanvasGraph();
const lastLoggedTraceSequenceRef = useRef<number | null>(null);
const sourceUrl = useMemo(
() =>
@@ -68,6 +93,30 @@ export default function AdjustmentPreview({
});
}, [currentParams, currentType, graph, nodeId]);
// Dev-only: when a fresh latency trace is present, log once per trace
// sequence how long an upstream node change took to become visible on this
// downstream preview (includes pipeline depth/step types for context).
useEffect(() => {
const trace = readPreviewLatencyTrace();
if (!trace) {
return;
}
// De-duplicate: the effect re-runs on every steps/source change, but each
// trace sequence should only be reported once.
if (lastLoggedTraceSequenceRef.current === trace.sequence) {
return;
}
lastLoggedTraceSequenceRef.current = trace.sequence;
console.info("[Preview latency] downstream-graph-visible", {
nodeId,
nodeType: currentType,
sourceNodeType: trace.nodeType,
sourceOrigin: trace.origin,
// Wall-clock delta from the source node's change to this effect firing.
sinceChangeMs: performance.now() - trace.changedAtMs,
pipelineDepth: steps.length,
stepTypes: steps.map((step) => step.type),
hasSource: Boolean(sourceUrl),
});
}, [currentType, nodeId, sourceUrl, steps]);
const { canvasRef, histogram, isRendering, hasSource, previewAspectRatio, error } =
usePipelinePreview({
sourceUrl,

View File

@@ -21,6 +21,37 @@ function logNodeDataDebug(event: string, payload: Record<string, unknown>): void
console.info("[Canvas node debug]", event, payload);
}
// Marker published by node editors when local params change; consumed by
// downstream preview components/hooks to compute change-to-paint latency.
type PreviewLatencyTrace = {
  sequence: number;
  changedAtMs: number;
  nodeType: string;
  origin: "applyLocalData" | "updateLocalData";
};

/**
 * Publishes a preview-latency trace on globalThis. Dev-only and opt-in via
 * the __LEMONSPACE_DEBUG_PREVIEW_LATENCY__ flag. Each write bumps the
 * sequence so readers can de-duplicate, and the trace is echoed to the
 * console for manual timeline inspection.
 */
function writePreviewLatencyTrace(trace: Omit<PreviewLatencyTrace, "sequence">): void {
  if (process.env.NODE_ENV === "production") {
    return;
  }
  const debugScope = globalThis as typeof globalThis & {
    __LEMONSPACE_DEBUG_PREVIEW_LATENCY__?: boolean;
    __LEMONSPACE_LAST_PREVIEW_TRACE__?: PreviewLatencyTrace;
  };
  if (debugScope.__LEMONSPACE_DEBUG_PREVIEW_LATENCY__ !== true) {
    return;
  }
  const previousSequence = debugScope.__LEMONSPACE_LAST_PREVIEW_TRACE__?.sequence ?? 0;
  const nextTrace: PreviewLatencyTrace = { ...trace, sequence: previousSequence + 1 };
  debugScope.__LEMONSPACE_LAST_PREVIEW_TRACE__ = nextTrace;
  console.info("[Preview latency] node-local-change", nextTrace);
}
export function useNodeLocalData<T>({
data,
normalize,
@@ -78,11 +109,16 @@ export function useNodeLocalData<T>({
// Replaces the node's local data wholesale and schedules a debounced save.
// Also emits a dev-only latency trace so downstream previews can measure
// change-to-paint time. (The stale `[queueSave]` dependency array left over
// from the previous revision is removed; `debugLabel` must be a dependency
// because the trace callback captures it.)
const applyLocalData = useCallback(
  (next: T) => {
    hasPendingLocalChangesRef.current = true;
    writePreviewLatencyTrace({
      changedAtMs: performance.now(),
      nodeType: debugLabel,
      origin: "applyLocalData",
    });
    localDataRef.current = next;
    setLocalDataState(next);
    queueSave();
  },
  [debugLabel, queueSave],
);
const updateLocalData = useCallback(
@@ -90,12 +126,17 @@ export function useNodeLocalData<T>({
hasPendingLocalChangesRef.current = true;
setLocalDataState((current) => {
const next = updater(current);
writePreviewLatencyTrace({
changedAtMs: performance.now(),
nodeType: debugLabel,
origin: "updateLocalData",
});
localDataRef.current = next;
queueSave();
return next;
});
},
[queueSave],
[debugLabel, queueSave],
);
return {

View File

@@ -23,6 +23,46 @@ type UsePipelinePreviewOptions = {
const PREVIEW_RENDER_DEBOUNCE_MS = 48;
// Trace record published by upstream node editors when params change.
type PreviewLatencyTrace = {
  sequence: number;
  changedAtMs: number;
  nodeType: string;
  origin: string;
};

/**
 * Returns the latest preview-latency trace when dev tracing is switched on
 * via __LEMONSPACE_DEBUG_PREVIEW_LATENCY__, otherwise null (and always null
 * in production builds).
 */
function readPreviewLatencyTrace(): PreviewLatencyTrace | null {
  const scope = globalThis as typeof globalThis & {
    __LEMONSPACE_DEBUG_PREVIEW_LATENCY__?: boolean;
    __LEMONSPACE_LAST_PREVIEW_TRACE__?: PreviewLatencyTrace;
  };
  const tracingActive =
    process.env.NODE_ENV !== "production" &&
    scope.__LEMONSPACE_DEBUG_PREVIEW_LATENCY__ === true;
  if (!tracingActive) {
    return null;
  }
  return scope.__LEMONSPACE_LAST_PREVIEW_TRACE__ ?? null;
}
/**
 * Emits a "[Preview latency]" console line for the given event, but only
 * outside production builds and only while the opt-in debug flag is `true`.
 */
function logPreviewLatency(event: string, payload: Record<string, unknown>): void {
  if (process.env.NODE_ENV === "production") {
    return;
  }
  const flagCarrier = globalThis as typeof globalThis & {
    __LEMONSPACE_DEBUG_PREVIEW_LATENCY__?: boolean;
  };
  const enabled = flagCarrier.__LEMONSPACE_DEBUG_PREVIEW_LATENCY__ === true;
  if (enabled) {
    console.info("[Preview latency]", event, payload);
  }
}
function computePreviewWidth(
nodeWidth: number,
previewScale: number,
@@ -121,8 +161,23 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
const currentRun = runIdRef.current + 1;
runIdRef.current = currentRun;
const abortController = new AbortController();
const effectStartedAtMs = performance.now();
const timer = window.setTimeout(() => {
const requestStartedAtMs = performance.now();
const trace = readPreviewLatencyTrace();
logPreviewLatency("request-start", {
currentRun,
pipelineHash,
previewWidth,
includeHistogram: options.includeHistogram !== false,
debounceWaitMs: requestStartedAtMs - effectStartedAtMs,
sinceChangeMs: trace ? requestStartedAtMs - trace.changedAtMs : null,
sourceNodeType: trace?.nodeType ?? null,
sourceOrigin: trace?.origin ?? null,
});
setIsRendering(true);
setError(null);
void renderPreviewWithWorkerFallback({
@@ -145,8 +200,20 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
return;
}
context.putImageData(result.imageData, 0, 0);
const paintedAtMs = performance.now();
setHistogram(result.histogram);
setPreviewAspectRatio(result.width / result.height);
logPreviewLatency("paint-end", {
currentRun,
pipelineHash,
previewWidth,
imageWidth: result.width,
imageHeight: result.height,
requestDurationMs: paintedAtMs - requestStartedAtMs,
sinceChangeMs: trace ? paintedAtMs - trace.changedAtMs : null,
diagnostics: getLastBackendDiagnostics(),
});
})
.catch((renderError: unknown) => {
if (runIdRef.current !== currentRun) return;

View File

@@ -65,6 +65,14 @@ function normalizeBackendHint(value: BackendHint): string | null {
return normalized.length > 0 ? normalized : null;
}
/**
 * Console diagnostics for backend routing decisions. Silenced in production
 * and test environments so user consoles and CI logs stay clean.
 */
function logBackendRouterDebug(event: string, payload: Record<string, unknown>): void {
  const env = process.env.NODE_ENV;
  if (env !== "production" && env !== "test") {
    console.info("[image-pipeline backend]", event, payload);
  }
}
export function createBackendRouter(options?: {
backends?: readonly ImagePipelineBackend[];
defaultBackendId?: string;
@@ -123,6 +131,12 @@ export function createBackendRouter(options?: {
}
// Records a dev-only diagnostic for the backend fallback, then forwards the
// event to the caller-supplied onFallback hook when one was provided.
function emitFallback(event: BackendFallbackEvent): void {
  const { reason, requestedBackend, fallbackBackend, error } = event;
  logBackendRouterDebug("fallback", {
    reason,
    requestedBackend,
    fallbackBackend,
    errorMessage: error?.message,
  });
  options?.onFallback?.(event);
}
@@ -335,23 +349,32 @@ function getRolloutRouterState(): RolloutRouterState {
/**
 * Chooses the preferred backend for a preview pipeline: WebGL when it is
 * enabled, available, and every step is WebGL-supported; otherwise WASM when
 * enabled and available; otherwise the CPU backend. The decision is logged
 * (dev-only) before returning so rollout issues can be diagnosed from the
 * console.
 *
 * Note: this span contained leftover `return` statements from the previous
 * revision interleaved with the `backendHint` assignments (stripped diff
 * residue); this is the coherent current revision with a single exit point.
 */
export function getPreviewBackendHintForSteps(steps: readonly PreviewBackendRequest["step"][]): BackendHint {
  const rolloutState = getRolloutRouterState();
  let backendHint: BackendHint;
  if (rolloutState.webglEnabled && rolloutState.webglAvailable) {
    if (isWebglPreviewPipelineSupported(steps)) {
      backendHint = "webgl";
    } else if (rolloutState.wasmEnabled && rolloutState.wasmAvailable) {
      // WebGL is on but this pipeline has unsupported steps; fall to WASM.
      backendHint = "wasm";
    } else {
      backendHint = CPU_BACKEND_ID;
    }
  } else if (rolloutState.wasmEnabled && rolloutState.wasmAvailable) {
    backendHint = "wasm";
  } else {
    backendHint = CPU_BACKEND_ID;
  }
  logBackendRouterDebug("preview-backend-hint", {
    backendHint,
    stepTypes: steps.map((step) => step.type),
    webglAvailable: rolloutState.webglAvailable,
    webglEnabled: rolloutState.webglEnabled,
    wasmAvailable: rolloutState.wasmAvailable,
    wasmEnabled: rolloutState.wasmEnabled,
  });
  return backendHint;
}
export function runPreviewStepWithBackendRouter(request: PreviewBackendRequest): void {

View File

@@ -10,6 +10,8 @@ type CapabilityProbes = {
probeOffscreenCanvas: () => boolean;
};
let cachedDefaultCapabilities: BackendCapabilities | null = null;
export const WASM_SIMD_PROBE_MODULE = new Uint8Array([
0x00,
0x61,
@@ -46,12 +48,27 @@ function probeOffscreenCanvasAvailability(): boolean {
return typeof OffscreenCanvas !== "undefined";
}
/**
 * Best-effort teardown for a WebGL context that was created purely to probe
 * availability: asks the WEBGL_lose_context extension (when present) to
 * release GPU resources, swallowing any cleanup failure.
 */
function releaseProbeWebglContext(
  context: WebGLRenderingContext | WebGL2RenderingContext | null,
): void {
  if (!context) {
    return;
  }
  try {
    const loseContextExtension = context.getExtension("WEBGL_lose_context");
    loseContextExtension?.loseContext();
  } catch {
    // Ignore cleanup failures in capability probes.
  }
}
function probeWebglAvailability(): boolean {
try {
if (typeof document !== "undefined") {
const canvas = document.createElement("canvas");
const context = canvas.getContext("webgl2") ?? canvas.getContext("webgl");
if (context) {
releaseProbeWebglContext(context);
return true;
}
}
@@ -59,6 +76,7 @@ function probeWebglAvailability(): boolean {
if (typeof OffscreenCanvas !== "undefined") {
const offscreenCanvas = new OffscreenCanvas(1, 1);
const context = offscreenCanvas.getContext("webgl2") ?? offscreenCanvas.getContext("webgl");
releaseProbeWebglContext(context);
return Boolean(context);
}
@@ -80,14 +98,28 @@ function probeWasmSimdAvailability(): boolean {
}
}
// Clears the memoized result of the default capability probes so the next
// detectBackendCapabilities() call re-probes the environment (used by tests
// after stubbing browser APIs).
export function resetBackendCapabilitiesCache(): void {
cachedDefaultCapabilities = null;
}
export function detectBackendCapabilities(probes?: Partial<CapabilityProbes>): BackendCapabilities {
if (!probes && cachedDefaultCapabilities) {
return cachedDefaultCapabilities;
}
const probeWebgl = probes?.probeWebgl ?? probeWebglAvailability;
const probeWasmSimd = probes?.probeWasmSimd ?? probeWasmSimdAvailability;
const probeOffscreenCanvas = probes?.probeOffscreenCanvas ?? probeOffscreenCanvasAvailability;
return {
const capabilities = {
webgl: probeWebgl(),
wasmSimd: probeWasmSimd(),
offscreenCanvas: probeOffscreenCanvas(),
};
if (!probes) {
cachedDefaultCapabilities = capabilities;
}
return capabilities;
}

View File

@@ -172,6 +172,14 @@ const SUPPORTED_PREVIEW_STEP_TYPES = new Set<SupportedPreviewStepType>([
"detail-adjust",
]);
/**
 * Dev-only console diagnostics for the WebGL backend; suppressed in
 * production and test builds to keep user consoles and CI output clean.
 */
function logWebglBackendDebug(event: string, payload: Record<string, unknown>): void {
  const env = process.env.NODE_ENV;
  const silenced = env === "production" || env === "test";
  if (!silenced) {
    console.info("[image-pipeline webgl]", event, payload);
  }
}
function assertSupportedStep(step: PipelineStep): void {
if (SUPPORTED_PREVIEW_STEP_TYPES.has(step.type as SupportedPreviewStepType)) {
return;
@@ -415,6 +423,7 @@ function applyStepUniforms(
function runStepOnGpu(context: WebglBackendContext, request: BackendStepRequest): void {
const { gl } = context;
const startedAtMs = performance.now();
const shaderProgram =
request.step.type === "curves"
? context.curvesProgram
@@ -509,13 +518,23 @@ function runStepOnGpu(context: WebglBackendContext, request: BackendStepRequest)
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
const readback = new Uint8Array(request.pixels.length);
const readbackStartedAtMs = performance.now();
gl.readPixels(0, 0, request.width, request.height, gl.RGBA, gl.UNSIGNED_BYTE, readback);
const readbackDurationMs = performance.now() - readbackStartedAtMs;
request.pixels.set(readback);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.deleteFramebuffer(framebuffer);
gl.deleteTexture(sourceTexture);
gl.deleteTexture(outputTexture);
logWebglBackendDebug("step-complete", {
stepType: request.step.type,
width: request.width,
height: request.height,
totalDurationMs: performance.now() - startedAtMs,
readbackDurationMs,
});
}
export function isWebglPreviewStepSupported(step: PipelineStep): boolean {

View File

@@ -3,12 +3,21 @@ import { renderPreview } from "@/lib/image-pipeline/preview-renderer";
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import {
IMAGE_PIPELINE_BACKEND_FLAG_KEYS,
type BackendFeatureFlags,
} from "@/lib/image-pipeline/backend/feature-flags";
type PreviewWorkerPayload = {
sourceUrl: string;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
featureFlags?: BackendFeatureFlags;
};
type FullWorkerPayload = RenderFullOptions & {
featureFlags?: BackendFeatureFlags;
};
type WorkerRequestMessage =
@@ -20,7 +29,7 @@ type WorkerRequestMessage =
| {
kind: "full";
requestId: number;
payload: RenderFullOptions;
payload: FullWorkerPayload;
}
| {
kind: "cancel";
@@ -62,6 +71,16 @@ type WorkerScope = {
const workerScope = self as unknown as WorkerScope;
const runningControllers = new Map<number, AbortController>();
/**
 * Mirrors the main thread's backend feature flags into this worker's global
 * flag store so backend selection inside the worker matches the page.
 * Missing flags default to `false` (the most conservative backend choice).
 */
function applyWorkerFeatureFlags(featureFlags: BackendFeatureFlags | undefined): void {
  const flagStore = globalThis as typeof globalThis & {
    __LEMONSPACE_FEATURE_FLAGS__?: Record<string, unknown>;
  };
  flagStore.__LEMONSPACE_FEATURE_FLAGS__ = {
    [IMAGE_PIPELINE_BACKEND_FLAG_KEYS.forceCpu]: featureFlags?.forceCpu ?? false,
    [IMAGE_PIPELINE_BACKEND_FLAG_KEYS.webglEnabled]: featureFlags?.webglEnabled ?? false,
    [IMAGE_PIPELINE_BACKEND_FLAG_KEYS.wasmEnabled]: featureFlags?.wasmEnabled ?? false,
  };
}
function postMessageSafe(message: WorkerResponseMessage, transfer?: Transferable[]): void {
if (transfer) {
workerScope.postMessage(message, transfer);
@@ -90,6 +109,7 @@ async function handlePreviewRequest(requestId: number, payload: PreviewWorkerPay
runningControllers.set(requestId, controller);
try {
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderPreview({
sourceUrl: payload.sourceUrl,
steps: payload.steps,
@@ -133,13 +153,16 @@ async function handlePreviewRequest(requestId: number, payload: PreviewWorkerPay
}
}
async function handleFullRequest(requestId: number, payload: RenderFullOptions): Promise<void> {
async function handleFullRequest(requestId: number, payload: FullWorkerPayload): Promise<void> {
const controller = new AbortController();
runningControllers.set(requestId, controller);
try {
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderFull({
...payload,
sourceUrl: payload.sourceUrl,
steps: payload.steps,
render: payload.render,
signal: controller.signal,
});
@@ -150,11 +173,10 @@ async function handleFullRequest(requestId: number, payload: RenderFullOptions):
});
} catch (error: unknown) {
if (typeof console !== "undefined" && process.env.NODE_ENV !== "production") {
console.error("[image-pipeline.worker] preview request failed", {
console.error("[image-pipeline.worker] full request failed", {
requestId,
sourceUrl: payload.sourceUrl,
previewWidth: payload.previewWidth,
includeHistogram: payload.includeHistogram,
render: payload.render,
error,
});
}

View File

@@ -6,6 +6,10 @@ import {
import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import {
getBackendFeatureFlags,
type BackendFeatureFlags,
} from "@/lib/image-pipeline/backend/feature-flags";
export type { PreviewRenderResult };
@@ -20,6 +24,11 @@ type PreviewWorkerPayload = {
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
featureFlags?: BackendFeatureFlags;
};
type FullWorkerPayload = RenderFullOptions & {
featureFlags?: BackendFeatureFlags;
};
type WorkerRequestMessage =
@@ -31,7 +40,7 @@ type WorkerRequestMessage =
| {
kind: "full";
requestId: number;
payload: RenderFullOptions;
payload: FullWorkerPayload;
}
| {
kind: "cancel";
@@ -239,7 +248,7 @@ function getWorker(): Worker {
function runWorkerRequest<TResponse extends PreviewRenderResult | RenderFullResult>(args: {
kind: "preview" | "full";
payload: PreviewWorkerPayload | RenderFullOptions;
payload: PreviewWorkerPayload | FullWorkerPayload;
signal?: AbortSignal;
}): Promise<TResponse> {
if (args.signal?.aborted) {
@@ -327,6 +336,10 @@ function getPreviewRequestKey(options: {
].join(":");
}
// Captures the backend feature flags at request-build time so the worker
// renders with the same flag configuration as the posting page.
function getWorkerFeatureFlagsSnapshot(): BackendFeatureFlags {
return getBackendFeatureFlags();
}
async function runPreviewRequest(options: {
sourceUrl: string;
steps: readonly PipelineStep[];
@@ -342,6 +355,7 @@ async function runPreviewRequest(options: {
steps: options.steps,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
featureFlags: getWorkerFeatureFlagsSnapshot(),
},
signal: options.signal,
});
@@ -477,7 +491,10 @@ export async function renderFullWithWorkerFallback(
try {
return await runWorkerRequest<RenderFullResult>({
kind: "full",
payload: options,
payload: {
...options,
featureFlags: getWorkerFeatureFlagsSnapshot(),
},
signal: options.signal,
});
} catch (error: unknown) {

View File

@@ -3,7 +3,10 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { ImagePipelineBackend } from "@/lib/image-pipeline/backend/backend-types";
import { detectBackendCapabilities } from "@/lib/image-pipeline/backend/capabilities";
import {
detectBackendCapabilities,
resetBackendCapabilitiesCache,
} from "@/lib/image-pipeline/backend/capabilities";
import { createBackendRouter } from "@/lib/image-pipeline/backend/backend-router";
const previewRendererMocks = vi.hoisted(() => ({
@@ -23,6 +26,10 @@ vi.mock("@/lib/image-pipeline/bridge", () => ({
}));
describe("detectBackendCapabilities", () => {
beforeEach(() => {
resetBackendCapabilitiesCache();
});
it("reports webgl, wasmSimd and offscreenCanvas independently", () => {
expect(
detectBackendCapabilities({
@@ -48,6 +55,39 @@ describe("detectBackendCapabilities", () => {
offscreenCanvas: false,
});
});
it("caches default WebGL capability detection and releases the probe context", () => {
const loseContext = vi.fn();
const getContext = vi.fn(() => ({
getExtension: vi.fn((name: string) => {
if (name === "WEBGL_lose_context") {
return { loseContext };
}
return null;
}),
}));
const originalCreateElement = document.createElement.bind(document);
const createElementSpy = vi.spyOn(document, "createElement").mockImplementation((tagName) => {
if (tagName === "canvas") {
return {
getContext,
} as unknown as HTMLCanvasElement;
}
return originalCreateElement(tagName);
});
const first = detectBackendCapabilities();
const second = detectBackendCapabilities();
expect(first.webgl).toBe(true);
expect(second.webgl).toBe(true);
expect(getContext).toHaveBeenCalledTimes(1);
expect(loseContext).toHaveBeenCalledTimes(1);
createElementSpy.mockRestore();
});
});
describe("backend router fallback reasons", () => {

View File

@@ -8,12 +8,14 @@ import { emptyHistogram } from "@/lib/image-pipeline/histogram";
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
const workerClientMocks = vi.hoisted(() => ({
getLastBackendDiagnostics: vi.fn(() => null),
renderPreviewWithWorkerFallback: vi.fn(),
}));
const PREVIEW_SETTLE_MS = 80;
vi.mock("@/lib/image-pipeline/worker-client", () => ({
getLastBackendDiagnostics: workerClientMocks.getLastBackendDiagnostics,
isPipelineAbortError: () => false,
renderPreviewWithWorkerFallback: workerClientMocks.renderPreviewWithWorkerFallback,
}));
@@ -96,6 +98,8 @@ describe("usePipelinePreview", () => {
previewHarnessState.latestHistogram = emptyHistogram();
previewHarnessState.latestError = null;
previewHarnessState.latestIsRendering = false;
workerClientMocks.getLastBackendDiagnostics.mockReset();
workerClientMocks.getLastBackendDiagnostics.mockReturnValue(null);
workerClientMocks.renderPreviewWithWorkerFallback.mockReset();
workerClientMocks.renderPreviewWithWorkerFallback.mockResolvedValue({
width: 120,

View File

@@ -43,6 +43,11 @@ type WorkerMessage =
payload?: {
previewWidth?: number;
includeHistogram?: boolean;
featureFlags?: {
forceCpu: boolean;
webglEnabled: boolean;
wasmEnabled: boolean;
};
};
}
| {
@@ -315,6 +320,63 @@ describe("worker-client fallbacks", () => {
expect(workerMessages.filter((message) => message.kind === "preview")).toHaveLength(3);
});
it("passes backend feature flags to worker preview requests", async () => {
const workerMessages: WorkerMessage[] = [];
FakeWorker.behavior = (worker, message) => {
workerMessages.push(message);
if (message.kind !== "preview") {
return;
}
queueMicrotask(() => {
worker.onmessage?.({
data: {
kind: "preview-result",
requestId: message.requestId,
payload: {
width: 8,
height: 4,
histogram: emptyHistogram(),
pixels: new Uint8ClampedArray(8 * 4 * 4).buffer,
},
},
} as MessageEvent);
});
};
vi.stubGlobal("Worker", FakeWorker as unknown as typeof Worker);
(
globalThis as typeof globalThis & {
__LEMONSPACE_FEATURE_FLAGS__?: Record<string, unknown>;
}
).__LEMONSPACE_FEATURE_FLAGS__ = {
"imagePipeline.backend.forceCpu": false,
"imagePipeline.backend.webgl.enabled": true,
"imagePipeline.backend.wasm.enabled": true,
};
const { renderPreviewWithWorkerFallback } = await import("@/lib/image-pipeline/worker-client");
await renderPreviewWithWorkerFallback({
sourceUrl: "https://cdn.example.com/source.png",
steps: [],
previewWidth: 128,
includeHistogram: true,
});
expect(workerMessages).toContainEqual(
expect.objectContaining({
kind: "preview",
payload: expect.objectContaining({
featureFlags: {
forceCpu: false,
webglEnabled: true,
wasmEnabled: true,
},
}),
}),
);
});
it("removes aborted subscribers without canceling surviving identical preview consumers", async () => {
const workerMessages: WorkerMessage[] = [];
const previewStarted = createDeferred<void>();