Merge branch 'feat/mixer-overlay-resize-render-bake'

This commit is contained in:
2026-04-15 08:46:42 +02:00
30 changed files with 6361 additions and 194 deletions

View File

@@ -144,16 +144,23 @@ render: 300 × 420 mixer: 360 × 320
- **Handles:** genau zwei Inputs links (`base`, `overlay`) und ein Output rechts (`mixer-out`).
- **Erlaubte Inputs:** `image`, `asset`, `ai-image`, `render`.
- **Connection-Limits:** maximal 2 eingehende Kanten insgesamt, davon pro Handle maximal 1.
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `offsetX`, `offsetY`.
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` (Frame-Rect, normiert 0..1) plus `contentX`, `contentY`, `contentWidth`, `contentHeight` (Content-Framing innerhalb des Overlay-Frames, ebenfalls normiert 0..1).
- **Output-Semantik:** pseudo-image (clientseitig aus Graph + Controls aufgeloest), kein persistiertes Asset, kein Storage-Write.
- **UI/Interaction:** nur Inline-Formcontrols im Node; keine Drag-Manipulation im Preview, keine Rotation/Skalierung/Masks.
- **UI/Interaction:** Zwei Modi im Preview: `Frame resize` (Overlay-Frame verschieben + ueber Corner-Handles resizen) und `Content framing` (Overlay-Inhalt innerhalb des Frames verschieben). Numerische Inline-Controls bleiben als Feineinstellung erhalten.
- **Sizing/Crop-Verhalten:** Der Overlay-Inhalt wird `object-cover`-aehnlich in den Content-Rect eingepasst; bei abweichenden Seitenverhaeltnissen wird zentriert gecroppt.
### Compare-Integration (V1)
- `compare` versteht `mixer`-Outputs ueber `lib/canvas-mixer-preview.ts`.
- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Offset).
- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Overlay-Rect).
- Scope bleibt eng: keine pauschale pseudo-image-Unterstuetzung fuer alle Consumer in V1.
### Render-Bake-Pfad (V1)
- Offizieller Bake-Flow: `mixer -> render`.
- `render` konsumiert die Mixer-Komposition (`sourceComposition.kind = "mixer"`) und nutzt sie fuer Preview + finalen Render/Upload.
- `mixer -> adjustments -> render` ist bewusst verschoben (deferred) und aktuell nicht offizieller Scope.
---
## Node-Status-Modell
@@ -325,7 +332,8 @@ useCanvasData (use-canvas-data.ts)
- **Node-Taxonomie:** Alle Node-Typen sind in `lib/canvas-node-catalog.ts` definiert. Phase-2/3 Nodes haben `implemented: false` und `disabledHint`.
- **Video-Connection-Policy:** `video-prompt` darf **nur** mit `ai-video` verbunden werden (und umgekehrt). `text → video-prompt` ist erlaubt (Prompt-Quelle). `ai-video → compare` ist erlaubt.
- **Mixer-Connection-Policy:** `mixer` akzeptiert nur `image|asset|ai-image|render`; Ziel-Handles sind nur `base` und `overlay`, pro Handle maximal eine eingehende Kante, insgesamt maximal zwei.
- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Downstream-Nodes muessen den pseudo-image-Resolver nutzen (aktuell gezielt fuer `compare`).
- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Offizielle Consumer sind `compare` und der direkte Bake-Pfad `mixer -> render`; `mixer -> adjustments -> render` bleibt vorerst deferred.
- **Mixer Legacy-Daten:** Alte `offsetX`/`offsetY`-Mixer-Daten werden beim Lesen auf den Full-Frame-Fallback (`overlay* = 0/0/1/1`) normalisiert; Content-Framing defaults auf `content* = 0/0/1/1`.
- **Agent-Flow:** `agent` akzeptiert nur Content-/Kontext-Quellen (z. B. `render`, `compare`, `text`, `image`) als Input; ausgehende Kanten sind fuer `agent -> agent-output` vorgesehen.
- **Convex Generated Types:** `api.ai.generateVideo` wird u. U. nicht in `convex/_generated/api.d.ts` exportiert. Der Code verwendet `api as unknown as {...}` als Workaround. Ein `npx convex dev`-Zyklus würde die Typen korrekt generieren.
- **Canvas Graph Query:** Der Canvas nutzt `canvasGraph.get` (aus `convex/canvasGraph.ts`) statt separater `nodes.list`/`edges.list` Queries. Optimistic Updates laufen über `canvas-graph-query-cache.ts`.

View File

@@ -1,5 +1,9 @@
// @vitest-environment jsdom
import React from "react";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { act } from "react";
import { createRoot, type Root } from "react-dom/client";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { renderToStaticMarkup } from "react-dom/server";
import { CanvasGraphProvider } from "@/components/canvas/canvas-graph-context";
@@ -15,12 +19,20 @@ type StoreState = {
}>;
};
type ResizeObserverEntryLike = {
target: Element;
contentRect: { width: number; height: number };
};
const storeState: StoreState = {
nodes: [],
edges: [],
};
const compareSurfaceSpy = vi.fn();
let resizeObserverCallback:
| ((entries: ResizeObserverEntryLike[]) => void)
| null = null;
vi.mock("@xyflow/react", () => ({
Handle: () => null,
@@ -53,6 +65,14 @@ vi.mock("@/components/canvas/canvas-handle", () => ({
),
}));
vi.mock("@/hooks/use-pipeline-preview", () => ({
usePipelinePreview: () => ({
canvasRef: { current: null },
isRendering: false,
error: null,
}),
}));
vi.mock("../nodes/base-node-wrapper", () => ({
default: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
}));
@@ -66,6 +86,8 @@ vi.mock("../nodes/compare-surface", () => ({
import CompareNode from "../nodes/compare-node";
(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true;
function renderCompareNode(props: Record<string, unknown>) {
return renderToStaticMarkup(
<CanvasGraphProvider
@@ -78,10 +100,47 @@ function renderCompareNode(props: Record<string, unknown>) {
}
describe("CompareNode render preview inputs", () => {
let container: HTMLDivElement | null = null;
let root: Root | null = null;
beforeEach(() => {
storeState.nodes = [];
storeState.edges = [];
compareSurfaceSpy.mockReset();
resizeObserverCallback = null;
globalThis.ResizeObserver = class ResizeObserver {
constructor(callback: (entries: ResizeObserverEntryLike[]) => void) {
resizeObserverCallback = callback;
}
observe(target: Element) {
resizeObserverCallback?.([
{
target,
contentRect: { width: 500, height: 380 },
},
]);
}
unobserve() {}
disconnect() {}
} as unknown as typeof ResizeObserver;
container = document.createElement("div");
document.body.appendChild(container);
root = createRoot(container);
});
afterEach(async () => {
if (root) {
await act(async () => {
root?.unmount();
});
}
container?.remove();
root = null;
container = null;
});
it("passes previewInput to CompareSurface for a connected render node without final output", () => {
@@ -192,6 +251,108 @@ describe("CompareNode render preview inputs", () => {
});
});
it("defaults mixer-backed render compare inputs to preview mode when only sourceComposition exists", () => {
storeState.nodes = [
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-image",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "multiply",
opacity: 62,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.5,
cropLeft: 0.1,
cropTop: 0,
cropRight: 0.2,
cropBottom: 0.1,
},
},
{
id: "render-1",
type: "render",
data: {
lastUploadUrl: "https://cdn.example.com/stale-render-output.png",
},
},
];
storeState.edges = [
{
id: "edge-base-mixer",
source: "base-image",
target: "mixer-1",
targetHandle: "base",
},
{
id: "edge-overlay-mixer",
source: "overlay-image",
target: "mixer-1",
targetHandle: "overlay",
},
{ id: "edge-mixer-render", source: "mixer-1", target: "render-1" },
{
id: "edge-render-compare",
source: "render-1",
target: "compare-1",
targetHandle: "left",
},
];
renderCompareNode({
id: "compare-1",
data: { leftUrl: "https://cdn.example.com/stale-render-output.png" },
selected: false,
dragging: false,
zIndex: 0,
isConnectable: true,
type: "compare",
xPos: 0,
yPos: 0,
width: 500,
height: 380,
sourcePosition: undefined,
targetPosition: undefined,
positionAbsoluteX: 0,
positionAbsoluteY: 0,
});
expect(compareSurfaceSpy).toHaveBeenCalledTimes(1);
expect(compareSurfaceSpy.mock.calls[0]?.[0]).toMatchObject({
finalUrl: "https://cdn.example.com/stale-render-output.png",
preferPreview: true,
previewInput: {
sourceUrl: null,
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "multiply",
opacity: 62,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.5,
cropLeft: 0.1,
cropTop: 0,
cropRight: 0.2,
cropBottom: 0.1,
},
steps: [],
},
});
});
it("prefers mixer composite preview over persisted compare finalUrl when mixer is connected", () => {
storeState.nodes = [
{
@@ -275,14 +436,22 @@ describe("CompareNode render preview inputs", () => {
);
expect(mixerCall?.[0]).toMatchObject({
finalUrl: undefined,
nodeWidth: 500,
nodeHeight: 380,
mixerPreviewState: {
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "multiply",
opacity: 62,
offsetX: 12,
offsetY: -4,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
});
});
@@ -317,4 +486,183 @@ describe("CompareNode render preview inputs", () => {
expect(markup).toContain('data-top="35%"');
expect(markup).toContain('data-top="55%"');
});
it("passes the measured compare surface size to mixer previews instead of the full node box", async () => {
storeState.nodes = [
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-image",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "normal",
opacity: 100,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
},
},
];
storeState.edges = [
{
id: "edge-base-mixer",
source: "base-image",
target: "mixer-1",
targetHandle: "base",
},
{
id: "edge-overlay-mixer",
source: "overlay-image",
target: "mixer-1",
targetHandle: "overlay",
},
{
id: "edge-mixer-compare",
source: "mixer-1",
target: "compare-1",
targetHandle: "left",
},
];
await act(async () => {
root?.render(
<CanvasGraphProvider
nodes={storeState.nodes as Array<{ id: string; type: string; data?: unknown }>}
edges={storeState.edges}
>
<CompareNode
{...({
id: "compare-1",
data: {},
selected: false,
dragging: false,
zIndex: 0,
isConnectable: true,
type: "compare",
xPos: 0,
yPos: 0,
width: 640,
height: 480,
sourcePosition: undefined,
targetPosition: undefined,
positionAbsoluteX: 0,
positionAbsoluteY: 0,
} as unknown as React.ComponentProps<typeof CompareNode>)}
/>
</CanvasGraphProvider>,
);
});
await vi.waitFor(() => {
const latestCompareSurfaceCall = compareSurfaceSpy.mock.calls.findLast(
([props]) =>
Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState),
);
expect(latestCompareSurfaceCall?.[0]).toMatchObject({
nodeWidth: 500,
nodeHeight: 380,
});
});
const surfaceElement = container?.querySelector(".nodrag.relative.min-h-0.w-full");
expect(surfaceElement).toBeInstanceOf(HTMLDivElement);
await act(async () => {
resizeObserverCallback?.([
{
target: surfaceElement as HTMLDivElement,
contentRect: { width: 468, height: 312 },
},
]);
});
const latestCompareSurfaceCall = compareSurfaceSpy.mock.calls.findLast(
([props]) =>
Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState),
);
expect(latestCompareSurfaceCall?.[0]).toMatchObject({
nodeWidth: 468,
nodeHeight: 312,
});
expect(latestCompareSurfaceCall?.[0]).not.toMatchObject({
nodeWidth: 640,
nodeHeight: 480,
});
});
it("anchors direct mixer previews to the actual compare surface rect", async () => {
const compareSurfaceModule = await vi.importActual<typeof import("../nodes/compare-surface")>(
"../nodes/compare-surface",
);
const ActualCompareSurface = compareSurfaceModule.default;
await act(async () => {
root?.render(
<CanvasGraphProvider nodes={[]} edges={[]}>
<ActualCompareSurface
mixerPreviewState={{
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
}}
nodeWidth={500}
nodeHeight={380}
/>
</CanvasGraphProvider>,
);
});
const images = container?.querySelectorAll("img");
const baseImage = images?.[0];
if (!(baseImage instanceof HTMLImageElement)) {
throw new Error("base image not found");
}
Object.defineProperty(baseImage, "naturalWidth", { configurable: true, value: 200 });
Object.defineProperty(baseImage, "naturalHeight", { configurable: true, value: 100 });
await act(async () => {
baseImage.dispatchEvent(new Event("load"));
});
const overlayImage = container?.querySelectorAll("img")?.[1];
if (!(overlayImage instanceof HTMLImageElement)) {
throw new Error("overlay image not found");
}
Object.defineProperty(overlayImage, "naturalWidth", { configurable: true, value: 200 });
Object.defineProperty(overlayImage, "naturalHeight", { configurable: true, value: 100 });
await act(async () => {
overlayImage.dispatchEvent(new Event("load"));
});
const overlayFrame = overlayImage.parentElement;
expect(overlayFrame?.style.left).toBe("0%");
expect(overlayFrame?.style.top).toBe("17.105263157894736%");
expect(overlayFrame?.style.width).toBe("100%");
expect(overlayFrame?.style.height).toBe("65.78947368421053%");
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -218,8 +218,10 @@ describe("useCanvasConnections", () => {
defaultData: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
}),
);
@@ -232,8 +234,10 @@ describe("useCanvasConnections", () => {
data: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
}),
);

View File

@@ -1,6 +1,6 @@
"use client";
import { useCallback, useMemo, useRef, useState } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Position, type NodeProps } from "@xyflow/react";
import { ImageIcon } from "lucide-react";
import BaseNodeWrapper from "./base-node-wrapper";
@@ -36,12 +36,18 @@ type CompareSideState = {
type CompareDisplayMode = "render" | "preview";
export default function CompareNode({ id, data, selected, width }: NodeProps) {
type CompareSurfaceSize = {
width: number;
height: number;
};
export default function CompareNode({ id, data, selected, width, height }: NodeProps) {
const nodeData = data as CompareNodeData;
const graph = useCanvasGraph();
const [sliderX, setSliderX] = useState(50);
const [manualDisplayMode, setManualDisplayMode] = useState<CompareDisplayMode | null>(null);
const containerRef = useRef<HTMLDivElement>(null);
const [surfaceSize, setSurfaceSize] = useState<CompareSurfaceSize | null>(null);
const incomingEdges = useMemo(
() => graph.incomingEdgesByTarget.get(id) ?? [],
[graph, id],
@@ -74,8 +80,14 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
graph,
});
if (preview.sourceUrl) {
previewInput = {
if (preview.sourceUrl || preview.sourceComposition) {
previewInput = preview.sourceComposition
? {
sourceUrl: null,
sourceComposition: preview.sourceComposition,
steps: preview.steps,
}
: {
sourceUrl: preview.sourceUrl,
steps: preview.steps,
};
@@ -92,6 +104,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
sourceLastUploadedHash ?? sourceLastRenderedHash;
const sourceCurrentHash = resolveRenderPipelineHash({
sourceUrl: preview.sourceUrl,
sourceComposition: preview.sourceComposition,
steps: preview.steps,
data: sourceData,
});
@@ -173,7 +186,60 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
resolvedSides.right.isStaleRenderOutput;
const effectiveDisplayMode =
manualDisplayMode ?? (shouldDefaultToPreview ? "preview" : "render");
const previewNodeWidth = Math.max(240, Math.min(640, Math.round(width ?? 500)));
const fallbackSurfaceWidth = Math.max(240, Math.min(640, Math.round(width ?? 500)));
const fallbackSurfaceHeight = Math.max(180, Math.min(720, Math.round(height ?? 380)));
const previewNodeWidth = Math.max(
1,
Math.round(surfaceSize?.width ?? fallbackSurfaceWidth),
);
const previewNodeHeight = Math.max(
1,
Math.round(surfaceSize?.height ?? fallbackSurfaceHeight),
);
useEffect(() => {
const surfaceElement = containerRef.current;
if (!surfaceElement) {
return;
}
const updateSurfaceSize = (nextWidth: number, nextHeight: number) => {
const roundedWidth = Math.max(1, Math.round(nextWidth));
const roundedHeight = Math.max(1, Math.round(nextHeight));
setSurfaceSize((current) =>
current?.width === roundedWidth && current?.height === roundedHeight
? current
: {
width: roundedWidth,
height: roundedHeight,
},
);
};
const measureSurface = () => {
const rect = surfaceElement.getBoundingClientRect();
updateSurfaceSize(rect.width, rect.height);
};
measureSurface();
if (typeof ResizeObserver === "undefined") {
return undefined;
}
const observer = new ResizeObserver((entries) => {
const entry = entries[0];
if (!entry) {
return;
}
updateSurfaceSize(entry.contentRect.width, entry.contentRect.height);
});
observer.observe(surfaceElement);
return () => observer.disconnect();
}, []);
const setSliderPercent = useCallback((value: number) => {
setSliderX(Math.max(0, Math.min(100, value)));
@@ -321,6 +387,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
previewInput={resolvedSides.right.previewInput}
mixerPreviewState={resolvedSides.right.mixerPreviewState}
nodeWidth={previewNodeWidth}
nodeHeight={previewNodeHeight}
preferPreview={effectiveDisplayMode === "preview"}
/>
)}
@@ -332,6 +399,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
previewInput={resolvedSides.left.previewInput}
mixerPreviewState={resolvedSides.left.mixerPreviewState}
nodeWidth={previewNodeWidth}
nodeHeight={previewNodeHeight}
clipWidthPercent={sliderX}
preferPreview={effectiveDisplayMode === "preview"}
/>

View File

@@ -1,5 +1,7 @@
"use client";
import { useState } from "react";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
import {
@@ -7,8 +9,20 @@ import {
type RenderPreviewInput,
} from "@/lib/canvas-render-preview";
import type { MixerPreviewState } from "@/lib/canvas-mixer-preview";
import {
computeMixerCompareOverlayImageStyle,
computeMixerFrameRectInSurface,
isMixerCropImageReady,
} from "@/lib/mixer-crop-layout";
const EMPTY_STEPS: RenderPreviewInput["steps"] = [];
const ZERO_SIZE = { width: 0, height: 0 };
type LoadedImageState = {
url: string | null;
width: number;
height: number;
};
type CompareSurfaceProps = {
finalUrl?: string;
@@ -16,6 +30,7 @@ type CompareSurfaceProps = {
previewInput?: RenderPreviewInput;
mixerPreviewState?: MixerPreviewState;
nodeWidth: number;
nodeHeight: number;
clipWidthPercent?: number;
preferPreview?: boolean;
};
@@ -26,12 +41,22 @@ export default function CompareSurface({
previewInput,
mixerPreviewState,
nodeWidth,
nodeHeight,
clipWidthPercent,
preferPreview,
}: CompareSurfaceProps) {
const graph = useCanvasGraph();
const [baseImageState, setBaseImageState] = useState<LoadedImageState>({
url: null,
...ZERO_SIZE,
});
const [overlayImageState, setOverlayImageState] = useState<LoadedImageState>({
url: null,
...ZERO_SIZE,
});
const usePreview = Boolean(previewInput && (preferPreview || !finalUrl));
const previewSourceUrl = usePreview ? previewInput?.sourceUrl ?? null : null;
const previewSourceComposition = usePreview ? previewInput?.sourceComposition : undefined;
const previewSteps = usePreview ? previewInput?.steps ?? EMPTY_STEPS : EMPTY_STEPS;
const visibleFinalUrl = usePreview ? undefined : finalUrl;
const previewDebounceMs = shouldFastPathPreviewPipeline(
@@ -43,6 +68,7 @@ export default function CompareSurface({
const { canvasRef, isRendering, error } = usePipelinePreview({
sourceUrl: previewSourceUrl,
sourceComposition: previewSourceComposition,
steps: previewSteps,
nodeWidth,
includeHistogram: false,
@@ -64,6 +90,35 @@ export default function CompareSurface({
}
: undefined;
const baseNaturalSize =
mixerPreviewState?.baseUrl && mixerPreviewState.baseUrl === baseImageState.url
? { width: baseImageState.width, height: baseImageState.height }
: ZERO_SIZE;
const overlayNaturalSize =
mixerPreviewState?.overlayUrl && mixerPreviewState.overlayUrl === overlayImageState.url
? { width: overlayImageState.width, height: overlayImageState.height }
: ZERO_SIZE;
const mixerCropReady = isMixerCropImageReady({
currentOverlayUrl: mixerPreviewState?.overlayUrl,
loadedOverlayUrl: overlayImageState.url,
sourceWidth: overlayNaturalSize.width,
sourceHeight: overlayNaturalSize.height,
});
const mixerFrameRect = hasMixerPreview
? computeMixerFrameRectInSurface({
surfaceWidth: nodeWidth,
surfaceHeight: nodeHeight,
baseWidth: baseNaturalSize.width,
baseHeight: baseNaturalSize.height,
overlayX: mixerPreviewState.overlayX,
overlayY: mixerPreviewState.overlayY,
overlayWidth: mixerPreviewState.overlayWidth,
overlayHeight: mixerPreviewState.overlayHeight,
fit: "contain",
})
: null;
return (
<div className="pointer-events-none absolute inset-0" style={clipStyle}>
{visibleFinalUrl ? (
@@ -87,19 +142,62 @@ export default function CompareSurface({
alt={label ?? "Comparison image"}
className="absolute inset-0 h-full w-full object-contain"
draggable={false}
onLoad={(event) => {
setBaseImageState({
url: event.currentTarget.currentSrc || event.currentTarget.src,
width: event.currentTarget.naturalWidth,
height: event.currentTarget.naturalHeight,
});
}}
/>
{mixerFrameRect ? (
<div
className="absolute overflow-hidden"
style={{
mixBlendMode: mixerPreviewState.blendMode,
opacity: mixerPreviewState.opacity / 100,
left: `${mixerFrameRect.x * 100}%`,
top: `${mixerFrameRect.y * 100}%`,
width: `${mixerFrameRect.width * 100}%`,
height: `${mixerFrameRect.height * 100}%`,
}}
>
{/* eslint-disable-next-line @next/next/no-img-element */}
<img
src={mixerPreviewState.overlayUrl}
alt={label ?? "Comparison image"}
className="absolute inset-0 h-full w-full object-contain"
className="absolute max-w-none"
draggable={false}
style={{
mixBlendMode: mixerPreviewState.blendMode,
opacity: mixerPreviewState.opacity / 100,
transform: `translate(${mixerPreviewState.offsetX}px, ${mixerPreviewState.offsetY}px)`,
onLoad={(event) => {
setOverlayImageState({
url: event.currentTarget.currentSrc || event.currentTarget.src,
width: event.currentTarget.naturalWidth,
height: event.currentTarget.naturalHeight,
});
}}
style={
mixerCropReady
? computeMixerCompareOverlayImageStyle({
surfaceWidth: nodeWidth,
surfaceHeight: nodeHeight,
baseWidth: baseNaturalSize.width,
baseHeight: baseNaturalSize.height,
overlayX: mixerPreviewState.overlayX,
overlayY: mixerPreviewState.overlayY,
overlayWidth: mixerPreviewState.overlayWidth,
overlayHeight: mixerPreviewState.overlayHeight,
sourceWidth: overlayNaturalSize.width,
sourceHeight: overlayNaturalSize.height,
cropLeft: mixerPreviewState.cropLeft,
cropTop: mixerPreviewState.cropTop,
cropRight: mixerPreviewState.cropRight,
cropBottom: mixerPreviewState.cropBottom,
})
: { visibility: "hidden" }
}
/>
</div>
) : null}
</>
) : null}

File diff suppressed because it is too large Load Diff

View File

@@ -464,11 +464,13 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
);
const sourceUrl = renderPreviewInput.sourceUrl;
const sourceComposition = renderPreviewInput.sourceComposition;
useEffect(() => {
logRenderDebug("node-data-updated", {
nodeId: id,
hasSourceUrl: typeof sourceUrl === "string" && sourceUrl.length > 0,
hasSourceComposition: Boolean(sourceComposition),
storageId: data.storageId ?? null,
lastUploadStorageId: data.lastUploadStorageId ?? null,
hasResolvedUrl: typeof data.url === "string" && data.url.length > 0,
@@ -485,6 +487,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
data.url,
id,
sourceUrl,
sourceComposition,
]);
const sourceNode = useMemo<SourceNodeDescriptor | null>(
@@ -526,9 +529,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
);
const currentPipelineHash = useMemo(() => {
if (!sourceUrl) return null;
return hashPipeline({ sourceUrl, render: renderFingerprint }, steps);
}, [renderFingerprint, sourceUrl, steps]);
if (!sourceUrl && !sourceComposition) return null;
return hashPipeline(
{ source: sourceComposition ?? sourceUrl, render: renderFingerprint },
steps,
);
}, [renderFingerprint, sourceComposition, sourceUrl, steps]);
const isRenderCurrent =
Boolean(currentPipelineHash) && localData.lastRenderedHash === currentPipelineHash;
@@ -558,7 +564,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: "Error",
};
const hasSource = typeof sourceUrl === "string" && sourceUrl.length > 0;
const hasSource =
(typeof sourceUrl === "string" && sourceUrl.length > 0) || Boolean(sourceComposition);
const previewNodeWidth = Math.max(260, Math.round(width ?? 320));
const {
@@ -569,6 +576,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: previewError,
} = usePipelinePreview({
sourceUrl,
sourceComposition,
steps,
nodeWidth: previewNodeWidth,
debounceMs: previewDebounceMs,
@@ -586,6 +594,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: fullscreenPreviewError,
} = usePipelinePreview({
sourceUrl: isFullscreenOpen && sourceUrl ? sourceUrl : null,
sourceComposition: isFullscreenOpen ? sourceComposition : undefined,
steps,
nodeWidth: fullscreenPreviewWidth,
includeHistogram: false,
@@ -720,11 +729,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
};
const handleRender = async (mode: "download" | "upload") => {
if (!sourceUrl || !currentPipelineHash) {
if ((!sourceUrl && !sourceComposition) || !currentPipelineHash) {
logRenderDebug("render-aborted-prerequisites", {
nodeId: id,
mode,
hasSourceUrl: Boolean(sourceUrl),
hasSourceComposition: Boolean(sourceComposition),
hasPipelineHash: Boolean(currentPipelineHash),
isOffline: status.isOffline,
});
@@ -769,7 +779,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
});
const renderResult = await renderFullWithWorkerFallback({
sourceUrl,
sourceUrl: sourceUrl ?? undefined,
sourceComposition,
steps,
render: {
resolution: activeData.outputResolution,

View File

@@ -22,6 +22,25 @@ function logNodeDataDebug(event: string, payload: Record<string, unknown>): void
console.info("[Canvas node debug]", event, payload);
}
function diffNodeData(
before: Record<string, unknown>,
after: Record<string, unknown>,
): Record<string, { before: unknown; after: unknown }> {
const keys = new Set([...Object.keys(before), ...Object.keys(after)]);
const diff: Record<string, { before: unknown; after: unknown }> = {};
for (const key of keys) {
if (before[key] !== after[key]) {
diff[key] = {
before: before[key],
after: after[key],
};
}
}
return diff;
}
export function useNodeLocalData<T>({
nodeId,
data,
@@ -55,6 +74,16 @@ export function useNodeLocalData<T>({
const savedValue = localDataRef.current;
const savedVersion = localChangeVersionRef.current;
logNodeDataDebug("queue-save-flush", {
nodeId,
nodeType: debugLabel,
savedVersion,
changedFields: diffNodeData(
acceptedPersistedDataRef.current as Record<string, unknown>,
savedValue as Record<string, unknown>,
),
});
Promise.resolve(onSave(savedValue))
.then(() => {
if (!isMountedRef.current || savedVersion !== localChangeVersionRef.current) {
@@ -144,7 +173,17 @@ export function useNodeLocalData<T>({
const updateLocalData = useCallback(
(updater: (current: T) => T) => {
const next = updater(localDataRef.current);
const previous = localDataRef.current;
const next = updater(previous);
logNodeDataDebug("local-update", {
nodeId,
nodeType: debugLabel,
changedFields: diffNodeData(
previous as Record<string, unknown>,
next as Record<string, unknown>,
),
});
localChangeVersionRef.current += 1;
hasPendingLocalChangesRef.current = true;
@@ -153,7 +192,7 @@ export function useNodeLocalData<T>({
setPreviewNodeDataOverride(nodeId, next);
queueSave();
},
[nodeId, queueSave, setPreviewNodeDataOverride],
[debugLabel, nodeId, queueSave, setPreviewNodeDataOverride],
);
return {

View File

@@ -58,7 +58,7 @@ Alle Node-Typen werden über Validators definiert: `phase1NodeTypeValidator`, `n
| `video-prompt` | `content`, `modelId`, `durationSeconds` | KI-Video-Steuer-Node (Eingabe) |
| `ai-video` | `storageId`, `prompt`, `model`, `modelLabel`, `durationSeconds`, `creditCost`, `generatedAt`, `taskId` (transient) | Generiertes KI-Video (System-Output) |
| `compare` | `leftNodeId`, `rightNodeId`, `sliderPosition` | Vergleichs-Node |
| `mixer` | `blendMode`, `opacity`, `offsetX`, `offsetY` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) |
| `mixer` | `blendMode`, `opacity`, `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) |
| `frame` | `label`, `exportWidth`, `exportHeight`, `backgroundColor` | Artboard |
| `group` | `label`, `collapsed` | Container-Node |
| `note` | `content`, `color` | Anmerkung |
@@ -338,6 +338,8 @@ Wirft bei unauthentifiziertem Zugriff. Wird von allen Queries und Mutations genu
- `mixer` ist ein Control-Node mit pseudo-image Semantik, nicht mit persistiertem Medien-Output.
- Keine zusaetzlichen Convex-Tabellen oder Storage-Flows fuer Mixer-Vorschauen.
- Validierung laeuft client- und serverseitig ueber dieselbe Policy (`validateCanvasConnectionPolicy`); `edges.ts` delegiert darauf fuer Paritaet.
- Offizieller Bake-Pfad fuer Mixer ist `mixer -> render` (Render verarbeitet die Mixer-Komposition in Preview/Render-Pipeline).
- `mixer -> adjustments -> render` ist derzeit bewusst deferred und nicht Teil des offiziell supporteten Flows.
---

View File

@@ -4,6 +4,7 @@ import { useEffect, useMemo, useRef, useState } from "react";
import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts";
import { emptyHistogram, type HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
import {
isPipelineAbortError,
renderPreviewWithWorkerFallback,
@@ -12,6 +13,7 @@ import {
type UsePipelinePreviewOptions = {
sourceUrl: string | null;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
nodeWidth: number;
includeHistogram?: boolean;
@@ -54,6 +56,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
const stableRenderInputRef = useRef<{
pipelineHash: string;
sourceUrl: string | null;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
} | null>(null);
@@ -95,11 +98,11 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
);
const pipelineHash = useMemo(() => {
if (!options.sourceUrl) {
if (!options.sourceUrl && !options.sourceComposition) {
return "no-source";
}
return hashPipeline(options.sourceUrl, options.steps);
}, [options.sourceUrl, options.steps]);
return hashPipeline(options.sourceComposition ?? options.sourceUrl, options.steps);
}, [options.sourceComposition, options.sourceUrl, options.steps]);
useEffect(() => {
if (stableRenderInputRef.current?.pipelineHash === pipelineHash) {
@@ -109,13 +112,15 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
stableRenderInputRef.current = {
pipelineHash,
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
};
}, [pipelineHash, options.sourceUrl, options.steps]);
}, [pipelineHash, options.sourceComposition, options.sourceUrl, options.steps]);
useEffect(() => {
const sourceUrl = stableRenderInputRef.current?.sourceUrl ?? null;
if (!sourceUrl) {
const sourceComposition = stableRenderInputRef.current?.sourceComposition;
if (!sourceUrl && !sourceComposition) {
const frameId = window.requestAnimationFrame(() => {
setHistogram(emptyHistogram());
setError(null);
@@ -133,8 +138,10 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
const timer = window.setTimeout(() => {
setIsRendering(true);
setError(null);
const resolvedSourceUrl = sourceUrl ?? undefined;
void renderPreviewWithWorkerFallback({
sourceUrl,
sourceUrl: resolvedSourceUrl,
sourceComposition,
steps: stableRenderInputRef.current?.steps ?? [],
previewWidth,
includeHistogram: options.includeHistogram,
@@ -168,7 +175,8 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
if (process.env.NODE_ENV !== "production") {
console.error("[usePipelinePreview] render failed", {
message,
sourceUrl,
sourceUrl: resolvedSourceUrl,
sourceComposition,
pipelineHash,
previewWidth,
includeHistogram: options.includeHistogram,
@@ -194,7 +202,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
canvasRef,
histogram,
isRendering,
hasSource: Boolean(options.sourceUrl),
hasSource: Boolean(options.sourceUrl || options.sourceComposition),
previewAspectRatio,
error,
};

View File

@@ -19,8 +19,14 @@ export type MixerPreviewState = {
overlayUrl?: string;
blendMode: MixerBlendMode;
opacity: number;
offsetX: number;
offsetY: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
cropLeft: number;
cropTop: number;
cropRight: number;
cropBottom: number;
error?: MixerPreviewError;
};
@@ -35,9 +41,18 @@ const DEFAULT_BLEND_MODE: MixerBlendMode = "normal";
const DEFAULT_OPACITY = 100;
const MIN_OPACITY = 0;
const MAX_OPACITY = 100;
const DEFAULT_OFFSET = 0;
const MIN_OFFSET = -2048;
const MAX_OFFSET = 2048;
const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;
const DEFAULT_CROP_LEFT = 0;
const DEFAULT_CROP_TOP = 0;
const DEFAULT_CROP_RIGHT = 0;
const DEFAULT_CROP_BOTTOM = 0;
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
const MAX_OVERLAY_SIZE = 1;
function clamp(value: number, min: number, max: number): number {
return Math.max(min, Math.min(max, value));
@@ -65,18 +80,165 @@ function normalizeOpacity(value: unknown): number {
return clamp(parsed, MIN_OPACITY, MAX_OPACITY);
}
function normalizeOffset(value: unknown): number {
function normalizeOverlayNumber(value: unknown, fallback: number): number {
const parsed = parseNumeric(value);
if (parsed === null) {
return DEFAULT_OFFSET;
return fallback;
}
return clamp(parsed, MIN_OFFSET, MAX_OFFSET);
return parsed;
}
/**
 * Clamps a loosely-typed rect (position + size in unit coordinates) into the
 * valid overlay range: position is kept low enough that a minimum-sized rect
 * still fits, and size is capped so the rect never extends past 1.
 */
function normalizeUnitRect(args: {
  x: unknown;
  y: unknown;
  width: unknown;
  height: unknown;
  defaults: { x: number; y: number; width: number; height: number };
}): { x: number; y: number; width: number; height: number } {
  const maxPosition = MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE;
  const safeX = clamp(
    normalizeOverlayNumber(args.x, args.defaults.x),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  const safeY = clamp(
    normalizeOverlayNumber(args.y, args.defaults.y),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  // Size may not exceed the distance from the position to the far edge.
  const safeWidth = clamp(
    normalizeOverlayNumber(args.width, args.defaults.width),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - safeX),
  );
  const safeHeight = clamp(
    normalizeOverlayNumber(args.height, args.defaults.height),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - safeY),
  );
  return { x: safeX, y: safeY, width: safeWidth, height: safeHeight };
}
/**
 * Reads the overlay frame rect (normalized 0..1) from raw node data.
 * Legacy documents that only carried pixel offsetX/offsetY are reset to the
 * full-frame default, since pixel offsets cannot be mapped onto the
 * normalized rect model.
 */
function normalizeOverlayRect(record: Record<string, unknown>): Pick<
  MixerPreviewState,
  "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
> {
  const hasRectField =
    record.overlayX !== undefined ||
    record.overlayY !== undefined ||
    record.overlayWidth !== undefined ||
    record.overlayHeight !== undefined;
  const hasLegacyOffset =
    record.offsetX !== undefined || record.offsetY !== undefined;
  if (hasLegacyOffset && !hasRectField) {
    return {
      overlayX: DEFAULT_OVERLAY_X,
      overlayY: DEFAULT_OVERLAY_Y,
      overlayWidth: DEFAULT_OVERLAY_WIDTH,
      overlayHeight: DEFAULT_OVERLAY_HEIGHT,
    };
  }
  const rect = normalizeUnitRect({
    x: record.overlayX,
    y: record.overlayY,
    width: record.overlayWidth,
    height: record.overlayHeight,
    defaults: {
      x: DEFAULT_OVERLAY_X,
      y: DEFAULT_OVERLAY_Y,
      width: DEFAULT_OVERLAY_WIDTH,
      height: DEFAULT_OVERLAY_HEIGHT,
    },
  });
  return {
    overlayX: rect.x,
    overlayY: rect.y,
    overlayWidth: rect.width,
    overlayHeight: rect.height,
  };
}
/**
 * Reads the overlay content crop insets (normalized 0..1) from raw node
 * data. Legacy documents that stored a content rect instead of crop edges
 * are converted: the rect's distance to each side becomes the inset.
 * Right/bottom insets are capped so at least MIN_OVERLAY_SIZE of the
 * content stays visible on each axis.
 */
function normalizeCropEdges(record: Record<string, unknown>): Pick<
  MixerPreviewState,
  "cropLeft" | "cropTop" | "cropRight" | "cropBottom"
> {
  const hasCrop =
    record.cropLeft !== undefined ||
    record.cropTop !== undefined ||
    record.cropRight !== undefined ||
    record.cropBottom !== undefined;
  const hasLegacyContentRect =
    record.contentX !== undefined ||
    record.contentY !== undefined ||
    record.contentWidth !== undefined ||
    record.contentHeight !== undefined;
  // Explicit crop edges win; the legacy rect is only a migration fallback.
  if (hasLegacyContentRect && !hasCrop) {
    const rect = normalizeUnitRect({
      x: record.contentX,
      y: record.contentY,
      width: record.contentWidth,
      height: record.contentHeight,
      defaults: { x: 0, y: 0, width: 1, height: 1 },
    });
    return {
      cropLeft: rect.x,
      cropTop: rect.y,
      cropRight: 1 - (rect.x + rect.width),
      cropBottom: 1 - (rect.y + rect.height),
    };
  }
  const left = clamp(
    normalizeOverlayNumber(record.cropLeft, DEFAULT_CROP_LEFT),
    0,
    1 - MIN_OVERLAY_SIZE,
  );
  const top = clamp(
    normalizeOverlayNumber(record.cropTop, DEFAULT_CROP_TOP),
    0,
    1 - MIN_OVERLAY_SIZE,
  );
  // Opposite edges are bounded together so the visible span never
  // shrinks below MIN_OVERLAY_SIZE.
  const right = clamp(
    normalizeOverlayNumber(record.cropRight, DEFAULT_CROP_RIGHT),
    0,
    1 - left - MIN_OVERLAY_SIZE,
  );
  const bottom = clamp(
    normalizeOverlayNumber(record.cropBottom, DEFAULT_CROP_BOTTOM),
    0,
    1 - top - MIN_OVERLAY_SIZE,
  );
  return { cropLeft: left, cropTop: top, cropRight: right, cropBottom: bottom };
}
export function normalizeMixerPreviewData(data: unknown): Pick<
MixerPreviewState,
"blendMode" | "opacity" | "offsetX" | "offsetY"
| "blendMode"
| "opacity"
| "overlayX"
| "overlayY"
| "overlayWidth"
| "overlayHeight"
| "cropLeft"
| "cropTop"
| "cropRight"
| "cropBottom"
> {
const record = (data ?? {}) as Record<string, unknown>;
const blendMode = MIXER_BLEND_MODES.has(record.blendMode as MixerBlendMode)
@@ -86,8 +248,8 @@ export function normalizeMixerPreviewData(data: unknown): Pick<
return {
blendMode,
opacity: normalizeOpacity(record.opacity),
offsetX: normalizeOffset(record.offsetX),
offsetY: normalizeOffset(record.offsetY),
...normalizeOverlayRect(record),
...normalizeCropEdges(record),
};
}
@@ -119,6 +281,17 @@ function resolveSourceUrlFromNode(args: {
}
if (args.sourceNode.type === "render") {
const preview = resolveRenderPreviewInputFromGraph({
nodeId: args.sourceNode.id,
graph: args.graph,
});
if (preview.sourceComposition) {
return undefined;
}
if (preview.sourceUrl) {
return preview.sourceUrl;
}
const renderData = (args.sourceNode.data ?? {}) as Record<string, unknown>;
const renderOutputUrl =
typeof renderData.lastUploadUrl === "string" && renderData.lastUploadUrl.length > 0
@@ -133,11 +306,7 @@ function resolveSourceUrlFromNode(args: {
return directRenderUrl;
}
const preview = resolveRenderPreviewInputFromGraph({
nodeId: args.sourceNode.id,
graph: args.graph,
});
return preview.sourceUrl ?? undefined;
return undefined;
}
return resolveNodeImageUrl(args.sourceNode.data) ?? undefined;
@@ -172,6 +341,8 @@ export function resolveMixerPreviewFromGraph(args: {
if (base.duplicate || overlay.duplicate) {
return {
status: "error",
baseUrl: undefined,
overlayUrl: undefined,
...normalized,
error: "duplicate-handle-edge",
};

View File

@@ -51,8 +51,14 @@ export const CANVAS_NODE_TEMPLATES = [
defaultData: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
},
{

View File

@@ -15,10 +15,29 @@ export type RenderPreviewGraphEdge = {
};
export type RenderPreviewInput = {
sourceUrl: string;
sourceUrl: string | null;
sourceComposition?: RenderPreviewSourceComposition;
steps: PipelineStep[];
};
// Blend modes supported by the mixer node; the non-"normal" names mirror the
// canvas/CSS blend-mode vocabulary.
export type MixerBlendMode = "normal" | "multiply" | "screen" | "overlay";

// Client-side pseudo-image source for a render preview: the resolved output
// of a mixer node. Rect and crop values are normalized to 0..1; opacity is
// in percent (0..100). No persisted asset backs this composition.
export type RenderPreviewSourceComposition = {
  kind: "mixer";
  // URL of the base input image (fills the full frame).
  baseUrl: string;
  // URL of the overlay input image (composited on top of the base).
  overlayUrl: string;
  blendMode: MixerBlendMode;
  // Overlay opacity in percent (0..100).
  opacity: number;
  // Overlay frame rect within the base image, normalized 0..1.
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  // Content crop insets within the overlay source, normalized 0..1.
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
};
export type CanvasGraphNodeLike = {
id: string;
type: string;
@@ -38,6 +57,8 @@ export type CanvasGraphSnapshot = {
incomingEdgesByTarget: ReadonlyMap<string, readonly CanvasGraphEdgeLike[]>;
};
type RenderPreviewResolvedInput = RenderPreviewInput;
export type CanvasGraphNodeDataOverrides = ReadonlyMap<string, unknown>;
export function shouldFastPathPreviewPipeline(
@@ -129,6 +150,188 @@ export const RENDER_PREVIEW_PIPELINE_TYPES = new Set([
"detail-adjust",
]);
// Node types allowed to feed a mixer input handle.
const MIXER_SOURCE_NODE_TYPES = new Set(["image", "asset", "ai-image", "render"]);

// Runtime guard for untyped node data: only these blend modes are accepted.
const MIXER_BLEND_MODES = new Set<MixerBlendMode>([
  "normal",
  "multiply",
  "screen",
  "overlay",
]);

const DEFAULT_BLEND_MODE: MixerBlendMode = "normal";

// Opacity is stored in percent (0..100).
const DEFAULT_OPACITY = 100;
const MIN_OPACITY = 0;
const MAX_OPACITY = 100;

// Overlay frame defaults: full-frame rect at the origin (normalized 0..1).
const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;

// Crop insets default to "no crop".
const DEFAULT_CROP_LEFT = 0;
const DEFAULT_CROP_TOP = 0;
const DEFAULT_CROP_RIGHT = 0;
const DEFAULT_CROP_BOTTOM = 0;

// Unit-square bounds for the overlay frame; MIN_OVERLAY_SIZE keeps the
// frame (and visible crop span) from collapsing below 10%.
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
const MAX_OVERLAY_SIZE = 1;
/** Clamps `value` into [min, max]; when min > max, min wins. */
function clamp(value: number, min: number, max: number): number {
  const upperBounded = Math.min(max, value);
  return Math.max(min, upperBounded);
}
/**
 * Parses a loosely-typed numeric value from node data.
 *
 * Returns a finite number, or `null` when the value is missing, not a
 * number/string, non-finite, or (fix) a blank string — `Number("")` would
 * otherwise coerce to 0 and silently turn a cleared inline input into zero
 * instead of letting the caller fall back to its default.
 */
function parseNumeric(value: unknown): number | null {
  if (typeof value === "number") {
    return Number.isFinite(value) ? value : null;
  }
  if (typeof value === "string") {
    if (value.trim() === "") {
      // Blank input means "unset", not 0.
      return null;
    }
    const parsed = Number(value);
    return Number.isFinite(parsed) ? parsed : null;
  }
  return null;
}
/** Resolves opacity from raw data, defaulting to 100 and clamping to 0..100. */
function normalizeOpacity(value: unknown): number {
  const parsed = parseNumeric(value);
  return parsed === null ? DEFAULT_OPACITY : clamp(parsed, MIN_OPACITY, MAX_OPACITY);
}
/** Parses a numeric field, substituting `fallback` when unparseable. */
function normalizeOverlayNumber(value: unknown, fallback: number): number {
  return parseNumeric(value) ?? fallback;
}
/**
 * Resolves the overlay frame rect (normalized 0..1) from mixer node data for
 * the render composition. Legacy offsetX/offsetY-only documents fall back to
 * the full-frame default; otherwise position is clamped so a minimum-sized
 * frame still fits and size is capped so the frame stays inside the base.
 */
function normalizeMixerCompositionRect(data: Record<string, unknown>): Pick<
  RenderPreviewSourceComposition,
  "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
> {
  const hasRectField =
    data.overlayX !== undefined ||
    data.overlayY !== undefined ||
    data.overlayWidth !== undefined ||
    data.overlayHeight !== undefined;
  const hasLegacyOffset =
    data.offsetX !== undefined || data.offsetY !== undefined;
  if (hasLegacyOffset && !hasRectField) {
    return {
      overlayX: DEFAULT_OVERLAY_X,
      overlayY: DEFAULT_OVERLAY_Y,
      overlayWidth: DEFAULT_OVERLAY_WIDTH,
      overlayHeight: DEFAULT_OVERLAY_HEIGHT,
    };
  }
  const maxPosition = MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE;
  const x = clamp(
    normalizeOverlayNumber(data.overlayX, DEFAULT_OVERLAY_X),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  const y = clamp(
    normalizeOverlayNumber(data.overlayY, DEFAULT_OVERLAY_Y),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  // Size may not extend past the far edge of the unit square.
  const width = clamp(
    normalizeOverlayNumber(data.overlayWidth, DEFAULT_OVERLAY_WIDTH),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - x),
  );
  const height = clamp(
    normalizeOverlayNumber(data.overlayHeight, DEFAULT_OVERLAY_HEIGHT),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - y),
  );
  return { overlayX: x, overlayY: y, overlayWidth: width, overlayHeight: height };
}
/**
 * Resolves the overlay content crop insets (normalized 0..1) from mixer node
 * data for the render composition.
 *
 * Legacy documents stored a content rect (contentX/Y/Width/Height) instead of
 * crop edges; when no crop field is present the rect is converted so its
 * distance to each side becomes the inset. Right/bottom insets are capped so
 * at least MIN_OVERLAY_SIZE of the source stays visible on each axis.
 */
function normalizeMixerCompositionCropEdges(data: Record<string, unknown>): Pick<
  RenderPreviewSourceComposition,
  "cropLeft" | "cropTop" | "cropRight" | "cropBottom"
> {
  const hasCropField =
    data.cropLeft !== undefined ||
    data.cropTop !== undefined ||
    data.cropRight !== undefined ||
    data.cropBottom !== undefined;
  const hasLegacyContentRectField =
    data.contentX !== undefined ||
    data.contentY !== undefined ||
    data.contentWidth !== undefined ||
    data.contentHeight !== undefined;
  // Explicit crop fields win; the legacy rect is only a migration fallback.
  if (!hasCropField && hasLegacyContentRectField) {
    const contentX = clamp(
      normalizeOverlayNumber(data.contentX, 0),
      MIN_OVERLAY_POSITION,
      MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
    );
    const contentY = clamp(
      normalizeOverlayNumber(data.contentY, 0),
      MIN_OVERLAY_POSITION,
      MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
    );
    // Size is clamped so the rect stays inside the unit square.
    const contentWidth = clamp(
      normalizeOverlayNumber(data.contentWidth, 1),
      MIN_OVERLAY_SIZE,
      Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentX),
    );
    const contentHeight = clamp(
      normalizeOverlayNumber(data.contentHeight, 1),
      MIN_OVERLAY_SIZE,
      Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentY),
    );
    // Rect -> edge insets: right/bottom are the leftover beyond the rect.
    return {
      cropLeft: contentX,
      cropTop: contentY,
      cropRight: 1 - (contentX + contentWidth),
      cropBottom: 1 - (contentY + contentHeight),
    };
  }
  const cropLeft = clamp(
    normalizeOverlayNumber(data.cropLeft, DEFAULT_CROP_LEFT),
    0,
    1 - MIN_OVERLAY_SIZE,
  );
  const cropTop = clamp(
    normalizeOverlayNumber(data.cropTop, DEFAULT_CROP_TOP),
    0,
    1 - MIN_OVERLAY_SIZE,
  );
  // Opposite edges are bounded together so the visible span keeps at
  // least MIN_OVERLAY_SIZE on each axis.
  const cropRight = clamp(
    normalizeOverlayNumber(data.cropRight, DEFAULT_CROP_RIGHT),
    0,
    1 - cropLeft - MIN_OVERLAY_SIZE,
  );
  const cropBottom = clamp(
    normalizeOverlayNumber(data.cropBottom, DEFAULT_CROP_BOTTOM),
    0,
    1 - cropTop - MIN_OVERLAY_SIZE,
  );
  return {
    cropLeft,
    cropTop,
    cropRight,
    cropBottom,
  };
}
export function resolveRenderFingerprint(data: unknown): {
resolution: RenderResolutionOption;
customWidth?: number;
@@ -163,15 +366,19 @@ export function resolveRenderFingerprint(data: unknown): {
export function resolveRenderPipelineHash(args: {
sourceUrl: string | null;
sourceComposition?: RenderPreviewSourceComposition;
steps: PipelineStep[];
data: unknown;
}): string | null {
if (!args.sourceUrl) {
if (!args.sourceUrl && !args.sourceComposition) {
return null;
}
return hashPipeline(
{ sourceUrl: args.sourceUrl, render: resolveRenderFingerprint(args.data) },
{
source: args.sourceComposition ?? args.sourceUrl,
render: resolveRenderFingerprint(args.data),
},
args.steps,
);
}
@@ -212,6 +419,119 @@ function resolveSourceNodeUrl(node: CanvasGraphNodeLike): string | null {
return resolveNodeImageUrl(node.data);
}
/**
 * Resolves a render node's downloadable output URL: prefers the last
 * uploaded result, falling back to whatever image URL the node data carries.
 */
function resolveRenderOutputUrl(node: CanvasGraphNodeLike): string | null {
  const data = (node.data ?? {}) as Record<string, unknown>;
  const lastUploadUrl = data.lastUploadUrl;
  if (typeof lastUploadUrl === "string" && lastUploadUrl.length > 0) {
    return lastUploadUrl;
  }
  return resolveNodeImageUrl(node.data);
}
/**
 * Picks the single incoming edge for a mixer handle. Edges without a target
 * handle (or with an empty one) count as base connections for backward
 * compatibility. Returns null unless exactly one edge matches, so duplicate
 * connections disable the handle instead of picking arbitrarily.
 */
function resolveMixerHandleEdge(args: {
  incomingEdges: readonly CanvasGraphEdgeLike[];
  handle: "base" | "overlay";
}): CanvasGraphEdgeLike | null {
  const matchesHandle = (edge: CanvasGraphEdgeLike): boolean =>
    args.handle === "overlay"
      ? edge.targetHandle === "overlay"
      : edge.targetHandle === "base" ||
        edge.targetHandle == null ||
        edge.targetHandle === "";
  const matching = args.incomingEdges.filter(matchesHandle);
  if (matching.length !== 1) {
    return null;
  }
  return matching[0] ?? null;
}
/**
 * Resolves the image URL a mixer input ultimately points at, or null when
 * the node cannot feed a mixer (unsupported type, or no resolvable output).
 *
 * Render nodes consult the upstream graph first: a render that is itself fed
 * by a mixer resolves to a composition rather than a URL, which this path
 * cannot flatten, so such chains yield null. Otherwise the render's upstream
 * source URL is used, then its own baked output as a fallback.
 */
function resolveMixerSourceUrlFromNode(args: {
  node: CanvasGraphNodeLike;
  graph: CanvasGraphSnapshot;
}): string | null {
  // Only image-like node types may feed a mixer handle.
  if (!MIXER_SOURCE_NODE_TYPES.has(args.node.type)) {
    return null;
  }
  if (args.node.type === "render") {
    const preview = resolveRenderPreviewInputFromGraph({
      nodeId: args.node.id,
      graph: args.graph,
    });
    // mixer -> render -> mixer chains are not flattened to a URL here.
    if (preview.sourceComposition) {
      return null;
    }
    if (preview.sourceUrl) {
      return preview.sourceUrl;
    }
    // Fall back to the render node's own persisted output, if any.
    const directRenderUrl = resolveRenderOutputUrl(args.node);
    if (directRenderUrl) {
      return directRenderUrl;
    }
    return null;
  }
  return resolveNodeImageUrl(args.node.data);
}
/**
 * Follows an incoming mixer edge to its source node and resolves that
 * node's image URL; null when the edge is missing or dangling.
 */
function resolveMixerSourceUrlFromEdge(args: {
  edge: CanvasGraphEdgeLike | null;
  graph: CanvasGraphSnapshot;
}): string | null {
  const sourceNode = args.edge
    ? args.graph.nodesById.get(args.edge.source)
    : undefined;
  if (!sourceNode) {
    return null;
  }
  return resolveMixerSourceUrlFromNode({
    node: sourceNode,
    graph: args.graph,
  });
}
/**
 * Builds the mixer pseudo-image composition for a render preview from the
 * graph: resolves both input URLs via the mixer's base/overlay handles and
 * normalizes the node's blend/opacity/rect/crop controls.
 *
 * Returns null when either handle is unconnected, ambiguous (duplicate
 * edges), or its source cannot be resolved to a URL.
 */
function resolveRenderMixerCompositionFromGraph(args: {
  node: CanvasGraphNodeLike;
  graph: CanvasGraphSnapshot;
}): RenderPreviewSourceComposition | null {
  const incomingEdges = args.graph.incomingEdgesByTarget.get(args.node.id) ?? [];
  const baseEdge = resolveMixerHandleEdge({ incomingEdges, handle: "base" });
  const overlayEdge = resolveMixerHandleEdge({ incomingEdges, handle: "overlay" });
  const baseUrl = resolveMixerSourceUrlFromEdge({ edge: baseEdge, graph: args.graph });
  const overlayUrl = resolveMixerSourceUrlFromEdge({ edge: overlayEdge, graph: args.graph });
  if (!baseUrl || !overlayUrl) {
    return null;
  }
  const data = (args.node.data ?? {}) as Record<string, unknown>;
  // Node data is untyped at runtime: only accept known blend modes.
  const blendMode = MIXER_BLEND_MODES.has(data.blendMode as MixerBlendMode)
    ? (data.blendMode as MixerBlendMode)
    : DEFAULT_BLEND_MODE;
  return {
    kind: "mixer",
    baseUrl,
    overlayUrl,
    blendMode,
    opacity: normalizeOpacity(data.opacity),
    ...normalizeMixerCompositionRect(data),
    ...normalizeMixerCompositionCropEdges(data),
  };
}
export function buildGraphSnapshot(
nodes: readonly CanvasGraphNodeLike[],
edges: readonly CanvasGraphEdgeLike[],
@@ -384,7 +704,32 @@ export function findSourceNodeFromGraph(
export function resolveRenderPreviewInputFromGraph(args: {
nodeId: string;
graph: CanvasGraphSnapshot;
}): { sourceUrl: string | null; steps: PipelineStep[] } {
}): RenderPreviewResolvedInput {
const renderIncoming = getSortedIncomingEdge(
args.graph.incomingEdgesByTarget.get(args.nodeId),
);
const renderInputNode = renderIncoming
? args.graph.nodesById.get(renderIncoming.source)
: null;
if (renderInputNode?.type === "mixer") {
const sourceComposition = resolveRenderMixerCompositionFromGraph({
node: renderInputNode,
graph: args.graph,
});
const steps = collectPipelineFromGraph(args.graph, {
nodeId: args.nodeId,
isPipelineNode: (node) => RENDER_PREVIEW_PIPELINE_TYPES.has(node.type ?? ""),
});
return {
sourceUrl: null,
sourceComposition: sourceComposition ?? undefined,
steps,
};
}
const sourceUrl = getSourceImageFromGraph(args.graph, {
nodeId: args.nodeId,
isSourceNode: (node) => SOURCE_NODE_TYPES.has(node.type ?? ""),
@@ -406,7 +751,7 @@ export function resolveRenderPreviewInput(args: {
nodeId: string;
nodes: readonly RenderPreviewGraphNode[];
edges: readonly RenderPreviewGraphEdge[];
}): { sourceUrl: string | null; steps: PipelineStep[] } {
}): RenderPreviewResolvedInput {
return resolveRenderPreviewInputFromGraph({
nodeId: args.nodeId,
graph: buildGraphSnapshot(args.nodes, args.edges),

View File

@@ -437,8 +437,14 @@ export const NODE_DEFAULTS: Record<
data: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
},
"agent-output": {

View File

@@ -10,7 +10,7 @@ import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader";
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;
@@ -99,7 +99,11 @@ function resolveMimeType(format: RenderFormat): string {
export async function renderFull(options: RenderFullOptions): Promise<RenderFullResult> {
const { signal } = options;
const bitmap = await loadSourceBitmap(options.sourceUrl, { signal });
const bitmap = await loadRenderSourceBitmap({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,

View File

@@ -2,21 +2,26 @@ import { renderFull } from "@/lib/image-pipeline/bridge";
import { renderPreview } from "@/lib/image-pipeline/preview-renderer";
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import type {
RenderFullOptions,
RenderFullResult,
RenderSourceComposition,
} from "@/lib/image-pipeline/render-types";
import {
IMAGE_PIPELINE_BACKEND_FLAG_KEYS,
type BackendFeatureFlags,
} from "@/lib/image-pipeline/backend/feature-flags";
type PreviewWorkerPayload = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
featureFlags?: BackendFeatureFlags;
};
type FullWorkerPayload = RenderFullOptions & {
type FullWorkerPayload = Omit<RenderFullOptions, "signal"> & {
featureFlags?: BackendFeatureFlags;
};
@@ -112,6 +117,7 @@ async function handlePreviewRequest(requestId: number, payload: PreviewWorkerPay
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderPreview({
sourceUrl: payload.sourceUrl,
sourceComposition: payload.sourceComposition,
steps: payload.steps,
previewWidth: payload.previewWidth,
includeHistogram: payload.includeHistogram,
@@ -161,6 +167,7 @@ async function handleFullRequest(requestId: number, payload: FullWorkerPayload):
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderFull({
sourceUrl: payload.sourceUrl,
sourceComposition: payload.sourceComposition,
steps: payload.steps,
render: payload.render,
signal: controller.signal,

View File

@@ -8,7 +8,8 @@ import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader";
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
export type PreviewRenderResult = {
width: number;
@@ -64,13 +65,16 @@ async function yieldToMainOrWorkerLoop(): Promise<void> {
}
export async function renderPreview(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
signal?: AbortSignal;
}): Promise<PreviewRenderResult> {
const bitmap = await loadSourceBitmap(options.sourceUrl, {
const bitmap = await loadRenderSourceBitmap({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
signal: options.signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);

View File

@@ -24,6 +24,22 @@ export type RenderSizeLimits = {
maxPixels?: number;
};
// Client-side pseudo-image source for a full render: a mixer node's output
// described by its two input URLs plus composition controls. Rect and crop
// values are normalized to 0..1; opacity is in percent (0..100).
export type RenderSourceComposition = {
  kind: "mixer";
  // Base input image URL (fills the full frame).
  baseUrl: string;
  // Overlay input image URL (composited on top).
  overlayUrl: string;
  blendMode: "normal" | "multiply" | "screen" | "overlay";
  // Overlay opacity in percent (0..100).
  opacity: number;
  // Overlay frame rect within the base image, normalized 0..1.
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  // Content crop insets within the overlay source, normalized 0..1.
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
};
export type ResolvedRenderSize = {
width: number;
height: number;
@@ -32,7 +48,8 @@ export type ResolvedRenderSize = {
};
export type RenderFullOptions = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
render: RenderOptions;
limits?: RenderSizeLimits;

View File

@@ -1,3 +1,6 @@
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
import { computeVisibleMixerContentRect } from "@/lib/mixer-crop-layout";
export const SOURCE_BITMAP_CACHE_MAX_ENTRIES = 32;
type CacheEntry = {
@@ -12,6 +15,12 @@ type LoadSourceBitmapOptions = {
signal?: AbortSignal;
};
// Input for loadRenderSourceBitmap: exactly one of sourceUrl or
// sourceComposition should be provided; signal aborts in-flight loads.
type LoadRenderSourceBitmapOptions = {
  sourceUrl?: string;
  sourceComposition?: RenderSourceComposition;
  signal?: AbortSignal;
};
function throwIfAborted(signal: AbortSignal | undefined): void {
if (signal?.aborted) {
throw new DOMException("The operation was aborted.", "AbortError");
@@ -215,3 +224,219 @@ export async function loadSourceBitmap(
const promise = getOrCreateSourceBitmapPromise(sourceUrl);
return await awaitWithLocalAbort(promise, options.signal);
}
/**
 * Allocates a scratch canvas for compositing: a DOM canvas when a document
 * exists, an OffscreenCanvas otherwise (e.g. inside a worker), and fails
 * fast when neither is available.
 */
function createWorkingCanvas(width: number, height: number):
  | HTMLCanvasElement
  | OffscreenCanvas {
  if (typeof document !== "undefined") {
    const element = document.createElement("canvas");
    element.width = width;
    element.height = height;
    return element;
  }
  if (typeof OffscreenCanvas === "undefined") {
    throw new Error("Canvas rendering is not available in this environment.");
  }
  return new OffscreenCanvas(width, height);
}
/**
 * Maps a mixer blend mode onto the canvas globalCompositeOperation name:
 * "normal" becomes "source-over", the remaining modes match 1:1.
 */
function mixerBlendModeToCompositeOperation(
  blendMode: RenderSourceComposition["blendMode"],
): GlobalCompositeOperation {
  return blendMode === "normal" ? "source-over" : blendMode;
}
/** Converts a 0..100 percent opacity into a 0..1 alpha; non-finite input means fully opaque. */
function normalizeCompositionOpacity(value: number): number {
  if (!Number.isFinite(value)) {
    return 1;
  }
  const percent = Math.min(100, Math.max(0, value));
  return percent / 100;
}
/** Returns `value` when it is a finite number, otherwise `fallback`. */
function normalizeRatio(value: number, fallback: number): number {
  return Number.isFinite(value) ? value : fallback;
}
/**
 * Sanitizes the composition's overlay frame rect (normalized 0..1):
 * position is capped at 0.9 so a minimum 0.1-sized frame still fits, and
 * size is clamped to [0.1, 1] without extending past the base image.
 */
function normalizeMixerRect(source: RenderSourceComposition): {
  x: number;
  y: number;
  width: number;
  height: number;
} {
  const clampPosition = (raw: number): number =>
    Math.max(0, Math.min(0.9, normalizeRatio(raw, 0)));
  const clampSize = (raw: number, position: number): number =>
    Math.max(0.1, Math.min(1, normalizeRatio(raw, 1), 1 - position));
  const x = clampPosition(source.overlayX);
  const y = clampPosition(source.overlayY);
  return {
    x,
    y,
    width: clampSize(source.overlayWidth, x),
    height: clampSize(source.overlayHeight, y),
  };
}
/**
 * Sanitizes the composition's content crop insets (normalized 0..1).
 *
 * Legacy payloads stored a content rect (contentX/Y/Width/Height) instead of
 * crop edges; those are converted so the rect's distance to each side becomes
 * the inset. Fix: the legacy rect is only consulted when no crop edge is
 * present at runtime — previously legacy fields silently overrode explicit
 * crop values, unlike the equivalent normalizers in the graph/preview
 * layers, which gate the legacy path behind the absence of crop fields.
 * Right/bottom insets are capped so at least 0.1 of the source stays
 * visible on each axis.
 */
function normalizeMixerCropEdges(source: RenderSourceComposition): {
  left: number;
  top: number;
  right: number;
  bottom: number;
} {
  // Runtime payloads may predate the typed shape, so probe optional fields.
  const legacySource = source as Partial<RenderSourceComposition> & {
    contentX?: number;
    contentY?: number;
    contentWidth?: number;
    contentHeight?: number;
  };
  const hasCropEdge =
    legacySource.cropLeft !== undefined ||
    legacySource.cropTop !== undefined ||
    legacySource.cropRight !== undefined ||
    legacySource.cropBottom !== undefined;
  const hasLegacyContentRect =
    legacySource.contentX !== undefined ||
    legacySource.contentY !== undefined ||
    legacySource.contentWidth !== undefined ||
    legacySource.contentHeight !== undefined;
  if (!hasCropEdge && hasLegacyContentRect) {
    const contentX = Math.max(
      0,
      Math.min(0.9, normalizeRatio(legacySource.contentX ?? Number.NaN, 0)),
    );
    const contentY = Math.max(
      0,
      Math.min(0.9, normalizeRatio(legacySource.contentY ?? Number.NaN, 0)),
    );
    const contentWidth = Math.max(
      0.1,
      Math.min(1, normalizeRatio(legacySource.contentWidth ?? Number.NaN, 1), 1 - contentX),
    );
    const contentHeight = Math.max(
      0.1,
      Math.min(1, normalizeRatio(legacySource.contentHeight ?? Number.NaN, 1), 1 - contentY),
    );
    // Rect -> edge insets: right/bottom are the leftover beyond the rect.
    return {
      left: contentX,
      top: contentY,
      right: 1 - (contentX + contentWidth),
      bottom: 1 - (contentY + contentHeight),
    };
  }
  const cropLeft = Math.max(0, Math.min(0.9, normalizeRatio(source.cropLeft, 0)));
  const cropTop = Math.max(0, Math.min(0.9, normalizeRatio(source.cropTop, 0)));
  // Opposite edges are bounded together so at least 0.1 stays visible.
  const cropRight = Math.max(
    0,
    Math.min(1 - cropLeft - 0.1, normalizeRatio(source.cropRight, 0)),
  );
  const cropBottom = Math.max(
    0,
    Math.min(1 - cropTop - 0.1, normalizeRatio(source.cropBottom, 0)),
  );
  return {
    left: cropLeft,
    top: cropTop,
    right: cropRight,
    bottom: cropBottom,
  };
}
/**
 * Flattens a mixer composition into a single ImageBitmap sized like the base
 * image: draws the base, then composites the cropped overlay into its frame
 * rect using the configured blend mode and opacity.
 */
async function loadMixerCompositionBitmap(
  sourceComposition: RenderSourceComposition,
  signal?: AbortSignal,
): Promise<ImageBitmap> {
  // Load both inputs in parallel; each load observes the abort signal.
  const [baseBitmap, overlayBitmap] = await Promise.all([
    loadSourceBitmap(sourceComposition.baseUrl, { signal }),
    loadSourceBitmap(sourceComposition.overlayUrl, { signal }),
  ]);
  throwIfAborted(signal);
  const canvas = createWorkingCanvas(baseBitmap.width, baseBitmap.height);
  const context = canvas.getContext("2d", { willReadFrequently: true });
  if (!context) {
    throw new Error("Render composition could not create a 2D context.");
  }
  context.clearRect(0, 0, baseBitmap.width, baseBitmap.height);
  context.drawImage(baseBitmap, 0, 0, baseBitmap.width, baseBitmap.height);
  // Overlay frame rect converted to base-image pixel space.
  const rect = normalizeMixerRect(sourceComposition);
  const frameX = rect.x * baseBitmap.width;
  const frameY = rect.y * baseBitmap.height;
  const frameWidth = rect.width * baseBitmap.width;
  const frameHeight = rect.height * baseBitmap.height;
  // Crop insets converted to an overlay-pixel source rect for drawImage.
  const cropEdges = normalizeMixerCropEdges(sourceComposition);
  const sourceX = cropEdges.left * overlayBitmap.width;
  const sourceY = cropEdges.top * overlayBitmap.height;
  const sourceWidth = (1 - cropEdges.left - cropEdges.right) * overlayBitmap.width;
  const sourceHeight = (1 - cropEdges.top - cropEdges.bottom) * overlayBitmap.height;
  // Shared layout helper decides where the cropped content lands inside the
  // frame, so the baked output matches the DOM/CSS mixer preview.
  const visibleRect = computeVisibleMixerContentRect({
    frameAspectRatio: frameHeight > 0 ? frameWidth / frameHeight : 1,
    sourceWidth: overlayBitmap.width,
    sourceHeight: overlayBitmap.height,
    cropLeft: cropEdges.left,
    cropTop: cropEdges.top,
    cropRight: cropEdges.right,
    cropBottom: cropEdges.bottom,
  });
  // visibleRect is frame-relative; a null result falls back to filling
  // the whole frame.
  const destX = frameX + (visibleRect?.x ?? 0) * frameWidth;
  const destY = frameY + (visibleRect?.y ?? 0) * frameHeight;
  const destWidth = (visibleRect?.width ?? 1) * frameWidth;
  const destHeight = (visibleRect?.height ?? 1) * frameHeight;
  context.globalCompositeOperation = mixerBlendModeToCompositeOperation(
    sourceComposition.blendMode,
  );
  context.globalAlpha = normalizeCompositionOpacity(sourceComposition.opacity);
  // Clip to the frame so overflowing content never spills onto the base.
  context.save();
  context.beginPath();
  context.rect(frameX, frameY, frameWidth, frameHeight);
  context.clip();
  context.drawImage(
    overlayBitmap,
    sourceX,
    sourceY,
    sourceWidth,
    sourceHeight,
    destX,
    destY,
    destWidth,
    destHeight,
  );
  context.restore();
  // Reset context state before snapshotting the canvas.
  context.globalCompositeOperation = "source-over";
  context.globalAlpha = 1;
  return await createImageBitmap(canvas);
}
/**
 * Loads the bitmap that feeds a render pipeline: either a flattened mixer
 * composition (base + overlay composited onto a scratch canvas) or a plain
 * source URL. Rejects when neither input is provided or the composition
 * kind is unknown.
 */
export async function loadRenderSourceBitmap(
  options: LoadRenderSourceBitmapOptions,
): Promise<ImageBitmap> {
  const { sourceComposition, sourceUrl, signal } = options;
  if (sourceComposition) {
    if (sourceComposition.kind !== "mixer") {
      throw new Error(`Unsupported source composition '${sourceComposition.kind}'.`);
    }
    return await loadMixerCompositionBitmap(sourceComposition, signal);
  }
  if (!sourceUrl) {
    throw new Error("Render source is required.");
  }
  return await loadSourceBitmap(sourceUrl, { signal });
}

View File

@@ -5,7 +5,11 @@ import {
} from "@/lib/image-pipeline/preview-renderer";
import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import type {
RenderFullOptions,
RenderFullResult,
RenderSourceComposition,
} from "@/lib/image-pipeline/render-types";
import {
getBackendFeatureFlags,
type BackendFeatureFlags,
@@ -20,14 +24,15 @@ export type BackendDiagnosticsMetadata = {
};
type PreviewWorkerPayload = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
featureFlags?: BackendFeatureFlags;
};
type FullWorkerPayload = RenderFullOptions & {
type FullWorkerPayload = Omit<RenderFullOptions, "signal"> & {
featureFlags?: BackendFeatureFlags;
};
@@ -318,19 +323,20 @@ function runWorkerRequest<TResponse extends PreviewRenderResult | RenderFullResu
worker.postMessage({
kind: "full",
requestId,
payload: args.payload as RenderFullOptions,
payload: args.payload as FullWorkerPayload,
} satisfies WorkerRequestMessage);
});
}
function getPreviewRequestKey(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
}): string {
return [
hashPipeline(options.sourceUrl, options.steps),
hashPipeline(options.sourceComposition ?? options.sourceUrl ?? null, options.steps),
options.previewWidth,
options.includeHistogram === true ? 1 : 0,
].join(":");
@@ -341,7 +347,8 @@ function getWorkerFeatureFlagsSnapshot(): BackendFeatureFlags {
}
async function runPreviewRequest(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -352,6 +359,7 @@ async function runPreviewRequest(options: {
kind: "preview",
payload: {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
@@ -367,6 +375,7 @@ async function runPreviewRequest(options: {
if (!shouldFallbackToMainThread(error)) {
logWorkerClientDebug("preview request failed without fallback", {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
diagnostics: getLastBackendDiagnostics(),
@@ -377,6 +386,7 @@ async function runPreviewRequest(options: {
logWorkerClientDebug("preview request falling back to main-thread", {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
error,
@@ -387,7 +397,8 @@ async function runPreviewRequest(options: {
}
function getOrCreateSharedPreviewRequest(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -419,7 +430,8 @@ function getOrCreateSharedPreviewRequest(options: {
}
export async function renderPreviewWithWorkerFallback(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -431,6 +443,7 @@ export async function renderPreviewWithWorkerFallback(options: {
const sharedRequest = getOrCreateSharedPreviewRequest({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
@@ -488,14 +501,16 @@ export async function renderPreviewWithWorkerFallback(options: {
export async function renderFullWithWorkerFallback(
options: RenderFullOptions,
): Promise<RenderFullResult> {
const { signal, ...serializableOptions } = options;
try {
return await runWorkerRequest<RenderFullResult>({
kind: "full",
payload: {
...options,
...serializableOptions,
featureFlags: getWorkerFeatureFlagsSnapshot(),
},
signal: options.signal,
signal,
});
} catch (error: unknown) {
if (isAbortError(error)) {

219
lib/mixer-crop-layout.ts Normal file
View File

@@ -0,0 +1,219 @@
// Smallest normalized width/height that may remain after crop trims are
// applied; guards against zero or negative content sizes from extreme trims.
const MIN_CROP_REMAINING_SIZE = 0.1;

// Fit strategy for placing a source rect into bounds: "contain" letterboxes,
// "cover" fills the bounds and lets the source overflow (to be cropped).
type MixerSurfaceFit = "contain" | "cover";

/**
 * Formats a number as a CSS percentage, snapping float residue that is
 * effectively zero (|value| < 1e-10) to an exact "0%".
 */
function formatPercent(value: number): string {
  if (Math.abs(value) < 1e-10) {
    return "0%";
  }
  return `${value}%`;
}
/**
 * Centers a source rectangle inside the given bounds using the requested fit.
 *
 * "contain" (the default) scales the source until it fully fits inside the
 * bounds; "cover" scales until the bounds are fully covered, letting the
 * source overflow symmetrically. Degenerate inputs (non-positive sizes, or a
 * non-finite/non-positive scale) fall back to the bounds rect itself so the
 * caller always receives usable geometry.
 */
function computeFittedRect(args: {
  sourceWidth: number;
  sourceHeight: number;
  boundsX: number;
  boundsY: number;
  boundsWidth: number;
  boundsHeight: number;
  fit?: MixerSurfaceFit;
}): { x: number; y: number; width: number; height: number } {
  const boundsRect = {
    x: args.boundsX,
    y: args.boundsY,
    width: args.boundsWidth,
    height: args.boundsHeight,
  };
  const sizesAreValid =
    args.sourceWidth > 0 && args.sourceHeight > 0 && args.boundsWidth > 0 && args.boundsHeight > 0;
  if (!sizesAreValid) {
    return boundsRect;
  }
  const widthRatio = args.boundsWidth / args.sourceWidth;
  const heightRatio = args.boundsHeight / args.sourceHeight;
  const scale =
    (args.fit ?? "contain") === "cover"
      ? Math.max(widthRatio, heightRatio)
      : Math.min(widthRatio, heightRatio);
  if (!Number.isFinite(scale) || scale <= 0) {
    return boundsRect;
  }
  const fittedWidth = args.sourceWidth * scale;
  const fittedHeight = args.sourceHeight * scale;
  // Center the scaled source within the bounds on both axes.
  return {
    x: args.boundsX + (args.boundsWidth - fittedWidth) / 2,
    y: args.boundsY + (args.boundsHeight - fittedHeight) / 2,
    width: fittedWidth,
    height: fittedHeight,
  };
}
/**
 * Converts a mixer overlay frame (normalized 0..1 relative to the fitted base
 * image) into a rect normalized against the whole surface.
 *
 * The base image is first fitted into the surface (default "contain"); the
 * overlay rect is then re-expressed as fractions of the full surface so it
 * can drive percentage-based DOM positioning. Returns null when the base or
 * the surface has a non-positive dimension.
 */
export function computeMixerFrameRectInSurface(args: {
  surfaceWidth: number;
  surfaceHeight: number;
  baseWidth: number;
  baseHeight: number;
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  fit?: MixerSurfaceFit;
}): { x: number; y: number; width: number; height: number } | null {
  const { surfaceWidth, surfaceHeight, baseWidth, baseHeight } = args;
  if (surfaceWidth <= 0 || surfaceHeight <= 0 || baseWidth <= 0 || baseHeight <= 0) {
    return null;
  }
  const fittedBase = computeFittedRect({
    sourceWidth: baseWidth,
    sourceHeight: baseHeight,
    boundsX: 0,
    boundsY: 0,
    boundsWidth: surfaceWidth,
    boundsHeight: surfaceHeight,
    fit: args.fit,
  });
  // Overlay coordinates are fractions of the fitted base rect; dividing by
  // the surface size re-normalizes them against the full surface.
  return {
    x: (fittedBase.x + args.overlayX * fittedBase.width) / surfaceWidth,
    y: (fittedBase.y + args.overlayY * fittedBase.height) / surfaceHeight,
    width: (args.overlayWidth * fittedBase.width) / surfaceWidth,
    height: (args.overlayHeight * fittedBase.height) / surfaceHeight,
  };
}
/**
 * Computes where the cropped overlay content lands inside a frame of the
 * given aspect ratio, normalized so both axes run 0..1 within the frame.
 *
 * Crop trims shrink the source's effective size; the trimmed source is then
 * contain-fitted into a frameAspectRatio x 1 box and re-normalized. Returns
 * null when the source has no positive dimensions.
 */
export function computeVisibleMixerContentRect(args: {
  frameAspectRatio: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}): { x: number; y: number; width: number; height: number } | null {
  if (args.sourceWidth <= 0 || args.sourceHeight <= 0) {
    return null;
  }
  // Clamp the remaining span so extreme trims never collapse the content.
  const remainingWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE);
  const remainingHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE);
  const aspect = args.frameAspectRatio > 0 ? args.frameAspectRatio : 1;
  const fitted = computeFittedRect({
    sourceWidth: args.sourceWidth * remainingWidth,
    sourceHeight: args.sourceHeight * remainingHeight,
    boundsX: 0,
    boundsY: 0,
    boundsWidth: aspect,
    boundsHeight: 1,
  });
  // Divide x/width by the aspect so the horizontal axis is normalized to
  // 0..1 just like the vertical axis.
  return {
    x: fitted.x / aspect,
    y: fitted.y,
    width: fitted.width / aspect,
    height: fitted.height,
  };
}
/**
 * Produces percentage-based CSS positioning (left/top/width/height) for the
 * overlay image so that crop trims slide and scale the image within its
 * clipping frame while the frame itself keeps its size.
 *
 * When the visible content rect cannot be computed (missing source size), a
 * simpler fallback is used that accounts only for the crop trims.
 */
export function computeMixerCropImageStyle(args: {
  frameAspectRatio: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}) {
  const remainingWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE);
  const remainingHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE);
  const visibleRect = computeVisibleMixerContentRect(args);
  if (visibleRect === null) {
    // No intrinsic source size: scale the image by the inverse of the
    // remaining span and shift it left/up by the leading trims.
    return {
      left: formatPercent((-args.cropLeft / remainingWidth) * 100),
      top: formatPercent((-args.cropTop / remainingHeight) * 100),
      width: formatPercent((1 / remainingWidth) * 100),
      height: formatPercent((1 / remainingHeight) * 100),
    } as const;
  }
  const scaledWidth = visibleRect.width / remainingWidth;
  const scaledHeight = visibleRect.height / remainingHeight;
  const left = visibleRect.x - (args.cropLeft / remainingWidth) * visibleRect.width;
  const top = visibleRect.y - (args.cropTop / remainingHeight) * visibleRect.height;
  return {
    left: formatPercent(left * 100),
    top: formatPercent(top * 100),
    width: formatPercent(scaledWidth * 100),
    height: formatPercent(scaledHeight * 100),
  } as const;
}
/**
 * Combines frame projection and crop framing into the CSS style for the
 * compare-view overlay image.
 *
 * The frame's on-screen aspect ratio is derived from the surface-projected
 * frame rect when available; otherwise it falls back to the raw overlay rect
 * ratio, and finally to a square (1) frame.
 */
export function computeMixerCompareOverlayImageStyle(args: {
  surfaceWidth: number;
  surfaceHeight: number;
  baseWidth: number;
  baseHeight: number;
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}) {
  const frameRect = computeMixerFrameRectInSurface({
    surfaceWidth: args.surfaceWidth,
    surfaceHeight: args.surfaceHeight,
    baseWidth: args.baseWidth,
    baseHeight: args.baseHeight,
    overlayX: args.overlayX,
    overlayY: args.overlayY,
    overlayWidth: args.overlayWidth,
    overlayHeight: args.overlayHeight,
  });
  let frameAspectRatio = 1;
  if (frameRect !== null && frameRect.width > 0 && frameRect.height > 0) {
    // frameRect is surface-normalized; multiply back by the surface
    // dimensions to recover the on-screen pixel aspect ratio.
    frameAspectRatio =
      (frameRect.width * args.surfaceWidth) / (frameRect.height * args.surfaceHeight);
  } else if (args.overlayWidth > 0 && args.overlayHeight > 0) {
    frameAspectRatio = args.overlayWidth / args.overlayHeight;
  }
  return computeMixerCropImageStyle({
    frameAspectRatio,
    sourceWidth: args.sourceWidth,
    sourceHeight: args.sourceHeight,
    cropLeft: args.cropLeft,
    cropTop: args.cropTop,
    cropRight: args.cropRight,
    cropBottom: args.cropBottom,
  });
}
/**
 * True once the overlay image that is currently requested has finished
 * loading (loaded URL matches the current URL) and reported positive
 * intrinsic dimensions.
 */
export function isMixerCropImageReady(args: {
  currentOverlayUrl: string | null | undefined;
  loadedOverlayUrl: string | null;
  sourceWidth: number;
  sourceHeight: number;
}): boolean {
  if (!args.currentOverlayUrl) {
    return false;
  }
  if (args.loadedOverlayUrl !== args.currentOverlayUrl) {
    return false;
  }
  return args.sourceWidth > 0 && args.sourceHeight > 0;
}

View File

@@ -17,6 +17,13 @@ const sourceLoaderMocks = vi.hoisted(() => ({
vi.mock("@/lib/image-pipeline/source-loader", () => ({
loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap,
loadRenderSourceBitmap: ({ sourceUrl }: { sourceUrl?: string }) => {
if (!sourceUrl) {
throw new Error("Render source is required.");
}
return sourceLoaderMocks.loadSourceBitmap(sourceUrl);
},
}));
function createPreviewPixels(): Uint8ClampedArray {

View File

@@ -0,0 +1,117 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { RenderFullResult, RenderSourceComposition } from "@/lib/image-pipeline/render-types";
// Spies are created via vi.hoisted so the vi.mock factories below can
// reference them even though vi.mock calls are hoisted above this code.
const bridgeMocks = vi.hoisted(() => ({
renderFull: vi.fn(),
}));
const previewRendererMocks = vi.hoisted(() => ({
renderPreview: vi.fn(),
}));
// Replace the real bridge and preview renderer with the spies so the worker
// module under test never performs real rendering work.
vi.mock("@/lib/image-pipeline/bridge", () => ({
renderFull: bridgeMocks.renderFull,
}));
vi.mock("@/lib/image-pipeline/preview-renderer", () => ({
renderPreview: previewRendererMocks.renderPreview,
}));
// Shape of the "full" render request message posted to the worker in these
// tests; `sourceComposition` carries the mixer composition under test.
type WorkerMessage = {
kind: "full";
requestId: number;
payload: {
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: [];
render: {
resolution: "original";
format: "png";
};
};
};
// Minimal stand-in for the worker global scope: captures outgoing
// postMessage calls and exposes the onmessage handler the worker installs.
type WorkerScopeMock = {
postMessage: ReturnType<typeof vi.fn>;
onmessage: ((event: MessageEvent<WorkerMessage>) => void) | null;
};
/**
 * Builds a minimal successful RenderFullResult fixture (64x64 PNG, 8 bytes)
 * used to stub the bridge's renderFull spy.
 */
function createFullResult(): RenderFullResult {
  const squareSize = 64;
  return {
    blob: new Blob(["rendered"]),
    width: squareSize,
    height: squareSize,
    mimeType: "image/png",
    format: "png",
    quality: null,
    sizeBytes: 8,
    sourceWidth: squareSize,
    sourceHeight: squareSize,
    wasSizeClamped: false,
  };
}
/**
 * Creates a fresh worker-global stub: a postMessage spy and no onmessage
 * handler installed yet (the worker module is expected to set it).
 */
function createWorkerScope(): WorkerScopeMock {
  const scope: WorkerScopeMock = {
    postMessage: vi.fn(),
    onmessage: null,
  };
  return scope;
}
describe("image-pipeline.worker full render", () => {
beforeEach(() => {
// Reset the module registry and stubbed globals so each test re-imports the
// worker module and re-installs its onmessage handler on a fresh scope stub.
vi.resetModules();
vi.unstubAllGlobals();
bridgeMocks.renderFull.mockReset();
bridgeMocks.renderFull.mockResolvedValue(createFullResult());
previewRendererMocks.renderPreview.mockReset();
});
it("forwards sourceComposition to renderFull for full requests", async () => {
const workerScope = createWorkerScope();
// Stub `self` before importing: the worker module attaches onmessage to it.
vi.stubGlobal("self", workerScope);
await import("@/lib/image-pipeline/image-pipeline.worker");
const sourceComposition: RenderSourceComposition = {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 0.5,
overlayX: 32,
overlayY: 16,
overlayWidth: 128,
overlayHeight: 64,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
};
// Drive the worker's message handler directly with a "full" render request
// that carries only a composition (no sourceUrl).
workerScope.onmessage?.({
data: {
kind: "full",
requestId: 41,
payload: {
sourceComposition,
steps: [],
render: {
resolution: "original",
format: "png",
},
},
},
} as MessageEvent<WorkerMessage>);
await vi.waitFor(() => {
expect(bridgeMocks.renderFull).toHaveBeenCalledTimes(1);
});
// The composition must reach the bridge unchanged.
expect(bridgeMocks.renderFull).toHaveBeenCalledWith(
expect.objectContaining({
sourceComposition,
}),
);
});
});

View File

@@ -355,4 +355,446 @@ describe("loadSourceBitmap", () => {
expect(createImageBitmap).toHaveBeenCalledWith(fakeVideo);
expect(revokeObjectUrl).toHaveBeenCalledWith("blob:video-source");
});
// Verifies contain-fit parity for the bake path: a 200x100 overlay placed in
// a 25x50 frame (0.25/0.5 of the 100x100 base) must be letterboxed, not
// stretched. The drawImage assertions use the 9-argument canvas form
// (image, sx, sy, sw, sh, dx, dy, dw, dh).
it("renders non-square mixer overlays with contain-fit parity instead of stretching", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
// Intercept only canvas creation; all other elements go to the real DOM.
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
// Map each known input to its bitmap; anything else is a test wiring bug.
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in mixer contain-fit test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.25,
overlayHeight: 0.5,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
}),
).resolves.toBe(composedBitmap);
expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100);
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(0);
expect(overlayDrawArgs?.[2]).toBe(0);
expect(overlayDrawArgs?.[3]).toBe(200);
expect(overlayDrawArgs?.[4]).toBe(100);
// Destination: full source drawn 25 wide (2:1 ratio -> 12.5 tall),
// vertically centered in the 25x50 frame at y = 20 + (50 - 12.5) / 2.
expect(overlayDrawArgs?.[5]).toBe(10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(38.75, 10);
expect(overlayDrawArgs?.[7]).toBe(25);
expect(overlayDrawArgs?.[8]).toBeCloseTo(12.5, 10);
});
// Verifies that crop framing trims the sampled source region (here the left
// half via cropLeft 0.5) while the destination frame rect stays fixed at
// 10,20,40,40, and that drawing is clipped to that frame (save/rect/clip/
// restore bracket the overlay draw).
it("applies mixer crop framing by trimming source edges while leaving the displayed frame size untouched", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const save = vi.fn();
const restore = vi.fn();
const beginPath = vi.fn();
const rect = vi.fn();
const clip = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save,
restore,
beginPath,
rect,
clip,
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
// Intercept only canvas creation; all other elements go to the real DOM.
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in mixer content framing test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.4,
cropLeft: 0.5,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
}),
).resolves.toBe(composedBitmap);
expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100);
expect(save).toHaveBeenCalledTimes(1);
expect(beginPath).toHaveBeenCalledTimes(1);
// Clip rect equals the overlay frame in base pixels (0.1/0.2/0.4/0.4 * 100).
expect(rect).toHaveBeenCalledWith(10, 20, 40, 40);
expect(clip).toHaveBeenCalledTimes(1);
// Source rect samples the right half (sx 100, sw 100 of the 200-wide
// overlay); destination stays the unchanged 40x40 frame.
expect(drawImage).toHaveBeenNthCalledWith(
2,
overlayBitmap,
100,
0,
100,
100,
10,
20,
40,
40,
);
expect(restore).toHaveBeenCalledTimes(1);
});
// Verifies that crop trims only change which source region is sampled: the
// destination rect keeps the configured overlayWidth/overlayHeight
// (0.5/0.3 of the 100x100 base -> 50x30 at 15,25).
it("keeps overlayWidth and overlayHeight fixed while crop framing trims the sampled source region", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
// Intercept only canvas creation; all other elements go to the real DOM.
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in overlay size preservation test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.15,
overlayY: 0.25,
overlayWidth: 0.5,
overlayHeight: 0.3,
cropLeft: 0.25,
cropTop: 0.1,
cropRight: 0.25,
cropBottom: 0.3,
},
}),
).resolves.toBe(composedBitmap);
// Source rect trimmed by the crops (sx 50, sy 10, sw 100, sh ~60); the
// destination frame stays at 15,25 sized 50x30.
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(50);
expect(overlayDrawArgs?.[2]).toBe(10);
expect(overlayDrawArgs?.[3]).toBe(100);
expect(overlayDrawArgs?.[4]).toBeCloseTo(60, 10);
expect(overlayDrawArgs?.[5]).toBeCloseTo(15, 10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(25, 10);
expect(overlayDrawArgs?.[7]).toBeCloseTo(50, 10);
expect(overlayDrawArgs?.[8]).toBeCloseTo(30, 10);
});
// Verifies the bake keeps contain-fit after vertical crops: trimming the
// 200x100 overlay to 200x50 makes it 4:1, so it is drawn 40 wide x 10 tall
// and vertically centered inside the 40x40 frame.
it("contains a cropped wide source within the overlay frame during bake", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
// Intercept only canvas creation; all other elements go to the real DOM.
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in aspect-aware crop bake test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.4,
cropLeft: 0,
cropTop: 0.25,
cropRight: 0,
cropBottom: 0.25,
},
}),
).resolves.toBe(composedBitmap);
// Source: full width, middle half vertically (sy 25, sh 50); destination:
// 40x10 centered vertically at y = 20 + (40 - 10) / 2 = 35.
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(0);
expect(overlayDrawArgs?.[2]).toBe(25);
expect(overlayDrawArgs?.[3]).toBe(200);
expect(overlayDrawArgs?.[4]).toBe(50);
expect(overlayDrawArgs?.[5]).toBe(10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(35, 10);
expect(overlayDrawArgs?.[7]).toBe(40);
expect(overlayDrawArgs?.[8]).toBeCloseTo(10, 10);
});
});

View File

@@ -341,6 +341,7 @@ describe("webgl backend poc", () => {
vi.doMock("@/lib/image-pipeline/source-loader", () => ({
loadSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }),
loadRenderSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }),
}));
vi.spyOn(HTMLCanvasElement.prototype, "getContext").mockReturnValue({

View File

@@ -4,7 +4,7 @@ import { buildGraphSnapshot } from "@/lib/canvas-render-preview";
import { resolveMixerPreviewFromGraph } from "@/lib/canvas-mixer-preview";
describe("resolveMixerPreviewFromGraph", () => {
it("resolves base and overlay URLs by target handle", () => {
it("resolves base and overlay URLs by target handle while keeping frame and crop trims independent", () => {
const graph = buildGraphSnapshot(
[
{
@@ -25,7 +25,18 @@ describe("resolveMixerPreviewFromGraph", () => {
{
id: "mixer-1",
type: "mixer",
data: { blendMode: "screen", opacity: 70, offsetX: 12, offsetY: -8 },
data: {
blendMode: "screen",
opacity: 70,
overlayX: 0.12,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
},
],
[
@@ -41,12 +52,114 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "screen",
opacity: 70,
offsetX: 12,
offsetY: -8,
overlayX: 0.12,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
});
});
it("prefers render output URL over upstream preview source when available", () => {
it("preserves crop trims when frame resize data changes", () => {
const graph = buildGraphSnapshot(
[
{
id: "image-base",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.6,
overlayHeight: 0.3,
cropLeft: 0.15,
cropTop: 0.05,
cropRight: 0.4,
cropBottom: 0.25,
},
},
],
[
{ source: "image-base", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual(
expect.objectContaining({
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.6,
overlayHeight: 0.3,
cropLeft: 0.15,
cropTop: 0.05,
cropRight: 0.4,
cropBottom: 0.25,
}),
);
});
it("preserves overlayWidth and overlayHeight when crop trims change", () => {
const graph = buildGraphSnapshot(
[
{
id: "image-base",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
overlayX: 0.05,
overlayY: 0.25,
overlayWidth: 0.55,
overlayHeight: 0.35,
cropLeft: 0.4,
cropTop: 0.1,
cropRight: 0.3,
cropBottom: 0.1,
},
},
],
[
{ source: "image-base", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual(
expect.objectContaining({
overlayX: 0.05,
overlayY: 0.25,
overlayWidth: 0.55,
overlayHeight: 0.35,
cropLeft: 0.4,
cropTop: 0.1,
cropRight: 0.3,
cropBottom: 0.1,
}),
);
});
it("prefers live render preview URL over stale baked render output", () => {
const graph = buildGraphSnapshot(
[
{
@@ -82,11 +195,79 @@ describe("resolveMixerPreviewFromGraph", () => {
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/render-output.png",
overlayUrl: "https://cdn.example.com/upstream.png",
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("does not reuse stale baked render output when only live sourceComposition exists", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-base",
type: "image",
data: { url: "https://cdn.example.com/overlay-base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "upstream-mixer",
type: "mixer",
data: {},
},
{
id: "render-overlay",
type: "render",
data: {
lastUploadUrl: "https://cdn.example.com/stale-render-output.png",
},
},
{
id: "mixer-1",
type: "mixer",
data: {},
},
],
[
{ source: "overlay-base", target: "upstream-mixer", targetHandle: "base" },
{ source: "overlay-asset", target: "upstream-mixer", targetHandle: "overlay" },
{ source: "upstream-mixer", target: "render-overlay" },
{ source: "base-image", target: "mixer-1", targetHandle: "base" },
{ source: "render-overlay", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "partial",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -113,12 +294,18 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("normalizes blend mode and clamps numeric values", () => {
it("normalizes crop trims and clamps", () => {
const graph = buildGraphSnapshot(
[
{
@@ -137,8 +324,14 @@ describe("resolveMixerPreviewFromGraph", () => {
data: {
blendMode: "unknown",
opacity: 180,
offsetX: 9999,
offsetY: "-9999",
overlayX: -3,
overlayY: "1.4",
overlayWidth: 2,
overlayHeight: 0,
cropLeft: "0.95",
cropTop: -2,
cropRight: "4",
cropBottom: "0",
},
},
],
@@ -154,8 +347,151 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "normal",
opacity: 100,
offsetX: 2048,
offsetY: -2048,
overlayX: 0,
overlayY: 0.9,
overlayWidth: 1,
overlayHeight: 0.1,
cropLeft: 0.9,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("missing rect fields fallback to sensible defaults", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-ai",
type: "ai-image",
data: { url: "https://cdn.example.com/base-ai.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "multiply",
opacity: 42,
},
},
],
[
{ source: "base-ai", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base-ai.png",
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "multiply",
opacity: 42,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("maps legacy content rect fields into crop trims during normalization", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-ai",
type: "ai-image",
data: { url: "https://cdn.example.com/base-ai.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
contentX: 0.2,
contentY: 0.1,
contentWidth: 0.5,
contentHeight: 0.6,
},
},
],
[
{ source: "base-ai", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base-ai.png",
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0.2,
cropTop: 0.1,
cropRight: 0.30000000000000004,
cropBottom: 0.30000000000000004,
});
});
it("legacy offset fields still yield visible overlay geometry", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-ai",
type: "ai-image",
data: { url: "https://cdn.example.com/base-ai.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
offsetX: 100,
offsetY: -40,
},
},
],
[
{ source: "base-ai", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base-ai.png",
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -190,8 +526,14 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
error: "duplicate-handle-edge",
});
});

View File

@@ -4,8 +4,147 @@ import {
buildGraphSnapshot,
resolveRenderPreviewInputFromGraph,
} from "@/lib/canvas-render-preview";
import {
computeMixerCompareOverlayImageStyle,
computeMixerFrameRectInSurface,
computeVisibleMixerContentRect,
computeMixerCropImageStyle,
isMixerCropImageReady,
} from "@/lib/mixer-crop-layout";
describe("resolveRenderPreviewInputFromGraph", () => {
// A render node fed by a mixer must resolve to a sourceComposition (with a
// null sourceUrl) that carries the mixer's full frame + crop geometry.
it("resolves mixer input as renderable mixer composition", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-image",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "overlay",
opacity: 76,
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.55,
overlayHeight: 0.44,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
},
{
id: "render-1",
type: "render",
data: {},
},
],
[
{ source: "base-image", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-image", target: "mixer-1", targetHandle: "overlay" },
{ source: "mixer-1", target: "render-1" },
],
);
const preview = resolveRenderPreviewInputFromGraph({
nodeId: "render-1",
graph,
});
// Geometry values must pass through unchanged; no pipeline steps collected.
expect(preview).toEqual({
sourceUrl: null,
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 76,
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.55,
overlayHeight: 0.44,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
steps: [],
});
});
it("normalizes mixer composition values for render input", () => {
  // Deliberately hostile mixer data: strings where numbers are expected,
  // values outside [0..1], a zero-size frame, and an unknown blend mode.
  const hostileMixerData = {
    blendMode: "unknown",
    opacity: 180,
    overlayX: -3,
    overlayY: "1.4",
    overlayWidth: 2,
    overlayHeight: 0,
    cropLeft: "0.95",
    cropTop: -2,
    cropRight: "4",
    cropBottom: "0",
  };
  const graph = buildGraphSnapshot(
    [
      { id: "base-image", type: "image", data: { url: "https://cdn.example.com/base.png" } },
      { id: "overlay-image", type: "asset", data: { url: "https://cdn.example.com/overlay.png" } },
      { id: "mixer-1", type: "mixer", data: hostileMixerData },
      { id: "render-1", type: "render", data: {} },
    ],
    [
      { source: "base-image", target: "mixer-1", targetHandle: "base" },
      { source: "overlay-image", target: "mixer-1", targetHandle: "overlay" },
      { source: "mixer-1", target: "render-1" },
    ],
  );

  const preview = resolveRenderPreviewInputFromGraph({ nodeId: "render-1", graph });

  // Everything clamps back into a valid composition: the blend mode falls
  // back to "normal", the frame rect is forced inside the unit square, and
  // crops are capped so some visible content always remains.
  expect(preview.sourceComposition).toEqual({
    kind: "mixer",
    baseUrl: "https://cdn.example.com/base.png",
    overlayUrl: "https://cdn.example.com/overlay.png",
    blendMode: "normal",
    opacity: 100,
    overlayX: 0,
    overlayY: 0.9,
    overlayWidth: 1,
    overlayHeight: 0.1,
    cropLeft: 0.9,
    cropTop: 0,
    cropRight: 0,
    cropBottom: 0,
  });
});
it("includes crop in collected pipeline steps", () => {
const graph = buildGraphSnapshot(
[
@@ -88,5 +227,191 @@ describe("resolveRenderPreviewInputFromGraph", () => {
const preview = resolveRenderPreviewInputFromGraph({ nodeId: "render-1", graph });
expect(preview.sourceUrl).toBe("https://cdn.example.com/generated-video.mp4");
expect(preview.sourceComposition).toBeUndefined();
});
it("prefers live render preview URLs over stale baked render URLs inside downstream mixer compositions", () => {
  // "render-overlay" carries a stale lastUploadUrl bake, but its live input
  // is "overlay-upstream" — the downstream mixer must see the live URL.
  const nodes = [
    { id: "base-image", type: "image", data: { url: "https://cdn.example.com/base.png" } },
    { id: "overlay-upstream", type: "image", data: { url: "https://cdn.example.com/upstream.png" } },
    {
      id: "render-overlay",
      type: "render",
      data: { lastUploadUrl: "https://cdn.example.com/stale-render-output.png" },
    },
    { id: "mixer-1", type: "mixer", data: {} },
    { id: "render-2", type: "render", data: {} },
  ];
  const edges = [
    { source: "overlay-upstream", target: "render-overlay" },
    { source: "base-image", target: "mixer-1", targetHandle: "base" },
    { source: "render-overlay", target: "mixer-1", targetHandle: "overlay" },
    { source: "mixer-1", target: "render-2" },
  ];

  const preview = resolveRenderPreviewInputFromGraph({
    nodeId: "render-2",
    graph: buildGraphSnapshot(nodes, edges),
  });

  // Defaults apply for the unconfigured mixer; the overlay URL is the live
  // upstream image, not the stale baked render output.
  expect(preview).toEqual({
    sourceUrl: null,
    sourceComposition: {
      kind: "mixer",
      baseUrl: "https://cdn.example.com/base.png",
      overlayUrl: "https://cdn.example.com/upstream.png",
      blendMode: "normal",
      opacity: 100,
      overlayX: 0,
      overlayY: 0,
      overlayWidth: 1,
      overlayHeight: 1,
      cropLeft: 0,
      cropTop: 0,
      cropRight: 0,
      cropBottom: 0,
    },
    steps: [],
  });
});
});
describe("mixer crop layout parity", () => {
  // A 2:1 landscape source is used throughout; the interesting cases come
  // from fitting it into square (1:1) frames and base-aware surface rects.
  const wideSource = { sourceWidth: 200, sourceHeight: 100 };

  it("contains a wide cropped source inside a square overlay frame", () => {
    const rect = computeVisibleMixerContentRect({
      frameAspectRatio: 1,
      ...wideSource,
      cropLeft: 0,
      cropTop: 0.25,
      cropRight: 0,
      cropBottom: 0.25,
    });
    // The 2:1 source fits the square frame at half height (y 0.25..0.75);
    // cropping a quarter off top and bottom leaves the centered middle band.
    expect(rect).toEqual({ x: 0, y: 0.375, width: 1, height: 0.25 });
  });

  it("returns compare image styles that letterbox instead of stretching", () => {
    const style = computeMixerCropImageStyle({
      frameAspectRatio: 1,
      ...wideSource,
      cropLeft: 0,
      cropTop: 0,
      cropRight: 0,
      cropBottom: 0,
    });
    // Uncropped 2:1 content in a square frame is letterboxed, never stretched.
    expect(style).toEqual({ left: "0%", top: "25%", width: "100%", height: "50%" });
  });

  it("uses the actual base-aware frame pixel ratio for compare crop math", () => {
    const style = computeMixerCompareOverlayImageStyle({
      surfaceWidth: 500,
      surfaceHeight: 380,
      baseWidth: 200,
      baseHeight: 100,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
      ...wideSource,
      cropLeft: 0.1,
      cropTop: 0,
      cropRight: 0.1,
      cropBottom: 0,
    });
    expect(style).toEqual({ left: "0%", top: "0%", width: "100%", height: "100%" });
  });

  it("does not mark compare crop overlay ready before natural size is known", () => {
    const ready = isMixerCropImageReady({
      currentOverlayUrl: "https://cdn.example.com/overlay-a.png",
      loadedOverlayUrl: null,
      sourceWidth: 0,
      sourceHeight: 0,
    });
    expect(ready).toBe(false);
  });

  it("invalidates compare crop overlay readiness on source swap until the new image loads", () => {
    // The loaded URL still points at the previous overlay image, so the
    // known natural size does not belong to the current overlay yet.
    const ready = isMixerCropImageReady({
      currentOverlayUrl: "https://cdn.example.com/overlay-b.png",
      loadedOverlayUrl: "https://cdn.example.com/overlay-a.png",
      ...wideSource,
    });
    expect(ready).toBe(false);
  });

  it("positions mixer overlay frame relative to the displayed base-image rect", () => {
    const frame = computeMixerFrameRectInSurface({
      surfaceWidth: 1,
      surfaceHeight: 1,
      baseWidth: 200,
      baseHeight: 100,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
    });
    // The 2:1 base occupies y 0.25..0.75 of the unit surface, so the frame's
    // normalized y/height are mapped into that band (0.2 -> 0.35, 0.4 -> 0.2).
    expect(frame).toEqual({ x: 0.1, y: 0.35, width: 0.4, height: 0.2 });
  });

  it("returns null frame placement until base image natural size is known", () => {
    const frame = computeMixerFrameRectInSurface({
      surfaceWidth: 1,
      surfaceHeight: 1,
      baseWidth: 0,
      baseHeight: 0,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
    });
    expect(frame).toBeNull();
  });
});

View File

@@ -32,6 +32,13 @@ vi.mock("@/lib/image-pipeline/render-core", () => ({
vi.mock("@/lib/image-pipeline/source-loader", () => {
  // Mirror the real module surface: loadSourceBitmap delegates straight to the
  // shared mock, while loadRenderSourceBitmap enforces the real module's
  // "source required" contract before delegating.
  const loadRenderSourceBitmap = ({ sourceUrl }: { sourceUrl?: string }) => {
    if (!sourceUrl) {
      throw new Error("Render source is required.");
    }
    return sourceLoaderMocks.loadSourceBitmap(sourceUrl);
  };
  return {
    loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap,
    loadRenderSourceBitmap,
  };
});
describe("preview-renderer cancellation", () => {

View File

@@ -199,6 +199,48 @@ describe("worker-client fallbacks", () => {
expect(bridgeMocks.renderFull).not.toHaveBeenCalled();
});
it("does not include AbortSignal in full worker payload serialization", async () => {
  // Capture every message posted to the fake worker and answer "full"
  // requests asynchronously with a canned result.
  const postedMessages: WorkerMessage[] = [];
  FakeWorker.behavior = (worker, message) => {
    postedMessages.push(message);
    if (message.kind === "full") {
      queueMicrotask(() => {
        worker.onmessage?.({
          data: {
            kind: "full-result",
            requestId: message.requestId,
            payload: createFullResult(),
          },
        } as MessageEvent);
      });
    }
  };
  vi.stubGlobal("Worker", FakeWorker as unknown as typeof Worker);

  const { renderFullWithWorkerFallback } = await import("@/lib/image-pipeline/worker-client");
  await renderFullWithWorkerFallback({
    sourceUrl: "https://cdn.example.com/source.png",
    steps: [],
    render: { resolution: "original", format: "png" },
    signal: new AbortController().signal,
  });

  // The AbortSignal must be stripped before postMessage: it is not
  // structured-cloneable and must never cross the worker boundary.
  const fullRequest = postedMessages.find((message) => message.kind === "full") as
    | (WorkerMessage & { payload?: Record<string, unknown> })
    | undefined;
  expect(fullRequest).toBeDefined();
  expect(fullRequest?.payload).not.toHaveProperty("signal");
});
it("still falls back to the main thread when the Worker API is unavailable", async () => {
vi.stubGlobal("Worker", undefined);