feat(canvas): add mixer drag-resize and mixer->render bake

This commit is contained in:
2026-04-11 10:03:41 +02:00
parent ae2fa1d269
commit f499aea691
28 changed files with 1731 additions and 152 deletions

View File

@@ -133,16 +133,22 @@ render: 300 × 420 mixer: 360 × 320
- **Handles:** genau zwei Inputs links (`base`, `overlay`) und ein Output rechts (`mixer-out`).
- **Erlaubte Inputs:** `image`, `asset`, `ai-image`, `render`.
- **Connection-Limits:** maximal 2 eingehende Kanten insgesamt, davon pro Handle genau 1.
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `offsetX`, `offsetY`.
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` (normierte 0..1-Rect-Werte).
- **Output-Semantik:** pseudo-image (clientseitig aus Graph + Controls aufgeloest), kein persistiertes Asset, kein Storage-Write.
- **UI/Interaction:** nur Inline-Formcontrols im Node; keine Drag-Manipulation im Preview, keine Rotation/Skalierung/Masks.
- **UI/Interaction:** Overlay ist im Preview direkt per Drag verschiebbar und ueber Corner-Handles frei resizable; numerische Inline-Controls bleiben als Feineinstellung erhalten.
### Compare-Integration (V1)
- `compare` versteht `mixer`-Outputs ueber `lib/canvas-mixer-preview.ts`.
- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Offset).
- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Overlay-Rect).
- Scope bleibt eng: keine pauschale pseudo-image-Unterstuetzung fuer alle Consumer in V1.
### Render-Bake-Pfad (V1)
- Offizieller Bake-Flow: `mixer -> render`.
- `render` konsumiert die Mixer-Komposition (`sourceComposition.kind = "mixer"`) und nutzt sie fuer Preview + finalen Render/Upload.
- `mixer -> adjustments -> render` ist bewusst verschoben (deferred) und aktuell nicht offizieller Scope.
---
## Node-Status-Modell
@@ -314,7 +320,7 @@ useCanvasData (use-canvas-data.ts)
- **Node-Taxonomie:** Alle Node-Typen sind in `lib/canvas-node-catalog.ts` definiert. Phase-2/3 Nodes haben `implemented: false` und `disabledHint`.
- **Video-Connection-Policy:** `video-prompt` darf **nur** mit `ai-video` verbunden werden (und umgekehrt). `text → video-prompt` ist erlaubt (Prompt-Quelle). `ai-video → compare` ist erlaubt.
- **Mixer-Connection-Policy:** `mixer` akzeptiert nur `image|asset|ai-image|render`; Ziel-Handles sind nur `base` und `overlay`, pro Handle maximal eine eingehende Kante, insgesamt maximal zwei.
- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Downstream-Nodes muessen den pseudo-image-Resolver nutzen (aktuell gezielt fuer `compare`).
- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Offizielle Consumer sind `compare` und der direkte Bake-Pfad `mixer -> render`; `mixer -> adjustments -> render` bleibt vorerst deferred.
- **Agent-Flow:** `agent` akzeptiert nur Content-/Kontext-Quellen (z. B. `render`, `compare`, `text`, `image`) als Input; ausgehende Kanten sind fuer `agent -> agent-output` vorgesehen.
- **Convex Generated Types:** `api.ai.generateVideo` wird u. U. nicht in `convex/_generated/api.d.ts` exportiert. Der Code verwendet `api as unknown as {...}` als Workaround. Ein `npx convex dev`-Zyklus würde die Typen korrekt generieren.
- **Canvas Graph Query:** Der Canvas nutzt `canvasGraph.get` (aus `convex/canvasGraph.ts`) statt separater `nodes.list`/`edges.list` Queries. Optimistic Updates laufen über `canvas-graph-query-cache.ts`.

View File

@@ -256,8 +256,10 @@ describe("CompareNode render preview inputs", () => {
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "multiply",
opacity: 62,
offsetX: 12,
offsetY: -4,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
});
});

View File

@@ -52,8 +52,10 @@ function buildMixerNodeProps(overrides?: Partial<React.ComponentProps<typeof Mix
data: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 0.5,
overlayHeight: 0.5,
},
selected: false,
dragging: false,
@@ -76,7 +78,30 @@ describe("MixerNode", () => {
let container: HTMLDivElement | null = null;
let root: Root | null = null;
const readyNodes: TestNode[] = [
{ id: "image-base", type: "image", data: { url: "https://cdn.example.com/base.png" } },
{ id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay.png" } },
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 0.5,
overlayHeight: 0.5,
},
},
];
const readyEdges: TestEdge[] = [
{ id: "edge-base", source: "image-base", target: "mixer-1", targetHandle: "base" },
{ id: "edge-overlay", source: "image-overlay", target: "mixer-1", targetHandle: "overlay" },
];
beforeEach(() => {
vi.useFakeTimers();
mocks.queueNodeDataUpdate.mockClear();
container = document.createElement("div");
document.body.appendChild(container);
@@ -90,6 +115,7 @@ describe("MixerNode", () => {
});
}
container?.remove();
vi.useRealTimers();
root = null;
container = null;
});
@@ -130,26 +156,7 @@ describe("MixerNode", () => {
});
it("renders ready state with stacked base and overlay previews", async () => {
await renderNode({
nodes: [
{ id: "image-base", type: "image", data: { url: "https://cdn.example.com/base.png" } },
{ id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay.png" } },
{
id: "mixer-1",
type: "mixer",
data: { blendMode: "multiply", opacity: 60, offsetX: 14, offsetY: -8 },
},
],
edges: [
{ id: "edge-base", source: "image-base", target: "mixer-1", targetHandle: "base" },
{
id: "edge-overlay",
source: "image-overlay",
target: "mixer-1",
targetHandle: "overlay",
},
],
});
await renderNode({ nodes: readyNodes, edges: readyEdges });
const baseImage = container?.querySelector('img[alt="Mixer base"]');
const overlayImage = container?.querySelector('img[alt="Mixer overlay"]');
@@ -158,13 +165,199 @@ describe("MixerNode", () => {
expect(overlayImage).toBeTruthy();
});
it("queues node data updates for blend mode, opacity, and overlay offsets", async () => {
it("drag updates persisted overlay geometry", async () => {
// Arrange: mixer with base + overlay inputs connected so the preview renders.
await renderNode({ nodes: readyNodes, edges: readyEdges });
const preview = container?.querySelector('[data-testid="mixer-preview"]');
const overlay = container?.querySelector('[data-testid="mixer-overlay"]');
if (!(preview instanceof HTMLDivElement)) {
throw new Error("preview not found");
}
if (!(overlay instanceof HTMLImageElement)) {
throw new Error("overlay image not found");
}
// Pin the preview box at 200x200 so pixel deltas map to exact 0..1 fractions.
vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({
x: 0,
y: 0,
top: 0,
left: 0,
right: 200,
bottom: 200,
width: 200,
height: 200,
toJSON: () => ({}),
});
// Start the drag on the overlay image itself.
await act(async () => {
overlay.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 50, clientY: 50 }));
});
// Move by (40px, 20px) => (0.2, 0.1) of the 200px preview, release, then
// advance fake timers past the save debounce so the update is flushed.
await act(async () => {
window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 90, clientY: 70 }));
window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
// Position moved by the normalized delta; size is untouched by a move drag.
expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.5,
overlayHeight: 0.5,
}),
});
});
it("drag clamps overlay bounds inside preview", async () => {
// Arrange: mixer with base + overlay inputs connected so the preview renders.
await renderNode({ nodes: readyNodes, edges: readyEdges });
const preview = container?.querySelector('[data-testid="mixer-preview"]');
const overlay = container?.querySelector('[data-testid="mixer-overlay"]');
if (!(preview instanceof HTMLDivElement)) {
throw new Error("preview not found");
}
if (!(overlay instanceof HTMLImageElement)) {
throw new Error("overlay image not found");
}
// Pin the preview box at 200x200 so pixel deltas map to exact 0..1 fractions.
vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({
x: 0,
y: 0,
top: 0,
left: 0,
right: 200,
bottom: 200,
width: 200,
height: 200,
toJSON: () => ({}),
});
await act(async () => {
overlay.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 20, clientY: 20 }));
});
// Drag far outside the 200px preview, release, flush the save debounce.
await act(async () => {
window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 400, clientY: 380 }));
window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
// With a 0.5-wide/-high overlay, position clamps at 1 - 0.5 = 0.5 on each axis.
expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({
overlayX: 0.5,
overlayY: 0.5,
overlayWidth: 0.5,
overlayHeight: 0.5,
}),
});
});
it("resize updates persisted overlay width and height", async () => {
// Arrange: mixer with base + overlay inputs connected so the preview renders.
await renderNode({ nodes: readyNodes, edges: readyEdges });
const preview = container?.querySelector('[data-testid="mixer-preview"]');
// South-east handle: dragging it changes width/height, not the origin.
const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]');
if (!(preview instanceof HTMLDivElement)) {
throw new Error("preview not found");
}
if (!(resizeHandle instanceof HTMLDivElement)) {
throw new Error("resize handle not found");
}
// Pin the preview box at 200x200 so pixel deltas map to exact 0..1 fractions.
vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({
x: 0,
y: 0,
top: 0,
left: 0,
right: 200,
bottom: 200,
width: 200,
height: 200,
toJSON: () => ({}),
});
await act(async () => {
resizeHandle.dispatchEvent(
new MouseEvent("mousedown", { bubbles: true, clientX: 100, clientY: 100 }),
);
});
// Move by (40px, 20px) => width 0.5 + 0.2 = 0.7, height 0.5 + 0.1 = 0.6;
// then flush the save debounce via fake timers.
await act(async () => {
window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 140, clientY: 120 }));
window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({
overlayWidth: 0.7,
overlayHeight: 0.6,
}),
});
});
it("enforces minimum overlay size during resize", async () => {
// Arrange: mixer with base + overlay inputs connected so the preview renders.
await renderNode({ nodes: readyNodes, edges: readyEdges });
const preview = container?.querySelector('[data-testid="mixer-preview"]');
const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]');
if (!(preview instanceof HTMLDivElement)) {
throw new Error("preview not found");
}
if (!(resizeHandle instanceof HTMLDivElement)) {
throw new Error("resize handle not found");
}
// Pin the preview box at 200x200 so pixel deltas map to exact 0..1 fractions.
vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({
x: 0,
y: 0,
top: 0,
left: 0,
right: 200,
bottom: 200,
width: 200,
height: 200,
toJSON: () => ({}),
});
await act(async () => {
resizeHandle.dispatchEvent(
new MouseEvent("mousedown", { bubbles: true, clientX: 100, clientY: 100 }),
);
});
// Drag the SE handle far up-left, which would make the rect negative;
// release and flush the save debounce via fake timers.
await act(async () => {
window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: -600, clientY: -700 }));
window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
// Size is clamped at the 0.1 minimum on both axes instead of collapsing.
expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({
overlayWidth: 0.1,
overlayHeight: 0.1,
}),
});
});
it("numeric controls still update overlay rect fields", async () => {
await renderNode();
const blendMode = container?.querySelector('select[name="blendMode"]');
const opacity = container?.querySelector('input[name="opacity"]');
const offsetX = container?.querySelector('input[name="offsetX"]');
const offsetY = container?.querySelector('input[name="offsetY"]');
const overlayX = container?.querySelector('input[name="overlayX"]');
const overlayY = container?.querySelector('input[name="overlayY"]');
const overlayWidth = container?.querySelector('input[name="overlayWidth"]');
const overlayHeight = container?.querySelector('input[name="overlayHeight"]');
if (!(blendMode instanceof HTMLSelectElement)) {
throw new Error("blendMode select not found");
@@ -172,16 +365,23 @@ describe("MixerNode", () => {
if (!(opacity instanceof HTMLInputElement)) {
throw new Error("opacity input not found");
}
if (!(offsetX instanceof HTMLInputElement)) {
throw new Error("offsetX input not found");
if (!(overlayX instanceof HTMLInputElement)) {
throw new Error("overlayX input not found");
}
if (!(offsetY instanceof HTMLInputElement)) {
throw new Error("offsetY input not found");
if (!(overlayY instanceof HTMLInputElement)) {
throw new Error("overlayY input not found");
}
if (!(overlayWidth instanceof HTMLInputElement)) {
throw new Error("overlayWidth input not found");
}
if (!(overlayHeight instanceof HTMLInputElement)) {
throw new Error("overlayHeight input not found");
}
await act(async () => {
blendMode.value = "screen";
blendMode.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
@@ -192,6 +392,7 @@ describe("MixerNode", () => {
opacity.value = "45";
opacity.dispatchEvent(new Event("input", { bubbles: true }));
opacity.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
@@ -199,23 +400,47 @@ describe("MixerNode", () => {
});
await act(async () => {
offsetX.value = "12";
offsetX.dispatchEvent(new Event("input", { bubbles: true }));
offsetX.dispatchEvent(new Event("change", { bubbles: true }));
overlayX.value = "0.25";
overlayX.dispatchEvent(new Event("input", { bubbles: true }));
overlayX.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({ offsetX: 12 }),
data: expect.objectContaining({ overlayX: 0.25 }),
});
await act(async () => {
offsetY.value = "-6";
offsetY.dispatchEvent(new Event("input", { bubbles: true }));
offsetY.dispatchEvent(new Event("change", { bubbles: true }));
overlayY.value = "0.4";
overlayY.dispatchEvent(new Event("input", { bubbles: true }));
overlayY.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({ offsetY: -6 }),
data: expect.objectContaining({ overlayY: 0.4 }),
});
await act(async () => {
overlayWidth.value = "0.66";
overlayWidth.dispatchEvent(new Event("input", { bubbles: true }));
overlayWidth.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({ overlayWidth: 0.66 }),
});
await act(async () => {
overlayHeight.value = "0.33";
overlayHeight.dispatchEvent(new Event("input", { bubbles: true }));
overlayHeight.dispatchEvent(new Event("change", { bubbles: true }));
await vi.advanceTimersByTimeAsync(250);
});
expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({
nodeId: "mixer-1",
data: expect.objectContaining({ overlayHeight: 0.33 }),
});
});

View File

@@ -155,8 +155,10 @@ describe("useCanvasConnections", () => {
defaultData: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
}),
);
@@ -169,8 +171,10 @@ describe("useCanvasConnections", () => {
data: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
}),
);

View File

@@ -32,6 +32,7 @@ export default function CompareSurface({
const graph = useCanvasGraph();
const usePreview = Boolean(previewInput && (preferPreview || !finalUrl));
const previewSourceUrl = usePreview ? previewInput?.sourceUrl ?? null : null;
const previewSourceComposition = usePreview ? previewInput?.sourceComposition : undefined;
const previewSteps = usePreview ? previewInput?.steps ?? EMPTY_STEPS : EMPTY_STEPS;
const visibleFinalUrl = usePreview ? undefined : finalUrl;
const previewDebounceMs = shouldFastPathPreviewPipeline(
@@ -43,6 +44,7 @@ export default function CompareSurface({
const { canvasRef, isRendering, error } = usePipelinePreview({
sourceUrl: previewSourceUrl,
sourceComposition: previewSourceComposition,
steps: previewSteps,
nodeWidth,
includeHistogram: false,
@@ -92,12 +94,15 @@ export default function CompareSurface({
<img
src={mixerPreviewState.overlayUrl}
alt={label ?? "Comparison image"}
className="absolute inset-0 h-full w-full object-contain"
className="absolute object-contain"
draggable={false}
style={{
mixBlendMode: mixerPreviewState.blendMode,
opacity: mixerPreviewState.opacity / 100,
transform: `translate(${mixerPreviewState.offsetX}px, ${mixerPreviewState.offsetY}px)`,
left: `${mixerPreviewState.overlayX * 100}%`,
top: `${mixerPreviewState.overlayY * 100}%`,
width: `${mixerPreviewState.overlayWidth * 100}%`,
height: `${mixerPreviewState.overlayHeight * 100}%`,
}}
/>
</>

View File

@@ -1,9 +1,18 @@
"use client";
import { useMemo, useState, type ChangeEvent, type FormEvent } from "react";
import {
useEffect,
useMemo,
useRef,
useState,
type ChangeEvent,
type FormEvent,
type MouseEvent as ReactMouseEvent,
} from "react";
import { Handle, Position, type NodeProps } from "@xyflow/react";
import BaseNodeWrapper from "./base-node-wrapper";
import { useNodeLocalData } from "./use-node-local-data";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import {
@@ -14,46 +23,267 @@ import {
import type { Id } from "@/convex/_generated/dataModel";
const BLEND_MODE_OPTIONS: MixerBlendMode[] = ["normal", "multiply", "screen", "overlay"];
const MIN_OVERLAY_SIZE = 0.1;
const MAX_OVERLAY_POSITION = 1;
const SAVE_DELAY_MS = 160;
type MixerLocalData = ReturnType<typeof normalizeMixerPreviewData>;
type ResizeCorner = "nw" | "ne" | "sw" | "se";
type InteractionState =
| {
kind: "move";
startClientX: number;
startClientY: number;
startData: MixerLocalData;
previewWidth: number;
previewHeight: number;
}
| {
kind: "resize";
corner: ResizeCorner;
startClientX: number;
startClientY: number;
startData: MixerLocalData;
previewWidth: number;
previewHeight: number;
};
/** Restricts `value` to the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number): number {
  // Cap from above first, then from below — same composition as
  // Math.max(min, Math.min(max, value)), including NaN pass-through.
  const cappedAbove = value > max ? max : value;
  return cappedAbove < min ? min : cappedAbove;
}
/**
 * Clamps a mixer overlay rect into the unit preview square.
 *
 * The origin is limited to [0, MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE] on
 * each axis, and width/height are limited so the rect is at least
 * MIN_OVERLAY_SIZE and never extends past the right/bottom edge from its
 * clamped origin. All other fields of `data` pass through unchanged.
 */
function normalizeLocalMixerData(data: MixerLocalData): MixerLocalData {
  const maxOrigin = MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE;
  const overlayX = clamp(data.overlayX, 0, maxOrigin);
  const overlayY = clamp(data.overlayY, 0, maxOrigin);
  return {
    ...data,
    overlayX,
    overlayY,
    overlayWidth: clamp(data.overlayWidth, MIN_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX),
    overlayHeight: clamp(data.overlayHeight, MIN_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY),
  };
}
/**
 * Derives the next overlay rect for a corner-handle resize.
 *
 * Deltas are normalized (fractions of the preview box). West/north corners
 * move the left/top edge while the opposite edge stays fixed; east/south
 * corners grow or shrink width/height from a fixed origin. The result is
 * passed through normalizeLocalMixerData for the final bounds/min-size clamp.
 */
function computeResizeRect(args: {
  startData: MixerLocalData;
  corner: ResizeCorner;
  deltaX: number;
  deltaY: number;
}): Pick<MixerLocalData, "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"> {
  const { startData, corner, deltaX, deltaY } = args;
  const rightEdge = startData.overlayX + startData.overlayWidth;
  const bottomEdge = startData.overlayY + startData.overlayHeight;

  // Horizontal axis: a corner is either a west or an east handle.
  let nextX = startData.overlayX;
  let nextWidth = startData.overlayWidth;
  if (corner === "nw" || corner === "sw") {
    // Left edge follows the pointer; right edge stays anchored.
    nextX = clamp(startData.overlayX + deltaX, 0, rightEdge - MIN_OVERLAY_SIZE);
    nextWidth = rightEdge - nextX;
  } else {
    nextWidth = clamp(
      startData.overlayWidth + deltaX,
      MIN_OVERLAY_SIZE,
      MAX_OVERLAY_POSITION - startData.overlayX,
    );
  }

  // Vertical axis: a corner is either a north or a south handle.
  let nextY = startData.overlayY;
  let nextHeight = startData.overlayHeight;
  if (corner === "nw" || corner === "ne") {
    // Top edge follows the pointer; bottom edge stays anchored.
    nextY = clamp(startData.overlayY + deltaY, 0, bottomEdge - MIN_OVERLAY_SIZE);
    nextHeight = bottomEdge - nextY;
  } else {
    nextHeight = clamp(
      startData.overlayHeight + deltaY,
      MIN_OVERLAY_SIZE,
      MAX_OVERLAY_POSITION - startData.overlayY,
    );
  }

  return normalizeLocalMixerData({
    ...startData,
    overlayX: nextX,
    overlayY: nextY,
    overlayWidth: nextWidth,
    overlayHeight: nextHeight,
  });
}
export default function MixerNode({ id, data, selected }: NodeProps) {
const graph = useCanvasGraph();
const { queueNodeDataUpdate } = useCanvasSync();
const previewRef = useRef<HTMLDivElement | null>(null);
const latestNodeDataRef = useRef((data ?? {}) as Record<string, unknown>);
const [hasImageLoadError, setHasImageLoadError] = useState(false);
const [interaction, setInteraction] = useState<InteractionState | null>(null);
useEffect(() => {
latestNodeDataRef.current = (data ?? {}) as Record<string, unknown>;
}, [data]);
const { localData, updateLocalData } = useNodeLocalData<MixerLocalData>({
nodeId: id,
data,
normalize: normalizeMixerPreviewData,
saveDelayMs: SAVE_DELAY_MS,
onSave: (next) =>
queueNodeDataUpdate({
nodeId: id as Id<"nodes">,
data: {
...latestNodeDataRef.current,
...next,
},
}),
debugLabel: "mixer",
});
const normalizedData = useMemo(() => normalizeMixerPreviewData(data), [data]);
const previewState = useMemo(
() => resolveMixerPreviewFromGraph({ nodeId: id, graph }),
[graph, id],
);
const currentData = (data ?? {}) as Record<string, unknown>;
const updateData = (patch: Partial<ReturnType<typeof normalizeMixerPreviewData>>) => {
void queueNodeDataUpdate({
nodeId: id as Id<"nodes">,
data: {
...currentData,
...patch,
},
});
};
const onBlendModeChange = (event: ChangeEvent<HTMLSelectElement>) => {
setHasImageLoadError(false);
updateData({ blendMode: event.target.value as MixerBlendMode });
updateLocalData((current) => ({
...current,
blendMode: event.target.value as MixerBlendMode,
}));
};
const onNumberChange = (field: "opacity" | "offsetX" | "offsetY") => (
event: FormEvent<HTMLInputElement>,
) => {
const onNumberChange = (
field: "opacity" | "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight",
) =>
(event: FormEvent<HTMLInputElement>) => {
setHasImageLoadError(false);
const nextValue = Number(event.currentTarget.value);
updateData({ [field]: Number.isFinite(nextValue) ? nextValue : 0 });
updateLocalData((current) => {
if (!Number.isFinite(nextValue)) {
return current;
}
if (field === "opacity") {
return {
...current,
opacity: clamp(nextValue, 0, 100),
};
}
return normalizeLocalMixerData({
...current,
[field]: nextValue,
});
});
};
const startInteraction = (
event: ReactMouseEvent<HTMLElement>,
kind: InteractionState["kind"],
corner?: ResizeCorner,
) => {
event.preventDefault();
event.stopPropagation();
const previewRect = previewRef.current?.getBoundingClientRect();
if (!previewRect || previewRect.width <= 0 || previewRect.height <= 0) {
return;
}
setInteraction({
kind,
corner: kind === "resize" ? (corner as ResizeCorner) : undefined,
startClientX: event.clientX,
startClientY: event.clientY,
startData: localData,
previewWidth: previewRect.width,
previewHeight: previewRect.height,
} as InteractionState);
};
useEffect(() => {
if (!interaction) {
return;
}
const handleMouseMove = (event: MouseEvent) => {
const deltaX = (event.clientX - interaction.startClientX) / interaction.previewWidth;
const deltaY = (event.clientY - interaction.startClientY) / interaction.previewHeight;
if (interaction.kind === "move") {
const nextX = clamp(
interaction.startData.overlayX + deltaX,
0,
MAX_OVERLAY_POSITION - interaction.startData.overlayWidth,
);
const nextY = clamp(
interaction.startData.overlayY + deltaY,
0,
MAX_OVERLAY_POSITION - interaction.startData.overlayHeight,
);
updateLocalData((current) => ({
...current,
overlayX: nextX,
overlayY: nextY,
}));
return;
}
const nextRect = computeResizeRect({
startData: interaction.startData,
corner: interaction.corner,
deltaX,
deltaY,
});
updateLocalData((current) => ({
...current,
...nextRect,
}));
};
const handleMouseUp = () => {
setInteraction(null);
};
window.addEventListener("mousemove", handleMouseMove);
window.addEventListener("mouseup", handleMouseUp);
return () => {
window.removeEventListener("mousemove", handleMouseMove);
window.removeEventListener("mouseup", handleMouseUp);
};
}, [interaction, updateLocalData]);
const showReadyPreview = previewState.status === "ready" && !hasImageLoadError;
const showPreviewError = hasImageLoadError || previewState.status === "error";
const overlayStyle = {
mixBlendMode: localData.blendMode,
opacity: localData.opacity / 100,
left: `${localData.overlayX * 100}%`,
top: `${localData.overlayY * 100}%`,
width: `${localData.overlayWidth * 100}%`,
height: `${localData.overlayHeight * 100}%`,
} as const;
return (
<BaseNodeWrapper nodeType="mixer" selected={selected} className="p-0">
<Handle
@@ -82,7 +312,7 @@ export default function MixerNode({ id, data, selected }: NodeProps) {
Mixer
</div>
<div className="relative min-h-[140px] overflow-hidden bg-muted/40">
<div ref={previewRef} data-testid="mixer-preview" className="relative min-h-[140px] overflow-hidden bg-muted/40 nodrag">
{showReadyPreview ? (
<>
{/* eslint-disable-next-line @next/next/no-img-element */}
@@ -97,15 +327,35 @@ export default function MixerNode({ id, data, selected }: NodeProps) {
<img
src={previewState.overlayUrl}
alt="Mixer overlay"
className="absolute inset-0 h-full w-full object-cover"
data-testid="mixer-overlay"
className="absolute object-cover nodrag cursor-move"
draggable={false}
onMouseDown={(event) => startInteraction(event, "move")}
onError={() => setHasImageLoadError(true)}
style={{
mixBlendMode: previewState.blendMode,
opacity: previewState.opacity / 100,
transform: `translate(${previewState.offsetX}px, ${previewState.offsetY}px)`,
}}
style={overlayStyle}
/>
{([
{ corner: "nw", cursor: "nwse-resize" },
{ corner: "ne", cursor: "nesw-resize" },
{ corner: "sw", cursor: "nesw-resize" },
{ corner: "se", cursor: "nwse-resize" },
] as const).map(({ corner, cursor }) => (
<div
key={corner}
role="button"
tabIndex={-1}
data-testid={`mixer-resize-${corner}`}
className="absolute z-10 h-3 w-3 rounded-full border border-white/80 bg-foreground/80 nodrag"
onMouseDown={(event) => startInteraction(event, "resize", corner)}
style={{
left: `${(corner.includes("w") ? localData.overlayX : localData.overlayX + localData.overlayWidth) * 100}%`,
top: `${(corner.includes("n") ? localData.overlayY : localData.overlayY + localData.overlayHeight) * 100}%`,
transform: "translate(-50%, -50%)",
cursor,
}}
/>
))}
</>
) : null}
@@ -133,7 +383,7 @@ export default function MixerNode({ id, data, selected }: NodeProps) {
<span>Blend mode</span>
<select
name="blendMode"
value={normalizedData.blendMode}
value={localData.blendMode}
onChange={onBlendModeChange}
className="nodrag h-8 rounded-md border border-input bg-background px-2 text-xs text-foreground"
>
@@ -154,32 +404,64 @@ export default function MixerNode({ id, data, selected }: NodeProps) {
min={0}
max={100}
step={1}
value={normalizedData.opacity}
value={localData.opacity}
onInput={onNumberChange("opacity")}
/>
</label>
<label className="flex flex-col gap-1 text-muted-foreground">
<span>Offset X</span>
<span>Overlay X</span>
<input
className="nodrag nowheel h-8 rounded-md border border-input bg-background px-2 text-xs text-foreground"
type="number"
name="offsetX"
step={1}
value={normalizedData.offsetX}
onInput={onNumberChange("offsetX")}
name="overlayX"
min={0}
max={0.9}
step={0.01}
value={localData.overlayX}
onInput={onNumberChange("overlayX")}
/>
</label>
<label className="col-span-2 flex flex-col gap-1 text-muted-foreground">
<span>Offset Y</span>
<label className="flex flex-col gap-1 text-muted-foreground">
<span>Overlay Y</span>
<input
className="nodrag nowheel h-8 rounded-md border border-input bg-background px-2 text-xs text-foreground"
type="number"
name="offsetY"
step={1}
value={normalizedData.offsetY}
onInput={onNumberChange("offsetY")}
name="overlayY"
min={0}
max={0.9}
step={0.01}
value={localData.overlayY}
onInput={onNumberChange("overlayY")}
/>
</label>
<label className="flex flex-col gap-1 text-muted-foreground">
<span>Overlay W</span>
<input
className="nodrag nowheel h-8 rounded-md border border-input bg-background px-2 text-xs text-foreground"
type="number"
name="overlayWidth"
min={MIN_OVERLAY_SIZE}
max={1}
step={0.01}
value={localData.overlayWidth}
onInput={onNumberChange("overlayWidth")}
/>
</label>
<label className="flex flex-col gap-1 text-muted-foreground">
<span>Overlay H</span>
<input
className="nodrag nowheel h-8 rounded-md border border-input bg-background px-2 text-xs text-foreground"
type="number"
name="overlayHeight"
min={MIN_OVERLAY_SIZE}
max={1}
step={0.01}
value={localData.overlayHeight}
onInput={onNumberChange("overlayHeight")}
/>
</label>
</div>

View File

@@ -463,11 +463,13 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
);
const sourceUrl = renderPreviewInput.sourceUrl;
const sourceComposition = renderPreviewInput.sourceComposition;
useEffect(() => {
logRenderDebug("node-data-updated", {
nodeId: id,
hasSourceUrl: typeof sourceUrl === "string" && sourceUrl.length > 0,
hasSourceComposition: Boolean(sourceComposition),
storageId: data.storageId ?? null,
lastUploadStorageId: data.lastUploadStorageId ?? null,
hasResolvedUrl: typeof data.url === "string" && data.url.length > 0,
@@ -484,6 +486,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
data.url,
id,
sourceUrl,
sourceComposition,
]);
const sourceNode = useMemo<SourceNodeDescriptor | null>(
@@ -525,9 +528,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
);
const currentPipelineHash = useMemo(() => {
if (!sourceUrl) return null;
return hashPipeline({ sourceUrl, render: renderFingerprint }, steps);
}, [renderFingerprint, sourceUrl, steps]);
if (!sourceUrl && !sourceComposition) return null;
return hashPipeline(
{ source: sourceComposition ?? sourceUrl, render: renderFingerprint },
steps,
);
}, [renderFingerprint, sourceComposition, sourceUrl, steps]);
const isRenderCurrent =
Boolean(currentPipelineHash) && localData.lastRenderedHash === currentPipelineHash;
@@ -557,7 +563,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: "Error",
};
const hasSource = typeof sourceUrl === "string" && sourceUrl.length > 0;
const hasSource =
(typeof sourceUrl === "string" && sourceUrl.length > 0) || Boolean(sourceComposition);
const previewNodeWidth = Math.max(260, Math.round(width ?? 320));
const {
@@ -568,6 +575,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: previewError,
} = usePipelinePreview({
sourceUrl,
sourceComposition,
steps,
nodeWidth: previewNodeWidth,
debounceMs: previewDebounceMs,
@@ -585,6 +593,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
error: fullscreenPreviewError,
} = usePipelinePreview({
sourceUrl: isFullscreenOpen && sourceUrl ? sourceUrl : null,
sourceComposition: isFullscreenOpen ? sourceComposition : undefined,
steps,
nodeWidth: fullscreenPreviewWidth,
includeHistogram: false,
@@ -719,11 +728,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
};
const handleRender = async (mode: "download" | "upload") => {
if (!sourceUrl || !currentPipelineHash) {
if ((!sourceUrl && !sourceComposition) || !currentPipelineHash) {
logRenderDebug("render-aborted-prerequisites", {
nodeId: id,
mode,
hasSourceUrl: Boolean(sourceUrl),
hasSourceComposition: Boolean(sourceComposition),
hasPipelineHash: Boolean(currentPipelineHash),
isOffline: status.isOffline,
});
@@ -768,7 +778,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
});
const renderResult = await renderFullWithWorkerFallback({
sourceUrl,
sourceUrl: sourceUrl ?? undefined,
sourceComposition,
steps,
render: {
resolution: activeData.outputResolution,

View File

@@ -58,7 +58,7 @@ Alle Node-Typen werden über Validators definiert: `phase1NodeTypeValidator`, `n
| `video-prompt` | `content`, `modelId`, `durationSeconds` | KI-Video-Steuer-Node (Eingabe) |
| `ai-video` | `storageId`, `prompt`, `model`, `modelLabel`, `durationSeconds`, `creditCost`, `generatedAt`, `taskId` (transient) | Generiertes KI-Video (System-Output) |
| `compare` | `leftNodeId`, `rightNodeId`, `sliderPosition` | Vergleichs-Node |
| `mixer` | `blendMode`, `opacity`, `offsetX`, `offsetY` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) |
| `mixer` | `blendMode`, `opacity`, `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) |
| `frame` | `label`, `exportWidth`, `exportHeight`, `backgroundColor` | Artboard |
| `group` | `label`, `collapsed` | Container-Node |
| `note` | `content`, `color` | Anmerkung |
@@ -338,6 +338,8 @@ Wirft bei unauthentifiziertem Zugriff. Wird von allen Queries und Mutations genu
- `mixer` ist ein Control-Node mit pseudo-image Semantik, nicht mit persistiertem Medien-Output.
- Keine zusaetzlichen Convex-Tabellen oder Storage-Flows fuer Mixer-Vorschauen.
- Validierung laeuft client- und serverseitig ueber dieselbe Policy (`validateCanvasConnectionPolicy`); `edges.ts` delegiert darauf fuer Paritaet.
- Offizieller Bake-Pfad fuer Mixer ist `mixer -> render` (Render verarbeitet die Mixer-Komposition in Preview/Render-Pipeline).
- `mixer -> adjustments -> render` ist derzeit bewusst deferred und nicht Teil des offiziell supporteten Flows.
---

View File

@@ -4,6 +4,7 @@ import { useEffect, useMemo, useRef, useState } from "react";
import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts";
import { emptyHistogram, type HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
import {
isPipelineAbortError,
renderPreviewWithWorkerFallback,
@@ -12,6 +13,7 @@ import {
type UsePipelinePreviewOptions = {
sourceUrl: string | null;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
nodeWidth: number;
includeHistogram?: boolean;
@@ -54,6 +56,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
const stableRenderInputRef = useRef<{
pipelineHash: string;
sourceUrl: string | null;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
} | null>(null);
@@ -95,11 +98,11 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
);
const pipelineHash = useMemo(() => {
if (!options.sourceUrl) {
if (!options.sourceUrl && !options.sourceComposition) {
return "no-source";
}
return hashPipeline(options.sourceUrl, options.steps);
}, [options.sourceUrl, options.steps]);
return hashPipeline(options.sourceComposition ?? options.sourceUrl, options.steps);
}, [options.sourceComposition, options.sourceUrl, options.steps]);
useEffect(() => {
if (stableRenderInputRef.current?.pipelineHash === pipelineHash) {
@@ -109,13 +112,15 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
stableRenderInputRef.current = {
pipelineHash,
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
};
}, [pipelineHash, options.sourceUrl, options.steps]);
}, [pipelineHash, options.sourceComposition, options.sourceUrl, options.steps]);
useEffect(() => {
const sourceUrl = stableRenderInputRef.current?.sourceUrl ?? null;
if (!sourceUrl) {
const sourceComposition = stableRenderInputRef.current?.sourceComposition;
if (!sourceUrl && !sourceComposition) {
const frameId = window.requestAnimationFrame(() => {
setHistogram(emptyHistogram());
setError(null);
@@ -133,8 +138,10 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
const timer = window.setTimeout(() => {
setIsRendering(true);
setError(null);
const resolvedSourceUrl = sourceUrl ?? undefined;
void renderPreviewWithWorkerFallback({
sourceUrl,
sourceUrl: resolvedSourceUrl,
sourceComposition,
steps: stableRenderInputRef.current?.steps ?? [],
previewWidth,
includeHistogram: options.includeHistogram,
@@ -168,7 +175,8 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
if (process.env.NODE_ENV !== "production") {
console.error("[usePipelinePreview] render failed", {
message,
sourceUrl,
sourceUrl: resolvedSourceUrl,
sourceComposition,
pipelineHash,
previewWidth,
includeHistogram: options.includeHistogram,
@@ -194,7 +202,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): {
canvasRef,
histogram,
isRendering,
hasSource: Boolean(options.sourceUrl),
hasSource: Boolean(options.sourceUrl || options.sourceComposition),
previewAspectRatio,
error,
};

View File

@@ -30,6 +30,7 @@ const RENDER_ALLOWED_SOURCE_TYPES = new Set<string>([
"image",
"asset",
"ai-image",
"mixer",
"crop",
"curves",
"color-adjust",

View File

@@ -19,8 +19,10 @@ export type MixerPreviewState = {
overlayUrl?: string;
blendMode: MixerBlendMode;
opacity: number;
offsetX: number;
offsetY: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
error?: MixerPreviewError;
};
@@ -35,9 +37,14 @@ const DEFAULT_BLEND_MODE: MixerBlendMode = "normal";
const DEFAULT_OPACITY = 100;
const MIN_OPACITY = 0;
const MAX_OPACITY = 100;
const DEFAULT_OFFSET = 0;
const MIN_OFFSET = -2048;
const MAX_OFFSET = 2048;
const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
const MAX_OVERLAY_SIZE = 1;
function clamp(value: number, min: number, max: number): number {
return Math.max(min, Math.min(max, value));
@@ -65,18 +72,67 @@ function normalizeOpacity(value: unknown): number {
return clamp(parsed, MIN_OPACITY, MAX_OPACITY);
}
function normalizeOffset(value: unknown): number {
function normalizeOverlayNumber(value: unknown, fallback: number): number {
const parsed = parseNumeric(value);
if (parsed === null) {
return DEFAULT_OFFSET;
return fallback;
}
return clamp(parsed, MIN_OFFSET, MAX_OFFSET);
return parsed;
}
/**
 * Resolves the normalized overlay rect (x/y/width/height in 0..1 space)
 * from raw mixer node data, clamping every value into its legal range.
 *
 * Legacy nodes that only carry `offsetX`/`offsetY` (and none of the new
 * overlay-rect fields) fall back to the full-frame default rect.
 */
function normalizeOverlayRect(record: Record<string, unknown>): Pick<
  MixerPreviewState,
  "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
> {
  const legacyOffsetOnly =
    (record.offsetX !== undefined || record.offsetY !== undefined) &&
    record.overlayX === undefined &&
    record.overlayY === undefined &&
    record.overlayWidth === undefined &&
    record.overlayHeight === undefined;
  if (legacyOffsetOnly) {
    return {
      overlayX: DEFAULT_OVERLAY_X,
      overlayY: DEFAULT_OVERLAY_Y,
      overlayWidth: DEFAULT_OVERLAY_WIDTH,
      overlayHeight: DEFAULT_OVERLAY_HEIGHT,
    };
  }
  // Position is capped so at least MIN_OVERLAY_SIZE still fits in the frame.
  const maxPosition = MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE;
  const overlayX = clamp(
    normalizeOverlayNumber(record.overlayX, DEFAULT_OVERLAY_X),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  const overlayY = clamp(
    normalizeOverlayNumber(record.overlayY, DEFAULT_OVERLAY_Y),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  // Size is additionally capped so the rect never extends past the frame edge.
  const overlayWidth = clamp(
    normalizeOverlayNumber(record.overlayWidth, DEFAULT_OVERLAY_WIDTH),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX),
  );
  const overlayHeight = clamp(
    normalizeOverlayNumber(record.overlayHeight, DEFAULT_OVERLAY_HEIGHT),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY),
  );
  return { overlayX, overlayY, overlayWidth, overlayHeight };
}
export function normalizeMixerPreviewData(data: unknown): Pick<
MixerPreviewState,
"blendMode" | "opacity" | "offsetX" | "offsetY"
"blendMode" | "opacity" | "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
> {
const record = (data ?? {}) as Record<string, unknown>;
const blendMode = MIXER_BLEND_MODES.has(record.blendMode as MixerBlendMode)
@@ -86,8 +142,7 @@ export function normalizeMixerPreviewData(data: unknown): Pick<
return {
blendMode,
opacity: normalizeOpacity(record.opacity),
offsetX: normalizeOffset(record.offsetX),
offsetY: normalizeOffset(record.offsetY),
...normalizeOverlayRect(record),
};
}
@@ -172,6 +227,8 @@ export function resolveMixerPreviewFromGraph(args: {
if (base.duplicate || overlay.duplicate) {
return {
status: "error",
baseUrl: undefined,
overlayUrl: undefined,
...normalized,
error: "duplicate-handle-edge",
};

View File

@@ -51,8 +51,10 @@ export const CANVAS_NODE_TEMPLATES = [
defaultData: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
},
{

View File

@@ -15,10 +15,25 @@ export type RenderPreviewGraphEdge = {
};
export type RenderPreviewInput = {
sourceUrl: string;
sourceUrl: string | null;
sourceComposition?: RenderPreviewSourceComposition;
steps: PipelineStep[];
};
export type MixerBlendMode = "normal" | "multiply" | "screen" | "overlay";
export type RenderPreviewSourceComposition = {
kind: "mixer";
baseUrl: string;
overlayUrl: string;
blendMode: MixerBlendMode;
opacity: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
};
export type CanvasGraphNodeLike = {
id: string;
type: string;
@@ -38,6 +53,8 @@ export type CanvasGraphSnapshot = {
incomingEdgesByTarget: ReadonlyMap<string, readonly CanvasGraphEdgeLike[]>;
};
type RenderPreviewResolvedInput = RenderPreviewInput;
export type CanvasGraphNodeDataOverrides = ReadonlyMap<string, unknown>;
export function shouldFastPathPreviewPipeline(
@@ -129,6 +146,110 @@ export const RENDER_PREVIEW_PIPELINE_TYPES = new Set([
"detail-adjust",
]);
const MIXER_SOURCE_NODE_TYPES = new Set(["image", "asset", "ai-image", "render"]);
const MIXER_BLEND_MODES = new Set<MixerBlendMode>([
"normal",
"multiply",
"screen",
"overlay",
]);
const DEFAULT_BLEND_MODE: MixerBlendMode = "normal";
const DEFAULT_OPACITY = 100;
const MIN_OPACITY = 0;
const MAX_OPACITY = 100;
const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
const MAX_OVERLAY_SIZE = 1;
/** Restricts `value` to the inclusive range [min, max]. NaN passes through unchanged. */
function clamp(value: number, min: number, max: number): number {
  if (value < min) {
    return min;
  }
  return value > max ? max : value;
}
/**
 * Coerces a value to a finite number.
 * Accepts finite numbers and numeric strings; everything else (including
 * NaN/Infinity and non-numeric strings) yields null.
 */
function parseNumeric(value: unknown): number | null {
  switch (typeof value) {
    case "number":
      return Number.isFinite(value) ? value : null;
    case "string": {
      const numeric = Number(value);
      return Number.isFinite(numeric) ? numeric : null;
    }
    default:
      return null;
  }
}
/**
 * Parses an opacity value and clamps it to [MIN_OPACITY, MAX_OPACITY].
 * Unparsable input falls back to DEFAULT_OPACITY.
 */
function normalizeOpacity(value: unknown): number {
  const numeric = parseNumeric(value);
  return numeric === null ? DEFAULT_OPACITY : clamp(numeric, MIN_OPACITY, MAX_OPACITY);
}
/** Parses an overlay rect component; returns `fallback` when the value is not numeric. */
function normalizeOverlayNumber(value: unknown, fallback: number): number {
  return parseNumeric(value) ?? fallback;
}
/**
 * Resolves the normalized overlay rect (0..1 values) for a mixer composition
 * from raw node data, clamping every value into its legal range.
 *
 * Legacy nodes that only carry `offsetX`/`offsetY` (and none of the new
 * overlay-rect fields) fall back to the full-frame default rect.
 */
function normalizeMixerCompositionRect(data: Record<string, unknown>): Pick<
  RenderPreviewSourceComposition,
  "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
> {
  const legacyOffsetOnly =
    (data.offsetX !== undefined || data.offsetY !== undefined) &&
    data.overlayX === undefined &&
    data.overlayY === undefined &&
    data.overlayWidth === undefined &&
    data.overlayHeight === undefined;
  if (legacyOffsetOnly) {
    return {
      overlayX: DEFAULT_OVERLAY_X,
      overlayY: DEFAULT_OVERLAY_Y,
      overlayWidth: DEFAULT_OVERLAY_WIDTH,
      overlayHeight: DEFAULT_OVERLAY_HEIGHT,
    };
  }
  // Position is capped so at least MIN_OVERLAY_SIZE still fits in the frame.
  const maxPosition = MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE;
  const overlayX = clamp(
    normalizeOverlayNumber(data.overlayX, DEFAULT_OVERLAY_X),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  const overlayY = clamp(
    normalizeOverlayNumber(data.overlayY, DEFAULT_OVERLAY_Y),
    MIN_OVERLAY_POSITION,
    maxPosition,
  );
  // Size is additionally capped so the rect never extends past the frame edge.
  const overlayWidth = clamp(
    normalizeOverlayNumber(data.overlayWidth, DEFAULT_OVERLAY_WIDTH),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX),
  );
  const overlayHeight = clamp(
    normalizeOverlayNumber(data.overlayHeight, DEFAULT_OVERLAY_HEIGHT),
    MIN_OVERLAY_SIZE,
    Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY),
  );
  return { overlayX, overlayY, overlayWidth, overlayHeight };
}
export function resolveRenderFingerprint(data: unknown): {
resolution: RenderResolutionOption;
customWidth?: number;
@@ -163,15 +284,19 @@ export function resolveRenderFingerprint(data: unknown): {
export function resolveRenderPipelineHash(args: {
sourceUrl: string | null;
sourceComposition?: RenderPreviewSourceComposition;
steps: PipelineStep[];
data: unknown;
}): string | null {
if (!args.sourceUrl) {
if (!args.sourceUrl && !args.sourceComposition) {
return null;
}
return hashPipeline(
{ sourceUrl: args.sourceUrl, render: resolveRenderFingerprint(args.data) },
{
source: args.sourceComposition ?? args.sourceUrl,
render: resolveRenderFingerprint(args.data),
},
args.steps,
);
}
@@ -212,6 +337,115 @@ function resolveSourceNodeUrl(node: CanvasGraphNodeLike): string | null {
return resolveNodeImageUrl(node.data);
}
/**
 * Resolves the displayable output URL of a render node.
 * Prefers the node's non-empty `lastUploadUrl`; otherwise falls back to
 * whatever image URL the node data itself resolves to.
 */
function resolveRenderOutputUrl(node: CanvasGraphNodeLike): string | null {
  const data = (node.data ?? {}) as Record<string, unknown>;
  const uploadUrl = data.lastUploadUrl;
  if (typeof uploadUrl === "string" && uploadUrl.length > 0) {
    return uploadUrl;
  }
  return resolveNodeImageUrl(node.data);
}
/**
 * Finds the single incoming edge bound to a mixer handle.
 * The "base" handle also accepts edges without an explicit target handle
 * (null/empty), so older edges keep working. Returns null when zero or
 * more than one candidate matches (ambiguous wiring).
 */
function resolveMixerHandleEdge(args: {
  incomingEdges: readonly CanvasGraphEdgeLike[];
  handle: "base" | "overlay";
}): CanvasGraphEdgeLike | null {
  const matchesHandle = (edge: CanvasGraphEdgeLike): boolean =>
    args.handle === "base"
      ? edge.targetHandle === "base" || edge.targetHandle == null || edge.targetHandle === ""
      : edge.targetHandle === "overlay";
  const candidates = args.incomingEdges.filter(matchesHandle);
  return candidates.length === 1 ? candidates[0] ?? null : null;
}
/**
 * Resolves the image URL a mixer input node contributes.
 * Non-mixer-compatible node types yield null. Render nodes prefer their
 * already-baked output; otherwise their upstream source URL is used, except
 * when that upstream is itself a mixer composition (nested compositions are
 * out of V1 scope).
 */
function resolveMixerSourceUrlFromNode(args: {
  node: CanvasGraphNodeLike;
  graph: CanvasGraphSnapshot;
}): string | null {
  const { node, graph } = args;
  if (!MIXER_SOURCE_NODE_TYPES.has(node.type)) {
    return null;
  }
  if (node.type !== "render") {
    return resolveNodeImageUrl(node.data);
  }
  // Prefer the render node's persisted/baked output when available.
  const bakedUrl = resolveRenderOutputUrl(node);
  if (bakedUrl) {
    return bakedUrl;
  }
  const upstream = resolveRenderPreviewInputFromGraph({ nodeId: node.id, graph });
  // A nested mixer composition cannot be flattened to a URL here — bail out.
  return upstream.sourceComposition ? null : upstream.sourceUrl;
}
/**
 * Resolves the image URL contributed via an edge by looking up the edge's
 * source node; null when the edge is missing or its source node is unknown.
 */
function resolveMixerSourceUrlFromEdge(args: {
  edge: CanvasGraphEdgeLike | null;
  graph: CanvasGraphSnapshot;
}): string | null {
  const sourceNode = args.edge ? args.graph.nodesById.get(args.edge.source) : undefined;
  if (!sourceNode) {
    return null;
  }
  return resolveMixerSourceUrlFromNode({ node: sourceNode, graph: args.graph });
}
/**
 * Builds the mixer source composition for a render target from the graph:
 * resolves the base and overlay input URLs via the mixer's handles and
 * normalizes blend mode, opacity, and overlay rect from the node data.
 * Returns null when either input cannot be resolved to a URL.
 */
function resolveRenderMixerCompositionFromGraph(args: {
  node: CanvasGraphNodeLike;
  graph: CanvasGraphSnapshot;
}): RenderPreviewSourceComposition | null {
  const incomingEdges = args.graph.incomingEdgesByTarget.get(args.node.id) ?? [];
  const baseUrl = resolveMixerSourceUrlFromEdge({
    edge: resolveMixerHandleEdge({ incomingEdges, handle: "base" }),
    graph: args.graph,
  });
  const overlayUrl = resolveMixerSourceUrlFromEdge({
    edge: resolveMixerHandleEdge({ incomingEdges, handle: "overlay" }),
    graph: args.graph,
  });
  if (!baseUrl || !overlayUrl) {
    return null;
  }
  const data = (args.node.data ?? {}) as Record<string, unknown>;
  const rawBlendMode = data.blendMode as MixerBlendMode;
  return {
    kind: "mixer",
    baseUrl,
    overlayUrl,
    blendMode: MIXER_BLEND_MODES.has(rawBlendMode) ? rawBlendMode : DEFAULT_BLEND_MODE,
    opacity: normalizeOpacity(data.opacity),
    ...normalizeMixerCompositionRect(data),
  };
}
export function buildGraphSnapshot(
nodes: readonly CanvasGraphNodeLike[],
edges: readonly CanvasGraphEdgeLike[],
@@ -384,7 +618,32 @@ export function findSourceNodeFromGraph(
export function resolveRenderPreviewInputFromGraph(args: {
nodeId: string;
graph: CanvasGraphSnapshot;
}): { sourceUrl: string | null; steps: PipelineStep[] } {
}): RenderPreviewResolvedInput {
const renderIncoming = getSortedIncomingEdge(
args.graph.incomingEdgesByTarget.get(args.nodeId),
);
const renderInputNode = renderIncoming
? args.graph.nodesById.get(renderIncoming.source)
: null;
if (renderInputNode?.type === "mixer") {
const sourceComposition = resolveRenderMixerCompositionFromGraph({
node: renderInputNode,
graph: args.graph,
});
const steps = collectPipelineFromGraph(args.graph, {
nodeId: args.nodeId,
isPipelineNode: (node) => RENDER_PREVIEW_PIPELINE_TYPES.has(node.type ?? ""),
});
return {
sourceUrl: null,
sourceComposition: sourceComposition ?? undefined,
steps,
};
}
const sourceUrl = getSourceImageFromGraph(args.graph, {
nodeId: args.nodeId,
isSourceNode: (node) => SOURCE_NODE_TYPES.has(node.type ?? ""),
@@ -406,7 +665,7 @@ export function resolveRenderPreviewInput(args: {
nodeId: string;
nodes: readonly RenderPreviewGraphNode[];
edges: readonly RenderPreviewGraphEdge[];
}): { sourceUrl: string | null; steps: PipelineStep[] } {
}): RenderPreviewResolvedInput {
return resolveRenderPreviewInputFromGraph({
nodeId: args.nodeId,
graph: buildGraphSnapshot(args.nodes, args.edges),

View File

@@ -299,8 +299,10 @@ export const NODE_DEFAULTS: Record<
data: {
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
},
},
"agent-output": {

View File

@@ -10,7 +10,7 @@ import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader";
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;
@@ -99,7 +99,11 @@ function resolveMimeType(format: RenderFormat): string {
export async function renderFull(options: RenderFullOptions): Promise<RenderFullResult> {
const { signal } = options;
const bitmap = await loadSourceBitmap(options.sourceUrl, { signal });
const bitmap = await loadRenderSourceBitmap({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,

View File

@@ -2,14 +2,19 @@ import { renderFull } from "@/lib/image-pipeline/bridge";
import { renderPreview } from "@/lib/image-pipeline/preview-renderer";
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import type {
RenderFullOptions,
RenderFullResult,
RenderSourceComposition,
} from "@/lib/image-pipeline/render-types";
import {
IMAGE_PIPELINE_BACKEND_FLAG_KEYS,
type BackendFeatureFlags,
} from "@/lib/image-pipeline/backend/feature-flags";
type PreviewWorkerPayload = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -112,6 +117,7 @@ async function handlePreviewRequest(requestId: number, payload: PreviewWorkerPay
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderPreview({
sourceUrl: payload.sourceUrl,
sourceComposition: payload.sourceComposition,
steps: payload.steps,
previewWidth: payload.previewWidth,
includeHistogram: payload.includeHistogram,
@@ -161,6 +167,7 @@ async function handleFullRequest(requestId: number, payload: FullWorkerPayload):
applyWorkerFeatureFlags(payload.featureFlags);
const result = await renderFull({
sourceUrl: payload.sourceUrl,
sourceComposition: payload.sourceComposition,
steps: payload.steps,
render: payload.render,
signal: controller.signal,

View File

@@ -8,7 +8,8 @@ import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader";
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
export type PreviewRenderResult = {
width: number;
@@ -64,13 +65,16 @@ async function yieldToMainOrWorkerLoop(): Promise<void> {
}
export async function renderPreview(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
signal?: AbortSignal;
}): Promise<PreviewRenderResult> {
const bitmap = await loadSourceBitmap(options.sourceUrl, {
const bitmap = await loadRenderSourceBitmap({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
signal: options.signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);

View File

@@ -24,6 +24,18 @@ export type RenderSizeLimits = {
maxPixels?: number;
};
export type RenderSourceComposition = {
kind: "mixer";
baseUrl: string;
overlayUrl: string;
blendMode: "normal" | "multiply" | "screen" | "overlay";
opacity: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
};
export type ResolvedRenderSize = {
width: number;
height: number;
@@ -32,7 +44,8 @@ export type ResolvedRenderSize = {
};
export type RenderFullOptions = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
render: RenderOptions;
limits?: RenderSizeLimits;

View File

@@ -12,6 +12,24 @@ type LoadSourceBitmapOptions = {
signal?: AbortSignal;
};
// Local copy of the mixer composition contract (an identical shape is
// declared in lib/image-pipeline/render-types.ts); overlay rect values are
// treated as normalized 0..1 fractions of the base frame by
// normalizeMixerRect below — confirm against callers before reuse.
type RenderSourceComposition = {
kind: "mixer";
baseUrl: string;
overlayUrl: string;
blendMode: "normal" | "multiply" | "screen" | "overlay";
opacity: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
};
// Input for loadRenderSourceBitmap: callers provide sourceComposition to
// bake a mixer result, or sourceUrl for a plain bitmap; neither set is an error.
type LoadRenderSourceBitmapOptions = {
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
signal?: AbortSignal;
};
function throwIfAborted(signal: AbortSignal | undefined): void {
if (signal?.aborted) {
throw new DOMException("The operation was aborted.", "AbortError");
@@ -215,3 +233,200 @@ export async function loadSourceBitmap(
const promise = getOrCreateSourceBitmapPromise(sourceUrl);
return await awaitWithLocalAbort(promise, options.signal);
}
/**
 * Creates a 2D-capable scratch canvas for compositing: an HTMLCanvasElement
 * in DOM environments, an OffscreenCanvas in worker environments.
 *
 * @throws Error when neither canvas implementation is available.
 */
function createWorkingCanvas(width: number, height: number):
  | HTMLCanvasElement
  | OffscreenCanvas {
  if (typeof document === "undefined") {
    if (typeof OffscreenCanvas === "undefined") {
      throw new Error("Canvas rendering is not available in this environment.");
    }
    return new OffscreenCanvas(width, height);
  }
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  return canvas;
}
/**
 * Maps a mixer blend mode onto the matching canvas composite operation.
 * "normal" becomes "source-over"; the remaining modes share their canvas name.
 */
function mixerBlendModeToCompositeOperation(
  blendMode: RenderSourceComposition["blendMode"],
): GlobalCompositeOperation {
  return blendMode === "normal" ? "source-over" : blendMode;
}
/** Converts a 0..100 opacity value into a 0..1 alpha; non-finite input maps to fully opaque. */
function normalizeCompositionOpacity(value: number): number {
  if (!Number.isFinite(value)) {
    return 1;
  }
  const clamped = Math.min(100, Math.max(0, value));
  return clamped / 100;
}
/** Returns `value` when finite; otherwise the provided fallback. */
function normalizeRatio(value: number, fallback: number): number {
  return Number.isFinite(value) ? value : fallback;
}
/**
 * Clamps the composition's overlay rect (normalized 0..1 values) so that it
 * always stays inside the base frame: positions land in [0, 0.9] and sizes
 * in [0.1, 1 - position]. Non-finite inputs fall back to the full frame.
 */
function normalizeMixerRect(source: RenderSourceComposition): {
  x: number;
  y: number;
  width: number;
  height: number;
} {
  const clampPosition = (value: number): number =>
    Math.max(0, Math.min(0.9, normalizeRatio(value, 0)));
  const clampSize = (value: number, position: number): number =>
    Math.max(0.1, Math.min(1, normalizeRatio(value, 1), 1 - position));
  const x = clampPosition(source.overlayX);
  const y = clampPosition(source.overlayY);
  return {
    x,
    y,
    width: clampSize(source.overlayWidth, x),
    height: clampSize(source.overlayHeight, y),
  };
}
/**
 * Computes the crop rect on the source image that CSS `object-fit: cover`
 * would sample when filling the destination: the largest centered region
 * whose aspect ratio matches the destination, so the overlay is cropped
 * rather than stretched.
 *
 * Degenerate dimensions (<= 0) or non-finite aspect ratios return the
 * uncropped source rect.
 */
function computeObjectCoverSourceRect(args: {
  sourceWidth: number;
  sourceHeight: number;
  destinationWidth: number;
  destinationHeight: number;
}): {
  sourceX: number;
  sourceY: number;
  sourceWidth: number;
  sourceHeight: number;
} {
  const { sourceWidth, sourceHeight, destinationWidth, destinationHeight } = args;
  const fullRect = {
    sourceX: 0,
    sourceY: 0,
    sourceWidth,
    sourceHeight,
  };
  const degenerate =
    sourceWidth <= 0 || sourceHeight <= 0 || destinationWidth <= 0 || destinationHeight <= 0;
  if (degenerate) {
    return fullRect;
  }
  const sourceAspect = sourceWidth / sourceHeight;
  const destinationAspect = destinationWidth / destinationHeight;
  if (!Number.isFinite(sourceAspect) || !Number.isFinite(destinationAspect)) {
    return fullRect;
  }
  if (sourceAspect > destinationAspect) {
    // Source is wider than the destination: crop horizontally, centered.
    const croppedWidth = sourceHeight * destinationAspect;
    return {
      sourceX: (sourceWidth - croppedWidth) / 2,
      sourceY: 0,
      sourceWidth: croppedWidth,
      sourceHeight,
    };
  }
  // Source is taller (or equal): crop vertically, centered.
  const croppedHeight = sourceWidth / destinationAspect;
  return {
    sourceX: 0,
    sourceY: (sourceHeight - croppedHeight) / 2,
    sourceWidth,
    sourceHeight: croppedHeight,
  };
}
/**
 * Bakes a mixer composition into a single ImageBitmap:
 * 1. loads the base and overlay bitmaps in parallel,
 * 2. draws the base at full canvas size,
 * 3. draws the overlay into its normalized destination rect with
 *    object-cover cropping, using the composition's blend mode and opacity.
 *
 * @throws Error when a 2D context cannot be created.
 */
async function loadMixerCompositionBitmap(
  sourceComposition: RenderSourceComposition,
  signal?: AbortSignal,
): Promise<ImageBitmap> {
  const [base, overlay] = await Promise.all([
    loadSourceBitmap(sourceComposition.baseUrl, { signal }),
    loadSourceBitmap(sourceComposition.overlayUrl, { signal }),
  ]);
  throwIfAborted(signal);
  const canvas = createWorkingCanvas(base.width, base.height);
  const context = canvas.getContext("2d", { willReadFrequently: true });
  if (!context) {
    throw new Error("Render composition could not create a 2D context.");
  }
  // Base layer fills the whole canvas.
  context.clearRect(0, 0, base.width, base.height);
  context.drawImage(base, 0, 0, base.width, base.height);
  // Destination rect in canvas pixels, derived from the normalized rect.
  const rect = normalizeMixerRect(sourceComposition);
  const destX = rect.x * base.width;
  const destY = rect.y * base.height;
  const destWidth = rect.width * base.width;
  const destHeight = rect.height * base.height;
  // Crop the overlay like CSS object-fit: cover so it is never stretched.
  const crop = computeObjectCoverSourceRect({
    sourceWidth: overlay.width,
    sourceHeight: overlay.height,
    destinationWidth: destWidth,
    destinationHeight: destHeight,
  });
  context.globalCompositeOperation = mixerBlendModeToCompositeOperation(
    sourceComposition.blendMode,
  );
  context.globalAlpha = normalizeCompositionOpacity(sourceComposition.opacity);
  context.drawImage(
    overlay,
    crop.sourceX,
    crop.sourceY,
    crop.sourceWidth,
    crop.sourceHeight,
    destX,
    destY,
    destWidth,
    destHeight,
  );
  // Reset mutable context state before exporting the composed bitmap.
  context.globalCompositeOperation = "source-over";
  context.globalAlpha = 1;
  return await createImageBitmap(canvas);
}
/**
 * Resolves the source bitmap for a render: a baked mixer composition when
 * `sourceComposition` is provided, otherwise the plain `sourceUrl` bitmap.
 *
 * @throws Error when the composition kind is unsupported or no source is given.
 */
export async function loadRenderSourceBitmap(
  options: LoadRenderSourceBitmapOptions,
): Promise<ImageBitmap> {
  const { sourceComposition, sourceUrl, signal } = options;
  if (sourceComposition) {
    if (sourceComposition.kind !== "mixer") {
      throw new Error(`Unsupported source composition '${sourceComposition.kind}'.`);
    }
    return await loadMixerCompositionBitmap(sourceComposition, signal);
  }
  if (!sourceUrl) {
    throw new Error("Render source is required.");
  }
  return await loadSourceBitmap(sourceUrl, { signal });
}

View File

@@ -5,7 +5,11 @@ import {
} from "@/lib/image-pipeline/preview-renderer";
import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts";
import type { HistogramData } from "@/lib/image-pipeline/histogram";
import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types";
import type {
RenderFullOptions,
RenderFullResult,
RenderSourceComposition,
} from "@/lib/image-pipeline/render-types";
import {
getBackendFeatureFlags,
type BackendFeatureFlags,
@@ -20,7 +24,8 @@ export type BackendDiagnosticsMetadata = {
};
type PreviewWorkerPayload = {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -324,13 +329,14 @@ function runWorkerRequest<TResponse extends PreviewRenderResult | RenderFullResu
}
function getPreviewRequestKey(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
}): string {
return [
hashPipeline(options.sourceUrl, options.steps),
hashPipeline(options.sourceComposition ?? options.sourceUrl ?? null, options.steps),
options.previewWidth,
options.includeHistogram === true ? 1 : 0,
].join(":");
@@ -341,7 +347,8 @@ function getWorkerFeatureFlagsSnapshot(): BackendFeatureFlags {
}
async function runPreviewRequest(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -352,6 +359,7 @@ async function runPreviewRequest(options: {
kind: "preview",
payload: {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
@@ -367,6 +375,7 @@ async function runPreviewRequest(options: {
if (!shouldFallbackToMainThread(error)) {
logWorkerClientDebug("preview request failed without fallback", {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
diagnostics: getLastBackendDiagnostics(),
@@ -377,6 +386,7 @@ async function runPreviewRequest(options: {
logWorkerClientDebug("preview request falling back to main-thread", {
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,
error,
@@ -387,7 +397,8 @@ async function runPreviewRequest(options: {
}
function getOrCreateSharedPreviewRequest(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -419,7 +430,8 @@ function getOrCreateSharedPreviewRequest(options: {
}
export async function renderPreviewWithWorkerFallback(options: {
sourceUrl: string;
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: readonly PipelineStep[];
previewWidth: number;
includeHistogram?: boolean;
@@ -431,6 +443,7 @@ export async function renderPreviewWithWorkerFallback(options: {
const sharedRequest = getOrCreateSharedPreviewRequest({
sourceUrl: options.sourceUrl,
sourceComposition: options.sourceComposition,
steps: options.steps,
previewWidth: options.previewWidth,
includeHistogram: options.includeHistogram,

View File

@@ -132,6 +132,16 @@ describe("canvas connection policy", () => {
).toBeNull();
});
it("allows mixer as render source", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "mixer",
targetType: "render",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("describes unsupported crop source message", () => {
expect(getCanvasConnectionValidationMessage("crop-source-invalid")).toBe(
"Crop akzeptiert nur Bild-, Asset-, KI-Bild-, Video-, KI-Video-, Crop- oder Adjustment-Input.",

View File

@@ -17,6 +17,13 @@ const sourceLoaderMocks = vi.hoisted(() => ({
vi.mock("@/lib/image-pipeline/source-loader", () => ({
loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap,
loadRenderSourceBitmap: ({ sourceUrl }: { sourceUrl?: string }) => {
if (!sourceUrl) {
throw new Error("Render source is required.");
}
return sourceLoaderMocks.loadSourceBitmap(sourceUrl);
},
}));
function createPreviewPixels(): Uint8ClampedArray {

View File

@@ -0,0 +1,113 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { RenderFullResult, RenderSourceComposition } from "@/lib/image-pipeline/render-types";
const bridgeMocks = vi.hoisted(() => ({
renderFull: vi.fn(),
}));
const previewRendererMocks = vi.hoisted(() => ({
renderPreview: vi.fn(),
}));
vi.mock("@/lib/image-pipeline/bridge", () => ({
renderFull: bridgeMocks.renderFull,
}));
vi.mock("@/lib/image-pipeline/preview-renderer", () => ({
renderPreview: previewRendererMocks.renderPreview,
}));
// Shape of the "full" render request message these tests post to the worker.
type WorkerMessage = {
kind: "full";
requestId: number;
payload: {
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
steps: [];
render: {
resolution: "original";
format: "png";
};
};
};
// Minimal stand-in for the worker global scope: records postMessage calls
// and exposes the onmessage handler the worker module installs on import.
type WorkerScopeMock = {
postMessage: ReturnType<typeof vi.fn>;
onmessage: ((event: MessageEvent<WorkerMessage>) => void) | null;
};
/** Builds a minimal successful full-render result fixture for the worker tests. */
function createFullResult(): RenderFullResult {
  const blob = new Blob(["rendered"]);
  return {
    blob,
    width: 64,
    height: 64,
    mimeType: "image/png",
    format: "png",
    quality: null,
    sizeBytes: 8,
    sourceWidth: 64,
    sourceHeight: 64,
    wasSizeClamped: false,
  };
}
/** Creates a fresh mock worker scope: spied postMessage, no handler installed yet. */
function createWorkerScope(): WorkerScopeMock {
  return {
    onmessage: null,
    postMessage: vi.fn(),
  };
}
describe("image-pipeline.worker full render", () => {
beforeEach(() => {
// Reset the module registry and all mocks so each test re-imports the
// worker module and gets a fresh onmessage registration on the stubbed scope.
vi.resetModules();
vi.unstubAllGlobals();
bridgeMocks.renderFull.mockReset();
bridgeMocks.renderFull.mockResolvedValue(createFullResult());
previewRendererMocks.renderPreview.mockReset();
});
it("forwards sourceComposition to renderFull for full requests", async () => {
const workerScope = createWorkerScope();
vi.stubGlobal("self", workerScope);
// Importing the worker module installs its onmessage handler on the stubbed `self`.
await import("@/lib/image-pipeline/image-pipeline.worker");
// NOTE(review): fixture values are arbitrary for this pass-through check;
// they do not follow the normalized 0..1 rect / 0..100 opacity contract —
// confirm intentional.
const sourceComposition: RenderSourceComposition = {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 0.5,
overlayX: 32,
overlayY: 16,
overlayWidth: 128,
overlayHeight: 64,
};
// Simulate the main thread posting a "full" render request to the worker.
workerScope.onmessage?.({
data: {
kind: "full",
requestId: 41,
payload: {
sourceComposition,
steps: [],
render: {
resolution: "original",
format: "png",
},
},
},
} as MessageEvent<WorkerMessage>);
// The worker handles messages asynchronously; wait for the bridge call.
await vi.waitFor(() => {
expect(bridgeMocks.renderFull).toHaveBeenCalledTimes(1);
});
// The composition must reach renderFull untouched (pass-through contract).
expect(bridgeMocks.renderFull).toHaveBeenCalledWith(
expect.objectContaining({
sourceComposition,
}),
);
});
});

View File

@@ -355,4 +355,105 @@ describe("loadSourceBitmap", () => {
expect(createImageBitmap).toHaveBeenCalledWith(fakeVideo);
expect(revokeObjectUrl).toHaveBeenCalledWith("blob:video-source");
});
it("renders mixer overlays with object-cover semantics instead of stretching", async () => {
  // Base is square (100x100); overlay is 2:1 (200x100) so a naive stretch into
  // the destination rect would be visibly distorted.
  const baseBlob = new Blob(["base"]);
  const overlayBlob = new Blob(["overlay"]);
  const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
  const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
  const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
  const drawImage = vi.fn();
  // Minimal 2D-context fake: only the members the compositor touches.
  const context = {
    clearRect: vi.fn(),
    drawImage,
    globalCompositeOperation: "source-over" as GlobalCompositeOperation,
    globalAlpha: 1,
  };
  const canvas = {
    width: 0,
    height: 0,
    getContext: vi.fn().mockReturnValue(context),
  } as unknown as HTMLCanvasElement;
  // Intercept only canvas creation; all other elements go through the real DOM.
  const nativeCreateElement = document.createElement.bind(document);
  vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
    if (tagName.toLowerCase() === "canvas") {
      return canvas;
    }
    return nativeCreateElement(tagName);
  });
  // fetch serves the base blob for base.png and the overlay blob for anything else.
  vi.stubGlobal(
    "fetch",
    vi.fn().mockImplementation(async (input: string | URL | Request) => {
      const url = String(input);
      if (url.includes("base.png")) {
        return {
          ok: true,
          status: 200,
          headers: { get: vi.fn().mockReturnValue("image/png") },
          blob: vi.fn().mockResolvedValue(baseBlob),
        };
      }
      return {
        ok: true,
        status: 200,
        headers: { get: vi.fn().mockReturnValue("image/png") },
        blob: vi.fn().mockResolvedValue(overlayBlob),
      };
    }),
  );
  // createImageBitmap maps each known input to its bitmap; the canvas input is
  // the final composited surface, so it yields the expected result bitmap.
  vi.stubGlobal(
    "createImageBitmap",
    vi.fn().mockImplementation(async (input: unknown) => {
      if (input === baseBlob) {
        return baseBitmap;
      }
      if (input === overlayBlob) {
        return overlayBitmap;
      }
      if (input === canvas) {
        return composedBitmap;
      }
      throw new Error("Unexpected createImageBitmap input in mixer cover-fit test.");
    }),
  );
  const { loadRenderSourceBitmap } = await importSubject();
  await expect(
    loadRenderSourceBitmap({
      sourceComposition: {
        kind: "mixer",
        baseUrl: "https://cdn.example.com/base.png",
        overlayUrl: "https://cdn.example.com/overlay.png",
        blendMode: "overlay",
        opacity: 80,
        // Normalized overlay rect: on the 100x100 base this is dest x=10, y=20,
        // w=25, h=50.
        overlayX: 0.1,
        overlayY: 0.2,
        overlayWidth: 0.25,
        overlayHeight: 0.5,
      },
    }),
  ).resolves.toBe(composedBitmap);
  // First draw paints the base full-frame.
  expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100);
  // Second draw must use the 9-arg drawImage form with a centered cover-crop:
  // dest rect aspect is 25/50 = 0.5, so from the 200x100 overlay a 50x100
  // source region is taken, horizontally centered at sx = (200 - 50) / 2 = 75.
  expect(drawImage).toHaveBeenNthCalledWith(
    2,
    overlayBitmap,
    75,
    0,
    50,
    100,
    10,
    20,
    25,
    50,
  );
});
});

View File

@@ -341,6 +341,7 @@ describe("webgl backend poc", () => {
vi.doMock("@/lib/image-pipeline/source-loader", () => ({
loadSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }),
loadRenderSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }),
}));
vi.spyOn(HTMLCanvasElement.prototype, "getContext").mockReturnValue({

View File

@@ -25,7 +25,14 @@ describe("resolveMixerPreviewFromGraph", () => {
{
id: "mixer-1",
type: "mixer",
data: { blendMode: "screen", opacity: 70, offsetX: 12, offsetY: -8 },
data: {
blendMode: "screen",
opacity: 70,
overlayX: 0.12,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
},
},
],
[
@@ -41,8 +48,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "screen",
opacity: 70,
offsetX: 12,
offsetY: -8,
overlayX: 0.12,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
});
});
@@ -85,8 +94,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: "https://cdn.example.com/render-output.png",
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
});
});
@@ -113,12 +124,14 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
});
});
it("normalizes blend mode and clamps numeric values", () => {
it("normalizes rect values and clamps", () => {
const graph = buildGraphSnapshot(
[
{
@@ -137,8 +150,10 @@ describe("resolveMixerPreviewFromGraph", () => {
data: {
blendMode: "unknown",
opacity: 180,
offsetX: 9999,
offsetY: "-9999",
overlayX: -3,
overlayY: "1.4",
overlayWidth: 2,
overlayHeight: 0,
},
},
],
@@ -154,8 +169,92 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "normal",
opacity: 100,
offsetX: 2048,
offsetY: -2048,
overlayX: 0,
overlayY: 0.9,
overlayWidth: 1,
overlayHeight: 0.1,
});
});
it("missing rect fields fallback to sensible defaults", () => {
  // Mixer data carries blend/opacity but no overlay rect fields at all.
  const nodes = [
    {
      id: "base-ai",
      type: "ai-image",
      data: { url: "https://cdn.example.com/base-ai.png" },
    },
    {
      id: "overlay-asset",
      type: "asset",
      data: { url: "https://cdn.example.com/overlay-asset.png" },
    },
    {
      id: "mixer-1",
      type: "mixer",
      data: {
        blendMode: "multiply",
        opacity: 42,
      },
    },
  ];
  const edges = [
    { source: "base-ai", target: "mixer-1", targetHandle: "base" },
    { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
  ];
  const graph = buildGraphSnapshot(nodes, edges);

  const preview = resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph });

  // Absent rect fields resolve to the identity rect (full-frame overlay),
  // while the explicitly provided blend mode and opacity pass through.
  expect(preview).toEqual({
    status: "ready",
    baseUrl: "https://cdn.example.com/base-ai.png",
    overlayUrl: "https://cdn.example.com/overlay-asset.png",
    blendMode: "multiply",
    opacity: 42,
    overlayX: 0,
    overlayY: 0,
    overlayWidth: 1,
    overlayHeight: 1,
  });
});
it("legacy offset fields still yield visible overlay geometry", () => {
  // Pre-rect graphs only stored pixel offsets; the resolver must not let them
  // produce a degenerate (invisible) overlay rect.
  const nodes = [
    {
      id: "base-ai",
      type: "ai-image",
      data: { url: "https://cdn.example.com/base-ai.png" },
    },
    {
      id: "overlay-asset",
      type: "asset",
      data: { url: "https://cdn.example.com/overlay-asset.png" },
    },
    {
      id: "mixer-1",
      type: "mixer",
      data: {
        offsetX: 100,
        offsetY: -40,
      },
    },
  ];
  const edges = [
    { source: "base-ai", target: "mixer-1", targetHandle: "base" },
    { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
  ];
  const graph = buildGraphSnapshot(nodes, edges);

  const preview = resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph });

  // Legacy offsets are ignored in favor of the default full-frame rect and
  // default blend settings.
  expect(preview).toEqual({
    status: "ready",
    baseUrl: "https://cdn.example.com/base-ai.png",
    overlayUrl: "https://cdn.example.com/overlay-asset.png",
    blendMode: "normal",
    opacity: 100,
    overlayX: 0,
    overlayY: 0,
    overlayWidth: 1,
    overlayHeight: 1,
  });
});
@@ -190,8 +289,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
offsetX: 0,
offsetY: 0,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
error: "duplicate-handle-edge",
});
});

View File

@@ -6,6 +6,122 @@ import {
} from "@/lib/canvas-render-preview";
describe("resolveRenderPreviewInputFromGraph", () => {
it("resolves mixer input as renderable mixer composition", () => {
  // A fully-specified mixer node feeding a render node.
  const nodes = [
    {
      id: "base-image",
      type: "image",
      data: { url: "https://cdn.example.com/base.png" },
    },
    {
      id: "overlay-image",
      type: "asset",
      data: { url: "https://cdn.example.com/overlay.png" },
    },
    {
      id: "mixer-1",
      type: "mixer",
      data: {
        blendMode: "overlay",
        opacity: 76,
        overlayX: 0.2,
        overlayY: 0.1,
        overlayWidth: 0.55,
        overlayHeight: 0.44,
      },
    },
    {
      id: "render-1",
      type: "render",
      data: {},
    },
  ];
  const edges = [
    { source: "base-image", target: "mixer-1", targetHandle: "base" },
    { source: "overlay-image", target: "mixer-1", targetHandle: "overlay" },
    { source: "mixer-1", target: "render-1" },
  ];
  const graph = buildGraphSnapshot(nodes, edges);

  const preview = resolveRenderPreviewInputFromGraph({
    nodeId: "render-1",
    graph,
  });

  // A mixer upstream yields no plain sourceUrl; instead the full composition is
  // surfaced for the render node to bake, with in-range values passed through.
  expect(preview).toEqual({
    sourceUrl: null,
    sourceComposition: {
      kind: "mixer",
      baseUrl: "https://cdn.example.com/base.png",
      overlayUrl: "https://cdn.example.com/overlay.png",
      blendMode: "overlay",
      opacity: 76,
      overlayX: 0.2,
      overlayY: 0.1,
      overlayWidth: 0.55,
      overlayHeight: 0.44,
    },
    steps: [],
  });
});
it("normalizes mixer composition values for render input", () => {
  // Deliberately out-of-range / wrongly-typed mixer data.
  const nodes = [
    {
      id: "base-image",
      type: "image",
      data: { url: "https://cdn.example.com/base.png" },
    },
    {
      id: "overlay-image",
      type: "asset",
      data: { url: "https://cdn.example.com/overlay.png" },
    },
    {
      id: "mixer-1",
      type: "mixer",
      data: {
        blendMode: "unknown",
        opacity: 180,
        overlayX: -3,
        overlayY: "1.4",
        overlayWidth: 2,
        overlayHeight: 0,
      },
    },
    {
      id: "render-1",
      type: "render",
      data: {},
    },
  ];
  const edges = [
    { source: "base-image", target: "mixer-1", targetHandle: "base" },
    { source: "overlay-image", target: "mixer-1", targetHandle: "overlay" },
    { source: "mixer-1", target: "render-1" },
  ];
  const graph = buildGraphSnapshot(nodes, edges);

  const preview = resolveRenderPreviewInputFromGraph({
    nodeId: "render-1",
    graph,
  });

  // Unknown blend mode falls back to "normal"; opacity and the rect are
  // clamped into range, with the rect kept inside the unit square.
  expect(preview.sourceComposition).toEqual({
    kind: "mixer",
    baseUrl: "https://cdn.example.com/base.png",
    overlayUrl: "https://cdn.example.com/overlay.png",
    blendMode: "normal",
    opacity: 100,
    overlayX: 0,
    overlayY: 0.9,
    overlayWidth: 1,
    overlayHeight: 0.1,
  });
});
it("includes crop in collected pipeline steps", () => {
const graph = buildGraphSnapshot(
[
@@ -88,5 +204,6 @@ describe("resolveRenderPreviewInputFromGraph", () => {
const preview = resolveRenderPreviewInputFromGraph({ nodeId: "render-1", graph });
expect(preview.sourceUrl).toBe("https://cdn.example.com/generated-video.mp4");
expect(preview.sourceComposition).toBeUndefined();
});
});

View File

@@ -32,6 +32,13 @@ vi.mock("@/lib/image-pipeline/render-core", () => ({
vi.mock("@/lib/image-pipeline/source-loader", () => ({
loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap,
loadRenderSourceBitmap: ({ sourceUrl }: { sourceUrl?: string }) => {
if (!sourceUrl) {
throw new Error("Render source is required.");
}
return sourceLoaderMocks.loadSourceBitmap(sourceUrl);
},
}));
describe("preview-renderer cancellation", () => {