feat(canvas): separate mixer resize and crop semantics

This commit is contained in:
2026-04-15 08:31:53 +02:00
parent 61728f9e52
commit f1c61fd14e
18 changed files with 4783 additions and 228 deletions

View File

@@ -133,9 +133,10 @@ render: 300 × 420 mixer: 360 × 320
- **Handles:** genau zwei Inputs links (`base`, `overlay`) und ein Output rechts (`mixer-out`).
- **Erlaubte Inputs:** `image`, `asset`, `ai-image`, `render`.
- **Connection-Limits:** maximal 2 eingehende Kanten insgesamt, davon pro Handle genau 1.
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` (normierte 0..1-Rect-Werte).
- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` (Frame-Rect, normiert 0..1) plus `contentX`, `contentY`, `contentWidth`, `contentHeight` (Content-Framing innerhalb des Overlay-Frames, ebenfalls normiert 0..1).
- **Output-Semantik:** pseudo-image (clientseitig aus Graph + Controls aufgeloest), kein persistiertes Asset, kein Storage-Write.
- **UI/Interaction:** Overlay ist im Preview direkt per Drag verschiebbar und ueber Corner-Handles frei resizable; numerische Inline-Controls bleiben als Feineinstellung erhalten.
- **UI/Interaction:** Zwei Modi im Preview: `Frame resize` (Overlay-Frame verschieben + ueber Corner-Handles resizen) und `Content framing` (Overlay-Inhalt innerhalb des Frames verschieben). Numerische Inline-Controls bleiben als Feineinstellung erhalten.
- **Sizing/Crop-Verhalten:** Der Overlay-Inhalt wird `object-cover`-aehnlich in den Content-Rect eingepasst; bei abweichenden Seitenverhaeltnissen wird zentriert gecroppt.
### Compare-Integration (V1)
@@ -321,6 +322,7 @@ useCanvasData (use-canvas-data.ts)
- **Video-Connection-Policy:** `video-prompt` darf **nur** mit `ai-video` verbunden werden (und umgekehrt). `text → video-prompt` ist erlaubt (Prompt-Quelle). `ai-video → compare` ist erlaubt.
- **Mixer-Connection-Policy:** `mixer` akzeptiert nur `image|asset|ai-image|render`; Ziel-Handles sind nur `base` und `overlay`, pro Handle maximal eine eingehende Kante, insgesamt maximal zwei.
- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Offizielle Consumer sind `compare` und der direkte Bake-Pfad `mixer -> render`; `mixer -> adjustments -> render` bleibt vorerst deferred.
- **Mixer Legacy-Daten:** Alte `offsetX`/`offsetY`-Mixer-Daten werden beim Lesen auf den Full-Frame-Fallback (`overlay* = 0/0/1/1`) normalisiert; Content-Framing faellt standardmaessig auf `content* = 0/0/1/1` zurueck.
- **Agent-Flow:** `agent` akzeptiert nur Content-/Kontext-Quellen (z. B. `render`, `compare`, `text`, `image`) als Input; ausgehende Kanten sind fuer `agent -> agent-output` vorgesehen.
- **Convex Generated Types:** `api.ai.generateVideo` wird u. U. nicht in `convex/_generated/api.d.ts` exportiert. Der Code verwendet `api as unknown as {...}` als Workaround. Ein `npx convex dev`-Zyklus würde die Typen korrekt generieren.
- **Canvas Graph Query:** Der Canvas nutzt `canvasGraph.get` (aus `convex/canvasGraph.ts`) statt separater `nodes.list`/`edges.list` Queries. Optimistic Updates laufen über `canvas-graph-query-cache.ts`.

View File

@@ -1,5 +1,9 @@
// @vitest-environment jsdom
import React from "react";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { act } from "react";
import { createRoot, type Root } from "react-dom/client";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { renderToStaticMarkup } from "react-dom/server";
import { CanvasGraphProvider } from "@/components/canvas/canvas-graph-context";
@@ -15,12 +19,20 @@ type StoreState = {
}>;
};
type ResizeObserverEntryLike = {
target: Element;
contentRect: { width: number; height: number };
};
const storeState: StoreState = {
nodes: [],
edges: [],
};
const compareSurfaceSpy = vi.fn();
let resizeObserverCallback:
| ((entries: ResizeObserverEntryLike[]) => void)
| null = null;
vi.mock("@xyflow/react", () => ({
Handle: () => null,
@@ -28,6 +40,14 @@ vi.mock("@xyflow/react", () => ({
useStore: (selector: (state: StoreState) => unknown) => selector(storeState),
}));
vi.mock("@/hooks/use-pipeline-preview", () => ({
usePipelinePreview: () => ({
canvasRef: { current: null },
isRendering: false,
error: null,
}),
}));
vi.mock("../nodes/base-node-wrapper", () => ({
default: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
}));
@@ -41,6 +61,8 @@ vi.mock("../nodes/compare-surface", () => ({
import CompareNode from "../nodes/compare-node";
(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true;
function renderCompareNode(props: Record<string, unknown>) {
return renderToStaticMarkup(
<CanvasGraphProvider
@@ -53,10 +75,47 @@ function renderCompareNode(props: Record<string, unknown>) {
}
describe("CompareNode render preview inputs", () => {
let container: HTMLDivElement | null = null;
let root: Root | null = null;
beforeEach(() => {
storeState.nodes = [];
storeState.edges = [];
compareSurfaceSpy.mockReset();
resizeObserverCallback = null;
globalThis.ResizeObserver = class ResizeObserver {
constructor(callback: (entries: ResizeObserverEntryLike[]) => void) {
resizeObserverCallback = callback;
}
observe(target: Element) {
resizeObserverCallback?.([
{
target,
contentRect: { width: 500, height: 380 },
},
]);
}
unobserve() {}
disconnect() {}
} as unknown as typeof ResizeObserver;
container = document.createElement("div");
document.body.appendChild(container);
root = createRoot(container);
});
afterEach(async () => {
if (root) {
await act(async () => {
root?.unmount();
});
}
container?.remove();
root = null;
container = null;
});
it("passes previewInput to CompareSurface for a connected render node without final output", () => {
@@ -167,6 +226,108 @@ describe("CompareNode render preview inputs", () => {
});
});
it("defaults mixer-backed render compare inputs to preview mode when only sourceComposition exists", () => {
storeState.nodes = [
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-image",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "multiply",
opacity: 62,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.5,
cropLeft: 0.1,
cropTop: 0,
cropRight: 0.2,
cropBottom: 0.1,
},
},
{
id: "render-1",
type: "render",
data: {
lastUploadUrl: "https://cdn.example.com/stale-render-output.png",
},
},
];
storeState.edges = [
{
id: "edge-base-mixer",
source: "base-image",
target: "mixer-1",
targetHandle: "base",
},
{
id: "edge-overlay-mixer",
source: "overlay-image",
target: "mixer-1",
targetHandle: "overlay",
},
{ id: "edge-mixer-render", source: "mixer-1", target: "render-1" },
{
id: "edge-render-compare",
source: "render-1",
target: "compare-1",
targetHandle: "left",
},
];
renderCompareNode({
id: "compare-1",
data: { leftUrl: "https://cdn.example.com/stale-render-output.png" },
selected: false,
dragging: false,
zIndex: 0,
isConnectable: true,
type: "compare",
xPos: 0,
yPos: 0,
width: 500,
height: 380,
sourcePosition: undefined,
targetPosition: undefined,
positionAbsoluteX: 0,
positionAbsoluteY: 0,
});
expect(compareSurfaceSpy).toHaveBeenCalledTimes(1);
expect(compareSurfaceSpy.mock.calls[0]?.[0]).toMatchObject({
finalUrl: "https://cdn.example.com/stale-render-output.png",
preferPreview: true,
previewInput: {
sourceUrl: null,
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "multiply",
opacity: 62,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.5,
cropLeft: 0.1,
cropTop: 0,
cropRight: 0.2,
cropBottom: 0.1,
},
steps: [],
},
});
});
it("prefers mixer composite preview over persisted compare finalUrl when mixer is connected", () => {
storeState.nodes = [
{
@@ -250,6 +411,8 @@ describe("CompareNode render preview inputs", () => {
);
expect(mixerCall?.[0]).toMatchObject({
finalUrl: undefined,
nodeWidth: 500,
nodeHeight: 380,
mixerPreviewState: {
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
@@ -260,7 +423,190 @@ describe("CompareNode render preview inputs", () => {
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
});
});
it("passes the measured compare surface size to mixer previews instead of the full node box", async () => {
storeState.nodes = [
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-image",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
blendMode: "normal",
opacity: 100,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
},
},
];
storeState.edges = [
{
id: "edge-base-mixer",
source: "base-image",
target: "mixer-1",
targetHandle: "base",
},
{
id: "edge-overlay-mixer",
source: "overlay-image",
target: "mixer-1",
targetHandle: "overlay",
},
{
id: "edge-mixer-compare",
source: "mixer-1",
target: "compare-1",
targetHandle: "left",
},
];
await act(async () => {
root?.render(
<CanvasGraphProvider
nodes={storeState.nodes as Array<{ id: string; type: string; data?: unknown }>}
edges={storeState.edges}
>
<CompareNode
{...({
id: "compare-1",
data: {},
selected: false,
dragging: false,
zIndex: 0,
isConnectable: true,
type: "compare",
xPos: 0,
yPos: 0,
width: 640,
height: 480,
sourcePosition: undefined,
targetPosition: undefined,
positionAbsoluteX: 0,
positionAbsoluteY: 0,
} as unknown as React.ComponentProps<typeof CompareNode>)}
/>
</CanvasGraphProvider>,
);
});
await vi.waitFor(() => {
const latestCompareSurfaceCall = compareSurfaceSpy.mock.calls.findLast(
([props]) =>
Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState),
);
expect(latestCompareSurfaceCall?.[0]).toMatchObject({
nodeWidth: 500,
nodeHeight: 380,
});
});
const surfaceElement = container?.querySelector(".nodrag.relative.min-h-0.w-full");
expect(surfaceElement).toBeInstanceOf(HTMLDivElement);
await act(async () => {
resizeObserverCallback?.([
{
target: surfaceElement as HTMLDivElement,
contentRect: { width: 468, height: 312 },
},
]);
});
const latestCompareSurfaceCall = compareSurfaceSpy.mock.calls.findLast(
([props]) =>
Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState),
);
expect(latestCompareSurfaceCall?.[0]).toMatchObject({
nodeWidth: 468,
nodeHeight: 312,
});
expect(latestCompareSurfaceCall?.[0]).not.toMatchObject({
nodeWidth: 640,
nodeHeight: 480,
});
});
it("anchors direct mixer previews to the actual compare surface rect", async () => {
const compareSurfaceModule = await vi.importActual<typeof import("../nodes/compare-surface")>(
"../nodes/compare-surface",
);
const ActualCompareSurface = compareSurfaceModule.default;
await act(async () => {
root?.render(
<CanvasGraphProvider nodes={[]} edges={[]}>
<ActualCompareSurface
mixerPreviewState={{
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
}}
nodeWidth={500}
nodeHeight={380}
/>
</CanvasGraphProvider>,
);
});
const images = container?.querySelectorAll("img");
const baseImage = images?.[0];
if (!(baseImage instanceof HTMLImageElement)) {
throw new Error("base image not found");
}
Object.defineProperty(baseImage, "naturalWidth", { configurable: true, value: 200 });
Object.defineProperty(baseImage, "naturalHeight", { configurable: true, value: 100 });
await act(async () => {
baseImage.dispatchEvent(new Event("load"));
});
const overlayImage = container?.querySelectorAll("img")?.[1];
if (!(overlayImage instanceof HTMLImageElement)) {
throw new Error("overlay image not found");
}
Object.defineProperty(overlayImage, "naturalWidth", { configurable: true, value: 200 });
Object.defineProperty(overlayImage, "naturalHeight", { configurable: true, value: 100 });
await act(async () => {
overlayImage.dispatchEvent(new Event("load"));
});
const overlayFrame = overlayImage.parentElement;
expect(overlayFrame?.style.left).toBe("0%");
expect(overlayFrame?.style.top).toBe("17.105263157894736%");
expect(overlayFrame?.style.width).toBe("100%");
expect(overlayFrame?.style.height).toBe("65.78947368421053%");
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
"use client";
import { useCallback, useMemo, useRef, useState } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Handle, Position, type NodeProps } from "@xyflow/react";
import { ImageIcon } from "lucide-react";
import BaseNodeWrapper from "./base-node-wrapper";
@@ -35,12 +35,18 @@ type CompareSideState = {
type CompareDisplayMode = "render" | "preview";
export default function CompareNode({ id, data, selected, width }: NodeProps) {
type CompareSurfaceSize = {
width: number;
height: number;
};
export default function CompareNode({ id, data, selected, width, height }: NodeProps) {
const nodeData = data as CompareNodeData;
const graph = useCanvasGraph();
const [sliderX, setSliderX] = useState(50);
const [manualDisplayMode, setManualDisplayMode] = useState<CompareDisplayMode | null>(null);
const containerRef = useRef<HTMLDivElement>(null);
const [surfaceSize, setSurfaceSize] = useState<CompareSurfaceSize | null>(null);
const incomingEdges = useMemo(
() => graph.incomingEdgesByTarget.get(id) ?? [],
[graph, id],
@@ -73,8 +79,14 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
graph,
});
if (preview.sourceUrl) {
previewInput = {
if (preview.sourceUrl || preview.sourceComposition) {
previewInput = preview.sourceComposition
? {
sourceUrl: null,
sourceComposition: preview.sourceComposition,
steps: preview.steps,
}
: {
sourceUrl: preview.sourceUrl,
steps: preview.steps,
};
@@ -91,6 +103,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
sourceLastUploadedHash ?? sourceLastRenderedHash;
const sourceCurrentHash = resolveRenderPipelineHash({
sourceUrl: preview.sourceUrl,
sourceComposition: preview.sourceComposition,
steps: preview.steps,
data: sourceData,
});
@@ -172,7 +185,60 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
resolvedSides.right.isStaleRenderOutput;
const effectiveDisplayMode =
manualDisplayMode ?? (shouldDefaultToPreview ? "preview" : "render");
const previewNodeWidth = Math.max(240, Math.min(640, Math.round(width ?? 500)));
const fallbackSurfaceWidth = Math.max(240, Math.min(640, Math.round(width ?? 500)));
const fallbackSurfaceHeight = Math.max(180, Math.min(720, Math.round(height ?? 380)));
const previewNodeWidth = Math.max(
1,
Math.round(surfaceSize?.width ?? fallbackSurfaceWidth),
);
const previewNodeHeight = Math.max(
1,
Math.round(surfaceSize?.height ?? fallbackSurfaceHeight),
);
useEffect(() => {
const surfaceElement = containerRef.current;
if (!surfaceElement) {
return;
}
const updateSurfaceSize = (nextWidth: number, nextHeight: number) => {
const roundedWidth = Math.max(1, Math.round(nextWidth));
const roundedHeight = Math.max(1, Math.round(nextHeight));
setSurfaceSize((current) =>
current?.width === roundedWidth && current?.height === roundedHeight
? current
: {
width: roundedWidth,
height: roundedHeight,
},
);
};
const measureSurface = () => {
const rect = surfaceElement.getBoundingClientRect();
updateSurfaceSize(rect.width, rect.height);
};
measureSurface();
if (typeof ResizeObserver === "undefined") {
return undefined;
}
const observer = new ResizeObserver((entries) => {
const entry = entries[0];
if (!entry) {
return;
}
updateSurfaceSize(entry.contentRect.width, entry.contentRect.height);
});
observer.observe(surfaceElement);
return () => observer.disconnect();
}, []);
const setSliderPercent = useCallback((value: number) => {
setSliderX(Math.max(0, Math.min(100, value)));
@@ -314,6 +380,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
previewInput={resolvedSides.right.previewInput}
mixerPreviewState={resolvedSides.right.mixerPreviewState}
nodeWidth={previewNodeWidth}
nodeHeight={previewNodeHeight}
preferPreview={effectiveDisplayMode === "preview"}
/>
)}
@@ -325,6 +392,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
previewInput={resolvedSides.left.previewInput}
mixerPreviewState={resolvedSides.left.mixerPreviewState}
nodeWidth={previewNodeWidth}
nodeHeight={previewNodeHeight}
clipWidthPercent={sliderX}
preferPreview={effectiveDisplayMode === "preview"}
/>

View File

@@ -1,5 +1,7 @@
"use client";
import { useState } from "react";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
import {
@@ -7,8 +9,20 @@ import {
type RenderPreviewInput,
} from "@/lib/canvas-render-preview";
import type { MixerPreviewState } from "@/lib/canvas-mixer-preview";
import {
computeMixerCompareOverlayImageStyle,
computeMixerFrameRectInSurface,
isMixerCropImageReady,
} from "@/lib/mixer-crop-layout";
const EMPTY_STEPS: RenderPreviewInput["steps"] = [];
const ZERO_SIZE = { width: 0, height: 0 };
type LoadedImageState = {
url: string | null;
width: number;
height: number;
};
type CompareSurfaceProps = {
finalUrl?: string;
@@ -16,6 +30,7 @@ type CompareSurfaceProps = {
previewInput?: RenderPreviewInput;
mixerPreviewState?: MixerPreviewState;
nodeWidth: number;
nodeHeight: number;
clipWidthPercent?: number;
preferPreview?: boolean;
};
@@ -26,10 +41,19 @@ export default function CompareSurface({
previewInput,
mixerPreviewState,
nodeWidth,
nodeHeight,
clipWidthPercent,
preferPreview,
}: CompareSurfaceProps) {
const graph = useCanvasGraph();
const [baseImageState, setBaseImageState] = useState<LoadedImageState>({
url: null,
...ZERO_SIZE,
});
const [overlayImageState, setOverlayImageState] = useState<LoadedImageState>({
url: null,
...ZERO_SIZE,
});
const usePreview = Boolean(previewInput && (preferPreview || !finalUrl));
const previewSourceUrl = usePreview ? previewInput?.sourceUrl ?? null : null;
const previewSourceComposition = usePreview ? previewInput?.sourceComposition : undefined;
@@ -66,6 +90,35 @@ export default function CompareSurface({
}
: undefined;
const baseNaturalSize =
mixerPreviewState?.baseUrl && mixerPreviewState.baseUrl === baseImageState.url
? { width: baseImageState.width, height: baseImageState.height }
: ZERO_SIZE;
const overlayNaturalSize =
mixerPreviewState?.overlayUrl && mixerPreviewState.overlayUrl === overlayImageState.url
? { width: overlayImageState.width, height: overlayImageState.height }
: ZERO_SIZE;
const mixerCropReady = isMixerCropImageReady({
currentOverlayUrl: mixerPreviewState?.overlayUrl,
loadedOverlayUrl: overlayImageState.url,
sourceWidth: overlayNaturalSize.width,
sourceHeight: overlayNaturalSize.height,
});
const mixerFrameRect = hasMixerPreview
? computeMixerFrameRectInSurface({
surfaceWidth: nodeWidth,
surfaceHeight: nodeHeight,
baseWidth: baseNaturalSize.width,
baseHeight: baseNaturalSize.height,
overlayX: mixerPreviewState.overlayX,
overlayY: mixerPreviewState.overlayY,
overlayWidth: mixerPreviewState.overlayWidth,
overlayHeight: mixerPreviewState.overlayHeight,
fit: "contain",
})
: null;
return (
<div className="pointer-events-none absolute inset-0" style={clipStyle}>
{visibleFinalUrl ? (
@@ -89,22 +142,62 @@ export default function CompareSurface({
alt={label ?? "Comparison image"}
className="absolute inset-0 h-full w-full object-contain"
draggable={false}
onLoad={(event) => {
setBaseImageState({
url: event.currentTarget.currentSrc || event.currentTarget.src,
width: event.currentTarget.naturalWidth,
height: event.currentTarget.naturalHeight,
});
}}
/>
{mixerFrameRect ? (
<div
className="absolute overflow-hidden"
style={{
mixBlendMode: mixerPreviewState.blendMode,
opacity: mixerPreviewState.opacity / 100,
left: `${mixerFrameRect.x * 100}%`,
top: `${mixerFrameRect.y * 100}%`,
width: `${mixerFrameRect.width * 100}%`,
height: `${mixerFrameRect.height * 100}%`,
}}
>
{/* eslint-disable-next-line @next/next/no-img-element */}
<img
src={mixerPreviewState.overlayUrl}
alt={label ?? "Comparison image"}
className="absolute object-contain"
className="absolute max-w-none"
draggable={false}
style={{
mixBlendMode: mixerPreviewState.blendMode,
opacity: mixerPreviewState.opacity / 100,
left: `${mixerPreviewState.overlayX * 100}%`,
top: `${mixerPreviewState.overlayY * 100}%`,
width: `${mixerPreviewState.overlayWidth * 100}%`,
height: `${mixerPreviewState.overlayHeight * 100}%`,
onLoad={(event) => {
setOverlayImageState({
url: event.currentTarget.currentSrc || event.currentTarget.src,
width: event.currentTarget.naturalWidth,
height: event.currentTarget.naturalHeight,
});
}}
style={
mixerCropReady
? computeMixerCompareOverlayImageStyle({
surfaceWidth: nodeWidth,
surfaceHeight: nodeHeight,
baseWidth: baseNaturalSize.width,
baseHeight: baseNaturalSize.height,
overlayX: mixerPreviewState.overlayX,
overlayY: mixerPreviewState.overlayY,
overlayWidth: mixerPreviewState.overlayWidth,
overlayHeight: mixerPreviewState.overlayHeight,
sourceWidth: overlayNaturalSize.width,
sourceHeight: overlayNaturalSize.height,
cropLeft: mixerPreviewState.cropLeft,
cropTop: mixerPreviewState.cropTop,
cropRight: mixerPreviewState.cropRight,
cropBottom: mixerPreviewState.cropBottom,
})
: { visibility: "hidden" }
}
/>
</div>
) : null}
</>
) : null}

File diff suppressed because it is too large Load Diff

View File

@@ -22,6 +22,25 @@ function logNodeDataDebug(event: string, payload: Record<string, unknown>): void
console.info("[Canvas node debug]", event, payload);
}
function diffNodeData(
before: Record<string, unknown>,
after: Record<string, unknown>,
): Record<string, { before: unknown; after: unknown }> {
const keys = new Set([...Object.keys(before), ...Object.keys(after)]);
const diff: Record<string, { before: unknown; after: unknown }> = {};
for (const key of keys) {
if (before[key] !== after[key]) {
diff[key] = {
before: before[key],
after: after[key],
};
}
}
return diff;
}
export function useNodeLocalData<T>({
nodeId,
data,
@@ -55,6 +74,16 @@ export function useNodeLocalData<T>({
const savedValue = localDataRef.current;
const savedVersion = localChangeVersionRef.current;
logNodeDataDebug("queue-save-flush", {
nodeId,
nodeType: debugLabel,
savedVersion,
changedFields: diffNodeData(
acceptedPersistedDataRef.current as Record<string, unknown>,
savedValue as Record<string, unknown>,
),
});
Promise.resolve(onSave(savedValue))
.then(() => {
if (!isMountedRef.current || savedVersion !== localChangeVersionRef.current) {
@@ -144,7 +173,17 @@ export function useNodeLocalData<T>({
const updateLocalData = useCallback(
(updater: (current: T) => T) => {
const next = updater(localDataRef.current);
const previous = localDataRef.current;
const next = updater(previous);
logNodeDataDebug("local-update", {
nodeId,
nodeType: debugLabel,
changedFields: diffNodeData(
previous as Record<string, unknown>,
next as Record<string, unknown>,
),
});
localChangeVersionRef.current += 1;
hasPendingLocalChangesRef.current = true;
@@ -153,7 +192,7 @@ export function useNodeLocalData<T>({
setPreviewNodeDataOverride(nodeId, next);
queueSave();
},
[nodeId, queueSave, setPreviewNodeDataOverride],
[debugLabel, nodeId, queueSave, setPreviewNodeDataOverride],
);
return {

View File

@@ -23,6 +23,10 @@ export type MixerPreviewState = {
overlayY: number;
overlayWidth: number;
overlayHeight: number;
cropLeft: number;
cropTop: number;
cropRight: number;
cropBottom: number;
error?: MixerPreviewError;
};
@@ -41,6 +45,10 @@ const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;
const DEFAULT_CROP_LEFT = 0;
const DEFAULT_CROP_TOP = 0;
const DEFAULT_CROP_RIGHT = 0;
const DEFAULT_CROP_BOTTOM = 0;
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
@@ -81,6 +89,37 @@ function normalizeOverlayNumber(value: unknown, fallback: number): number {
return parsed;
}
function normalizeUnitRect(args: {
x: unknown;
y: unknown;
width: unknown;
height: unknown;
defaults: { x: number; y: number; width: number; height: number };
}): { x: number; y: number; width: number; height: number } {
const x = clamp(
normalizeOverlayNumber(args.x, args.defaults.x),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
);
const y = clamp(
normalizeOverlayNumber(args.y, args.defaults.y),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
);
const width = clamp(
normalizeOverlayNumber(args.width, args.defaults.width),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - x),
);
const height = clamp(
normalizeOverlayNumber(args.height, args.defaults.height),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - y),
);
return { x, y, width, height };
}
function normalizeOverlayRect(record: Record<string, unknown>): Pick<
MixerPreviewState,
"overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
@@ -101,38 +140,105 @@ function normalizeOverlayRect(record: Record<string, unknown>): Pick<
};
}
const overlayX = clamp(
normalizeOverlayNumber(record.overlayX, DEFAULT_OVERLAY_X),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
const normalized = normalizeUnitRect({
x: record.overlayX,
y: record.overlayY,
width: record.overlayWidth,
height: record.overlayHeight,
defaults: {
x: DEFAULT_OVERLAY_X,
y: DEFAULT_OVERLAY_Y,
width: DEFAULT_OVERLAY_WIDTH,
height: DEFAULT_OVERLAY_HEIGHT,
},
});
return {
overlayX: normalized.x,
overlayY: normalized.y,
overlayWidth: normalized.width,
overlayHeight: normalized.height,
};
}
function normalizeCropEdges(record: Record<string, unknown>): Pick<
MixerPreviewState,
"cropLeft" | "cropTop" | "cropRight" | "cropBottom"
> {
const hasCropField =
record.cropLeft !== undefined ||
record.cropTop !== undefined ||
record.cropRight !== undefined ||
record.cropBottom !== undefined;
const hasLegacyContentRectField =
record.contentX !== undefined ||
record.contentY !== undefined ||
record.contentWidth !== undefined ||
record.contentHeight !== undefined;
if (!hasCropField && hasLegacyContentRectField) {
const legacyRect = normalizeUnitRect({
x: record.contentX,
y: record.contentY,
width: record.contentWidth,
height: record.contentHeight,
defaults: {
x: 0,
y: 0,
width: 1,
height: 1,
},
});
return {
cropLeft: legacyRect.x,
cropTop: legacyRect.y,
cropRight: 1 - (legacyRect.x + legacyRect.width),
cropBottom: 1 - (legacyRect.y + legacyRect.height),
};
}
const cropLeft = clamp(
normalizeOverlayNumber(record.cropLeft, DEFAULT_CROP_LEFT),
0,
1 - MIN_OVERLAY_SIZE,
);
const overlayY = clamp(
normalizeOverlayNumber(record.overlayY, DEFAULT_OVERLAY_Y),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
const cropTop = clamp(
normalizeOverlayNumber(record.cropTop, DEFAULT_CROP_TOP),
0,
1 - MIN_OVERLAY_SIZE,
);
const overlayWidth = clamp(
normalizeOverlayNumber(record.overlayWidth, DEFAULT_OVERLAY_WIDTH),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX),
const cropRight = clamp(
normalizeOverlayNumber(record.cropRight, DEFAULT_CROP_RIGHT),
0,
1 - cropLeft - MIN_OVERLAY_SIZE,
);
const overlayHeight = clamp(
normalizeOverlayNumber(record.overlayHeight, DEFAULT_OVERLAY_HEIGHT),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY),
const cropBottom = clamp(
normalizeOverlayNumber(record.cropBottom, DEFAULT_CROP_BOTTOM),
0,
1 - cropTop - MIN_OVERLAY_SIZE,
);
return {
overlayX,
overlayY,
overlayWidth,
overlayHeight,
cropLeft,
cropTop,
cropRight,
cropBottom,
};
}
export function normalizeMixerPreviewData(data: unknown): Pick<
MixerPreviewState,
"blendMode" | "opacity" | "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight"
| "blendMode"
| "opacity"
| "overlayX"
| "overlayY"
| "overlayWidth"
| "overlayHeight"
| "cropLeft"
| "cropTop"
| "cropRight"
| "cropBottom"
> {
const record = (data ?? {}) as Record<string, unknown>;
const blendMode = MIXER_BLEND_MODES.has(record.blendMode as MixerBlendMode)
@@ -143,6 +249,7 @@ export function normalizeMixerPreviewData(data: unknown): Pick<
blendMode,
opacity: normalizeOpacity(record.opacity),
...normalizeOverlayRect(record),
...normalizeCropEdges(record),
};
}
@@ -174,6 +281,17 @@ function resolveSourceUrlFromNode(args: {
}
if (args.sourceNode.type === "render") {
const preview = resolveRenderPreviewInputFromGraph({
nodeId: args.sourceNode.id,
graph: args.graph,
});
if (preview.sourceComposition) {
return undefined;
}
if (preview.sourceUrl) {
return preview.sourceUrl;
}
const renderData = (args.sourceNode.data ?? {}) as Record<string, unknown>;
const renderOutputUrl =
typeof renderData.lastUploadUrl === "string" && renderData.lastUploadUrl.length > 0
@@ -188,11 +306,7 @@ function resolveSourceUrlFromNode(args: {
return directRenderUrl;
}
const preview = resolveRenderPreviewInputFromGraph({
nodeId: args.sourceNode.id,
graph: args.graph,
});
return preview.sourceUrl ?? undefined;
return undefined;
}
return resolveNodeImageUrl(args.sourceNode.data) ?? undefined;

View File

@@ -55,6 +55,10 @@ export const CANVAS_NODE_TEMPLATES = [
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
},
{

View File

@@ -32,6 +32,10 @@ export type RenderPreviewSourceComposition = {
overlayY: number;
overlayWidth: number;
overlayHeight: number;
cropLeft: number;
cropTop: number;
cropRight: number;
cropBottom: number;
};
export type CanvasGraphNodeLike = {
@@ -161,6 +165,10 @@ const DEFAULT_OVERLAY_X = 0;
const DEFAULT_OVERLAY_Y = 0;
const DEFAULT_OVERLAY_WIDTH = 1;
const DEFAULT_OVERLAY_HEIGHT = 1;
const DEFAULT_CROP_LEFT = 0;
const DEFAULT_CROP_TOP = 0;
const DEFAULT_CROP_RIGHT = 0;
const DEFAULT_CROP_BOTTOM = 0;
const MIN_OVERLAY_POSITION = 0;
const MAX_OVERLAY_POSITION = 1;
const MIN_OVERLAY_SIZE = 0.1;
@@ -250,6 +258,80 @@ function normalizeMixerCompositionRect(data: Record<string, unknown>): Pick<
};
}
function normalizeMixerCompositionCropEdges(data: Record<string, unknown>): Pick<
RenderPreviewSourceComposition,
"cropLeft" | "cropTop" | "cropRight" | "cropBottom"
> {
const hasCropField =
data.cropLeft !== undefined ||
data.cropTop !== undefined ||
data.cropRight !== undefined ||
data.cropBottom !== undefined;
const hasLegacyContentRectField =
data.contentX !== undefined ||
data.contentY !== undefined ||
data.contentWidth !== undefined ||
data.contentHeight !== undefined;
if (!hasCropField && hasLegacyContentRectField) {
const contentX = clamp(
normalizeOverlayNumber(data.contentX, 0),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
);
const contentY = clamp(
normalizeOverlayNumber(data.contentY, 0),
MIN_OVERLAY_POSITION,
MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE,
);
const contentWidth = clamp(
normalizeOverlayNumber(data.contentWidth, 1),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentX),
);
const contentHeight = clamp(
normalizeOverlayNumber(data.contentHeight, 1),
MIN_OVERLAY_SIZE,
Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentY),
);
return {
cropLeft: contentX,
cropTop: contentY,
cropRight: 1 - (contentX + contentWidth),
cropBottom: 1 - (contentY + contentHeight),
};
}
const cropLeft = clamp(
normalizeOverlayNumber(data.cropLeft, DEFAULT_CROP_LEFT),
0,
1 - MIN_OVERLAY_SIZE,
);
const cropTop = clamp(
normalizeOverlayNumber(data.cropTop, DEFAULT_CROP_TOP),
0,
1 - MIN_OVERLAY_SIZE,
);
const cropRight = clamp(
normalizeOverlayNumber(data.cropRight, DEFAULT_CROP_RIGHT),
0,
1 - cropLeft - MIN_OVERLAY_SIZE,
);
const cropBottom = clamp(
normalizeOverlayNumber(data.cropBottom, DEFAULT_CROP_BOTTOM),
0,
1 - cropTop - MIN_OVERLAY_SIZE,
);
return {
cropLeft,
cropTop,
cropRight,
cropBottom,
};
}
export function resolveRenderFingerprint(data: unknown): {
resolution: RenderResolutionOption;
customWidth?: number;
@@ -379,11 +461,6 @@ function resolveMixerSourceUrlFromNode(args: {
}
if (args.node.type === "render") {
const directRenderUrl = resolveRenderOutputUrl(args.node);
if (directRenderUrl) {
return directRenderUrl;
}
const preview = resolveRenderPreviewInputFromGraph({
nodeId: args.node.id,
graph: args.graph,
@@ -391,10 +468,18 @@ function resolveMixerSourceUrlFromNode(args: {
if (preview.sourceComposition) {
return null;
}
if (preview.sourceUrl) {
return preview.sourceUrl;
}
const directRenderUrl = resolveRenderOutputUrl(args.node);
if (directRenderUrl) {
return directRenderUrl;
}
return null;
}
return resolveNodeImageUrl(args.node.data);
}
@@ -443,6 +528,7 @@ function resolveRenderMixerCompositionFromGraph(args: {
blendMode,
opacity: normalizeOpacity(data.opacity),
...normalizeMixerCompositionRect(data),
...normalizeMixerCompositionCropEdges(data),
};
}

View File

@@ -303,6 +303,10 @@ export const NODE_DEFAULTS: Record<
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
},
"agent-output": {

View File

@@ -34,6 +34,10 @@ export type RenderSourceComposition = {
overlayY: number;
overlayWidth: number;
overlayHeight: number;
cropLeft: number;
cropTop: number;
cropRight: number;
cropBottom: number;
};
export type ResolvedRenderSize = {

View File

@@ -1,3 +1,6 @@
import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types";
import { computeVisibleMixerContentRect } from "@/lib/mixer-crop-layout";
export const SOURCE_BITMAP_CACHE_MAX_ENTRIES = 32;
type CacheEntry = {
@@ -12,18 +15,6 @@ type LoadSourceBitmapOptions = {
signal?: AbortSignal;
};
type RenderSourceComposition = {
kind: "mixer";
baseUrl: string;
overlayUrl: string;
blendMode: "normal" | "multiply" | "screen" | "overlay";
opacity: number;
overlayX: number;
overlayY: number;
overlayWidth: number;
overlayHeight: number;
};
type LoadRenderSourceBitmapOptions = {
sourceUrl?: string;
sourceComposition?: RenderSourceComposition;
@@ -302,61 +293,63 @@ function normalizeMixerRect(source: RenderSourceComposition): {
};
}
function computeObjectCoverSourceRect(args: {
sourceWidth: number;
sourceHeight: number;
destinationWidth: number;
destinationHeight: number;
}): {
sourceX: number;
sourceY: number;
sourceWidth: number;
sourceHeight: number;
function normalizeMixerCropEdges(source: RenderSourceComposition): {
left: number;
top: number;
right: number;
bottom: number;
} {
const { sourceWidth, sourceHeight, destinationWidth, destinationHeight } = args;
const legacySource = source as RenderSourceComposition & {
contentX?: number;
contentY?: number;
contentWidth?: number;
contentHeight?: number;
};
const hasLegacyContentRect =
legacySource.contentX !== undefined ||
legacySource.contentY !== undefined ||
legacySource.contentWidth !== undefined ||
legacySource.contentHeight !== undefined;
if (hasLegacyContentRect) {
const contentX = Math.max(
0,
Math.min(0.9, normalizeRatio(legacySource.contentX ?? Number.NaN, 0)),
);
const contentY = Math.max(
0,
Math.min(0.9, normalizeRatio(legacySource.contentY ?? Number.NaN, 0)),
);
const contentWidth = Math.max(
0.1,
Math.min(1, normalizeRatio(legacySource.contentWidth ?? Number.NaN, 1), 1 - contentX),
);
const contentHeight = Math.max(
0.1,
Math.min(1, normalizeRatio(legacySource.contentHeight ?? Number.NaN, 1), 1 - contentY),
);
if (
sourceWidth <= 0 ||
sourceHeight <= 0 ||
destinationWidth <= 0 ||
destinationHeight <= 0
) {
return {
sourceX: 0,
sourceY: 0,
sourceWidth,
sourceHeight,
left: contentX,
top: contentY,
right: 1 - (contentX + contentWidth),
bottom: 1 - (contentY + contentHeight),
};
}
const sourceAspectRatio = sourceWidth / sourceHeight;
const destinationAspectRatio = destinationWidth / destinationHeight;
const cropLeft = Math.max(0, Math.min(0.9, normalizeRatio(source.cropLeft, 0)));
const cropTop = Math.max(0, Math.min(0.9, normalizeRatio(source.cropTop, 0)));
const cropRight = Math.max(0, Math.min(1 - cropLeft - 0.1, normalizeRatio(source.cropRight, 0)));
const cropBottom = Math.max(
0,
Math.min(1 - cropTop - 0.1, normalizeRatio(source.cropBottom, 0)),
);
if (!Number.isFinite(sourceAspectRatio) || !Number.isFinite(destinationAspectRatio)) {
return {
sourceX: 0,
sourceY: 0,
sourceWidth,
sourceHeight,
};
}
if (sourceAspectRatio > destinationAspectRatio) {
const croppedWidth = sourceHeight * destinationAspectRatio;
return {
sourceX: (sourceWidth - croppedWidth) / 2,
sourceY: 0,
sourceWidth: croppedWidth,
sourceHeight,
};
}
const croppedHeight = sourceWidth / destinationAspectRatio;
return {
sourceX: 0,
sourceY: (sourceHeight - croppedHeight) / 2,
sourceWidth,
sourceHeight: croppedHeight,
left: cropLeft,
top: cropTop,
right: cropRight,
bottom: cropBottom,
};
}
@@ -381,32 +374,49 @@ async function loadMixerCompositionBitmap(
context.drawImage(baseBitmap, 0, 0, baseBitmap.width, baseBitmap.height);
const rect = normalizeMixerRect(sourceComposition);
const destinationX = rect.x * baseBitmap.width;
const destinationY = rect.y * baseBitmap.height;
const destinationWidth = rect.width * baseBitmap.width;
const destinationHeight = rect.height * baseBitmap.height;
const sourceRect = computeObjectCoverSourceRect({
const frameX = rect.x * baseBitmap.width;
const frameY = rect.y * baseBitmap.height;
const frameWidth = rect.width * baseBitmap.width;
const frameHeight = rect.height * baseBitmap.height;
const cropEdges = normalizeMixerCropEdges(sourceComposition);
const sourceX = cropEdges.left * overlayBitmap.width;
const sourceY = cropEdges.top * overlayBitmap.height;
const sourceWidth = (1 - cropEdges.left - cropEdges.right) * overlayBitmap.width;
const sourceHeight = (1 - cropEdges.top - cropEdges.bottom) * overlayBitmap.height;
const visibleRect = computeVisibleMixerContentRect({
frameAspectRatio: frameHeight > 0 ? frameWidth / frameHeight : 1,
sourceWidth: overlayBitmap.width,
sourceHeight: overlayBitmap.height,
destinationWidth,
destinationHeight,
cropLeft: cropEdges.left,
cropTop: cropEdges.top,
cropRight: cropEdges.right,
cropBottom: cropEdges.bottom,
});
const destX = frameX + (visibleRect?.x ?? 0) * frameWidth;
const destY = frameY + (visibleRect?.y ?? 0) * frameHeight;
const destWidth = (visibleRect?.width ?? 1) * frameWidth;
const destHeight = (visibleRect?.height ?? 1) * frameHeight;
context.globalCompositeOperation = mixerBlendModeToCompositeOperation(
sourceComposition.blendMode,
);
context.globalAlpha = normalizeCompositionOpacity(sourceComposition.opacity);
context.save();
context.beginPath();
context.rect(frameX, frameY, frameWidth, frameHeight);
context.clip();
context.drawImage(
overlayBitmap,
sourceRect.sourceX,
sourceRect.sourceY,
sourceRect.sourceWidth,
sourceRect.sourceHeight,
destinationX,
destinationY,
destinationWidth,
destinationHeight,
sourceX,
sourceY,
sourceWidth,
sourceHeight,
destX,
destY,
destWidth,
destHeight,
);
context.restore();
context.globalCompositeOperation = "source-over";
context.globalAlpha = 1;

219
lib/mixer-crop-layout.ts Normal file
View File

@@ -0,0 +1,219 @@
// Smallest fraction of a source axis that must survive cropping (10%).
const MIN_CROP_REMAINING_SIZE = 0.1;

// How a source rect is fitted into bounds: letterboxed or edge-cropped.
type MixerSurfaceFit = "contain" | "cover";

/**
 * Renders a numeric ratio as a CSS percentage string.
 * Magnitudes below 1e-10 collapse to "0%" so floating-point residue does not
 * leak into style output (e.g. "-0%" or exponent notation).
 */
function formatPercent(value: number): string {
  if (Math.abs(value) < 1e-10) {
    return "0%";
  }
  return `${value}%`;
}
/**
 * Fits a source rectangle into bounds, centered, preserving aspect ratio.
 * "contain" letterboxes (whole source visible); "cover" fills the bounds
 * (source may overflow symmetrically). Degenerate inputs — any non-positive
 * dimension or a non-finite/non-positive scale — fall back to the bounds.
 */
function computeFittedRect(args: {
  sourceWidth: number;
  sourceHeight: number;
  boundsX: number;
  boundsY: number;
  boundsWidth: number;
  boundsHeight: number;
  fit?: MixerSurfaceFit;
}): { x: number; y: number; width: number; height: number } {
  const fallback = {
    x: args.boundsX,
    y: args.boundsY,
    width: args.boundsWidth,
    height: args.boundsHeight,
  };
  const hasArea =
    args.sourceWidth > 0 &&
    args.sourceHeight > 0 &&
    args.boundsWidth > 0 &&
    args.boundsHeight > 0;
  if (!hasArea) {
    return fallback;
  }
  const widthRatio = args.boundsWidth / args.sourceWidth;
  const heightRatio = args.boundsHeight / args.sourceHeight;
  const scale =
    (args.fit ?? "contain") === "cover"
      ? Math.max(widthRatio, heightRatio)
      : Math.min(widthRatio, heightRatio);
  if (!Number.isFinite(scale) || scale <= 0) {
    return fallback;
  }
  const fittedWidth = args.sourceWidth * scale;
  const fittedHeight = args.sourceHeight * scale;
  // Center the scaled source within the bounds on both axes.
  return {
    x: args.boundsX + (args.boundsWidth - fittedWidth) / 2,
    y: args.boundsY + (args.boundsHeight - fittedHeight) / 2,
    width: fittedWidth,
    height: fittedHeight,
  };
}
/**
 * Maps the overlay frame (expressed relative to the fitted base image) into
 * normalized surface coordinates (fractions 0..1 of the surface box).
 * Returns null when the base or the surface has no positive area.
 */
export function computeMixerFrameRectInSurface(args: {
  surfaceWidth: number;
  surfaceHeight: number;
  baseWidth: number;
  baseHeight: number;
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  fit?: MixerSurfaceFit;
}): { x: number; y: number; width: number; height: number } | null {
  const { surfaceWidth, surfaceHeight, baseWidth, baseHeight } = args;
  if (baseWidth <= 0 || baseHeight <= 0 || surfaceWidth <= 0 || surfaceHeight <= 0) {
    return null;
  }
  // Where the base image lands inside the surface ("contain" by default).
  const baseRect = computeFittedRect({
    sourceWidth: baseWidth,
    sourceHeight: baseHeight,
    boundsX: 0,
    boundsY: 0,
    boundsWidth: surfaceWidth,
    boundsHeight: surfaceHeight,
    fit: args.fit,
  });
  // Re-express the overlay frame in surface-relative fractions.
  return {
    x: (baseRect.x + args.overlayX * baseRect.width) / surfaceWidth,
    y: (baseRect.y + args.overlayY * baseRect.height) / surfaceHeight,
    width: (args.overlayWidth * baseRect.width) / surfaceWidth,
    height: (args.overlayHeight * baseRect.height) / surfaceHeight,
  };
}
/**
 * Computes where the cropped overlay content sits inside the overlay frame,
 * contain-fitted and centered, in frame-relative fractions (0..1 per axis).
 * Returns null when the source has no positive area.
 */
export function computeVisibleMixerContentRect(args: {
  frameAspectRatio: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}): { x: number; y: number; width: number; height: number } | null {
  if (args.sourceWidth <= 0 || args.sourceHeight <= 0) {
    return null;
  }
  // Fraction of each source axis that survives the edge trims (floored so a
  // minimum sliver always remains).
  const keptWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE);
  const keptHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE);
  const aspect = args.frameAspectRatio > 0 ? args.frameAspectRatio : 1;
  // Fit the trimmed source into a unit-height box whose width is the frame's
  // aspect ratio, then renormalize x/width back to frame fractions.
  const fitted = computeFittedRect({
    sourceWidth: args.sourceWidth * keptWidth,
    sourceHeight: args.sourceHeight * keptHeight,
    boundsX: 0,
    boundsY: 0,
    boundsWidth: aspect,
    boundsHeight: 1,
  });
  return {
    x: fitted.x / aspect,
    y: fitted.y,
    width: fitted.width / aspect,
    height: fitted.height,
  };
}
/**
 * Produces CSS percentage offsets/sizes for the <img> inside a crop window so
 * the trimmed region fills the window while the content stays contain-fitted
 * to the frame's aspect ratio (letterboxing instead of stretching).
 */
export function computeMixerCropImageStyle(args: {
  frameAspectRatio: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}) {
  // Surviving fraction per axis after the edge trims (floored to a minimum).
  const keptWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE);
  const keptHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE);
  const visibleRect = computeVisibleMixerContentRect(args);
  if (!visibleRect) {
    // Degenerate source: plain crop-window transform without fit math.
    return {
      left: formatPercent((-args.cropLeft / keptWidth) * 100),
      top: formatPercent((-args.cropTop / keptHeight) * 100),
      width: formatPercent((1 / keptWidth) * 100),
      height: formatPercent((1 / keptHeight) * 100),
    } as const;
  }
  // Shift the image so the trimmed edges fall outside the window, then scale
  // it up so the kept region spans the visible rect.
  const offsetLeft = visibleRect.x - (args.cropLeft / keptWidth) * visibleRect.width;
  const offsetTop = visibleRect.y - (args.cropTop / keptHeight) * visibleRect.height;
  return {
    left: formatPercent(offsetLeft * 100),
    top: formatPercent(offsetTop * 100),
    width: formatPercent((visibleRect.width / keptWidth) * 100),
    height: formatPercent((visibleRect.height / keptHeight) * 100),
  } as const;
}
/**
 * Like computeMixerCropImageStyle, but derives the frame's *pixel* aspect
 * ratio from where the overlay frame actually lands on the compare surface
 * (base-aware), falling back to the raw overlay ratio, then 1.
 */
export function computeMixerCompareOverlayImageStyle(args: {
  surfaceWidth: number;
  surfaceHeight: number;
  baseWidth: number;
  baseHeight: number;
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
  sourceWidth: number;
  sourceHeight: number;
  cropLeft: number;
  cropTop: number;
  cropRight: number;
  cropBottom: number;
}) {
  const frameRect = computeMixerFrameRectInSurface({
    surfaceWidth: args.surfaceWidth,
    surfaceHeight: args.surfaceHeight,
    baseWidth: args.baseWidth,
    baseHeight: args.baseHeight,
    overlayX: args.overlayX,
    overlayY: args.overlayY,
    overlayWidth: args.overlayWidth,
    overlayHeight: args.overlayHeight,
  });
  let frameAspectRatio = 1;
  if (frameRect && frameRect.width > 0 && frameRect.height > 0) {
    // frameRect is in surface fractions; multiply back to pixels for the ratio.
    frameAspectRatio =
      (frameRect.width * args.surfaceWidth) / (frameRect.height * args.surfaceHeight);
  } else if (args.overlayWidth > 0 && args.overlayHeight > 0) {
    frameAspectRatio = args.overlayWidth / args.overlayHeight;
  }
  return computeMixerCropImageStyle({
    frameAspectRatio,
    sourceWidth: args.sourceWidth,
    sourceHeight: args.sourceHeight,
    cropLeft: args.cropLeft,
    cropTop: args.cropTop,
    cropRight: args.cropRight,
    cropBottom: args.cropBottom,
  });
}
/**
 * True once the overlay image for the crop UI is safe to measure against:
 * an overlay URL is set, the loaded image matches that exact URL, and the
 * natural dimensions are known (positive).
 */
export function isMixerCropImageReady(args: {
  currentOverlayUrl: string | null | undefined;
  loadedOverlayUrl: string | null;
  sourceWidth: number;
  sourceHeight: number;
}): boolean {
  if (!args.currentOverlayUrl) {
    return false;
  }
  if (args.loadedOverlayUrl !== args.currentOverlayUrl) {
    return false;
  }
  return args.sourceWidth > 0 && args.sourceHeight > 0;
}

View File

@@ -83,6 +83,10 @@ describe("image-pipeline.worker full render", () => {
overlayY: 16,
overlayWidth: 128,
overlayHeight: 64,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
};
workerScope.onmessage?.({

View File

@@ -356,7 +356,7 @@ describe("loadSourceBitmap", () => {
expect(revokeObjectUrl).toHaveBeenCalledWith("blob:video-source");
});
it("renders mixer overlays with object-cover semantics instead of stretching", async () => {
it("renders non-square mixer overlays with contain-fit parity instead of stretching", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
@@ -367,6 +367,11 @@ describe("loadSourceBitmap", () => {
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
@@ -420,7 +425,7 @@ describe("loadSourceBitmap", () => {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in mixer cover-fit test.");
throw new Error("Unexpected createImageBitmap input in mixer contain-fit test.");
}),
);
@@ -438,22 +443,358 @@ describe("loadSourceBitmap", () => {
overlayY: 0.2,
overlayWidth: 0.25,
overlayHeight: 0.5,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
}),
).resolves.toBe(composedBitmap);
expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100);
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(0);
expect(overlayDrawArgs?.[2]).toBe(0);
expect(overlayDrawArgs?.[3]).toBe(200);
expect(overlayDrawArgs?.[4]).toBe(100);
expect(overlayDrawArgs?.[5]).toBe(10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(38.75, 10);
expect(overlayDrawArgs?.[7]).toBe(25);
expect(overlayDrawArgs?.[8]).toBeCloseTo(12.5, 10);
});
it("applies mixer crop framing by trimming source edges while leaving the displayed frame size untouched", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const save = vi.fn();
const restore = vi.fn();
const beginPath = vi.fn();
const rect = vi.fn();
const clip = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save,
restore,
beginPath,
rect,
clip,
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in mixer content framing test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.4,
cropLeft: 0.5,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
},
}),
).resolves.toBe(composedBitmap);
expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100);
expect(save).toHaveBeenCalledTimes(1);
expect(beginPath).toHaveBeenCalledTimes(1);
expect(rect).toHaveBeenCalledWith(10, 20, 40, 40);
expect(clip).toHaveBeenCalledTimes(1);
expect(drawImage).toHaveBeenNthCalledWith(
2,
overlayBitmap,
75,
100,
0,
50,
100,
100,
10,
20,
25,
50,
40,
40,
);
expect(restore).toHaveBeenCalledTimes(1);
});
it("keeps overlayWidth and overlayHeight fixed while crop framing trims the sampled source region", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in overlay size preservation test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.15,
overlayY: 0.25,
overlayWidth: 0.5,
overlayHeight: 0.3,
cropLeft: 0.25,
cropTop: 0.1,
cropRight: 0.25,
cropBottom: 0.3,
},
}),
).resolves.toBe(composedBitmap);
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(50);
expect(overlayDrawArgs?.[2]).toBe(10);
expect(overlayDrawArgs?.[3]).toBe(100);
expect(overlayDrawArgs?.[4]).toBeCloseTo(60, 10);
expect(overlayDrawArgs?.[5]).toBeCloseTo(15, 10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(25, 10);
expect(overlayDrawArgs?.[7]).toBeCloseTo(50, 10);
expect(overlayDrawArgs?.[8]).toBeCloseTo(30, 10);
});
it("contains a cropped wide source within the overlay frame during bake", async () => {
const baseBlob = new Blob(["base"]);
const overlayBlob = new Blob(["overlay"]);
const baseBitmap = { width: 100, height: 100 } as ImageBitmap;
const overlayBitmap = { width: 200, height: 100 } as ImageBitmap;
const composedBitmap = { width: 100, height: 100 } as ImageBitmap;
const drawImage = vi.fn();
const context = {
clearRect: vi.fn(),
drawImage,
save: vi.fn(),
restore: vi.fn(),
beginPath: vi.fn(),
rect: vi.fn(),
clip: vi.fn(),
globalCompositeOperation: "source-over" as GlobalCompositeOperation,
globalAlpha: 1,
};
const canvas = {
width: 0,
height: 0,
getContext: vi.fn().mockReturnValue(context),
} as unknown as HTMLCanvasElement;
const nativeCreateElement = document.createElement.bind(document);
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "canvas") {
return canvas;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal(
"fetch",
vi.fn().mockImplementation(async (input: string | URL | Request) => {
const url = String(input);
if (url.includes("base.png")) {
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(baseBlob),
};
}
return {
ok: true,
status: 200,
headers: { get: vi.fn().mockReturnValue("image/png") },
blob: vi.fn().mockResolvedValue(overlayBlob),
};
}),
);
vi.stubGlobal(
"createImageBitmap",
vi.fn().mockImplementation(async (input: unknown) => {
if (input === baseBlob) {
return baseBitmap;
}
if (input === overlayBlob) {
return overlayBitmap;
}
if (input === canvas) {
return composedBitmap;
}
throw new Error("Unexpected createImageBitmap input in aspect-aware crop bake test.");
}),
);
const { loadRenderSourceBitmap } = await importSubject();
await expect(
loadRenderSourceBitmap({
sourceComposition: {
kind: "mixer",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/overlay.png",
blendMode: "overlay",
opacity: 80,
overlayX: 0.1,
overlayY: 0.2,
overlayWidth: 0.4,
overlayHeight: 0.4,
cropLeft: 0,
cropTop: 0.25,
cropRight: 0,
cropBottom: 0.25,
},
}),
).resolves.toBe(composedBitmap);
const overlayDrawArgs = drawImage.mock.calls[1];
expect(overlayDrawArgs?.[0]).toBe(overlayBitmap);
expect(overlayDrawArgs?.[1]).toBe(0);
expect(overlayDrawArgs?.[2]).toBe(25);
expect(overlayDrawArgs?.[3]).toBe(200);
expect(overlayDrawArgs?.[4]).toBe(50);
expect(overlayDrawArgs?.[5]).toBe(10);
expect(overlayDrawArgs?.[6]).toBeCloseTo(35, 10);
expect(overlayDrawArgs?.[7]).toBe(40);
expect(overlayDrawArgs?.[8]).toBeCloseTo(10, 10);
});
});

View File

@@ -4,7 +4,7 @@ import { buildGraphSnapshot } from "@/lib/canvas-render-preview";
import { resolveMixerPreviewFromGraph } from "@/lib/canvas-mixer-preview";
describe("resolveMixerPreviewFromGraph", () => {
it("resolves base and overlay URLs by target handle", () => {
it("resolves base and overlay URLs by target handle while keeping frame and crop trims independent", () => {
const graph = buildGraphSnapshot(
[
{
@@ -32,6 +32,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
},
],
@@ -52,10 +56,110 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0.2,
overlayWidth: 0.6,
overlayHeight: 0.5,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
});
});
it("prefers render output URL over upstream preview source when available", () => {
it("preserves crop trims when frame resize data changes", () => {
const graph = buildGraphSnapshot(
[
{
id: "image-base",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.6,
overlayHeight: 0.3,
cropLeft: 0.15,
cropTop: 0.05,
cropRight: 0.4,
cropBottom: 0.25,
},
},
],
[
{ source: "image-base", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual(
expect.objectContaining({
overlayX: 0.2,
overlayY: 0.1,
overlayWidth: 0.6,
overlayHeight: 0.3,
cropLeft: 0.15,
cropTop: 0.05,
cropRight: 0.4,
cropBottom: 0.25,
}),
);
});
it("preserves overlayWidth and overlayHeight when crop trims change", () => {
const graph = buildGraphSnapshot(
[
{
id: "image-base",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
overlayX: 0.05,
overlayY: 0.25,
overlayWidth: 0.55,
overlayHeight: 0.35,
cropLeft: 0.4,
cropTop: 0.1,
cropRight: 0.3,
cropBottom: 0.1,
},
},
],
[
{ source: "image-base", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual(
expect.objectContaining({
overlayX: 0.05,
overlayY: 0.25,
overlayWidth: 0.55,
overlayHeight: 0.35,
cropLeft: 0.4,
cropTop: 0.1,
cropRight: 0.3,
cropBottom: 0.1,
}),
);
});
it("prefers live render preview URL over stale baked render output", () => {
const graph = buildGraphSnapshot(
[
{
@@ -91,13 +195,79 @@ describe("resolveMixerPreviewFromGraph", () => {
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: "https://cdn.example.com/render-output.png",
overlayUrl: "https://cdn.example.com/upstream.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("does not reuse stale baked render output when only live sourceComposition exists", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-image",
type: "image",
data: { url: "https://cdn.example.com/base.png" },
},
{
id: "overlay-base",
type: "image",
data: { url: "https://cdn.example.com/overlay-base.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "upstream-mixer",
type: "mixer",
data: {},
},
{
id: "render-overlay",
type: "render",
data: {
lastUploadUrl: "https://cdn.example.com/stale-render-output.png",
},
},
{
id: "mixer-1",
type: "mixer",
data: {},
},
],
[
{ source: "overlay-base", target: "upstream-mixer", targetHandle: "base" },
{ source: "overlay-asset", target: "upstream-mixer", targetHandle: "overlay" },
{ source: "upstream-mixer", target: "render-overlay" },
{ source: "base-image", target: "mixer-1", targetHandle: "base" },
{ source: "render-overlay", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "partial",
baseUrl: "https://cdn.example.com/base.png",
overlayUrl: undefined,
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -128,10 +298,14 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("normalizes rect values and clamps", () => {
it("normalizes crop trims and clamps", () => {
const graph = buildGraphSnapshot(
[
{
@@ -154,6 +328,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: "1.4",
overlayWidth: 2,
overlayHeight: 0,
cropLeft: "0.95",
cropTop: -2,
cropRight: "4",
cropBottom: "0",
},
},
],
@@ -173,6 +351,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0.9,
overlayWidth: 1,
overlayHeight: 0.1,
cropLeft: 0.9,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -214,6 +396,57 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
it("maps legacy content rect fields into crop trims during normalization", () => {
const graph = buildGraphSnapshot(
[
{
id: "base-ai",
type: "ai-image",
data: { url: "https://cdn.example.com/base-ai.png" },
},
{
id: "overlay-asset",
type: "asset",
data: { url: "https://cdn.example.com/overlay-asset.png" },
},
{
id: "mixer-1",
type: "mixer",
data: {
contentX: 0.2,
contentY: 0.1,
contentWidth: 0.5,
contentHeight: 0.6,
},
},
],
[
{ source: "base-ai", target: "mixer-1", targetHandle: "base" },
{ source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" },
],
);
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({
status: "ready",
baseUrl: "https://cdn.example.com/base-ai.png",
overlayUrl: "https://cdn.example.com/overlay-asset.png",
blendMode: "normal",
opacity: 100,
overlayX: 0,
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0.2,
cropTop: 0.1,
cropRight: 0.30000000000000004,
cropBottom: 0.30000000000000004,
});
});
@@ -255,6 +488,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -293,6 +530,10 @@ describe("resolveMixerPreviewFromGraph", () => {
overlayY: 0,
overlayWidth: 1,
overlayHeight: 1,
cropLeft: 0,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
error: "duplicate-handle-edge",
});
});

View File

@@ -4,6 +4,13 @@ import {
buildGraphSnapshot,
resolveRenderPreviewInputFromGraph,
} from "@/lib/canvas-render-preview";
import {
computeMixerCompareOverlayImageStyle,
computeMixerFrameRectInSurface,
computeVisibleMixerContentRect,
computeMixerCropImageStyle,
isMixerCropImageReady,
} from "@/lib/mixer-crop-layout";
describe("resolveRenderPreviewInputFromGraph", () => {
it("resolves mixer input as renderable mixer composition", () => {
@@ -29,6 +36,10 @@ describe("resolveRenderPreviewInputFromGraph", () => {
overlayY: 0.1,
overlayWidth: 0.55,
overlayHeight: 0.44,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
},
{
@@ -61,6 +72,10 @@ describe("resolveRenderPreviewInputFromGraph", () => {
overlayY: 0.1,
overlayWidth: 0.55,
overlayHeight: 0.44,
cropLeft: 0.08,
cropTop: 0.15,
cropRight: 0.22,
cropBottom: 0.1,
},
steps: [],
});
@@ -89,6 +104,10 @@ describe("resolveRenderPreviewInputFromGraph", () => {
overlayY: "1.4",
overlayWidth: 2,
overlayHeight: 0,
cropLeft: "0.95",
cropTop: -2,
cropRight: "4",
cropBottom: "0",
},
},
{
@@ -119,6 +138,10 @@ describe("resolveRenderPreviewInputFromGraph", () => {
overlayY: 0.9,
overlayWidth: 1,
overlayHeight: 0.1,
cropLeft: 0.9,
cropTop: 0,
cropRight: 0,
cropBottom: 0,
});
});
@@ -206,4 +229,189 @@ describe("resolveRenderPreviewInputFromGraph", () => {
expect(preview.sourceUrl).toBe("https://cdn.example.com/generated-video.mp4");
expect(preview.sourceComposition).toBeUndefined();
});
it("prefers live render preview URLs over stale baked render URLs inside downstream mixer compositions", () => {
  // Graph shape: a base image and an upstream image feed a mixer; the
  // overlay path runs through an intermediate render node that still
  // carries a stale baked lastUploadUrl from an earlier upload.
  const nodes = [
    { id: "base-image", type: "image", data: { url: "https://cdn.example.com/base.png" } },
    { id: "overlay-upstream", type: "image", data: { url: "https://cdn.example.com/upstream.png" } },
    {
      id: "render-overlay",
      type: "render",
      data: { lastUploadUrl: "https://cdn.example.com/stale-render-output.png" },
    },
    { id: "mixer-1", type: "mixer", data: {} },
    { id: "render-2", type: "render", data: {} },
  ];
  const edges = [
    { source: "overlay-upstream", target: "render-overlay" },
    { source: "base-image", target: "mixer-1", targetHandle: "base" },
    { source: "render-overlay", target: "mixer-1", targetHandle: "overlay" },
    { source: "mixer-1", target: "render-2" },
  ];

  const preview = resolveRenderPreviewInputFromGraph({
    nodeId: "render-2",
    graph: buildGraphSnapshot(nodes, edges),
  });

  // The resolved overlay URL must be the live upstream image, not the
  // stale lastUploadUrl baked into the intermediate render node; all
  // mixer controls fall back to their defaults (full-frame, no crop).
  expect(preview).toEqual({
    sourceUrl: null,
    sourceComposition: {
      kind: "mixer",
      baseUrl: "https://cdn.example.com/base.png",
      overlayUrl: "https://cdn.example.com/upstream.png",
      blendMode: "normal",
      opacity: 100,
      overlayX: 0,
      overlayY: 0,
      overlayWidth: 1,
      overlayHeight: 1,
      cropLeft: 0,
      cropTop: 0,
      cropRight: 0,
      cropBottom: 0,
    },
    steps: [],
  });
});
});
describe("mixer crop layout parity", () => {
  it("contains a wide cropped source inside a square overlay frame", () => {
    // 200x100 source cropped 25% top and bottom leaves a 200x50 (4:1)
    // strip, which letterboxes vertically inside a 1:1 frame.
    const rect = computeVisibleMixerContentRect({
      frameAspectRatio: 1,
      sourceWidth: 200,
      sourceHeight: 100,
      cropLeft: 0,
      cropTop: 0.25,
      cropRight: 0,
      cropBottom: 0.25,
    });
    expect(rect).toEqual({ x: 0, y: 0.375, width: 1, height: 0.25 });
  });

  it("returns compare image styles that letterbox instead of stretching", () => {
    // Uncropped 2:1 source in a 1:1 frame: full width, half height, centered.
    const style = computeMixerCropImageStyle({
      frameAspectRatio: 1,
      sourceWidth: 200,
      sourceHeight: 100,
      cropLeft: 0,
      cropTop: 0,
      cropRight: 0,
      cropBottom: 0,
    });
    expect(style).toEqual({ left: "0%", top: "25%", width: "100%", height: "50%" });
  });

  it("uses the actual base-aware frame pixel ratio for compare crop math", () => {
    const style = computeMixerCompareOverlayImageStyle({
      surfaceWidth: 500,
      surfaceHeight: 380,
      baseWidth: 200,
      baseHeight: 100,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
      sourceWidth: 200,
      sourceHeight: 100,
      cropLeft: 0.1,
      cropTop: 0,
      cropRight: 0.1,
      cropBottom: 0,
    });
    expect(style).toEqual({ left: "0%", top: "0%", width: "100%", height: "100%" });
  });

  it("does not mark compare crop overlay ready before natural size is known", () => {
    // No load event yet and no natural size: readiness must stay false.
    const ready = isMixerCropImageReady({
      currentOverlayUrl: "https://cdn.example.com/overlay-a.png",
      loadedOverlayUrl: null,
      sourceWidth: 0,
      sourceHeight: 0,
    });
    expect(ready).toBe(false);
  });

  it("invalidates compare crop overlay readiness on source swap until the new image loads", () => {
    // The loaded URL belongs to the previous overlay; readiness must reset.
    const ready = isMixerCropImageReady({
      currentOverlayUrl: "https://cdn.example.com/overlay-b.png",
      loadedOverlayUrl: "https://cdn.example.com/overlay-a.png",
      sourceWidth: 200,
      sourceHeight: 100,
    });
    expect(ready).toBe(false);
  });

  it("positions mixer overlay frame relative to the displayed base-image rect", () => {
    // 2:1 base contained in a 1:1 surface occupies y 0.25..0.75, so the
    // normalized overlay rect is mapped into that displayed base rect.
    const frame = computeMixerFrameRectInSurface({
      surfaceWidth: 1,
      surfaceHeight: 1,
      baseWidth: 200,
      baseHeight: 100,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
    });
    expect(frame).toEqual({ x: 0.1, y: 0.35, width: 0.4, height: 0.2 });
  });

  it("returns null frame placement until base image natural size is known", () => {
    // Zero base dimensions mean the natural size has not loaded yet.
    const frame = computeMixerFrameRectInSurface({
      surfaceWidth: 1,
      surfaceHeight: 1,
      baseWidth: 0,
      baseHeight: 0,
      overlayX: 0.1,
      overlayY: 0.2,
      overlayWidth: 0.4,
      overlayHeight: 0.4,
    });
    expect(frame).toBeNull();
  });
});