diff --git a/components/canvas/CLAUDE.md b/components/canvas/CLAUDE.md index f944529..93149f0 100644 --- a/components/canvas/CLAUDE.md +++ b/components/canvas/CLAUDE.md @@ -144,16 +144,23 @@ render: 300 × 420 mixer: 360 × 320 - **Handles:** genau zwei Inputs links (`base`, `overlay`) und ein Output rechts (`mixer-out`). - **Erlaubte Inputs:** `image`, `asset`, `ai-image`, `render`. - **Connection-Limits:** maximal 2 eingehende Kanten insgesamt, davon pro Handle genau 1. -- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `offsetX`, `offsetY`. +- **Node-Data (V1):** `blendMode` (`normal|multiply|screen|overlay`), `opacity` (0..100), `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` (Frame-Rect, normiert 0..1) plus `contentX`, `contentY`, `contentWidth`, `contentHeight` (Content-Framing innerhalb des Overlay-Frames, ebenfalls normiert 0..1). - **Output-Semantik:** pseudo-image (clientseitig aus Graph + Controls aufgeloest), kein persistiertes Asset, kein Storage-Write. -- **UI/Interaction:** nur Inline-Formcontrols im Node; keine Drag-Manipulation im Preview, keine Rotation/Skalierung/Masks. +- **UI/Interaction:** Zwei Modi im Preview: `Frame resize` (Overlay-Frame verschieben + ueber Corner-Handles resizen) und `Content framing` (Overlay-Inhalt innerhalb des Frames verschieben). Numerische Inline-Controls bleiben als Feineinstellung erhalten. +- **Sizing/Crop-Verhalten:** Der Overlay-Inhalt wird `object-cover`-aehnlich in den Content-Rect eingepasst; bei abweichenden Seitenverhaeltnissen wird zentriert gecroppt. ### Compare-Integration (V1) - `compare` versteht `mixer`-Outputs ueber `lib/canvas-mixer-preview.ts`. -- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Offset). +- Die Vorschau wird als DOM/CSS-Layering im Client gerendert (inkl. Blend/Opacity/Overlay-Rect). - Scope bleibt eng: keine pauschale pseudo-image-Unterstuetzung fuer alle Consumer in V1. 
+### Render-Bake-Pfad (V1) + +- Offizieller Bake-Flow: `mixer -> render`. +- `render` konsumiert die Mixer-Komposition (`sourceComposition.kind = "mixer"`) und nutzt sie fuer Preview + finalen Render/Upload. +- `mixer -> adjustments -> render` ist bewusst verschoben (deferred) und aktuell nicht offizieller Scope. + --- ## Node-Status-Modell @@ -325,7 +332,8 @@ useCanvasData (use-canvas-data.ts) - **Node-Taxonomie:** Alle Node-Typen sind in `lib/canvas-node-catalog.ts` definiert. Phase-2/3 Nodes haben `implemented: false` und `disabledHint`. - **Video-Connection-Policy:** `video-prompt` darf **nur** mit `ai-video` verbunden werden (und umgekehrt). `text → video-prompt` ist erlaubt (Prompt-Quelle). `ai-video → compare` ist erlaubt. - **Mixer-Connection-Policy:** `mixer` akzeptiert nur `image|asset|ai-image|render`; Ziel-Handles sind nur `base` und `overlay`, pro Handle maximal eine eingehende Kante, insgesamt maximal zwei. -- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Downstream-Nodes muessen den pseudo-image-Resolver nutzen (aktuell gezielt fuer `compare`). +- **Mixer-Pseudo-Output:** `mixer` liefert in V1 kein persistiertes Bild. Offizielle Consumer sind `compare` und der direkte Bake-Pfad `mixer -> render`; `mixer -> adjustments -> render` bleibt vorerst deferred. +- **Mixer Legacy-Daten:** Alte `offsetX`/`offsetY`-Mixer-Daten werden beim Lesen auf den Full-Frame-Fallback (`overlay* = 0/0/1/1`) normalisiert; Content-Framing defaults auf `content* = 0/0/1/1`. - **Agent-Flow:** `agent` akzeptiert nur Content-/Kontext-Quellen (z. B. `render`, `compare`, `text`, `image`) als Input; ausgehende Kanten sind fuer `agent -> agent-output` vorgesehen. - **Convex Generated Types:** `api.ai.generateVideo` wird u. U. nicht in `convex/_generated/api.d.ts` exportiert. Der Code verwendet `api as unknown as {...}` als Workaround. Ein `npx convex dev`-Zyklus würde die Typen korrekt generieren. 
- **Canvas Graph Query:** Der Canvas nutzt `canvasGraph.get` (aus `convex/canvasGraph.ts`) statt separater `nodes.list`/`edges.list` Queries. Optimistic Updates laufen über `canvas-graph-query-cache.ts`. diff --git a/components/canvas/__tests__/compare-node.test.tsx b/components/canvas/__tests__/compare-node.test.tsx index aa60d8c..26ec627 100644 --- a/components/canvas/__tests__/compare-node.test.tsx +++ b/components/canvas/__tests__/compare-node.test.tsx @@ -1,5 +1,9 @@ +// @vitest-environment jsdom + import React from "react"; -import { beforeEach, describe, expect, it, vi } from "vitest"; +import { act } from "react"; +import { createRoot, type Root } from "react-dom/client"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import { renderToStaticMarkup } from "react-dom/server"; import { CanvasGraphProvider } from "@/components/canvas/canvas-graph-context"; @@ -15,12 +19,20 @@ type StoreState = { }>; }; +type ResizeObserverEntryLike = { + target: Element; + contentRect: { width: number; height: number }; +}; + const storeState: StoreState = { nodes: [], edges: [], }; const compareSurfaceSpy = vi.fn(); +let resizeObserverCallback: + | ((entries: ResizeObserverEntryLike[]) => void) + | null = null; vi.mock("@xyflow/react", () => ({ Handle: () => null, @@ -53,6 +65,14 @@ vi.mock("@/components/canvas/canvas-handle", () => ({ ), })); +vi.mock("@/hooks/use-pipeline-preview", () => ({ + usePipelinePreview: () => ({ + canvasRef: { current: null }, + isRendering: false, + error: null, + }), +})); + vi.mock("../nodes/base-node-wrapper", () => ({ default: ({ children }: { children: React.ReactNode }) =>
{children}
, })); @@ -66,6 +86,8 @@ vi.mock("../nodes/compare-surface", () => ({ import CompareNode from "../nodes/compare-node"; +(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true; + function renderCompareNode(props: Record) { return renderToStaticMarkup( ) { } describe("CompareNode render preview inputs", () => { + let container: HTMLDivElement | null = null; + let root: Root | null = null; + beforeEach(() => { storeState.nodes = []; storeState.edges = []; compareSurfaceSpy.mockReset(); + resizeObserverCallback = null; + globalThis.ResizeObserver = class ResizeObserver { + constructor(callback: (entries: ResizeObserverEntryLike[]) => void) { + resizeObserverCallback = callback; + } + + observe(target: Element) { + resizeObserverCallback?.([ + { + target, + contentRect: { width: 500, height: 380 }, + }, + ]); + } + + unobserve() {} + + disconnect() {} + } as unknown as typeof ResizeObserver; + container = document.createElement("div"); + document.body.appendChild(container); + root = createRoot(container); + }); + + afterEach(async () => { + if (root) { + await act(async () => { + root?.unmount(); + }); + } + + container?.remove(); + root = null; + container = null; }); it("passes previewInput to CompareSurface for a connected render node without final output", () => { @@ -192,6 +251,108 @@ describe("CompareNode render preview inputs", () => { }); }); + it("defaults mixer-backed render compare inputs to preview mode when only sourceComposition exists", () => { + storeState.nodes = [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-image", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "multiply", + opacity: 62, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.5, + cropLeft: 0.1, + cropTop: 0, + cropRight: 0.2, + cropBottom: 0.1, + }, + 
}, + { + id: "render-1", + type: "render", + data: { + lastUploadUrl: "https://cdn.example.com/stale-render-output.png", + }, + }, + ]; + storeState.edges = [ + { + id: "edge-base-mixer", + source: "base-image", + target: "mixer-1", + targetHandle: "base", + }, + { + id: "edge-overlay-mixer", + source: "overlay-image", + target: "mixer-1", + targetHandle: "overlay", + }, + { id: "edge-mixer-render", source: "mixer-1", target: "render-1" }, + { + id: "edge-render-compare", + source: "render-1", + target: "compare-1", + targetHandle: "left", + }, + ]; + + renderCompareNode({ + id: "compare-1", + data: { leftUrl: "https://cdn.example.com/stale-render-output.png" }, + selected: false, + dragging: false, + zIndex: 0, + isConnectable: true, + type: "compare", + xPos: 0, + yPos: 0, + width: 500, + height: 380, + sourcePosition: undefined, + targetPosition: undefined, + positionAbsoluteX: 0, + positionAbsoluteY: 0, + }); + + expect(compareSurfaceSpy).toHaveBeenCalledTimes(1); + expect(compareSurfaceSpy.mock.calls[0]?.[0]).toMatchObject({ + finalUrl: "https://cdn.example.com/stale-render-output.png", + preferPreview: true, + previewInput: { + sourceUrl: null, + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "multiply", + opacity: 62, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.5, + cropLeft: 0.1, + cropTop: 0, + cropRight: 0.2, + cropBottom: 0.1, + }, + steps: [], + }, + }); + }); + it("prefers mixer composite preview over persisted compare finalUrl when mixer is connected", () => { storeState.nodes = [ { @@ -275,14 +436,22 @@ describe("CompareNode render preview inputs", () => { ); expect(mixerCall?.[0]).toMatchObject({ finalUrl: undefined, + nodeWidth: 500, + nodeHeight: 380, mixerPreviewState: { status: "ready", baseUrl: "https://cdn.example.com/base.png", overlayUrl: "https://cdn.example.com/overlay.png", blendMode: "multiply", opacity: 
62, - offsetX: 12, - offsetY: -4, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }, }); }); @@ -317,4 +486,183 @@ describe("CompareNode render preview inputs", () => { expect(markup).toContain('data-top="35%"'); expect(markup).toContain('data-top="55%"'); }); + + it("passes the measured compare surface size to mixer previews instead of the full node box", async () => { + storeState.nodes = [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-image", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.6, + overlayHeight: 0.5, + }, + }, + ]; + storeState.edges = [ + { + id: "edge-base-mixer", + source: "base-image", + target: "mixer-1", + targetHandle: "base", + }, + { + id: "edge-overlay-mixer", + source: "overlay-image", + target: "mixer-1", + targetHandle: "overlay", + }, + { + id: "edge-mixer-compare", + source: "mixer-1", + target: "compare-1", + targetHandle: "left", + }, + ]; + + await act(async () => { + root?.render( + } + edges={storeState.edges} + > + )} + /> + , + ); + }); + + await vi.waitFor(() => { + const latestCompareSurfaceCall = compareSurfaceSpy.mock.calls.findLast( + ([props]) => + Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState), + ); + + expect(latestCompareSurfaceCall?.[0]).toMatchObject({ + nodeWidth: 500, + nodeHeight: 380, + }); + }); + + const surfaceElement = container?.querySelector(".nodrag.relative.min-h-0.w-full"); + expect(surfaceElement).toBeInstanceOf(HTMLDivElement); + + await act(async () => { + resizeObserverCallback?.([ + { + target: surfaceElement as HTMLDivElement, + contentRect: { width: 468, height: 312 }, + }, + ]); + }); + + const latestCompareSurfaceCall = 
compareSurfaceSpy.mock.calls.findLast( + ([props]) => + Boolean((props as { mixerPreviewState?: { status?: string } }).mixerPreviewState), + ); + + expect(latestCompareSurfaceCall?.[0]).toMatchObject({ + nodeWidth: 468, + nodeHeight: 312, + }); + expect(latestCompareSurfaceCall?.[0]).not.toMatchObject({ + nodeWidth: 640, + nodeHeight: 480, + }); + }); + + it("anchors direct mixer previews to the actual compare surface rect", async () => { + const compareSurfaceModule = await vi.importActual( + "../nodes/compare-surface", + ); + const ActualCompareSurface = compareSurfaceModule.default; + + await act(async () => { + root?.render( + + + , + ); + }); + + const images = container?.querySelectorAll("img"); + const baseImage = images?.[0]; + + if (!(baseImage instanceof HTMLImageElement)) { + throw new Error("base image not found"); + } + + Object.defineProperty(baseImage, "naturalWidth", { configurable: true, value: 200 }); + Object.defineProperty(baseImage, "naturalHeight", { configurable: true, value: 100 }); + + await act(async () => { + baseImage.dispatchEvent(new Event("load")); + }); + + const overlayImage = container?.querySelectorAll("img")?.[1]; + if (!(overlayImage instanceof HTMLImageElement)) { + throw new Error("overlay image not found"); + } + + Object.defineProperty(overlayImage, "naturalWidth", { configurable: true, value: 200 }); + Object.defineProperty(overlayImage, "naturalHeight", { configurable: true, value: 100 }); + + await act(async () => { + overlayImage.dispatchEvent(new Event("load")); + }); + + const overlayFrame = overlayImage.parentElement; + expect(overlayFrame?.style.left).toBe("0%"); + expect(overlayFrame?.style.top).toBe("17.105263157894736%"); + expect(overlayFrame?.style.width).toBe("100%"); + expect(overlayFrame?.style.height).toBe("65.78947368421053%"); + }); }); diff --git a/components/canvas/__tests__/mixer-node.test.tsx b/components/canvas/__tests__/mixer-node.test.tsx index f0152d1..1a50610 100644 --- 
a/components/canvas/__tests__/mixer-node.test.tsx +++ b/components/canvas/__tests__/mixer-node.test.tsx @@ -71,14 +71,26 @@ type TestEdge = { targetHandle?: string; }; +function cropRectData(x: number, y: number, width: number, height: number) { + return { + cropLeft: x, + cropTop: y, + cropRight: 1 - (x + width), + cropBottom: 1 - (y + height), + }; +} + function buildMixerNodeProps(overrides?: Partial>) { return { id: "mixer-1", data: { blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 1, 1), }, selected: false, dragging: false, @@ -100,9 +112,59 @@ function buildMixerNodeProps(overrides?: Partial { let container: HTMLDivElement | null = null; let root: Root | null = null; + let resizeObserverCallback: + | ((entries: Array<{ target: Element; contentRect: { width: number; height: number } }>) => void) + | null = null; + + const readyNodes: TestNode[] = [ + { id: "image-base", type: "image", data: { url: "https://cdn.example.com/base.png" } }, + { id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay.png" } }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 1, 1), + }, + }, + ]; + + const readyEdges: TestEdge[] = [ + { id: "edge-base", source: "image-base", target: "mixer-1", targetHandle: "base" }, + { id: "edge-overlay", source: "image-overlay", target: "mixer-1", targetHandle: "overlay" }, + ]; beforeEach(() => { + vi.useFakeTimers(); mocks.queueNodeDataUpdate.mockClear(); + resizeObserverCallback = null; + globalThis.ResizeObserver = class ResizeObserver { + constructor( + callback: ( + entries: Array<{ target: Element; contentRect: { width: number; height: number } }>, + ) => void, + ) { + resizeObserverCallback = callback; + } + + observe(target: Element) { + resizeObserverCallback?.([ + { 
+ target, + contentRect: { width: 200, height: 200 }, + }, + ]); + } + + unobserve() {} + + disconnect() {} + } as unknown as typeof ResizeObserver; container = document.createElement("div"); document.body.appendChild(container); root = createRoot(container); @@ -115,6 +177,7 @@ describe("MixerNode", () => { }); } container?.remove(); + vi.useRealTimers(); root = null; container = null; }); @@ -136,6 +199,29 @@ describe("MixerNode", () => { }); } + function mockPreviewRect(preview: HTMLDivElement) { + return vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + } + + async function setNaturalImageSize(image: HTMLImageElement, width: number, height: number) { + Object.defineProperty(image, "naturalWidth", { configurable: true, value: width }); + Object.defineProperty(image, "naturalHeight", { configurable: true, value: height }); + + await act(async () => { + image.dispatchEvent(new Event("load")); + }); + } + it("renders empty state copy when no inputs are connected", async () => { await renderNode(); @@ -155,26 +241,7 @@ describe("MixerNode", () => { }); it("renders ready state with stacked base and overlay previews", async () => { - await renderNode({ - nodes: [ - { id: "image-base", type: "image", data: { url: "https://cdn.example.com/base.png" } }, - { id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay.png" } }, - { - id: "mixer-1", - type: "mixer", - data: { blendMode: "multiply", opacity: 60, offsetX: 14, offsetY: -8 }, - }, - ], - edges: [ - { id: "edge-base", source: "image-base", target: "mixer-1", targetHandle: "base" }, - { - id: "edge-overlay", - source: "image-overlay", - target: "mixer-1", - targetHandle: "overlay", - }, - ], - }); + await renderNode({ nodes: readyNodes, edges: readyEdges }); const baseImage = container?.querySelector('img[alt="Mixer base"]'); const overlayImage = 
container?.querySelector('img[alt="Mixer overlay"]'); @@ -183,13 +250,1889 @@ describe("MixerNode", () => { expect(overlayImage).toBeTruthy(); }); - it("queues node data updates for blend mode, opacity, and overlay offsets", async () => { + it("anchors the preview overlay frame to the visible base cover rect", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + ...cropRectData(0, 0, 1, 1), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + ...cropRectData(0, 0, 1, 1), + }, + }, + }); + + const overlayFrame = container?.querySelector('[data-testid="mixer-overlay"]'); + if (!(overlayFrame instanceof HTMLDivElement)) { + throw new Error("overlay frame not found"); + } + + expect(overlayFrame.style.left).toBe("-50%"); + expect(overlayFrame.style.top).toBe("0%"); + expect(overlayFrame.style.width).toBe("200%"); + expect(overlayFrame.style.height).toBe("100%"); + }); + + it("anchors resize handles to the displayed overlay frame rect on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + }); + + 
const northWestHandle = container?.querySelector('[data-testid="mixer-resize-nw"]'); + const southEastHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(northWestHandle instanceof HTMLDivElement)) { + throw new Error("north west handle not found"); + } + if (!(southEastHandle instanceof HTMLDivElement)) { + throw new Error("south east handle not found"); + } + + expect(Number.parseFloat(northWestHandle.style.left)).toBeCloseTo(-30, 6); + expect(Number.parseFloat(northWestHandle.style.top)).toBeCloseTo(20, 6); + expect(Number.parseFloat(southEastHandle.style.left)).toBeCloseTo(20, 6); + expect(Number.parseFloat(southEastHandle.style.top)).toBeCloseTo(60, 6); + }); + + it("anchors crop box handles to the displayed overlay frame rect on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const northWestHandle = 
container?.querySelector('[data-testid="mixer-resize-nw"]'); + const eastHandle = container?.querySelector('[data-testid="mixer-resize-e"]'); + + if (!(northWestHandle instanceof HTMLDivElement)) { + throw new Error("north west crop handle not found"); + } + if (!(eastHandle instanceof HTMLDivElement)) { + throw new Error("east crop handle not found"); + } + + expect(Number.parseFloat(northWestHandle.style.left)).toBeCloseTo(-25, 6); + expect(Number.parseFloat(northWestHandle.style.top)).toBeCloseTo(20, 6); + expect(Number.parseFloat(eastHandle.style.left)).toBeCloseTo(15, 6); + expect(Number.parseFloat(eastHandle.style.top)).toBeCloseTo(40, 6); + }); + + it("uses displayed base rect scaling for frame move deltas on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const overlayFrame = container?.querySelector('[data-testid="mixer-overlay"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(overlayFrame instanceof HTMLDivElement)) { + throw new Error("overlay frame not found"); + } + + mockPreviewRect(preview); + + await act(async () => { + overlayFrame.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 10, clientY: 40 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 50, clientY: 60 
})); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data.overlayX as number).toBeCloseTo(0.2, 6); + expect(lastCall?.data.overlayY as number).toBeCloseTo(0.3, 6); + }); + + it("uses displayed base rect scaling for frame resize deltas on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 1, 1), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + mockPreviewRect(preview); + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 40, clientY: 120 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 80, clientY: 120 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = 
mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data.overlayX as number).toBeCloseTo(0.1, 6); + expect(lastCall?.data.overlayY as number).toBeCloseTo(0.2, 6); + expect(lastCall?.data.overlayWidth as number).toBeCloseTo(0.35, 6); + expect(lastCall?.data.overlayHeight as number).toBeCloseTo(0.56, 6); + }); + + it("uses displayed overlay frame scaling for crop move deltas on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 80); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = 
container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof HTMLDivElement)) { + throw new Error("crop box not found"); + } + + await act(async () => { + cropBox.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: -10, clientY: 40 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 10, clientY: 52 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data.cropLeft as number).toBeCloseTo(0.12, 6); + expect(lastCall?.data.cropTop as number).toBeCloseTo(0.09, 6); + expect(lastCall?.data.cropRight as number).toBeCloseTo(0.28, 6); + expect(lastCall?.data.cropBottom as number).toBeCloseTo(0.31, 6); + }); + + it("uses displayed overlay frame scaling for crop resize deltas on wide bases", async () => { + await renderNode({ + nodes: [ + { + id: "image-base", + type: "image", + data: { + url: "https://cdn.example.com/base.png", + intrinsicWidth: 200, + intrinsicHeight: 100, + }, + }, + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.4, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = 
container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 80); + + await act(async () => { + contentModeToggle.click(); + }); + + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-e"]'); + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("east resize handle not found"); + } + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 20, clientY: 40 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 40, clientY: 40 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ + cropLeft: 0, + cropTop: 0, + cropRight: 0.28, + cropBottom: 0.4, + }), + }); + }); + + it("maps overlay content through crop/source-region styles instead of contain-fit", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + const overlayImage = container?.querySelector('img[alt="Mixer overlay"]'); + if (!(overlayImage instanceof HTMLImageElement)) { + throw new Error("overlay image not found"); + } + + expect(overlayImage.className).not.toContain("object-contain"); + expect(overlayImage.style.width).toBe("100%"); + expect(overlayImage.style.height).toBe("100%"); + }); + + it("drag updates persisted overlay geometry", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + const preview = 
container?.querySelector('[data-testid="mixer-preview"]'); + const overlay = container?.querySelector('[data-testid="mixer-overlay"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(overlay instanceof HTMLDivElement)) { + throw new Error("overlay frame not found"); + } + + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + + await act(async () => { + overlay.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 50, clientY: 50 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 90, clientY: 70 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ + overlayX: 0.2, + overlayY: 0.1, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 1, 1), + }), + }); + }); + + it("drag clamps overlay bounds inside preview", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const overlay = container?.querySelector('[data-testid="mixer-overlay"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(overlay instanceof HTMLDivElement)) { + throw new Error("overlay frame not found"); + } + + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + + await act(async () => { + overlay.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 20, clientY: 20 })); + }); + + await act(async () => { + window.dispatchEvent(new 
MouseEvent("mousemove", { bubbles: true, clientX: 400, clientY: 380 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ + overlayX: 0.5, + overlayY: 0.5, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 1, 1), + }), + }); + }); + + it("frame resize keeps the displayed overlay aspect ratio locked", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + + await act(async () => { + resizeHandle.dispatchEvent( + new MouseEvent("mousedown", { bubbles: true, clientX: 100, clientY: 100 }), + ); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 140, clientY: 120 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = 
mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record<string, unknown> } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }), + ); + expect( + (lastCall?.data.overlayWidth as number) / (lastCall?.data.overlayHeight as number), + ).toBeCloseTo(1.25, 6); + }); + + it("frame resize preserves crop fields while scaling the displayed overlay proportionally", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + mockPreviewRect(preview); + + expect(resizeHandle.style.left).toBe("60%"); + expect(resizeHandle.style.top).toBe("60.00000000000001%"); + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 120, clientY: 120 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 140, clientY: 140 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + 
expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.625, + overlayHeight: 0.5, + ...cropRectData(0.2, 0.1, 0.4, 0.6), + }), + }); + }); + + it("enforces minimum overlay size during resize", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + + await act(async () => { + resizeHandle.dispatchEvent( + new MouseEvent("mousedown", { bubbles: true, clientX: 100, clientY: 100 }), + ); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: -600, clientY: -700 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + expect(mocks.queueNodeDataUpdate).toHaveBeenLastCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ + overlayWidth: 0.1, + overlayHeight: 0.1, + ...cropRectData(0, 0, 1, 1), + }), + }); + }); + + it("renders explicit content framing mode toggle", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + expect(container?.querySelector('[data-testid="mixer-content-mode-toggle"]')).toBeTruthy(); + }); + + it("crop drag inside the crop box repositions the crop region only", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + 
overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0, 0, 0.6, 0.6), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof HTMLDivElement)) { + throw new Error("crop box not found"); + } + + await act(async () => { + cropBox.dispatchEvent( + new MouseEvent("mousedown", { bubbles: true, clientX: 50, clientY: 50 }), + ); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 90, clientY: 70 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + 
expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0.24, 0.12, 0.6, 0.6), + }), + ); + }); + + it("content framing supports zooming content before drag from defaults", async () => { + await renderNode({ nodes: readyNodes, edges: readyEdges }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const cropRight = container?.querySelector('input[name="cropRight"]'); + const cropBottom = container?.querySelector('input[name="cropBottom"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(cropRight instanceof HTMLInputElement)) { + throw new Error("cropRight input not found"); + } + if (!(cropBottom instanceof HTMLInputElement)) { + throw new Error("cropBottom input not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof HTMLDivElement)) { + throw new Error("crop box not found"); + } + + await act(async () => { + cropRight.value = "0.4"; + cropRight.dispatchEvent(new Event("input", { bubbles: true })); + cropRight.dispatchEvent(new Event("change", { bubbles: true })); + 
cropBottom.value = "0.3"; + cropBottom.dispatchEvent(new Event("input", { bubbles: true })); + cropBottom.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + await act(async () => { + cropBox.dispatchEvent( + new MouseEvent("mousedown", { bubbles: true, clientX: 50, clientY: 50 }), + ); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 90, clientY: 70 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0, + overlayY: 0, + overlayWidth: 0.5, + overlayHeight: 0.5, + }), + ); + expect(lastCall?.data.cropLeft as number).toBeCloseTo(0.28, 6); + expect(lastCall?.data.cropTop as number).toBeCloseTo(0.14, 6); + expect(lastCall?.data.cropRight as number).toBeCloseTo(0.12, 6); + expect(lastCall?.data.cropBottom as number).toBeCloseTo(0.16, 6); + }); + + it("crop drag uses the crop box as the movement frame instead of the full overlay", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.1, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.1, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = 
container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({ + x: 0, + y: 0, + top: 0, + left: 0, + right: 200, + bottom: 200, + width: 200, + height: 200, + toJSON: () => ({}), + }); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof HTMLDivElement)) { + throw new Error("crop box not found"); + } + + await act(async () => { + cropBox.dispatchEvent( + new MouseEvent("mousedown", { bubbles: true, clientX: 50, clientY: 50 }), + ); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 70, clientY: 66 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.225, 0.2, 0.5, 0.5), + }), + ); + }); + + it("crop handles render on the crop box while the displayed overlay frame stays fixed", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + 
opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 50, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof HTMLDivElement)) { + throw new Error("crop box not found"); + } + + expect(Number.parseFloat(cropBox.style.left)).toBeCloseTo(30, 6); + expect(Number.parseFloat(cropBox.style.top)).toBeCloseTo(0, 6); + expect(Number.parseFloat(cropBox.style.width)).toBeCloseTo(40, 6); + expect(Number.parseFloat(cropBox.style.height)).toBeCloseTo(100, 6); + const northWestHandle = container?.querySelector('[data-testid="mixer-resize-nw"]'); + const northHandle = container?.querySelector('[data-testid="mixer-resize-n"]'); + const southEastHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + if (!(northWestHandle instanceof HTMLDivElement)) { + throw new Error("north west handle not found"); + } + if (!(northHandle instanceof HTMLDivElement)) { + throw new Error("north handle not found"); 
+ } + if (!(southEastHandle instanceof HTMLDivElement)) { + throw new Error("south east handle not found"); + } + + expect(Number.parseFloat(northWestHandle.style.left)).toBeCloseTo(25, 6); + expect(Number.parseFloat(northWestHandle.style.top)).toBeCloseTo(20, 6); + expect(Number.parseFloat(northHandle.style.left)).toBeCloseTo(35, 6); + expect(Number.parseFloat(northHandle.style.top)).toBeCloseTo(20, 6); + expect(Number.parseFloat(southEastHandle.style.left)).toBeCloseTo(45, 6); + expect(Number.parseFloat(southEastHandle.style.top)).toBeCloseTo(60, 6); + }); + + it("crop move uses the visible aspect-aware rect for non-square overlays", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 50, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const cropBox = container?.querySelector('[data-testid="mixer-crop-box"]'); + if (!(cropBox instanceof 
HTMLDivElement)) { + throw new Error("crop box not found"); + } + + await act(async () => { + cropBox.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 55, clientY: 66 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 65, clientY: 76 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record<string, unknown> } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + }), + ); + expect(lastCall?.data.cropLeft as number).toBeCloseTo(0.225, 6); + expect(lastCall?.data.cropTop as number).toBeCloseTo(0.2625, 6); + expect(lastCall?.data.cropRight as number).toBeCloseTo(0.275, 6); + expect(lastCall?.data.cropBottom as number).toBeCloseTo(0.2375, 6); + }); + + it("crop resize uses the visible aspect-aware rect for non-square overlays", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof 
throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 50, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-e"]'); + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("east resize handle not found"); + } + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 70, clientY: 76 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 80, clientY: 76 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + cropLeft: 0.1, + cropTop: 0.2, + }), + ); + expect(lastCall?.data.cropBottom as number).toBeCloseTo(0.3, 6); + expect(lastCall?.data.cropRight as number).toBeCloseTo(0.275, 6); + }); + + it("ignores crop interactions until overlay natural size is known", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, 
+ overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + + mockPreviewRect(preview); + + await act(async () => { + contentModeToggle.click(); + }); + + await act(async () => { + await vi.advanceTimersByTimeAsync(250); + }); + + expect(container?.querySelector('[data-testid="mixer-crop-box"]')).toBeNull(); + expect(container?.querySelector('[data-testid="mixer-resize-e"]')).toBeNull(); + expect(mocks.queueNodeDataUpdate).not.toHaveBeenCalled(); + }); + + it("does not render crop affordances until overlay natural size is known", async () => { + await renderNode({ + nodes: readyNodes, + edges: readyEdges, + }); + + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + + await act(async () => { + contentModeToggle.click(); + }); + + expect(container?.querySelector('[data-testid="mixer-crop-box"]')).toBeNull(); + expect(container?.querySelector('[data-testid="mixer-resize-e"]')).toBeNull(); + }); + + it("ignores crop interactions after overlay source swap until new natural size loads", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, 
+ overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + let overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + await renderNode({ + nodes: [ + readyNodes[0], + { id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay-2.png" } }, + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const swappedPreview = container?.querySelector('[data-testid="mixer-preview"]'); + if (!(swappedPreview instanceof HTMLDivElement)) { + throw new Error("preview not found after source swap"); + } + mockPreviewRect(swappedPreview); + + mocks.queueNodeDataUpdate.mockClear(); + + overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found after source swap"); + } + expect(overlayContent.getAttribute("src")).toBe("https://cdn.example.com/overlay-2.png"); + + 
await act(async () => { + await vi.advanceTimersByTimeAsync(250); + }); + + expect(container?.querySelector('[data-testid="mixer-crop-box"]')).toBeNull(); + expect(container?.querySelector('[data-testid="mixer-resize-e"]')).toBeNull(); + expect(mocks.queueNodeDataUpdate).not.toHaveBeenCalled(); + }); + + it("hides crop affordances after overlay source swap until the new image loads", async () => { + await renderNode({ + nodes: readyNodes, + edges: readyEdges, + }); + + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + let overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + await renderNode({ + nodes: [ + readyNodes[0], + { id: "image-overlay", type: "asset", data: { url: "https://cdn.example.com/overlay-2.png" } }, + readyNodes[2], + ], + edges: readyEdges, + }); + + overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found after source swap"); + } + expect(overlayContent.getAttribute("src")).toBe("https://cdn.example.com/overlay-2.png"); + expect(container?.querySelector('[data-testid="mixer-crop-box"]')).toBeNull(); + expect(container?.querySelector('[data-testid="mixer-resize-e"]')).toBeNull(); + }); + + it("crop handle drag trims edges without changing displayed overlay frame size", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 
0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + ...cropRectData(0.1, 0.2, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 80, clientY: 96 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 100, clientY: 116 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.4, + cropLeft: 0.1, + cropTop: 0.2, + }), + ); + 
expect(lastCall?.data.cropRight as number).toBeLessThan(0.4); + expect(lastCall?.data.cropBottom as number).toBeLessThan(0.3); + }); + + it("crop edge handles trim a single side only", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + ...cropRectData(0.2, 0.1, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + ...cropRectData(0.2, 0.1, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + }); + + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-e"]'); + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("east resize handle not found"); + } + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 92, clientY: 65 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 112, clientY: 65 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + 
+ const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + cropLeft: 0.2, + cropTop: 0.1, + cropBottom: 0.4, + }), + ); + expect(lastCall?.data.cropRight as number).toBeLessThan(0.3); + }); + + it("crop handle drag does not mutate overlayWidth or overlayHeight", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + ...cropRectData(0.2, 0.1, 0.5, 0.5), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + ...cropRectData(0.2, 0.1, 0.5, 0.5), + }, + }, + }); + + const preview = container?.querySelector('[data-testid="mixer-preview"]'); + const contentModeToggle = container?.querySelector('[data-testid="mixer-content-mode-toggle"]'); + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + const resizeHandle = container?.querySelector('[data-testid="mixer-resize-se"]'); + + if (!(preview instanceof HTMLDivElement)) { + throw new Error("preview not found"); + } + if (!(contentModeToggle instanceof HTMLButtonElement)) { + throw new Error("content mode toggle not found"); + } + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + if (!(resizeHandle instanceof HTMLDivElement)) { + throw new Error("resize handle not found"); + } + + mockPreviewRect(preview); + await setNaturalImageSize(overlayContent, 100, 100); + + await act(async () => { + contentModeToggle.click(); + 
}); + + await act(async () => { + resizeHandle.dispatchEvent(new MouseEvent("mousedown", { bubbles: true, clientX: 110, clientY: 110 })); + }); + + await act(async () => { + window.dispatchEvent(new MouseEvent("mousemove", { bubbles: true, clientX: 125, clientY: 120 })); + window.dispatchEvent(new MouseEvent("mouseup", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + + const rawLastCall = mocks.queueNodeDataUpdate.mock.calls.at(-1) as unknown[] | undefined; + const lastCall = rawLastCall?.[0] as + | { nodeId: string; data: Record } + | undefined; + + expect(lastCall?.nodeId).toBe("mixer-1"); + expect(lastCall?.data).toEqual( + expect.objectContaining({ + overlayX: 0.15, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.45, + }), + ); + expect(lastCall?.data.cropRight as number).not.toBe(0.3); + expect(lastCall?.data.cropBottom as number).not.toBe(0.4); + }); + + it("maps crop/source-region fields into a fixed displayed frame size", async () => { + await renderNode({ + nodes: [ + readyNodes[0], + readyNodes[1], + { + ...readyNodes[2], + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0.1, 0.2, 0.5, 0.25), + }, + }, + ], + edges: readyEdges, + props: { + data: { + blendMode: "normal", + opacity: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.5, + overlayHeight: 0.5, + ...cropRectData(0.1, 0.2, 0.5, 0.25), + }, + }, + }); + + const overlayContent = container?.querySelector('[data-testid="mixer-overlay-content"]'); + if (!(overlayContent instanceof HTMLImageElement)) { + throw new Error("overlay content image not found"); + } + + expect(overlayContent.style.left).toBe("-20%"); + expect(overlayContent.style.top).toBe("-80%"); + expect(overlayContent.style.width).toBe("200%"); + expect(overlayContent.style.height).toBe("400%"); + }); + + it("numeric controls still update overlay rect fields", async () => { await renderNode(); const 
blendMode = container?.querySelector('select[name="blendMode"]'); const opacity = container?.querySelector('input[name="opacity"]'); - const offsetX = container?.querySelector('input[name="offsetX"]'); - const offsetY = container?.querySelector('input[name="offsetY"]'); + const overlayX = container?.querySelector('input[name="overlayX"]'); + const overlayY = container?.querySelector('input[name="overlayY"]'); + const overlayWidth = container?.querySelector('input[name="overlayWidth"]'); + const overlayHeight = container?.querySelector('input[name="overlayHeight"]'); if (!(blendMode instanceof HTMLSelectElement)) { throw new Error("blendMode select not found"); @@ -197,16 +2140,23 @@ describe("MixerNode", () => { if (!(opacity instanceof HTMLInputElement)) { throw new Error("opacity input not found"); } - if (!(offsetX instanceof HTMLInputElement)) { - throw new Error("offsetX input not found"); + if (!(overlayX instanceof HTMLInputElement)) { + throw new Error("overlayX input not found"); } - if (!(offsetY instanceof HTMLInputElement)) { - throw new Error("offsetY input not found"); + if (!(overlayY instanceof HTMLInputElement)) { + throw new Error("overlayY input not found"); + } + if (!(overlayWidth instanceof HTMLInputElement)) { + throw new Error("overlayWidth input not found"); + } + if (!(overlayHeight instanceof HTMLInputElement)) { + throw new Error("overlayHeight input not found"); } await act(async () => { blendMode.value = "screen"; blendMode.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); }); expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ nodeId: "mixer-1", @@ -217,6 +2167,7 @@ describe("MixerNode", () => { opacity.value = "45"; opacity.dispatchEvent(new Event("input", { bubbles: true })); opacity.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); }); expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ nodeId: "mixer-1", @@ -224,23 +2175,47 @@ 
describe("MixerNode", () => { }); await act(async () => { - offsetX.value = "12"; - offsetX.dispatchEvent(new Event("input", { bubbles: true })); - offsetX.dispatchEvent(new Event("change", { bubbles: true })); + overlayX.value = "0.25"; + overlayX.dispatchEvent(new Event("input", { bubbles: true })); + overlayX.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); }); expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ nodeId: "mixer-1", - data: expect.objectContaining({ offsetX: 12 }), + data: expect.objectContaining({ overlayX: 0.25 }), }); await act(async () => { - offsetY.value = "-6"; - offsetY.dispatchEvent(new Event("input", { bubbles: true })); - offsetY.dispatchEvent(new Event("change", { bubbles: true })); + overlayY.value = "0.4"; + overlayY.dispatchEvent(new Event("input", { bubbles: true })); + overlayY.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); }); expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ nodeId: "mixer-1", - data: expect.objectContaining({ offsetY: -6 }), + data: expect.objectContaining({ overlayY: 0.4 }), + }); + + await act(async () => { + overlayWidth.value = "0.66"; + overlayWidth.dispatchEvent(new Event("input", { bubbles: true })); + overlayWidth.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ overlayWidth: 0.66 }), + }); + + await act(async () => { + overlayHeight.value = "0.33"; + overlayHeight.dispatchEvent(new Event("input", { bubbles: true })); + overlayHeight.dispatchEvent(new Event("change", { bubbles: true })); + await vi.advanceTimersByTimeAsync(250); + }); + expect(mocks.queueNodeDataUpdate).toHaveBeenCalledWith({ + nodeId: "mixer-1", + data: expect.objectContaining({ overlayHeight: 0.33 }), }); }); diff --git 
a/components/canvas/__tests__/use-canvas-connections.test.tsx b/components/canvas/__tests__/use-canvas-connections.test.tsx index 60f9720..2ea66be 100644 --- a/components/canvas/__tests__/use-canvas-connections.test.tsx +++ b/components/canvas/__tests__/use-canvas-connections.test.tsx @@ -218,8 +218,10 @@ describe("useCanvasConnections", () => { defaultData: { blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, }, }), ); @@ -232,8 +234,10 @@ describe("useCanvasConnections", () => { data: { blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, }, }), ); diff --git a/components/canvas/nodes/compare-node.tsx b/components/canvas/nodes/compare-node.tsx index 9731916..eeff63b 100644 --- a/components/canvas/nodes/compare-node.tsx +++ b/components/canvas/nodes/compare-node.tsx @@ -1,6 +1,6 @@ "use client"; -import { useCallback, useMemo, useRef, useState } from "react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Position, type NodeProps } from "@xyflow/react"; import { ImageIcon } from "lucide-react"; import BaseNodeWrapper from "./base-node-wrapper"; @@ -36,12 +36,18 @@ type CompareSideState = { type CompareDisplayMode = "render" | "preview"; -export default function CompareNode({ id, data, selected, width }: NodeProps) { +type CompareSurfaceSize = { + width: number; + height: number; +}; + +export default function CompareNode({ id, data, selected, width, height }: NodeProps) { const nodeData = data as CompareNodeData; const graph = useCanvasGraph(); const [sliderX, setSliderX] = useState(50); const [manualDisplayMode, setManualDisplayMode] = useState(null); const containerRef = useRef(null); + const [surfaceSize, setSurfaceSize] = useState(null); const incomingEdges = useMemo( () => graph.incomingEdgesByTarget.get(id) ?? 
[], [graph, id], @@ -74,11 +80,17 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) { graph, }); - if (preview.sourceUrl) { - previewInput = { - sourceUrl: preview.sourceUrl, - steps: preview.steps, - }; + if (preview.sourceUrl || preview.sourceComposition) { + previewInput = preview.sourceComposition + ? { + sourceUrl: null, + sourceComposition: preview.sourceComposition, + steps: preview.steps, + } + : { + sourceUrl: preview.sourceUrl, + steps: preview.steps, + }; const sourceLastUploadedHash = typeof sourceData.lastUploadedHash === "string" @@ -92,6 +104,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) { sourceLastUploadedHash ?? sourceLastRenderedHash; const sourceCurrentHash = resolveRenderPipelineHash({ sourceUrl: preview.sourceUrl, + sourceComposition: preview.sourceComposition, steps: preview.steps, data: sourceData, }); @@ -173,7 +186,60 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) { resolvedSides.right.isStaleRenderOutput; const effectiveDisplayMode = manualDisplayMode ?? (shouldDefaultToPreview ? "preview" : "render"); - const previewNodeWidth = Math.max(240, Math.min(640, Math.round(width ?? 500))); + const fallbackSurfaceWidth = Math.max(240, Math.min(640, Math.round(width ?? 500))); + const fallbackSurfaceHeight = Math.max(180, Math.min(720, Math.round(height ?? 380))); + const previewNodeWidth = Math.max( + 1, + Math.round(surfaceSize?.width ?? fallbackSurfaceWidth), + ); + const previewNodeHeight = Math.max( + 1, + Math.round(surfaceSize?.height ?? 
fallbackSurfaceHeight), + ); + + useEffect(() => { + const surfaceElement = containerRef.current; + if (!surfaceElement) { + return; + } + + const updateSurfaceSize = (nextWidth: number, nextHeight: number) => { + const roundedWidth = Math.max(1, Math.round(nextWidth)); + const roundedHeight = Math.max(1, Math.round(nextHeight)); + + setSurfaceSize((current) => + current?.width === roundedWidth && current?.height === roundedHeight + ? current + : { + width: roundedWidth, + height: roundedHeight, + }, + ); + }; + + const measureSurface = () => { + const rect = surfaceElement.getBoundingClientRect(); + updateSurfaceSize(rect.width, rect.height); + }; + + measureSurface(); + + if (typeof ResizeObserver === "undefined") { + return undefined; + } + + const observer = new ResizeObserver((entries) => { + const entry = entries[0]; + if (!entry) { + return; + } + + updateSurfaceSize(entry.contentRect.width, entry.contentRect.height); + }); + + observer.observe(surfaceElement); + return () => observer.disconnect(); + }, []); const setSliderPercent = useCallback((value: number) => { setSliderX(Math.max(0, Math.min(100, value))); @@ -321,6 +387,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) { previewInput={resolvedSides.right.previewInput} mixerPreviewState={resolvedSides.right.mixerPreviewState} nodeWidth={previewNodeWidth} + nodeHeight={previewNodeHeight} preferPreview={effectiveDisplayMode === "preview"} /> )} @@ -332,6 +399,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) { previewInput={resolvedSides.left.previewInput} mixerPreviewState={resolvedSides.left.mixerPreviewState} nodeWidth={previewNodeWidth} + nodeHeight={previewNodeHeight} clipWidthPercent={sliderX} preferPreview={effectiveDisplayMode === "preview"} /> diff --git a/components/canvas/nodes/compare-surface.tsx b/components/canvas/nodes/compare-surface.tsx index a5411c3..0cb48c0 100644 --- a/components/canvas/nodes/compare-surface.tsx +++ 
b/components/canvas/nodes/compare-surface.tsx @@ -1,5 +1,7 @@ "use client"; +import { useState } from "react"; + import { useCanvasGraph } from "@/components/canvas/canvas-graph-context"; import { usePipelinePreview } from "@/hooks/use-pipeline-preview"; import { @@ -7,8 +9,20 @@ import { type RenderPreviewInput, } from "@/lib/canvas-render-preview"; import type { MixerPreviewState } from "@/lib/canvas-mixer-preview"; +import { + computeMixerCompareOverlayImageStyle, + computeMixerFrameRectInSurface, + isMixerCropImageReady, +} from "@/lib/mixer-crop-layout"; const EMPTY_STEPS: RenderPreviewInput["steps"] = []; +const ZERO_SIZE = { width: 0, height: 0 }; + +type LoadedImageState = { + url: string | null; + width: number; + height: number; +}; type CompareSurfaceProps = { finalUrl?: string; @@ -16,6 +30,7 @@ type CompareSurfaceProps = { previewInput?: RenderPreviewInput; mixerPreviewState?: MixerPreviewState; nodeWidth: number; + nodeHeight: number; clipWidthPercent?: number; preferPreview?: boolean; }; @@ -26,12 +41,22 @@ export default function CompareSurface({ previewInput, mixerPreviewState, nodeWidth, + nodeHeight, clipWidthPercent, preferPreview, }: CompareSurfaceProps) { const graph = useCanvasGraph(); + const [baseImageState, setBaseImageState] = useState({ + url: null, + ...ZERO_SIZE, + }); + const [overlayImageState, setOverlayImageState] = useState({ + url: null, + ...ZERO_SIZE, + }); const usePreview = Boolean(previewInput && (preferPreview || !finalUrl)); const previewSourceUrl = usePreview ? previewInput?.sourceUrl ?? null : null; + const previewSourceComposition = usePreview ? previewInput?.sourceComposition : undefined; const previewSteps = usePreview ? previewInput?.steps ?? EMPTY_STEPS : EMPTY_STEPS; const visibleFinalUrl = usePreview ? 
undefined : finalUrl; const previewDebounceMs = shouldFastPathPreviewPipeline( @@ -43,6 +68,7 @@ export default function CompareSurface({ const { canvasRef, isRendering, error } = usePipelinePreview({ sourceUrl: previewSourceUrl, + sourceComposition: previewSourceComposition, steps: previewSteps, nodeWidth, includeHistogram: false, @@ -64,6 +90,35 @@ export default function CompareSurface({ } : undefined; + const baseNaturalSize = + mixerPreviewState?.baseUrl && mixerPreviewState.baseUrl === baseImageState.url + ? { width: baseImageState.width, height: baseImageState.height } + : ZERO_SIZE; + const overlayNaturalSize = + mixerPreviewState?.overlayUrl && mixerPreviewState.overlayUrl === overlayImageState.url + ? { width: overlayImageState.width, height: overlayImageState.height } + : ZERO_SIZE; + + const mixerCropReady = isMixerCropImageReady({ + currentOverlayUrl: mixerPreviewState?.overlayUrl, + loadedOverlayUrl: overlayImageState.url, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + }); + const mixerFrameRect = hasMixerPreview + ? computeMixerFrameRectInSurface({ + surfaceWidth: nodeWidth, + surfaceHeight: nodeHeight, + baseWidth: baseNaturalSize.width, + baseHeight: baseNaturalSize.height, + overlayX: mixerPreviewState.overlayX, + overlayY: mixerPreviewState.overlayY, + overlayWidth: mixerPreviewState.overlayWidth, + overlayHeight: mixerPreviewState.overlayHeight, + fit: "contain", + }) + : null; + return (
{visibleFinalUrl ? ( @@ -87,19 +142,62 @@ export default function CompareSurface({ alt={label ?? "Comparison image"} className="absolute inset-0 h-full w-full object-contain" draggable={false} - /> - {/* eslint-disable-next-line @next/next/no-img-element */} - {label { + setBaseImageState({ + url: event.currentTarget.currentSrc || event.currentTarget.src, + width: event.currentTarget.naturalWidth, + height: event.currentTarget.naturalHeight, + }); }} /> + {mixerFrameRect ? ( +
+ {/* eslint-disable-next-line @next/next/no-img-element */} + {label { + setOverlayImageState({ + url: event.currentTarget.currentSrc || event.currentTarget.src, + width: event.currentTarget.naturalWidth, + height: event.currentTarget.naturalHeight, + }); + }} + style={ + mixerCropReady + ? computeMixerCompareOverlayImageStyle({ + surfaceWidth: nodeWidth, + surfaceHeight: nodeHeight, + baseWidth: baseNaturalSize.width, + baseHeight: baseNaturalSize.height, + overlayX: mixerPreviewState.overlayX, + overlayY: mixerPreviewState.overlayY, + overlayWidth: mixerPreviewState.overlayWidth, + overlayHeight: mixerPreviewState.overlayHeight, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + cropLeft: mixerPreviewState.cropLeft, + cropTop: mixerPreviewState.cropTop, + cropRight: mixerPreviewState.cropRight, + cropBottom: mixerPreviewState.cropBottom, + }) + : { visibility: "hidden" } + } + /> +
+ ) : null} ) : null} diff --git a/components/canvas/nodes/mixer-node.tsx b/components/canvas/nodes/mixer-node.tsx index 907daf9..59d1994 100644 --- a/components/canvas/nodes/mixer-node.tsx +++ b/components/canvas/nodes/mixer-node.tsx @@ -1,9 +1,18 @@ "use client"; -import { useMemo, useState, type ChangeEvent, type FormEvent } from "react"; +import { + useEffect, + useMemo, + useRef, + useState, + type ChangeEvent, + type FormEvent, + type MouseEvent as ReactMouseEvent, +} from "react"; import { Position, type NodeProps } from "@xyflow/react"; import BaseNodeWrapper from "./base-node-wrapper"; +import { useNodeLocalData } from "./use-node-local-data"; import { useCanvasGraph } from "@/components/canvas/canvas-graph-context"; import { useCanvasSync } from "@/components/canvas/canvas-sync-context"; import { @@ -13,47 +22,1130 @@ import { } from "@/lib/canvas-mixer-preview"; import type { Id } from "@/convex/_generated/dataModel"; import CanvasHandle from "@/components/canvas/canvas-handle"; +import { computeMixerFrameRectInSurface } from "@/lib/mixer-crop-layout"; const BLEND_MODE_OPTIONS: MixerBlendMode[] = ["normal", "multiply", "screen", "overlay"]; +const MIN_OVERLAY_SIZE = 0.1; +const MIN_CROP_REMAINING_SIZE = 0.1; +const MAX_OVERLAY_POSITION = 1; +const SAVE_DELAY_MS = 160; +const MIXER_DIAGNOSTICS_ENABLED = + process.env.NODE_ENV !== "test" && process.env.NEXT_PUBLIC_MIXER_DIAGNOSTICS === "1"; -export default function MixerNode({ id, data, selected }: NodeProps) { +type MixerLocalData = ReturnType; +type ResizeCorner = "nw" | "ne" | "sw" | "se"; +type CropHandle = ResizeCorner | "n" | "e" | "s" | "w"; + +type InteractionState = + | { + kind: "frame-move"; + startClientX: number; + startClientY: number; + startData: MixerLocalData; + previewWidth: number; + previewHeight: number; + } + | { + kind: "frame-resize"; + corner: ResizeCorner; + startClientX: number; + startClientY: number; + startData: MixerLocalData; + previewWidth: number; + previewHeight: number; 
+ } + | { + kind: "content-resize"; + corner: CropHandle; + startClientX: number; + startClientY: number; + startData: MixerLocalData; + previewWidth: number; + previewHeight: number; + } + | { + kind: "content-move"; + startClientX: number; + startClientY: number; + startData: MixerLocalData; + previewWidth: number; + previewHeight: number; + }; + +type LoadedImageSize = { + url: string | null; + width: number; + height: number; +}; + +type PreviewSurfaceSize = { + width: number; + height: number; +}; + +const ZERO_SURFACE_SIZE: PreviewSurfaceSize = { width: 0, height: 0 }; + +function clamp(value: number, min: number, max: number): number { + return Math.max(min, Math.min(max, value)); +} + +function computeAspectRatio(width: number, height: number): number | null { + if (width <= 0 || height <= 0) { + return null; + } + + const ratio = width / height; + return Number.isFinite(ratio) ? ratio : null; +} + +function resolveDisplayedRectAspectRatio(args: { + rect: { width: number; height: number } | null; + surfaceWidth: number; + surfaceHeight: number; + fallback: number; +}): number { + if (args.rect && args.rect.width > 0 && args.rect.height > 0) { + const ratio = computeAspectRatio( + args.rect.width * args.surfaceWidth, + args.rect.height * args.surfaceHeight, + ); + if (ratio) { + return ratio; + } + } + + return args.fallback; +} + +function readPositiveNumber(value: unknown): number | null { + return typeof value === "number" && Number.isFinite(value) && value > 0 ? value : null; +} + +function resolveSourceImageSize(data: unknown): PreviewSurfaceSize { + const record = (data ?? {}) as Record; + const width = + readPositiveNumber(record.intrinsicWidth) ?? + readPositiveNumber(record.outputWidth) ?? + readPositiveNumber(record.width); + const height = + readPositiveNumber(record.intrinsicHeight) ?? + readPositiveNumber(record.outputHeight) ?? 
+ readPositiveNumber(record.height); + + if (!width || !height) { + return ZERO_SURFACE_SIZE; + } + + return { width, height }; +} + +function roundDiagnosticNumber(value: number | null): number | null { + if (value === null || !Number.isFinite(value)) { + return null; + } + + return Math.round(value * 1000) / 1000; +} + +function diffMixerData(before: MixerLocalData, after: MixerLocalData) { + const keys: Array = [ + "blendMode", + "opacity", + "overlayX", + "overlayY", + "overlayWidth", + "overlayHeight", + "cropLeft", + "cropTop", + "cropRight", + "cropBottom", + ]; + + return keys.reduce>((acc, key) => { + if (before[key] !== after[key]) { + acc[key] = { + before: before[key], + after: after[key], + }; + } + return acc; + }, {}); +} + +function computeContainRect(args: { + sourceWidth: number; + sourceHeight: number; + boundsX: number; + boundsY: number; + boundsWidth: number; + boundsHeight: number; +}): { x: number; y: number; width: number; height: number } { + const { sourceWidth, sourceHeight, boundsX, boundsY, boundsWidth, boundsHeight } = args; + + if (sourceWidth <= 0 || sourceHeight <= 0 || boundsWidth <= 0 || boundsHeight <= 0) { + return { + x: boundsX, + y: boundsY, + width: boundsWidth, + height: boundsHeight, + }; + } + + const scale = Math.min(boundsWidth / sourceWidth, boundsHeight / sourceHeight); + if (!Number.isFinite(scale) || scale <= 0) { + return { + x: boundsX, + y: boundsY, + width: boundsWidth, + height: boundsHeight, + }; + } + + const width = sourceWidth * scale; + const height = sourceHeight * scale; + + return { + x: boundsX + (boundsWidth - width) / 2, + y: boundsY + (boundsHeight - height) / 2, + width, + height, + }; +} + +function computeCropImageStyle(args: { + frameAspectRatio: number; + sourceWidth: number; + sourceHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}) { + const safeWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE); + const safeHeight 
= Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE); + const visibleRect = computeVisibleContentRect({ + frameAspectRatio: args.frameAspectRatio, + sourceWidth: args.sourceWidth, + sourceHeight: args.sourceHeight, + cropLeft: args.cropLeft, + cropTop: args.cropTop, + cropRight: args.cropRight, + cropBottom: args.cropBottom, + }); + + if (!visibleRect) { + return { + left: `${(-args.cropLeft / safeWidth) * 100}%`, + top: `${(-args.cropTop / safeHeight) * 100}%`, + width: `${(1 / safeWidth) * 100}%`, + height: `${(1 / safeHeight) * 100}%`, + } as const; + } + + const imageWidth = visibleRect.width / safeWidth; + const imageHeight = visibleRect.height / safeHeight; + + return { + left: `${(visibleRect.x - (args.cropLeft / safeWidth) * visibleRect.width) * 100}%`, + top: `${(visibleRect.y - (args.cropTop / safeHeight) * visibleRect.height) * 100}%`, + width: `${imageWidth * 100}%`, + height: `${imageHeight * 100}%`, + } as const; +} + +function computeVisibleContentRect(args: { + frameAspectRatio: number; + sourceWidth: number; + sourceHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}) { + if (args.sourceWidth <= 0 || args.sourceHeight <= 0) { + return null; + } + + const cropWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE); + const cropHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE); + const frameAspectRatio = args.frameAspectRatio > 0 ? 
args.frameAspectRatio : 1; + + const rect = computeContainRect({ + sourceWidth: args.sourceWidth * cropWidth, + sourceHeight: args.sourceHeight * cropHeight, + boundsX: 0, + boundsY: 0, + boundsWidth: frameAspectRatio, + boundsHeight: 1, + }); + + return { + x: rect.x / frameAspectRatio, + y: rect.y, + width: rect.width / frameAspectRatio, + height: rect.height, + }; +} + +function cropRectFromData(data: Pick< + MixerLocalData, + "cropLeft" | "cropTop" | "cropRight" | "cropBottom" +>) { + return { + x: data.cropLeft, + y: data.cropTop, + width: 1 - data.cropLeft - data.cropRight, + height: 1 - data.cropTop - data.cropBottom, + }; +} + +function cropEdgesFromRect(rect: { x: number; y: number; width: number; height: number }) { + return { + cropLeft: rect.x, + cropTop: rect.y, + cropRight: 1 - (rect.x + rect.width), + cropBottom: 1 - (rect.y + rect.height), + }; +} + +function normalizeLocalMixerData(data: MixerLocalData): MixerLocalData { + const overlayX = clamp(data.overlayX, 0, MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE); + const overlayY = clamp(data.overlayY, 0, MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE); + const overlayWidth = clamp(data.overlayWidth, MIN_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX); + const overlayHeight = clamp(data.overlayHeight, MIN_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY); + const cropLeft = clamp(data.cropLeft, 0, MAX_OVERLAY_POSITION - MIN_CROP_REMAINING_SIZE); + const cropTop = clamp(data.cropTop, 0, MAX_OVERLAY_POSITION - MIN_CROP_REMAINING_SIZE); + const cropRight = clamp(data.cropRight, 0, MAX_OVERLAY_POSITION - cropLeft - MIN_CROP_REMAINING_SIZE); + const cropBottom = clamp(data.cropBottom, 0, MAX_OVERLAY_POSITION - cropTop - MIN_CROP_REMAINING_SIZE); + + return { + ...data, + overlayX, + overlayY, + overlayWidth, + overlayHeight, + cropLeft, + cropTop, + cropRight, + cropBottom, + }; +} + +function computeLockedAspectRect(args: { + x: number; + y: number; + width: number; + height: number; + minSize: number; + corner: 
ResizeCorner; + deltaX: number; + deltaY: number; + aspectRatio?: number; +}) { + const { x, y, width, height, minSize, corner, deltaX, deltaY, aspectRatio } = args; + const lockedAspectRatio = aspectRatio && aspectRatio > 0 ? aspectRatio : width / height; + const lockedHeight = width / lockedAspectRatio; + const anchorX = corner.includes("w") ? x + width : x; + const anchorY = corner.includes("n") ? y + height : y; + const requestedScaleX = (width + (corner.includes("w") ? -deltaX : deltaX)) / width; + const requestedScaleY = + (lockedHeight + (corner.includes("n") ? -deltaY : deltaY)) / lockedHeight; + const dominantScale = + Math.abs(requestedScaleX - 1) >= Math.abs(requestedScaleY - 1) + ? requestedScaleX + : requestedScaleY; + const minScale = Math.max(minSize / width, minSize / lockedHeight); + const maxWidth = corner.includes("w") ? anchorX : MAX_OVERLAY_POSITION - x; + const maxHeight = corner.includes("n") ? anchorY : MAX_OVERLAY_POSITION - y; + const maxScale = Math.min(maxWidth / width, maxHeight / lockedHeight); + const scale = clamp(dominantScale, minScale, maxScale); + const nextWidth = width * scale; + const nextHeight = nextWidth / lockedAspectRatio; + + return { + x: corner.includes("w") ? anchorX - nextWidth : x, + y: corner.includes("n") ? 
anchorY - nextHeight : y, + width: nextWidth, + height: nextHeight, + }; +} + +function computeResizeRect(args: { + startData: MixerLocalData; + corner: ResizeCorner; + deltaX: number; + deltaY: number; + aspectRatio?: number; +}): Pick { + const { startData, corner, deltaX, deltaY, aspectRatio } = args; + const nextRect = computeLockedAspectRect({ + x: startData.overlayX, + y: startData.overlayY, + width: startData.overlayWidth, + height: startData.overlayHeight, + minSize: MIN_OVERLAY_SIZE, + corner, + deltaX, + deltaY, + aspectRatio, + }); + + return normalizeLocalMixerData({ + ...startData, + overlayX: nextRect.x, + overlayY: nextRect.y, + overlayWidth: nextRect.width, + overlayHeight: nextRect.height, + }); +} + +function computeContentResizeRect(args: { + startData: MixerLocalData; + corner: CropHandle; + deltaX: number; + deltaY: number; +}): Pick { + const { startData, corner, deltaX, deltaY } = args; + const startRect = cropRectFromData(startData); + const startRight = startRect.x + startRect.width; + const startBottom = startRect.y + startRect.height; + + let nextX = startRect.x; + let nextY = startRect.y; + let nextWidth = startRect.width; + let nextHeight = startRect.height; + + if (corner.includes("w")) { + nextX = clamp(startRect.x + deltaX, 0, startRight - MIN_CROP_REMAINING_SIZE); + nextWidth = startRight - nextX; + } + + if (corner.includes("e")) { + nextWidth = clamp(startRect.width + deltaX, MIN_CROP_REMAINING_SIZE, 1 - startRect.x); + } + + if (corner.includes("n")) { + nextY = clamp(startRect.y + deltaY, 0, startBottom - MIN_CROP_REMAINING_SIZE); + nextHeight = startBottom - nextY; + } + + if (corner.includes("s")) { + nextHeight = clamp(startRect.height + deltaY, MIN_CROP_REMAINING_SIZE, 1 - startRect.y); + } + + return normalizeLocalMixerData({ + ...startData, + ...cropEdgesFromRect({ + x: nextX, + y: nextY, + width: nextWidth, + height: nextHeight, + }), + }); +} + +export default function MixerNode({ id, data, selected, width, height }: 
NodeProps) { const graph = useCanvasGraph(); const { queueNodeDataUpdate } = useCanvasSync(); + const previewRef = useRef(null); + const overlayImageRef = useRef(null); + const latestNodeDataRef = useRef((data ?? {}) as Record); const [hasImageLoadError, setHasImageLoadError] = useState(false); + const [interaction, setInteraction] = useState(null); + const [isContentFramingMode, setIsContentFramingMode] = useState(false); + const [baseImageSize, setBaseImageSize] = useState({ + url: null, + width: 0, + height: 0, + }); + const [overlayImageSize, setOverlayImageSize] = useState({ + url: null, + width: 0, + height: 0, + }); + const [previewSurfaceSize, setPreviewSurfaceSize] = useState(ZERO_SURFACE_SIZE); + + useEffect(() => { + latestNodeDataRef.current = (data ?? {}) as Record; + }, [data]); + + const { localData, updateLocalData } = useNodeLocalData({ + nodeId: id, + data, + normalize: normalizeMixerPreviewData, + saveDelayMs: SAVE_DELAY_MS, + onSave: (next) => + queueNodeDataUpdate({ + nodeId: id as Id<"nodes">, + data: { + ...latestNodeDataRef.current, + ...next, + }, + }), + debugLabel: "mixer", + }); - const normalizedData = useMemo(() => normalizeMixerPreviewData(data), [data]); const previewState = useMemo( () => resolveMixerPreviewFromGraph({ nodeId: id, graph }), [graph, id], ); + const baseSourceNode = useMemo(() => { + const incomingEdges = graph.incomingEdgesByTarget.get(id) ?? []; + const baseEdge = incomingEdges.find( + (edge) => edge.targetHandle === "base" || edge.targetHandle == null || edge.targetHandle === "", + ); - const currentData = (data ?? {}) as Record; + return baseEdge ? graph.nodesById.get(baseEdge.source) : undefined; + }, [graph, id]); + const baseSourceSize = useMemo( + () => resolveSourceImageSize(baseSourceNode?.data), + [baseSourceNode?.data], + ); + const overlayImageUrl = previewState.status === "ready" ? previewState.overlayUrl : null; + const baseImageUrl = previewState.status === "ready" ? 
previewState.baseUrl : null; - const updateData = (patch: Partial>) => { - void queueNodeDataUpdate({ - nodeId: id as Id<"nodes">, - data: { - ...currentData, - ...patch, - }, + useEffect(() => { + const previewElement = previewRef.current; + if (!previewElement) { + return; + } + + const updatePreviewSurfaceSize = (nextWidth: number, nextHeight: number) => { + setPreviewSurfaceSize((current) => + current.width === nextWidth && current.height === nextHeight + ? current + : { width: nextWidth, height: nextHeight }, + ); + }; + + const measurePreview = () => { + const rect = previewElement.getBoundingClientRect(); + updatePreviewSurfaceSize(rect.width, rect.height); + }; + + measurePreview(); + + if (typeof ResizeObserver === "undefined") { + return undefined; + } + + const observer = new ResizeObserver((entries) => { + const entry = entries[0]; + if (!entry) { + return; + } + + updatePreviewSurfaceSize(entry.contentRect.width, entry.contentRect.height); + }); + + observer.observe(previewElement); + return () => observer.disconnect(); + }, []); + + const overlayNaturalSize = + overlayImageUrl && overlayImageUrl === overlayImageSize.url + ? { + width: overlayImageSize.width, + height: overlayImageSize.height, + } + : { width: 0, height: 0 }; + const baseNaturalSize = + baseImageUrl && baseImageUrl === baseImageSize.url + ? { + width: baseImageSize.width, + height: baseImageSize.height, + } + : baseSourceSize; + + const emitMixerDiagnostics = (reason: string, extra?: Record) => { + if (!MIXER_DIAGNOSTICS_ENABLED) { + return; + } + + const previewRect = previewRef.current?.getBoundingClientRect(); + const overlayImage = overlayImageRef.current; + + const frameRect = previewRect + ? 
{ + x: localData.overlayX * previewRect.width, + y: localData.overlayY * previewRect.height, + width: localData.overlayWidth * previewRect.width, + height: localData.overlayHeight * previewRect.height, + } + : null; + + const cropRect = cropRectFromData(localData); + const contentBoundsRect = frameRect + ? { + x: frameRect.x + cropRect.x * frameRect.width, + y: frameRect.y + cropRect.y * frameRect.height, + width: cropRect.width * frameRect.width, + height: cropRect.height * frameRect.height, + } + : null; + + const visibleContentRect = + contentBoundsRect && overlayImage + ? computeContainRect({ + sourceWidth: overlayImage.naturalWidth, + sourceHeight: overlayImage.naturalHeight, + boundsX: contentBoundsRect.x, + boundsY: contentBoundsRect.y, + boundsWidth: contentBoundsRect.width, + boundsHeight: contentBoundsRect.height, + }) + : null; + + const frameAspectRatio = frameRect + ? computeAspectRatio(frameRect.width, frameRect.height) + : null; + const contentBoundsAspectRatio = contentBoundsRect + ? computeAspectRatio(contentBoundsRect.width, contentBoundsRect.height) + : null; + const visibleContentAspectRatio = visibleContentRect + ? computeAspectRatio(visibleContentRect.width, visibleContentRect.height) + : null; + + const currentHandleRect = + isContentFramingMode && visibleContentRect + ? { + x: visibleContentRect.x, + y: visibleContentRect.y, + width: visibleContentRect.width, + height: visibleContentRect.height, + } + : frameRect; + + const handleOffsetFromVisibleContent = + currentHandleRect && visibleContentRect + ? { + x: roundDiagnosticNumber(currentHandleRect.x - visibleContentRect.x), + y: roundDiagnosticNumber(currentHandleRect.y - visibleContentRect.y), + width: roundDiagnosticNumber(currentHandleRect.width - visibleContentRect.width), + height: roundDiagnosticNumber(currentHandleRect.height - visibleContentRect.height), + } + : null; + + console.debug("[mixer-diagnostics]", { + nodeId: id, + reason, + mode: isContentFramingMode ? 
"content-framing" : "frame-resize", + intent: isContentFramingMode + ? "crop should change visible area without changing displayed image size" + : "resize should change displayed image size without changing aspect ratio", + currentHandleAnchorSource: "frame", + expectedHandleAnchorSource: "frame", + interactionKind: interaction?.kind ?? null, + previewRect, + frameRect, + frameAspectRatio: roundDiagnosticNumber(frameAspectRatio), + contentBoundsRect, + contentBoundsAspectRatio: roundDiagnosticNumber(contentBoundsAspectRatio), + visibleContentRect, + visibleContentAspectRatio: roundDiagnosticNumber(visibleContentAspectRatio), + currentHandleRect, + handleOffsetFromVisibleContent, + overlayNaturalSize: overlayImage + ? { + width: overlayImage.naturalWidth, + height: overlayImage.naturalHeight, + } + : null, + localData, + ...extra, }); }; + useEffect(() => { + emitMixerDiagnostics("mode-or-geometry-changed"); + }, [ + isContentFramingMode, + localData.overlayX, + localData.overlayY, + localData.overlayWidth, + localData.overlayHeight, + localData.cropLeft, + localData.cropTop, + localData.cropRight, + localData.cropBottom, + ]); + const onBlendModeChange = (event: ChangeEvent) => { setHasImageLoadError(false); - updateData({ blendMode: event.target.value as MixerBlendMode }); + updateLocalData((current) => ({ + ...current, + blendMode: event.target.value as MixerBlendMode, + })); }; - const onNumberChange = (field: "opacity" | "offsetX" | "offsetY") => ( - event: FormEvent, - ) => { + const onNumberChange = ( + field: + | "opacity" + | "overlayX" + | "overlayY" + | "overlayWidth" + | "overlayHeight" + | "cropLeft" + | "cropTop" + | "cropRight" + | "cropBottom", + ) => + (event: FormEvent) => { setHasImageLoadError(false); const nextValue = Number(event.currentTarget.value); - updateData({ [field]: Number.isFinite(nextValue) ? 
nextValue : 0 }); + + updateLocalData((current) => { + if (!Number.isFinite(nextValue)) { + return current; + } + + if (field === "opacity") { + return { + ...current, + opacity: clamp(nextValue, 0, 100), + }; + } + + return normalizeLocalMixerData({ + ...current, + [field]: nextValue, + }); + }); + }; + + const startInteraction = ( + event: ReactMouseEvent, + kind: InteractionState["kind"], + corner?: CropHandle, + ) => { + event.preventDefault(); + event.stopPropagation(); + + const previewRect = previewRef.current?.getBoundingClientRect(); + if (!previewRect || previewRect.width <= 0 || previewRect.height <= 0) { + return; + } + + if ( + (kind === "content-move" || kind === "content-resize") && + (overlayNaturalSize.width <= 0 || overlayNaturalSize.height <= 0) + ) { + return; + } + + emitMixerDiagnostics("interaction-start", { + requestedInteractionKind: kind, + resizeCorner: corner ?? null, + target: event.target instanceof HTMLElement ? event.target.dataset : null, + currentTarget: event.currentTarget.dataset, + currentTargetClassName: + event.currentTarget instanceof HTMLElement ? event.currentTarget.className : null, + pointer: { + clientX: event.clientX, + clientY: event.clientY, + }, + }); + + const activeGeometryRect = + kind === "content-move" || kind === "content-resize" + ? displayedOverlayFrameRect ?? { + x: localData.overlayX, + y: localData.overlayY, + width: localData.overlayWidth, + height: localData.overlayHeight, + } + : displayedBaseRect; + const activeGeometryWidth = + (activeGeometryRect?.width ?? 1) * previewRect.width || previewRect.width; + const activeGeometryHeight = + (activeGeometryRect?.height ?? 1) * previewRect.height || previewRect.height; + + setInteraction({ + kind, + corner: + kind === "frame-resize" || kind === "content-resize" + ? 
(corner as ResizeCorner) + : undefined, + startClientX: event.clientX, + startClientY: event.clientY, + startData: localData, + previewWidth: activeGeometryWidth, + previewHeight: activeGeometryHeight, + } as InteractionState); }; + useEffect(() => { + if (!interaction) { + return; + } + + const handleMouseMove = (event: MouseEvent) => { + const pointerDeltaX = event.clientX - interaction.startClientX; + const pointerDeltaY = event.clientY - interaction.startClientY; + const deltaX = pointerDeltaX / interaction.previewWidth; + const deltaY = pointerDeltaY / interaction.previewHeight; + + const emitInteractionMoveDiagnostics = ( + nextData: MixerLocalData, + extra?: Record, + ) => { + const changedFields = diffMixerData(interaction.startData, nextData); + const beforeCropRect = cropRectFromData(interaction.startData); + const afterCropRect = cropRectFromData(nextData); + emitMixerDiagnostics("interaction-move", { + requestedInteractionKind: interaction.kind, + resizeCorner: interaction.kind === "frame-resize" || interaction.kind === "content-resize" + ? 
interaction.corner + : null, + pointer: { + clientX: event.clientX, + clientY: event.clientY, + }, + pointerDeltaPx: { + x: roundDiagnosticNumber(pointerDeltaX), + y: roundDiagnosticNumber(pointerDeltaY), + }, + deltaInPreviewSpace: { + x: roundDiagnosticNumber(deltaX), + y: roundDiagnosticNumber(deltaY), + }, + changedFields, + beforeAspectRatio: { + overlay: roundDiagnosticNumber( + computeAspectRatio( + interaction.startData.overlayWidth, + interaction.startData.overlayHeight, + ), + ), + content: roundDiagnosticNumber( + computeAspectRatio( + beforeCropRect.width, + beforeCropRect.height, + ), + ), + }, + afterAspectRatio: { + overlay: roundDiagnosticNumber(computeAspectRatio(nextData.overlayWidth, nextData.overlayHeight)), + content: roundDiagnosticNumber(computeAspectRatio(afterCropRect.width, afterCropRect.height)), + }, + semanticChecks: { + resizeChangedOverlayAspectRatio: + interaction.kind === "frame-resize" + ? interaction.startData.overlayWidth / interaction.startData.overlayHeight !== + nextData.overlayWidth / nextData.overlayHeight + : null, + cropChangedOverlaySize: + interaction.kind === "content-move" || interaction.kind === "content-resize" + ? interaction.startData.overlayWidth !== nextData.overlayWidth || + interaction.startData.overlayHeight !== nextData.overlayHeight + : null, + cropChangedContentSize: + interaction.kind === "content-resize" + ? 
beforeCropRect.width !== afterCropRect.width || + beforeCropRect.height !== afterCropRect.height + : null, + }, + ...extra, + }); + }; + + if (interaction.kind === "frame-move") { + const nextX = clamp( + interaction.startData.overlayX + deltaX, + 0, + MAX_OVERLAY_POSITION - interaction.startData.overlayWidth, + ); + const nextY = clamp( + interaction.startData.overlayY + deltaY, + 0, + MAX_OVERLAY_POSITION - interaction.startData.overlayHeight, + ); + + const nextData = { + ...interaction.startData, + overlayX: nextX, + overlayY: nextY, + }; + + emitInteractionMoveDiagnostics(nextData, { + deltaInFrameSpace: { + x: roundDiagnosticNumber(deltaX), + y: roundDiagnosticNumber(deltaY), + }, + }); + + updateLocalData((current) => ({ + ...current, + overlayX: nextX, + overlayY: nextY, + })); + return; + } + + if (interaction.kind === "content-move") { + const startCropRect = cropRectFromData(interaction.startData); + const visibleRect = computeVisibleContentRect({ + frameAspectRatio: + interaction.previewWidth > 0 && interaction.previewHeight > 0 + ? interaction.previewWidth / interaction.previewHeight + : 1, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + cropLeft: interaction.startData.cropLeft, + cropTop: interaction.startData.cropTop, + cropRight: interaction.startData.cropRight, + cropBottom: interaction.startData.cropBottom, + }); + const contentDeltaX = + (pointerDeltaX / + (interaction.previewWidth * (visibleRect?.width ?? 1))) * + startCropRect.width; + const contentDeltaY = + (pointerDeltaY / + (interaction.previewHeight * (visibleRect?.height ?? 
1))) * + startCropRect.height; + + const nextX = clamp( + startCropRect.x + contentDeltaX, + 0, + MAX_OVERLAY_POSITION - startCropRect.width, + ); + const nextY = clamp( + startCropRect.y + contentDeltaY, + 0, + MAX_OVERLAY_POSITION - startCropRect.height, + ); + + const nextData = { + ...interaction.startData, + ...cropEdgesFromRect({ + x: nextX, + y: nextY, + width: startCropRect.width, + height: startCropRect.height, + }), + }; + + emitInteractionMoveDiagnostics(nextData, { + deltaInFrameSpace: { + x: roundDiagnosticNumber(contentDeltaX), + y: roundDiagnosticNumber(contentDeltaY), + }, + }); + + updateLocalData((current) => ({ + ...current, + ...cropEdgesFromRect({ + x: nextX, + y: nextY, + width: startCropRect.width, + height: startCropRect.height, + }), + })); + return; + } + + if (interaction.kind === "content-resize") { + const startCropRect = cropRectFromData(interaction.startData); + const visibleRect = computeVisibleContentRect({ + frameAspectRatio: + interaction.previewWidth > 0 && interaction.previewHeight > 0 + ? interaction.previewWidth / interaction.previewHeight + : 1, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + cropLeft: interaction.startData.cropLeft, + cropTop: interaction.startData.cropTop, + cropRight: interaction.startData.cropRight, + cropBottom: interaction.startData.cropBottom, + }); + const contentDeltaX = + (pointerDeltaX / + (interaction.previewWidth * (visibleRect?.width ?? 1))) * + startCropRect.width; + const contentDeltaY = + (pointerDeltaY / + (interaction.previewHeight * (visibleRect?.height ?? 
1))) * + startCropRect.height; + + const nextRect = computeContentResizeRect({ + startData: interaction.startData, + corner: interaction.corner, + deltaX: contentDeltaX, + deltaY: contentDeltaY, + }); + + const nextData = { + ...interaction.startData, + ...nextRect, + }; + + emitInteractionMoveDiagnostics(nextData, { + deltaInFrameSpace: { + x: roundDiagnosticNumber(contentDeltaX), + y: roundDiagnosticNumber(contentDeltaY), + }, + }); + + updateLocalData((current) => ({ + ...current, + ...nextRect, + })); + return; + } + + const nextRect = computeResizeRect({ + startData: interaction.startData, + corner: interaction.corner, + deltaX, + deltaY, + aspectRatio: + interaction.startData.overlayWidth > 0 && interaction.startData.overlayHeight > 0 + ? interaction.startData.overlayWidth / interaction.startData.overlayHeight + : undefined, + }); + + emitInteractionMoveDiagnostics( + { + ...interaction.startData, + ...nextRect, + }, + { + deltaInFrameSpace: { + x: roundDiagnosticNumber(deltaX), + y: roundDiagnosticNumber(deltaY), + }, + }, + ); + + updateLocalData((current) => ({ + ...current, + ...nextRect, + })); + }; + + const handleMouseUp = () => { + emitMixerDiagnostics("interaction-end"); + setInteraction(null); + }; + + window.addEventListener("mousemove", handleMouseMove); + window.addEventListener("mouseup", handleMouseUp); + + return () => { + window.removeEventListener("mousemove", handleMouseMove); + window.removeEventListener("mouseup", handleMouseUp); + }; + }, [interaction, updateLocalData]); + const showReadyPreview = previewState.status === "ready" && !hasImageLoadError; const showPreviewError = hasImageLoadError || previewState.status === "error"; + const hasOverlayNaturalSize = overlayNaturalSize.width > 0 && overlayNaturalSize.height > 0; + const effectivePreviewSurfaceWidth = previewSurfaceSize.width || width || 0; + const effectivePreviewSurfaceHeight = previewSurfaceSize.height || height || 0; + const displayedBaseRect = 
computeMixerFrameRectInSurface({ + surfaceWidth: effectivePreviewSurfaceWidth, + surfaceHeight: effectivePreviewSurfaceHeight, + baseWidth: baseNaturalSize.width, + baseHeight: baseNaturalSize.height, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + fit: "cover", + }); + const displayedOverlayFrameRect = computeMixerFrameRectInSurface({ + surfaceWidth: effectivePreviewSurfaceWidth, + surfaceHeight: effectivePreviewSurfaceHeight, + baseWidth: baseNaturalSize.width, + baseHeight: baseNaturalSize.height, + overlayX: localData.overlayX, + overlayY: localData.overlayY, + overlayWidth: localData.overlayWidth, + overlayHeight: localData.overlayHeight, + fit: "cover", + }); + const displayedOverlayFrameAspectRatio = resolveDisplayedRectAspectRatio({ + rect: displayedOverlayFrameRect, + surfaceWidth: effectivePreviewSurfaceWidth, + surfaceHeight: effectivePreviewSurfaceHeight, + fallback: + localData.overlayWidth > 0 && localData.overlayHeight > 0 + ? localData.overlayWidth / localData.overlayHeight + : 1, + }); + + const resizeHandleRect = displayedOverlayFrameRect + ? { + left: displayedOverlayFrameRect.x, + top: displayedOverlayFrameRect.y, + width: displayedOverlayFrameRect.width, + height: displayedOverlayFrameRect.height, + } + : { + left: localData.overlayX, + top: localData.overlayY, + width: localData.overlayWidth, + height: localData.overlayHeight, + }; + const visibleContentRect = + computeVisibleContentRect({ + frameAspectRatio: displayedOverlayFrameAspectRatio, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + cropLeft: localData.cropLeft, + cropTop: localData.cropTop, + cropRight: localData.cropRight, + cropBottom: localData.cropBottom, + }) ?? 
{ x: 0, y: 0, width: 1, height: 1 }; + const cropHandleRect = { + left: resizeHandleRect.left + resizeHandleRect.width * visibleContentRect.x, + top: resizeHandleRect.top + resizeHandleRect.height * visibleContentRect.y, + width: resizeHandleRect.width * visibleContentRect.width, + height: resizeHandleRect.height * visibleContentRect.height, + }; + + const overlayFrameStyle = { + mixBlendMode: localData.blendMode, + opacity: localData.opacity / 100, + left: `${(displayedOverlayFrameRect?.x ?? localData.overlayX) * 100}%`, + top: `${(displayedOverlayFrameRect?.y ?? localData.overlayY) * 100}%`, + width: `${(displayedOverlayFrameRect?.width ?? localData.overlayWidth) * 100}%`, + height: `${(displayedOverlayFrameRect?.height ?? localData.overlayHeight) * 100}%`, + } as const; + + const overlayContentStyle = computeCropImageStyle({ + frameAspectRatio: displayedOverlayFrameAspectRatio, + sourceWidth: overlayNaturalSize.width, + sourceHeight: overlayNaturalSize.height, + cropLeft: localData.cropLeft, + cropTop: localData.cropTop, + cropRight: localData.cropRight, + cropBottom: localData.cropBottom, + }); + const cropBoxStyle = { + left: `${visibleContentRect.x * 100}%`, + top: `${visibleContentRect.y * 100}%`, + width: `${visibleContentRect.width * 100}%`, + height: `${visibleContentRect.height * 100}%`, + } as const; + + const frameResizeHandles = [ + { corner: "nw", cursor: "nwse-resize" }, + { corner: "ne", cursor: "nesw-resize" }, + { corner: "sw", cursor: "nesw-resize" }, + { corner: "se", cursor: "nwse-resize" }, + ] as const; + const cropHandles = [ + { corner: "nw", cursor: "nwse-resize" }, + { corner: "n", cursor: "ns-resize" }, + { corner: "ne", cursor: "nesw-resize" }, + { corner: "e", cursor: "ew-resize" }, + { corner: "se", cursor: "nwse-resize" }, + { corner: "s", cursor: "ns-resize" }, + { corner: "sw", cursor: "nesw-resize" }, + { corner: "w", cursor: "ew-resize" }, + ] as const; return ( @@ -89,30 +1181,136 @@ export default function MixerNode({ id, 
data, selected }: NodeProps) { Mixer
-
+
{showReadyPreview ? ( <> {/* eslint-disable-next-line @next/next/no-img-element */} Mixer base setHasImageLoadError(true)} - /> - {/* eslint-disable-next-line @next/next/no-img-element */} - Mixer overlay setHasImageLoadError(true)} - style={{ - mixBlendMode: previewState.blendMode, - opacity: previewState.opacity / 100, - transform: `translate(${previewState.offsetX}px, ${previewState.offsetY}px)`, + onLoad={(event) => { + setBaseImageSize({ + url: event.currentTarget.currentSrc || event.currentTarget.src, + width: event.currentTarget.naturalWidth, + height: event.currentTarget.naturalHeight, + }); }} + onError={() => setHasImageLoadError(true)} + style={ + displayedBaseRect + ? { + left: `${displayedBaseRect.x * 100}%`, + top: `${displayedBaseRect.y * 100}%`, + width: `${displayedBaseRect.width * 100}%`, + height: `${displayedBaseRect.height * 100}%`, + } + : undefined + } /> +
{ + if (isContentFramingMode) { + return; + } + startInteraction(event, "frame-move"); + }} + style={overlayFrameStyle} + > + {/* eslint-disable-next-line @next/next/no-img-element */} + Mixer overlay { + setOverlayImageSize({ + url: event.currentTarget.currentSrc || event.currentTarget.src, + width: event.currentTarget.naturalWidth, + height: event.currentTarget.naturalHeight, + }); + emitMixerDiagnostics("overlay-image-loaded"); + }} + onError={() => setHasImageLoadError(true)} + style={overlayContentStyle} + /> + + {isContentFramingMode && hasOverlayNaturalSize ? ( +
startInteraction(event, "content-move")} + style={cropBoxStyle} + /> + ) : null} +
+ + {((isContentFramingMode && hasOverlayNaturalSize) ? cropHandles : frameResizeHandles).map(({ corner, cursor }) => ( +
{ + emitMixerDiagnostics("resize-handle-mousedown", { + resizeCorner: corner, + requestedInteractionKind: + (isContentFramingMode && hasOverlayNaturalSize) ? "content-resize" : "frame-resize", + }); + startInteraction( + event, + (isContentFramingMode && hasOverlayNaturalSize) ? "content-resize" : "frame-resize", + corner, + ); + }} + style={{ + left: `${( + corner.includes("w") + ? ((isContentFramingMode && hasOverlayNaturalSize) ? cropHandleRect.left : resizeHandleRect.left) + : corner.includes("e") + ? ((isContentFramingMode && hasOverlayNaturalSize) + ? cropHandleRect.left + cropHandleRect.width + : resizeHandleRect.left + resizeHandleRect.width) + : ((isContentFramingMode && hasOverlayNaturalSize) + ? cropHandleRect.left + cropHandleRect.width / 2 + : resizeHandleRect.left + resizeHandleRect.width / 2) + ) * 100}%`, + top: `${( + corner.includes("n") + ? ((isContentFramingMode && hasOverlayNaturalSize) ? cropHandleRect.top : resizeHandleRect.top) + : corner.includes("s") + ? ((isContentFramingMode && hasOverlayNaturalSize) + ? cropHandleRect.top + cropHandleRect.height + : resizeHandleRect.top + resizeHandleRect.height) + : ((isContentFramingMode && hasOverlayNaturalSize) + ? cropHandleRect.top + cropHandleRect.height / 2 + : resizeHandleRect.top + resizeHandleRect.height / 2) + ) * 100}%`, + transform: "translate(-50%, -50%)", + cursor, + }} + /> + ))} ) : null} @@ -136,13 +1334,31 @@ export default function MixerNode({ id, data, selected }: NodeProps) {
+ + -
diff --git a/components/canvas/nodes/render-node.tsx b/components/canvas/nodes/render-node.tsx index 732e038..4ff835f 100644 --- a/components/canvas/nodes/render-node.tsx +++ b/components/canvas/nodes/render-node.tsx @@ -464,11 +464,13 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr ); const sourceUrl = renderPreviewInput.sourceUrl; + const sourceComposition = renderPreviewInput.sourceComposition; useEffect(() => { logRenderDebug("node-data-updated", { nodeId: id, hasSourceUrl: typeof sourceUrl === "string" && sourceUrl.length > 0, + hasSourceComposition: Boolean(sourceComposition), storageId: data.storageId ?? null, lastUploadStorageId: data.lastUploadStorageId ?? null, hasResolvedUrl: typeof data.url === "string" && data.url.length > 0, @@ -485,6 +487,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr data.url, id, sourceUrl, + sourceComposition, ]); const sourceNode = useMemo( @@ -526,9 +529,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr ); const currentPipelineHash = useMemo(() => { - if (!sourceUrl) return null; - return hashPipeline({ sourceUrl, render: renderFingerprint }, steps); - }, [renderFingerprint, sourceUrl, steps]); + if (!sourceUrl && !sourceComposition) return null; + return hashPipeline( + { source: sourceComposition ?? sourceUrl, render: renderFingerprint }, + steps, + ); + }, [renderFingerprint, sourceComposition, sourceUrl, steps]); const isRenderCurrent = Boolean(currentPipelineHash) && localData.lastRenderedHash === currentPipelineHash; @@ -558,7 +564,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr error: "Error", }; - const hasSource = typeof sourceUrl === "string" && sourceUrl.length > 0; + const hasSource = + (typeof sourceUrl === "string" && sourceUrl.length > 0) || Boolean(sourceComposition); const previewNodeWidth = Math.max(260, Math.round(width ?? 
320)); const { @@ -569,6 +576,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr error: previewError, } = usePipelinePreview({ sourceUrl, + sourceComposition, steps, nodeWidth: previewNodeWidth, debounceMs: previewDebounceMs, @@ -586,6 +594,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr error: fullscreenPreviewError, } = usePipelinePreview({ sourceUrl: isFullscreenOpen && sourceUrl ? sourceUrl : null, + sourceComposition: isFullscreenOpen ? sourceComposition : undefined, steps, nodeWidth: fullscreenPreviewWidth, includeHistogram: false, @@ -720,11 +729,12 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr }; const handleRender = async (mode: "download" | "upload") => { - if (!sourceUrl || !currentPipelineHash) { + if ((!sourceUrl && !sourceComposition) || !currentPipelineHash) { logRenderDebug("render-aborted-prerequisites", { nodeId: id, mode, hasSourceUrl: Boolean(sourceUrl), + hasSourceComposition: Boolean(sourceComposition), hasPipelineHash: Boolean(currentPipelineHash), isOffline: status.isOffline, }); @@ -769,7 +779,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr }); const renderResult = await renderFullWithWorkerFallback({ - sourceUrl, + sourceUrl: sourceUrl ?? 
undefined, + sourceComposition, steps, render: { resolution: activeData.outputResolution, diff --git a/components/canvas/nodes/use-node-local-data.ts b/components/canvas/nodes/use-node-local-data.ts index b714fc3..0948312 100644 --- a/components/canvas/nodes/use-node-local-data.ts +++ b/components/canvas/nodes/use-node-local-data.ts @@ -22,6 +22,25 @@ function logNodeDataDebug(event: string, payload: Record): void console.info("[Canvas node debug]", event, payload); } +function diffNodeData( + before: Record, + after: Record, +): Record { + const keys = new Set([...Object.keys(before), ...Object.keys(after)]); + const diff: Record = {}; + + for (const key of keys) { + if (before[key] !== after[key]) { + diff[key] = { + before: before[key], + after: after[key], + }; + } + } + + return diff; +} + export function useNodeLocalData({ nodeId, data, @@ -55,6 +74,16 @@ export function useNodeLocalData({ const savedValue = localDataRef.current; const savedVersion = localChangeVersionRef.current; + logNodeDataDebug("queue-save-flush", { + nodeId, + nodeType: debugLabel, + savedVersion, + changedFields: diffNodeData( + acceptedPersistedDataRef.current as Record, + savedValue as Record, + ), + }); + Promise.resolve(onSave(savedValue)) .then(() => { if (!isMountedRef.current || savedVersion !== localChangeVersionRef.current) { @@ -144,7 +173,17 @@ export function useNodeLocalData({ const updateLocalData = useCallback( (updater: (current: T) => T) => { - const next = updater(localDataRef.current); + const previous = localDataRef.current; + const next = updater(previous); + + logNodeDataDebug("local-update", { + nodeId, + nodeType: debugLabel, + changedFields: diffNodeData( + previous as Record, + next as Record, + ), + }); localChangeVersionRef.current += 1; hasPendingLocalChangesRef.current = true; @@ -153,7 +192,7 @@ export function useNodeLocalData({ setPreviewNodeDataOverride(nodeId, next); queueSave(); }, - [nodeId, queueSave, setPreviewNodeDataOverride], + [debugLabel, 
nodeId, queueSave, setPreviewNodeDataOverride], ); return { diff --git a/convex/CLAUDE.md b/convex/CLAUDE.md index e6a0f83..4514fb9 100644 --- a/convex/CLAUDE.md +++ b/convex/CLAUDE.md @@ -58,7 +58,7 @@ Alle Node-Typen werden über Validators definiert: `phase1NodeTypeValidator`, `n | `video-prompt` | `content`, `modelId`, `durationSeconds` | KI-Video-Steuer-Node (Eingabe) | | `ai-video` | `storageId`, `prompt`, `model`, `modelLabel`, `durationSeconds`, `creditCost`, `generatedAt`, `taskId` (transient) | Generiertes KI-Video (System-Output) | | `compare` | `leftNodeId`, `rightNodeId`, `sliderPosition` | Vergleichs-Node | -| `mixer` | `blendMode`, `opacity`, `offsetX`, `offsetY` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) | +| `mixer` | `blendMode`, `opacity`, `overlayX`, `overlayY`, `overlayWidth`, `overlayHeight` | V1 Merge-Control-Node mit pseudo-image Output (kein Storage-Write) | | `frame` | `label`, `exportWidth`, `exportHeight`, `backgroundColor` | Artboard | | `group` | `label`, `collapsed` | Container-Node | | `note` | `content`, `color` | Anmerkung | @@ -338,6 +338,8 @@ Wirft bei unauthentifiziertem Zugriff. Wird von allen Queries und Mutations genu - `mixer` ist ein Control-Node mit pseudo-image Semantik, nicht mit persistiertem Medien-Output. - Keine zusaetzlichen Convex-Tabellen oder Storage-Flows fuer Mixer-Vorschauen. - Validierung laeuft client- und serverseitig ueber dieselbe Policy (`validateCanvasConnectionPolicy`); `edges.ts` delegiert darauf fuer Paritaet. +- Offizieller Bake-Pfad fuer Mixer ist `mixer -> render` (Render verarbeitet die Mixer-Komposition in Preview/Render-Pipeline). +- `mixer -> adjustments -> render` ist derzeit bewusst deferred und nicht Teil des offiziell supporteten Flows. 
--- diff --git a/hooks/use-pipeline-preview.ts b/hooks/use-pipeline-preview.ts index 895e903..3cd2c7a 100644 --- a/hooks/use-pipeline-preview.ts +++ b/hooks/use-pipeline-preview.ts @@ -4,6 +4,7 @@ import { useEffect, useMemo, useRef, useState } from "react"; import { hashPipeline, type PipelineStep } from "@/lib/image-pipeline/contracts"; import { emptyHistogram, type HistogramData } from "@/lib/image-pipeline/histogram"; +import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types"; import { isPipelineAbortError, renderPreviewWithWorkerFallback, @@ -12,6 +13,7 @@ import { type UsePipelinePreviewOptions = { sourceUrl: string | null; + sourceComposition?: RenderSourceComposition; steps: readonly PipelineStep[]; nodeWidth: number; includeHistogram?: boolean; @@ -54,6 +56,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { const stableRenderInputRef = useRef<{ pipelineHash: string; sourceUrl: string | null; + sourceComposition?: RenderSourceComposition; steps: readonly PipelineStep[]; } | null>(null); @@ -95,11 +98,11 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { ); const pipelineHash = useMemo(() => { - if (!options.sourceUrl) { + if (!options.sourceUrl && !options.sourceComposition) { return "no-source"; } - return hashPipeline(options.sourceUrl, options.steps); - }, [options.sourceUrl, options.steps]); + return hashPipeline(options.sourceComposition ?? 
options.sourceUrl, options.steps); + }, [options.sourceComposition, options.sourceUrl, options.steps]); useEffect(() => { if (stableRenderInputRef.current?.pipelineHash === pipelineHash) { @@ -109,13 +112,15 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { stableRenderInputRef.current = { pipelineHash, sourceUrl: options.sourceUrl, + sourceComposition: options.sourceComposition, steps: options.steps, }; - }, [pipelineHash, options.sourceUrl, options.steps]); + }, [pipelineHash, options.sourceComposition, options.sourceUrl, options.steps]); useEffect(() => { const sourceUrl = stableRenderInputRef.current?.sourceUrl ?? null; - if (!sourceUrl) { + const sourceComposition = stableRenderInputRef.current?.sourceComposition; + if (!sourceUrl && !sourceComposition) { const frameId = window.requestAnimationFrame(() => { setHistogram(emptyHistogram()); setError(null); @@ -133,8 +138,10 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { const timer = window.setTimeout(() => { setIsRendering(true); setError(null); + const resolvedSourceUrl = sourceUrl ?? undefined; void renderPreviewWithWorkerFallback({ - sourceUrl, + sourceUrl: resolvedSourceUrl, + sourceComposition, steps: stableRenderInputRef.current?.steps ?? 
[], previewWidth, includeHistogram: options.includeHistogram, @@ -168,7 +175,8 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { if (process.env.NODE_ENV !== "production") { console.error("[usePipelinePreview] render failed", { message, - sourceUrl, + sourceUrl: resolvedSourceUrl, + sourceComposition, pipelineHash, previewWidth, includeHistogram: options.includeHistogram, @@ -194,7 +202,7 @@ export function usePipelinePreview(options: UsePipelinePreviewOptions): { canvasRef, histogram, isRendering, - hasSource: Boolean(options.sourceUrl), + hasSource: Boolean(options.sourceUrl || options.sourceComposition), previewAspectRatio, error, }; diff --git a/lib/canvas-mixer-preview.ts b/lib/canvas-mixer-preview.ts index c2aa899..c641277 100644 --- a/lib/canvas-mixer-preview.ts +++ b/lib/canvas-mixer-preview.ts @@ -19,8 +19,14 @@ export type MixerPreviewState = { overlayUrl?: string; blendMode: MixerBlendMode; opacity: number; - offsetX: number; - offsetY: number; + overlayX: number; + overlayY: number; + overlayWidth: number; + overlayHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; error?: MixerPreviewError; }; @@ -35,9 +41,18 @@ const DEFAULT_BLEND_MODE: MixerBlendMode = "normal"; const DEFAULT_OPACITY = 100; const MIN_OPACITY = 0; const MAX_OPACITY = 100; -const DEFAULT_OFFSET = 0; -const MIN_OFFSET = -2048; -const MAX_OFFSET = 2048; +const DEFAULT_OVERLAY_X = 0; +const DEFAULT_OVERLAY_Y = 0; +const DEFAULT_OVERLAY_WIDTH = 1; +const DEFAULT_OVERLAY_HEIGHT = 1; +const DEFAULT_CROP_LEFT = 0; +const DEFAULT_CROP_TOP = 0; +const DEFAULT_CROP_RIGHT = 0; +const DEFAULT_CROP_BOTTOM = 0; +const MIN_OVERLAY_POSITION = 0; +const MAX_OVERLAY_POSITION = 1; +const MIN_OVERLAY_SIZE = 0.1; +const MAX_OVERLAY_SIZE = 1; function clamp(value: number, min: number, max: number): number { return Math.max(min, Math.min(max, value)); @@ -65,18 +80,165 @@ function normalizeOpacity(value: unknown): number { return 
clamp(parsed, MIN_OPACITY, MAX_OPACITY); } -function normalizeOffset(value: unknown): number { +function normalizeOverlayNumber(value: unknown, fallback: number): number { const parsed = parseNumeric(value); if (parsed === null) { - return DEFAULT_OFFSET; + return fallback; } - return clamp(parsed, MIN_OFFSET, MAX_OFFSET); + return parsed; +} + +function normalizeUnitRect(args: { + x: unknown; + y: unknown; + width: unknown; + height: unknown; + defaults: { x: number; y: number; width: number; height: number }; +}): { x: number; y: number; width: number; height: number } { + const x = clamp( + normalizeOverlayNumber(args.x, args.defaults.x), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const y = clamp( + normalizeOverlayNumber(args.y, args.defaults.y), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const width = clamp( + normalizeOverlayNumber(args.width, args.defaults.width), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - x), + ); + const height = clamp( + normalizeOverlayNumber(args.height, args.defaults.height), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - y), + ); + + return { x, y, width, height }; +} + +function normalizeOverlayRect(record: Record): Pick< + MixerPreviewState, + "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight" +> { + const hasLegacyOffset = record.offsetX !== undefined || record.offsetY !== undefined; + const hasOverlayRectField = + record.overlayX !== undefined || + record.overlayY !== undefined || + record.overlayWidth !== undefined || + record.overlayHeight !== undefined; + + if (hasLegacyOffset && !hasOverlayRectField) { + return { + overlayX: DEFAULT_OVERLAY_X, + overlayY: DEFAULT_OVERLAY_Y, + overlayWidth: DEFAULT_OVERLAY_WIDTH, + overlayHeight: DEFAULT_OVERLAY_HEIGHT, + }; + } + + const normalized = normalizeUnitRect({ + x: record.overlayX, + y: record.overlayY, + width: record.overlayWidth, + height: 
record.overlayHeight, + defaults: { + x: DEFAULT_OVERLAY_X, + y: DEFAULT_OVERLAY_Y, + width: DEFAULT_OVERLAY_WIDTH, + height: DEFAULT_OVERLAY_HEIGHT, + }, + }); + + return { + overlayX: normalized.x, + overlayY: normalized.y, + overlayWidth: normalized.width, + overlayHeight: normalized.height, + }; +} + +function normalizeCropEdges(record: Record): Pick< + MixerPreviewState, + "cropLeft" | "cropTop" | "cropRight" | "cropBottom" +> { + const hasCropField = + record.cropLeft !== undefined || + record.cropTop !== undefined || + record.cropRight !== undefined || + record.cropBottom !== undefined; + const hasLegacyContentRectField = + record.contentX !== undefined || + record.contentY !== undefined || + record.contentWidth !== undefined || + record.contentHeight !== undefined; + + if (!hasCropField && hasLegacyContentRectField) { + const legacyRect = normalizeUnitRect({ + x: record.contentX, + y: record.contentY, + width: record.contentWidth, + height: record.contentHeight, + defaults: { + x: 0, + y: 0, + width: 1, + height: 1, + }, + }); + + return { + cropLeft: legacyRect.x, + cropTop: legacyRect.y, + cropRight: 1 - (legacyRect.x + legacyRect.width), + cropBottom: 1 - (legacyRect.y + legacyRect.height), + }; + } + + const cropLeft = clamp( + normalizeOverlayNumber(record.cropLeft, DEFAULT_CROP_LEFT), + 0, + 1 - MIN_OVERLAY_SIZE, + ); + const cropTop = clamp( + normalizeOverlayNumber(record.cropTop, DEFAULT_CROP_TOP), + 0, + 1 - MIN_OVERLAY_SIZE, + ); + const cropRight = clamp( + normalizeOverlayNumber(record.cropRight, DEFAULT_CROP_RIGHT), + 0, + 1 - cropLeft - MIN_OVERLAY_SIZE, + ); + const cropBottom = clamp( + normalizeOverlayNumber(record.cropBottom, DEFAULT_CROP_BOTTOM), + 0, + 1 - cropTop - MIN_OVERLAY_SIZE, + ); + + return { + cropLeft, + cropTop, + cropRight, + cropBottom, + }; } export function normalizeMixerPreviewData(data: unknown): Pick< MixerPreviewState, - "blendMode" | "opacity" | "offsetX" | "offsetY" + | "blendMode" + | "opacity" + | "overlayX" + | 
"overlayY" + | "overlayWidth" + | "overlayHeight" + | "cropLeft" + | "cropTop" + | "cropRight" + | "cropBottom" > { const record = (data ?? {}) as Record; const blendMode = MIXER_BLEND_MODES.has(record.blendMode as MixerBlendMode) @@ -86,8 +248,8 @@ export function normalizeMixerPreviewData(data: unknown): Pick< return { blendMode, opacity: normalizeOpacity(record.opacity), - offsetX: normalizeOffset(record.offsetX), - offsetY: normalizeOffset(record.offsetY), + ...normalizeOverlayRect(record), + ...normalizeCropEdges(record), }; } @@ -119,6 +281,17 @@ function resolveSourceUrlFromNode(args: { } if (args.sourceNode.type === "render") { + const preview = resolveRenderPreviewInputFromGraph({ + nodeId: args.sourceNode.id, + graph: args.graph, + }); + if (preview.sourceComposition) { + return undefined; + } + if (preview.sourceUrl) { + return preview.sourceUrl; + } + const renderData = (args.sourceNode.data ?? {}) as Record; const renderOutputUrl = typeof renderData.lastUploadUrl === "string" && renderData.lastUploadUrl.length > 0 @@ -133,11 +306,7 @@ function resolveSourceUrlFromNode(args: { return directRenderUrl; } - const preview = resolveRenderPreviewInputFromGraph({ - nodeId: args.sourceNode.id, - graph: args.graph, - }); - return preview.sourceUrl ?? undefined; + return undefined; } return resolveNodeImageUrl(args.sourceNode.data) ?? 
undefined; @@ -172,6 +341,8 @@ export function resolveMixerPreviewFromGraph(args: { if (base.duplicate || overlay.duplicate) { return { status: "error", + baseUrl: undefined, + overlayUrl: undefined, ...normalized, error: "duplicate-handle-edge", }; diff --git a/lib/canvas-node-templates.ts b/lib/canvas-node-templates.ts index 405a479..961a022 100644 --- a/lib/canvas-node-templates.ts +++ b/lib/canvas-node-templates.ts @@ -51,8 +51,14 @@ export const CANVAS_NODE_TEMPLATES = [ defaultData: { blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }, }, { diff --git a/lib/canvas-render-preview.ts b/lib/canvas-render-preview.ts index f98e5b2..6fcbf05 100644 --- a/lib/canvas-render-preview.ts +++ b/lib/canvas-render-preview.ts @@ -15,10 +15,29 @@ export type RenderPreviewGraphEdge = { }; export type RenderPreviewInput = { - sourceUrl: string; + sourceUrl: string | null; + sourceComposition?: RenderPreviewSourceComposition; steps: PipelineStep[]; }; +export type MixerBlendMode = "normal" | "multiply" | "screen" | "overlay"; + +export type RenderPreviewSourceComposition = { + kind: "mixer"; + baseUrl: string; + overlayUrl: string; + blendMode: MixerBlendMode; + opacity: number; + overlayX: number; + overlayY: number; + overlayWidth: number; + overlayHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}; + export type CanvasGraphNodeLike = { id: string; type: string; @@ -38,6 +57,8 @@ export type CanvasGraphSnapshot = { incomingEdgesByTarget: ReadonlyMap; }; +type RenderPreviewResolvedInput = RenderPreviewInput; + export type CanvasGraphNodeDataOverrides = ReadonlyMap; export function shouldFastPathPreviewPipeline( @@ -129,6 +150,188 @@ export const RENDER_PREVIEW_PIPELINE_TYPES = new Set([ "detail-adjust", ]); +const MIXER_SOURCE_NODE_TYPES = new Set(["image", "asset", "ai-image", 
"render"]); +const MIXER_BLEND_MODES = new Set([ + "normal", + "multiply", + "screen", + "overlay", +]); +const DEFAULT_BLEND_MODE: MixerBlendMode = "normal"; +const DEFAULT_OPACITY = 100; +const MIN_OPACITY = 0; +const MAX_OPACITY = 100; +const DEFAULT_OVERLAY_X = 0; +const DEFAULT_OVERLAY_Y = 0; +const DEFAULT_OVERLAY_WIDTH = 1; +const DEFAULT_OVERLAY_HEIGHT = 1; +const DEFAULT_CROP_LEFT = 0; +const DEFAULT_CROP_TOP = 0; +const DEFAULT_CROP_RIGHT = 0; +const DEFAULT_CROP_BOTTOM = 0; +const MIN_OVERLAY_POSITION = 0; +const MAX_OVERLAY_POSITION = 1; +const MIN_OVERLAY_SIZE = 0.1; +const MAX_OVERLAY_SIZE = 1; + +function clamp(value: number, min: number, max: number): number { + return Math.max(min, Math.min(max, value)); +} + +function parseNumeric(value: unknown): number | null { + if (typeof value === "number") { + return Number.isFinite(value) ? value : null; + } + + if (typeof value === "string") { + const parsed = Number(value); + return Number.isFinite(parsed) ? parsed : null; + } + + return null; +} + +function normalizeOpacity(value: unknown): number { + const parsed = parseNumeric(value); + if (parsed === null) { + return DEFAULT_OPACITY; + } + + return clamp(parsed, MIN_OPACITY, MAX_OPACITY); +} + +function normalizeOverlayNumber(value: unknown, fallback: number): number { + const parsed = parseNumeric(value); + if (parsed === null) { + return fallback; + } + + return parsed; +} + +function normalizeMixerCompositionRect(data: Record): Pick< + RenderPreviewSourceComposition, + "overlayX" | "overlayY" | "overlayWidth" | "overlayHeight" +> { + const hasLegacyOffset = data.offsetX !== undefined || data.offsetY !== undefined; + const hasOverlayRectField = + data.overlayX !== undefined || + data.overlayY !== undefined || + data.overlayWidth !== undefined || + data.overlayHeight !== undefined; + + if (hasLegacyOffset && !hasOverlayRectField) { + return { + overlayX: DEFAULT_OVERLAY_X, + overlayY: DEFAULT_OVERLAY_Y, + overlayWidth: DEFAULT_OVERLAY_WIDTH, + 
overlayHeight: DEFAULT_OVERLAY_HEIGHT, + }; + } + + const overlayX = clamp( + normalizeOverlayNumber(data.overlayX, DEFAULT_OVERLAY_X), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const overlayY = clamp( + normalizeOverlayNumber(data.overlayY, DEFAULT_OVERLAY_Y), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const overlayWidth = clamp( + normalizeOverlayNumber(data.overlayWidth, DEFAULT_OVERLAY_WIDTH), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayX), + ); + const overlayHeight = clamp( + normalizeOverlayNumber(data.overlayHeight, DEFAULT_OVERLAY_HEIGHT), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - overlayY), + ); + + return { + overlayX, + overlayY, + overlayWidth, + overlayHeight, + }; +} + +function normalizeMixerCompositionCropEdges(data: Record): Pick< + RenderPreviewSourceComposition, + "cropLeft" | "cropTop" | "cropRight" | "cropBottom" +> { + const hasCropField = + data.cropLeft !== undefined || + data.cropTop !== undefined || + data.cropRight !== undefined || + data.cropBottom !== undefined; + const hasLegacyContentRectField = + data.contentX !== undefined || + data.contentY !== undefined || + data.contentWidth !== undefined || + data.contentHeight !== undefined; + + if (!hasCropField && hasLegacyContentRectField) { + const contentX = clamp( + normalizeOverlayNumber(data.contentX, 0), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const contentY = clamp( + normalizeOverlayNumber(data.contentY, 0), + MIN_OVERLAY_POSITION, + MAX_OVERLAY_POSITION - MIN_OVERLAY_SIZE, + ); + const contentWidth = clamp( + normalizeOverlayNumber(data.contentWidth, 1), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentX), + ); + const contentHeight = clamp( + normalizeOverlayNumber(data.contentHeight, 1), + MIN_OVERLAY_SIZE, + Math.min(MAX_OVERLAY_SIZE, MAX_OVERLAY_POSITION - contentY), + ); + + return { + 
cropLeft: contentX, + cropTop: contentY, + cropRight: 1 - (contentX + contentWidth), + cropBottom: 1 - (contentY + contentHeight), + }; + } + + const cropLeft = clamp( + normalizeOverlayNumber(data.cropLeft, DEFAULT_CROP_LEFT), + 0, + 1 - MIN_OVERLAY_SIZE, + ); + const cropTop = clamp( + normalizeOverlayNumber(data.cropTop, DEFAULT_CROP_TOP), + 0, + 1 - MIN_OVERLAY_SIZE, + ); + const cropRight = clamp( + normalizeOverlayNumber(data.cropRight, DEFAULT_CROP_RIGHT), + 0, + 1 - cropLeft - MIN_OVERLAY_SIZE, + ); + const cropBottom = clamp( + normalizeOverlayNumber(data.cropBottom, DEFAULT_CROP_BOTTOM), + 0, + 1 - cropTop - MIN_OVERLAY_SIZE, + ); + + return { + cropLeft, + cropTop, + cropRight, + cropBottom, + }; +} + export function resolveRenderFingerprint(data: unknown): { resolution: RenderResolutionOption; customWidth?: number; @@ -163,15 +366,19 @@ export function resolveRenderFingerprint(data: unknown): { export function resolveRenderPipelineHash(args: { sourceUrl: string | null; + sourceComposition?: RenderPreviewSourceComposition; steps: PipelineStep[]; data: unknown; }): string | null { - if (!args.sourceUrl) { + if (!args.sourceUrl && !args.sourceComposition) { return null; } return hashPipeline( - { sourceUrl: args.sourceUrl, render: resolveRenderFingerprint(args.data) }, + { + source: args.sourceComposition ?? args.sourceUrl, + render: resolveRenderFingerprint(args.data), + }, args.steps, ); } @@ -212,6 +419,119 @@ function resolveSourceNodeUrl(node: CanvasGraphNodeLike): string | null { return resolveNodeImageUrl(node.data); } +function resolveRenderOutputUrl(node: CanvasGraphNodeLike): string | null { + const data = (node.data ?? {}) as Record; + + const lastUploadUrl = + typeof data.lastUploadUrl === "string" && data.lastUploadUrl.length > 0 + ? 
data.lastUploadUrl + : null; + if (lastUploadUrl) { + return lastUploadUrl; + } + + return resolveNodeImageUrl(node.data); +} + +function resolveMixerHandleEdge(args: { + incomingEdges: readonly CanvasGraphEdgeLike[]; + handle: "base" | "overlay"; +}): CanvasGraphEdgeLike | null { + const filtered = args.incomingEdges.filter((edge) => { + if (args.handle === "base") { + return edge.targetHandle === "base" || edge.targetHandle == null || edge.targetHandle === ""; + } + + return edge.targetHandle === "overlay"; + }); + + if (filtered.length !== 1) { + return null; + } + + return filtered[0] ?? null; +} + +function resolveMixerSourceUrlFromNode(args: { + node: CanvasGraphNodeLike; + graph: CanvasGraphSnapshot; +}): string | null { + if (!MIXER_SOURCE_NODE_TYPES.has(args.node.type)) { + return null; + } + + if (args.node.type === "render") { + const preview = resolveRenderPreviewInputFromGraph({ + nodeId: args.node.id, + graph: args.graph, + }); + if (preview.sourceComposition) { + return null; + } + if (preview.sourceUrl) { + return preview.sourceUrl; + } + + const directRenderUrl = resolveRenderOutputUrl(args.node); + if (directRenderUrl) { + return directRenderUrl; + } + + return null; + } + + return resolveNodeImageUrl(args.node.data); +} + +function resolveMixerSourceUrlFromEdge(args: { + edge: CanvasGraphEdgeLike | null; + graph: CanvasGraphSnapshot; +}): string | null { + if (!args.edge) { + return null; + } + + const sourceNode = args.graph.nodesById.get(args.edge.source); + if (!sourceNode) { + return null; + } + + return resolveMixerSourceUrlFromNode({ + node: sourceNode, + graph: args.graph, + }); +} + +function resolveRenderMixerCompositionFromGraph(args: { + node: CanvasGraphNodeLike; + graph: CanvasGraphSnapshot; +}): RenderPreviewSourceComposition | null { + const incomingEdges = args.graph.incomingEdgesByTarget.get(args.node.id) ?? 
[]; + const baseEdge = resolveMixerHandleEdge({ incomingEdges, handle: "base" }); + const overlayEdge = resolveMixerHandleEdge({ incomingEdges, handle: "overlay" }); + const baseUrl = resolveMixerSourceUrlFromEdge({ edge: baseEdge, graph: args.graph }); + const overlayUrl = resolveMixerSourceUrlFromEdge({ edge: overlayEdge, graph: args.graph }); + + if (!baseUrl || !overlayUrl) { + return null; + } + + const data = (args.node.data ?? {}) as Record; + const blendMode = MIXER_BLEND_MODES.has(data.blendMode as MixerBlendMode) + ? (data.blendMode as MixerBlendMode) + : DEFAULT_BLEND_MODE; + + return { + kind: "mixer", + baseUrl, + overlayUrl, + blendMode, + opacity: normalizeOpacity(data.opacity), + ...normalizeMixerCompositionRect(data), + ...normalizeMixerCompositionCropEdges(data), + }; +} + export function buildGraphSnapshot( nodes: readonly CanvasGraphNodeLike[], edges: readonly CanvasGraphEdgeLike[], @@ -384,7 +704,32 @@ export function findSourceNodeFromGraph( export function resolveRenderPreviewInputFromGraph(args: { nodeId: string; graph: CanvasGraphSnapshot; -}): { sourceUrl: string | null; steps: PipelineStep[] } { +}): RenderPreviewResolvedInput { + const renderIncoming = getSortedIncomingEdge( + args.graph.incomingEdgesByTarget.get(args.nodeId), + ); + const renderInputNode = renderIncoming + ? args.graph.nodesById.get(renderIncoming.source) + : null; + + if (renderInputNode?.type === "mixer") { + const sourceComposition = resolveRenderMixerCompositionFromGraph({ + node: renderInputNode, + graph: args.graph, + }); + + const steps = collectPipelineFromGraph(args.graph, { + nodeId: args.nodeId, + isPipelineNode: (node) => RENDER_PREVIEW_PIPELINE_TYPES.has(node.type ?? ""), + }); + + return { + sourceUrl: null, + sourceComposition: sourceComposition ?? undefined, + steps, + }; + } + const sourceUrl = getSourceImageFromGraph(args.graph, { nodeId: args.nodeId, isSourceNode: (node) => SOURCE_NODE_TYPES.has(node.type ?? 
""), @@ -406,7 +751,7 @@ export function resolveRenderPreviewInput(args: { nodeId: string; nodes: readonly RenderPreviewGraphNode[]; edges: readonly RenderPreviewGraphEdge[]; -}): { sourceUrl: string | null; steps: PipelineStep[] } { +}): RenderPreviewResolvedInput { return resolveRenderPreviewInputFromGraph({ nodeId: args.nodeId, graph: buildGraphSnapshot(args.nodes, args.edges), diff --git a/lib/canvas-utils.ts b/lib/canvas-utils.ts index 56e9bfc..dcac878 100644 --- a/lib/canvas-utils.ts +++ b/lib/canvas-utils.ts @@ -437,8 +437,14 @@ export const NODE_DEFAULTS: Record< data: { blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }, }, "agent-output": { diff --git a/lib/image-pipeline/bridge.ts b/lib/image-pipeline/bridge.ts index 497bc3e..3fd5146 100644 --- a/lib/image-pipeline/bridge.ts +++ b/lib/image-pipeline/bridge.ts @@ -10,7 +10,7 @@ import { applyGeometryStepsToSource, partitionPipelineSteps, } from "@/lib/image-pipeline/geometry-transform"; -import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader"; +import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader"; type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas; type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D; @@ -99,7 +99,11 @@ function resolveMimeType(format: RenderFormat): string { export async function renderFull(options: RenderFullOptions): Promise { const { signal } = options; - const bitmap = await loadSourceBitmap(options.sourceUrl, { signal }); + const bitmap = await loadRenderSourceBitmap({ + sourceUrl: options.sourceUrl, + sourceComposition: options.sourceComposition, + signal, + }); const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps); const geometryResult = applyGeometryStepsToSource({ source: bitmap, diff --git a/lib/image-pipeline/image-pipeline.worker.ts 
b/lib/image-pipeline/image-pipeline.worker.ts index 5512d54..8c7c7cf 100644 --- a/lib/image-pipeline/image-pipeline.worker.ts +++ b/lib/image-pipeline/image-pipeline.worker.ts @@ -2,21 +2,26 @@ import { renderFull } from "@/lib/image-pipeline/bridge"; import { renderPreview } from "@/lib/image-pipeline/preview-renderer"; import type { PipelineStep } from "@/lib/image-pipeline/contracts"; import type { HistogramData } from "@/lib/image-pipeline/histogram"; -import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types"; +import type { + RenderFullOptions, + RenderFullResult, + RenderSourceComposition, +} from "@/lib/image-pipeline/render-types"; import { IMAGE_PIPELINE_BACKEND_FLAG_KEYS, type BackendFeatureFlags, } from "@/lib/image-pipeline/backend/feature-flags"; type PreviewWorkerPayload = { - sourceUrl: string; + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; steps: readonly PipelineStep[]; previewWidth: number; includeHistogram?: boolean; featureFlags?: BackendFeatureFlags; }; -type FullWorkerPayload = RenderFullOptions & { +type FullWorkerPayload = Omit & { featureFlags?: BackendFeatureFlags; }; @@ -112,6 +117,7 @@ async function handlePreviewRequest(requestId: number, payload: PreviewWorkerPay applyWorkerFeatureFlags(payload.featureFlags); const result = await renderPreview({ sourceUrl: payload.sourceUrl, + sourceComposition: payload.sourceComposition, steps: payload.steps, previewWidth: payload.previewWidth, includeHistogram: payload.includeHistogram, @@ -161,6 +167,7 @@ async function handleFullRequest(requestId: number, payload: FullWorkerPayload): applyWorkerFeatureFlags(payload.featureFlags); const result = await renderFull({ sourceUrl: payload.sourceUrl, + sourceComposition: payload.sourceComposition, steps: payload.steps, render: payload.render, signal: controller.signal, diff --git a/lib/image-pipeline/preview-renderer.ts b/lib/image-pipeline/preview-renderer.ts index 952fb2c..57c5495 100644 --- 
a/lib/image-pipeline/preview-renderer.ts +++ b/lib/image-pipeline/preview-renderer.ts @@ -8,7 +8,8 @@ import { applyGeometryStepsToSource, partitionPipelineSteps, } from "@/lib/image-pipeline/geometry-transform"; -import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader"; +import { loadRenderSourceBitmap } from "@/lib/image-pipeline/source-loader"; +import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types"; export type PreviewRenderResult = { width: number; @@ -64,13 +65,16 @@ async function yieldToMainOrWorkerLoop(): Promise { } export async function renderPreview(options: { - sourceUrl: string; + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; steps: readonly PipelineStep[]; previewWidth: number; includeHistogram?: boolean; signal?: AbortSignal; }): Promise { - const bitmap = await loadSourceBitmap(options.sourceUrl, { + const bitmap = await loadRenderSourceBitmap({ + sourceUrl: options.sourceUrl, + sourceComposition: options.sourceComposition, signal: options.signal, }); const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps); diff --git a/lib/image-pipeline/render-types.ts b/lib/image-pipeline/render-types.ts index c3c8c86..5cf3368 100644 --- a/lib/image-pipeline/render-types.ts +++ b/lib/image-pipeline/render-types.ts @@ -24,6 +24,22 @@ export type RenderSizeLimits = { maxPixels?: number; }; +export type RenderSourceComposition = { + kind: "mixer"; + baseUrl: string; + overlayUrl: string; + blendMode: "normal" | "multiply" | "screen" | "overlay"; + opacity: number; + overlayX: number; + overlayY: number; + overlayWidth: number; + overlayHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}; + export type ResolvedRenderSize = { width: number; height: number; @@ -32,7 +48,8 @@ export type ResolvedRenderSize = { }; export type RenderFullOptions = { - sourceUrl: string; + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; steps: 
readonly PipelineStep[]; render: RenderOptions; limits?: RenderSizeLimits; diff --git a/lib/image-pipeline/source-loader.ts b/lib/image-pipeline/source-loader.ts index 1ab1088..174e14e 100644 --- a/lib/image-pipeline/source-loader.ts +++ b/lib/image-pipeline/source-loader.ts @@ -1,3 +1,6 @@ +import type { RenderSourceComposition } from "@/lib/image-pipeline/render-types"; +import { computeVisibleMixerContentRect } from "@/lib/mixer-crop-layout"; + export const SOURCE_BITMAP_CACHE_MAX_ENTRIES = 32; type CacheEntry = { @@ -12,6 +15,12 @@ type LoadSourceBitmapOptions = { signal?: AbortSignal; }; +type LoadRenderSourceBitmapOptions = { + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; + signal?: AbortSignal; +}; + function throwIfAborted(signal: AbortSignal | undefined): void { if (signal?.aborted) { throw new DOMException("The operation was aborted.", "AbortError"); @@ -215,3 +224,219 @@ export async function loadSourceBitmap( const promise = getOrCreateSourceBitmapPromise(sourceUrl); return await awaitWithLocalAbort(promise, options.signal); } + +function createWorkingCanvas(width: number, height: number): + | HTMLCanvasElement + | OffscreenCanvas { + if (typeof document !== "undefined") { + const canvas = document.createElement("canvas"); + canvas.width = width; + canvas.height = height; + return canvas; + } + + if (typeof OffscreenCanvas !== "undefined") { + return new OffscreenCanvas(width, height); + } + + throw new Error("Canvas rendering is not available in this environment."); +} + +function mixerBlendModeToCompositeOperation( + blendMode: RenderSourceComposition["blendMode"], +): GlobalCompositeOperation { + if (blendMode === "normal") { + return "source-over"; + } + + return blendMode; +} + +function normalizeCompositionOpacity(value: number): number { + if (!Number.isFinite(value)) { + return 1; + } + + return Math.max(0, Math.min(100, value)) / 100; +} + +function normalizeRatio(value: number, fallback: number): number { + if 
(!Number.isFinite(value)) { + return fallback; + } + + return value; +} + +function normalizeMixerRect(source: RenderSourceComposition): { + x: number; + y: number; + width: number; + height: number; +} { + const overlayX = Math.max(0, Math.min(0.9, normalizeRatio(source.overlayX, 0))); + const overlayY = Math.max(0, Math.min(0.9, normalizeRatio(source.overlayY, 0))); + const overlayWidth = Math.max( + 0.1, + Math.min(1, normalizeRatio(source.overlayWidth, 1), 1 - overlayX), + ); + const overlayHeight = Math.max( + 0.1, + Math.min(1, normalizeRatio(source.overlayHeight, 1), 1 - overlayY), + ); + + return { + x: overlayX, + y: overlayY, + width: overlayWidth, + height: overlayHeight, + }; +} + +function normalizeMixerCropEdges(source: RenderSourceComposition): { + left: number; + top: number; + right: number; + bottom: number; +} { + const legacySource = source as RenderSourceComposition & { + contentX?: number; + contentY?: number; + contentWidth?: number; + contentHeight?: number; + }; + const hasLegacyContentRect = + legacySource.contentX !== undefined || + legacySource.contentY !== undefined || + legacySource.contentWidth !== undefined || + legacySource.contentHeight !== undefined; + + if (hasLegacyContentRect) { + const contentX = Math.max( + 0, + Math.min(0.9, normalizeRatio(legacySource.contentX ?? Number.NaN, 0)), + ); + const contentY = Math.max( + 0, + Math.min(0.9, normalizeRatio(legacySource.contentY ?? Number.NaN, 0)), + ); + const contentWidth = Math.max( + 0.1, + Math.min(1, normalizeRatio(legacySource.contentWidth ?? Number.NaN, 1), 1 - contentX), + ); + const contentHeight = Math.max( + 0.1, + Math.min(1, normalizeRatio(legacySource.contentHeight ?? 
Number.NaN, 1), 1 - contentY), + ); + + return { + left: contentX, + top: contentY, + right: 1 - (contentX + contentWidth), + bottom: 1 - (contentY + contentHeight), + }; + } + + const cropLeft = Math.max(0, Math.min(0.9, normalizeRatio(source.cropLeft, 0))); + const cropTop = Math.max(0, Math.min(0.9, normalizeRatio(source.cropTop, 0))); + const cropRight = Math.max(0, Math.min(1 - cropLeft - 0.1, normalizeRatio(source.cropRight, 0))); + const cropBottom = Math.max( + 0, + Math.min(1 - cropTop - 0.1, normalizeRatio(source.cropBottom, 0)), + ); + + return { + left: cropLeft, + top: cropTop, + right: cropRight, + bottom: cropBottom, + }; +} + +async function loadMixerCompositionBitmap( + sourceComposition: RenderSourceComposition, + signal?: AbortSignal, +): Promise { + const [baseBitmap, overlayBitmap] = await Promise.all([ + loadSourceBitmap(sourceComposition.baseUrl, { signal }), + loadSourceBitmap(sourceComposition.overlayUrl, { signal }), + ]); + + throwIfAborted(signal); + + const canvas = createWorkingCanvas(baseBitmap.width, baseBitmap.height); + const context = canvas.getContext("2d", { willReadFrequently: true }); + if (!context) { + throw new Error("Render composition could not create a 2D context."); + } + + context.clearRect(0, 0, baseBitmap.width, baseBitmap.height); + context.drawImage(baseBitmap, 0, 0, baseBitmap.width, baseBitmap.height); + + const rect = normalizeMixerRect(sourceComposition); + const frameX = rect.x * baseBitmap.width; + const frameY = rect.y * baseBitmap.height; + const frameWidth = rect.width * baseBitmap.width; + const frameHeight = rect.height * baseBitmap.height; + const cropEdges = normalizeMixerCropEdges(sourceComposition); + const sourceX = cropEdges.left * overlayBitmap.width; + const sourceY = cropEdges.top * overlayBitmap.height; + const sourceWidth = (1 - cropEdges.left - cropEdges.right) * overlayBitmap.width; + const sourceHeight = (1 - cropEdges.top - cropEdges.bottom) * overlayBitmap.height; + const visibleRect = 
computeVisibleMixerContentRect({ + frameAspectRatio: frameHeight > 0 ? frameWidth / frameHeight : 1, + sourceWidth: overlayBitmap.width, + sourceHeight: overlayBitmap.height, + cropLeft: cropEdges.left, + cropTop: cropEdges.top, + cropRight: cropEdges.right, + cropBottom: cropEdges.bottom, + }); + const destX = frameX + (visibleRect?.x ?? 0) * frameWidth; + const destY = frameY + (visibleRect?.y ?? 0) * frameHeight; + const destWidth = (visibleRect?.width ?? 1) * frameWidth; + const destHeight = (visibleRect?.height ?? 1) * frameHeight; + + context.globalCompositeOperation = mixerBlendModeToCompositeOperation( + sourceComposition.blendMode, + ); + context.globalAlpha = normalizeCompositionOpacity(sourceComposition.opacity); + context.save(); + context.beginPath(); + context.rect(frameX, frameY, frameWidth, frameHeight); + context.clip(); + context.drawImage( + overlayBitmap, + sourceX, + sourceY, + sourceWidth, + sourceHeight, + destX, + destY, + destWidth, + destHeight, + ); + context.restore(); + context.globalCompositeOperation = "source-over"; + context.globalAlpha = 1; + + return await createImageBitmap(canvas); +} + +export async function loadRenderSourceBitmap( + options: LoadRenderSourceBitmapOptions, +): Promise { + if (options.sourceComposition) { + if (options.sourceComposition.kind !== "mixer") { + throw new Error(`Unsupported source composition '${options.sourceComposition.kind}'.`); + } + + return await loadMixerCompositionBitmap(options.sourceComposition, options.signal); + } + + if (!options.sourceUrl) { + throw new Error("Render source is required."); + } + + return await loadSourceBitmap(options.sourceUrl, { signal: options.signal }); +} diff --git a/lib/image-pipeline/worker-client.ts b/lib/image-pipeline/worker-client.ts index e8ebdb9..4f790db 100644 --- a/lib/image-pipeline/worker-client.ts +++ b/lib/image-pipeline/worker-client.ts @@ -5,7 +5,11 @@ import { } from "@/lib/image-pipeline/preview-renderer"; import { hashPipeline, type PipelineStep 
} from "@/lib/image-pipeline/contracts"; import type { HistogramData } from "@/lib/image-pipeline/histogram"; -import type { RenderFullOptions, RenderFullResult } from "@/lib/image-pipeline/render-types"; +import type { + RenderFullOptions, + RenderFullResult, + RenderSourceComposition, +} from "@/lib/image-pipeline/render-types"; import { getBackendFeatureFlags, type BackendFeatureFlags, @@ -20,14 +24,15 @@ export type BackendDiagnosticsMetadata = { }; type PreviewWorkerPayload = { - sourceUrl: string; + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; steps: readonly PipelineStep[]; previewWidth: number; includeHistogram?: boolean; featureFlags?: BackendFeatureFlags; }; -type FullWorkerPayload = RenderFullOptions & { +type FullWorkerPayload = Omit & { featureFlags?: BackendFeatureFlags; }; @@ -318,19 +323,20 @@ function runWorkerRequest { + const { signal, ...serializableOptions } = options; + try { return await runWorkerRequest({ kind: "full", payload: { - ...options, + ...serializableOptions, featureFlags: getWorkerFeatureFlagsSnapshot(), }, - signal: options.signal, + signal, }); } catch (error: unknown) { if (isAbortError(error)) { diff --git a/lib/mixer-crop-layout.ts b/lib/mixer-crop-layout.ts new file mode 100644 index 0000000..17ba548 --- /dev/null +++ b/lib/mixer-crop-layout.ts @@ -0,0 +1,219 @@ +const MIN_CROP_REMAINING_SIZE = 0.1; + +type MixerSurfaceFit = "contain" | "cover"; + +function formatPercent(value: number): string { + const normalized = Math.abs(value) < 1e-10 ? 
0 : value; + return `${normalized}%`; +} + +function computeFittedRect(args: { + sourceWidth: number; + sourceHeight: number; + boundsX: number; + boundsY: number; + boundsWidth: number; + boundsHeight: number; + fit?: MixerSurfaceFit; +}): { x: number; y: number; width: number; height: number } { + const { + sourceWidth, + sourceHeight, + boundsX, + boundsY, + boundsWidth, + boundsHeight, + fit = "contain", + } = args; + + if (sourceWidth <= 0 || sourceHeight <= 0 || boundsWidth <= 0 || boundsHeight <= 0) { + return { + x: boundsX, + y: boundsY, + width: boundsWidth, + height: boundsHeight, + }; + } + + const scale = + fit === "cover" + ? Math.max(boundsWidth / sourceWidth, boundsHeight / sourceHeight) + : Math.min(boundsWidth / sourceWidth, boundsHeight / sourceHeight); + if (!Number.isFinite(scale) || scale <= 0) { + return { + x: boundsX, + y: boundsY, + width: boundsWidth, + height: boundsHeight, + }; + } + + const width = sourceWidth * scale; + const height = sourceHeight * scale; + + return { + x: boundsX + (boundsWidth - width) / 2, + y: boundsY + (boundsHeight - height) / 2, + width, + height, + }; +} + +export function computeMixerFrameRectInSurface(args: { + surfaceWidth: number; + surfaceHeight: number; + baseWidth: number; + baseHeight: number; + overlayX: number; + overlayY: number; + overlayWidth: number; + overlayHeight: number; + fit?: MixerSurfaceFit; +}): { x: number; y: number; width: number; height: number } | null { + if (args.baseWidth <= 0 || args.baseHeight <= 0 || args.surfaceWidth <= 0 || args.surfaceHeight <= 0) { + return null; + } + + const baseRect = computeFittedRect({ + sourceWidth: args.baseWidth, + sourceHeight: args.baseHeight, + boundsX: 0, + boundsY: 0, + boundsWidth: args.surfaceWidth, + boundsHeight: args.surfaceHeight, + fit: args.fit, + }); + + return { + x: (baseRect.x + args.overlayX * baseRect.width) / args.surfaceWidth, + y: (baseRect.y + args.overlayY * baseRect.height) / args.surfaceHeight, + width: (args.overlayWidth 
* baseRect.width) / args.surfaceWidth, + height: (args.overlayHeight * baseRect.height) / args.surfaceHeight, + }; +} + +export function computeVisibleMixerContentRect(args: { + frameAspectRatio: number; + sourceWidth: number; + sourceHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}): { x: number; y: number; width: number; height: number } | null { + if (args.sourceWidth <= 0 || args.sourceHeight <= 0) { + return null; + } + + const cropWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE); + const cropHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE); + const frameAspectRatio = args.frameAspectRatio > 0 ? args.frameAspectRatio : 1; + + const rect = computeFittedRect({ + sourceWidth: args.sourceWidth * cropWidth, + sourceHeight: args.sourceHeight * cropHeight, + boundsX: 0, + boundsY: 0, + boundsWidth: frameAspectRatio, + boundsHeight: 1, + }); + + return { + x: rect.x / frameAspectRatio, + y: rect.y, + width: rect.width / frameAspectRatio, + height: rect.height, + }; +} + +export function computeMixerCropImageStyle(args: { + frameAspectRatio: number; + sourceWidth: number; + sourceHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}) { + const safeWidth = Math.max(1 - args.cropLeft - args.cropRight, MIN_CROP_REMAINING_SIZE); + const safeHeight = Math.max(1 - args.cropTop - args.cropBottom, MIN_CROP_REMAINING_SIZE); + const visibleRect = computeVisibleMixerContentRect(args); + + if (!visibleRect) { + return { + left: formatPercent((-args.cropLeft / safeWidth) * 100), + top: formatPercent((-args.cropTop / safeHeight) * 100), + width: formatPercent((1 / safeWidth) * 100), + height: formatPercent((1 / safeHeight) * 100), + } as const; + } + + const imageWidth = visibleRect.width / safeWidth; + const imageHeight = visibleRect.height / safeHeight; + + return { + left: formatPercent((visibleRect.x - (args.cropLeft / 
safeWidth) * visibleRect.width) * 100), + top: formatPercent((visibleRect.y - (args.cropTop / safeHeight) * visibleRect.height) * 100), + width: formatPercent(imageWidth * 100), + height: formatPercent(imageHeight * 100), + } as const; +} + +export function computeMixerCompareOverlayImageStyle(args: { + surfaceWidth: number; + surfaceHeight: number; + baseWidth: number; + baseHeight: number; + overlayX: number; + overlayY: number; + overlayWidth: number; + overlayHeight: number; + sourceWidth: number; + sourceHeight: number; + cropLeft: number; + cropTop: number; + cropRight: number; + cropBottom: number; +}) { + const frameRect = computeMixerFrameRectInSurface({ + surfaceWidth: args.surfaceWidth, + surfaceHeight: args.surfaceHeight, + baseWidth: args.baseWidth, + baseHeight: args.baseHeight, + overlayX: args.overlayX, + overlayY: args.overlayY, + overlayWidth: args.overlayWidth, + overlayHeight: args.overlayHeight, + }); + + const frameAspectRatio = + frameRect && frameRect.width > 0 && frameRect.height > 0 + ? (frameRect.width * args.surfaceWidth) / (frameRect.height * args.surfaceHeight) + : args.overlayWidth > 0 && args.overlayHeight > 0 + ? 
args.overlayWidth / args.overlayHeight + : 1; + + return computeMixerCropImageStyle({ + frameAspectRatio, + sourceWidth: args.sourceWidth, + sourceHeight: args.sourceHeight, + cropLeft: args.cropLeft, + cropTop: args.cropTop, + cropRight: args.cropRight, + cropBottom: args.cropBottom, + }); +} + +export function isMixerCropImageReady(args: { + currentOverlayUrl: string | null | undefined; + loadedOverlayUrl: string | null; + sourceWidth: number; + sourceHeight: number; +}): boolean { + return Boolean( + args.currentOverlayUrl && + args.loadedOverlayUrl === args.currentOverlayUrl && + args.sourceWidth > 0 && + args.sourceHeight > 0, + ); +} diff --git a/tests/image-pipeline/backend-router.test.ts b/tests/image-pipeline/backend-router.test.ts index 1f2c5e0..5ea8bc9 100644 --- a/tests/image-pipeline/backend-router.test.ts +++ b/tests/image-pipeline/backend-router.test.ts @@ -17,6 +17,13 @@ const sourceLoaderMocks = vi.hoisted(() => ({ vi.mock("@/lib/image-pipeline/source-loader", () => ({ loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap, + loadRenderSourceBitmap: ({ sourceUrl }: { sourceUrl?: string }) => { + if (!sourceUrl) { + throw new Error("Render source is required."); + } + + return sourceLoaderMocks.loadSourceBitmap(sourceUrl); + }, })); function createPreviewPixels(): Uint8ClampedArray { diff --git a/tests/image-pipeline/image-pipeline.worker.test.ts b/tests/image-pipeline/image-pipeline.worker.test.ts new file mode 100644 index 0000000..9969793 --- /dev/null +++ b/tests/image-pipeline/image-pipeline.worker.test.ts @@ -0,0 +1,117 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; + +import type { RenderFullResult, RenderSourceComposition } from "@/lib/image-pipeline/render-types"; + +const bridgeMocks = vi.hoisted(() => ({ + renderFull: vi.fn(), +})); + +const previewRendererMocks = vi.hoisted(() => ({ + renderPreview: vi.fn(), +})); + +vi.mock("@/lib/image-pipeline/bridge", () => ({ + renderFull: bridgeMocks.renderFull, +})); + 
+vi.mock("@/lib/image-pipeline/preview-renderer", () => ({ + renderPreview: previewRendererMocks.renderPreview, +})); + +type WorkerMessage = { + kind: "full"; + requestId: number; + payload: { + sourceUrl?: string; + sourceComposition?: RenderSourceComposition; + steps: []; + render: { + resolution: "original"; + format: "png"; + }; + }; +}; + +type WorkerScopeMock = { + postMessage: ReturnType; + onmessage: ((event: MessageEvent) => void) | null; +}; + +function createFullResult(): RenderFullResult { + return { + blob: new Blob(["rendered"]), + width: 64, + height: 64, + mimeType: "image/png", + format: "png", + quality: null, + sizeBytes: 8, + sourceWidth: 64, + sourceHeight: 64, + wasSizeClamped: false, + }; +} + +function createWorkerScope(): WorkerScopeMock { + return { + postMessage: vi.fn(), + onmessage: null, + }; +} + +describe("image-pipeline.worker full render", () => { + beforeEach(() => { + vi.resetModules(); + vi.unstubAllGlobals(); + bridgeMocks.renderFull.mockReset(); + bridgeMocks.renderFull.mockResolvedValue(createFullResult()); + previewRendererMocks.renderPreview.mockReset(); + }); + + it("forwards sourceComposition to renderFull for full requests", async () => { + const workerScope = createWorkerScope(); + vi.stubGlobal("self", workerScope); + await import("@/lib/image-pipeline/image-pipeline.worker"); + + const sourceComposition: RenderSourceComposition = { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 0.5, + overlayX: 32, + overlayY: 16, + overlayWidth: 128, + overlayHeight: 64, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }; + + workerScope.onmessage?.({ + data: { + kind: "full", + requestId: 41, + payload: { + sourceComposition, + steps: [], + render: { + resolution: "original", + format: "png", + }, + }, + }, + } as MessageEvent); + + await vi.waitFor(() => { + 
expect(bridgeMocks.renderFull).toHaveBeenCalledTimes(1); + }); + + expect(bridgeMocks.renderFull).toHaveBeenCalledWith( + expect.objectContaining({ + sourceComposition, + }), + ); + }); +}); diff --git a/tests/image-pipeline/source-loader.test.ts b/tests/image-pipeline/source-loader.test.ts index 52fa708..15bbbb8 100644 --- a/tests/image-pipeline/source-loader.test.ts +++ b/tests/image-pipeline/source-loader.test.ts @@ -355,4 +355,446 @@ describe("loadSourceBitmap", () => { expect(createImageBitmap).toHaveBeenCalledWith(fakeVideo); expect(revokeObjectUrl).toHaveBeenCalledWith("blob:video-source"); }); + + it("renders non-square mixer overlays with contain-fit parity instead of stretching", async () => { + const baseBlob = new Blob(["base"]); + const overlayBlob = new Blob(["overlay"]); + const baseBitmap = { width: 100, height: 100 } as ImageBitmap; + const overlayBitmap = { width: 200, height: 100 } as ImageBitmap; + const composedBitmap = { width: 100, height: 100 } as ImageBitmap; + + const drawImage = vi.fn(); + const context = { + clearRect: vi.fn(), + drawImage, + save: vi.fn(), + restore: vi.fn(), + beginPath: vi.fn(), + rect: vi.fn(), + clip: vi.fn(), + globalCompositeOperation: "source-over" as GlobalCompositeOperation, + globalAlpha: 1, + }; + const canvas = { + width: 0, + height: 0, + getContext: vi.fn().mockReturnValue(context), + } as unknown as HTMLCanvasElement; + + const nativeCreateElement = document.createElement.bind(document); + vi.spyOn(document, "createElement").mockImplementation((tagName: string) => { + if (tagName.toLowerCase() === "canvas") { + return canvas; + } + + return nativeCreateElement(tagName); + }); + + vi.stubGlobal( + "fetch", + vi.fn().mockImplementation(async (input: string | URL | Request) => { + const url = String(input); + if (url.includes("base.png")) { + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(baseBlob), + }; + } + + return { + ok: 
true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(overlayBlob), + }; + }), + ); + + vi.stubGlobal( + "createImageBitmap", + vi.fn().mockImplementation(async (input: unknown) => { + if (input === baseBlob) { + return baseBitmap; + } + if (input === overlayBlob) { + return overlayBitmap; + } + if (input === canvas) { + return composedBitmap; + } + + throw new Error("Unexpected createImageBitmap input in mixer contain-fit test."); + }), + ); + + const { loadRenderSourceBitmap } = await importSubject(); + + await expect( + loadRenderSourceBitmap({ + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 80, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.25, + overlayHeight: 0.5, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }, + }), + ).resolves.toBe(composedBitmap); + + expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100); + const overlayDrawArgs = drawImage.mock.calls[1]; + expect(overlayDrawArgs?.[0]).toBe(overlayBitmap); + expect(overlayDrawArgs?.[1]).toBe(0); + expect(overlayDrawArgs?.[2]).toBe(0); + expect(overlayDrawArgs?.[3]).toBe(200); + expect(overlayDrawArgs?.[4]).toBe(100); + expect(overlayDrawArgs?.[5]).toBe(10); + expect(overlayDrawArgs?.[6]).toBeCloseTo(38.75, 10); + expect(overlayDrawArgs?.[7]).toBe(25); + expect(overlayDrawArgs?.[8]).toBeCloseTo(12.5, 10); + }); + + it("applies mixer crop framing by trimming source edges while leaving the displayed frame size untouched", async () => { + const baseBlob = new Blob(["base"]); + const overlayBlob = new Blob(["overlay"]); + const baseBitmap = { width: 100, height: 100 } as ImageBitmap; + const overlayBitmap = { width: 200, height: 100 } as ImageBitmap; + const composedBitmap = { width: 100, height: 100 } as ImageBitmap; + + const drawImage = vi.fn(); + const save = vi.fn(); + const 
restore = vi.fn(); + const beginPath = vi.fn(); + const rect = vi.fn(); + const clip = vi.fn(); + const context = { + clearRect: vi.fn(), + drawImage, + save, + restore, + beginPath, + rect, + clip, + globalCompositeOperation: "source-over" as GlobalCompositeOperation, + globalAlpha: 1, + }; + const canvas = { + width: 0, + height: 0, + getContext: vi.fn().mockReturnValue(context), + } as unknown as HTMLCanvasElement; + + const nativeCreateElement = document.createElement.bind(document); + vi.spyOn(document, "createElement").mockImplementation((tagName: string) => { + if (tagName.toLowerCase() === "canvas") { + return canvas; + } + + return nativeCreateElement(tagName); + }); + + vi.stubGlobal( + "fetch", + vi.fn().mockImplementation(async (input: string | URL | Request) => { + const url = String(input); + if (url.includes("base.png")) { + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(baseBlob), + }; + } + + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(overlayBlob), + }; + }), + ); + + vi.stubGlobal( + "createImageBitmap", + vi.fn().mockImplementation(async (input: unknown) => { + if (input === baseBlob) { + return baseBitmap; + } + if (input === overlayBlob) { + return overlayBitmap; + } + if (input === canvas) { + return composedBitmap; + } + + throw new Error("Unexpected createImageBitmap input in mixer content framing test."); + }), + ); + + const { loadRenderSourceBitmap } = await importSubject(); + + await expect( + loadRenderSourceBitmap({ + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 80, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.4, + cropLeft: 0.5, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }, + }), + 
).resolves.toBe(composedBitmap); + + expect(drawImage).toHaveBeenNthCalledWith(1, baseBitmap, 0, 0, 100, 100); + expect(save).toHaveBeenCalledTimes(1); + expect(beginPath).toHaveBeenCalledTimes(1); + expect(rect).toHaveBeenCalledWith(10, 20, 40, 40); + expect(clip).toHaveBeenCalledTimes(1); + expect(drawImage).toHaveBeenNthCalledWith( + 2, + overlayBitmap, + 100, + 0, + 100, + 100, + 10, + 20, + 40, + 40, + ); + expect(restore).toHaveBeenCalledTimes(1); + }); + + it("keeps overlayWidth and overlayHeight fixed while crop framing trims the sampled source region", async () => { + const baseBlob = new Blob(["base"]); + const overlayBlob = new Blob(["overlay"]); + const baseBitmap = { width: 100, height: 100 } as ImageBitmap; + const overlayBitmap = { width: 200, height: 100 } as ImageBitmap; + const composedBitmap = { width: 100, height: 100 } as ImageBitmap; + + const drawImage = vi.fn(); + const context = { + clearRect: vi.fn(), + drawImage, + save: vi.fn(), + restore: vi.fn(), + beginPath: vi.fn(), + rect: vi.fn(), + clip: vi.fn(), + globalCompositeOperation: "source-over" as GlobalCompositeOperation, + globalAlpha: 1, + }; + const canvas = { + width: 0, + height: 0, + getContext: vi.fn().mockReturnValue(context), + } as unknown as HTMLCanvasElement; + + const nativeCreateElement = document.createElement.bind(document); + vi.spyOn(document, "createElement").mockImplementation((tagName: string) => { + if (tagName.toLowerCase() === "canvas") { + return canvas; + } + + return nativeCreateElement(tagName); + }); + + vi.stubGlobal( + "fetch", + vi.fn().mockImplementation(async (input: string | URL | Request) => { + const url = String(input); + if (url.includes("base.png")) { + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(baseBlob), + }; + } + + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(overlayBlob), + }; 
+ }), + ); + + vi.stubGlobal( + "createImageBitmap", + vi.fn().mockImplementation(async (input: unknown) => { + if (input === baseBlob) { + return baseBitmap; + } + if (input === overlayBlob) { + return overlayBitmap; + } + if (input === canvas) { + return composedBitmap; + } + + throw new Error("Unexpected createImageBitmap input in overlay size preservation test."); + }), + ); + + const { loadRenderSourceBitmap } = await importSubject(); + + await expect( + loadRenderSourceBitmap({ + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 80, + overlayX: 0.15, + overlayY: 0.25, + overlayWidth: 0.5, + overlayHeight: 0.3, + cropLeft: 0.25, + cropTop: 0.1, + cropRight: 0.25, + cropBottom: 0.3, + }, + }), + ).resolves.toBe(composedBitmap); + + const overlayDrawArgs = drawImage.mock.calls[1]; + expect(overlayDrawArgs?.[0]).toBe(overlayBitmap); + expect(overlayDrawArgs?.[1]).toBe(50); + expect(overlayDrawArgs?.[2]).toBe(10); + expect(overlayDrawArgs?.[3]).toBe(100); + expect(overlayDrawArgs?.[4]).toBeCloseTo(60, 10); + expect(overlayDrawArgs?.[5]).toBeCloseTo(15, 10); + expect(overlayDrawArgs?.[6]).toBeCloseTo(25, 10); + expect(overlayDrawArgs?.[7]).toBeCloseTo(50, 10); + expect(overlayDrawArgs?.[8]).toBeCloseTo(30, 10); + }); + + it("contains a cropped wide source within the overlay frame during bake", async () => { + const baseBlob = new Blob(["base"]); + const overlayBlob = new Blob(["overlay"]); + const baseBitmap = { width: 100, height: 100 } as ImageBitmap; + const overlayBitmap = { width: 200, height: 100 } as ImageBitmap; + const composedBitmap = { width: 100, height: 100 } as ImageBitmap; + + const drawImage = vi.fn(); + const context = { + clearRect: vi.fn(), + drawImage, + save: vi.fn(), + restore: vi.fn(), + beginPath: vi.fn(), + rect: vi.fn(), + clip: vi.fn(), + globalCompositeOperation: "source-over" as GlobalCompositeOperation, + 
globalAlpha: 1, + }; + const canvas = { + width: 0, + height: 0, + getContext: vi.fn().mockReturnValue(context), + } as unknown as HTMLCanvasElement; + + const nativeCreateElement = document.createElement.bind(document); + vi.spyOn(document, "createElement").mockImplementation((tagName: string) => { + if (tagName.toLowerCase() === "canvas") { + return canvas; + } + + return nativeCreateElement(tagName); + }); + + vi.stubGlobal( + "fetch", + vi.fn().mockImplementation(async (input: string | URL | Request) => { + const url = String(input); + if (url.includes("base.png")) { + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(baseBlob), + }; + } + + return { + ok: true, + status: 200, + headers: { get: vi.fn().mockReturnValue("image/png") }, + blob: vi.fn().mockResolvedValue(overlayBlob), + }; + }), + ); + + vi.stubGlobal( + "createImageBitmap", + vi.fn().mockImplementation(async (input: unknown) => { + if (input === baseBlob) { + return baseBitmap; + } + if (input === overlayBlob) { + return overlayBitmap; + } + if (input === canvas) { + return composedBitmap; + } + + throw new Error("Unexpected createImageBitmap input in aspect-aware crop bake test."); + }), + ); + + const { loadRenderSourceBitmap } = await importSubject(); + + await expect( + loadRenderSourceBitmap({ + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 80, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.4, + cropLeft: 0, + cropTop: 0.25, + cropRight: 0, + cropBottom: 0.25, + }, + }), + ).resolves.toBe(composedBitmap); + + const overlayDrawArgs = drawImage.mock.calls[1]; + expect(overlayDrawArgs?.[0]).toBe(overlayBitmap); + expect(overlayDrawArgs?.[1]).toBe(0); + expect(overlayDrawArgs?.[2]).toBe(25); + expect(overlayDrawArgs?.[3]).toBe(200); + 
expect(overlayDrawArgs?.[4]).toBe(50); + expect(overlayDrawArgs?.[5]).toBe(10); + expect(overlayDrawArgs?.[6]).toBeCloseTo(35, 10); + expect(overlayDrawArgs?.[7]).toBe(40); + expect(overlayDrawArgs?.[8]).toBeCloseTo(10, 10); + }); }); diff --git a/tests/image-pipeline/webgl-backend-poc.test.ts b/tests/image-pipeline/webgl-backend-poc.test.ts index 647a54c..7bb55f9 100644 --- a/tests/image-pipeline/webgl-backend-poc.test.ts +++ b/tests/image-pipeline/webgl-backend-poc.test.ts @@ -341,6 +341,7 @@ describe("webgl backend poc", () => { vi.doMock("@/lib/image-pipeline/source-loader", () => ({ loadSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }), + loadRenderSourceBitmap: vi.fn().mockResolvedValue({ width: 2, height: 2 }), })); vi.spyOn(HTMLCanvasElement.prototype, "getContext").mockReturnValue({ diff --git a/tests/lib/canvas-mixer-preview.test.ts b/tests/lib/canvas-mixer-preview.test.ts index ef8fc89..91231dd 100644 --- a/tests/lib/canvas-mixer-preview.test.ts +++ b/tests/lib/canvas-mixer-preview.test.ts @@ -4,7 +4,7 @@ import { buildGraphSnapshot } from "@/lib/canvas-render-preview"; import { resolveMixerPreviewFromGraph } from "@/lib/canvas-mixer-preview"; describe("resolveMixerPreviewFromGraph", () => { - it("resolves base and overlay URLs by target handle", () => { + it("resolves base and overlay URLs by target handle while keeping frame and crop trims independent", () => { const graph = buildGraphSnapshot( [ { @@ -25,7 +25,18 @@ describe("resolveMixerPreviewFromGraph", () => { { id: "mixer-1", type: "mixer", - data: { blendMode: "screen", opacity: 70, offsetX: 12, offsetY: -8 }, + data: { + blendMode: "screen", + opacity: 70, + overlayX: 0.12, + overlayY: 0.2, + overlayWidth: 0.6, + overlayHeight: 0.5, + cropLeft: 0.08, + cropTop: 0.15, + cropRight: 0.22, + cropBottom: 0.1, + }, }, ], [ @@ -41,12 +52,114 @@ describe("resolveMixerPreviewFromGraph", () => { overlayUrl: "https://cdn.example.com/overlay.png", blendMode: "screen", opacity: 70, - offsetX: 
12, - offsetY: -8, + overlayX: 0.12, + overlayY: 0.2, + overlayWidth: 0.6, + overlayHeight: 0.5, + cropLeft: 0.08, + cropTop: 0.15, + cropRight: 0.22, + cropBottom: 0.1, }); }); - it("prefers render output URL over upstream preview source when available", () => { + it("preserves crop trims when frame resize data changes", () => { + const graph = buildGraphSnapshot( + [ + { + id: "image-base", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + overlayX: 0.2, + overlayY: 0.1, + overlayWidth: 0.6, + overlayHeight: 0.3, + cropLeft: 0.15, + cropTop: 0.05, + cropRight: 0.4, + cropBottom: 0.25, + }, + }, + ], + [ + { source: "image-base", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" }, + ], + ); + + expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual( + expect.objectContaining({ + overlayX: 0.2, + overlayY: 0.1, + overlayWidth: 0.6, + overlayHeight: 0.3, + cropLeft: 0.15, + cropTop: 0.05, + cropRight: 0.4, + cropBottom: 0.25, + }), + ); + }); + + it("preserves overlayWidth and overlayHeight when crop trims change", () => { + const graph = buildGraphSnapshot( + [ + { + id: "image-base", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + overlayX: 0.05, + overlayY: 0.25, + overlayWidth: 0.55, + overlayHeight: 0.35, + cropLeft: 0.4, + cropTop: 0.1, + cropRight: 0.3, + cropBottom: 0.1, + }, + }, + ], + [ + { source: "image-base", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" }, + ], + ); + + expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual( 
+ expect.objectContaining({ + overlayX: 0.05, + overlayY: 0.25, + overlayWidth: 0.55, + overlayHeight: 0.35, + cropLeft: 0.4, + cropTop: 0.1, + cropRight: 0.3, + cropBottom: 0.1, + }), + ); + }); + + it("prefers live render preview URL over stale baked render output", () => { const graph = buildGraphSnapshot( [ { @@ -82,11 +195,79 @@ describe("resolveMixerPreviewFromGraph", () => { expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({ status: "ready", baseUrl: "https://cdn.example.com/base.png", - overlayUrl: "https://cdn.example.com/render-output.png", + overlayUrl: "https://cdn.example.com/upstream.png", blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }); + }); + + it("does not reuse stale baked render output when only live sourceComposition exists", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-base", + type: "image", + data: { url: "https://cdn.example.com/overlay-base.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay-asset.png" }, + }, + { + id: "upstream-mixer", + type: "mixer", + data: {}, + }, + { + id: "render-overlay", + type: "render", + data: { + lastUploadUrl: "https://cdn.example.com/stale-render-output.png", + }, + }, + { + id: "mixer-1", + type: "mixer", + data: {}, + }, + ], + [ + { source: "overlay-base", target: "upstream-mixer", targetHandle: "base" }, + { source: "overlay-asset", target: "upstream-mixer", targetHandle: "overlay" }, + { source: "upstream-mixer", target: "render-overlay" }, + { source: "base-image", target: "mixer-1", targetHandle: "base" }, + { source: "render-overlay", target: "mixer-1", targetHandle: "overlay" }, + ], + ); + + expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", 
graph })).toEqual({ + status: "partial", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: undefined, + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }); }); @@ -113,12 +294,18 @@ describe("resolveMixerPreviewFromGraph", () => { overlayUrl: undefined, blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }); }); - it("normalizes blend mode and clamps numeric values", () => { + it("normalizes crop trims and clamps", () => { const graph = buildGraphSnapshot( [ { @@ -137,8 +324,14 @@ describe("resolveMixerPreviewFromGraph", () => { data: { blendMode: "unknown", opacity: 180, - offsetX: 9999, - offsetY: "-9999", + overlayX: -3, + overlayY: "1.4", + overlayWidth: 2, + overlayHeight: 0, + cropLeft: "0.95", + cropTop: -2, + cropRight: "4", + cropBottom: "0", }, }, ], @@ -154,8 +347,151 @@ describe("resolveMixerPreviewFromGraph", () => { overlayUrl: "https://cdn.example.com/overlay-asset.png", blendMode: "normal", opacity: 100, - offsetX: 2048, - offsetY: -2048, + overlayX: 0, + overlayY: 0.9, + overlayWidth: 1, + overlayHeight: 0.1, + cropLeft: 0.9, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }); + }); + + it("missing rect fields fallback to sensible defaults", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-ai", + type: "ai-image", + data: { url: "https://cdn.example.com/base-ai.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay-asset.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "multiply", + opacity: 42, + }, + }, + ], + [ + { source: "base-ai", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" }, + ], + ); + + 
expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({ + status: "ready", + baseUrl: "https://cdn.example.com/base-ai.png", + overlayUrl: "https://cdn.example.com/overlay-asset.png", + blendMode: "multiply", + opacity: 42, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }); + }); + + it("maps legacy content rect fields into crop trims during normalization", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-ai", + type: "ai-image", + data: { url: "https://cdn.example.com/base-ai.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay-asset.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + contentX: 0.2, + contentY: 0.1, + contentWidth: 0.5, + contentHeight: 0.6, + }, + }, + ], + [ + { source: "base-ai", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-asset", target: "mixer-1", targetHandle: "overlay" }, + ], + ); + + expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({ + status: "ready", + baseUrl: "https://cdn.example.com/base-ai.png", + overlayUrl: "https://cdn.example.com/overlay-asset.png", + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0.2, + cropTop: 0.1, + cropRight: 0.30000000000000004, + cropBottom: 0.30000000000000004, + }); + }); + + it("legacy offset fields still yield visible overlay geometry", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-ai", + type: "ai-image", + data: { url: "https://cdn.example.com/base-ai.png" }, + }, + { + id: "overlay-asset", + type: "asset", + data: { url: "https://cdn.example.com/overlay-asset.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + offsetX: 100, + offsetY: -40, + }, + }, + ], + [ + { source: "base-ai", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-asset", target: "mixer-1", 
targetHandle: "overlay" }, + ], + ); + + expect(resolveMixerPreviewFromGraph({ nodeId: "mixer-1", graph })).toEqual({ + status: "ready", + baseUrl: "https://cdn.example.com/base-ai.png", + overlayUrl: "https://cdn.example.com/overlay-asset.png", + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, }); }); @@ -190,8 +526,14 @@ describe("resolveMixerPreviewFromGraph", () => { overlayUrl: undefined, blendMode: "normal", opacity: 100, - offsetX: 0, - offsetY: 0, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, error: "duplicate-handle-edge", }); }); diff --git a/tests/lib/canvas-render-preview.test.ts b/tests/lib/canvas-render-preview.test.ts index 2266da2..4ca9dd2 100644 --- a/tests/lib/canvas-render-preview.test.ts +++ b/tests/lib/canvas-render-preview.test.ts @@ -4,8 +4,147 @@ import { buildGraphSnapshot, resolveRenderPreviewInputFromGraph, } from "@/lib/canvas-render-preview"; +import { + computeMixerCompareOverlayImageStyle, + computeMixerFrameRectInSurface, + computeVisibleMixerContentRect, + computeMixerCropImageStyle, + isMixerCropImageReady, +} from "@/lib/mixer-crop-layout"; describe("resolveRenderPreviewInputFromGraph", () => { + it("resolves mixer input as renderable mixer composition", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-image", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "overlay", + opacity: 76, + overlayX: 0.2, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.44, + cropLeft: 0.08, + cropTop: 0.15, + cropRight: 0.22, + cropBottom: 0.1, + }, + }, + { + id: "render-1", + type: "render", + data: {}, + }, + ], + [ + { source: 
"base-image", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-image", target: "mixer-1", targetHandle: "overlay" }, + { source: "mixer-1", target: "render-1" }, + ], + ); + + const preview = resolveRenderPreviewInputFromGraph({ + nodeId: "render-1", + graph, + }); + + expect(preview).toEqual({ + sourceUrl: null, + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "overlay", + opacity: 76, + overlayX: 0.2, + overlayY: 0.1, + overlayWidth: 0.55, + overlayHeight: 0.44, + cropLeft: 0.08, + cropTop: 0.15, + cropRight: 0.22, + cropBottom: 0.1, + }, + steps: [], + }); + }); + + it("normalizes mixer composition values for render input", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-image", + type: "asset", + data: { url: "https://cdn.example.com/overlay.png" }, + }, + { + id: "mixer-1", + type: "mixer", + data: { + blendMode: "unknown", + opacity: 180, + overlayX: -3, + overlayY: "1.4", + overlayWidth: 2, + overlayHeight: 0, + cropLeft: "0.95", + cropTop: -2, + cropRight: "4", + cropBottom: "0", + }, + }, + { + id: "render-1", + type: "render", + data: {}, + }, + ], + [ + { source: "base-image", target: "mixer-1", targetHandle: "base" }, + { source: "overlay-image", target: "mixer-1", targetHandle: "overlay" }, + { source: "mixer-1", target: "render-1" }, + ], + ); + + const preview = resolveRenderPreviewInputFromGraph({ + nodeId: "render-1", + graph, + }); + + expect(preview.sourceComposition).toEqual({ + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/overlay.png", + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0.9, + overlayWidth: 1, + overlayHeight: 0.1, + cropLeft: 0.9, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }); + }); + it("includes crop in collected 
pipeline steps", () => { const graph = buildGraphSnapshot( [ @@ -88,5 +227,191 @@ describe("resolveRenderPreviewInputFromGraph", () => { const preview = resolveRenderPreviewInputFromGraph({ nodeId: "render-1", graph }); expect(preview.sourceUrl).toBe("https://cdn.example.com/generated-video.mp4"); + expect(preview.sourceComposition).toBeUndefined(); + }); + + it("prefers live render preview URLs over stale baked render URLs inside downstream mixer compositions", () => { + const graph = buildGraphSnapshot( + [ + { + id: "base-image", + type: "image", + data: { url: "https://cdn.example.com/base.png" }, + }, + { + id: "overlay-upstream", + type: "image", + data: { url: "https://cdn.example.com/upstream.png" }, + }, + { + id: "render-overlay", + type: "render", + data: { + lastUploadUrl: "https://cdn.example.com/stale-render-output.png", + }, + }, + { + id: "mixer-1", + type: "mixer", + data: {}, + }, + { + id: "render-2", + type: "render", + data: {}, + }, + ], + [ + { source: "overlay-upstream", target: "render-overlay" }, + { source: "base-image", target: "mixer-1", targetHandle: "base" }, + { source: "render-overlay", target: "mixer-1", targetHandle: "overlay" }, + { source: "mixer-1", target: "render-2" }, + ], + ); + + const preview = resolveRenderPreviewInputFromGraph({ nodeId: "render-2", graph }); + + expect(preview).toEqual({ + sourceUrl: null, + sourceComposition: { + kind: "mixer", + baseUrl: "https://cdn.example.com/base.png", + overlayUrl: "https://cdn.example.com/upstream.png", + blendMode: "normal", + opacity: 100, + overlayX: 0, + overlayY: 0, + overlayWidth: 1, + overlayHeight: 1, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }, + steps: [], + }); + }); +}); + +describe("mixer crop layout parity", () => { + it("contains a wide cropped source inside a square overlay frame", () => { + expect( + computeVisibleMixerContentRect({ + frameAspectRatio: 1, + sourceWidth: 200, + sourceHeight: 100, + cropLeft: 0, + cropTop: 0.25, + cropRight: 
0, + cropBottom: 0.25, + }), + ).toEqual({ + x: 0, + y: 0.375, + width: 1, + height: 0.25, + }); + }); + + it("returns compare image styles that letterbox instead of stretching", () => { + expect( + computeMixerCropImageStyle({ + frameAspectRatio: 1, + sourceWidth: 200, + sourceHeight: 100, + cropLeft: 0, + cropTop: 0, + cropRight: 0, + cropBottom: 0, + }), + ).toEqual({ + left: "0%", + top: "25%", + width: "100%", + height: "50%", + }); + }); + + it("uses the actual base-aware frame pixel ratio for compare crop math", () => { + expect( + computeMixerCompareOverlayImageStyle({ + surfaceWidth: 500, + surfaceHeight: 380, + baseWidth: 200, + baseHeight: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.4, + sourceWidth: 200, + sourceHeight: 100, + cropLeft: 0.1, + cropTop: 0, + cropRight: 0.1, + cropBottom: 0, + }), + ).toEqual({ + left: "0%", + top: "0%", + width: "100%", + height: "100%", + }); + }); + + it("does not mark compare crop overlay ready before natural size is known", () => { + expect( + isMixerCropImageReady({ + currentOverlayUrl: "https://cdn.example.com/overlay-a.png", + loadedOverlayUrl: null, + sourceWidth: 0, + sourceHeight: 0, + }), + ).toBe(false); + }); + + it("invalidates compare crop overlay readiness on source swap until the new image loads", () => { + expect( + isMixerCropImageReady({ + currentOverlayUrl: "https://cdn.example.com/overlay-b.png", + loadedOverlayUrl: "https://cdn.example.com/overlay-a.png", + sourceWidth: 200, + sourceHeight: 100, + }), + ).toBe(false); + }); + + it("positions mixer overlay frame relative to the displayed base-image rect", () => { + expect( + computeMixerFrameRectInSurface({ + surfaceWidth: 1, + surfaceHeight: 1, + baseWidth: 200, + baseHeight: 100, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.4, + }), + ).toEqual({ + x: 0.1, + y: 0.35, + width: 0.4, + height: 0.2, + }); + }); + + it("returns null frame placement until base image natural size is known", 
() => { + expect( + computeMixerFrameRectInSurface({ + surfaceWidth: 1, + surfaceHeight: 1, + baseWidth: 0, + baseHeight: 0, + overlayX: 0.1, + overlayY: 0.2, + overlayWidth: 0.4, + overlayHeight: 0.4, + }), + ).toBeNull(); }); }); diff --git a/tests/preview-renderer.test.ts b/tests/preview-renderer.test.ts index 1361dbe..ca6646c 100644 --- a/tests/preview-renderer.test.ts +++ b/tests/preview-renderer.test.ts @@ -32,6 +32,13 @@ vi.mock("@/lib/image-pipeline/render-core", () => ({ vi.mock("@/lib/image-pipeline/source-loader", () => ({ loadSourceBitmap: sourceLoaderMocks.loadSourceBitmap, + loadRenderSourceBitmap: ({ sourceUrl }: { sourceUrl?: string }) => { + if (!sourceUrl) { + throw new Error("Render source is required."); + } + + return sourceLoaderMocks.loadSourceBitmap(sourceUrl); + }, })); describe("preview-renderer cancellation", () => { diff --git a/tests/worker-client.test.ts b/tests/worker-client.test.ts index 1aa674f..bc3f3d2 100644 --- a/tests/worker-client.test.ts +++ b/tests/worker-client.test.ts @@ -199,6 +199,48 @@ describe("worker-client fallbacks", () => { expect(bridgeMocks.renderFull).not.toHaveBeenCalled(); }); + it("does not include AbortSignal in full worker payload serialization", async () => { + const workerMessages: WorkerMessage[] = []; + FakeWorker.behavior = (worker, message) => { + workerMessages.push(message); + if (message.kind !== "full") { + return; + } + + queueMicrotask(() => { + worker.onmessage?.({ + data: { + kind: "full-result", + requestId: message.requestId, + payload: createFullResult(), + }, + } as MessageEvent); + }); + }; + vi.stubGlobal("Worker", FakeWorker as unknown as typeof Worker); + + const { renderFullWithWorkerFallback } = await import("@/lib/image-pipeline/worker-client"); + + await renderFullWithWorkerFallback({ + sourceUrl: "https://cdn.example.com/source.png", + steps: [], + render: { + resolution: "original", + format: "png", + }, + signal: new AbortController().signal, + }); + + const fullMessage = 
workerMessages.find((message) => message.kind === "full") as + | (WorkerMessage & { + payload?: Record<string, unknown>; + }) + | undefined; + + expect(fullMessage).toBeDefined(); + expect(fullMessage?.payload).not.toHaveProperty("signal"); + }); + it("still falls back to the main thread when the Worker API is unavailable", async () => { + vi.stubGlobal("Worker", undefined);