Enhance canvas functionality by adding media preview capabilities and image upload handling. Introduce compressed image previews during uploads, improve media library integration, and implement retry logic for bridge edge creation. Update dashboard to display media previews and optimize image node handling.

This commit is contained in:
Matthias
2026-04-08 20:44:31 +02:00
parent a7eb2bc99c
commit b7f24223f2
43 changed files with 4064 additions and 148 deletions

View File

@@ -2,14 +2,16 @@
import Image from "next/image";
import { useRouter } from "next/navigation";
import { useEffect, useRef, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { useTheme } from "next-themes";
import { useMutation } from "convex/react";
import { useTranslations } from "next-intl";
import {
ChevronDown,
Coins,
ImageIcon,
LayoutTemplate,
Loader2,
Monitor,
Moon,
Search,
@@ -36,6 +38,11 @@ import { CreditOverview } from "@/components/dashboard/credit-overview";
import { CreditsActivityChart } from "@/components/dashboard/credits-activity-chart";
import { RecentTransactions } from "@/components/dashboard/recent-transactions";
import CanvasCard from "@/components/dashboard/canvas-card";
import { MediaLibraryDialog } from "@/components/media/media-library-dialog";
import {
collectMediaStorageIdsForResolution,
resolveMediaPreviewUrl,
} from "@/components/media/media-preview-utils";
import { useDashboardSnapshot } from "@/hooks/use-dashboard-snapshot";
import { toast } from "@/lib/toast";
@@ -51,6 +58,14 @@ function getInitials(nameOrEmail: string) {
return normalized.slice(0, 2).toUpperCase();
}
/**
 * Renders a human-readable pixel-size label for a media item.
 * Falls back to the German "size unknown" label when either
 * dimension is missing.
 */
function formatDimensions(width: number | undefined, height: number | undefined): string {
  const hasBothDimensions = typeof width === "number" && typeof height === "number";
  return hasBothDimensions ? `${width} x ${height}px` : "Größe unbekannt";
}
export function DashboardPageClient() {
const t = useTranslations("toasts");
const router = useRouter();
@@ -59,7 +74,12 @@ export function DashboardPageClient() {
const { data: session, isPending: isSessionPending } = authClient.useSession();
const { snapshot: dashboardSnapshot } = useDashboardSnapshot(session?.user?.id);
const createCanvas = useMutation(api.canvases.create);
const resolveMediaPreviewUrls = useMutation(api.storage.batchGetUrlsForUserMedia);
const [isCreatingWorkspace, setIsCreatingWorkspace] = useState(false);
const [isMediaLibraryDialogOpen, setIsMediaLibraryDialogOpen] = useState(false);
const [mediaPreviewUrlMap, setMediaPreviewUrlMap] = useState<Record<string, string | undefined>>({});
const [isResolvingMediaPreview, setIsResolvingMediaPreview] = useState(false);
const [mediaPreviewError, setMediaPreviewError] = useState<string | null>(null);
const [hasClientMounted, setHasClientMounted] = useState(false);
useEffect(() => {
@@ -69,6 +89,11 @@ export function DashboardPageClient() {
const displayName = session?.user.name?.trim() || session?.user.email || "Nutzer";
const initials = getInitials(displayName);
const canvases = dashboardSnapshot?.canvases;
const mediaPreview = dashboardSnapshot?.mediaPreview;
const mediaPreviewStorageIds = useMemo(() => {
const previewItems = mediaPreview ?? [];
return collectMediaStorageIdsForResolution(previewItems);
}, [mediaPreview]);
useEffect(() => {
if (!session?.user || welcomeToastSentRef.current) return;
@@ -79,6 +104,55 @@ export function DashboardPageClient() {
toast.success(t("auth.welcomeOnDashboard"));
}, [t, session?.user]);
useEffect(() => {
let isCancelled = false;
async function run() {
if (dashboardSnapshot === undefined) {
setMediaPreviewUrlMap({});
setMediaPreviewError(null);
setIsResolvingMediaPreview(false);
return;
}
if (mediaPreviewStorageIds.length === 0) {
setMediaPreviewUrlMap({});
setMediaPreviewError(null);
setIsResolvingMediaPreview(false);
return;
}
setIsResolvingMediaPreview(true);
setMediaPreviewError(null);
try {
const resolved = await resolveMediaPreviewUrls({ storageIds: mediaPreviewStorageIds });
if (isCancelled) {
return;
}
setMediaPreviewUrlMap(resolved);
} catch (error) {
if (isCancelled) {
return;
}
setMediaPreviewUrlMap({});
setMediaPreviewError(
error instanceof Error ? error.message : "Vorschau konnte nicht geladen werden.",
);
} finally {
if (!isCancelled) {
setIsResolvingMediaPreview(false);
}
}
}
void run();
return () => {
isCancelled = true;
};
}, [dashboardSnapshot, mediaPreviewStorageIds, resolveMediaPreviewUrls]);
const handleSignOut = async () => {
toast.info(t("auth.signedOut"));
await authClient.signOut();
@@ -248,7 +322,86 @@ export function DashboardPageClient() {
/>
<RecentTransactions recentTransactions={dashboardSnapshot?.recentTransactions} />
</section>
<section className="mb-12">
<div className="mb-4 flex items-center justify-between">
<div className="flex items-center gap-2 text-sm font-medium">
<ImageIcon className="size-3.5 text-muted-foreground" />
Mediathek
</div>
<Button
variant="ghost"
size="sm"
className="cursor-pointer text-muted-foreground"
type="button"
onClick={() => setIsMediaLibraryDialogOpen(true)}
disabled={!hasClientMounted || isSessionPending || !session?.user}
>
Ganze Mediathek öffnen
</Button>
</div>
{dashboardSnapshot === undefined ? (
<div className="rounded-xl border bg-card p-4 text-sm text-muted-foreground shadow-sm shadow-foreground/3">
Mediathek wird geladen...
</div>
) : mediaPreviewError ? (
<div className="rounded-xl border border-dashed bg-card p-4 text-sm text-muted-foreground shadow-sm shadow-foreground/3">
Mediathek-Vorschau konnte nicht geladen werden. {mediaPreviewError}
</div>
) : !mediaPreview || mediaPreview.length === 0 ? (
<div className="rounded-xl border bg-card p-4 text-sm text-muted-foreground shadow-sm shadow-foreground/3">
Noch keine Medien vorhanden. Sobald du Bilder hochlädst oder generierst, werden
sie hier angezeigt.
</div>
) : (
<div className="grid gap-3 sm:grid-cols-4">
{(mediaPreview ?? []).map((item) => {
const previewUrl = resolveMediaPreviewUrl(item, mediaPreviewUrlMap);
return (
<article key={item.storageId} className="overflow-hidden rounded-xl border bg-card">
<div className="relative aspect-square bg-muted/50">
{previewUrl ? (
// eslint-disable-next-line @next/next/no-img-element
<img
src={previewUrl}
alt={item.filename ?? "Mediathek-Bild"}
className="h-full w-full object-cover"
loading="lazy"
/>
) : isResolvingMediaPreview ? (
<div className="flex h-full w-full items-center justify-center text-muted-foreground">
<Loader2 className="size-4 animate-spin" />
</div>
) : (
<div className="flex h-full w-full items-center justify-center text-muted-foreground">
<ImageIcon className="size-5" />
</div>
)}
</div>
<div className="space-y-1 p-2">
<p className="truncate text-xs font-medium" title={item.filename}>
{item.filename ?? "Unbenanntes Bild"}
</p>
<p className="text-[11px] text-muted-foreground">
{formatDimensions(item.width, item.height)}
</p>
</div>
</article>
);
})}
</div>
)}
</section>
</main>
<MediaLibraryDialog
open={isMediaLibraryDialogOpen}
onOpenChange={setIsMediaLibraryDialogOpen}
title="Mediathek"
description="Alle deine Bilder aus LemonSpace in einer zentralen Vorschau."
/>
</div>
);
}

View File

@@ -0,0 +1,133 @@
// @vitest-environment jsdom
import React, { act, useEffect, useRef, useState } from "react";
import { createRoot, type Root } from "react-dom/client";
import type { Edge as RFEdge, Node as RFNode } from "@xyflow/react";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { Id } from "@/convex/_generated/dataModel";
const toastInfoMock = vi.hoisted(() => vi.fn());
vi.mock("@/lib/toast", () => ({
toast: {
warning: vi.fn(),
info: toastInfoMock,
},
}));
import { useCanvasDeleteHandlers } from "@/components/canvas/canvas-delete-handlers";
const asCanvasId = (id: string): Id<"canvases"> => id as Id<"canvases">;
const latestHandlersRef: {
current: ReturnType<typeof useCanvasDeleteHandlers> | null;
} = { current: null };
(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true;
type HookHarnessProps = {
runBatchRemoveNodesMutation: ReturnType<typeof vi.fn>;
runCreateEdgeMutation: ReturnType<typeof vi.fn>;
};
/**
 * Minimal React component that mounts `useCanvasDeleteHandlers` with a fixed
 * three-node chain (source -> middle -> target) joined by two edges, so tests
 * can delete the middle node and observe bridge-edge creation.
 *
 * The resulting handlers are published through the module-level
 * `latestHandlersRef` after every render; the component renders nothing.
 */
function HookHarness({
  runBatchRemoveNodesMutation,
  runCreateEdgeMutation,
}: HookHarnessProps) {
  // Static graph fixture: a linear chain whose middle node can be deleted.
  const [nodes] = useState<RFNode[]>([
    { id: "node-source", type: "image", position: { x: 0, y: 0 }, data: {} },
    { id: "node-middle", type: "note", position: { x: 120, y: 0 }, data: {} },
    { id: "node-target", type: "text", position: { x: 240, y: 0 }, data: {} },
  ]);
  const [edges] = useState<RFEdge[]>([
    { id: "edge-source-middle", source: "node-source", target: "node-middle" },
    { id: "edge-middle-target", source: "node-middle", target: "node-target" },
  ]);
  // Refs mirror the live graph the way the real canvas component wires them.
  const nodesRef = useRef(nodes);
  const edgesRef = useRef(edges);
  const deletingNodeIds = useRef(new Set<string>());
  // Only the setter is needed by the hook; the value itself is unused here.
  const [, setAssetBrowserTargetNodeId] = useState<string | null>(null);
  const handlers = useCanvasDeleteHandlers({
    // Identity translator: toast assertions check the raw key, e.g. "canvas.nodesRemoved".
    t: ((key: string) => key) as never,
    canvasId: asCanvasId("canvas-1"),
    nodes,
    edges,
    nodesRef,
    edgesRef,
    deletingNodeIds,
    setAssetBrowserTargetNodeId,
    runBatchRemoveNodesMutation,
    runCreateEdgeMutation,
    runRemoveEdgeMutation: vi.fn(async () => undefined),
  });
  // Expose the freshest handlers to the test body outside the React tree.
  useEffect(() => {
    latestHandlersRef.current = handlers;
  }, [handlers]);
  return null;
}
describe("useCanvasDeleteHandlers", () => {
  let container: HTMLDivElement | null = null;
  let root: Root | null = null;
  afterEach(async () => {
    // Reset shared state, restore real timers, and unmount inside act()
    // so React effects flush cleanly between tests.
    latestHandlersRef.current = null;
    vi.useRealTimers();
    vi.clearAllMocks();
    if (root) {
      await act(async () => {
        root?.unmount();
      });
    }
    container?.remove();
    root = null;
    container = null;
  });
  it("retries bridge edge creation when the first create fails", async () => {
    // Fake timers let the test fast-forward the retry backoff delay.
    vi.useFakeTimers();
    const runBatchRemoveNodesMutation = vi.fn(async () => undefined);
    // First create rejects with a retryable message; subsequent calls succeed.
    const runCreateEdgeMutation = vi
      .fn(async () => undefined)
      .mockRejectedValueOnce(new Error("incoming limit reached"));
    container = document.createElement("div");
    document.body.appendChild(container);
    root = createRoot(container);
    await act(async () => {
      root?.render(
        <HookHarness
          runBatchRemoveNodesMutation={runBatchRemoveNodesMutation}
          runCreateEdgeMutation={runCreateEdgeMutation}
        />,
      );
    });
    // Deleting the middle node should bridge source -> target with a new edge.
    await act(async () => {
      latestHandlersRef.current?.onNodesDelete([
        {
          id: "node-middle",
          type: "note",
          position: { x: 120, y: 0 },
          data: {},
        },
      ]);
      await Promise.resolve();
    });
    // Run the scheduled backoff timer, then drain microtasks so the retried
    // mutation promise settles before asserting.
    await act(async () => {
      vi.runAllTimers();
      await Promise.resolve();
      await Promise.resolve();
    });
    expect(runBatchRemoveNodesMutation).toHaveBeenCalledTimes(1);
    // One failed attempt + one successful retry.
    expect(runCreateEdgeMutation).toHaveBeenCalledTimes(2);
    expect(toastInfoMock).toHaveBeenCalledWith("canvas.nodesRemoved");
  });
});

View File

@@ -0,0 +1,76 @@
// @vitest-environment jsdom
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { createCompressedImagePreview } from "@/components/canvas/canvas-media-utils";
/**
 * Minimal stand-in for the DOM Image element used by the preview tests.
 * Assigning `src` "loads" asynchronously on the microtask queue, reporting
 * fixed 4000x2000 intrinsic dimensions before invoking `onload`.
 */
class MockImage {
  onload: (() => void) | null = null;
  onerror: (() => void) | null = null;
  naturalWidth = 0;
  naturalHeight = 0;
  set src(_ignoredUrl: string) {
    // Defer like a real decode would: callers attach onload before it fires.
    queueMicrotask(() => {
      this.naturalWidth = 4000;
      this.naturalHeight = 2000;
      if (this.onload) {
        this.onload();
      }
    });
  }
}
describe("createCompressedImagePreview", () => {
  // Keep originals so globals stubbed per-test can be restored afterwards.
  const originalImage = globalThis.Image;
  const originalCreateObjectURL = URL.createObjectURL;
  const originalRevokeObjectURL = URL.revokeObjectURL;
  const drawImage = vi.fn();
  const toBlob = vi.fn();
  beforeEach(() => {
    // Swap in the 4000x2000 MockImage so decoding is deterministic in jsdom.
    vi.stubGlobal("Image", MockImage);
    URL.createObjectURL = vi.fn(() => "blob:preview") as typeof URL.createObjectURL;
    URL.revokeObjectURL = vi.fn() as typeof URL.revokeObjectURL;
    // jsdom has no real 2D canvas; stub the context and the encoder.
    vi.spyOn(HTMLCanvasElement.prototype, "getContext").mockReturnValue({
      drawImage,
    } as unknown as CanvasRenderingContext2D);
    vi.spyOn(HTMLCanvasElement.prototype, "toBlob").mockImplementation(
      (callback) => {
        toBlob();
        callback(new Blob(["preview"], { type: "image/webp" }));
      },
    );
  });
  afterEach(() => {
    vi.restoreAllMocks();
    drawImage.mockReset();
    toBlob.mockReset();
    // Restore the globals replaced in beforeEach.
    vi.stubGlobal("Image", originalImage);
    URL.createObjectURL = originalCreateObjectURL;
    URL.revokeObjectURL = originalRevokeObjectURL;
  });
  it("clamps dimensions to the configured max edge", async () => {
    const file = new File(["bytes"], "photo.jpg", { type: "image/jpeg" });
    const preview = await createCompressedImagePreview(file);
    // 4000x2000 scaled so the longer edge hits the default 640px max.
    expect(preview.width).toBe(640);
    expect(preview.height).toBe(320);
    expect(preview.blob.type).toBe("image/webp");
    expect(drawImage).toHaveBeenCalledTimes(1);
    expect(toBlob).toHaveBeenCalledTimes(1);
  });
  it("returns fallback mime type when encoder does not produce webp", async () => {
    // Simulate a browser that silently encodes png instead of webp.
    vi.spyOn(HTMLCanvasElement.prototype, "toBlob").mockImplementation((callback) => {
      callback(new Blob(["preview"], { type: "image/png" }));
    });
    const file = new File(["bytes"], "photo.jpg", { type: "image/jpeg" });
    const preview = await createCompressedImagePreview(file);
    expect(preview.blob.type).toBe("image/png");
  });
});

View File

@@ -10,6 +10,7 @@ import { CANVAS_NODE_DND_MIME } from "@/lib/canvas-connection-policy";
import { NODE_DEFAULTS } from "@/lib/canvas-utils";
import { toast } from "@/lib/toast";
import { useCanvasDrop } from "@/components/canvas/use-canvas-drop";
import { createCompressedImagePreview } from "@/components/canvas/canvas-media-utils";
vi.mock("@/lib/toast", () => ({
toast: {
@@ -20,6 +21,11 @@ vi.mock("@/lib/toast", () => ({
vi.mock("@/components/canvas/canvas-media-utils", () => ({
getImageDimensions: vi.fn(async () => ({ width: 1600, height: 900 })),
createCompressedImagePreview: vi.fn(async () => ({
blob: new Blob(["preview"], { type: "image/webp" }),
width: 640,
height: 360,
})),
}));
const latestHandlersRef: {
@@ -33,6 +39,7 @@ const asCanvasId = (id: string): Id<"canvases"> => id as Id<"canvases">;
type HookHarnessProps = {
isSyncOnline?: boolean;
generateUploadUrl?: ReturnType<typeof vi.fn>;
registerUploadedImageMedia?: ReturnType<typeof vi.fn>;
runCreateNodeOnlineOnly?: ReturnType<typeof vi.fn>;
runCreateNodeWithEdgeSplitOnlineOnly?: ReturnType<typeof vi.fn>;
notifyOfflineUnsupported?: ReturnType<typeof vi.fn>;
@@ -44,6 +51,7 @@ type HookHarnessProps = {
function HookHarness({
isSyncOnline = true,
generateUploadUrl = vi.fn(async () => "https://upload.test"),
registerUploadedImageMedia = vi.fn(async () => ({ ok: true as const })),
runCreateNodeOnlineOnly = vi.fn(async () => "node-1"),
runCreateNodeWithEdgeSplitOnlineOnly = vi.fn(async () => "node-1"),
notifyOfflineUnsupported = vi.fn(),
@@ -58,6 +66,7 @@ function HookHarness({
edges,
screenToFlowPosition,
generateUploadUrl,
registerUploadedImageMedia,
runCreateNodeOnlineOnly,
runCreateNodeWithEdgeSplitOnlineOnly,
notifyOfflineUnsupported,
@@ -78,10 +87,19 @@ describe("useCanvasDrop", () => {
beforeEach(() => {
consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.stubGlobal("fetch", vi.fn(async () => ({
vi.stubGlobal(
"fetch",
vi
.fn()
.mockResolvedValueOnce({
ok: true,
json: async () => ({ storageId: "storage-1" }),
})));
})
.mockResolvedValueOnce({
ok: true,
json: async () => ({ storageId: "preview-storage-1" }),
}),
);
vi.stubGlobal("crypto", {
randomUUID: vi.fn(() => "req-1"),
});
@@ -151,6 +169,7 @@ describe("useCanvasDrop", () => {
it("creates an image node from a dropped image file", async () => {
const generateUploadUrl = vi.fn(async () => "https://upload.test");
const registerUploadedImageMedia = vi.fn(async () => ({ ok: true as const }));
const runCreateNodeOnlineOnly = vi.fn(async () => "node-image");
const syncPendingMoveForClientRequest = vi.fn(async () => undefined);
const file = new File(["image-bytes"], "photo.png", { type: "image/png" });
@@ -163,6 +182,7 @@ describe("useCanvasDrop", () => {
root?.render(
<HookHarness
generateUploadUrl={generateUploadUrl}
registerUploadedImageMedia={registerUploadedImageMedia}
runCreateNodeOnlineOnly={runCreateNodeOnlineOnly}
syncPendingMoveForClientRequest={syncPendingMoveForClientRequest}
/>,
@@ -181,12 +201,17 @@ describe("useCanvasDrop", () => {
} as unknown as React.DragEvent);
});
expect(generateUploadUrl).toHaveBeenCalledTimes(1);
expect(fetch).toHaveBeenCalledWith("https://upload.test", {
expect(generateUploadUrl).toHaveBeenCalledTimes(2);
expect(fetch).toHaveBeenNthCalledWith(1, "https://upload.test", {
method: "POST",
headers: { "Content-Type": "image/png" },
body: file,
});
expect(fetch).toHaveBeenNthCalledWith(2, "https://upload.test", {
method: "POST",
headers: { "Content-Type": "image/webp" },
body: expect.any(Blob),
});
expect(runCreateNodeOnlineOnly).toHaveBeenCalledWith({
canvasId: "canvas-1",
type: "image",
@@ -196,10 +221,13 @@ describe("useCanvasDrop", () => {
height: NODE_DEFAULTS.image.height,
data: {
storageId: "storage-1",
previewStorageId: "preview-storage-1",
filename: "photo.png",
mimeType: "image/png",
width: 1600,
height: 900,
previewWidth: 640,
previewHeight: 360,
canvasId: "canvas-1",
},
clientRequestId: "req-1",
@@ -208,6 +236,15 @@ describe("useCanvasDrop", () => {
"req-1",
"node-image",
);
expect(registerUploadedImageMedia).toHaveBeenCalledWith({
canvasId: "canvas-1",
nodeId: "node-image",
storageId: "storage-1",
filename: "photo.png",
mimeType: "image/png",
width: 1600,
height: 900,
});
});
it("creates a node from a JSON payload drop", async () => {
@@ -267,6 +304,58 @@ describe("useCanvasDrop", () => {
expect(syncPendingMoveForClientRequest).toHaveBeenCalledWith("req-1", "node-video");
});
it("continues with original upload when preview generation fails", async () => {
vi.mocked(createCompressedImagePreview).mockRejectedValueOnce(
new Error("preview failed"),
);
const generateUploadUrl = vi.fn(async () => "https://upload.test");
const runCreateNodeOnlineOnly = vi.fn(async () => "node-image");
const file = new File(["image-bytes"], "photo.png", { type: "image/png" });
container = document.createElement("div");
document.body.appendChild(container);
root = createRoot(container);
await act(async () => {
root?.render(
<HookHarness
generateUploadUrl={generateUploadUrl}
runCreateNodeOnlineOnly={runCreateNodeOnlineOnly}
/>,
);
});
await act(async () => {
await latestHandlersRef.current?.onDrop({
preventDefault: vi.fn(),
clientX: 20,
clientY: 10,
dataTransfer: {
getData: vi.fn(() => ""),
files: [file],
},
} as unknown as React.DragEvent);
});
expect(generateUploadUrl).toHaveBeenCalledTimes(1);
expect(fetch).toHaveBeenCalledTimes(1);
expect(runCreateNodeOnlineOnly).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.objectContaining({
storageId: "storage-1",
}),
}),
);
expect(runCreateNodeOnlineOnly).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.not.objectContaining({
previewStorageId: expect.anything(),
}),
}),
);
});
it("splits an intersected persisted edge for sidebar node drops", async () => {
const runCreateNodeOnlineOnly = vi.fn(async () => "node-note");
const runCreateNodeWithEdgeSplitOnlineOnly = vi.fn(async () => "node-note");

View File

@@ -18,6 +18,50 @@ import { validateCanvasConnection } from "./canvas-connection-validation";
type ToastTranslations = ReturnType<typeof useTranslations<'toasts'>>;
const BRIDGE_CREATE_MAX_ATTEMPTS = 4;
const BRIDGE_CREATE_INITIAL_BACKOFF_MS = 40;
/** Resolves after `ms` milliseconds; used as the retry backoff delay. */
function waitFor(ms: number): Promise<void> {
  return new Promise<void>((done) => setTimeout(done, ms));
}
/** Extracts a readable message from any thrown value. */
function getErrorMessage(error: unknown): string {
  return error instanceof Error ? error.message : String(error);
}
/**
 * Decides whether a failed bridge-edge create should be retried.
 *
 * Only permission/authentication failures are treated as permanent, because
 * retrying them can never succeed. Every other failure (limits, duplicates,
 * conflicts, timeouts, transient backend errors, unknown messages, ...) is
 * considered retryable and handed to the bounded backoff loop.
 *
 * Note: the previous version enumerated retryable keywords ("limit",
 * "duplicate", "timeout", ...) in a second branch, but that branch and the
 * fall-through both returned true, so the list was dead code; behavior is
 * unchanged by removing it.
 */
function isRetryableBridgeCreateError(error: unknown): boolean {
  // Mirrors getErrorMessage(); inlined so this predicate is self-contained.
  const raw = error instanceof Error ? error.message : String(error);
  const message = raw.toLowerCase();
  const isPermanentAuthFailure =
    message.includes("unauthorized") ||
    message.includes("forbidden") ||
    message.includes("not authenticated");
  return !isPermanentAuthFailure;
}
type UseCanvasDeleteHandlersParams = {
t: ToastTranslations;
canvasId: Id<"canvases">;
@@ -136,13 +180,37 @@ export function useCanvasDeleteHandlers({
liveNodes,
liveEdges,
);
const connectedDeletedEdges = getConnectedEdges(deletedNodes, liveEdges);
const remainingNodes = liveNodes.filter(
(node) => !removedTargetSet.has(node.id),
);
let remainingEdges = liveEdges.filter(
(edge) => !connectedDeletedEdges.includes(edge) && edge.className !== "temp",
);
const bridgeEdgesCreatedInThisRun: RFEdge[] = [];
const getRemainingNodes = () =>
nodesRef.current.filter((node) => !removedTargetSet.has(node.id));
const getRemainingEdges = () => {
const fromRefs = edgesRef.current.filter((edge) => {
if (edge.className === "temp") {
return false;
}
if (removedTargetSet.has(edge.source) || removedTargetSet.has(edge.target)) {
return false;
}
return true;
});
const deduped = [...fromRefs];
const dedupedKeys = new Set(fromRefs.map((edge) => edgeKey(edge)));
for (const createdEdge of bridgeEdgesCreatedInThisRun) {
const key = edgeKey(createdEdge);
if (dedupedKeys.has(key)) {
continue;
}
deduped.push(createdEdge);
dedupedKeys.add(key);
}
return deduped;
};
if (bridgeCreates.length > 0) {
console.info("[Canvas] computed bridge edges for delete", {
@@ -168,13 +236,24 @@ export function useCanvasDeleteHandlers({
sourceHandle: bridgeCreate.sourceHandle,
targetHandle: bridgeCreate.targetHandle,
});
let created = false;
for (
let attempt = 1;
attempt <= BRIDGE_CREATE_MAX_ATTEMPTS;
attempt += 1
) {
const remainingNodes = getRemainingNodes();
const remainingEdges = getRemainingEdges();
if (remainingEdges.some((edge) => edgeKey(edge) === bridgeKey)) {
console.info("[Canvas] skipped duplicate bridge edge after delete", {
canvasId,
deletedNodeIds: idsToDelete,
bridgeCreate,
});
continue;
break;
}
const validationError = validateCanvasConnection(
@@ -197,7 +276,7 @@ export function useCanvasDeleteHandlers({
bridgeCreate,
validationError,
});
continue;
break;
}
try {
@@ -205,6 +284,7 @@ export function useCanvasDeleteHandlers({
canvasId,
deletedNodeIds: idsToDelete,
bridgeCreate,
attempt,
});
await runCreateEdgeMutation({
@@ -214,24 +294,53 @@ export function useCanvasDeleteHandlers({
sourceHandle: bridgeCreate.sourceHandle,
targetHandle: bridgeCreate.targetHandle,
});
remainingEdges = [
...remainingEdges,
{
id: `bridge-${bridgeCreate.sourceNodeId}-${bridgeCreate.targetNodeId}-${remainingEdges.length}`,
bridgeEdgesCreatedInThisRun.push({
id: `bridge-${bridgeCreate.sourceNodeId}-${bridgeCreate.targetNodeId}-${bridgeEdgesCreatedInThisRun.length}`,
source: bridgeCreate.sourceNodeId,
target: bridgeCreate.targetNodeId,
sourceHandle: bridgeCreate.sourceHandle,
targetHandle: bridgeCreate.targetHandle,
},
];
});
created = true;
break;
} catch (error: unknown) {
const errorMessage = getErrorMessage(error);
const retryable = isRetryableBridgeCreateError(error);
const isLastAttempt = attempt >= BRIDGE_CREATE_MAX_ATTEMPTS;
if (!retryable || isLastAttempt) {
console.error("[Canvas] bridge edge create failed", {
canvasId,
deletedNodeIds: idsToDelete,
bridgeCreate,
error,
attempt,
maxAttempts: BRIDGE_CREATE_MAX_ATTEMPTS,
retryable,
error: errorMessage,
});
throw error;
break;
}
const backoffMs =
BRIDGE_CREATE_INITIAL_BACKOFF_MS * 2 ** (attempt - 1);
console.warn("[Canvas] bridge edge create retry scheduled", {
canvasId,
deletedNodeIds: idsToDelete,
bridgeCreate,
attempt,
nextAttempt: attempt + 1,
backoffMs,
error: errorMessage,
});
await waitFor(backoffMs);
}
}
if (!created) {
continue;
}
}
})()
@@ -240,7 +349,11 @@ export function useCanvasDeleteHandlers({
// Den Delete-Lock erst lösen, wenn Convex-Snapshot die Node wirklich nicht mehr enthält.
})
.catch((error: unknown) => {
console.error("[Canvas] batch remove failed", error);
console.error("[Canvas] batch remove failed", {
canvasId,
deletedNodeIds: idsToDelete,
error: getErrorMessage(error),
});
for (const id of idsToDelete) {
deletingNodeIds.current.delete(id);
}

View File

@@ -1,6 +1,74 @@
/**
 * Reads the intrinsic pixel dimensions of an image file by decoding it
 * into an HTMLImageElement.
 */
export async function getImageDimensions(
  file: File,
): Promise<{ width: number; height: number }> {
  const { naturalWidth, naturalHeight } = await decodeImageFile(file);
  return { width: naturalWidth, height: naturalHeight };
}
export type ImagePreviewOptions = {
maxEdge?: number;
format?: string;
quality?: number;
};
export type CompressedImagePreview = {
blob: Blob;
width: number;
height: number;
};
/**
 * Renders a downscaled copy of an image file onto an offscreen canvas and
 * encodes it as a compressed preview blob.
 *
 * The longer edge is clamped to `maxEdge` (default 640px); smaller images are
 * never upscaled. The encoder is asked for `format` (default "image/webp") at
 * `quality` (default 0.75), though browsers may return a different mime type.
 *
 * @throws When the image has no readable dimensions, the 2D context cannot be
 *         created, or the encoder produces no blob.
 */
export async function createCompressedImagePreview(
  file: File,
  options: ImagePreviewOptions = {},
): Promise<CompressedImagePreview> {
  const { maxEdge = 640, format = "image/webp", quality = 0.75 } = options;

  const image = await decodeImageFile(file);
  const { naturalWidth, naturalHeight } = image;
  if (!naturalWidth || !naturalHeight) {
    throw new Error("Could not read image dimensions");
  }

  // Cap the scale factor at 1 so small images keep their original size.
  const scale = Math.min(1, maxEdge / Math.max(naturalWidth, naturalHeight));
  const width = Math.max(1, Math.round(naturalWidth * scale));
  const height = Math.max(1, Math.round(naturalHeight * scale));

  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  const context = canvas.getContext("2d");
  if (!context) {
    throw new Error("Could not create canvas context");
  }
  context.drawImage(image, 0, 0, width, height);

  // toBlob is callback-based; bridge it into a promise and treat a null
  // result (unsupported format / encoder failure) as an error.
  const blob = await new Promise<Blob>((resolve, reject) => {
    canvas.toBlob(
      (encoded) => {
        if (encoded) {
          resolve(encoded);
        } else {
          reject(new Error("Could not encode preview image"));
        }
      },
      format,
      quality,
    );
  });

  return { blob, width, height };
}
async function decodeImageFile(file: File): Promise<HTMLImageElement> {
return new Promise((resolve, reject) => {
const objectUrl = URL.createObjectURL(file);
const image = new window.Image();
@@ -15,7 +83,7 @@ export async function getImageDimensions(
return;
}
resolve({ width, height });
resolve(image);
};
image.onerror = () => {

View File

@@ -99,6 +99,7 @@ function CanvasInner({ canvasId }: CanvasInnerProps) {
});
const generateUploadUrl = useMutation(api.storage.generateUploadUrl);
const registerUploadedImageMedia = useMutation(api.storage.registerUploadedImageMedia);
const convexNodeIdsSnapshotForEdgeCarryRef = useRef(new Set<string>());
const [assetBrowserTargetNodeId, setAssetBrowserTargetNodeId] = useState<
string | null
@@ -516,6 +517,7 @@ function CanvasInner({ canvasId }: CanvasInnerProps) {
edges,
screenToFlowPosition,
generateUploadUrl,
registerUploadedImageMedia,
runCreateNodeOnlineOnly,
runCreateNodeWithEdgeSplitOnlineOnly,
notifyOfflineUnsupported,

View File

@@ -15,6 +15,7 @@ import ColorAdjustNode from "./nodes/color-adjust-node";
import LightAdjustNode from "./nodes/light-adjust-node";
import DetailAdjustNode from "./nodes/detail-adjust-node";
import RenderNode from "./nodes/render-node";
import CropNode from "./nodes/crop-node";
/**
* Node-Type-Map für React Flow.
@@ -40,5 +41,6 @@ export const nodeTypes = {
"color-adjust": ColorAdjustNode,
"light-adjust": LightAdjustNode,
"detail-adjust": DetailAdjustNode,
crop: CropNode,
render: RenderNode,
} as const;

View File

@@ -43,6 +43,7 @@ const RESIZE_CONFIGS: Record<string, ResizeConfig> = {
"color-adjust": { minWidth: 300, minHeight: 760 },
"light-adjust": { minWidth: 300, minHeight: 860 },
"detail-adjust": { minWidth: 300, minHeight: 820 },
crop: { minWidth: 320, minHeight: 520 },
render: { minWidth: 260, minHeight: 300, keepAspectRatio: true },
text: { minWidth: 220, minHeight: 90 },
note: { minWidth: 200, minHeight: 90 },

View File

@@ -0,0 +1,740 @@
"use client";
import { useCallback, useMemo, useRef, type PointerEvent as ReactPointerEvent } from "react";
import { Handle, Position, type Node, type NodeProps } from "@xyflow/react";
import { Crop } from "lucide-react";
import { useTranslations } from "next-intl";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import BaseNodeWrapper from "@/components/canvas/nodes/base-node-wrapper";
import { useNodeLocalData } from "@/components/canvas/nodes/use-node-local-data";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
import {
collectPipelineFromGraph,
getSourceImageFromGraph,
shouldFastPathPreviewPipeline,
} from "@/lib/canvas-render-preview";
import {
normalizeCropNodeData,
type CropFitMode,
type CropNodeData,
type CropResizeMode,
} from "@/lib/image-pipeline/crop-node-data";
import type { Id } from "@/convex/_generated/dataModel";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
type CropNodeViewData = CropNodeData & {
_status?: string;
_statusMessage?: string;
};
export type CropNodeType = Node<CropNodeViewData, "crop">;
const PREVIEW_PIPELINE_TYPES = new Set([
"curves",
"color-adjust",
"light-adjust",
"detail-adjust",
"crop",
]);
const CUSTOM_DIMENSION_FALLBACK = 1024;
const CROP_MIN_SIZE = 0.01;
type CropHandle = "n" | "s" | "e" | "w" | "ne" | "nw" | "se" | "sw";
type CropInteractionState = {
pointerId: number;
mode: "move" | "resize";
handle?: CropHandle;
startX: number;
startY: number;
previewWidth: number;
previewHeight: number;
startCrop: CropNodeData["crop"];
keepAspect: boolean;
aspectRatio: number;
};
/** Restricts `value` to [min, max]; the lower bound wins if the range is inverted. */
function clamp(value: number, min: number, max: number): number {
  const upperBounded = value > max ? max : value;
  return upperBounded < min ? min : upperBounded;
}
/**
 * Parses free-form text input into a finite number, or null when the text is
 * not numeric (NaN) or not finite (Infinity). Note that Number("") yields 0.
 */
function parseNumberInput(value: string): number | null {
  const numeric = Number(value);
  return Number.isFinite(numeric) ? numeric : null;
}
/** Formats a 0..1 fraction as a rounded whole-number percentage, e.g. 0.5 -> "50%". */
function formatPercent(value: number): string {
  const percent = Math.round(value * 100);
  return `${percent}%`;
}
/**
 * Normalizes a crop rectangle so it stays inside the unit square: the size is
 * clamped to [CROP_MIN_SIZE, 1] first, then the origin is shifted so the rect
 * never extends past the right/bottom edge.
 */
function clampCropRect(rect: CropNodeData["crop"]): CropNodeData["crop"] {
  const clampedWidth = clamp(rect.width, CROP_MIN_SIZE, 1);
  const clampedHeight = clamp(rect.height, CROP_MIN_SIZE, 1);
  return {
    x: clamp(rect.x, 0, Math.max(0, 1 - clampedWidth)),
    y: clamp(rect.y, 0, Math.max(0, 1 - clampedHeight)),
    width: clampedWidth,
    height: clampedHeight,
  };
}
// Resizes a normalized crop rect (unit-square coordinates, values in 0..1) by
// dragging one of its eight handles. `deltaX`/`deltaY` are normalized pointer
// deltas. When `keepAspect` is set the rect keeps `aspectRatio`
// (width / height) while staying anchored to the opposite edge or corner.
// The result is always re-validated through clampCropRect.
function resizeCropRect(
  start: CropNodeData["crop"],
  handle: CropHandle,
  deltaX: number,
  deltaY: number,
  keepAspect: boolean,
  aspectRatio: number,
): CropNodeData["crop"] {
  const startRight = start.x + start.width;
  const startBottom = start.y + start.height;
  // Free-form resize: move each dragged edge independently, clamped so the
  // rect keeps at least CROP_MIN_SIZE per axis and stays inside [0, 1].
  if (!keepAspect) {
    let left = start.x;
    let top = start.y;
    let right = startRight;
    let bottom = startBottom;
    if (handle.includes("w")) {
      left = clamp(start.x + deltaX, 0, startRight - CROP_MIN_SIZE);
    }
    if (handle.includes("e")) {
      right = clamp(startRight + deltaX, start.x + CROP_MIN_SIZE, 1);
    }
    if (handle.includes("n")) {
      top = clamp(start.y + deltaY, 0, startBottom - CROP_MIN_SIZE);
    }
    if (handle.includes("s")) {
      bottom = clamp(startBottom + deltaY, start.y + CROP_MIN_SIZE, 1);
    }
    return clampCropRect({
      x: left,
      y: top,
      width: right - left,
      height: bottom - top,
    });
  }
  // Guard against zero/negative ratios before dividing by `aspect` below.
  const aspect = Math.max(CROP_MIN_SIZE, aspectRatio);
  // East/west edge handles: width follows the drag, height is derived from
  // the aspect ratio and recentered on the rect's original vertical midline.
  if (handle === "e" || handle === "w") {
    const centerY = start.y + start.height / 2;
    const maxWidth = handle === "e" ? 1 - start.x : startRight;
    // Width floor that keeps the derived height >= CROP_MIN_SIZE too.
    const minWidth = Math.max(CROP_MIN_SIZE, CROP_MIN_SIZE * aspect);
    const rawWidth = handle === "e" ? start.width + deltaX : start.width - deltaX;
    const width = clamp(rawWidth, minWidth, Math.max(minWidth, maxWidth));
    const height = width / aspect;
    const y = clamp(centerY - height / 2, 0, Math.max(0, 1 - height));
    const x = handle === "e" ? start.x : startRight - width;
    return clampCropRect({ x, y, width, height });
  }
  // North/south edge handles: mirror of the branch above with axes swapped.
  if (handle === "n" || handle === "s") {
    const centerX = start.x + start.width / 2;
    const maxHeight = handle === "s" ? 1 - start.y : startBottom;
    const minHeight = Math.max(CROP_MIN_SIZE, CROP_MIN_SIZE / aspect);
    const rawHeight = handle === "s" ? start.height + deltaY : start.height - deltaY;
    const height = clamp(rawHeight, minHeight, Math.max(minHeight, maxHeight));
    const width = height * aspect;
    const x = clamp(centerX - width / 2, 0, Math.max(0, 1 - width));
    const y = handle === "s" ? start.y : startBottom - height;
    return clampCropRect({ x, y, width, height });
  }
  // Corner handles: the opposite corner is the fixed anchor.
  const movesRight = handle.includes("e");
  const movesDown = handle.includes("s");
  const rawWidth = start.width + (movesRight ? deltaX : -deltaX);
  const rawHeight = start.height + (movesDown ? deltaY : -deltaY);
  const widthByHeight = rawHeight * aspect;
  const heightByWidth = rawWidth / aspect;
  // Follow whichever axis the pointer moved more on; derive the other axis
  // from the aspect ratio.
  const useWidth = Math.abs(rawWidth - start.width) >= Math.abs(rawHeight - start.height);
  let width = useWidth ? rawWidth : widthByHeight;
  let height = useWidth ? heightByWidth : rawHeight;
  const anchorX = movesRight ? start.x : startRight;
  const anchorY = movesDown ? start.y : startBottom;
  // Available room from the anchor to the unit-square border on each axis.
  const maxWidth = movesRight ? 1 - anchorX : anchorX;
  const maxHeight = movesDown ? 1 - anchorY : anchorY;
  // Shrink uniformly so neither dimension overflows its available room,
  // preserving the aspect ratio.
  const maxScaleByWidth = maxWidth / Math.max(CROP_MIN_SIZE, width);
  const maxScaleByHeight = maxHeight / Math.max(CROP_MIN_SIZE, height);
  const maxScale = Math.min(1, maxScaleByWidth, maxScaleByHeight);
  width *= maxScale;
  height *= maxScale;
  // Then grow uniformly (if needed) so neither dimension drops below
  // CROP_MIN_SIZE.
  const minScaleByWidth = Math.max(1, CROP_MIN_SIZE / Math.max(CROP_MIN_SIZE, width));
  const minScaleByHeight = Math.max(1, CROP_MIN_SIZE / Math.max(CROP_MIN_SIZE, height));
  const minScale = Math.max(minScaleByWidth, minScaleByHeight);
  width *= minScale;
  height *= minScale;
  // Re-position relative to the anchor so the fixed corner stays put.
  const x = movesRight ? anchorX : anchorX - width;
  const y = movesDown ? anchorY : anchorY - height;
  return clampCropRect({ x, y, width, height });
}
/**
 * CropNode — canvas node that crops (and optionally resizes) an upstream
 * image/video frame. Shows a live pipeline preview with a draggable crop
 * overlay plus numeric controls. Crop coordinates are normalized (0..1)
 * relative to the source frame.
 */
export default function CropNode({ id, data, selected, width }: NodeProps<CropNodeType>) {
  const tNodes = useTranslations("nodes");
  const { queueNodeDataUpdate } = useCanvasSync();
  const graph = useCanvasGraph();
  const normalizeData = useCallback((value: unknown) => normalizeCropNodeData(value), []);
  const previewAreaRef = useRef<HTMLDivElement | null>(null);
  // Active drag/resize gesture state; null when no crop interaction is running.
  const interactionRef = useRef<CropInteractionState | null>(null);
  // Optimistic local copy of the node data; edits are debounced (40ms)
  // before being persisted through the canvas sync queue.
  const { localData, updateLocalData } = useNodeLocalData<CropNodeData>({
    nodeId: id,
    data,
    normalize: normalizeData,
    saveDelayMs: 40,
    onSave: (next) =>
      queueNodeDataUpdate({
        nodeId: id as Id<"nodes">,
        data: next,
      }),
    debugLabel: "crop",
  });
  // Walk the graph upstream to find the nearest media node; prefer its full
  // URL, falling back to a preview URL when no original is available.
  const sourceUrl = useMemo(
    () =>
      getSourceImageFromGraph(graph, {
        nodeId: id,
        isSourceNode: (node) =>
          node.type === "image" ||
          node.type === "ai-image" ||
          node.type === "asset" ||
          node.type === "video" ||
          node.type === "ai-video",
        getSourceImageFromNode: (node) => {
          const sourceData = (node.data ?? {}) as Record<string, unknown>;
          const directUrl = typeof sourceData.url === "string" ? sourceData.url : null;
          if (directUrl && directUrl.length > 0) {
            return directUrl;
          }
          const previewUrl =
            typeof sourceData.previewUrl === "string" ? sourceData.previewUrl : null;
          return previewUrl && previewUrl.length > 0 ? previewUrl : null;
        },
      }),
    [graph, id],
  );
  // Collect the adjustment pipeline feeding this node; substitute this node's
  // own step params with the (possibly unsaved) local data so the preview
  // tracks in-flight edits immediately instead of waiting for the save.
  const steps = useMemo(() => {
    const collected = collectPipelineFromGraph(graph, {
      nodeId: id,
      isPipelineNode: (node) => PREVIEW_PIPELINE_TYPES.has(node.type ?? ""),
    });
    return collected.map((step) => {
      if (step.nodeId === id && step.type === "crop") {
        return {
          ...step,
          params: localData,
        };
      }
      return step;
    });
  }, [graph, id, localData]);
  // Fast path: ~60fps (16ms) re-render debounce when the pipeline qualifies;
  // otherwise fall back to the preview hook's default debounce.
  const previewDebounceMs = shouldFastPathPreviewPipeline(steps, graph.previewNodeDataOverrides)
    ? 16
    : undefined;
  const { canvasRef, hasSource, isRendering, previewAspectRatio, error } = usePipelinePreview({
    sourceUrl,
    steps,
    nodeWidth: Math.max(250, Math.round(width ?? 300)),
    includeHistogram: false,
    debounceMs: previewDebounceMs,
    previewScale: 0.5,
    maxPreviewWidth: 720,
    maxDevicePixelRatio: 1.25,
  });
  const outputResolutionLabel =
    localData.resize.mode === "custom"
      ? `${localData.resize.width ?? CUSTOM_DIMENSION_FALLBACK} x ${localData.resize.height ?? CUSTOM_DIMENSION_FALLBACK}`
      : tNodes("adjustments.crop.sourceResolution");
  // Patch a single field of the crop rect, re-normalizing the whole payload.
  const updateCropField = (field: keyof CropNodeData["crop"], value: number) => {
    updateLocalData((current) =>
      normalizeCropNodeData({
        ...current,
        crop: {
          ...current.crop,
          [field]: value,
        },
      }),
    );
  };
  // Merge a partial resize config into the node data, re-normalizing.
  const updateResize = (next: Partial<CropNodeData["resize"]>) => {
    updateLocalData((current) =>
      normalizeCropNodeData({
        ...current,
        resize: {
          ...current.resize,
          ...next,
        },
      }),
    );
  };
  // Start a crop drag ("move") or handle drag ("resize"); snapshots the
  // starting crop rect and preview bounds so deltas can be computed in
  // normalized coordinates during pointermove.
  const beginCropInteraction = useCallback(
    (event: ReactPointerEvent<HTMLElement>, mode: "move" | "resize", handle?: CropHandle) => {
      if (!hasSource) {
        return;
      }
      const previewElement = previewAreaRef.current;
      if (!previewElement) {
        return;
      }
      const bounds = previewElement.getBoundingClientRect();
      if (bounds.width <= 0 || bounds.height <= 0) {
        return;
      }
      event.preventDefault();
      event.stopPropagation();
      // Fallback id of 1 — presumably for environments (e.g. tests) that
      // synthesize pointer events without a finite pointerId; TODO confirm.
      const pointerId = Number.isFinite(event.pointerId) ? event.pointerId : 1;
      event.currentTarget.setPointerCapture?.(pointerId);
      interactionRef.current = {
        pointerId,
        mode,
        handle,
        startX: event.clientX,
        startY: event.clientY,
        previewWidth: bounds.width,
        previewHeight: bounds.height,
        startCrop: localData.crop,
        keepAspect: localData.resize.keepAspect,
        aspectRatio: localData.crop.width / Math.max(CROP_MIN_SIZE, localData.crop.height),
      };
    },
    [hasSource, localData.crop, localData.resize.keepAspect],
  );
  // Pointer move during an active interaction: convert pixel deltas to
  // normalized deltas and either translate or resize the crop rect.
  const updateCropInteraction = useCallback(
    (event: ReactPointerEvent<HTMLElement>) => {
      const activeInteraction = interactionRef.current;
      if (!activeInteraction) {
        return;
      }
      const pointerId = Number.isFinite(event.pointerId) ? event.pointerId : 1;
      if (pointerId !== activeInteraction.pointerId) {
        return;
      }
      event.preventDefault();
      event.stopPropagation();
      const deltaX = (event.clientX - activeInteraction.startX) / activeInteraction.previewWidth;
      const deltaY = (event.clientY - activeInteraction.startY) / activeInteraction.previewHeight;
      const nextCrop =
        activeInteraction.mode === "move"
          ? clampCropRect({
              ...activeInteraction.startCrop,
              x: activeInteraction.startCrop.x + deltaX,
              y: activeInteraction.startCrop.y + deltaY,
            })
          : resizeCropRect(
              activeInteraction.startCrop,
              activeInteraction.handle ?? "se",
              deltaX,
              deltaY,
              activeInteraction.keepAspect,
              activeInteraction.aspectRatio,
            );
      updateLocalData((current) =>
        normalizeCropNodeData({
          ...current,
          crop: nextCrop,
        }),
      );
    },
    [updateLocalData],
  );
  // End the interaction (pointerup/cancel): release capture and clear state.
  const endCropInteraction = useCallback((event: ReactPointerEvent<HTMLElement>) => {
    const activeInteraction = interactionRef.current;
    if (!activeInteraction) {
      return;
    }
    const pointerId = Number.isFinite(event.pointerId) ? event.pointerId : 1;
    if (pointerId !== activeInteraction.pointerId) {
      return;
    }
    event.preventDefault();
    event.stopPropagation();
    event.currentTarget.releasePointerCapture?.(pointerId);
    interactionRef.current = null;
  }, []);
  return (
    <BaseNodeWrapper
      nodeType="crop"
      selected={selected}
      status={data._status}
      statusMessage={data._statusMessage}
      className="min-w-[320px] border-violet-500/30"
    >
      <Handle
        type="target"
        position={Position.Left}
        className="!h-3 !w-3 !border-2 !border-background !bg-violet-500"
      />
      <div className="space-y-3 p-3">
        <div className="flex items-center gap-1.5 text-xs font-medium text-violet-700 dark:text-violet-400">
          <Crop className="h-3.5 w-3.5" />
          {tNodes("adjustments.crop.title")}
        </div>
        <div className="space-y-2">
          {/* Preview surface: canvas output plus the interactive crop overlay. */}
          <div
            ref={previewAreaRef}
            data-testid="crop-preview-area"
            className="relative overflow-hidden rounded-md border border-border bg-muted/30"
            style={{ aspectRatio: `${Math.max(0.25, previewAspectRatio)}` }}
          >
            {!hasSource ? (
              <div className="absolute inset-0 flex items-center justify-center px-3 text-center text-[11px] text-muted-foreground">
                {tNodes("adjustments.crop.previewHint")}
              </div>
            ) : null}
            {hasSource ? <canvas ref={canvasRef} className="h-full w-full" /> : null}
            {hasSource ? (
              <div className="pointer-events-none absolute inset-0">
                {/* Movable crop rect with rule-of-thirds guides. */}
                <div
                  data-testid="crop-overlay"
                  className="nodrag pointer-events-auto absolute cursor-move border border-violet-300 bg-violet-500/10"
                  style={{
                    left: `${localData.crop.x * 100}%`,
                    top: `${localData.crop.y * 100}%`,
                    width: `${localData.crop.width * 100}%`,
                    height: `${localData.crop.height * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "move")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                >
                  <div className="pointer-events-none absolute left-1/3 top-0 h-full w-px bg-violet-200/70" />
                  <div className="pointer-events-none absolute left-2/3 top-0 h-full w-px bg-violet-200/70" />
                  <div className="pointer-events-none absolute left-0 top-1/3 h-px w-full bg-violet-200/70" />
                  <div className="pointer-events-none absolute left-0 top-2/3 h-px w-full bg-violet-200/70" />
                </div>
                {/* Eight resize handles (corners + edges), positioned on the rect. */}
                <button
                  type="button"
                  data-testid="crop-handle-nw"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-nwse-resize rounded-full border border-background bg-violet-500"
                  style={{ left: `${localData.crop.x * 100}%`, top: `${localData.crop.y * 100}%` }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "nw")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-n"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-ns-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${(localData.crop.x + localData.crop.width / 2) * 100}%`,
                    top: `${localData.crop.y * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "n")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-ne"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-nesw-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${(localData.crop.x + localData.crop.width) * 100}%`,
                    top: `${localData.crop.y * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "ne")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-e"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-ew-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${(localData.crop.x + localData.crop.width) * 100}%`,
                    top: `${(localData.crop.y + localData.crop.height / 2) * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "e")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-se"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-nwse-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${(localData.crop.x + localData.crop.width) * 100}%`,
                    top: `${(localData.crop.y + localData.crop.height) * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "se")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-s"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-ns-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${(localData.crop.x + localData.crop.width / 2) * 100}%`,
                    top: `${(localData.crop.y + localData.crop.height) * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "s")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-sw"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-nesw-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${localData.crop.x * 100}%`,
                    top: `${(localData.crop.y + localData.crop.height) * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "sw")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
                <button
                  type="button"
                  data-testid="crop-handle-w"
                  className="nodrag pointer-events-auto absolute h-3 w-3 -translate-x-1/2 -translate-y-1/2 cursor-ew-resize rounded-full border border-background bg-violet-500"
                  style={{
                    left: `${localData.crop.x * 100}%`,
                    top: `${(localData.crop.y + localData.crop.height / 2) * 100}%`,
                  }}
                  onPointerDown={(event) => beginCropInteraction(event, "resize", "w")}
                  onPointerMove={updateCropInteraction}
                  onPointerUp={endCropInteraction}
                  onPointerCancel={endCropInteraction}
                />
              </div>
            ) : null}
            {isRendering ? (
              <div className="absolute right-1 top-1 rounded bg-background/80 px-1.5 py-0.5 text-[10px] text-muted-foreground">
                {tNodes("adjustments.crop.previewRendering")}
              </div>
            ) : null}
          </div>
          <div className="flex items-center justify-between rounded-md border border-border/70 bg-muted/30 px-2 py-1 text-[11px] text-muted-foreground">
            <span>{tNodes("adjustments.crop.outputResolutionLabel")}</span>
            <span className="font-medium text-foreground">{outputResolutionLabel}</span>
          </div>
        </div>
        {/* Numeric crop controls, all in normalized 0..1 units. */}
        <div className="grid grid-cols-2 gap-2">
          <label className="space-y-1 text-[11px] text-muted-foreground">
            <span>{tNodes("adjustments.crop.fields.x")}</span>
            <input
              type="number"
              min={0}
              max={1}
              step={0.01}
              value={localData.crop.x}
              onChange={(event) => {
                const parsed = parseNumberInput(event.target.value);
                if (parsed === null) return;
                updateCropField("x", clamp(parsed, 0, 1));
              }}
              className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
            />
          </label>
          <label className="space-y-1 text-[11px] text-muted-foreground">
            <span>{tNodes("adjustments.crop.fields.y")}</span>
            <input
              type="number"
              min={0}
              max={1}
              step={0.01}
              value={localData.crop.y}
              onChange={(event) => {
                const parsed = parseNumberInput(event.target.value);
                if (parsed === null) return;
                updateCropField("y", clamp(parsed, 0, 1));
              }}
              className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
            />
          </label>
          <label className="space-y-1 text-[11px] text-muted-foreground">
            <span>{tNodes("adjustments.crop.fields.width")}</span>
            <input
              type="number"
              min={0.01}
              max={1}
              step={0.01}
              value={localData.crop.width}
              onChange={(event) => {
                const parsed = parseNumberInput(event.target.value);
                if (parsed === null) return;
                updateCropField("width", clamp(parsed, 0.01, 1));
              }}
              className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
            />
          </label>
          <label className="space-y-1 text-[11px] text-muted-foreground">
            <span>{tNodes("adjustments.crop.fields.height")}</span>
            <input
              type="number"
              min={0.01}
              max={1}
              step={0.01}
              value={localData.crop.height}
              onChange={(event) => {
                const parsed = parseNumberInput(event.target.value);
                if (parsed === null) return;
                updateCropField("height", clamp(parsed, 0.01, 1));
              }}
              className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
            />
          </label>
        </div>
        <div className="grid grid-cols-2 gap-2 text-[11px]">
          <div className="space-y-1">
            <div className="text-muted-foreground">{tNodes("adjustments.crop.resizeMode")}</div>
            <Select
              value={localData.resize.mode}
              onValueChange={(value: CropResizeMode) => {
                updateResize({ mode: value });
              }}
            >
              <SelectTrigger className="nodrag h-8 text-xs" size="sm">
                <SelectValue />
              </SelectTrigger>
              <SelectContent className="nodrag">
                <SelectItem value="source">{tNodes("adjustments.crop.resizeModes.source")}</SelectItem>
                <SelectItem value="custom">{tNodes("adjustments.crop.resizeModes.custom")}</SelectItem>
              </SelectContent>
            </Select>
          </div>
          <div className="space-y-1">
            <div className="text-muted-foreground">{tNodes("adjustments.crop.fitMode")}</div>
            <Select
              value={localData.resize.fit}
              onValueChange={(value: CropFitMode) => {
                updateResize({ fit: value });
              }}
            >
              <SelectTrigger className="nodrag h-8 text-xs" size="sm">
                <SelectValue />
              </SelectTrigger>
              <SelectContent className="nodrag">
                <SelectItem value="cover">{tNodes("adjustments.crop.fitModes.cover")}</SelectItem>
                <SelectItem value="contain">{tNodes("adjustments.crop.fitModes.contain")}</SelectItem>
                <SelectItem value="fill">{tNodes("adjustments.crop.fitModes.fill")}</SelectItem>
              </SelectContent>
            </Select>
          </div>
        </div>
        {/* Explicit output dimensions, shown only in custom resize mode. */}
        {localData.resize.mode === "custom" ? (
          <div className="grid grid-cols-2 gap-2">
            <label className="space-y-1 text-[11px] text-muted-foreground">
              <span>{tNodes("adjustments.crop.fields.outputWidth")}</span>
              <input
                type="number"
                min={1}
                max={16384}
                step={1}
                value={localData.resize.width ?? CUSTOM_DIMENSION_FALLBACK}
                onChange={(event) => {
                  const parsed = parseNumberInput(event.target.value);
                  if (parsed === null) return;
                  updateResize({ width: Math.round(clamp(parsed, 1, 16384)) });
                }}
                className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
              />
            </label>
            <label className="space-y-1 text-[11px] text-muted-foreground">
              <span>{tNodes("adjustments.crop.fields.outputHeight")}</span>
              <input
                type="number"
                min={1}
                max={16384}
                step={1}
                value={localData.resize.height ?? CUSTOM_DIMENSION_FALLBACK}
                onChange={(event) => {
                  const parsed = parseNumberInput(event.target.value);
                  if (parsed === null) return;
                  updateResize({ height: Math.round(clamp(parsed, 1, 16384)) });
                }}
                className="nodrag nowheel h-8 w-full rounded-md border border-input bg-background px-2 text-xs"
              />
            </label>
          </div>
        ) : null}
        <label className="flex items-center gap-2 text-[11px] text-muted-foreground">
          <input
            type="checkbox"
            checked={localData.resize.keepAspect}
            onChange={(event) => updateResize({ keepAspect: event.target.checked })}
            className="nodrag h-3.5 w-3.5 rounded border-input"
          />
          {tNodes("adjustments.crop.keepAspect")}
        </label>
        <div className="text-[10px] text-muted-foreground">
          {tNodes("adjustments.crop.cropSummary", {
            x: formatPercent(localData.crop.x),
            y: formatPercent(localData.crop.y),
            width: formatPercent(localData.crop.width),
            height: formatPercent(localData.crop.height),
          })}
        </div>
        {error ? <p className="text-[11px] text-destructive">{error}</p> : null}
      </div>
      <Handle
        type="source"
        position={Position.Right}
        className="!h-3 !w-3 !border-2 !border-background !bg-violet-500"
      />
    </BaseNodeWrapper>
  );
}

View File

@@ -14,6 +14,10 @@ import { useTranslations } from "next-intl";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
import BaseNodeWrapper from "./base-node-wrapper";
import {
MediaLibraryDialog,
type MediaLibraryItem,
} from "@/components/media/media-library-dialog";
import {
Dialog,
DialogContent,
@@ -21,9 +25,17 @@ import {
} from "@/components/ui/dialog";
import { toast } from "@/lib/toast";
import { computeMediaNodeSize } from "@/lib/canvas-utils";
import {
emitDashboardSnapshotCacheInvalidationSignal,
invalidateDashboardSnapshotForLastSignedInUser,
} from "@/lib/dashboard-snapshot-cache";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import { useMutation } from "convex/react";
import { Progress } from "@/components/ui/progress";
import {
createCompressedImagePreview,
getImageDimensions,
} from "@/components/canvas/canvas-media-utils";
const ALLOWED_IMAGE_TYPES = new Set([
"image/png",
@@ -34,45 +46,22 @@ const MAX_IMAGE_BYTES = 10 * 1024 * 1024;
const OPTIMISTIC_NODE_PREFIX = "optimistic_";
// Persisted data payload for an image node on the canvas. All fields are
// optional because nodes start empty and are filled in as uploads complete.
type ImageNodeData = {
  canvasId?: string; // owning canvas id (stored as a plain string)
  storageId?: string; // storage id of the full-resolution upload
  previewStorageId?: string; // storage id of the compressed preview variant
  url?: string; // resolved download URL for the full image
  filename?: string; // original upload filename
  mimeType?: string; // original upload MIME type
  width?: number; // intrinsic pixel width of the original image
  height?: number; // intrinsic pixel height of the original image
  previewWidth?: number; // pixel width of the compressed preview
  previewHeight?: number; // pixel height of the compressed preview
  _status?: string; // transient sync status set by the canvas runtime
  _statusMessage?: string; // human-readable detail for _status
};

// React Flow node type for image nodes.
export type ImageNode = Node<ImageNodeData, "image">;
/**
 * Read the natural pixel dimensions of an image file by decoding it into an
 * off-DOM `<img>` element via a temporary object URL. The object URL is
 * revoked in every outcome before the promise settles.
 *
 * Rejects when the image cannot be decoded or reports zero width/height.
 */
async function getImageDimensions(file: File): Promise<{ width: number; height: number }> {
  return await new Promise((resolve, reject) => {
    const objectUrl = URL.createObjectURL(file);
    const probe = new window.Image();
    const releaseUrl = () => URL.revokeObjectURL(objectUrl);
    probe.onload = () => {
      const { naturalWidth: width, naturalHeight: height } = probe;
      releaseUrl();
      if (!width || !height) {
        reject(new Error("Could not read image dimensions"));
        return;
      }
      resolve({ width, height });
    };
    probe.onerror = () => {
      releaseUrl();
      reject(new Error("Could not decode image"));
    };
    probe.src = objectUrl;
  });
}
export default function ImageNode({
id,
data,
@@ -82,6 +71,7 @@ export default function ImageNode({
}: NodeProps<ImageNode>) {
const t = useTranslations('toasts');
const generateUploadUrl = useMutation(api.storage.generateUploadUrl);
const registerUploadedImageMedia = useMutation(api.storage.registerUploadedImageMedia);
const { queueNodeDataUpdate, queueNodeResize, status } = useCanvasSync();
const fileInputRef = useRef<HTMLInputElement>(null);
const [uploadPhase, setUploadPhase] = useState<"idle" | "uploading" | "syncing">("idle");
@@ -89,9 +79,48 @@ export default function ImageNode({
const [pendingUploadStorageId, setPendingUploadStorageId] = useState<string | null>(
null,
);
const [mediaLibraryPhase, setMediaLibraryPhase] = useState<
"idle" | "applying" | "syncing"
>("idle");
const [pendingMediaLibraryStorageId, setPendingMediaLibraryStorageId] = useState<
string | null
>(null);
const [isDragOver, setIsDragOver] = useState(false);
const [isFullscreenOpen, setIsFullscreenOpen] = useState(false);
const [isMediaLibraryOpen, setIsMediaLibraryOpen] = useState(false);
const hasAutoSizedRef = useRef(false);
const canvasId = data.canvasId as Id<"canvases"> | undefined;
const isOptimisticNodeId =
typeof id === "string" && id.startsWith(OPTIMISTIC_NODE_PREFIX);
const isNodeStable = !isOptimisticNodeId;
// Best-effort registration of a freshly uploaded image in the media library.
// Fire-and-forget: the upload itself has already succeeded, so a failing
// registration is only logged, never surfaced to the user.
const registerUploadInMediaLibrary = useCallback(
  (args: {
    storageId: string;
    filename?: string;
    mimeType?: string;
    width?: number;
    height?: number;
    nodeId?: Id<"nodes">;
  }) => {
    // Without a canvas id there is nothing to associate the media with.
    if (!canvasId) {
      return;
    }
    void registerUploadedImageMedia({
      canvasId,
      storageId: args.storageId as Id<"_storage">,
      nodeId: args.nodeId,
      filename: args.filename,
      mimeType: args.mimeType,
      width: args.width,
      height: args.height,
    }).catch((error: unknown) => {
      console.warn("[ImageNode] registerUploadedImageMedia failed", error);
    });
  },
  [canvasId, registerUploadedImageMedia],
);
const isPendingUploadSynced =
pendingUploadStorageId !== null &&
@@ -99,7 +128,35 @@ export default function ImageNode({
typeof data.url === "string" &&
data.url.length > 0;
const isWaitingForCanvasSync = pendingUploadStorageId !== null && !isPendingUploadSynced;
const isPendingMediaLibrarySynced =
pendingMediaLibraryStorageId !== null &&
data.storageId === pendingMediaLibraryStorageId &&
typeof data.url === "string" &&
data.url.length > 0;
const isWaitingForMediaLibrarySync =
pendingMediaLibraryStorageId !== null && !isPendingMediaLibrarySynced;
const isUploading = uploadPhase !== "idle" || isWaitingForCanvasSync;
const isApplyingMediaLibrary =
mediaLibraryPhase !== "idle" || isWaitingForMediaLibrarySync;
const isNodeLoading = isUploading || isApplyingMediaLibrary;
useEffect(() => {
if (!isPendingUploadSynced) {
return;
}
setPendingUploadStorageId(null);
setUploadPhase("idle");
}, [isPendingUploadSynced]);
useEffect(() => {
if (!isPendingMediaLibrarySynced) {
return;
}
setPendingMediaLibraryStorageId(null);
setMediaLibraryPhase("idle");
}, [isPendingMediaLibrarySynced]);
useEffect(() => {
if (typeof id === "string" && id.startsWith(OPTIMISTIC_NODE_PREFIX)) {
@@ -170,6 +227,13 @@ export default function ImageNode({
try {
let dimensions: { width: number; height: number } | undefined;
let previewUpload:
| {
previewStorageId: string;
previewWidth: number;
previewHeight: number;
}
| undefined;
try {
dimensions = await getImageDimensions(file);
} catch {
@@ -208,6 +272,30 @@ export default function ImageNode({
},
);
try {
const preview = await createCompressedImagePreview(file);
const previewUploadUrl = await generateUploadUrl();
const previewUploadResult = await fetch(previewUploadUrl, {
method: "POST",
headers: { "Content-Type": preview.blob.type || "image/webp" },
body: preview.blob,
});
if (!previewUploadResult.ok) {
throw new Error(`Preview upload failed: ${previewUploadResult.status}`);
}
const { storageId: previewStorageId } =
(await previewUploadResult.json()) as { storageId: string };
previewUpload = {
previewStorageId,
previewWidth: preview.width,
previewHeight: preview.height,
};
} catch (previewError) {
console.warn("[ImageNode] preview generation/upload failed", previewError);
}
setUploadProgress(100);
setPendingUploadStorageId(storageId);
setUploadPhase("syncing");
@@ -216,6 +304,7 @@ export default function ImageNode({
nodeId: id as Id<"nodes">,
data: {
storageId,
...(previewUpload ?? {}),
filename: file.name,
mimeType: file.type,
...(dimensions ? { width: dimensions.width, height: dimensions.height } : {}),
@@ -235,6 +324,22 @@ export default function ImageNode({
});
}
const nodeIdForRegistration =
typeof id === "string" && !id.startsWith(OPTIMISTIC_NODE_PREFIX)
? (id as Id<"nodes">)
: undefined;
registerUploadInMediaLibrary({
storageId,
filename: file.name,
mimeType: file.type,
width: dimensions?.width,
height: dimensions?.height,
nodeId: nodeIdForRegistration,
});
invalidateDashboardSnapshotForLastSignedInUser();
emitDashboardSnapshotCacheInvalidationSignal();
toast.success(t('canvas.imageUploaded'));
setUploadPhase("idle");
} catch (err) {
@@ -254,16 +359,69 @@ export default function ImageNode({
isUploading,
queueNodeDataUpdate,
queueNodeResize,
registerUploadInMediaLibrary,
status.isOffline,
t,
],
);
// Apply a media-library selection to this node: write the picked item's
// storage/preview metadata into the node data, then (when intrinsic
// dimensions are known) resize the node to fit the image.
const handlePickFromMediaLibrary = useCallback(
  async (item: MediaLibraryItem) => {
    if (isNodeLoading) {
      return;
    }
    setMediaLibraryPhase("applying");
    // Remember which storageId we are waiting for so the sync effect can
    // clear the loading state once the server echoes it back.
    setPendingMediaLibraryStorageId(item.storageId);
    try {
      await queueNodeDataUpdate({
        nodeId: id as Id<"nodes">,
        data: {
          storageId: item.storageId,
          previewStorageId: item.previewStorageId,
          filename: item.filename,
          mimeType: item.mimeType,
          width: item.width,
          height: item.height,
          previewWidth: item.previewWidth,
          previewHeight: item.previewHeight,
        },
      });
      setMediaLibraryPhase("syncing");
      if (typeof item.width === "number" && typeof item.height === "number") {
        const targetSize = computeMediaNodeSize("image", {
          intrinsicWidth: item.width,
          intrinsicHeight: item.height,
        });
        await queueNodeResize({
          nodeId: id as Id<"nodes">,
          width: targetSize.width,
          height: targetSize.height,
        });
      }
      setIsMediaLibraryOpen(false);
    } catch (error) {
      console.error("Failed to apply media library image", error);
      // Roll back the optimistic loading state on failure.
      setPendingMediaLibraryStorageId(null);
      setMediaLibraryPhase("idle");
      // NOTE(review): reuses the generic upload-failure toast key — confirm
      // that a dedicated media-library error message is not expected here.
      toast.error(
        t('canvas.uploadFailed'),
        error instanceof Error ? error.message : undefined,
      );
    }
  },
  [id, isNodeLoading, queueNodeDataUpdate, queueNodeResize, t],
);
const handleClick = useCallback(() => {
if (!data.url && !isUploading) {
if (!data.url && !isNodeLoading) {
fileInputRef.current?.click();
}
}, [data.url, isUploading]);
}, [data.url, isNodeLoading]);
const handleFileChange = useCallback(
(e: ChangeEvent<HTMLInputElement>) => {
@@ -294,26 +452,31 @@ export default function ImageNode({
e.stopPropagation();
setIsDragOver(false);
if (isUploading) return;
if (isNodeLoading) return;
const file = e.dataTransfer.files?.[0];
if (file && file.type.startsWith("image/")) {
uploadFile(file);
}
},
[isUploading, uploadFile]
[isNodeLoading, uploadFile]
);
const handleReplace = useCallback(() => {
if (isUploading) return;
if (isNodeLoading) return;
fileInputRef.current?.click();
}, [isUploading]);
}, [isNodeLoading]);
const showFilename = Boolean(data.filename && data.url);
const effectiveUploadProgress = isWaitingForCanvasSync ? 100 : uploadProgress;
const uploadingLabel =
isWaitingForCanvasSync
const effectiveUploadProgress = isUploading
? isWaitingForCanvasSync
? 100
: uploadProgress
: 100;
const uploadingLabel = isUploading
? isWaitingForCanvasSync
? "100% — wird synchronisiert…"
: "Wird hochgeladen…";
: "Wird hochgeladen…"
: "Bild wird uebernommen…";
return (
<>
@@ -349,7 +512,7 @@ export default function ImageNode({
{data.url && (
<button
onClick={handleReplace}
disabled={isUploading}
disabled={isNodeLoading}
className="nodrag text-xs text-muted-foreground transition-colors hover:text-foreground disabled:cursor-not-allowed disabled:opacity-60"
>
Ersetzen
@@ -358,7 +521,7 @@ export default function ImageNode({
</div>
<div className="relative min-h-0 overflow-hidden rounded-lg bg-muted/30">
{isUploading ? (
{isNodeLoading ? (
<div className="flex h-full w-full items-center justify-center bg-muted">
<div className="flex flex-col items-center gap-2">
<span className="text-xs text-muted-foreground">{uploadingLabel}</span>
@@ -397,6 +560,21 @@ export default function ImageNode({
<span className="mb-1 text-lg">📁</span>
<span>Klicken oder hierhin ziehen</span>
<span className="mt-0.5 text-xs">PNG, JPG, WebP</span>
<button
type="button"
onClick={(event) => {
event.preventDefault();
event.stopPropagation();
if (!isNodeStable) {
return;
}
setIsMediaLibraryOpen(true);
}}
disabled={isNodeLoading || !isNodeStable}
className="nodrag mt-3 inline-flex items-center rounded-md border border-border bg-background px-2.5 py-1 text-xs font-medium text-foreground transition-colors hover:bg-accent disabled:cursor-not-allowed disabled:opacity-60"
>
{isNodeStable ? "Aus Mediathek" : "Mediathek wird vorbereitet..."}
</button>
</div>
)}
</div>
@@ -410,7 +588,7 @@ export default function ImageNode({
ref={fileInputRef}
type="file"
accept="image/png,image/jpeg,image/webp"
disabled={isUploading}
disabled={isNodeLoading}
onChange={handleFileChange}
className="hidden"
/>
@@ -453,6 +631,13 @@ export default function ImageNode({
</div>
</DialogContent>
</Dialog>
<MediaLibraryDialog
open={isMediaLibraryOpen}
onOpenChange={setIsMediaLibraryOpen}
onPick={handlePickFromMediaLibrary}
pickCtaLabel="Uebernehmen"
/>
</>
);
}

View File

@@ -5,6 +5,10 @@ import {
CANVAS_NODE_DND_MIME,
} from "@/lib/canvas-connection-policy";
import { NODE_DEFAULTS, NODE_HANDLE_MAP } from "@/lib/canvas-utils";
import {
emitDashboardSnapshotCacheInvalidationSignal,
invalidateDashboardSnapshotForLastSignedInUser,
} from "@/lib/dashboard-snapshot-cache";
import {
isCanvasNodeType,
type CanvasNodeType,
@@ -18,7 +22,10 @@ import {
logCanvasConnectionDebug,
normalizeHandle,
} from "./canvas-helpers";
import { getImageDimensions } from "./canvas-media-utils";
import {
createCompressedImagePreview,
getImageDimensions,
} from "./canvas-media-utils";
type UseCanvasDropParams = {
canvasId: Id<"canvases">;
@@ -34,6 +41,15 @@ type UseCanvasDropParams = {
}>;
screenToFlowPosition: (position: { x: number; y: number }) => { x: number; y: number };
generateUploadUrl: () => Promise<string>;
registerUploadedImageMedia?: (args: {
canvasId: Id<"canvases">;
nodeId?: Id<"nodes">;
storageId: Id<"_storage">;
filename?: string;
mimeType?: string;
width?: number;
height?: number;
}) => Promise<{ ok: true }>;
runCreateNodeOnlineOnly: (args: {
canvasId: Id<"canvases">;
type: CanvasNodeType;
@@ -99,6 +115,7 @@ export function useCanvasDrop({
edges,
screenToFlowPosition,
generateUploadUrl,
registerUploadedImageMedia,
runCreateNodeOnlineOnly,
runCreateNodeWithEdgeSplitOnlineOnly,
notifyOfflineUnsupported,
@@ -127,6 +144,13 @@ export function useCanvasDrop({
if (file.type.startsWith("image/")) {
try {
let dimensions: { width: number; height: number } | undefined;
let previewUpload:
| {
previewStorageId: string;
previewWidth: number;
previewHeight: number;
}
| undefined;
try {
dimensions = await getImageDimensions(file);
} catch {
@@ -145,13 +169,38 @@ export function useCanvasDrop({
}
const { storageId } = (await result.json()) as { storageId: string };
try {
const preview = await createCompressedImagePreview(file);
const previewUploadUrl = await generateUploadUrl();
const previewUploadResult = await fetch(previewUploadUrl, {
method: "POST",
headers: { "Content-Type": preview.blob.type || "image/webp" },
body: preview.blob,
});
if (!previewUploadResult.ok) {
throw new Error("Preview upload failed");
}
const { storageId: previewStorageId } =
(await previewUploadResult.json()) as { storageId: string };
previewUpload = {
previewStorageId,
previewWidth: preview.width,
previewHeight: preview.height,
};
} catch (previewError) {
console.warn("[Canvas] dropped image preview generation/upload failed", previewError);
}
const position = screenToFlowPosition({
x: event.clientX,
y: event.clientY,
});
const clientRequestId = crypto.randomUUID();
void runCreateNodeOnlineOnly({
const createNodePromise = runCreateNodeOnlineOnly({
canvasId,
type: "image",
positionX: position.x,
@@ -160,18 +209,60 @@ export function useCanvasDrop({
height: NODE_DEFAULTS.image.height,
data: {
storageId,
...(previewUpload ?? {}),
filename: file.name,
mimeType: file.type,
...(dimensions ? { width: dimensions.width, height: dimensions.height } : {}),
canvasId,
},
clientRequestId,
}).then((realId) => {
});
void createNodePromise.then((realId) => {
void syncPendingMoveForClientRequest(clientRequestId, realId).catch(
(error: unknown) => {
console.error("[Canvas] drop createNode syncPendingMove failed", error);
},
);
invalidateDashboardSnapshotForLastSignedInUser();
emitDashboardSnapshotCacheInvalidationSignal();
if (!registerUploadedImageMedia) {
return;
}
void registerUploadedImageMedia({
canvasId,
nodeId: realId,
storageId: storageId as Id<"_storage">,
filename: file.name,
mimeType: file.type,
width: dimensions?.width,
height: dimensions?.height,
}).catch((error: unknown) => {
console.warn("[Canvas] dropped image media registration failed", error);
});
}, () => {
if (!registerUploadedImageMedia) {
return;
}
void registerUploadedImageMedia({
canvasId,
storageId: storageId as Id<"_storage">,
filename: file.name,
mimeType: file.type,
width: dimensions?.width,
height: dimensions?.height,
})
.then(() => {
invalidateDashboardSnapshotForLastSignedInUser();
emitDashboardSnapshotCacheInvalidationSignal();
})
.catch((error: unknown) => {
console.warn("[Canvas] dropped image media registration failed", error);
});
});
} catch (error) {
console.error("Failed to upload dropped file:", error);
@@ -298,6 +389,7 @@ export function useCanvasDrop({
canvasId,
edges,
generateUploadUrl,
registerUploadedImageMedia,
isSyncOnline,
notifyOfflineUnsupported,
runCreateNodeWithEdgeSplitOnlineOnly,

View File

@@ -0,0 +1,38 @@
import { describe, expect, it } from "vitest";
import {
collectMediaStorageIdsForResolution,
resolveMediaPreviewUrl,
} from "@/components/media/media-preview-utils";
// Unit tests for the preview-id collection and URL resolution helpers.
describe("media-preview-utils", () => {
  it("collects preview ids first and includes original ids as fallback", () => {
    const references = [
      { storageId: "orig-1", previewStorageId: "preview-1" },
      { storageId: "orig-2" },
    ];
    expect(collectMediaStorageIdsForResolution(references)).toEqual([
      "preview-1",
      "orig-1",
      "orig-2",
    ]);
  });
  it("resolves preview url first and falls back to original url", () => {
    const reference = { storageId: "orig-1", previewStorageId: "preview-1" };
    const fullUrlMap = {
      "preview-1": "https://cdn.example.com/preview.webp",
      "orig-1": "https://cdn.example.com/original.jpg",
    };
    expect(resolveMediaPreviewUrl(reference, fullUrlMap)).toBe(
      "https://cdn.example.com/preview.webp",
    );
    // Without a resolved preview URL the original asset URL is used.
    const originalOnlyMap = {
      "orig-1": "https://cdn.example.com/original.jpg",
    };
    expect(resolveMediaPreviewUrl(reference, originalOnlyMap)).toBe(
      "https://cdn.example.com/original.jpg",
    );
  });
});

View File

@@ -0,0 +1,272 @@
"use client";
import { useEffect, useMemo, useState } from "react";
import { useMutation } from "convex/react";
import { AlertCircle, ImageIcon, Loader2 } from "lucide-react";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
import { useAuthQuery } from "@/hooks/use-auth-query";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import {
collectMediaStorageIdsForResolution,
resolveMediaPreviewUrl,
} from "@/components/media/media-preview-utils";
// Bounds for the number of media items requested from the backend.
const DEFAULT_LIMIT = 200;
const MIN_LIMIT = 1;
const MAX_LIMIT = 500;

// Metadata for one media item as returned by `api.dashboard.listMediaLibrary`.
export type MediaLibraryMetadataItem = {
  storageId: Id<"_storage">;
  // Optional compressed preview stored alongside the original upload.
  previewStorageId?: Id<"_storage">;
  filename?: string;
  mimeType?: string;
  width?: number;
  height?: number;
  previewWidth?: number;
  previewHeight?: number;
  // Canvas/node the image originates from.
  sourceCanvasId: Id<"canvases">;
  sourceNodeId: Id<"nodes">;
  createdAt: number;
};

// Metadata plus the resolved display URL (preview preferred; may be absent).
export type MediaLibraryItem = MediaLibraryMetadataItem & {
  url?: string;
};

export type MediaLibraryDialogProps = {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  // When omitted, the dialog is read-only ("Nur Vorschau") with no pick buttons.
  onPick?: (item: MediaLibraryItem) => void | Promise<void>;
  title?: string;
  description?: string;
  // Requested item count; clamped to [MIN_LIMIT, MAX_LIMIT].
  limit?: number;
  pickCtaLabel?: string;
};
/**
 * Clamps a requested item limit into the supported range.
 * Non-numeric or non-finite input falls back to DEFAULT_LIMIT.
 */
function normalizeLimit(limit: number | undefined): number {
  const isUsableNumber = typeof limit === "number" && Number.isFinite(limit);
  if (!isUsableNumber) {
    return DEFAULT_LIMIT;
  }
  const floored = Math.floor(limit);
  if (floored < MIN_LIMIT) {
    return MIN_LIMIT;
  }
  return floored > MAX_LIMIT ? MAX_LIMIT : floored;
}
/** Renders "W x Hpx" when both dimensions are known, otherwise null. */
function formatDimensions(width: number | undefined, height: number | undefined): string | null {
  const bothKnown = typeof width === "number" && typeof height === "number";
  return bothKnown ? `${width} x ${height}px` : null;
}
/**
 * Modal media library: lists the signed-in user's image media and optionally
 * lets the caller pick one item.
 *
 * Data flow:
 * 1. `api.dashboard.listMediaLibrary` loads metadata (only while `open`).
 * 2. `api.storage.batchGetUrlsForUserMedia` resolves all storage ids to URLs
 *    in one batch; cancelled/stale resolutions are dropped.
 * 3. Items merge metadata + resolved URL (compressed preview preferred).
 *
 * When `onPick` is omitted the dialog renders in read-only preview mode.
 */
export function MediaLibraryDialog({
  open,
  onOpenChange,
  onPick,
  title = "Mediathek",
  description = "Waehle ein Bild aus deiner LemonSpace-Mediathek.",
  limit,
  pickCtaLabel = "Auswaehlen",
}: MediaLibraryDialogProps) {
  const normalizedLimit = useMemo(() => normalizeLimit(limit), [limit]);
  // "skip" keeps the query idle while the dialog is closed.
  const metadata = useAuthQuery(
    api.dashboard.listMediaLibrary,
    open ? { limit: normalizedLimit } : "skip",
  );
  const resolveUrls = useMutation(api.storage.batchGetUrlsForUserMedia);
  // storageId -> resolved download URL (undefined when resolution failed).
  const [urlMap, setUrlMap] = useState<Record<string, string | undefined>>({});
  const [isResolvingUrls, setIsResolvingUrls] = useState(false);
  const [urlError, setUrlError] = useState<string | null>(null);
  // storageId of the item currently handed to `onPick`; blocks double-picks.
  const [pendingPickStorageId, setPendingPickStorageId] = useState<Id<"_storage"> | null>(null);
  // Resolve URLs whenever metadata changes; `isCancelled` guards against
  // applying results from a superseded effect run.
  useEffect(() => {
    let isCancelled = false;
    async function run() {
      if (!open) {
        // Reset transient state when the dialog closes.
        setUrlMap({});
        setUrlError(null);
        setIsResolvingUrls(false);
        return;
      }
      if (!metadata) {
        return;
      }
      const storageIds = collectMediaStorageIdsForResolution(metadata);
      if (storageIds.length === 0) {
        setUrlMap({});
        setUrlError(null);
        setIsResolvingUrls(false);
        return;
      }
      setIsResolvingUrls(true);
      setUrlError(null);
      try {
        const resolved = await resolveUrls({ storageIds });
        if (isCancelled) {
          return;
        }
        setUrlMap(resolved);
      } catch (error) {
        if (isCancelled) {
          return;
        }
        setUrlMap({});
        setUrlError(error instanceof Error ? error.message : "URLs konnten nicht geladen werden.");
      } finally {
        if (!isCancelled) {
          setIsResolvingUrls(false);
        }
      }
    }
    void run();
    return () => {
      isCancelled = true;
    };
  }, [metadata, open, resolveUrls]);
  // Merge metadata with resolved URLs (preview URL preferred over original).
  const items: MediaLibraryItem[] = useMemo(() => {
    if (!metadata) {
      return [];
    }
    return metadata.map((item) => ({
      ...item,
      url: resolveMediaPreviewUrl(item, urlMap),
    }));
  }, [metadata, urlMap]);
  const isMetadataLoading = open && metadata === undefined;
  // Show skeletons while either metadata or the first URL batch is loading.
  const isInitialLoading = isMetadataLoading || (metadata !== undefined && isResolvingUrls);
  const isPreviewMode = typeof onPick !== "function";
  // Forwards the pick to the caller; disables all pick buttons while pending.
  async function handlePick(item: MediaLibraryItem): Promise<void> {
    if (!onPick || pendingPickStorageId) {
      return;
    }
    setPendingPickStorageId(item.storageId);
    try {
      await onPick(item);
    } finally {
      setPendingPickStorageId(null);
    }
  }
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="max-h-[85vh] sm:max-w-5xl" showCloseButton>
        <DialogHeader>
          <DialogTitle>{title}</DialogTitle>
          <DialogDescription>{description}</DialogDescription>
        </DialogHeader>
        <div className="min-h-[320px] overflow-y-auto pr-1">
          {isInitialLoading ? (
            <div className="grid grid-cols-2 gap-3 sm:grid-cols-3 lg:grid-cols-4">
              {Array.from({ length: 12 }).map((_, index) => (
                <div key={index} className="overflow-hidden rounded-lg border">
                  <div className="aspect-square animate-pulse bg-muted" />
                  <div className="space-y-1 p-2">
                    <div className="h-3 w-2/3 animate-pulse rounded bg-muted" />
                    <div className="h-3 w-1/2 animate-pulse rounded bg-muted" />
                  </div>
                </div>
              ))}
            </div>
          ) : urlError ? (
            <div className="flex h-full min-h-[260px] flex-col items-center justify-center gap-2 rounded-lg border border-dashed px-6 text-center">
              <AlertCircle className="h-8 w-8 text-destructive" />
              <p className="text-sm font-medium">Mediathek konnte nicht geladen werden</p>
              <p className="max-w-md text-xs text-muted-foreground">{urlError}</p>
            </div>
          ) : items.length === 0 ? (
            <div className="flex h-full min-h-[260px] flex-col items-center justify-center gap-2 rounded-lg border border-dashed px-6 text-center">
              <ImageIcon className="h-8 w-8 text-muted-foreground" />
              <p className="text-sm font-medium">Keine Medien vorhanden</p>
              <p className="text-xs text-muted-foreground">
                Sobald du Bilder hochlaedst oder generierst, erscheinen sie hier.
              </p>
            </div>
          ) : (
            <div className="grid grid-cols-2 gap-3 sm:grid-cols-3 lg:grid-cols-4">
              {items.map((item) => {
                const dimensions = formatDimensions(item.width, item.height);
                const isPickingThis = pendingPickStorageId === item.storageId;
                return (
                  <div
                    key={item.storageId}
                    className="group flex flex-col overflow-hidden rounded-lg border bg-card"
                  >
                    <div className="relative aspect-square bg-muted/50">
                      {item.url ? (
                        // eslint-disable-next-line @next/next/no-img-element
                        <img
                          src={item.url}
                          alt={item.filename ?? "Mediathek-Bild"}
                          className="h-full w-full object-cover"
                          loading="lazy"
                        />
                      ) : (
                        <div className="flex h-full w-full items-center justify-center text-muted-foreground">
                          <ImageIcon className="h-6 w-6" />
                        </div>
                      )}
                    </div>
                    <div className="flex flex-1 flex-col gap-1 p-2">
                      <p className="truncate text-xs font-medium" title={item.filename}>
                        {item.filename ?? "Unbenanntes Bild"}
                      </p>
                      <p className="text-[11px] text-muted-foreground">
                        {dimensions ?? "Groesse unbekannt"}
                      </p>
                      {isPreviewMode ? (
                        <p className="mt-auto text-[11px] text-muted-foreground">Nur Vorschau</p>
                      ) : (
                        <Button
                          type="button"
                          size="sm"
                          className="mt-2 h-7"
                          onClick={() => void handlePick(item)}
                          disabled={Boolean(pendingPickStorageId)}
                        >
                          {isPickingThis ? (
                            <>
                              <Loader2 className="mr-1.5 h-3.5 w-3.5 animate-spin" />
                              Wird uebernommen...
                            </>
                          ) : (
                            pickCtaLabel
                          )}
                        </Button>
                      )}
                    </div>
                  </div>
                );
              })}
            </div>
          )}
        </div>
      </DialogContent>
    </Dialog>
  );
}

View File

@@ -0,0 +1,36 @@
type MediaPreviewReference<TStorageId extends string = string> = {
  storageId: TStorageId;
  previewStorageId?: TStorageId;
};

/**
 * Flattens media references into a deduplicated list of storage ids.
 * For each item the preferred id (preview when present, otherwise original)
 * comes first, followed by the original id as a resolution fallback.
 */
export function collectMediaStorageIdsForResolution<TStorageId extends string>(
  items: readonly MediaPreviewReference<TStorageId>[],
): TStorageId[] {
  const seen = new Set<TStorageId>();
  const result: TStorageId[] = [];
  for (const reference of items) {
    const candidates = [reference.previewStorageId ?? reference.storageId, reference.storageId];
    for (const candidate of candidates) {
      // Skip empty strings and anything already collected.
      if (candidate && !seen.has(candidate)) {
        seen.add(candidate);
        result.push(candidate);
      }
    }
  }
  return result;
}
/**
 * Picks the display URL for a media reference: the compressed preview when
 * its URL resolved, otherwise the original asset's URL (which may itself be
 * missing from the map).
 */
export function resolveMediaPreviewUrl(
  item: MediaPreviewReference,
  urlMap: Record<string, string | undefined>,
): string | undefined {
  const previewUrl = item.previewStorageId ? urlMap[item.previewStorageId] : undefined;
  // Truthiness check intentionally treats "" like a missing URL.
  return previewUrl ? previewUrl : urlMap[item.storageId];
}

View File

@@ -1,4 +1,6 @@
import { query } from "./_generated/server";
import type { Doc, Id } from "./_generated/dataModel";
import { v } from "convex/values";
import { optionalAuth } from "./helpers";
import { prioritizeRecentCreditTransactions } from "../lib/credits-activity";
@@ -6,6 +8,102 @@ import { MONTHLY_TIER_CREDITS, normalizeBillingTier } from "../lib/tier-credits"
// Billing fallbacks used when a user has no subscription row.
const DEFAULT_TIER = "free" as const;
const DEFAULT_SUBSCRIPTION_STATUS = "active" as const;
// Max media items embedded in the dashboard snapshot.
const DASHBOARD_MEDIA_PREVIEW_LIMIT = 8;
// Bounds applied to the `limit` argument of listMediaLibrary.
const MEDIA_LIBRARY_DEFAULT_LIMIT = 200;
const MEDIA_LIBRARY_MIN_LIMIT = 1;
const MEDIA_LIBRARY_MAX_LIMIT = 500;
// One media-library entry derived from an image node's data payload.
type MediaPreviewItem = {
  // Original upload in Convex storage.
  storageId: Id<"_storage">;
  // Optional compressed preview uploaded alongside the original.
  previewStorageId?: Id<"_storage">;
  filename?: string;
  mimeType?: string;
  // Original dimensions, when recorded on the node.
  width?: number;
  height?: number;
  // Preview dimensions, when recorded on the node.
  previewWidth?: number;
  previewHeight?: number;
  // Canvas/node the image originates from.
  sourceCanvasId: Id<"canvases">;
  sourceNodeId: Id<"nodes">;
  // Node _creationTime, used for newest-first ordering.
  createdAt: number;
};
/**
 * Extracts media-library metadata from an image node's payload.
 * Returns null for non-image nodes and for image nodes whose data lacks a
 * non-empty `storageId`. All optional fields degrade to undefined when the
 * payload value has the wrong type.
 */
function readImageMediaPreview(node: Doc<"nodes">): MediaPreviewItem | null {
  if (node.type !== "image") {
    return null;
  }
  const data = (node.data as Record<string, unknown> | undefined) ?? {};
  // Narrowing helpers for the untyped payload.
  const asString = (value: unknown): string | undefined =>
    typeof value === "string" ? value : undefined;
  const asNonEmptyString = (value: unknown): string | undefined =>
    typeof value === "string" && value.length > 0 ? value : undefined;
  const asFiniteNumber = (value: unknown): number | undefined =>
    typeof value === "number" && Number.isFinite(value) ? value : undefined;
  const storageId = asNonEmptyString(data.storageId);
  if (!storageId) {
    return null;
  }
  return {
    storageId: storageId as Id<"_storage">,
    previewStorageId: asNonEmptyString(data.previewStorageId) as Id<"_storage"> | undefined,
    // Legacy payloads stored the name under `originalFilename`.
    filename: asString(data.filename) ?? asString(data.originalFilename),
    mimeType: asString(data.mimeType),
    width: asFiniteNumber(data.width),
    height: asFiniteNumber(data.height),
    previewWidth: asFiniteNumber(data.previewWidth),
    previewHeight: asFiniteNumber(data.previewHeight),
    sourceCanvasId: node.canvasId,
    sourceNodeId: node._id,
    createdAt: node._creationTime,
  };
}
/**
 * Builds the newest-first media list from raw nodes: extracts image metadata,
 * sorts by creation time (descending), and dedupes by original storage id,
 * stopping once `limit` unique items have been collected.
 */
function buildMediaPreview(nodes: Array<Doc<"nodes">>, limit: number): MediaPreviewItem[] {
  const sortedCandidates = nodes
    .map(readImageMediaPreview)
    .filter((item): item is MediaPreviewItem => item !== null)
    .sort((a, b) => b.createdAt - a.createdAt);
  const result: MediaPreviewItem[] = [];
  const seenStorageIds = new Set<Id<"_storage">>();
  for (const candidate of sortedCandidates) {
    if (seenStorageIds.has(candidate.storageId)) {
      continue;
    }
    seenStorageIds.add(candidate.storageId);
    result.push(candidate);
    if (result.length >= limit) {
      break;
    }
  }
  return result;
}
/**
 * Coerces a caller-supplied limit into the media-library range; anything
 * non-numeric or non-finite falls back to the default page size.
 */
function normalizeMediaLibraryLimit(limit: number | undefined): number {
  if (typeof limit === "number" && Number.isFinite(limit)) {
    const truncated = Math.floor(limit);
    return Math.max(MEDIA_LIBRARY_MIN_LIMIT, Math.min(MEDIA_LIBRARY_MAX_LIMIT, truncated));
  }
  return MEDIA_LIBRARY_DEFAULT_LIMIT;
}
export const getSnapshot = query({
args: {},
@@ -27,6 +125,7 @@ export const getSnapshot = query({
},
recentTransactions: [],
canvases: [],
mediaPreview: [],
generatedAt: Date.now(),
};
}
@@ -59,6 +158,17 @@ export const getSnapshot = query({
.collect(),
]);
const imageNodesByCanvas = await Promise.all(
canvases.map((canvas) =>
ctx.db
.query("nodes")
.withIndex("by_canvas_type", (q) => q.eq("canvasId", canvas._id).eq("type", "image"))
.order("desc")
.collect(),
),
);
const mediaPreview = buildMediaPreview(imageNodesByCanvas.flat(), DASHBOARD_MEDIA_PREVIEW_LIMIT);
const tier = normalizeBillingTier(subscriptionRow?.tier);
const monthStart = new Date(new Date().getFullYear(), new Date().getMonth(), 1).getTime();
let monthlyUsage = 0;
@@ -96,7 +206,43 @@ export const getSnapshot = query({
},
recentTransactions: prioritizeRecentCreditTransactions(recentTransactionsRaw, 20),
canvases,
mediaPreview,
generatedAt: Date.now(),
};
},
});
/**
 * Lists the authenticated user's media library (image-node metadata),
 * newest first, deduped by original storage id, capped at `limit`.
 *
 * Unauthenticated callers get an empty list instead of an error.
 *
 * NOTE(review): this collects ALL owned canvases and ALL of their image
 * nodes before applying the limit — fine for small accounts, but worth
 * revisiting if users accumulate many nodes.
 */
export const listMediaLibrary = query({
  args: {
    limit: v.optional(v.number()),
  },
  handler: async (ctx, { limit }) => {
    const user = await optionalAuth(ctx);
    if (!user) {
      return [];
    }
    // Clamp the requested page size to the supported range.
    const normalizedLimit = normalizeMediaLibraryLimit(limit);
    const canvases = await ctx.db
      .query("canvases")
      .withIndex("by_owner_updated", (q) => q.eq("ownerId", user.userId))
      .order("desc")
      .collect();
    if (canvases.length === 0) {
      return [];
    }
    // One indexed query per canvas, restricted to image nodes.
    const imageNodesByCanvas = await Promise.all(
      canvases.map((canvas) =>
        ctx.db
          .query("nodes")
          .withIndex("by_canvas_type", (q) => q.eq("canvasId", canvas._id).eq("type", "image"))
          .order("desc")
          .collect(),
      ),
    );
    return buildMediaPreview(imageNodesByCanvas.flat(), normalizedLimit);
  },
});

View File

@@ -9,6 +9,7 @@ import {
validateCanvasConnectionPolicy,
} from "../lib/canvas-connection-policy";
import { nodeTypeValidator } from "./node_type_validator";
import { normalizeCropNodeData } from "../lib/image-pipeline/crop-node-data";
// ============================================================================
// Interne Helpers
@@ -391,6 +392,12 @@ function normalizeNodeDataForWrite(
nodeType: Doc<"nodes">["type"],
data: unknown,
): unknown {
if (nodeType === "crop") {
return normalizeCropNodeData(data, {
rejectDisallowedPayloadFields: true,
});
}
if (!isAdjustmentNodeType(nodeType)) {
return data;
}

View File

@@ -42,9 +42,11 @@ async function assertCanvasOwner(
}
async function resolveStorageUrls(
ctx: QueryCtx,
ctx: QueryCtx | MutationCtx,
storageIds: Array<Id<"_storage">>,
options?: { logLabel?: string },
): Promise<StorageUrlMap> {
const logLabel = options?.logLabel ?? "batchGetUrlsForCanvas";
const resolved: StorageUrlMap = {};
const operationStartedAt = Date.now();
let failedCount = 0;
@@ -75,7 +77,7 @@ async function resolveStorageUrls(
if (entry.error) {
failedCount += 1;
batchFailedCount += 1;
console.warn("[storage.batchGetUrlsForCanvas] getUrl failed", {
console.warn(`[storage.${logLabel}] getUrl failed`, {
storageId: entry.storageId,
error: entry.error,
});
@@ -89,7 +91,7 @@ async function resolveStorageUrls(
}
}
logSlowQuery("batchGetUrlsForCanvas::resolveStorageBatch", batchStartedAt, {
logSlowQuery(`${logLabel}::resolveStorageBatch`, batchStartedAt, {
batchSize: batch.length,
successCount: entries.length - batchFailedCount,
failedCount: batchFailedCount,
@@ -97,7 +99,7 @@ async function resolveStorageUrls(
});
}
logSlowQuery("batchGetUrlsForCanvas", operationStartedAt, {
logSlowQuery(logLabel, operationStartedAt, {
requestStorageCount: storageIds.length,
resolvedCount: totalResolved,
failedCount,
@@ -147,7 +149,9 @@ export const batchGetUrlsForCanvas = mutation({
});
}
const result = await resolveStorageUrls(ctx, verifiedStorageIds);
const result = await resolveStorageUrls(ctx, verifiedStorageIds, {
logLabel: "batchGetUrlsForCanvas",
});
logSlowQuery("batchGetUrlsForCanvas::total", startedAt, {
canvasId,
storageIdCount: verifiedStorageIds.length,
@@ -157,6 +161,96 @@ export const batchGetUrlsForCanvas = mutation({
return result;
},
});
/**
 * Resolves storage ids to download URLs for the signed-in user's media.
 *
 * Ownership filter: only ids referenced by image nodes on canvases the user
 * owns are resolved; everything else is silently dropped (and logged), never
 * errored, so a stale client-side id list cannot leak other users' storage.
 *
 * NOTE(review): declared as a mutation (mirroring batchGetUrlsForCanvas)
 * even though it only reads — presumably a platform constraint on storage
 * URL generation; confirm before converting to a query.
 */
export const batchGetUrlsForUserMedia = mutation({
  args: {
    storageIds: v.array(v.id("_storage")),
  },
  handler: async (ctx, { storageIds }) => {
    const startedAt = Date.now();
    const user = await requireAuth(ctx);
    // Dedupe + sort for deterministic batching and log output.
    const uniqueSortedStorageIds = [...new Set(storageIds)].sort();
    if (uniqueSortedStorageIds.length === 0) {
      return {};
    }
    const ownedStorageIds = await collectOwnedImageStorageIdsForUser(ctx, user.userId);
    const verifiedStorageIds = uniqueSortedStorageIds.filter((storageId) =>
      ownedStorageIds.has(storageId),
    );
    const rejectedStorageIds = uniqueSortedStorageIds.length - verifiedStorageIds.length;
    if (rejectedStorageIds > 0) {
      console.warn("[storage.batchGetUrlsForUserMedia] rejected unowned storage ids", {
        userId: user.userId,
        requestedCount: uniqueSortedStorageIds.length,
        rejectedStorageIds,
      });
    }
    const result = await resolveStorageUrls(ctx, verifiedStorageIds, {
      logLabel: "batchGetUrlsForUserMedia",
    });
    logSlowQuery("batchGetUrlsForUserMedia::total", startedAt, {
      userId: user.userId,
      storageIdCount: verifiedStorageIds.length,
      rejectedStorageIds,
      resolvedCount: Object.keys(result).length,
    });
    return result;
  },
});
/**
 * Acknowledges a client-side image upload for the media pipeline.
 *
 * Requires authentication and canvas ownership (assertCanvasOwner throws
 * otherwise). A missing node or a node/canvas mismatch is only logged, not
 * rejected — uploads registered before node creation has settled still
 * succeed.
 *
 * NOTE(review): as written, the handler only validates and logs; it does not
 * persist anything. The returned `{ ok: true }` merely confirms receipt.
 */
export const registerUploadedImageMedia = mutation({
  args: {
    canvasId: v.id("canvases"),
    nodeId: v.optional(v.id("nodes")),
    storageId: v.id("_storage"),
    filename: v.optional(v.string()),
    mimeType: v.optional(v.string()),
    width: v.optional(v.number()),
    height: v.optional(v.number()),
  },
  handler: async (ctx, args) => {
    const user = await requireAuth(ctx);
    await assertCanvasOwner(ctx, args.canvasId, user.userId);
    // Best-effort consistency checks — log-only, never fatal.
    if (args.nodeId) {
      const node = await ctx.db.get(args.nodeId);
      if (!node) {
        console.warn("[storage.registerUploadedImageMedia] node not found", {
          userId: user.userId,
          canvasId: args.canvasId,
          nodeId: args.nodeId,
          storageId: args.storageId,
        });
      } else if (node.canvasId !== args.canvasId) {
        console.warn("[storage.registerUploadedImageMedia] node/canvas mismatch", {
          userId: user.userId,
          canvasId: args.canvasId,
          nodeId: args.nodeId,
          nodeCanvasId: node.canvasId,
          storageId: args.storageId,
        });
      }
    }
    console.info("[storage.registerUploadedImageMedia] acknowledged", {
      userId: user.userId,
      canvasId: args.canvasId,
      nodeId: args.nodeId,
      storageId: args.storageId,
      filename: args.filename,
      mimeType: args.mimeType,
      width: args.width,
      height: args.height,
    });
    return { ok: true as const };
  },
});
async function listNodesForCanvas(
ctx: QueryCtx | MutationCtx,
canvasId: Id<"canvases">,
@@ -175,10 +269,53 @@ function collectStorageIds(
for (const node of nodes) {
const data = node.data as Record<string, unknown> | undefined;
const storageId = data?.storageId;
const previewStorageId = data?.previewStorageId;
if (typeof storageId === "string" && storageId.length > 0) {
ids.add(storageId as Id<"_storage">);
}
if (typeof previewStorageId === "string" && previewStorageId.length > 0) {
ids.add(previewStorageId as Id<"_storage">);
}
}
return [...ids];
}
/**
 * Collects every storage id (originals and compressed previews) referenced
 * by image nodes across all canvases owned by `userId`. Used to verify that
 * URL-resolution requests only touch storage the caller actually owns.
 */
async function collectOwnedImageStorageIdsForUser(
  ctx: QueryCtx | MutationCtx,
  userId: string,
): Promise<Set<Id<"_storage">>> {
  const owned = new Set<Id<"_storage">>();
  const canvases = await ctx.db
    .query("canvases")
    .withIndex("by_owner", (q) => q.eq("ownerId", userId))
    .collect();
  if (canvases.length === 0) {
    return owned;
  }
  // One indexed query per canvas, restricted to image nodes.
  const nodeLists = await Promise.all(
    canvases.map((canvas) =>
      ctx.db
        .query("nodes")
        .withIndex("by_canvas_type", (q) => q.eq("canvasId", canvas._id).eq("type", "image"))
        .collect(),
    ),
  );
  const addIfPresent = (value: unknown) => {
    if (typeof value === "string" && value.length > 0) {
      owned.add(value as Id<"_storage">);
    }
  };
  for (const node of nodeLists.flat()) {
    const data = node.data as Record<string, unknown> | undefined;
    addIfPresent(data?.storageId);
    addIfPresent(data?.previewStorageId);
  }
  return owned;
}

View File

@@ -1,12 +1,13 @@
"use client";
import { useEffect, useMemo } from "react";
import { useEffect, useMemo, useState } from "react";
import type { FunctionReturnType } from "convex/server";
import { api } from "@/convex/_generated/api";
import { useAuthQuery } from "@/hooks/use-auth-query";
import {
clearDashboardSnapshotCache,
getDashboardSnapshotCacheInvalidationSignalKey,
readDashboardSnapshotCache,
writeDashboardSnapshotCache,
} from "@/lib/dashboard-snapshot-cache";
@@ -17,12 +18,12 @@ export function useDashboardSnapshot(userId?: string | null): {
snapshot: DashboardSnapshot | undefined;
source: "live" | "cache" | "none";
} {
const [cacheEpoch, setCacheEpoch] = useState(0);
const liveSnapshot = useAuthQuery(api.dashboard.getSnapshot, userId ? {} : "skip");
const cachedSnapshot = useMemo(() => {
if (!userId) return null;
const cached = readDashboardSnapshotCache<DashboardSnapshot>(userId);
return cached?.snapshot ?? null;
}, [userId]);
const cachedSnapshot =
userId && cacheEpoch >= 0
? readDashboardSnapshotCache<DashboardSnapshot>(userId)?.snapshot ?? null
: null;
useEffect(() => {
if (!userId || !liveSnapshot) return;
@@ -46,6 +47,25 @@ export function useDashboardSnapshot(userId?: string | null): {
window.sessionStorage.setItem("ls-last-dashboard-user", userId);
}, [userId]);
useEffect(() => {
if (!userId) return;
if (typeof window === "undefined") return;
const signalKey = getDashboardSnapshotCacheInvalidationSignalKey();
const onStorage = (event: StorageEvent) => {
if (event.key !== signalKey) {
return;
}
clearDashboardSnapshotCache(userId);
setCacheEpoch((value) => value + 1);
};
window.addEventListener("storage", onStorage);
return () => {
window.removeEventListener("storage", onStorage);
};
}, [userId]);
return useMemo(() => {
if (liveSnapshot) {
return { snapshot: liveSnapshot, source: "live" as const };

View File

@@ -6,6 +6,20 @@ const ADJUSTMENT_ALLOWED_SOURCE_TYPES = new Set<string>([
"image",
"asset",
"ai-image",
"crop",
"curves",
"color-adjust",
"light-adjust",
"detail-adjust",
]);
const CROP_ALLOWED_SOURCE_TYPES = new Set<string>([
"image",
"asset",
"ai-image",
"video",
"ai-video",
"crop",
"curves",
"color-adjust",
"light-adjust",
@@ -16,6 +30,7 @@ const RENDER_ALLOWED_SOURCE_TYPES = new Set<string>([
"image",
"asset",
"ai-image",
"crop",
"curves",
"color-adjust",
"light-adjust",
@@ -32,6 +47,8 @@ export type CanvasConnectionValidationReason =
| "video-prompt-target-invalid"
| "adjustment-source-invalid"
| "adjustment-incoming-limit"
| "crop-source-invalid"
| "crop-incoming-limit"
| "compare-incoming-limit"
| "adjustment-target-forbidden"
| "render-source-invalid";
@@ -55,6 +72,15 @@ export function validateCanvasConnectionPolicy(args: {
return "render-source-invalid";
}
if (targetType === "crop") {
if (!CROP_ALLOWED_SOURCE_TYPES.has(sourceType)) {
return "crop-source-invalid";
}
if (targetIncomingCount >= 1) {
return "crop-incoming-limit";
}
}
if (isAdjustmentNodeType(targetType) && targetType !== "render") {
if (!ADJUSTMENT_ALLOWED_SOURCE_TYPES.has(sourceType)) {
return "adjustment-source-invalid";
@@ -92,8 +118,12 @@ export function getCanvasConnectionValidationMessage(
return "KI-Video-Ausgabe akzeptiert nur Eingaben von KI-Video.";
case "video-prompt-target-invalid":
return "KI-Video kann nur mit KI-Video-Ausgabe verbunden werden.";
case "crop-source-invalid":
return "Crop akzeptiert nur Bild-, Asset-, KI-Bild-, Video-, KI-Video-, Crop- oder Adjustment-Input.";
case "crop-incoming-limit":
return "Crop-Nodes erlauben genau eine eingehende Verbindung.";
case "adjustment-source-invalid":
return "Adjustment-Nodes akzeptieren nur Bild-, Asset-, KI-Bild- oder Adjustment-Input.";
return "Adjustment-Nodes akzeptieren nur Bild-, Asset-, KI-Bild-, Crop- oder Adjustment-Input.";
case "adjustment-incoming-limit":
return "Adjustment-Nodes erlauben genau eine eingehende Verbindung.";
case "compare-incoming-limit":
@@ -101,7 +131,7 @@ export function getCanvasConnectionValidationMessage(
case "adjustment-target-forbidden":
return "Adjustment-Ausgaben koennen nicht an Prompt- oder KI-Bild-Nodes angeschlossen werden.";
case "render-source-invalid":
return "Render akzeptiert nur Bild-, Asset-, KI-Bild- oder Adjustment-Input.";
return "Render akzeptiert nur Bild-, Asset-, KI-Bild-, Crop- oder Adjustment-Input.";
default:
return "Verbindung ist fuer diese Node-Typen nicht erlaubt.";
}

View File

@@ -134,8 +134,6 @@ export const NODE_CATALOG: readonly NodeCatalogEntry[] = [
label: "Crop / Resize",
category: "transform",
phase: 2,
implemented: false,
disabledHint: "Folgt in Phase 2",
}),
entry({
type: "bg-remove",

View File

@@ -1,3 +1,5 @@
import { DEFAULT_CROP_NODE_DATA } from "@/lib/image-pipeline/crop-node-data";
export const CANVAS_NODE_TEMPLATES = [
{
type: "image",
@@ -74,6 +76,13 @@ export const CANVAS_NODE_TEMPLATES = [
height: 180,
defaultData: {},
},
{
type: "crop",
label: "Crop / Resize",
width: 340,
height: 620,
defaultData: DEFAULT_CROP_NODE_DATA,
},
{
type: "curves",
label: "Kurven",

View File

@@ -119,9 +119,10 @@ function sanitizeDimension(value: unknown): number | undefined {
return rounded;
}
const SOURCE_NODE_TYPES = new Set(["image", "ai-image", "asset"]);
// Node types that can serve as a render-preview pipeline source
// (video/ai-video handled specially in resolveSourceNodeUrl below).
const SOURCE_NODE_TYPES = new Set(["image", "ai-image", "asset", "video", "ai-video"]);
export const RENDER_PREVIEW_PIPELINE_TYPES = new Set([
"crop",
"curves",
"color-adjust",
"light-adjust",
@@ -191,6 +192,26 @@ export function resolveNodeImageUrl(data: unknown): string | null {
return null;
}
/**
 * Resolves the preview source URL for a graph node. Video nodes are proxied
 * through the /api/pexels-video route; AI-video nodes use their direct URL;
 * everything else (and video nodes without a usable URL) falls back to the
 * generic image-url resolution.
 */
function resolveSourceNodeUrl(node: CanvasGraphNodeLike): string | null {
  const data = (node.data ?? {}) as Record<string, unknown>;
  switch (node.type) {
    case "video": {
      const mp4Url = data.mp4Url;
      if (typeof mp4Url === "string" && mp4Url.length > 0) {
        return `/api/pexels-video?u=${encodeURIComponent(mp4Url)}`;
      }
      break;
    }
    case "ai-video": {
      const directUrl = data.url;
      if (typeof directUrl === "string" && directUrl.length > 0) {
        return directUrl;
      }
      break;
    }
  }
  return resolveNodeImageUrl(node.data);
}
export function buildGraphSnapshot(
nodes: readonly CanvasGraphNodeLike[],
edges: readonly CanvasGraphEdgeLike[],
@@ -367,7 +388,7 @@ export function resolveRenderPreviewInputFromGraph(args: {
const sourceUrl = getSourceImageFromGraph(args.graph, {
nodeId: args.nodeId,
isSourceNode: (node) => SOURCE_NODE_TYPES.has(node.type ?? ""),
getSourceImageFromNode: (node) => resolveNodeImageUrl(node.data),
getSourceImageFromNode: (node) => resolveSourceNodeUrl(node),
});
const steps = collectPipelineFromGraph(args.graph, {

View File

@@ -12,6 +12,7 @@ import {
DEFAULT_DETAIL_ADJUST_DATA,
DEFAULT_LIGHT_ADJUST_DATA,
} from "@/lib/image-pipeline/adjustment-types";
import { DEFAULT_CROP_NODE_DATA } from "@/lib/image-pipeline/crop-node-data";
/**
* Convex Node → React Flow Node
@@ -117,6 +118,7 @@ const SOURCE_NODE_GLOW_RGB: Record<string, readonly [number, number, number]> =
"color-adjust": [6, 182, 212],
"light-adjust": [245, 158, 11],
"detail-adjust": [99, 102, 241],
crop: [139, 92, 246],
render: [14, 165, 233],
};
@@ -223,6 +225,7 @@ export const NODE_HANDLE_MAP: Record<
"color-adjust": { source: undefined, target: undefined },
"light-adjust": { source: undefined, target: undefined },
"detail-adjust": { source: undefined, target: undefined },
crop: { source: undefined, target: undefined },
render: { source: undefined, target: undefined },
};
@@ -267,6 +270,7 @@ export const NODE_DEFAULTS: Record<
"color-adjust": { width: 320, height: 800, data: DEFAULT_COLOR_ADJUST_DATA },
"light-adjust": { width: 320, height: 920, data: DEFAULT_LIGHT_ADJUST_DATA },
"detail-adjust": { width: 320, height: 880, data: DEFAULT_DETAIL_ADJUST_DATA },
crop: { width: 340, height: 620, data: DEFAULT_CROP_NODE_DATA },
render: {
width: 300,
height: 420,

View File

@@ -1,6 +1,8 @@
const STORAGE_NAMESPACE = "lemonspace.dashboard";
const CACHE_VERSION = 1;
// Cached snapshots expire after 12 hours.
const DEFAULT_TTL_MS = 12 * 60 * 60 * 1000;
// sessionStorage key recording the most recently signed-in dashboard user.
const LAST_DASHBOARD_USER_KEY = "ls-last-dashboard-user";
// localStorage key bumped to signal cross-tab cache invalidation.
const INVALIDATION_SIGNAL_KEY = `${STORAGE_NAMESPACE}:snapshot:invalidate:v${CACHE_VERSION}`;
type JsonRecord = Record<string, unknown>;
@@ -19,6 +21,15 @@ function getLocalStorage(): Storage | null {
}
}
/**
 * Safe sessionStorage accessor: null during SSR (no `window`) or when
 * storage access throws (e.g. blocked by browser privacy settings).
 */
function getSessionStorage(): Storage | null {
  try {
    return typeof window === "undefined" ? null : window.sessionStorage;
  } catch {
    return null;
  }
}
function isRecord(value: unknown): value is JsonRecord {
return typeof value === "object" && value !== null;
}
@@ -120,3 +131,22 @@ export function clearDashboardSnapshotCache(userId: string): void {
if (!storage) return;
safeRemove(storage, cacheKey(userId));
}
/**
 * Clears the cached dashboard snapshot for the most recently signed-in user
 * (tracked in sessionStorage). No-op when storage or the key is unavailable.
 */
export function invalidateDashboardSnapshotForLastSignedInUser(): void {
  const sessionStorage = getSessionStorage();
  if (!sessionStorage) {
    return;
  }
  const userId = safeGet(sessionStorage, LAST_DASHBOARD_USER_KEY);
  if (userId) {
    clearDashboardSnapshotCache(userId);
  }
}
/**
 * Broadcasts a cache-invalidation signal to other tabs by writing a fresh
 * timestamp to a well-known localStorage key (picked up via `storage` events).
 */
export function emitDashboardSnapshotCacheInvalidationSignal(): void {
  const storage = getLocalStorage();
  if (storage) {
    safeSet(storage, INVALIDATION_SIGNAL_KEY, String(Date.now()));
  }
}
/** Exposes the localStorage key used by the cross-tab invalidation signal. */
export function getDashboardSnapshotCacheInvalidationSignalKey(): string {
  return INVALIDATION_SIGNAL_KEY;
}

View File

@@ -67,6 +67,23 @@ export type DetailAdjustData = {
preset: string | null;
};
// Crop rectangle in normalized (0..1) coordinates relative to the source.
export type NormalizedCropRect = {
  x: number;
  y: number;
  width: number;
  height: number;
};

// Target output size in pixels; null per axis means no explicit target
// for that axis (see normalizePositiveIntOrNull).
export type CropResizeOptions = {
  width: number | null;
  height: number | null;
};

// Parameters of one crop/resize pipeline step.
export type CropResizeStepParams = {
  cropRect: NormalizedCropRect;
  resize: CropResizeOptions | null;
};
export const DEFAULT_CURVES_DATA: CurvesData = {
channelMode: "rgb",
points: {
@@ -141,6 +158,16 @@ export const DEFAULT_DETAIL_ADJUST_DATA: DetailAdjustData = {
preset: null,
};
// Identity crop (full frame) with no resize — the no-op default step.
export const DEFAULT_CROP_RESIZE_STEP_PARAMS: CropResizeStepParams = {
  cropRect: {
    x: 0,
    y: 0,
    width: 1,
    height: 1,
  },
  resize: null,
};
export function cloneAdjustmentData<T>(value: T): T {
return JSON.parse(JSON.stringify(value)) as T;
}
@@ -153,6 +180,54 @@ function safeNumber(value: unknown, fallback: number): number {
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
}
/**
 * Coerces an arbitrary value into a positive integer (>= 1), or null when
 * the input is not a finite number greater than zero.
 */
function normalizePositiveIntOrNull(value: unknown): number | null {
  const isPositiveFinite = typeof value === "number" && Number.isFinite(value) && value > 0;
  if (!isPositiveFinite) {
    return null;
  }
  const rounded = Math.round(value);
  // Values in (0, 0.5) round to 0; clamp them up to 1.
  return rounded < 1 ? 1 : rounded;
}
/**
 * Normalizes an untrusted crop-rect payload into valid 0..1 coordinates.
 * x/y are clamped first; width/height are then clamped so the rectangle
 * never extends past the right/bottom edge (minimum extent 0.0001).
 */
function normalizeCropRect(value: unknown): NormalizedCropRect {
  const input = (value ?? {}) as Record<string, unknown>;
  const x = clamp(safeNumber(input.x, 0), 0, 1);
  const y = clamp(safeNumber(input.y, 0), 0, 1);
  // Remaining room to the right/bottom edge, never fully zero.
  const widthBound = Math.max(0.0001, 1 - x);
  const heightBound = Math.max(0.0001, 1 - y);
  return {
    x,
    y,
    width: clamp(safeNumber(input.width, 1), 0.0001, widthBound),
    height: clamp(safeNumber(input.height, 1), 0.0001, heightBound),
  };
}
/**
 * Normalizes persisted crop-step params. Accepts legacy field names
 * (`crop`, `rect`, `targetWidth`, `targetHeight`) and falls back to
 * treating the whole payload as the rect when no dedicated field exists.
 * `resize` collapses to null when neither dimension is a positive number.
 */
export function normalizeCropResizeStepParams(value: unknown): CropResizeStepParams {
  const raw = (value ?? {}) as Record<string, unknown>;
  const rectSource =
    (raw.cropRect as Record<string, unknown> | undefined) ??
    (raw.crop as Record<string, unknown> | undefined) ??
    (raw.rect as Record<string, unknown> | undefined) ??
    raw;
  const resizeSource = (raw.resize ?? {}) as Record<string, unknown>;
  const width = normalizePositiveIntOrNull(resizeSource.width ?? resizeSource.targetWidth);
  const height = normalizePositiveIntOrNull(resizeSource.height ?? resizeSource.targetHeight);
  const resize = width === null && height === null ? null : { width, height };
  return {
    cropRect: normalizeCropRect(rectSource),
    resize,
  };
}
function normalizeCurvePoints(points: unknown): CurvePoint[] {
if (!Array.isArray(points)) {
return cloneAdjustmentData(DEFAULT_CURVES_DATA.points.rgb);

View File

@@ -6,6 +6,10 @@ import {
type RenderFullOptions,
type RenderFullResult,
} from "@/lib/image-pipeline/render-types";
import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
@@ -96,21 +100,30 @@ export async function renderFull(options: RenderFullOptions): Promise<RenderFull
const { signal } = options;
const bitmap = await loadSourceBitmap(options.sourceUrl, { signal });
const resolvedSize = resolveRenderSize({
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
steps: geometrySteps,
signal,
});
const resolvedSize = resolveRenderSize({
sourceWidth: geometryResult.width,
sourceHeight: geometryResult.height,
render: options.render,
limits: options.limits,
});
const { canvas, context } = createCanvasContext(resolvedSize.width, resolvedSize.height);
context.drawImage(bitmap, 0, 0, resolvedSize.width, resolvedSize.height);
context.drawImage(geometryResult.canvas, 0, 0, resolvedSize.width, resolvedSize.height);
const imageData = context.getImageData(0, 0, resolvedSize.width, resolvedSize.height);
runFullPipelineWithBackendRouter({
pixels: imageData.data,
steps: options.steps,
steps: tonalSteps,
width: resolvedSize.width,
height: resolvedSize.height,
executionOptions: {

View File

@@ -0,0 +1,166 @@
export type CropResizeMode = "source" | "custom";
export type CropFitMode = "cover" | "contain" | "fill";
export type CropRect = {
x: number;
y: number;
width: number;
height: number;
};
export type CropResizeSettings = {
mode: CropResizeMode;
width?: number;
height?: number;
fit: CropFitMode;
keepAspect: boolean;
};
export type CropNodeData = {
crop: CropRect;
resize: CropResizeSettings;
};
const CROP_MIN_SIZE = 0.01;
const CUSTOM_SIZE_MIN = 1;
const CUSTOM_SIZE_MAX = 16_384;
const DEFAULT_CUSTOM_SIZE = 1024;
const DISALLOWED_CROP_PAYLOAD_KEYS = [
"blob",
"blobUrl",
"imageData",
"storageId",
"url",
] as const;
export const DEFAULT_CROP_NODE_DATA: CropNodeData = {
crop: {
x: 0,
y: 0,
width: 1,
height: 1,
},
resize: {
mode: "source",
fit: "cover",
keepAspect: true,
},
};
/** Narrows to a plain object: excludes null and arrays. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/** Returns the value when it is a finite number, otherwise null. */
function readFiniteNumber(value: unknown): number | null {
  return typeof value === "number" && Number.isFinite(value) ? value : null;
}
/** Restricts value to the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number): number {
  if (value < min) return min;
  if (value > max) return max;
  return value;
}
/** Clamps to the unit interval [0, 1]; null inputs take the supplied fallback. */
function clampUnit(value: number | null, fallback: number): number {
  return value === null ? fallback : clamp(value, 0, 1);
}
// Normalizes an arbitrary value into a valid unit-space crop rectangle.
// Width/height are clamped to [CROP_MIN_SIZE, 1] (non-positive or invalid
// values fall back to the defaults), then x/y are clamped so the rect
// never extends past the right/bottom edge of the frame.
function normalizeCropRect(value: unknown): CropRect {
  const source = isRecord(value) ? value : {};
  const base = DEFAULT_CROP_NODE_DATA.crop;
  const xInput = readFiniteNumber(source.x);
  const yInput = readFiniteNumber(source.y);
  const widthInput = readFiniteNumber(source.width);
  const heightInput = readFiniteNumber(source.height);
  const width = widthInput !== null && widthInput > 0
    ? clamp(widthInput, CROP_MIN_SIZE, 1)
    : base.width;
  const height = heightInput !== null && heightInput > 0
    ? clamp(heightInput, CROP_MIN_SIZE, 1)
    : base.height;
  // The anchor is limited to whatever room remains after sizing the rect.
  const x = clamp(clampUnit(xInput, base.x), 0, Math.max(0, 1 - width));
  const y = clamp(clampUnit(yInput, base.y), 0, Math.max(0, 1 - height));
  return {
    x,
    y,
    width,
    height,
  };
}
/**
 * Accepts only integers inside [CUSTOM_SIZE_MIN, CUSTOM_SIZE_MAX];
 * anything else yields undefined so callers can fall back to a default.
 */
function normalizeCustomSize(value: unknown): number | undefined {
  if (
    typeof value === "number" &&
    Number.isInteger(value) &&
    value >= CUSTOM_SIZE_MIN &&
    value <= CUSTOM_SIZE_MAX
  ) {
    return value;
  }
  return undefined;
}
// Normalizes resize settings; unknown modes/fits fall back to defaults.
// Custom mode always carries concrete width/height (DEFAULT_CUSTOM_SIZE
// when the stored value is missing or out of range); source mode omits them.
function normalizeResizeSettings(value: unknown): CropResizeSettings {
  const source = isRecord(value) ? value : {};
  const defaults = DEFAULT_CROP_NODE_DATA.resize;
  const mode: CropResizeMode = source.mode === "custom" ? "custom" : defaults.mode;
  const fit: CropFitMode =
    source.fit === "contain" || source.fit === "fill" || source.fit === "cover"
      ? source.fit
      : defaults.fit;
  const keepAspect = typeof source.keepAspect === "boolean" ? source.keepAspect : defaults.keepAspect;
  if (mode !== "custom") {
    return {
      mode,
      fit,
      keepAspect,
    };
  }
  return {
    mode,
    width: normalizeCustomSize(source.width) ?? DEFAULT_CUSTOM_SIZE,
    height: normalizeCustomSize(source.height) ?? DEFAULT_CUSTOM_SIZE,
    fit,
    keepAspect,
  };
}
/**
 * Throws when the payload carries any media/asset field from
 * DISALLOWED_CROP_PAYLOAD_KEYS — crop nodes persist parameters only.
 */
function assertNoDisallowedPayloadFields(data: Record<string, unknown>): void {
  const offending = DISALLOWED_CROP_PAYLOAD_KEYS.find((key) => key in data);
  if (offending !== undefined) {
    throw new Error(`Crop node accepts parameter data only. '${offending}' is not allowed in data.`);
  }
}
/**
 * Normalizes persisted crop-node data into a valid CropNodeData shape.
 * When `rejectDisallowedPayloadFields` is set, throws if the payload
 * carries media/asset fields (blob, storageId, url, ...) — crop nodes
 * store parameters only.
 */
export function normalizeCropNodeData(
  value: unknown,
  options?: {
    rejectDisallowedPayloadFields?: boolean;
  },
): CropNodeData {
  const source = isRecord(value) ? value : {};
  if (options?.rejectDisallowedPayloadFields) {
    assertNoDisallowedPayloadFields(source);
  }
  return {
    crop: normalizeCropRect(source.crop),
    resize: normalizeResizeSettings(source.resize),
  };
}

View File

@@ -0,0 +1,146 @@
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import { normalizeCropResizeStepParams } from "@/lib/image-pipeline/adjustment-types";
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;
export type GeometryTransformResult = {
canvas: SupportedCanvas;
context: SupportedContext;
width: number;
height: number;
};
type ApplyGeometryStepsOptions = {
source: CanvasImageSource;
sourceWidth?: number;
sourceHeight?: number;
steps: readonly PipelineStep[];
signal?: AbortSignal;
};
/** Raises the standard AbortError when the provided signal is already aborted. */
function throwIfAborted(signal: AbortSignal | undefined): void {
  if (!signal?.aborted) {
    return;
  }
  throw new DOMException("The operation was aborted.", "AbortError");
}
// Creates a 2D canvas context sized width x height, preferring a DOM
// <canvas> when a document exists and falling back to OffscreenCanvas
// (e.g. inside a worker). `willReadFrequently` hints that pixel data will
// be read back repeatedly. Throws when neither API is available.
function createCanvasContext(width: number, height: number): {
  canvas: SupportedCanvas;
  context: SupportedContext;
} {
  if (typeof document !== "undefined") {
    const canvas = document.createElement("canvas");
    canvas.width = width;
    canvas.height = height;
    const context = canvas.getContext("2d", { willReadFrequently: true });
    if (!context) {
      throw new Error("Geometry transform could not create a 2D context.");
    }
    return { canvas, context };
  }
  if (typeof OffscreenCanvas !== "undefined") {
    const canvas = new OffscreenCanvas(width, height);
    const context = canvas.getContext("2d", { willReadFrequently: true });
    if (!context) {
      throw new Error("Geometry transform could not create an offscreen 2D context.");
    }
    return { canvas, context };
  }
  throw new Error("Geometry transform is not available in this environment.");
}
/**
 * Validates that a dimension is a positive finite number and returns it
 * rounded to the nearest integer, never below 1. Throws with the field
 * name otherwise.
 */
function ensurePositiveDimension(name: string, value: number): number {
  const valid = Number.isFinite(value) && value > 0;
  if (!valid) {
    throw new Error(`Invalid ${name}. Expected a positive finite number.`);
  }
  return Math.max(1, Math.round(value));
}
/**
 * Splits pipeline steps into geometry steps (crop) that change the frame
 * and tonal steps that operate on pixel values, preserving relative order
 * within each partition.
 */
export function partitionPipelineSteps(steps: readonly PipelineStep[]): {
  geometrySteps: PipelineStep[];
  tonalSteps: PipelineStep[];
} {
  const geometrySteps = steps.filter((step) => step.type === "crop");
  const tonalSteps = steps.filter((step) => step.type !== "crop");
  return { geometrySteps, tonalSteps };
}
/**
 * Applies the crop/resize (geometry) steps to a source image and returns a
 * canvas holding the transformed frame together with its final dimensions.
 *
 * Each crop step draws the selected region of the current canvas onto a new
 * canvas sized either to the crop region or to the step's explicit resize
 * target. Non-crop steps are ignored. Throws AbortError when the optional
 * signal is aborted before or between steps.
 */
export function applyGeometryStepsToSource(options: ApplyGeometryStepsOptions): GeometryTransformResult {
  throwIfAborted(options.signal);
  const sourceWidth =
    options.sourceWidth ?? (options.source as { width?: number }).width ?? Number.NaN;
  const sourceHeight =
    options.sourceHeight ?? (options.source as { height?: number }).height ?? Number.NaN;
  let currentWidth = ensurePositiveDimension("sourceWidth", sourceWidth);
  let currentHeight = ensurePositiveDimension("sourceHeight", sourceHeight);
  let current = createCanvasContext(currentWidth, currentHeight);
  current.context.drawImage(options.source, 0, 0, currentWidth, currentHeight);
  for (const step of options.steps) {
    throwIfAborted(options.signal);
    if (step.type !== "crop") {
      continue;
    }
    const normalized = normalizeCropResizeStepParams(step.params);
    // Convert the normalized (0..1) crop rect into pixel coordinates of the
    // current frame: floor the origin, round the extent, and clamp both so
    // the crop region stays inside the frame with at least a 1px area.
    const sourceX = Math.max(0, Math.floor(normalized.cropRect.x * currentWidth));
    const sourceY = Math.max(0, Math.floor(normalized.cropRect.y * currentHeight));
    const maxCropWidth = Math.max(1, currentWidth - sourceX);
    const maxCropHeight = Math.max(1, currentHeight - sourceY);
    // Renamed from sourceWidth/sourceHeight: the original names shadowed the
    // function-scope source dimensions declared above.
    const cropWidth = Math.max(
      1,
      Math.min(maxCropWidth, Math.round(normalized.cropRect.width * currentWidth)),
    );
    const cropHeight = Math.max(
      1,
      Math.min(maxCropHeight, Math.round(normalized.cropRect.height * currentHeight)),
    );
    // An explicit resize target wins; otherwise output matches the crop 1:1.
    const targetWidth = normalized.resize?.width ?? cropWidth;
    const targetHeight = normalized.resize?.height ?? cropHeight;
    const next = createCanvasContext(targetWidth, targetHeight);
    next.context.drawImage(
      current.canvas,
      sourceX,
      sourceY,
      cropWidth,
      cropHeight,
      0,
      0,
      targetWidth,
      targetHeight,
    );
    current = next;
    currentWidth = targetWidth;
    currentHeight = targetHeight;
  }
  return {
    canvas: current.canvas,
    context: current.context,
    width: currentWidth,
    height: currentHeight,
  };
}

View File

@@ -4,6 +4,10 @@ import {
runPreviewStepWithBackendRouter,
} from "@/lib/image-pipeline/backend/backend-router";
import { computeHistogram, emptyHistogram, type HistogramData } from "@/lib/image-pipeline/histogram";
import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
export type PreviewRenderResult = {
@@ -69,21 +73,30 @@ export async function renderPreview(options: {
const bitmap = await loadSourceBitmap(options.sourceUrl, {
signal: options.signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
steps: geometrySteps,
signal: options.signal,
});
const width = Math.max(1, Math.round(options.previewWidth));
const height = Math.max(1, Math.round((bitmap.height / bitmap.width) * width));
const height = Math.max(1, Math.round((geometryResult.height / geometryResult.width) * width));
throwIfAborted(options.signal);
const context = createPreviewContext(width, height);
context.drawImage(bitmap, 0, 0, width, height);
context.drawImage(geometryResult.canvas, 0, 0, width, height);
const imageData = context.getImageData(0, 0, width, height);
const backendHint = getPreviewBackendHintForSteps(options.steps);
const backendHint = getPreviewBackendHintForSteps(tonalSteps);
for (let index = 0; index < options.steps.length; index += 1) {
for (let index = 0; index < tonalSteps.length; index += 1) {
runPreviewStepWithBackendRouter({
pixels: imageData.data,
step: options.steps[index]!,
step: tonalSteps[index]!,
width,
height,
backendHint,

View File

@@ -54,6 +54,62 @@ function evictIfNeeded(excludeSourceUrl?: string): void {
}
}
/**
 * Heuristically detects video sources by URL: either the pexels-video proxy
 * endpoint or a path ending in a known video file extension.
 */
function isLikelyVideoUrl(sourceUrl: string): boolean {
  try {
    const base = typeof window !== "undefined" ? window.location.origin : "http://localhost";
    const pathname = new URL(sourceUrl, base).pathname.toLowerCase();
    return pathname.includes("/api/pexels-video") || /\.(mp4|webm|ogg|ogv|mov|m4v)$/.test(pathname);
  } catch {
    // Unparseable URL: fall back to a loose extension match on the raw string.
    return /\.(mp4|webm|ogg|ogv|mov|m4v)(?:\?|$)/i.test(sourceUrl);
  }
}
// Grabs the first decodable frame of a video blob as an ImageBitmap using a
// detached <video> element. Without a DOM (no document) it falls back to
// createImageBitmap on the blob directly — assumes that environment can
// decode video blobs natively; TODO confirm.
async function decodeVideoFrameBitmap(blob: Blob): Promise<ImageBitmap> {
  if (typeof document === "undefined") {
    return await createImageBitmap(blob);
  }
  const video = document.createElement("video");
  video.preload = "auto";
  video.muted = true;
  video.playsInline = true;
  const objectUrl = URL.createObjectURL(blob);
  video.src = objectUrl;
  try {
    // loadeddata fires once the current frame is available for capture.
    await new Promise<void>((resolve, reject) => {
      video.onloadeddata = () => resolve();
      video.onerror = () => reject(new Error("Render source video decode failed."));
      video.load();
    });
    return await createImageBitmap(video);
  } finally {
    // Detach the element and release the object URL regardless of outcome.
    video.pause();
    video.removeAttribute("src");
    video.load();
    URL.revokeObjectURL(objectUrl);
  }
}
/**
 * Decodes a fetched render source into an ImageBitmap. Video payloads
 * (detected via content-type header, blob MIME type, or URL heuristic)
 * decode through a frame grab; everything else decodes directly.
 */
async function decodeBitmapFromResponse(sourceUrl: string, response: Response): Promise<ImageBitmap> {
  const contentType = response.headers?.get("content-type")?.toLowerCase() ?? "";
  const blob = await response.blob();
  const looksLikeVideo =
    contentType.startsWith("video/") ||
    blob.type.startsWith("video/") ||
    isLikelyVideoUrl(sourceUrl);
  return looksLikeVideo ? await decodeVideoFrameBitmap(blob) : await createImageBitmap(blob);
}
export function clearSourceBitmapCache(): void {
for (const sourceUrl of [...imageBitmapCache.keys()]) {
deleteCacheEntry(sourceUrl);
@@ -77,8 +133,7 @@ function getOrCreateSourceBitmapPromise(sourceUrl: string): Promise<ImageBitmap>
throw new Error(`Render source failed: ${response.status}`);
}
const blob = await response.blob();
const bitmap = await createImageBitmap(blob);
const bitmap = await decodeBitmapFromResponse(sourceUrl, response);
if (entry.released || imageBitmapCache.get(sourceUrl) !== entry) {
closeBitmap(bitmap);

View File

@@ -106,6 +106,34 @@
"denoiseColor": "Entrauschen Farbe",
"grain": "Körnung"
}
},
"crop": {
"title": "Crop / Resize",
"previewHint": "Verbinde eine Bild-, Asset-, KI-Bild- oder Video-Quelle fuer die Live-Vorschau.",
"previewRendering": "Rendering...",
"fields": {
"x": "X",
"y": "Y",
"width": "Breite",
"height": "Hoehe",
"outputWidth": "Ausgabe-Breite",
"outputHeight": "Ausgabe-Hoehe"
},
"resizeMode": "Ausgabe",
"resizeModes": {
"source": "Quelle",
"custom": "Benutzerdefiniert"
},
"fitMode": "Einpassung",
"fitModes": {
"cover": "Cover",
"contain": "Contain",
"fill": "Fill"
},
"keepAspect": "Seitenverhaeltnis beibehalten",
"sourceResolution": "Quell-Frame",
"outputResolutionLabel": "Ausgabeaufloesung",
"cropSummary": "Crop-Bereich: x {x}, y {y}, b {width}, h {height}"
}
},
"prompts": {

View File

@@ -106,6 +106,34 @@
"denoiseColor": "Denoise Color",
"grain": "Grain"
}
},
"crop": {
"title": "Crop / Resize",
"previewHint": "Connect an image, asset, AI image, or video source for live preview.",
"previewRendering": "Rendering...",
"fields": {
"x": "X",
"y": "Y",
"width": "Width",
"height": "Height",
"outputWidth": "Output Width",
"outputHeight": "Output Height"
},
"resizeMode": "Output",
"resizeModes": {
"source": "Source",
"custom": "Custom"
},
"fitMode": "Fit",
"fitModes": {
"cover": "Cover",
"contain": "Contain",
"fill": "Fill"
},
"keepAspect": "Keep aspect ratio",
"sourceResolution": "Source frame",
"outputResolutionLabel": "Output resolution",
"cropSummary": "Crop area: x {x}, y {y}, w {width}, h {height}"
}
},
"prompts": {

View File

@@ -62,6 +62,88 @@ describe("canvas connection policy", () => {
).toBe("adjustment-source-invalid");
});
it("allows image sources to crop", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "image",
targetType: "crop",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("allows video sources to crop", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "video",
targetType: "crop",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("allows ai-video sources to crop", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "ai-video",
targetType: "crop",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("allows chained crop nodes", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "crop",
targetType: "crop",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("blocks unsupported crop sources", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "prompt",
targetType: "crop",
targetIncomingCount: 0,
}),
).toBe("crop-source-invalid");
});
it("limits crop nodes to one incoming connection", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "image",
targetType: "crop",
targetIncomingCount: 1,
}),
).toBe("crop-incoming-limit");
});
it("allows crop output as render source", () => {
expect(
validateCanvasConnectionPolicy({
sourceType: "crop",
targetType: "render",
targetIncomingCount: 0,
}),
).toBeNull();
});
it("describes unsupported crop source message", () => {
expect(getCanvasConnectionValidationMessage("crop-source-invalid")).toBe(
"Crop akzeptiert nur Bild-, Asset-, KI-Bild-, Video-, KI-Video-, Crop- oder Adjustment-Input.",
);
});
it("describes crop incoming limit", () => {
expect(getCanvasConnectionValidationMessage("crop-incoming-limit")).toBe(
"Crop-Nodes erlauben genau eine eingehende Verbindung.",
);
});
it("blocks ai-video as render source", () => {
expect(
validateCanvasConnectionPolicy({

View File

@@ -73,6 +73,7 @@ describe("useCanvasDeleteHandlers", () => {
afterEach(async () => {
latestHandlersRef.current = null;
vi.useRealTimers();
vi.clearAllMocks();
consoleErrorSpy?.mockRestore();
consoleInfoSpy?.mockRestore();
@@ -148,7 +149,8 @@ describe("useCanvasDeleteHandlers", () => {
});
});
it("logs bridge payload details when bridge edge creation fails", async () => {
it("logs bridge payload details when bridge edge creation retries are exhausted", async () => {
vi.useFakeTimers();
consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
consoleInfoSpy = vi.spyOn(console, "info").mockImplementation(() => undefined);
@@ -200,9 +202,12 @@ describe("useCanvasDeleteHandlers", () => {
await act(async () => {
resolveBatchRemove?.();
await Promise.resolve();
await vi.runAllTimersAsync();
await Promise.resolve();
});
expect(runCreateEdgeMutation).toHaveBeenCalledTimes(4);
expect(consoleErrorSpy).toHaveBeenCalledWith(
"[Canvas] bridge edge create failed",
expect.objectContaining({
@@ -214,7 +219,9 @@ describe("useCanvasDeleteHandlers", () => {
sourceHandle: undefined,
targetHandle: undefined,
},
error: bridgeError,
attempt: 4,
maxAttempts: 4,
error: bridgeError.message,
}),
);
});

View File

@@ -0,0 +1,84 @@
import { describe, expect, it } from "vitest";
import {
DEFAULT_CROP_NODE_DATA,
normalizeCropNodeData,
} from "@/lib/image-pipeline/crop-node-data";
describe("crop node data validation", () => {
it("normalizes and clamps crop rectangle data", () => {
expect(
normalizeCropNodeData({
crop: {
x: -0.2,
y: 0.9,
width: 0.8,
height: 0.4,
},
resize: {
mode: "custom",
width: 2048,
height: 1024,
fit: "cover",
keepAspect: false,
},
}),
).toEqual({
crop: {
x: 0,
y: 0.6,
width: 0.8,
height: 0.4,
},
resize: {
mode: "custom",
width: 2048,
height: 1024,
fit: "cover",
keepAspect: false,
},
});
});
it("falls back to defaults for invalid values", () => {
expect(
normalizeCropNodeData({
crop: {
x: Number.NaN,
y: "foo",
width: 2,
height: -1,
},
resize: {
mode: "invalid",
width: 0,
height: Number.NaN,
fit: "invalid",
keepAspect: "invalid",
},
}),
).toEqual(DEFAULT_CROP_NODE_DATA);
});
it("rejects destructive payload fields", () => {
expect(() =>
normalizeCropNodeData(
{
...DEFAULT_CROP_NODE_DATA,
storageId: "storage_123",
},
{ rejectDisallowedPayloadFields: true },
),
).toThrow("Crop node accepts parameter data only. 'storageId' is not allowed in data.");
expect(() =>
normalizeCropNodeData(
{
...DEFAULT_CROP_NODE_DATA,
imageData: "...",
},
{ rejectDisallowedPayloadFields: true },
),
).toThrow("Crop node accepts parameter data only. 'imageData' is not allowed in data.");
});
});

321
tests/crop-node.test.ts Normal file
View File

@@ -0,0 +1,321 @@
// @vitest-environment jsdom
import React, { act } from "react";
import { createRoot, type Root } from "react-dom/client";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { CropNodeData } from "@/lib/image-pipeline/crop-node-data";
const mocks = vi.hoisted(() => ({
queueNodeDataUpdate: vi.fn(async () => undefined),
setPreviewNodeDataOverride: vi.fn(),
clearPreviewNodeDataOverride: vi.fn(),
collectPipelineFromGraph: vi.fn(() => []),
getSourceImageFromGraph: vi.fn(() => "https://cdn.example.com/source.png"),
shouldFastPathPreviewPipeline: vi.fn(() => false),
}));
vi.mock("@xyflow/react", () => ({
Handle: () => null,
Position: { Left: "left", Right: "right" },
}));
vi.mock("next-intl", () => ({
useTranslations: () => (key: string) => key,
}));
vi.mock("lucide-react", () => ({
Crop: () => null,
}));
vi.mock("@/components/canvas/canvas-sync-context", () => ({
useCanvasSync: () => ({
queueNodeDataUpdate: mocks.queueNodeDataUpdate,
}),
}));
vi.mock("@/components/canvas/canvas-graph-context", () => ({
useCanvasGraph: () => ({ nodes: [], edges: [], previewNodeDataOverrides: {} }),
useCanvasGraphPreviewOverrides: () => ({
setPreviewNodeDataOverride: mocks.setPreviewNodeDataOverride,
clearPreviewNodeDataOverride: mocks.clearPreviewNodeDataOverride,
}),
}));
vi.mock("@/hooks/use-pipeline-preview", () => ({
usePipelinePreview: () => ({
canvasRef: { current: null },
hasSource: true,
isRendering: false,
previewAspectRatio: 1,
error: null,
}),
}));
vi.mock("@/lib/canvas-render-preview", () => ({
collectPipelineFromGraph: mocks.collectPipelineFromGraph,
getSourceImageFromGraph: mocks.getSourceImageFromGraph,
shouldFastPathPreviewPipeline: mocks.shouldFastPathPreviewPipeline,
}));
vi.mock("@/components/canvas/nodes/base-node-wrapper", () => ({
default: ({ children }: { children: React.ReactNode }) => React.createElement("div", null, children),
}));
vi.mock("@/components/ui/select", () => ({
Select: ({ children }: { children: React.ReactNode }) => React.createElement("div", null, children),
SelectContent: ({ children }: { children: React.ReactNode }) => React.createElement("div", null, children),
SelectItem: ({ children }: { children: React.ReactNode }) => React.createElement("div", null, children),
SelectTrigger: ({ children }: { children: React.ReactNode }) => React.createElement("div", null, children),
SelectValue: () => null,
}));
import CropNode from "@/components/canvas/nodes/crop-node";
(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true;
type PointerInit = {
pointerId?: number;
clientX: number;
clientY: number;
};
/**
 * Fires a synthetic pointer-style event backed by MouseEvent (jsdom lacks
 * PointerEvent) with a pointerId patched on; defaults to pointerId 1.
 */
function dispatchPointerEvent(target: Element, type: string, init: PointerInit) {
  const syntheticEvent = new MouseEvent(type, {
    bubbles: true,
    cancelable: true,
    clientX: init.clientX,
    clientY: init.clientY,
  }) as MouseEvent & { pointerId?: number };
  syntheticEvent.pointerId = init.pointerId ?? 1;
  target.dispatchEvent(syntheticEvent);
}
/**
 * Locates the number input living under the label whose text contains the
 * given translation key; throws with a descriptive message when missing.
 */
function getNumberInput(container: HTMLElement, labelKey: string): HTMLInputElement {
  const labels = Array.from(container.querySelectorAll("label"));
  const matchingLabel = labels.find((element) => element.textContent?.includes(labelKey));
  if (!(matchingLabel instanceof HTMLLabelElement)) {
    throw new Error(`Label not found: ${labelKey}`);
  }
  const numberInput = matchingLabel.querySelector("input[type='number']");
  if (!(numberInput instanceof HTMLInputElement)) {
    throw new Error(`Input not found for: ${labelKey}`);
  }
  return numberInput;
}
describe("CropNode", () => {
let container: HTMLDivElement | null = null;
let root: Root | null = null;
beforeEach(() => {
vi.useFakeTimers();
mocks.queueNodeDataUpdate.mockClear();
mocks.setPreviewNodeDataOverride.mockClear();
mocks.clearPreviewNodeDataOverride.mockClear();
mocks.collectPipelineFromGraph.mockClear();
mocks.collectPipelineFromGraph.mockReturnValue([]);
if (!("setPointerCapture" in HTMLElement.prototype)) {
Object.defineProperty(HTMLElement.prototype, "setPointerCapture", {
configurable: true,
value: () => undefined,
});
}
if (!("releasePointerCapture" in HTMLElement.prototype)) {
Object.defineProperty(HTMLElement.prototype, "releasePointerCapture", {
configurable: true,
value: () => undefined,
});
}
vi.spyOn(HTMLElement.prototype, "setPointerCapture").mockImplementation(() => undefined);
vi.spyOn(HTMLElement.prototype, "releasePointerCapture").mockImplementation(() => undefined);
container = document.createElement("div");
document.body.appendChild(container);
root = createRoot(container);
});
afterEach(async () => {
if (root) {
await act(async () => {
root?.unmount();
});
}
container?.remove();
container = null;
root = null;
vi.restoreAllMocks();
vi.useRealTimers();
});
async function renderNode(data: CropNodeData) {
await act(async () => {
root?.render(
React.createElement(CropNode, {
id: "crop-1",
data,
selected: false,
dragging: false,
zIndex: 0,
isConnectable: true,
type: "crop",
xPos: 0,
yPos: 0,
width: 320,
height: 360,
positionAbsoluteX: 0,
positionAbsoluteY: 0,
} as never),
);
});
}
function setPreviewBounds() {
const preview = container?.querySelector("[data-testid='crop-preview-area']");
if (!(preview instanceof HTMLElement)) {
throw new Error("Preview area not found");
}
vi.spyOn(preview, "getBoundingClientRect").mockReturnValue({
x: 0,
y: 0,
left: 0,
top: 0,
right: 200,
bottom: 200,
width: 200,
height: 200,
toJSON: () => ({}),
});
return preview;
}
it("moves crop rect when dragging inside overlay", async () => {
await renderNode({
crop: { x: 0.1, y: 0.1, width: 0.4, height: 0.4 },
resize: { mode: "source", fit: "cover", keepAspect: false },
});
setPreviewBounds();
const overlay = container?.querySelector("[data-testid='crop-overlay']");
if (!(overlay instanceof HTMLElement)) {
throw new Error("Overlay not found");
}
await act(async () => {
dispatchPointerEvent(overlay, "pointerdown", { clientX: 40, clientY: 40 });
dispatchPointerEvent(overlay, "pointermove", { clientX: 60, clientY: 60 });
dispatchPointerEvent(overlay, "pointerup", { clientX: 60, clientY: 60 });
});
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.x").value).toBe("0.2");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.y").value).toBe("0.2");
});
it("resizes crop rect from corner and edge handles", async () => {
await renderNode({
crop: { x: 0.1, y: 0.1, width: 0.4, height: 0.4 },
resize: { mode: "source", fit: "cover", keepAspect: false },
});
setPreviewBounds();
const eastHandle = container?.querySelector("[data-testid='crop-handle-e']");
const southEastHandle = container?.querySelector("[data-testid='crop-handle-se']");
if (!(eastHandle instanceof HTMLElement) || !(southEastHandle instanceof HTMLElement)) {
throw new Error("Resize handles not found");
}
await act(async () => {
dispatchPointerEvent(eastHandle, "pointerdown", { clientX: 100, clientY: 80 });
dispatchPointerEvent(eastHandle, "pointermove", { clientX: 140, clientY: 80 });
dispatchPointerEvent(eastHandle, "pointerup", { clientX: 140, clientY: 80 });
});
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.width").value).toBe("0.6");
await act(async () => {
dispatchPointerEvent(southEastHandle, "pointerdown", { clientX: 140, clientY: 140 });
dispatchPointerEvent(southEastHandle, "pointermove", { clientX: 160, clientY: 180 });
dispatchPointerEvent(southEastHandle, "pointerup", { clientX: 160, clientY: 180 });
});
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.width").value).toBe("0.7");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.height").value).toBe("0.6");
});
it("preserves aspect ratio while resizing when keepAspect is enabled", async () => {
await renderNode({
crop: { x: 0.1, y: 0.1, width: 0.4, height: 0.2 },
resize: { mode: "source", fit: "cover", keepAspect: true },
});
setPreviewBounds();
const southEastHandle = container?.querySelector("[data-testid='crop-handle-se']");
if (!(southEastHandle instanceof HTMLElement)) {
throw new Error("Corner handle not found");
}
await act(async () => {
dispatchPointerEvent(southEastHandle, "pointerdown", { clientX: 100, clientY: 60 });
dispatchPointerEvent(southEastHandle, "pointermove", { clientX: 140, clientY: 60 });
dispatchPointerEvent(southEastHandle, "pointerup", { clientX: 140, clientY: 60 });
});
expect(Number(getNumberInput(container as HTMLElement, "adjustments.crop.fields.width").value)).toBeCloseTo(
0.6,
6,
);
expect(Number(getNumberInput(container as HTMLElement, "adjustments.crop.fields.height").value)).toBeCloseTo(
0.3,
6,
);
});
it("clamps drag operations to image bounds", async () => {
await renderNode({
crop: { x: 0.7, y: 0.7, width: 0.3, height: 0.3 },
resize: { mode: "source", fit: "cover", keepAspect: false },
});
setPreviewBounds();
const overlay = container?.querySelector("[data-testid='crop-overlay']");
if (!(overlay instanceof HTMLElement)) {
throw new Error("Overlay not found");
}
await act(async () => {
dispatchPointerEvent(overlay, "pointerdown", { clientX: 150, clientY: 150 });
dispatchPointerEvent(overlay, "pointermove", { clientX: -50, clientY: -50 });
dispatchPointerEvent(overlay, "pointerup", { clientX: -50, clientY: -50 });
});
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.x").value).toBe("0");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.y").value).toBe("0");
});
it("ignores drag starts outside overlay and handles", async () => {
await renderNode({
crop: { x: 0.2, y: 0.2, width: 0.4, height: 0.4 },
resize: { mode: "source", fit: "cover", keepAspect: false },
});
setPreviewBounds();
const preview = container?.querySelector("[data-testid='crop-preview-area']");
if (!(preview instanceof HTMLElement)) {
throw new Error("Preview not found");
}
await act(async () => {
dispatchPointerEvent(preview, "pointerdown", { clientX: 10, clientY: 10 });
dispatchPointerEvent(preview, "pointermove", { clientX: 120, clientY: 120 });
dispatchPointerEvent(preview, "pointerup", { clientX: 120, clientY: 120 });
});
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.x").value).toBe("0.2");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.y").value).toBe("0.2");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.width").value).toBe("0.4");
expect(getNumberInput(container as HTMLElement, "adjustments.crop.fields.height").value).toBe("0.4");
expect(mocks.setPreviewNodeDataOverride).not.toHaveBeenCalled();
});
});

View File

@@ -0,0 +1,149 @@
// @vitest-environment jsdom
import { describe, expect, it, vi } from "vitest";
import { normalizeCropResizeStepParams } from "@/lib/image-pipeline/adjustment-types";
import { applyGeometryStepsToSource } from "@/lib/image-pipeline/geometry-transform";
describe("crop/resize normalization", () => {
it("falls back to default full-frame crop when params are invalid", () => {
expect(normalizeCropResizeStepParams(null)).toEqual({
cropRect: {
x: 0,
y: 0,
width: 1,
height: 1,
},
resize: null,
});
});
it("clamps normalized crop rect and rounds resize dimensions", () => {
expect(
normalizeCropResizeStepParams({
cropRect: {
x: -0.25,
y: 0.2,
width: 1.75,
height: 0.5,
},
resize: {
width: 99.7,
height: 0,
},
}),
).toEqual({
cropRect: {
x: 0,
y: 0.2,
width: 1,
height: 0.5,
},
resize: {
width: 100,
height: null,
},
});
});
});
describe("geometry transform", () => {
  it("applies crop before tonal execution and updates output dimensions", () => {
    // One fake 2d context is recorded per created canvas so draw order
    // across the pipeline can be asserted.
    const contexts: Array<{ drawImage: ReturnType<typeof vi.fn> }> = [];
    const nativeCreateElement = document.createElement.bind(document);
    // Keep a handle to the spy so it can be restored: the original left the
    // createElement mock installed, leaking it into later tests in this file.
    const createElementSpy = vi
      .spyOn(document, "createElement")
      .mockImplementation((tagName: string) => {
        if (tagName.toLowerCase() !== "canvas") {
          return nativeCreateElement(tagName);
        }
        const context = {
          drawImage: vi.fn(),
        };
        contexts.push(context);
        return {
          width: 0,
          height: 0,
          getContext: vi.fn().mockReturnValue(context),
        } as unknown as HTMLCanvasElement;
      });
    try {
      const source = { width: 4, height: 2 } as CanvasImageSource;
      const result = applyGeometryStepsToSource({
        source,
        steps: [
          {
            nodeId: "crop-1",
            type: "crop",
            params: {
              cropRect: {
                x: 0.5,
                y: 0,
                width: 0.5,
                height: 1,
              },
            },
          },
        ],
      });
      // Right half of a 4x2 source -> 2x2 output.
      expect(result.width).toBe(2);
      expect(result.height).toBe(2);
      expect(contexts).toHaveLength(2);
      // First canvas receives the untouched source at full size...
      expect(contexts[0]!.drawImage).toHaveBeenCalledWith(source, 0, 0, 4, 2);
      // ...second canvas receives the crop (sx=2, sw=2) drawn at the origin.
      expect(contexts[1]!.drawImage).toHaveBeenCalledWith(
        expect.objectContaining({ width: 4, height: 2 }),
        2,
        0,
        2,
        2,
        0,
        0,
        2,
        2,
      );
    } finally {
      // Restore document.createElement so the mock does not bleed into
      // subsequent tests.
      createElementSpy.mockRestore();
    }
  });

  it("applies resize dimensions from crop params", () => {
    const nativeCreateElement = document.createElement.bind(document);
    const createElementSpy = vi
      .spyOn(document, "createElement")
      .mockImplementation((tagName: string) => {
        if (tagName.toLowerCase() !== "canvas") {
          return nativeCreateElement(tagName);
        }
        return {
          width: 0,
          height: 0,
          getContext: vi.fn().mockReturnValue({ drawImage: vi.fn() }),
        } as unknown as HTMLCanvasElement;
      });
    try {
      const source = { width: 4, height: 4 } as CanvasImageSource;
      const result = applyGeometryStepsToSource({
        source,
        steps: [
          {
            nodeId: "crop-1",
            type: "crop",
            params: {
              cropRect: {
                x: 0,
                y: 0,
                width: 1,
                height: 1,
              },
              resize: {
                width: 3,
                height: 2,
              },
            },
          },
        ],
      });
      // Full-frame crop plus an explicit resize -> output takes resize dims.
      expect(result.width).toBe(3);
      expect(result.height).toBe(2);
    } finally {
      createElementSpy.mockRestore();
    }
  });
});

View File

@@ -298,4 +298,61 @@ describe("loadSourceBitmap", () => {
await expect(loadSourceBitmap(sourceUrl)).resolves.toBe(secondBitmap);
expect(fetch).toHaveBeenCalledTimes(2);
});
// Verifies that a video source URL is fetched, decoded through an off-screen
// <video> element, and converted to a bitmap of the first decodable frame.
// `blob`, `bitmap`, `createImageBitmap`, and `importSubject` come from the
// suite setup above this hunk (outside this view).
it("extracts the first decodable frame for video sources", async () => {
// Fetch response advertising a video payload via its content-type header.
const response = {
ok: true,
status: 200,
headers: {
get: vi.fn().mockReturnValue("video/mp4"),
},
blob: vi.fn().mockResolvedValue(blob),
};
// Minimal HTMLVideoElement stand-in: calling load() synchronously reports a
// decoded frame through onloadeddata, so the test never awaits real media.
const fakeVideo: Partial<HTMLVideoElement> & {
onloadeddata: ((event: Event) => void) | null;
onerror: ((event: Event) => void) | null;
load: () => void;
} = {
muted: false,
playsInline: false,
preload: "none",
onloadeddata: null,
onerror: null,
load() {
this.onloadeddata?.(new Event("loadeddata"));
},
pause: vi.fn(),
removeAttribute: vi.fn(),
};
const createObjectUrl = vi.fn().mockReturnValue("blob:video-source");
const revokeObjectUrl = vi.fn();
const nativeCreateElement = document.createElement.bind(document);
// NOTE(review): Object.assign mutates the real global URL constructor in
// place, so these method mocks survive vi.unstubAllGlobals(); consider
// spying on the methods or restoring them explicitly — TODO confirm the
// suite relies on per-file isolation here.
vi.stubGlobal(
"URL",
Object.assign(URL, {
createObjectURL: createObjectUrl,
revokeObjectURL: revokeObjectUrl,
}),
);
// Only <video> creation is intercepted; all other tags use the real DOM.
vi.spyOn(document, "createElement").mockImplementation((tagName: string) => {
if (tagName.toLowerCase() === "video") {
return fakeVideo as HTMLVideoElement;
}
return nativeCreateElement(tagName);
});
vi.stubGlobal("fetch", vi.fn().mockResolvedValue(response));
const { loadSourceBitmap } = await importSubject();
await expect(loadSourceBitmap("https://cdn.example.com/video.mp4")).resolves.toBe(bitmap);
// Content sniffing, object-URL lifecycle, and frame capture all observed.
expect(response.headers.get).toHaveBeenCalledWith("content-type");
expect(createObjectUrl).toHaveBeenCalledWith(blob);
expect(createImageBitmap).toHaveBeenCalledWith(fakeVideo);
expect(revokeObjectUrl).toHaveBeenCalledWith("blob:video-source");
});
});

View File

@@ -0,0 +1,92 @@
import { describe, expect, it } from "vitest";
import {
buildGraphSnapshot,
resolveRenderPreviewInputFromGraph,
} from "@/lib/canvas-render-preview";
describe("resolveRenderPreviewInputFromGraph", () => {
  it("includes crop in collected pipeline steps", () => {
    const nodes = [
      {
        id: "image-1",
        type: "image",
        data: { url: "https://cdn.example.com/source.png" },
      },
      {
        id: "crop-1",
        type: "crop",
        data: { cropRect: { x: 0.1, y: 0.2, width: 0.4, height: 0.3 } },
      },
      { id: "render-1", type: "render", data: {} },
    ];
    const edges = [
      { source: "image-1", target: "crop-1" },
      { source: "crop-1", target: "render-1" },
    ];
    const preview = resolveRenderPreviewInputFromGraph({
      nodeId: "render-1",
      graph: buildGraphSnapshot(nodes, edges),
    });
    // The crop node upstream of the render target becomes a pipeline step.
    expect(preview.steps).toEqual([
      {
        nodeId: "crop-1",
        type: "crop",
        params: { cropRect: { x: 0.1, y: 0.2, width: 0.4, height: 0.3 } },
      },
    ]);
  });

  it("derives proxied pexels video source URL from mp4Url", () => {
    const mp4Url = "https://player.pexels.com/videos/example.mp4";
    const graph = buildGraphSnapshot(
      [
        { id: "video-1", type: "video", data: { mp4Url } },
        { id: "render-1", type: "render", data: {} },
      ],
      [{ source: "video-1", target: "render-1" }],
    );
    const { sourceUrl } = resolveRenderPreviewInputFromGraph({
      nodeId: "render-1",
      graph,
    });
    // Pexels-hosted media is routed through the local proxy endpoint.
    expect(sourceUrl).toBe(`/api/pexels-video?u=${encodeURIComponent(mp4Url)}`);
  });

  it("uses ai-video data.url as source URL when available", () => {
    const directUrl = "https://cdn.example.com/generated-video.mp4";
    const graph = buildGraphSnapshot(
      [
        { id: "ai-video-1", type: "ai-video", data: { url: directUrl } },
        { id: "render-1", type: "render", data: {} },
      ],
      [{ source: "ai-video-1", target: "render-1" }],
    );
    const { sourceUrl } = resolveRenderPreviewInputFromGraph({
      nodeId: "render-1",
      graph,
    });
    // AI-generated video nodes expose their URL directly, unproxied.
    expect(sourceUrl).toBe(directUrl);
  });
});

View File

@@ -4,15 +4,20 @@ import { beforeEach, describe, expect, it } from "vitest";
import {
clearDashboardSnapshotCache,
emitDashboardSnapshotCacheInvalidationSignal,
invalidateDashboardSnapshotForLastSignedInUser,
readDashboardSnapshotCache,
writeDashboardSnapshotCache,
} from "@/lib/dashboard-snapshot-cache";
// Fixed fixtures for the cache tests below. The two storage keys mirror the
// keys the implementation writes (sessionStorage for the last dashboard user,
// localStorage for the invalidation signal) — TODO confirm they stay in sync
// with dashboard-snapshot-cache.
const USER_ID = "user-cache-test";
const LAST_DASHBOARD_USER_KEY = "ls-last-dashboard-user";
const INVALIDATION_SIGNAL_KEY = "lemonspace.dashboard:snapshot:invalidate:v1";
describe("dashboard snapshot cache", () => {
beforeEach(() => {
const data = new Map<string, string>();
const sessionData = new Map<string, string>();
const localStorageMock = {
getItem: (key: string) => data.get(key) ?? null,
setItem: (key: string, value: string) => {
@@ -22,11 +27,24 @@ describe("dashboard snapshot cache", () => {
data.delete(key);
},
};
const sessionStorageMock = {
getItem: (key: string) => sessionData.get(key) ?? null,
setItem: (key: string, value: string) => {
sessionData.set(key, value);
},
removeItem: (key: string) => {
sessionData.delete(key);
},
};
Object.defineProperty(window, "localStorage", {
value: localStorageMock,
configurable: true,
});
Object.defineProperty(window, "sessionStorage", {
value: sessionStorageMock,
configurable: true,
});
clearDashboardSnapshotCache(USER_ID);
});
@@ -70,4 +88,26 @@ describe("dashboard snapshot cache", () => {
expect(readDashboardSnapshotCache(USER_ID)).toBeNull();
});
it("invalidates cache for the last signed-in user", () => {
  writeDashboardSnapshotCache(USER_ID, { generatedAt: 1 });
  window.sessionStorage.setItem(LAST_DASHBOARD_USER_KEY, USER_ID);
  invalidateDashboardSnapshotForLastSignedInUser();
  // The cached snapshot is gone, but the user marker itself survives.
  expect(readDashboardSnapshotCache(USER_ID)).toBeNull();
  expect(window.sessionStorage.getItem(LAST_DASHBOARD_USER_KEY)).toBe(USER_ID);
});
it("does not fail if no last dashboard user exists", () => {
  // Without a recorded user the invalidation must be a silent no-op.
  expect(() => invalidateDashboardSnapshotForLastSignedInUser()).not.toThrow();
});
it("emits a localStorage invalidation signal", () => {
  emitDashboardSnapshotCacheInvalidationSignal();
  const raw = window.localStorage.getItem(INVALIDATION_SIGNAL_KEY);
  // The signal payload is a stringified positive number (a timestamp-like value).
  expect(typeof raw).toBe("string");
  expect(Number(raw)).toBeGreaterThan(0);
});
});

View File

@@ -0,0 +1,155 @@
/* @vitest-environment jsdom */
import React, { act, useEffect } from "react";
import { createRoot, type Root } from "react-dom/client";
import { afterEach, describe, expect, it, vi } from "vitest";
import type { Id } from "@/convex/_generated/dataModel";
// Mocks are created via vi.hoisted so the vi.mock factories below — which
// vitest hoists above all other statements — can safely reference them.
const getImageDimensionsMock = vi.hoisted(() => vi.fn());
const createCompressedImagePreviewMock = vi.hoisted(() => vi.fn());
const invalidateDashboardSnapshotForLastSignedInUserMock = vi.hoisted(() => vi.fn());
const emitDashboardSnapshotCacheInvalidationSignalMock = vi.hoisted(() => vi.fn());
// Replace the media helpers so no real image decoding happens under jsdom.
vi.mock("@/components/canvas/canvas-media-utils", () => ({
getImageDimensions: getImageDimensionsMock,
createCompressedImagePreview: createCompressedImagePreviewMock,
}));
// Capture dashboard-cache invalidation calls for the assertions below.
vi.mock("@/lib/dashboard-snapshot-cache", () => ({
invalidateDashboardSnapshotForLastSignedInUser:
invalidateDashboardSnapshotForLastSignedInUserMock,
emitDashboardSnapshotCacheInvalidationSignal:
emitDashboardSnapshotCacheInvalidationSignalMock,
}));
// Silence toast notifications that the drop handler may trigger.
vi.mock("@/lib/toast", () => ({
toast: {
error: vi.fn(),
warning: vi.fn(),
},
}));
import { useCanvasDrop } from "@/components/canvas/use-canvas-drop";
// Module-level ref through which the harness exposes the hook's latest
// return value, so tests can invoke handlers directly.
const latestHandlers: {
current: ReturnType<typeof useCanvasDrop> | null;
} = { current: null };
type RunCreateNodeOnlineOnly = Parameters<typeof useCanvasDrop>[0]["runCreateNodeOnlineOnly"];
type HarnessProps = {
runCreateNodeOnlineOnly: RunCreateNodeOnlineOnly;
};
/**
 * Mounts useCanvasDrop with deterministic stub collaborators and publishes
 * the hook's return value through the module-level `latestHandlers` ref so
 * tests can drive the drop handlers directly. Renders nothing.
 */
function HookHarness({ runCreateNodeOnlineOnly }: HarnessProps) {
  const handlers = useCanvasDrop({
    canvasId: "canvas_1" as Id<"canvases">,
    isSyncOnline: true,
    t: (key: string) => key,
    edges: [],
    screenToFlowPosition: ({ x, y }) => ({ x, y }),
    generateUploadUrl: async () => "https://upload.example.com",
    registerUploadedImageMedia: async () => ({ ok: true }),
    runCreateNodeOnlineOnly,
    runCreateNodeWithEdgeSplitOnlineOnly: async () => "node_split_1" as Id<"nodes">,
    notifyOfflineUnsupported: () => {},
    syncPendingMoveForClientRequest: async () => {},
  });

  useEffect(() => {
    latestHandlers.current = handlers;
    return () => {
      latestHandlers.current = null;
    };
  }, [handlers]);

  return null;
}
// Opt this non-React-Testing-Library setup into act() semantics so React
// flushes effects inside the act() calls below.
(globalThis as typeof globalThis & { IS_REACT_ACT_ENVIRONMENT?: boolean }).IS_REACT_ACT_ENVIRONMENT = true;
// End-to-end check of the dropped-image upload path: the hook should upload
// the file, create a node, and invalidate the dashboard snapshot cache.
describe("useCanvasDrop image upload path", () => {
let container: HTMLDivElement | null = null;
let root: Root | null = null;
// Tear down the React tree, DOM container, shared ref, mocks, and global
// stubs so each test starts from a clean slate.
afterEach(async () => {
if (root) {
await act(async () => {
root?.unmount();
});
}
container?.remove();
container = null;
root = null;
latestHandlers.current = null;
getImageDimensionsMock.mockReset();
createCompressedImagePreviewMock.mockReset();
invalidateDashboardSnapshotForLastSignedInUserMock.mockReset();
emitDashboardSnapshotCacheInvalidationSignalMock.mockReset();
vi.unstubAllGlobals();
});
it("invalidates dashboard snapshot after successful dropped image upload", async () => {
// Media helpers resolve synchronously with fixed dimensions/preview data.
getImageDimensionsMock.mockResolvedValue({ width: 640, height: 480 });
createCompressedImagePreviewMock.mockResolvedValue({
blob: new Blob(["preview"], { type: "image/webp" }),
width: 640,
height: 480,
});
// Two sequential fetch responses; judging by the storage ids, presumably the
// full image upload followed by the compressed-preview upload — TODO confirm
// the order against use-canvas-drop.
const fetchMock = vi
.fn()
.mockResolvedValueOnce({
ok: true,
json: async () => ({ storageId: "storage_1" }),
})
.mockResolvedValueOnce({
ok: true,
json: async () => ({ storageId: "preview_storage_1" }),
});
vi.stubGlobal("fetch", fetchMock);
// Deterministic client request id for the upload.
vi.stubGlobal("crypto", {
randomUUID: () => "client-request-id",
});
const runCreateNodeOnlineOnly = vi
.fn<HarnessProps["runCreateNodeOnlineOnly"]>()
.mockResolvedValue("node_1" as Id<"nodes">);
container = document.createElement("div");
document.body.appendChild(container);
root = createRoot(container);
await act(async () => {
root?.render(
React.createElement(HookHarness, {
runCreateNodeOnlineOnly,
}),
);
});
const file = new File(["file"], "drop.png", { type: "image/png" });
// Minimal DragEvent stand-in carrying one dropped file.
await act(async () => {
await latestHandlers.current?.onDrop({
preventDefault: () => {},
clientX: 120,
clientY: 80,
dataTransfer: {
getData: () => "",
files: [file],
},
} as unknown as React.DragEvent);
});
// Extra microtask flush so trailing async work inside the hook settles.
await act(async () => {
await Promise.resolve();
});
expect(fetchMock).toHaveBeenCalledTimes(2);
expect(runCreateNodeOnlineOnly).toHaveBeenCalledTimes(1);
expect(invalidateDashboardSnapshotForLastSignedInUserMock).toHaveBeenCalledTimes(1);
expect(emitDashboardSnapshotCacheInvalidationSignalMock).toHaveBeenCalledTimes(1);
});
});

View File

@@ -23,9 +23,12 @@ export default defineConfig({
"components/canvas/__tests__/use-canvas-edge-insertions.test.tsx",
"components/canvas/__tests__/use-canvas-edge-types.test.tsx",
"components/canvas/__tests__/use-canvas-node-interactions.test.tsx",
"components/canvas/__tests__/canvas-delete-handlers.test.tsx",
"components/canvas/__tests__/canvas-media-utils.test.ts",
"components/canvas/__tests__/use-node-local-data.test.tsx",
"components/canvas/__tests__/use-canvas-sync-engine.test.ts",
"components/canvas/__tests__/use-canvas-sync-engine-hook.test.tsx",
"components/media/__tests__/media-preview-utils.test.ts",
],
},
});