refactor(canvas): integrate graph-based handling for image source resolution and pipeline steps

This commit is contained in:
2026-04-04 10:28:20 +02:00
parent 90d6fe55b1
commit 12cd75c836
11 changed files with 477 additions and 218 deletions

View File

@@ -1,10 +1,14 @@
"use client";
import { useMemo } from "react";
import { useStore, type Node } from "@xyflow/react";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
import { collectPipeline, getSourceImage, type PipelineStep } from "@/lib/image-pipeline/contracts";
import {
collectPipelineFromGraph,
getSourceImageFromGraph,
type PipelineStep,
} from "@/lib/canvas-render-preview";
const PREVIEW_PIPELINE_TYPES = new Set([
"curves",
@@ -13,19 +17,6 @@ const PREVIEW_PIPELINE_TYPES = new Set([
"detail-adjust",
]);
// Resolves the best displayable image URL for a canvas node.
// Prefers the full-resolution `url` and falls back to `previewUrl`;
// returns null when neither is a non-empty string.
function resolveNodeImageUrl(node: Node): string | null {
  const data = (node.data ?? {}) as Record<string, unknown>;
  for (const key of ["url", "previewUrl"] as const) {
    const candidate = data[key];
    if (typeof candidate === "string" && candidate.length > 0) {
      return candidate;
    }
  }
  return null;
}
function compactHistogram(values: readonly number[], points = 64): number[] {
if (points <= 0) {
return [];
@@ -76,39 +67,31 @@ export default function AdjustmentPreview({
currentType: string;
currentParams: unknown;
}) {
const nodes = useStore((state) => state.nodes);
const edges = useStore((state) => state.edges);
const pipelineNodes = useMemo(
() => nodes.map((node) => ({ id: node.id, type: node.type ?? "", data: node.data })),
[nodes],
);
const pipelineEdges = useMemo(
() => edges.map((edge) => ({ source: edge.source, target: edge.target })),
[edges],
);
const graph = useCanvasGraph();
const sourceUrl = useMemo(
() =>
getSourceImage({
getSourceImageFromGraph(graph, {
nodeId,
nodes: pipelineNodes,
edges: pipelineEdges,
isSourceNode: (node) =>
node.type === "image" || node.type === "ai-image" || node.type === "asset",
getSourceImageFromNode: (node) => {
const sourceNode = nodes.find((candidate) => candidate.id === node.id);
return sourceNode ? resolveNodeImageUrl(sourceNode) : null;
const sourceData = (node.data ?? {}) as Record<string, unknown>;
const directUrl = typeof sourceData.url === "string" ? sourceData.url : null;
if (directUrl && directUrl.length > 0) {
return directUrl;
}
const previewUrl =
typeof sourceData.previewUrl === "string" ? sourceData.previewUrl : null;
return previewUrl && previewUrl.length > 0 ? previewUrl : null;
},
}),
[nodeId, nodes, pipelineEdges, pipelineNodes],
[graph, nodeId],
);
const steps = useMemo(() => {
const collected = collectPipeline({
const collected = collectPipelineFromGraph(graph, {
nodeId,
nodes: pipelineNodes,
edges: pipelineEdges,
isPipelineNode: (node) => PREVIEW_PIPELINE_TYPES.has(node.type ?? ""),
});
@@ -121,13 +104,18 @@ export default function AdjustmentPreview({
}
return step as PipelineStep;
});
}, [currentParams, currentType, nodeId, pipelineEdges, pipelineNodes]);
}, [currentParams, currentType, graph, nodeId]);
const { canvasRef, histogram, isRendering, hasSource, previewAspectRatio, error } =
usePipelinePreview({
sourceUrl,
steps,
nodeWidth,
// The preview must stay clearly readable inside the node, but should
// not be rendered at full display resolution.
previewScale: 0.5,
maxPreviewWidth: 720,
maxDevicePixelRatio: 1.25,
});
const histogramSeries = useMemo(() => {

View File

@@ -1,13 +1,14 @@
"use client";
import { useCallback, useMemo, useRef, useState } from "react";
import { Handle, Position, useStore, type NodeProps } from "@xyflow/react";
import { Handle, Position, type NodeProps } from "@xyflow/react";
import { ImageIcon } from "lucide-react";
import BaseNodeWrapper from "./base-node-wrapper";
import CompareSurface from "./compare-surface";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import {
resolveRenderPipelineHash,
resolveRenderPreviewInput,
resolveRenderPreviewInputFromGraph,
type RenderPreviewInput,
} from "@/lib/canvas-render-preview";
@@ -31,31 +32,13 @@ type CompareDisplayMode = "render" | "preview";
export default function CompareNode({ id, data, selected, width }: NodeProps) {
const nodeData = data as CompareNodeData;
const nodes = useStore((state) => state.nodes);
const edges = useStore((state) => state.edges);
const graph = useCanvasGraph();
const [sliderX, setSliderX] = useState(50);
const [manualDisplayMode, setManualDisplayMode] = useState<CompareDisplayMode | null>(null);
const containerRef = useRef<HTMLDivElement>(null);
const pipelineNodes = useMemo(
() => nodes.map((node) => ({ id: node.id, type: node.type ?? "", data: node.data })),
[nodes],
);
const pipelineEdges = useMemo(
() => edges.map((edge) => ({ source: edge.source, target: edge.target })),
[edges],
);
const nodesById = useMemo(() => new Map(nodes.map((node) => [node.id, node])), [nodes]);
const incomingEdges = useMemo(
() =>
edges.filter(
(edge) =>
edge.target === id &&
edge.className !== "temp" &&
(edge.targetHandle === "left" || edge.targetHandle === "right"),
),
[edges, id],
() => graph.incomingEdgesByTarget.get(id) ?? [],
[graph, id],
);
const resolvedSides = useMemo(() => {
@@ -66,7 +49,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
defaultLabel: string,
): CompareSideState => {
const incomingEdge = incomingEdges.find((edge) => edge.targetHandle === side);
const sourceNode = incomingEdge ? nodesById.get(incomingEdge.source) : undefined;
const sourceNode = incomingEdge ? graph.nodesById.get(incomingEdge.source) : undefined;
const sourceData = (sourceNode?.data ?? {}) as Record<string, unknown>;
const sourceLabel =
typeof sourceData.label === "string" && sourceData.label.length > 0
@@ -79,10 +62,9 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
let isStaleRenderOutput = false;
if (sourceNode && sourceNode.type === "render") {
const preview = resolveRenderPreviewInput({
const preview = resolveRenderPreviewInputFromGraph({
nodeId: sourceNode.id,
nodes: pipelineNodes,
edges: pipelineEdges,
graph,
});
if (preview.sourceUrl) {
@@ -132,9 +114,7 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
nodeData.leftUrl,
nodeData.rightLabel,
nodeData.rightUrl,
nodesById,
pipelineEdges,
pipelineNodes,
graph,
]);
const hasLeft = Boolean(resolvedSides.left.finalUrl || resolvedSides.left.previewInput);
@@ -142,10 +122,10 @@ export default function CompareNode({ id, data, selected, width }: NodeProps) {
const hasConnectedRenderInput = useMemo(
() =>
incomingEdges.some((edge) => {
const sourceNode = nodesById.get(edge.source);
const sourceNode = graph.nodesById.get(edge.source);
return sourceNode?.type === "render";
}),
[incomingEdges, nodesById],
[graph, incomingEdges],
);
const shouldDefaultToPreview =
hasConnectedRenderInput ||

View File

@@ -31,8 +31,11 @@ export default function CompareSurface({
sourceUrl: previewSourceUrl,
steps: previewSteps,
nodeWidth,
previewScale: 0.7,
maxPreviewWidth: 960,
// Compare nodes only show a compact live view; smaller tiles keep
// long workflows noticeably more responsive.
previewScale: 0.5,
maxPreviewWidth: 720,
maxDevicePixelRatio: 1.25,
});
const hasPreview = Boolean(usePreview && previewInput);

View File

@@ -1,7 +1,7 @@
"use client";
import { useEffect, useMemo, useRef, useState } from "react";
import { Handle, Position, useStore, type Node, type NodeProps } from "@xyflow/react";
import { Handle, Position, type Node, type NodeProps } from "@xyflow/react";
import { AlertCircle, ArrowDown, CheckCircle2, CloudUpload, Loader2, Maximize2, X } from "lucide-react";
import { useMutation } from "convex/react";
@@ -12,10 +12,14 @@ import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import { api } from "@/convex/_generated/api";
import { useDebouncedCallback } from "@/hooks/use-debounced-callback";
import { usePipelinePreview } from "@/hooks/use-pipeline-preview";
import { resolveRenderPreviewInput } from "@/lib/canvas-render-preview";
import { useCanvasGraph } from "@/components/canvas/canvas-graph-context";
import {
findSourceNodeFromGraph,
resolveRenderPreviewInputFromGraph,
} from "@/lib/canvas-render-preview";
import { resolveMediaAspectRatio } from "@/lib/canvas-utils";
import { parseAspectRatioString } from "@/lib/image-formats";
import { getSourceImage, hashPipeline } from "@/lib/image-pipeline/contracts";
import { hashPipeline } from "@/lib/image-pipeline/contracts";
import {
isPipelineAbortError,
renderFullWithWorkerFallback,
@@ -431,8 +435,7 @@ async function uploadBlobToConvex(args: {
export default function RenderNode({ id, data, selected, width, height }: NodeProps<RenderNodeType>) {
const generateUploadUrl = useMutation(api.storage.generateUploadUrl);
const { queueNodeDataUpdate, queueNodeResize, status } = useCanvasSync();
const nodes = useStore((state) => state.nodes);
const edges = useStore((state) => state.edges);
const graph = useCanvasGraph();
const [localData, setLocalData] = useState<PersistedRenderData>(() =>
sanitizeRenderData(data),
@@ -485,24 +488,13 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
});
};
const pipelineNodes = useMemo(
() => nodes.map((node) => ({ id: node.id, type: node.type ?? "", data: node.data })),
[nodes],
);
const pipelineEdges = useMemo(
() => edges.map((edge) => ({ source: edge.source, target: edge.target })),
[edges],
);
const renderPreviewInput = useMemo(
() =>
resolveRenderPreviewInput({
resolveRenderPreviewInputFromGraph({
nodeId: id,
nodes: pipelineNodes,
edges: pipelineEdges,
graph,
}),
[id, pipelineEdges, pipelineNodes],
[graph, id],
);
const sourceUrl = renderPreviewInput.sourceUrl;
@@ -531,15 +523,13 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
const sourceNode = useMemo<SourceNodeDescriptor | null>(
() =>
getSourceImage({
findSourceNodeFromGraph(graph, {
nodeId: id,
nodes: pipelineNodes,
edges: pipelineEdges,
isSourceNode: (node) =>
node.type === "image" || node.type === "ai-image" || node.type === "asset",
getSourceImageFromNode: (node) => node as SourceNodeDescriptor,
getSourceImageFromNode: () => true,
}),
[id, pipelineEdges, pipelineNodes],
[graph, id],
);
const steps = renderPreviewInput.steps;
@@ -608,8 +598,11 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
sourceUrl,
steps,
nodeWidth: previewNodeWidth,
previewScale: 0.7,
maxPreviewWidth: 960,
// Inline preview: deliberately kept compact so that changes become
// visible faster, especially in long graphs.
previewScale: 0.5,
maxPreviewWidth: 720,
maxDevicePixelRatio: 1.25,
});
const fullscreenPreviewWidth = Math.max(960, Math.round((width ?? 320) * 3));
@@ -621,8 +614,9 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
sourceUrl: isFullscreenOpen && sourceUrl ? sourceUrl : null,
steps,
nodeWidth: fullscreenPreviewWidth,
previewScale: 1,
maxPreviewWidth: 3072,
previewScale: 0.85,
maxPreviewWidth: 1920,
maxDevicePixelRatio: 1.5,
});
const targetAspectRatio = useMemo(() => {