Implement fullscreen preview functionality and optimize storage URL handling

- Added fullscreen output capability for render nodes, allowing users to view images in a larger format.
- Introduced a dialog component for fullscreen image display, including a close button.
- Enhanced storage URL resolution with performance logging to identify slow queries and improve efficiency.
- Updated various queries and handlers to include performance metrics for better monitoring and debugging.
This commit is contained in:
Matthias
2026-04-02 12:28:36 +02:00
parent f3c5c2d8f1
commit 3fa686d60d
6 changed files with 348 additions and 77 deletions

View File

@@ -2,8 +2,8 @@
import { useEffect, useMemo, useRef, useState } from "react"; import { useEffect, useMemo, useRef, useState } from "react";
import { Handle, Position, useStore, type Node, type NodeProps } from "@xyflow/react"; import { Handle, Position, useStore, type Node, type NodeProps } from "@xyflow/react";
import { AlertCircle, ArrowDown, CheckCircle2, CloudUpload, Loader2 } from "lucide-react"; import { AlertCircle, ArrowDown, CheckCircle2, CloudUpload, Loader2, Maximize2, X } from "lucide-react";
import { useConvex, useMutation } from "convex/react"; import { useMutation } from "convex/react";
import BaseNodeWrapper from "@/components/canvas/nodes/base-node-wrapper"; import BaseNodeWrapper from "@/components/canvas/nodes/base-node-wrapper";
import { SliderRow } from "@/components/canvas/nodes/adjustment-controls"; import { SliderRow } from "@/components/canvas/nodes/adjustment-controls";
@@ -22,6 +22,7 @@ import {
} from "@/lib/image-pipeline/contracts"; } from "@/lib/image-pipeline/contracts";
import { bridge } from "@/lib/image-pipeline/bridge"; import { bridge } from "@/lib/image-pipeline/bridge";
import type { Id } from "@/convex/_generated/dataModel"; import type { Id } from "@/convex/_generated/dataModel";
import { Dialog, DialogContent, DialogTitle } from "@/components/ui/dialog";
type RenderResolutionOption = "original" | "2x" | "custom"; type RenderResolutionOption = "original" | "2x" | "custom";
type RenderFormatOption = "png" | "jpeg" | "webp"; type RenderFormatOption = "png" | "jpeg" | "webp";
@@ -438,7 +439,6 @@ async function uploadBlobToConvex(args: {
} }
export default function RenderNode({ id, data, selected, width, height }: NodeProps<RenderNodeType>) { export default function RenderNode({ id, data, selected, width, height }: NodeProps<RenderNodeType>) {
const convex = useConvex();
const generateUploadUrl = useMutation(api.storage.generateUploadUrl); const generateUploadUrl = useMutation(api.storage.generateUploadUrl);
const { queueNodeDataUpdate, queueNodeResize, status } = useCanvasSync(); const { queueNodeDataUpdate, queueNodeResize, status } = useCanvasSync();
const nodes = useStore((state) => state.nodes); const nodes = useStore((state) => state.nodes);
@@ -450,6 +450,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
const [isRendering, setIsRendering] = useState(false); const [isRendering, setIsRendering] = useState(false);
const [isUploading, setIsUploading] = useState(false); const [isUploading, setIsUploading] = useState(false);
const [isMenuOpen, setIsMenuOpen] = useState(false); const [isMenuOpen, setIsMenuOpen] = useState(false);
const [isFullscreenOpen, setIsFullscreenOpen] = useState(false);
const localDataRef = useRef(localData); const localDataRef = useRef(localData);
const renderRunIdRef = useRef(0); const renderRunIdRef = useRef(0);
@@ -604,6 +605,19 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
maxPreviewWidth: 960, maxPreviewWidth: 960,
}); });
const fullscreenPreviewWidth = Math.max(960, Math.round((width ?? 320) * 3));
const {
canvasRef: fullscreenCanvasRef,
isRendering: isFullscreenPreviewRendering,
error: fullscreenPreviewError,
} = usePipelinePreview({
sourceUrl: isFullscreenOpen && sourceUrl ? sourceUrl : null,
steps,
nodeWidth: fullscreenPreviewWidth,
previewScale: 1,
maxPreviewWidth: 3072,
});
const targetAspectRatio = useMemo(() => { const targetAspectRatio = useMemo(() => {
const sourceAspectRatio = resolveSourceAspectRatio(sourceNode); const sourceAspectRatio = resolveSourceAspectRatio(sourceNode);
if (sourceAspectRatio && Number.isFinite(sourceAspectRatio) && sourceAspectRatio > 0) { if (sourceAspectRatio && Number.isFinite(sourceAspectRatio) && sourceAspectRatio > 0) {
@@ -693,6 +707,7 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
(localData.outputResolution !== "custom" || (localData.outputResolution !== "custom" ||
(typeof localData.customWidth === "number" && typeof localData.customHeight === "number")); (typeof localData.customWidth === "number" && typeof localData.customHeight === "number"));
const canUpload = canRender && !status.isOffline; const canUpload = canRender && !status.isOffline;
const canOpenFullscreen = hasSource || Boolean(localData.url);
useEffect(() => { useEffect(() => {
if (!isMenuOpen) { if (!isMenuOpen) {
@@ -840,24 +855,8 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
if (runId !== renderRunIdRef.current) return; if (runId !== renderRunIdRef.current) return;
try { // URL-Aufloesung findet ueber den Canvas-Subscription-Cache statt.
const refreshed = await convex.query(api.nodes.get, { nodeId: id as Id<"nodes"> }); // Optionaler Nachlade-Lookup ist hier nicht erforderlich.
const refreshedData = refreshed?.data as Record<string, unknown> | undefined;
const resolvedUrl =
typeof refreshedData?.url === "string" && refreshedData.url.length > 0
? refreshedData.url
: undefined;
if (resolvedUrl && runId === renderRunIdRef.current) {
await persistImmediately({
...localDataRef.current,
url: resolvedUrl,
lastUploadUrl: resolvedUrl,
});
}
} catch {
// URL-Aufloesung ist optional; storageId bleibt die persistente Referenz.
}
} catch (uploadError: unknown) { } catch (uploadError: unknown) {
if (runId !== renderRunIdRef.current) return; if (runId !== renderRunIdRef.current) return;
@@ -901,13 +900,23 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
const wrapperStatus = renderState === "rendering" ? "executing" : renderState; const wrapperStatus = renderState === "rendering" ? "executing" : renderState;
return ( return (
<BaseNodeWrapper <>
nodeType="render" <BaseNodeWrapper
selected={selected} nodeType="render"
status={wrapperStatus} selected={selected}
statusMessage={currentError ?? data._statusMessage} status={wrapperStatus}
className="flex h-full min-w-[280px] flex-col overflow-hidden border-sky-500/30" statusMessage={currentError ?? data._statusMessage}
> toolbarActions={[
{
id: "fullscreen-output",
label: "Fullscreen",
icon: <Maximize2 size={14} />,
onClick: () => setIsFullscreenOpen(true),
disabled: !canOpenFullscreen,
},
]}
className="flex h-full min-w-[280px] flex-col overflow-hidden border-sky-500/30"
>
<Handle <Handle
type="target" type="target"
position={Position.Left} position={Position.Left}
@@ -1203,11 +1212,61 @@ export default function RenderNode({ id, data, selected, width, height }: NodePr
</div> </div>
</div> </div>
<Handle <Handle
type="source" type="source"
position={Position.Right} position={Position.Right}
className="!h-3 !w-3 !border-2 !border-background !bg-sky-500" className="!h-3 !w-3 !border-2 !border-background !bg-sky-500"
/> />
</BaseNodeWrapper> </BaseNodeWrapper>
<Dialog open={isFullscreenOpen} onOpenChange={setIsFullscreenOpen}>
<DialogContent
className="inset-0 left-0 top-0 h-screen w-screen max-w-none -translate-x-0 -translate-y-0 place-items-center gap-0 rounded-none border-none bg-transparent p-0 ring-0 shadow-none sm:max-w-none"
showCloseButton={false}
>
<DialogTitle className="sr-only">Render-Ausgabe</DialogTitle>
<button
type="button"
onClick={() => setIsFullscreenOpen(false)}
aria-label="Close render preview"
className="absolute right-6 top-6 z-50 inline-flex h-10 w-10 items-center justify-center rounded-full bg-black/20 text-white/90 transition-colors hover:bg-black/30"
>
<X className="h-5 w-5" />
</button>
<div className="flex h-full w-full items-center justify-center">
{hasSource ? (
<div className="relative flex h-full w-full items-center justify-center">
<canvas
ref={fullscreenCanvasRef}
className="h-auto max-h-[80vh] w-auto max-w-[80vw] rounded-xl object-contain shadow-2xl"
/>
{isFullscreenPreviewRendering ? (
<div className="pointer-events-none absolute bottom-6 rounded-md border border-border/80 bg-background/85 px-3 py-1 text-xs text-muted-foreground backdrop-blur-sm">
Rendering preview...
</div>
) : null}
{fullscreenPreviewError ? (
<div className="pointer-events-none absolute bottom-6 rounded-md border border-red-500/40 bg-background/90 px-3 py-1 text-xs text-red-600 backdrop-blur-sm">
Preview: {fullscreenPreviewError}
</div>
) : null}
</div>
) : localData.url ? (
// eslint-disable-next-line @next/next/no-img-element
<img
src={localData.url}
alt="Render output"
className="h-auto max-h-[80vh] w-auto max-w-[80vw] rounded-xl object-contain shadow-2xl"
draggable={false}
/>
) : (
<div className="rounded-lg bg-popover/95 px-4 py-3 text-sm text-muted-foreground shadow-lg">
Keine Render-Ausgabe verfuegbar
</div>
)}
</div>
</DialogContent>
</Dialog>
</>
); );
} }

View File

@@ -47,6 +47,8 @@ export const TIER_CONFIG = {
export type Tier = keyof typeof TIER_CONFIG; export type Tier = keyof typeof TIER_CONFIG;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
// ============================================================================ // ============================================================================
// Queries // Queries
// ============================================================================ // ============================================================================
@@ -189,19 +191,36 @@ export const getUsageStats = query({
const now = new Date(); const now = new Date();
const monthStart = new Date(now.getFullYear(), now.getMonth(), 1).getTime(); const monthStart = new Date(now.getFullYear(), now.getMonth(), 1).getTime();
const startedAt = Date.now();
const transactions = await ctx.db const transactions = await ctx.db
.query("creditTransactions") .query("creditTransactions")
.withIndex("by_user", (q) => q.eq("userId", user.userId)) .withIndex("by_user_type", (q) =>
q.eq("userId", user.userId).eq("type", "usage")
)
.order("desc") .order("desc")
.collect(); .collect();
const monthlyTransactions = transactions.filter( const monthlyTransactions = [] as Array<typeof transactions[0]>;
(t) =>
t._creationTime >= monthStart && for (const transaction of transactions) {
t.status === "committed" && if (transaction._creationTime < monthStart) {
t.type === "usage" break;
); }
if (transaction.status === "committed") {
monthlyTransactions.push(transaction);
}
}
const durationMs = Date.now() - startedAt;
if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[credits.getUsageStats] slow usage stats query", {
userId: user.userId,
durationMs,
scannedTransactionCount: transactions.length,
includedCount: monthlyTransactions.length,
});
}
return { return {
monthlyUsage: monthlyTransactions.reduce( monthlyUsage: monthlyTransactions.reduce(

View File

@@ -4,6 +4,8 @@ import { requireAuth } from "./helpers";
import type { Doc, Id } from "./_generated/dataModel"; import type { Doc, Id } from "./_generated/dataModel";
import { isAdjustmentNodeType } from "../lib/canvas-node-types"; import { isAdjustmentNodeType } from "../lib/canvas-node-types";
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
async function assertTargetAllowsIncomingEdge( async function assertTargetAllowsIncomingEdge(
ctx: MutationCtx, ctx: MutationCtx,
args: { args: {
@@ -19,15 +21,37 @@ async function assertTargetAllowsIncomingEdge(
return; return;
} }
const incomingEdges = await ctx.db const incomingEdgesQuery = ctx.db
.query("edges") .query("edges")
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId)) .withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId));
.collect();
const existingIncoming = incomingEdges.filter( const checkStartedAt = Date.now();
(edge: Doc<"edges">) => edge._id !== args.edgeIdToIgnore, const incomingEdges = await (
args.edgeIdToIgnore
? incomingEdgesQuery.take(2)
: incomingEdgesQuery.first()
); );
if (existingIncoming.length >= 1) { const checkDurationMs = Date.now() - checkStartedAt;
const hasAnyIncoming = Array.isArray(incomingEdges)
? incomingEdges.some((edge: Doc<"edges">) => edge._id !== args.edgeIdToIgnore)
: incomingEdges !== null && incomingEdges._id !== args.edgeIdToIgnore;
if (checkDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
const inspected = Array.isArray(incomingEdges)
? incomingEdges.length
: incomingEdges === null
? 0
: 1;
console.warn("[edges.assertTargetAllowsIncomingEdge] slow incoming edge check", {
targetNodeId: args.targetNodeId,
edgeIdToIgnore: args.edgeIdToIgnore,
inspected,
checkDurationMs,
});
}
if (hasAnyIncoming) {
throw new Error("Adjustment nodes allow only one incoming edge."); throw new Error("Adjustment nodes allow only one incoming edge.");
} }
} }

View File

@@ -19,14 +19,25 @@ export const exportFrame = action({
frameNodeId: v.id("nodes"), frameNodeId: v.id("nodes"),
}, },
handler: async (ctx, args) => { handler: async (ctx, args) => {
const startedAt = Date.now();
const identity = await ctx.auth.getUserIdentity(); const identity = await ctx.auth.getUserIdentity();
if (!identity) throw new Error("Not authenticated"); if (!identity) throw new Error("Not authenticated");
// ── 1. Load the frame node ───────────────────────────────────────────── // ── 1. Load the frame node ─────────────────────────────────────────────
const frame = await ctx.runQuery(api.nodes.get, { nodeId: args.frameNodeId }); const frame = await ctx.runQuery(api.nodes.get, {
nodeId: args.frameNodeId,
includeStorageUrl: false,
});
if (!frame) throw new Error("Frame node not found"); if (!frame) throw new Error("Frame node not found");
if (frame.type !== "frame") throw new Error("Node is not a frame"); if (frame.type !== "frame") throw new Error("Node is not a frame");
const authorizedCanvas = await ctx.runQuery(api.canvases.get, {
canvasId: frame.canvasId,
});
if (!authorizedCanvas) {
throw new Error("Not authorized for canvas");
}
const frameData = frame.data as { const frameData = frame.data as {
label?: string; label?: string;
width?: number; width?: number;
@@ -73,10 +84,39 @@ export const exportFrame = action({
color: 0xffffffff, // white background color: 0xffffffff, // white background
}); });
const resolveUrlsAt = Date.now();
const imageNodeUrlEntries = await Promise.all(
imageNodes.map(async (node) => {
const data = node.data as { storageId: string };
try {
const url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
return { nodeId: node._id, url };
} catch (error) {
console.warn("[exportFrame] failed to resolve storage URL", {
nodeId: node._id,
storageId: data.storageId,
error: String(error),
});
return { nodeId: node._id, url: null };
}
}),
);
const resolveUrlsDurationMs = Date.now() - resolveUrlsAt;
if (resolveUrlsDurationMs >= 250) {
console.warn("[exportFrame] slow storage URL resolution", {
frameNodeId: args.frameNodeId,
imageCount: imageNodes.length,
resolvedCount: imageNodeUrlEntries.filter((entry) => entry?.url).length,
durationMs: resolveUrlsDurationMs,
});
}
// ── 4. Fetch, resize and composite each image ────────────────────────── // ── 4. Fetch, resize and composite each image ──────────────────────────
const urlByNodeId = new Map(imageNodeUrlEntries.map((entry) => [entry.nodeId, entry.url]));
const resolveImageDataAt = Date.now();
for (const node of imageNodes) { for (const node of imageNodes) {
const data = node.data as { storageId: string }; const url = urlByNodeId.get(node._id) ?? null;
const url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
if (!url) continue; if (!url) continue;
const response = await fetch(url); const response = await fetch(url);
@@ -95,6 +135,14 @@ export const exportFrame = action({
base.composite(img, relX, relY); base.composite(img, relX, relY);
} }
const resolveImageDataDurationMs = Date.now() - resolveImageDataAt;
if (resolveImageDataDurationMs >= 250) {
console.warn("[exportFrame] slow image download loop", {
frameNodeId: args.frameNodeId,
durationMs: resolveImageDataDurationMs,
});
}
// ── 5. Encode to PNG buffer ──────────────────────────────────────────── // ── 5. Encode to PNG buffer ────────────────────────────────────────────
const outputBuffer = await base.getBuffer("image/png"); const outputBuffer = await base.getBuffer("image/png");
@@ -105,6 +153,14 @@ export const exportFrame = action({
const downloadUrl = await ctx.storage.getUrl(storageId); const downloadUrl = await ctx.storage.getUrl(storageId);
if (!downloadUrl) throw new Error("Failed to generate download URL"); if (!downloadUrl) throw new Error("Failed to generate download URL");
const totalDurationMs = Date.now() - startedAt;
if (totalDurationMs >= 500) {
console.warn("[exportFrame] slow total export execution", {
frameNodeId: args.frameNodeId,
durationMs: totalDurationMs,
});
}
return { return {
url: downloadUrl, url: downloadUrl,
storageId, storageId,

View File

@@ -62,6 +62,8 @@ const DEFAULT_RENDER_FORMAT = "png" as const;
const DEFAULT_RENDER_JPEG_QUALITY = 90; const DEFAULT_RENDER_JPEG_QUALITY = 90;
const ADJUSTMENT_MIN_WIDTH = 240; const ADJUSTMENT_MIN_WIDTH = 240;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
type RenderOutputResolution = (typeof RENDER_OUTPUT_RESOLUTIONS)[number]; type RenderOutputResolution = (typeof RENDER_OUTPUT_RESOLUTIONS)[number];
type RenderFormat = (typeof RENDER_FORMATS)[number]; type RenderFormat = (typeof RENDER_FORMATS)[number];
@@ -387,12 +389,35 @@ async function assertTargetAllowsIncomingEdge(
return; return;
} }
const incomingEdges = await ctx.db const incomingEdgesQuery = ctx.db
.query("edges") .query("edges")
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId)) .withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId));
.collect();
const existingIncoming = incomingEdges.filter((edge) => edge._id !== args.edgeIdToIgnore); const checkStartedAt = Date.now();
if (existingIncoming.length >= 1) { const incomingEdges = await (
args.edgeIdToIgnore ? incomingEdgesQuery.take(2) : incomingEdgesQuery.first()
);
const checkDurationMs = Date.now() - checkStartedAt;
const hasAnyIncoming = Array.isArray(incomingEdges)
? incomingEdges.some((edge) => edge._id !== args.edgeIdToIgnore)
: incomingEdges !== null && incomingEdges._id !== args.edgeIdToIgnore;
if (checkDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
const inspected = Array.isArray(incomingEdges)
? incomingEdges.length
: incomingEdges === null
? 0
: 1;
console.warn("[nodes.assertTargetAllowsIncomingEdge] slow incoming edge check", {
targetNodeId: args.targetNodeId,
edgeIdToIgnore: args.edgeIdToIgnore,
inspected,
checkDurationMs,
});
}
if (hasAnyIncoming) {
throw new Error("Adjustment nodes allow only one incoming edge."); throw new Error("Adjustment nodes allow only one incoming edge.");
} }
} }
@@ -472,9 +497,14 @@ export const list = query({
* Einzelnen Node laden. * Einzelnen Node laden.
*/ */
export const get = query({ export const get = query({
args: { nodeId: v.id("nodes") }, args: {
handler: async (ctx, { nodeId }) => { nodeId: v.id("nodes"),
includeStorageUrl: v.optional(v.boolean()),
},
handler: async (ctx, { nodeId, includeStorageUrl }) => {
const user = await requireAuth(ctx); const user = await requireAuth(ctx);
const startedAt = Date.now();
const shouldIncludeStorageUrl = includeStorageUrl ?? true;
const node = await ctx.db.get(nodeId); const node = await ctx.db.get(nodeId);
if (!node) return null; if (!node) return null;
@@ -483,27 +513,57 @@ export const get = query({
return null; return null;
} }
const data = node.data as Record<string, unknown> | undefined; if (!shouldIncludeStorageUrl) {
if (!data?.storageId) { const durationMs = Date.now() - startedAt;
return node; if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
} console.warn("[nodes.get] fast path query", {
nodeId,
durationMs,
includeStorageUrl,
shouldIncludeStorageUrl,
});
}
return node;
}
let url: string | null; const data = node.data as Record<string, unknown> | undefined;
try { if (!data?.storageId) {
url = await ctx.storage.getUrl(data.storageId as Id<"_storage">); const durationMs = Date.now() - startedAt;
} catch (error) { if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[nodes.get] failed to resolve storage URL", { console.warn("[nodes.get] no storage URL query", {
nodeId: node._id, nodeId,
storageId: data.storageId, durationMs,
error: String(error), });
}); }
return node; return node;
} }
return { let url: string | null;
...node, try {
data: { const getUrlStartedAt = Date.now();
...data, url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
const getUrlDurationMs = Date.now() - getUrlStartedAt;
if (getUrlDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[nodes.get] slow storage URL resolution", {
nodeId: node._id,
storageId: data.storageId,
getUrlDurationMs,
durationMs: Date.now() - startedAt,
});
}
} catch (error) {
console.warn("[nodes.get] failed to resolve storage URL", {
nodeId: node._id,
storageId: data.storageId,
error: String(error),
});
return node;
}
return {
...node,
data: {
...data,
url: url ?? undefined, url: url ?? undefined,
}, },
}; };

View File

@@ -4,6 +4,17 @@ import { requireAuth } from "./helpers";
import type { Id } from "./_generated/dataModel"; import type { Id } from "./_generated/dataModel";
const STORAGE_URL_BATCH_SIZE = 12; const STORAGE_URL_BATCH_SIZE = 12;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;

/**
 * Warn about a slow storage operation once it crosses the shared threshold.
 *
 * @param label     - Operation name, embedded in the `[storage] … slow` message.
 * @param startedAt - Epoch millis captured via `Date.now()` before the operation ran.
 * @param details   - Extra structured context spread into the warning payload.
 */
function logSlowQuery(label: string, startedAt: number, details: Record<string, unknown>) {
  const elapsedMs = Date.now() - startedAt;
  // Fast operations stay silent; only threshold breaches are logged.
  if (elapsedMs < PERFORMANCE_LOG_THRESHOLD_MS) {
    return;
  }
  console.warn(`[storage] ${label} slow`, { durationMs: elapsedMs, ...details });
}
type StorageUrlMap = Record<string, string | undefined>; type StorageUrlMap = Record<string, string | undefined>;
@@ -58,10 +69,16 @@ async function resolveStorageUrls(
storageIds: Array<Id<"_storage">>, storageIds: Array<Id<"_storage">>,
): Promise<StorageUrlMap> { ): Promise<StorageUrlMap> {
const resolved: StorageUrlMap = {}; const resolved: StorageUrlMap = {};
const operationStartedAt = Date.now();
let failedCount = 0;
let totalResolved = 0;
for (let i = 0; i < storageIds.length; i += STORAGE_URL_BATCH_SIZE) { for (let i = 0; i < storageIds.length; i += STORAGE_URL_BATCH_SIZE) {
const batch = storageIds.slice(i, i + STORAGE_URL_BATCH_SIZE); const batch = storageIds.slice(i, i + STORAGE_URL_BATCH_SIZE);
const batchStartedAt = Date.now();
let batchFailedCount = 0;
const entries = await Promise.all( const entries = await Promise.all(
batch.map(async (id): Promise<StorageUrlResult> => { batch.map(async (id): Promise<StorageUrlResult> => {
try { try {
@@ -79,6 +96,8 @@ async function resolveStorageUrls(
for (const entry of entries) { for (const entry of entries) {
if (entry.error) { if (entry.error) {
failedCount += 1;
batchFailedCount += 1;
console.warn("[storage.batchGetUrlsForCanvas] getUrl failed", { console.warn("[storage.batchGetUrlsForCanvas] getUrl failed", {
storageId: entry.storageId, storageId: entry.storageId,
error: entry.error, error: entry.error,
@@ -88,9 +107,25 @@ async function resolveStorageUrls(
const { storageId, url } = entry; const { storageId, url } = entry;
resolved[storageId] = url ?? undefined; resolved[storageId] = url ?? undefined;
if (url) {
totalResolved += 1;
}
} }
logSlowQuery("batchGetUrlsForCanvas::resolveStorageBatch", batchStartedAt, {
batchSize: batch.length,
successCount: entries.length - batchFailedCount,
failedCount: batchFailedCount,
cursor: `${i + 1}..${Math.min(i + STORAGE_URL_BATCH_SIZE, storageIds.length)} / ${storageIds.length}`,
});
} }
logSlowQuery("batchGetUrlsForCanvas", operationStartedAt, {
requestStorageCount: storageIds.length,
resolvedCount: totalResolved,
failedCount,
});
return resolved; return resolved;
} }
@@ -109,12 +144,30 @@ export const generateUploadUrl = mutation({
export const batchGetUrlsForCanvas = query({ export const batchGetUrlsForCanvas = query({
args: { canvasId: v.id("canvases") }, args: { canvasId: v.id("canvases") },
handler: async (ctx, { canvasId }) => { handler: async (ctx, { canvasId }) => {
const startedAt = Date.now();
const user = await requireAuth(ctx); const user = await requireAuth(ctx);
await assertCanvasOwner(ctx, canvasId, user.userId); await assertCanvasOwner(ctx, canvasId, user.userId);
const nodes = await listNodesForCanvas(ctx, canvasId); const nodes = await listNodesForCanvas(ctx, canvasId);
const nodeCount = nodes.length;
const storageIds = collectStorageIds(nodes); const storageIds = collectStorageIds(nodes);
const collectTimeMs = Date.now() - startedAt;
if (collectTimeMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[storage.batchGetUrlsForCanvas] slow node scan", {
canvasId,
nodeCount,
storageIdCount: storageIds.length,
durationMs: collectTimeMs,
});
}
return await resolveStorageUrls(ctx, storageIds); const result = await resolveStorageUrls(ctx, storageIds);
logSlowQuery("batchGetUrlsForCanvas::total", startedAt, {
canvasId,
nodeCount,
storageIdCount: storageIds.length,
resolvedCount: Object.keys(result).length,
});
return result;
}, },
}); });