Implement fullscreen preview functionality and optimize storage URL handling

- Added fullscreen output capability for render nodes, allowing users to view images in a larger format.
- Introduced a dialog component for fullscreen image display, including a close button.
- Enhanced storage URL resolution with performance logging to surface slow queries, and parallelized per-image URL lookups during frame export to improve efficiency.
- Updated the credit, edge, frame-export, node, and storage queries and handlers to include performance metrics (slow-query warnings above a 250 ms threshold) for better monitoring and debugging.
This commit is contained in:
Matthias
2026-04-02 12:28:36 +02:00
parent f3c5c2d8f1
commit 3fa686d60d
6 changed files with 348 additions and 77 deletions

View File

@@ -47,6 +47,8 @@ export const TIER_CONFIG = {
export type Tier = keyof typeof TIER_CONFIG;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
// ============================================================================
// Queries
// ============================================================================
@@ -189,19 +191,36 @@ export const getUsageStats = query({
const now = new Date();
const monthStart = new Date(now.getFullYear(), now.getMonth(), 1).getTime();
const startedAt = Date.now();
const transactions = await ctx.db
.query("creditTransactions")
.withIndex("by_user", (q) => q.eq("userId", user.userId))
.withIndex("by_user_type", (q) =>
q.eq("userId", user.userId).eq("type", "usage")
)
.order("desc")
.collect();
const monthlyTransactions = transactions.filter(
(t) =>
t._creationTime >= monthStart &&
t.status === "committed" &&
t.type === "usage"
);
const monthlyTransactions = [] as Array<typeof transactions[0]>;
for (const transaction of transactions) {
if (transaction._creationTime < monthStart) {
break;
}
if (transaction.status === "committed") {
monthlyTransactions.push(transaction);
}
}
const durationMs = Date.now() - startedAt;
if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[credits.getUsageStats] slow usage stats query", {
userId: user.userId,
durationMs,
scannedTransactionCount: transactions.length,
includedCount: monthlyTransactions.length,
});
}
return {
monthlyUsage: monthlyTransactions.reduce(

View File

@@ -4,6 +4,8 @@ import { requireAuth } from "./helpers";
import type { Doc, Id } from "./_generated/dataModel";
import { isAdjustmentNodeType } from "../lib/canvas-node-types";
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
async function assertTargetAllowsIncomingEdge(
ctx: MutationCtx,
args: {
@@ -19,15 +21,37 @@ async function assertTargetAllowsIncomingEdge(
return;
}
const incomingEdges = await ctx.db
const incomingEdgesQuery = ctx.db
.query("edges")
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId))
.collect();
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId));
const existingIncoming = incomingEdges.filter(
(edge: Doc<"edges">) => edge._id !== args.edgeIdToIgnore,
const checkStartedAt = Date.now();
const incomingEdges = await (
args.edgeIdToIgnore
? incomingEdgesQuery.take(2)
: incomingEdgesQuery.first()
);
if (existingIncoming.length >= 1) {
const checkDurationMs = Date.now() - checkStartedAt;
const hasAnyIncoming = Array.isArray(incomingEdges)
? incomingEdges.some((edge: Doc<"edges">) => edge._id !== args.edgeIdToIgnore)
: incomingEdges !== null && incomingEdges._id !== args.edgeIdToIgnore;
if (checkDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
const inspected = Array.isArray(incomingEdges)
? incomingEdges.length
: incomingEdges === null
? 0
: 1;
console.warn("[edges.assertTargetAllowsIncomingEdge] slow incoming edge check", {
targetNodeId: args.targetNodeId,
edgeIdToIgnore: args.edgeIdToIgnore,
inspected,
checkDurationMs,
});
}
if (hasAnyIncoming) {
throw new Error("Adjustment nodes allow only one incoming edge.");
}
}

View File

@@ -19,14 +19,25 @@ export const exportFrame = action({
frameNodeId: v.id("nodes"),
},
handler: async (ctx, args) => {
const startedAt = Date.now();
const identity = await ctx.auth.getUserIdentity();
if (!identity) throw new Error("Not authenticated");
// ── 1. Load the frame node ─────────────────────────────────────────────
const frame = await ctx.runQuery(api.nodes.get, { nodeId: args.frameNodeId });
const frame = await ctx.runQuery(api.nodes.get, {
nodeId: args.frameNodeId,
includeStorageUrl: false,
});
if (!frame) throw new Error("Frame node not found");
if (frame.type !== "frame") throw new Error("Node is not a frame");
const authorizedCanvas = await ctx.runQuery(api.canvases.get, {
canvasId: frame.canvasId,
});
if (!authorizedCanvas) {
throw new Error("Not authorized for canvas");
}
const frameData = frame.data as {
label?: string;
width?: number;
@@ -73,10 +84,39 @@ export const exportFrame = action({
color: 0xffffffff, // white background
});
const resolveUrlsAt = Date.now();
const imageNodeUrlEntries = await Promise.all(
imageNodes.map(async (node) => {
const data = node.data as { storageId: string };
try {
const url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
return { nodeId: node._id, url };
} catch (error) {
console.warn("[exportFrame] failed to resolve storage URL", {
nodeId: node._id,
storageId: data.storageId,
error: String(error),
});
return { nodeId: node._id, url: null };
}
}),
);
const resolveUrlsDurationMs = Date.now() - resolveUrlsAt;
if (resolveUrlsDurationMs >= 250) {
console.warn("[exportFrame] slow storage URL resolution", {
frameNodeId: args.frameNodeId,
imageCount: imageNodes.length,
resolvedCount: imageNodeUrlEntries.filter((entry) => entry?.url).length,
durationMs: resolveUrlsDurationMs,
});
}
// ── 4. Fetch, resize and composite each image ──────────────────────────
const urlByNodeId = new Map(imageNodeUrlEntries.map((entry) => [entry.nodeId, entry.url]));
const resolveImageDataAt = Date.now();
for (const node of imageNodes) {
const data = node.data as { storageId: string };
const url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
const url = urlByNodeId.get(node._id) ?? null;
if (!url) continue;
const response = await fetch(url);
@@ -95,6 +135,14 @@ export const exportFrame = action({
base.composite(img, relX, relY);
}
const resolveImageDataDurationMs = Date.now() - resolveImageDataAt;
if (resolveImageDataDurationMs >= 250) {
console.warn("[exportFrame] slow image download loop", {
frameNodeId: args.frameNodeId,
durationMs: resolveImageDataDurationMs,
});
}
// ── 5. Encode to PNG buffer ────────────────────────────────────────────
const outputBuffer = await base.getBuffer("image/png");
@@ -105,6 +153,14 @@ export const exportFrame = action({
const downloadUrl = await ctx.storage.getUrl(storageId);
if (!downloadUrl) throw new Error("Failed to generate download URL");
const totalDurationMs = Date.now() - startedAt;
if (totalDurationMs >= 500) {
console.warn("[exportFrame] slow total export execution", {
frameNodeId: args.frameNodeId,
durationMs: totalDurationMs,
});
}
return {
url: downloadUrl,
storageId,

View File

@@ -62,6 +62,8 @@ const DEFAULT_RENDER_FORMAT = "png" as const;
const DEFAULT_RENDER_JPEG_QUALITY = 90;
const ADJUSTMENT_MIN_WIDTH = 240;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
type RenderOutputResolution = (typeof RENDER_OUTPUT_RESOLUTIONS)[number];
type RenderFormat = (typeof RENDER_FORMATS)[number];
@@ -387,12 +389,35 @@ async function assertTargetAllowsIncomingEdge(
return;
}
const incomingEdges = await ctx.db
const incomingEdgesQuery = ctx.db
.query("edges")
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId))
.collect();
const existingIncoming = incomingEdges.filter((edge) => edge._id !== args.edgeIdToIgnore);
if (existingIncoming.length >= 1) {
.withIndex("by_target", (q) => q.eq("targetNodeId", args.targetNodeId));
const checkStartedAt = Date.now();
const incomingEdges = await (
args.edgeIdToIgnore ? incomingEdgesQuery.take(2) : incomingEdgesQuery.first()
);
const checkDurationMs = Date.now() - checkStartedAt;
const hasAnyIncoming = Array.isArray(incomingEdges)
? incomingEdges.some((edge) => edge._id !== args.edgeIdToIgnore)
: incomingEdges !== null && incomingEdges._id !== args.edgeIdToIgnore;
if (checkDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
const inspected = Array.isArray(incomingEdges)
? incomingEdges.length
: incomingEdges === null
? 0
: 1;
console.warn("[nodes.assertTargetAllowsIncomingEdge] slow incoming edge check", {
targetNodeId: args.targetNodeId,
edgeIdToIgnore: args.edgeIdToIgnore,
inspected,
checkDurationMs,
});
}
if (hasAnyIncoming) {
throw new Error("Adjustment nodes allow only one incoming edge.");
}
}
@@ -472,9 +497,14 @@ export const list = query({
* Einzelnen Node laden.
*/
export const get = query({
args: { nodeId: v.id("nodes") },
handler: async (ctx, { nodeId }) => {
args: {
nodeId: v.id("nodes"),
includeStorageUrl: v.optional(v.boolean()),
},
handler: async (ctx, { nodeId, includeStorageUrl }) => {
const user = await requireAuth(ctx);
const startedAt = Date.now();
const shouldIncludeStorageUrl = includeStorageUrl ?? true;
const node = await ctx.db.get(nodeId);
if (!node) return null;
@@ -483,27 +513,57 @@ export const get = query({
return null;
}
const data = node.data as Record<string, unknown> | undefined;
if (!data?.storageId) {
return node;
}
if (!shouldIncludeStorageUrl) {
const durationMs = Date.now() - startedAt;
if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[nodes.get] fast path query", {
nodeId,
durationMs,
includeStorageUrl,
shouldIncludeStorageUrl,
});
}
return node;
}
let url: string | null;
try {
url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
} catch (error) {
console.warn("[nodes.get] failed to resolve storage URL", {
nodeId: node._id,
storageId: data.storageId,
error: String(error),
});
return node;
}
const data = node.data as Record<string, unknown> | undefined;
if (!data?.storageId) {
const durationMs = Date.now() - startedAt;
if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[nodes.get] no storage URL query", {
nodeId,
durationMs,
});
}
return node;
}
return {
...node,
data: {
...data,
let url: string | null;
try {
const getUrlStartedAt = Date.now();
url = await ctx.storage.getUrl(data.storageId as Id<"_storage">);
const getUrlDurationMs = Date.now() - getUrlStartedAt;
if (getUrlDurationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[nodes.get] slow storage URL resolution", {
nodeId: node._id,
storageId: data.storageId,
getUrlDurationMs,
durationMs: Date.now() - startedAt,
});
}
} catch (error) {
console.warn("[nodes.get] failed to resolve storage URL", {
nodeId: node._id,
storageId: data.storageId,
error: String(error),
});
return node;
}
return {
...node,
data: {
...data,
url: url ?? undefined,
},
};

View File

@@ -4,6 +4,17 @@ import { requireAuth } from "./helpers";
import type { Id } from "./_generated/dataModel";
const STORAGE_URL_BATCH_SIZE = 12;
const PERFORMANCE_LOG_THRESHOLD_MS = 250;
function logSlowQuery(label: string, startedAt: number, details: Record<string, unknown>) {
const durationMs = Date.now() - startedAt;
if (durationMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn(`[storage] ${label} slow`, {
durationMs,
...details,
});
}
}
type StorageUrlMap = Record<string, string | undefined>;
@@ -58,10 +69,16 @@ async function resolveStorageUrls(
storageIds: Array<Id<"_storage">>,
): Promise<StorageUrlMap> {
const resolved: StorageUrlMap = {};
const operationStartedAt = Date.now();
let failedCount = 0;
let totalResolved = 0;
for (let i = 0; i < storageIds.length; i += STORAGE_URL_BATCH_SIZE) {
const batch = storageIds.slice(i, i + STORAGE_URL_BATCH_SIZE);
const batchStartedAt = Date.now();
let batchFailedCount = 0;
const entries = await Promise.all(
batch.map(async (id): Promise<StorageUrlResult> => {
try {
@@ -79,6 +96,8 @@ async function resolveStorageUrls(
for (const entry of entries) {
if (entry.error) {
failedCount += 1;
batchFailedCount += 1;
console.warn("[storage.batchGetUrlsForCanvas] getUrl failed", {
storageId: entry.storageId,
error: entry.error,
@@ -88,9 +107,25 @@ async function resolveStorageUrls(
const { storageId, url } = entry;
resolved[storageId] = url ?? undefined;
if (url) {
totalResolved += 1;
}
}
logSlowQuery("batchGetUrlsForCanvas::resolveStorageBatch", batchStartedAt, {
batchSize: batch.length,
successCount: entries.length - batchFailedCount,
failedCount: batchFailedCount,
cursor: `${i + 1}..${Math.min(i + STORAGE_URL_BATCH_SIZE, storageIds.length)} / ${storageIds.length}`,
});
}
logSlowQuery("batchGetUrlsForCanvas", operationStartedAt, {
requestStorageCount: storageIds.length,
resolvedCount: totalResolved,
failedCount,
});
return resolved;
}
@@ -109,12 +144,30 @@ export const generateUploadUrl = mutation({
export const batchGetUrlsForCanvas = query({
args: { canvasId: v.id("canvases") },
handler: async (ctx, { canvasId }) => {
const startedAt = Date.now();
const user = await requireAuth(ctx);
await assertCanvasOwner(ctx, canvasId, user.userId);
const nodes = await listNodesForCanvas(ctx, canvasId);
const nodeCount = nodes.length;
const storageIds = collectStorageIds(nodes);
const collectTimeMs = Date.now() - startedAt;
if (collectTimeMs >= PERFORMANCE_LOG_THRESHOLD_MS) {
console.warn("[storage.batchGetUrlsForCanvas] slow node scan", {
canvasId,
nodeCount,
storageIdCount: storageIds.length,
durationMs: collectTimeMs,
});
}
return await resolveStorageUrls(ctx, storageIds);
const result = await resolveStorageUrls(ctx, storageIds);
logSlowQuery("batchGetUrlsForCanvas::total", startedAt, {
canvasId,
nodeCount,
storageIdCount: storageIds.length,
resolvedCount: Object.keys(result).length,
});
return result;
},
});