Enhance canvas functionality by adding media preview capabilities and image upload handling. Introduce compressed image previews during uploads, improve media library integration, and implement retry logic for bridge edge creation. Update dashboard to display media previews and optimize image node handling.

This commit is contained in:
Matthias
2026-04-08 20:44:31 +02:00
parent a7eb2bc99c
commit b7f24223f2
43 changed files with 4064 additions and 148 deletions

View File

@@ -67,6 +67,23 @@ export type DetailAdjustData = {
preset: string | null;
};
// Crop rectangle expressed in normalized (0..1) coordinates of the source image.
export type NormalizedCropRect = {
  x: number;
  y: number;
  width: number;
  height: number;
};

// Optional output size in pixels; a null axis means "keep the cropped size" there.
export type CropResizeOptions = {
  width: number | null;
  height: number | null;
};

// Parameter payload carried by a crop/resize pipeline step.
export type CropResizeStepParams = {
  cropRect: NormalizedCropRect;
  resize: CropResizeOptions | null;
};
export const DEFAULT_CURVES_DATA: CurvesData = {
channelMode: "rgb",
points: {
@@ -141,6 +158,16 @@ export const DEFAULT_DETAIL_ADJUST_DATA: DetailAdjustData = {
preset: null,
};
// Identity crop (full frame) with no resize — the no-op default for a crop step.
export const DEFAULT_CROP_RESIZE_STEP_PARAMS: CropResizeStepParams = {
  cropRect: {
    x: 0,
    y: 0,
    width: 1,
    height: 1,
  },
  resize: null,
};
/**
 * Deep-clone adjustment data via a JSON round-trip.
 *
 * NOTE(review): this drops undefined values and functions and mangles
 * non-JSON types (Date, Map, typed arrays) — assumes adjustment data is
 * always plain JSON-safe objects; confirm before storing richer values.
 */
export function cloneAdjustmentData<T>(value: T): T {
  return JSON.parse(JSON.stringify(value)) as T;
}
@@ -153,6 +180,54 @@ function safeNumber(value: unknown, fallback: number): number {
return typeof value === "number" && Number.isFinite(value) ? value : fallback;
}
/**
 * Coerce `value` to a positive integer (>= 1), or null when it is not a
 * usable positive finite number.
 */
function normalizePositiveIntOrNull(value: unknown): number | null {
  const isUsable = typeof value === "number" && Number.isFinite(value) && value > 0;
  return isUsable ? Math.max(1, Math.round(value)) : null;
}
/**
 * Coerce unknown input into a valid normalized crop rect: origin clamped
 * into the unit square, size clamped so the rect never extends past the
 * right/bottom edge while keeping a tiny minimum extent.
 */
function normalizeCropRect(value: unknown): NormalizedCropRect {
  const raw = (value ?? {}) as Record<string, unknown>;
  const x = clamp(safeNumber(raw.x, 0), 0, 1);
  const y = clamp(safeNumber(raw.y, 0), 0, 1);
  // Remaining room to the right/bottom edge, never collapsing below the minimum.
  const widthCeiling = Math.max(0.0001, 1 - x);
  const heightCeiling = Math.max(0.0001, 1 - y);
  const width = clamp(safeNumber(raw.width, 1), 0.0001, widthCeiling);
  const height = clamp(safeNumber(raw.height, 1), 0.0001, heightCeiling);
  return { x, y, width, height };
}
/**
 * Normalize arbitrary persisted step params into CropResizeStepParams.
 * Accepts several historical key spellings for the crop rect and resize
 * dimensions; resolves to `resize: null` when no valid size was given.
 */
export function normalizeCropResizeStepParams(value: unknown): CropResizeStepParams {
  const raw = (value ?? {}) as Record<string, unknown>;
  // Fall back to the whole object so legacy flat payloads ({ x, y, width, height }) still parse.
  const rectSource =
    (raw.cropRect as Record<string, unknown> | undefined) ??
    (raw.crop as Record<string, unknown> | undefined) ??
    (raw.rect as Record<string, unknown> | undefined) ??
    raw;
  const resizeRaw = (raw.resize ?? {}) as Record<string, unknown>;
  const width = normalizePositiveIntOrNull(resizeRaw.width ?? resizeRaw.targetWidth);
  const height = normalizePositiveIntOrNull(resizeRaw.height ?? resizeRaw.targetHeight);
  const resize = width === null && height === null ? null : { width, height };
  return { cropRect: normalizeCropRect(rectSource), resize };
}
function normalizeCurvePoints(points: unknown): CurvePoint[] {
if (!Array.isArray(points)) {
return cloneAdjustmentData(DEFAULT_CURVES_DATA.points.rgb);

View File

@@ -6,6 +6,10 @@ import {
type RenderFullOptions,
type RenderFullResult,
} from "@/lib/image-pipeline/render-types";
import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
@@ -96,21 +100,30 @@ export async function renderFull(options: RenderFullOptions): Promise<RenderFull
const { signal } = options;
const bitmap = await loadSourceBitmap(options.sourceUrl, { signal });
const resolvedSize = resolveRenderSize({
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
steps: geometrySteps,
signal,
});
const resolvedSize = resolveRenderSize({
sourceWidth: geometryResult.width,
sourceHeight: geometryResult.height,
render: options.render,
limits: options.limits,
});
const { canvas, context } = createCanvasContext(resolvedSize.width, resolvedSize.height);
context.drawImage(bitmap, 0, 0, resolvedSize.width, resolvedSize.height);
context.drawImage(geometryResult.canvas, 0, 0, resolvedSize.width, resolvedSize.height);
const imageData = context.getImageData(0, 0, resolvedSize.width, resolvedSize.height);
runFullPipelineWithBackendRouter({
pixels: imageData.data,
steps: options.steps,
steps: tonalSteps,
width: resolvedSize.width,
height: resolvedSize.height,
executionOptions: {

View File

@@ -0,0 +1,166 @@
// How output dimensions are chosen: follow the source image or use custom pixel sizes.
export type CropResizeMode = "source" | "custom";

// CSS-object-fit-style strategies for fitting the crop into the target size.
export type CropFitMode = "cover" | "contain" | "fill";

// Crop rectangle in normalized (0..1) coordinates of the source image.
export type CropRect = {
  x: number;
  y: number;
  width: number;
  height: number;
};

// Resize configuration; width/height are only present in "custom" mode.
export type CropResizeSettings = {
  mode: CropResizeMode;
  width?: number;
  height?: number;
  fit: CropFitMode;
  keepAspect: boolean;
};

// Persisted parameter payload of a crop node.
export type CropNodeData = {
  crop: CropRect;
  resize: CropResizeSettings;
};

// Smallest allowed normalized crop dimension (1% of the source).
const CROP_MIN_SIZE = 0.01;
// Bounds and default for custom pixel dimensions.
const CUSTOM_SIZE_MIN = 1;
const CUSTOM_SIZE_MAX = 16_384;
const DEFAULT_CUSTOM_SIZE = 1024;
// Heavyweight media/storage fields that must never be embedded in crop node data.
const DISALLOWED_CROP_PAYLOAD_KEYS = [
  "blob",
  "blobUrl",
  "imageData",
  "storageId",
  "url",
] as const;

// Identity crop of the full frame with source-sized output — the no-op default.
export const DEFAULT_CROP_NODE_DATA: CropNodeData = {
  crop: {
    x: 0,
    y: 0,
    width: 1,
    height: 1,
  },
  resize: {
    mode: "source",
    fit: "cover",
    keepAspect: true,
  },
};
/** Narrow an unknown value to a plain (non-null, non-array) object record. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/** Return `value` when it is a finite number; otherwise null. */
function readFiniteNumber(value: unknown): number | null {
  return typeof value === "number" && Number.isFinite(value) ? value : null;
}
/** Clamp `value` into [min, max] (min wins if the bounds are inverted). */
function clamp(value: number, min: number, max: number): number {
  const cappedAbove = Math.min(max, value);
  return Math.max(min, cappedAbove);
}
/** Clamp a nullable number into [0, 1], substituting `fallback` for null. */
function clampUnit(value: number | null, fallback: number): number {
  return value === null ? fallback : clamp(value, 0, 1);
}
/**
 * Coerce unknown input into a valid CropRect in unit coordinates.
 * Invalid or non-positive sizes fall back to the defaults; the origin is
 * then pulled back so the rect stays fully inside the unit square.
 */
function normalizeCropRect(value: unknown): CropRect {
  const raw = isRecord(value) ? value : {};
  const defaults = DEFAULT_CROP_NODE_DATA.crop;
  const rawWidth = readFiniteNumber(raw.width);
  const rawHeight = readFiniteNumber(raw.height);
  // Size first: positive inputs are clamped into [CROP_MIN_SIZE, 1].
  const width =
    rawWidth !== null && rawWidth > 0 ? clamp(rawWidth, CROP_MIN_SIZE, 1) : defaults.width;
  const height =
    rawHeight !== null && rawHeight > 0 ? clamp(rawHeight, CROP_MIN_SIZE, 1) : defaults.height;
  // Origin second: clamp into the unit square, then bound by the remaining room.
  const x = clamp(clampUnit(readFiniteNumber(raw.x), defaults.x), 0, Math.max(0, 1 - width));
  const y = clamp(clampUnit(readFiniteNumber(raw.y), defaults.y), 0, Math.max(0, 1 - height));
  return { x, y, width, height };
}
/**
 * Accept only integer pixel sizes within [CUSTOM_SIZE_MIN, CUSTOM_SIZE_MAX];
 * anything else yields undefined so the caller can apply a default.
 */
function normalizeCustomSize(value: unknown): number | undefined {
  if (
    typeof value === "number" &&
    Number.isInteger(value) &&
    value >= CUSTOM_SIZE_MIN &&
    value <= CUSTOM_SIZE_MAX
  ) {
    return value;
  }
  return undefined;
}
/**
 * Coerce unknown input into valid CropResizeSettings, defaulting unknown
 * modes/fits and non-boolean keepAspect. Custom mode always carries explicit
 * pixel dimensions; source mode stores none.
 */
function normalizeResizeSettings(value: unknown): CropResizeSettings {
  const raw = isRecord(value) ? value : {};
  const defaults = DEFAULT_CROP_NODE_DATA.resize;
  const mode: CropResizeMode = raw.mode === "custom" ? "custom" : defaults.mode;
  const fitInput = raw.fit;
  const fit: CropFitMode =
    fitInput === "cover" || fitInput === "contain" || fitInput === "fill" ? fitInput : defaults.fit;
  const keepAspect = typeof raw.keepAspect === "boolean" ? raw.keepAspect : defaults.keepAspect;
  if (mode === "custom") {
    return {
      mode,
      width: normalizeCustomSize(raw.width) ?? DEFAULT_CUSTOM_SIZE,
      height: normalizeCustomSize(raw.height) ?? DEFAULT_CUSTOM_SIZE,
      fit,
      keepAspect,
    };
  }
  // Source mode: output dimensions come from the upstream image.
  return { mode, fit, keepAspect };
}
/** Throw when heavyweight payload fields are embedded in crop node data. */
function assertNoDisallowedPayloadFields(data: Record<string, unknown>): void {
  const offendingKey = DISALLOWED_CROP_PAYLOAD_KEYS.find((key) => key in data);
  if (offendingKey !== undefined) {
    throw new Error(`Crop node accepts parameter data only. '${offendingKey}' is not allowed in data.`);
  }
}
/**
 * Normalize arbitrary persisted data into a valid CropNodeData shape.
 *
 * @param value unknown/untrusted input (e.g. deserialized node data)
 * @param options when `rejectDisallowedPayloadFields` is set, embedded
 *   media/storage fields cause a throw instead of being silently dropped.
 */
export function normalizeCropNodeData(
  value: unknown,
  options?: {
    rejectDisallowedPayloadFields?: boolean;
  },
): CropNodeData {
  const raw = isRecord(value) ? value : {};
  if (options?.rejectDisallowedPayloadFields) {
    assertNoDisallowedPayloadFields(raw);
  }
  const crop = normalizeCropRect(raw.crop);
  const resize = normalizeResizeSettings(raw.resize);
  return { crop, resize };
}

View File

@@ -0,0 +1,146 @@
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import { normalizeCropResizeStepParams } from "@/lib/image-pipeline/adjustment-types";
// Canvas/context pairs usable on both the main thread and in workers.
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;

// Result of applying geometry steps: the transformed canvas plus its pixel size.
export type GeometryTransformResult = {
  canvas: SupportedCanvas;
  context: SupportedContext;
  width: number;
  height: number;
};

// Input for applyGeometryStepsToSource. sourceWidth/sourceHeight default to
// the source's own width/height properties when omitted (e.g. ImageBitmap).
type ApplyGeometryStepsOptions = {
  source: CanvasImageSource;
  sourceWidth?: number;
  sourceHeight?: number;
  steps: readonly PipelineStep[];
  signal?: AbortSignal;
};
/** Raise the canonical AbortError when the signal has already fired. */
function throwIfAborted(signal: AbortSignal | undefined): void {
  if (!signal?.aborted) {
    return;
  }
  throw new DOMException("The operation was aborted.", "AbortError");
}
/**
 * Create a 2D canvas/context pair of the requested size.
 * Prefers a DOM canvas on the main thread, falls back to OffscreenCanvas in
 * workers, and throws when neither environment is available.
 * `willReadFrequently` is requested because callers read pixels back.
 */
function createCanvasContext(width: number, height: number): {
  canvas: SupportedCanvas;
  context: SupportedContext;
} {
  if (typeof document !== "undefined") {
    const domCanvas = document.createElement("canvas");
    domCanvas.width = width;
    domCanvas.height = height;
    const domContext = domCanvas.getContext("2d", { willReadFrequently: true });
    if (domContext === null) {
      throw new Error("Geometry transform could not create a 2D context.");
    }
    return { canvas: domCanvas, context: domContext };
  }
  if (typeof OffscreenCanvas !== "undefined") {
    const offscreen = new OffscreenCanvas(width, height);
    const offscreenContext = offscreen.getContext("2d", { willReadFrequently: true });
    if (offscreenContext === null) {
      throw new Error("Geometry transform could not create an offscreen 2D context.");
    }
    return { canvas: offscreen, context: offscreenContext };
  }
  throw new Error("Geometry transform is not available in this environment.");
}
/**
 * Validate that `value` is a positive finite dimension and return it rounded
 * to an integer of at least 1. Throws with the offending `name` otherwise.
 */
function ensurePositiveDimension(name: string, value: number): number {
  const isValid = Number.isFinite(value) && value > 0;
  if (!isValid) {
    throw new Error(`Invalid ${name}. Expected a positive finite number.`);
  }
  return Math.max(1, Math.round(value));
}
/**
 * Split pipeline steps into geometry-affecting steps (currently only "crop")
 * and tonal steps (everything else), preserving relative order in each list.
 */
export function partitionPipelineSteps(steps: readonly PipelineStep[]): {
  geometrySteps: PipelineStep[];
  tonalSteps: PipelineStep[];
} {
  const geometrySteps = steps.filter((step) => step.type === "crop");
  const tonalSteps = steps.filter((step) => step.type !== "crop");
  return { geometrySteps, tonalSteps };
}
/**
 * Rasterize `source` and apply every crop step in order, returning the
 * geometry-transformed canvas plus its final pixel dimensions.
 *
 * Each crop step's params are run through normalizeCropResizeStepParams: the
 * normalized (0..1) crop rect is mapped onto the current canvas in pixels,
 * and the optional resize dictates the output size per axis (falling back to
 * the cropped pixel size on any axis without an explicit value).
 *
 * Checks `signal` before drawing and between steps; throws an AbortError
 * DOMException (via throwIfAborted) when cancelled.
 */
export function applyGeometryStepsToSource(options: ApplyGeometryStepsOptions): GeometryTransformResult {
  throwIfAborted(options.signal);
  // Dimensions may be passed explicitly or read off the source object
  // (e.g. ImageBitmap exposes width/height); NaN makes the validation throw.
  const sourceWidth =
    options.sourceWidth ?? (options.source as { width?: number }).width ?? Number.NaN;
  const sourceHeight =
    options.sourceHeight ?? (options.source as { height?: number }).height ?? Number.NaN;
  let currentWidth = ensurePositiveDimension("sourceWidth", sourceWidth);
  let currentHeight = ensurePositiveDimension("sourceHeight", sourceHeight);
  // Rasterize the source once; each crop step then draws canvas-to-canvas.
  let current = createCanvasContext(currentWidth, currentHeight);
  current.context.drawImage(options.source, 0, 0, currentWidth, currentHeight);
  for (const step of options.steps) {
    throwIfAborted(options.signal);
    if (step.type !== "crop") {
      // Only crop steps affect geometry; anything else is ignored here.
      continue;
    }
    const normalized = normalizeCropResizeStepParams(step.params);
    // Map the normalized crop rect to pixel coordinates of the current
    // canvas, clamping so at least a 1x1 region stays inside the bounds.
    const sourceX = Math.max(0, Math.floor(normalized.cropRect.x * currentWidth));
    const sourceY = Math.max(0, Math.floor(normalized.cropRect.y * currentHeight));
    const maxSourceWidth = Math.max(1, currentWidth - sourceX);
    const maxSourceHeight = Math.max(1, currentHeight - sourceY);
    // These shadow the outer sourceWidth/sourceHeight locals: here they are
    // the per-step crop region size in pixels.
    const sourceWidth = Math.max(
      1,
      Math.min(maxSourceWidth, Math.round(normalized.cropRect.width * currentWidth)),
    );
    const sourceHeight = Math.max(
      1,
      Math.min(maxSourceHeight, Math.round(normalized.cropRect.height * currentHeight)),
    );
    // Resize (when present) dictates the output size per axis; otherwise keep the crop size.
    const targetWidth = normalized.resize?.width ?? sourceWidth;
    const targetHeight = normalized.resize?.height ?? sourceHeight;
    const next = createCanvasContext(targetWidth, targetHeight);
    next.context.drawImage(
      current.canvas,
      sourceX,
      sourceY,
      sourceWidth,
      sourceHeight,
      0,
      0,
      targetWidth,
      targetHeight,
    );
    current = next;
    currentWidth = targetWidth;
    currentHeight = targetHeight;
  }
  return {
    canvas: current.canvas,
    context: current.context,
    width: currentWidth,
    height: currentHeight,
  };
}

View File

@@ -4,6 +4,10 @@ import {
runPreviewStepWithBackendRouter,
} from "@/lib/image-pipeline/backend/backend-router";
import { computeHistogram, emptyHistogram, type HistogramData } from "@/lib/image-pipeline/histogram";
import {
applyGeometryStepsToSource,
partitionPipelineSteps,
} from "@/lib/image-pipeline/geometry-transform";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
export type PreviewRenderResult = {
@@ -69,21 +73,30 @@ export async function renderPreview(options: {
const bitmap = await loadSourceBitmap(options.sourceUrl, {
signal: options.signal,
});
const { geometrySteps, tonalSteps } = partitionPipelineSteps(options.steps);
const geometryResult = applyGeometryStepsToSource({
source: bitmap,
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
steps: geometrySteps,
signal: options.signal,
});
const width = Math.max(1, Math.round(options.previewWidth));
const height = Math.max(1, Math.round((bitmap.height / bitmap.width) * width));
const height = Math.max(1, Math.round((geometryResult.height / geometryResult.width) * width));
throwIfAborted(options.signal);
const context = createPreviewContext(width, height);
context.drawImage(bitmap, 0, 0, width, height);
context.drawImage(geometryResult.canvas, 0, 0, width, height);
const imageData = context.getImageData(0, 0, width, height);
const backendHint = getPreviewBackendHintForSteps(options.steps);
const backendHint = getPreviewBackendHintForSteps(tonalSteps);
for (let index = 0; index < options.steps.length; index += 1) {
for (let index = 0; index < tonalSteps.length; index += 1) {
runPreviewStepWithBackendRouter({
pixels: imageData.data,
step: options.steps[index]!,
step: tonalSteps[index]!,
width,
height,
backendHint,

View File

@@ -54,6 +54,62 @@ function evictIfNeeded(excludeSourceUrl?: string): void {
}
}
/**
 * Heuristically decide whether `sourceUrl` points at a video resource,
 * either via the pexels-video proxy path or a known video file extension.
 */
function isLikelyVideoUrl(sourceUrl: string): boolean {
  const base = typeof window !== "undefined" ? window.location.origin : "http://localhost";
  try {
    const pathname = new URL(sourceUrl, base).pathname.toLowerCase();
    return pathname.includes("/api/pexels-video") || /\.(mp4|webm|ogg|ogv|mov|m4v)$/.test(pathname);
  } catch {
    // Unparseable URL: fall back to a raw-string extension sniff.
    return /\.(mp4|webm|ogg|ogv|mov|m4v)(?:\?|$)/i.test(sourceUrl);
  }
}
/**
 * Decode the first available frame of a video blob into an ImageBitmap
 * using a detached <video> element.
 *
 * Outside a DOM environment the blob is handed straight to
 * createImageBitmap — NOTE(review): that will likely reject for video
 * containers; confirm whether this path is ever hit in workers.
 *
 * @throws Error("Render source video decode failed.") when the element
 *   cannot decode the blob.
 */
async function decodeVideoFrameBitmap(blob: Blob): Promise<ImageBitmap> {
  if (typeof document === "undefined") {
    return await createImageBitmap(blob);
  }
  const video = document.createElement("video");
  video.preload = "auto";
  video.muted = true;
  video.playsInline = true;
  const objectUrl = URL.createObjectURL(blob);
  video.src = objectUrl;
  try {
    // Wait until the first frame is decodable (loadeddata), then snapshot it.
    await new Promise<void>((resolve, reject) => {
      video.onloadeddata = () => resolve();
      video.onerror = () => reject(new Error("Render source video decode failed."));
      video.load();
    });
    return await createImageBitmap(video);
  } finally {
    // Tear the element down and release the blob URL regardless of outcome.
    video.pause();
    video.removeAttribute("src");
    video.load();
    URL.revokeObjectURL(objectUrl);
  }
}
/**
 * Decode a fetched media response into an ImageBitmap, routing video
 * payloads (by content-type, blob type, or URL heuristic) through a frame
 * grab and image payloads straight to createImageBitmap.
 */
async function decodeBitmapFromResponse(sourceUrl: string, response: Response): Promise<ImageBitmap> {
  const contentType = response.headers?.get("content-type")?.toLowerCase() ?? "";
  const blob = await response.blob();
  const looksLikeVideo =
    contentType.startsWith("video/") ||
    blob.type.startsWith("video/") ||
    isLikelyVideoUrl(sourceUrl);
  return looksLikeVideo ? decodeVideoFrameBitmap(blob) : createImageBitmap(blob);
}
export function clearSourceBitmapCache(): void {
for (const sourceUrl of [...imageBitmapCache.keys()]) {
deleteCacheEntry(sourceUrl);
@@ -77,8 +133,7 @@ function getOrCreateSourceBitmapPromise(sourceUrl: string): Promise<ImageBitmap>
throw new Error(`Render source failed: ${response.status}`);
}
const blob = await response.blob();
const bitmap = await createImageBitmap(blob);
const bitmap = await decodeBitmapFromResponse(sourceUrl, response);
if (entry.released || imageBitmapCache.get(sourceUrl) !== entry) {
closeBitmap(bitmap);