Enhance canvas functionality with new node types and validation

- Added support for new canvas node types: curves, color-adjust, light-adjust, detail-adjust, and render.
- Implemented validation for adjustment nodes to restrict incoming edges to one.
- Updated canvas connection validation to improve user feedback on invalid connections.
- Enhanced node creation and rendering logic to accommodate new node types and their properties.
- Refactored related components and utilities for better maintainability and performance.
This commit is contained in:
Matthias
2026-04-02 11:39:05 +02:00
parent 9bab9bb93d
commit f3c5c2d8f1
52 changed files with 5755 additions and 44 deletions

View File

@@ -0,0 +1,264 @@
/** Canvas node kinds that carry image-adjustment parameters. */
export type AdjustmentNodeKind = "curves" | "color-adjust" | "light-adjust" | "detail-adjust";
/** One control point of a tone curve; both coordinates are in 0..255. */
export type CurvePoint = {
x: number;
y: number;
};
/** Parameters for the curves/levels adjustment. */
export type CurvesData = {
// Selected curve channel; "rgb" is the master curve. Presumably the editor's
// active tab — note that rendering also branches on it (see applyCurves).
channelMode: "rgb" | "red" | "green" | "blue";
// Control points per channel; normalization keeps each list sorted by x
// with at least two points (see normalizeCurvePoints).
points: {
rgb: CurvePoint[];
red: CurvePoint[];
green: CurvePoint[];
blue: CurvePoint[];
};
// Input levels: black/white points in 0..255, gamma clamped to 0.1..10.
levels: {
blackPoint: number;
whitePoint: number;
gamma: number;
};
// Name of the preset these values came from, or null for manual edits.
preset: string | null;
};
/** Parameters for the color adjustment (HSL shift, white balance, vibrance). */
export type ColorAdjustData = {
// hue in -180..180 degrees; saturation and luminance in -100..100.
hsl: {
hue: number;
saturation: number;
luminance: number;
};
// White-balance and vibrance controls, each normalized into -100..100.
temperature: number;
tint: number;
vibrance: number;
preset: string | null;
};
/** Parameters for the light adjustment (tone controls plus vignette). */
export type LightAdjustData = {
// All tone controls are in -100..100 except exposure, which is -5..5.
brightness: number;
contrast: number;
exposure: number;
highlights: number;
shadows: number;
whites: number;
blacks: number;
// Vignette parameters are all normalized into 0..1.
vignette: {
amount: number;
size: number;
roundness: number;
};
preset: string | null;
};
/** Parameters for the detail adjustment (sharpen, clarity, denoise, grain). */
export type DetailAdjustData = {
// amount in 0..500, radius in 0.5..5, threshold in 0..255.
sharpen: {
amount: number;
radius: number;
threshold: number;
};
// clarity in -100..100.
clarity: number;
// Both denoise strengths are in 0..100.
denoise: {
luminance: number;
color: number;
};
// amount in 0..100, size in 0.5..3.
grain: {
amount: number;
size: number;
};
preset: string | null;
};
/** Neutral curves: identity point lists per channel and pass-through levels. */
export const DEFAULT_CURVES_DATA: CurvesData = {
channelMode: "rgb",
points: {
// Every channel starts as the identity mapping (0,0) -> (255,255).
rgb: [
{ x: 0, y: 0 },
{ x: 255, y: 255 },
],
red: [
{ x: 0, y: 0 },
{ x: 255, y: 255 },
],
green: [
{ x: 0, y: 0 },
{ x: 255, y: 255 },
],
blue: [
{ x: 0, y: 0 },
{ x: 255, y: 255 },
],
},
// Full input range and gamma 1 leave pixel values unchanged.
levels: {
blackPoint: 0,
whitePoint: 255,
gamma: 1,
},
preset: null,
};
/** Neutral color adjustment: all shifts at zero. */
export const DEFAULT_COLOR_ADJUST_DATA: ColorAdjustData = {
hsl: {
hue: 0,
saturation: 0,
luminance: 0,
},
temperature: 0,
tint: 0,
vibrance: 0,
preset: null,
};
/** Neutral light adjustment: tone controls at zero, vignette disabled (amount 0). */
export const DEFAULT_LIGHT_ADJUST_DATA: LightAdjustData = {
brightness: 0,
contrast: 0,
exposure: 0,
highlights: 0,
shadows: 0,
whites: 0,
blacks: 0,
vignette: {
amount: 0,
size: 0.5,
roundness: 1,
},
preset: null,
};
/** Neutral detail adjustment: every effect at zero strength. */
export const DEFAULT_DETAIL_ADJUST_DATA: DetailAdjustData = {
sharpen: {
amount: 0,
radius: 1,
threshold: 0,
},
clarity: 0,
denoise: {
luminance: 0,
color: 0,
},
grain: {
amount: 0,
size: 1,
},
preset: null,
};
/**
 * Deep-clones plain adjustment data so defaults and presets are never shared
 * or mutated in place.
 *
 * Prefers `structuredClone` (faster, no stringify round-trip) and falls back
 * to JSON parse/stringify, which is sufficient for the plain JSON-style
 * objects this module defines.
 */
export function cloneAdjustmentData<T>(value: T): T {
  if (typeof structuredClone === "function") {
    return structuredClone(value);
  }
  return JSON.parse(JSON.stringify(value)) as T;
}
/** Restricts `value` to the inclusive range [min, max]. */
function clamp(value: number, min: number, max: number): number {
  if (value < min) return min;
  if (value > max) return max;
  return value;
}
/** Returns `value` when it is a finite number, otherwise `fallback`. */
function safeNumber(value: unknown, fallback: number): number {
  if (typeof value !== "number") return fallback;
  if (!Number.isFinite(value)) return fallback;
  return value;
}
/**
 * Coerces unknown input into a usable curve point list: invalid entries are
 * dropped, coordinates are clamped to 0..255, and the result is sorted by x.
 * Anything yielding fewer than two valid points falls back to a fresh copy
 * of the default identity curve.
 */
function normalizeCurvePoints(points: unknown): CurvePoint[] {
  if (!Array.isArray(points)) {
    return cloneAdjustmentData(DEFAULT_CURVES_DATA.points.rgb);
  }
  const sanitized: CurvePoint[] = [];
  for (const entry of points) {
    if (!entry || typeof entry !== "object") continue;
    const record = entry as Record<string, unknown>;
    sanitized.push({
      x: clamp(safeNumber(record.x, 0), 0, 255),
      y: clamp(safeNumber(record.y, 0), 0, 255),
    });
  }
  sanitized.sort((a, b) => a.x - b.x);
  if (sanitized.length < 2) {
    return cloneAdjustmentData(DEFAULT_CURVES_DATA.points.rgb);
  }
  return sanitized;
}
/**
 * Validates unknown input into a complete CurvesData value: channel mode is
 * checked against the allowed set, every point list is normalized, and the
 * levels are clamped into their legal ranges.
 */
export function normalizeCurvesData(value: unknown): CurvesData {
  const input = (value ?? {}) as Record<string, unknown>;
  const levels = (input.levels ?? {}) as Record<string, unknown>;
  const points = (input.points ?? {}) as Record<string, unknown>;
  const channelModes = ["rgb", "red", "green", "blue"] as const;
  const channelMode =
    channelModes.find((mode) => mode === input.channelMode) ?? DEFAULT_CURVES_DATA.channelMode;
  const blackPoint = clamp(safeNumber(levels.blackPoint, 0), 0, 255);
  const whitePoint = clamp(safeNumber(levels.whitePoint, 255), 0, 255);
  const gamma = clamp(safeNumber(levels.gamma, 1), 0.1, 10);
  return {
    channelMode,
    points: {
      rgb: normalizeCurvePoints(points.rgb),
      red: normalizeCurvePoints(points.red),
      green: normalizeCurvePoints(points.green),
      blue: normalizeCurvePoints(points.blue),
    },
    levels: { blackPoint, whitePoint, gamma },
    preset: typeof input.preset === "string" ? input.preset : null,
  };
}
/**
 * Validates unknown input into a complete ColorAdjustData value. Hue clamps
 * to -180..180; all other controls clamp to -100..100 with a default of 0.
 */
export function normalizeColorAdjustData(value: unknown): ColorAdjustData {
  const input = (value ?? {}) as Record<string, unknown>;
  const hsl = (input.hsl ?? {}) as Record<string, unknown>;
  const percent = (raw: unknown): number => clamp(safeNumber(raw, 0), -100, 100);
  return {
    hsl: {
      hue: clamp(safeNumber(hsl.hue, 0), -180, 180),
      saturation: percent(hsl.saturation),
      luminance: percent(hsl.luminance),
    },
    temperature: percent(input.temperature),
    tint: percent(input.tint),
    vibrance: percent(input.vibrance),
    preset: typeof input.preset === "string" ? input.preset : null,
  };
}
/**
 * Validates unknown input into a complete LightAdjustData value. Tone
 * controls clamp to -100..100, exposure to -5..5, and every vignette
 * parameter to 0..1 (amount defaults 0, size 0.5, roundness 1).
 */
export function normalizeLightAdjustData(value: unknown): LightAdjustData {
  const input = (value ?? {}) as Record<string, unknown>;
  const vignette = (input.vignette ?? {}) as Record<string, unknown>;
  const percent = (raw: unknown): number => clamp(safeNumber(raw, 0), -100, 100);
  const unit = (raw: unknown, fallback: number): number =>
    clamp(safeNumber(raw, fallback), 0, 1);
  return {
    brightness: percent(input.brightness),
    contrast: percent(input.contrast),
    exposure: clamp(safeNumber(input.exposure, 0), -5, 5),
    highlights: percent(input.highlights),
    shadows: percent(input.shadows),
    whites: percent(input.whites),
    blacks: percent(input.blacks),
    vignette: {
      amount: unit(vignette.amount, 0),
      size: unit(vignette.size, 0.5),
      roundness: unit(vignette.roundness, 1),
    },
    preset: typeof input.preset === "string" ? input.preset : null,
  };
}
/**
 * Validates unknown input into a complete DetailAdjustData value, clamping
 * every control into its documented range (sharpen amount 0..500, radius
 * 0.5..5, threshold 0..255; clarity -100..100; denoise 0..100; grain amount
 * 0..100, size 0.5..3).
 */
export function normalizeDetailAdjustData(value: unknown): DetailAdjustData {
  const input = (value ?? {}) as Record<string, unknown>;
  const sharpen = (input.sharpen ?? {}) as Record<string, unknown>;
  const denoise = (input.denoise ?? {}) as Record<string, unknown>;
  const grain = (input.grain ?? {}) as Record<string, unknown>;
  const strength = (raw: unknown): number => clamp(safeNumber(raw, 0), 0, 100);
  return {
    sharpen: {
      amount: clamp(safeNumber(sharpen.amount, 0), 0, 500),
      radius: clamp(safeNumber(sharpen.radius, 1), 0.5, 5),
      threshold: clamp(safeNumber(sharpen.threshold, 0), 0, 255),
    },
    clarity: clamp(safeNumber(input.clarity, 0), -100, 100),
    denoise: {
      luminance: strength(denoise.luminance),
      color: strength(denoise.color),
    },
    grain: {
      amount: strength(grain.amount),
      size: clamp(safeNumber(grain.size, 1), 0.5, 3),
    },
    preset: typeof input.preset === "string" ? input.preset : null,
  };
}

View File

@@ -0,0 +1,137 @@
import { applyPipelineSteps } from "@/lib/image-pipeline/render-core";
import { resolveRenderSize } from "@/lib/image-pipeline/render-size";
import {
RENDER_FORMAT_TO_MIME,
type RenderFormat,
type RenderFullOptions,
type RenderFullResult,
} from "@/lib/image-pipeline/render-types";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
// Canvas/context flavors the bridge can draw on: a DOM canvas on the main
// thread, or an OffscreenCanvas where no document exists (e.g. workers).
type SupportedCanvas = HTMLCanvasElement | OffscreenCanvas;
type SupportedContext = CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;
/**
 * Resolves the JPEG encode quality: defaults to 0.92 when omitted, rejects
 * NaN/Infinity, and clamps everything else into [0, 1].
 */
function normalizeJpegQuality(value: number | undefined): number {
  if (value === undefined) return 0.92;
  if (!Number.isFinite(value)) {
    throw new Error("Invalid render options: jpegQuality must be a finite number.");
  }
  if (value < 0) return 0;
  if (value > 1) return 1;
  return value;
}
/**
 * Creates a width×height canvas plus its 2D context, preferring a DOM canvas
 * when `document` exists and falling back to OffscreenCanvas otherwise.
 * Throws when neither canvas flavor is available in the environment.
 */
function createCanvasContext(width: number, height: number): {
  canvas: SupportedCanvas;
  context: SupportedContext;
} {
  if (typeof document !== "undefined") {
    const element = document.createElement("canvas");
    element.width = width;
    element.height = height;
    const context2d = element.getContext("2d", { willReadFrequently: true });
    if (!context2d) {
      throw new Error("Render bridge could not create a 2D context.");
    }
    return { canvas: element, context: context2d };
  }
  if (typeof OffscreenCanvas !== "undefined") {
    const offscreen = new OffscreenCanvas(width, height);
    const context2d = offscreen.getContext("2d", { willReadFrequently: true });
    if (!context2d) {
      throw new Error("Render bridge could not create an offscreen 2D context.");
    }
    return { canvas: offscreen, context: context2d };
  }
  throw new Error("Canvas rendering is not available in this environment.");
}
/**
 * Encodes a canvas into a Blob, bridging the promise-based
 * OffscreenCanvas.convertToBlob API and the callback-based
 * HTMLCanvasElement.toBlob API behind one async signature.
 */
async function canvasToBlob(
  canvas: SupportedCanvas,
  mimeType: string,
  quality: number | undefined,
): Promise<Blob> {
  if (typeof OffscreenCanvas !== "undefined" && canvas instanceof OffscreenCanvas) {
    return await canvas.convertToBlob({ type: mimeType, quality });
  }
  const element = canvas as HTMLCanvasElement;
  return await new Promise<Blob>((resolve, reject) => {
    element.toBlob(
      (blob) => {
        if (blob) {
          resolve(blob);
        } else {
          reject(new Error("Render bridge could not encode output blob."));
        }
      },
      mimeType,
      quality,
    );
  });
}
/** Maps a render format to its MIME type, throwing on unknown formats. */
function resolveMimeType(format: RenderFormat): string {
  const mimeType = RENDER_FORMAT_TO_MIME[format];
  if (mimeType) return mimeType;
  throw new Error(`Unsupported render format '${format}'.`);
}
export async function renderFull(options: RenderFullOptions): Promise<RenderFullResult> {
const bitmap = await loadSourceBitmap(options.sourceUrl);
const resolvedSize = resolveRenderSize({
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
render: options.render,
limits: options.limits,
});
const { canvas, context } = createCanvasContext(resolvedSize.width, resolvedSize.height);
context.drawImage(bitmap, 0, 0, resolvedSize.width, resolvedSize.height);
const imageData = context.getImageData(0, 0, resolvedSize.width, resolvedSize.height);
applyPipelineSteps(
imageData.data,
options.steps,
resolvedSize.width,
resolvedSize.height,
);
context.putImageData(imageData, 0, 0);
const mimeType = resolveMimeType(options.render.format);
const quality = options.render.format === "jpeg" ? normalizeJpegQuality(options.render.jpegQuality) : null;
const blob = await canvasToBlob(canvas, mimeType, quality ?? undefined);
return {
blob,
width: resolvedSize.width,
height: resolvedSize.height,
mimeType,
format: options.render.format,
quality,
sizeBytes: blob.size,
sourceWidth: bitmap.width,
sourceHeight: bitmap.height,
wasSizeClamped: resolvedSize.wasClamped,
};
}
// Groups the render entry points behind a single exported object.
export const bridge = {
  renderFull,
};

View File

@@ -0,0 +1,42 @@
/** Per-channel 256-bin histograms plus the largest bin count across all channels. */
export type HistogramData = {
  rgb: number[];
  red: number[];
  green: number[];
  blue: number[];
  max: number;
};
/** Creates an all-zero histogram (256 bins per channel, max 0). */
export function emptyHistogram(): HistogramData {
  const zeros = (): number[] => new Array<number>(256).fill(0);
  return { rgb: zeros(), red: zeros(), green: zeros(), blue: zeros(), max: 0 };
}
/**
 * Builds per-channel and luminance histograms from RGBA pixel data.
 * The "rgb" channel bins Rec.709 luminance; alpha is ignored.
 */
export function computeHistogram(data: Uint8ClampedArray): HistogramData {
  const histogram = emptyHistogram();
  for (let offset = 0; offset < data.length; offset += 4) {
    const red = data[offset] ?? 0;
    const green = data[offset + 1] ?? 0;
    const blue = data[offset + 2] ?? 0;
    const luminance = Math.round(red * 0.2126 + green * 0.7152 + blue * 0.0722);
    histogram.red[red] += 1;
    histogram.green[green] += 1;
    histogram.blue[blue] += 1;
    histogram.rgb[luminance] += 1;
  }
  let max = 0;
  for (const channel of [histogram.rgb, histogram.red, histogram.green, histogram.blue]) {
    for (const count of channel) {
      if (count > max) max = count;
    }
  }
  histogram.max = max;
  return histogram;
}

View File

@@ -0,0 +1,144 @@
import {
cloneAdjustmentData,
DEFAULT_COLOR_ADJUST_DATA,
DEFAULT_CURVES_DATA,
DEFAULT_DETAIL_ADJUST_DATA,
DEFAULT_LIGHT_ADJUST_DATA,
type ColorAdjustData,
type CurvesData,
type DetailAdjustData,
type LightAdjustData,
} from "@/lib/image-pipeline/adjustment-types";
// Named curve presets. Each preset starts from a deep copy of the defaults
// (via cloneAdjustmentData) so presets never share nested objects with the
// DEFAULT_* constants, then overrides the relevant fields.
export const CURVE_PRESETS: Record<string, CurvesData> = {
// S-curve on the master channel for extra contrast.
contrast: {
...cloneAdjustmentData(DEFAULT_CURVES_DATA),
points: {
...cloneAdjustmentData(DEFAULT_CURVES_DATA.points),
rgb: [
{ x: 0, y: 0 },
{ x: 64, y: 48 },
{ x: 192, y: 220 },
{ x: 255, y: 255 },
],
},
preset: "contrast",
},
// Lifts exposure via levels only (lower white point, gamma below 1).
brighten: {
...cloneAdjustmentData(DEFAULT_CURVES_DATA),
levels: {
blackPoint: 0,
whitePoint: 245,
gamma: 0.9,
},
preset: "brighten",
},
// Faded film look: raised black output, compressed highlight output.
film: {
...cloneAdjustmentData(DEFAULT_CURVES_DATA),
points: {
...cloneAdjustmentData(DEFAULT_CURVES_DATA.points),
rgb: [
{ x: 0, y: 8 },
{ x: 74, y: 68 },
{ x: 180, y: 196 },
{ x: 255, y: 248 },
],
},
preset: "film",
},
};
// Named color presets built the same way from DEFAULT_COLOR_ADJUST_DATA.
export const COLOR_PRESETS: Record<string, ColorAdjustData> = {
warm: {
...cloneAdjustmentData(DEFAULT_COLOR_ADJUST_DATA),
temperature: 24,
tint: 6,
vibrance: 22,
preset: "warm",
},
cool: {
...cloneAdjustmentData(DEFAULT_COLOR_ADJUST_DATA),
temperature: -22,
tint: -4,
vibrance: 14,
preset: "cool",
},
vintage: {
...cloneAdjustmentData(DEFAULT_COLOR_ADJUST_DATA),
hsl: { hue: -6, saturation: -18, luminance: 4 },
temperature: 14,
tint: 5,
vibrance: -12,
preset: "vintage",
},
};
// Named light presets built from DEFAULT_LIGHT_ADJUST_DATA.
export const LIGHT_PRESETS: Record<string, LightAdjustData> = {
// Compressed highlights + lifted shadows for an HDR-like tonal range.
hdr: {
...cloneAdjustmentData(DEFAULT_LIGHT_ADJUST_DATA),
contrast: 24,
exposure: 0.3,
highlights: -34,
shadows: 38,
whites: 18,
blacks: -16,
preset: "hdr",
},
// Dark, contrasty rendering.
lowkey: {
...cloneAdjustmentData(DEFAULT_LIGHT_ADJUST_DATA),
brightness: -18,
contrast: 28,
exposure: -0.4,
highlights: -20,
shadows: -8,
whites: -10,
blacks: -22,
preset: "lowkey",
},
// Bright, low-contrast rendering.
highkey: {
...cloneAdjustmentData(DEFAULT_LIGHT_ADJUST_DATA),
brightness: 18,
contrast: -8,
exposure: 0.5,
highlights: 22,
shadows: 16,
whites: 26,
blacks: 8,
preset: "highkey",
},
};
// Named detail presets built from DEFAULT_DETAIL_ADJUST_DATA. Note each
// override replaces the whole nested object (all of its fields are listed).
export const DETAIL_PRESETS: Record<string, DetailAdjustData> = {
// Light sharpening tuned for on-screen viewing.
web: {
...cloneAdjustmentData(DEFAULT_DETAIL_ADJUST_DATA),
sharpen: {
amount: 72,
radius: 1,
threshold: 6,
},
clarity: 10,
preset: "web",
},
// Stronger, wider sharpening with mild denoise for print output.
print: {
...cloneAdjustmentData(DEFAULT_DETAIL_ADJUST_DATA),
sharpen: {
amount: 120,
radius: 1.6,
threshold: 4,
},
denoise: {
luminance: 8,
color: 10,
},
preset: "print",
},
// Adds coarse grain and softens midtone contrast.
"film-grain": {
...cloneAdjustmentData(DEFAULT_DETAIL_ADJUST_DATA),
grain: {
amount: 22,
size: 1.4,
},
clarity: -6,
preset: "film-grain",
},
};

View File

@@ -0,0 +1,48 @@
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import { computeHistogram, type HistogramData } from "@/lib/image-pipeline/histogram";
import { applyPipelineStep } from "@/lib/image-pipeline/render-core";
import { loadSourceBitmap } from "@/lib/image-pipeline/source-loader";
/** Output of renderPreview: the scaled pixel data plus its histogram. */
export type PreviewRenderResult = {
width: number;
height: number;
imageData: ImageData;
histogram: HistogramData;
};
/**
 * Renders a downscaled preview (requires a DOM canvas, i.e. the main thread).
 * The source is drawn at `previewWidth` preserving aspect ratio, pipeline
 * steps run one per animation frame so the UI stays responsive, and a
 * histogram of the final pixels is returned alongside the image data.
 */
export async function renderPreview(options: {
  sourceUrl: string;
  steps: readonly PipelineStep[];
  previewWidth: number;
}): Promise<PreviewRenderResult> {
  const bitmap = await loadSourceBitmap(options.sourceUrl);
  const width = Math.max(1, Math.round(options.previewWidth));
  const height = Math.max(1, Math.round((bitmap.height / bitmap.width) * width));
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  const context = canvas.getContext("2d", { willReadFrequently: true });
  if (!context) {
    throw new Error("Preview renderer could not create 2D context.");
  }
  context.drawImage(bitmap, 0, 0, width, height);
  const imageData = context.getImageData(0, 0, width, height);
  for (const step of options.steps) {
    applyPipelineStep(imageData.data, step, width, height);
    // Yield to the browser between steps to keep interaction smooth.
    await new Promise<void>((resolve) => requestAnimationFrame(() => resolve()));
  }
  return {
    width,
    height,
    imageData,
    histogram: computeHistogram(imageData.data),
  };
}

View File

@@ -0,0 +1,323 @@
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
import {
normalizeColorAdjustData,
normalizeCurvesData,
normalizeDetailAdjustData,
normalizeLightAdjustData,
type CurvePoint,
} from "@/lib/image-pipeline/adjustment-types";
/** Restricts `value` to the inclusive [min, max] range. */
function clamp(value: number, min: number, max: number): number {
  return value < min ? min : value > max ? max : value;
}
/** Rounds to the nearest integer and clamps into the 0..255 byte range. */
function toByte(value: number): number {
  const rounded = Math.round(value);
  if (rounded <= 0) return 0;
  if (rounded > 255) return 255;
  return rounded;
}
/**
 * Converts a list of curve control points into a 256-entry lookup table via
 * linear interpolation between adjacent points. Inputs at or beyond the
 * first/last point clamp to that point's output value.
 */
function buildLut(points: CurvePoint[]): Uint8Array {
  const lut = new Uint8Array(256);
  const sorted = [...points].sort((a, b) => a.x - b.x);
  for (let input = 0; input < 256; input += 1) {
    const first = sorted[0] ?? { x: 0, y: 0 };
    const last = sorted[sorted.length - 1] ?? { x: 255, y: 255 };
    if (input <= first.x) {
      lut[input] = toByte(first.y);
    } else if (input >= last.x) {
      lut[input] = toByte(last.y);
    } else {
      // Find the segment that brackets `input` and interpolate inside it.
      for (let segment = 1; segment < sorted.length; segment += 1) {
        const left = sorted[segment - 1]!;
        const right = sorted[segment]!;
        if (input < left.x || input > right.x) continue;
        const span = Math.max(1, right.x - left.x);
        const progress = (input - left.x) / span;
        lut[input] = toByte(left.y + (right.y - left.y) * progress);
        break;
      }
    }
  }
  return lut;
}
/**
 * Converts 0..255 RGB to HSL with h in degrees [0, 360) and s, l in [0, 1].
 * Achromatic colors (delta 0) report h = 0 and s = 0.
 */
function rgbToHsl(r: number, g: number, b: number): { h: number; s: number; l: number } {
  const redN = r / 255;
  const greenN = g / 255;
  const blueN = b / 255;
  const max = Math.max(redN, greenN, blueN);
  const min = Math.min(redN, greenN, blueN);
  const delta = max - min;
  const l = (max + min) / 2;
  if (delta === 0) return { h: 0, s: 0, l };
  const s = delta / (1 - Math.abs(2 * l - 1));
  let h: number;
  if (max === redN) {
    h = ((greenN - blueN) / delta) % 6;
  } else if (max === greenN) {
    h = (blueN - redN) / delta + 2;
  } else {
    h = (redN - greenN) / delta + 4;
  }
  h *= 60;
  return { h: h < 0 ? h + 360 : h, s, l };
}
/**
 * Converts HSL (h in degrees [0, 360), s and l in [0, 1]) to 0..255 RGB
 * using the standard chroma/secondary construction.
 */
function hslToRgb(h: number, s: number, l: number): { r: number; g: number; b: number } {
  const chroma = (1 - Math.abs(2 * l - 1)) * s;
  const secondary = chroma * (1 - Math.abs(((h / 60) % 2) - 1));
  const offset = l - chroma / 2;
  let rp = 0;
  let gp = 0;
  let bp = 0;
  if (h < 60) {
    rp = chroma;
    gp = secondary;
  } else if (h < 120) {
    rp = secondary;
    gp = chroma;
  } else if (h < 180) {
    gp = chroma;
    bp = secondary;
  } else if (h < 240) {
    gp = secondary;
    bp = chroma;
  } else if (h < 300) {
    rp = secondary;
    bp = chroma;
  } else {
    rp = chroma;
    bp = secondary;
  }
  // Shift by the lightness offset and clamp each channel into 0..255.
  const channel = (value: number): number => {
    const scaled = Math.round((value + offset) * 255);
    if (scaled <= 0) return 0;
    return scaled > 255 ? 255 : scaled;
  };
  return { r: channel(rp), g: channel(gp), b: channel(bp) };
}
/**
 * Applies the curves/levels step to RGBA pixel data in place.
 *
 * Per pixel: levels (black/white point + gamma) first, then the master RGB
 * curve, then per-channel curves.
 *
 * NOTE(review): with channelMode "rgb" all three per-channel LUTs are applied,
 * but with a single-channel mode only that channel's LUT is — so the rendered
 * output depends on which channel was active when the step was captured.
 * Confirm this is intended rather than a leftover of the editing UI.
 */
function applyCurves(pixels: Uint8ClampedArray, params: unknown): void {
  const curves = normalizeCurvesData(params);
  const rgbLut = buildLut(curves.points.rgb);
  const redLut = buildLut(curves.points.red);
  const greenLut = buildLut(curves.points.green);
  const blueLut = buildLut(curves.points.blue);
  const whitePoint = Math.max(curves.levels.whitePoint, curves.levels.blackPoint + 1);
  const levelRange = whitePoint - curves.levels.blackPoint;
  const invGamma = 1 / curves.levels.gamma;
  // Levels is a pure per-value mapping over 0..255, so precompute it once as
  // a 256-entry LUT instead of allocating a closure and calling Math.pow
  // three times for every pixel.
  const levelsLut = new Uint8Array(256);
  for (let value = 0; value < 256; value += 1) {
    const normalized = clamp((value - curves.levels.blackPoint) / levelRange, 0, 1);
    levelsLut[value] = toByte(Math.pow(normalized, invGamma) * 255);
  }
  for (let index = 0; index < pixels.length; index += 4) {
    let red = levelsLut[pixels[index] ?? 0];
    let green = levelsLut[pixels[index + 1] ?? 0];
    let blue = levelsLut[pixels[index + 2] ?? 0];
    red = rgbLut[red];
    green = rgbLut[green];
    blue = rgbLut[blue];
    if (curves.channelMode === "red") {
      red = redLut[red];
    } else if (curves.channelMode === "green") {
      green = greenLut[green];
    } else if (curves.channelMode === "blue") {
      blue = blueLut[blue];
    } else {
      red = redLut[red];
      green = greenLut[green];
      blue = blueLut[blue];
    }
    pixels[index] = red;
    pixels[index + 1] = green;
    pixels[index + 2] = blue;
  }
}
/**
 * Applies HSL shifts, white balance (temperature/tint), and vibrance to RGBA
 * pixels in place. Vibrance boosts saturation more for pixels that are
 * currently less saturated; temperature/tint are applied as RGB offsets after
 * the HSL round-trip.
 */
function applyColorAdjust(pixels: Uint8ClampedArray, params: unknown): void {
  const color = normalizeColorAdjustData(params);
  const saturationFactor = 1 + color.hsl.saturation / 100;
  const luminanceShift = color.hsl.luminance / 100;
  const hueShift = color.hsl.hue;
  // Hoisted out of the pixel loop: these depend only on the step parameters.
  const tempShift = color.temperature * 0.6;
  const tintShift = color.tint * 0.4;
  const vibranceBoost = color.vibrance / 100;
  for (let index = 0; index < pixels.length; index += 4) {
    const currentRed = pixels[index] ?? 0;
    const currentGreen = pixels[index + 1] ?? 0;
    const currentBlue = pixels[index + 2] ?? 0;
    const hsl = rgbToHsl(currentRed, currentGreen, currentBlue);
    const shiftedHue = (hsl.h + hueShift + 360) % 360;
    const shiftedSaturation = clamp(hsl.s * saturationFactor, 0, 1);
    const shiftedLuminance = clamp(hsl.l + luminanceShift, 0, 1);
    // Less-saturated pixels receive more of the vibrance boost.
    const saturationDelta = (1 - hsl.s) * vibranceBoost;
    const vivid = hslToRgb(
      shiftedHue,
      clamp(shiftedSaturation + saturationDelta, 0, 1),
      shiftedLuminance,
    );
    pixels[index] = toByte(vivid.r + tempShift);
    pixels[index + 1] = toByte(vivid.g + tintShift);
    pixels[index + 2] = toByte(vivid.b - tempShift - tintShift * 0.3);
  }
}
/**
 * Applies exposure, contrast, brightness, tonal-range boosts, and an optional
 * vignette to RGBA pixels in place.
 *
 * Order per pixel: exposure multiply, then contrast/brightness around the
 * 128 midpoint, then luminance-weighted highlight/shadow/white/black offsets,
 * then vignette darkening toward the corners.
 */
function applyLightAdjust(
  pixels: Uint8ClampedArray,
  params: unknown,
  width: number,
  height: number,
): void {
  const light = normalizeLightAdjustData(params);
  // Mapped so exposure +2 doubles channel values: 2^(exposure / 2).
  const exposureFactor = Math.pow(2, light.exposure / 2);
  const contrastFactor = 1 + light.contrast / 100;
  const brightnessShift = light.brightness * 1.8;
  const centerX = width / 2;
  const centerY = height / 2;
  for (let y = 0; y < height; y += 1) {
    for (let x = 0; x < width; x += 1) {
      const index = (y * width + x) * 4;
      let red = pixels[index] ?? 0;
      let green = pixels[index + 1] ?? 0;
      let blue = pixels[index + 2] ?? 0;
      red = red * exposureFactor;
      green = green * exposureFactor;
      blue = blue * exposureFactor;
      // Contrast pivots around mid-grey (128); brightness is a flat offset.
      red = (red - 128) * contrastFactor + 128 + brightnessShift;
      green = (green - 128) * contrastFactor + 128 + brightnessShift;
      blue = (blue - 128) * contrastFactor + 128 + brightnessShift;
      // Rec.709 luminance weights how strongly each tonal control applies:
      // highlights/whites scale with brightness, shadows/blacks with darkness.
      const luma = red * 0.2126 + green * 0.7152 + blue * 0.0722;
      const highlightsBoost = (luma / 255) * (light.highlights / 100) * 40;
      const shadowsBoost = ((255 - luma) / 255) * (light.shadows / 100) * 40;
      const whitesBoost = (luma / 255) * (light.whites / 100) * 35;
      const blacksBoost = ((255 - luma) / 255) * (light.blacks / 100) * 35;
      const totalBoost = highlightsBoost + shadowsBoost + whitesBoost + blacksBoost;
      red = toByte(red + totalBoost);
      green = toByte(green + totalBoost);
      blue = toByte(blue + totalBoost);
      if (light.vignette.amount > 0) {
        // Distance from the image center, normalized by the half-dimensions.
        const dx = (x - centerX) / Math.max(1, centerX);
        const dy = (y - centerY) / Math.max(1, centerY);
        const radialDistance = Math.sqrt(dx * dx + dy * dy);
        // Higher roundness steepens the falloff; larger size weakens the darkening.
        const softEdge = Math.pow(1 - clamp(radialDistance, 0, 1), 1 + light.vignette.roundness);
        const strength = 1 - light.vignette.amount * (1 - softEdge) * (1.5 - light.vignette.size);
        red = toByte(red * strength);
        green = toByte(green * strength);
        blue = toByte(blue * strength);
      }
      pixels[index] = red;
      pixels[index + 1] = green;
      pixels[index + 2] = blue;
    }
  }
}
/** Deterministic sin-hash noise in [0, 1) derived from `seed`. */
function pseudoNoise(seed: number): number {
  const scrambled = Math.sin(seed * 12.9898) * 43758.5453;
  return scrambled - Math.floor(scrambled);
}
/**
 * Applies sharpen, clarity, denoise, and film grain to RGBA pixels in place.
 *
 * NOTE(review): all operations here are per-pixel approximations — "sharpen"
 * pushes each channel away from the pixel's own luminance and "denoise"
 * blends toward luminance/channel-average; no neighboring-pixel kernel is
 * used.
 */
function applyDetailAdjust(pixels: Uint8ClampedArray, params: unknown): void {
  const detail = normalizeDetailAdjustData(params);
  // Normalize UI ranges into small working factors.
  const sharpenBoost = detail.sharpen.amount / 500;
  const clarityBoost = detail.clarity / 100;
  const denoiseLuma = detail.denoise.luminance / 100;
  const denoiseColor = detail.denoise.color / 100;
  const grainAmount = detail.grain.amount / 100;
  const grainScale = Math.max(0.5, detail.grain.size);
  for (let index = 0; index < pixels.length; index += 4) {
    let red = pixels[index] ?? 0;
    let green = pixels[index + 1] ?? 0;
    let blue = pixels[index + 2] ?? 0;
    // Rec.709 luminance of the current pixel.
    const luma = red * 0.2126 + green * 0.7152 + blue * 0.0722;
    // "Sharpen": amplify each channel's deviation from the luminance.
    red = red + (red - luma) * sharpenBoost * 0.6;
    green = green + (green - luma) * sharpenBoost * 0.6;
    blue = blue + (blue - luma) * sharpenBoost * 0.6;
    // Clarity: contrast around mid-grey, strongest for midtones (luma ~128).
    const midtoneFactor = 1 - Math.abs(luma / 255 - 0.5) * 2;
    const clarityScale = 1 + clarityBoost * midtoneFactor * 0.7;
    red = (red - 128) * clarityScale + 128;
    green = (green - 128) * clarityScale + 128;
    blue = (blue - 128) * clarityScale + 128;
    if (denoiseLuma > 0 || denoiseColor > 0) {
      // Luminance denoise blends channels toward the pixel's luma; color
      // denoise blends toward the channel average (desaturates color noise).
      red = red * (1 - denoiseLuma * 0.2) + luma * denoiseLuma * 0.2;
      green = green * (1 - denoiseLuma * 0.2) + luma * denoiseLuma * 0.2;
      blue = blue * (1 - denoiseLuma * 0.2) + luma * denoiseLuma * 0.2;
      const average = (red + green + blue) / 3;
      red = red * (1 - denoiseColor * 0.2) + average * denoiseColor * 0.2;
      green = green * (1 - denoiseColor * 0.2) + average * denoiseColor * 0.2;
      blue = blue * (1 - denoiseColor * 0.2) + average * denoiseColor * 0.2;
    }
    if (grainAmount > 0) {
      // Deterministic monochrome grain seeded by the pixel index.
      const grain = (pseudoNoise((index + 1) / grainScale) - 0.5) * grainAmount * 40;
      red += grain;
      green += grain;
      blue += grain;
    }
    pixels[index] = toByte(red);
    pixels[index + 1] = toByte(green);
    pixels[index + 2] = toByte(blue);
  }
}
/**
 * Dispatches a single pipeline step onto the pixel buffer. Unknown step
 * types are silently ignored so documents with newer node types degrade
 * gracefully.
 */
export function applyPipelineStep(
  pixels: Uint8ClampedArray,
  step: PipelineStep<string, unknown>,
  width: number,
  height: number,
): void {
  switch (step.type) {
    case "curves":
      applyCurves(pixels, step.params);
      break;
    case "color-adjust":
      applyColorAdjust(pixels, step.params);
      break;
    case "light-adjust":
      applyLightAdjust(pixels, step.params, width, height);
      break;
    case "detail-adjust":
      applyDetailAdjust(pixels, step.params);
      break;
    default:
      break;
  }
}
/** Runs every pipeline step in order over the same pixel buffer. */
export function applyPipelineSteps(
  pixels: Uint8ClampedArray,
  steps: readonly PipelineStep[],
  width: number,
  height: number,
): void {
  for (const step of steps) {
    applyPipelineStep(pixels, step, width, height);
  }
}

View File

@@ -0,0 +1,108 @@
import type {
RenderOptions,
RenderSizeLimits,
ResolvedRenderSize,
} from "@/lib/image-pipeline/render-types";
// Default output clamps: 8192 px per side and 2^25 (~33.5M) total pixels.
const DEFAULT_MAX_DIMENSION = 8192;
const DEFAULT_MAX_PIXELS = 33_554_432;
/**
 * Validates an optional size limit: undefined falls back to the default,
 * non-positive or non-finite values throw, fractional values floor with a
 * minimum of 1.
 */
function sanitizeLimit(name: string, value: number | undefined, fallback: number): number {
  if (value === undefined) {
    return fallback;
  }
  const usable = Number.isFinite(value) && value > 0;
  if (!usable) {
    throw new Error(`Invalid render limit '${name}'. Expected a positive finite number.`);
  }
  const floored = Math.floor(value);
  return floored < 1 ? 1 : floored;
}
/**
 * Validates a pixel dimension: must be a positive finite number; the result
 * is rounded to an integer of at least 1.
 */
function sanitizeDimension(name: string, value: number): number {
  const usable = Number.isFinite(value) && value > 0;
  if (!usable) {
    throw new Error(`Invalid ${name}. Expected a positive finite number.`);
  }
  const rounded = Math.round(value);
  return rounded < 1 ? 1 : rounded;
}
/** Scales a width/height pair by `factor`, flooring each side with a floor of 1. */
function scaleDimensions(
  width: number,
  height: number,
  factor: number,
): {
  width: number;
  height: number;
} {
  const scale = (side: number): number => {
    const scaled = Math.floor(side * factor);
    return scaled < 1 ? 1 : scaled;
  };
  return { width: scale(width), height: scale(height) };
}
/**
 * Computes the final output dimensions for a render request.
 *
 * Starts from the requested resolution ("original", "2x", or an explicit
 * custom size), then clamps first by the maximum side length and then by the
 * maximum total pixel count, reporting the combined scale factor (relative
 * to the requested target) and whether any clamping occurred.
 */
export function resolveRenderSize(options: {
  sourceWidth: number;
  sourceHeight: number;
  render: RenderOptions;
  limits?: RenderSizeLimits;
}): ResolvedRenderSize {
  const sourceWidth = sanitizeDimension("sourceWidth", options.sourceWidth);
  const sourceHeight = sanitizeDimension("sourceHeight", options.sourceHeight);
  const maxDimension = sanitizeLimit(
    "maxDimension",
    options.limits?.maxDimension,
    DEFAULT_MAX_DIMENSION,
  );
  const maxPixels = sanitizeLimit("maxPixels", options.limits?.maxPixels, DEFAULT_MAX_PIXELS);
  let targetWidth: number;
  let targetHeight: number;
  switch (options.render.resolution) {
    case "original":
      targetWidth = sourceWidth;
      targetHeight = sourceHeight;
      break;
    case "2x":
      targetWidth = sourceWidth * 2;
      targetHeight = sourceHeight * 2;
      break;
    case "custom": {
      if (!options.render.customSize) {
        throw new Error("Invalid render options: resolution 'custom' requires customSize.");
      }
      targetWidth = sanitizeDimension("customSize.width", options.render.customSize.width);
      targetHeight = sanitizeDimension("customSize.height", options.render.customSize.height);
      break;
    }
    default:
      throw new Error(`Unsupported render resolution '${options.render.resolution}'.`);
  }
  targetWidth = sanitizeDimension("targetWidth", targetWidth);
  targetHeight = sanitizeDimension("targetHeight", targetHeight);
  let scaleFactor = 1;
  let wasClamped = false;
  // First clamp: longest side must not exceed maxDimension.
  const dimensionScale = Math.min(1, maxDimension / Math.max(targetWidth, targetHeight));
  if (dimensionScale < 1) {
    const scaled = scaleDimensions(targetWidth, targetHeight, dimensionScale);
    targetWidth = scaled.width;
    targetHeight = scaled.height;
    scaleFactor *= dimensionScale;
    wasClamped = true;
  }
  // Second clamp: total pixel budget, scaled uniformly on both axes.
  const pixelCount = targetWidth * targetHeight;
  if (pixelCount > maxPixels) {
    const pixelScale = Math.sqrt(maxPixels / pixelCount);
    const scaled = scaleDimensions(targetWidth, targetHeight, pixelScale);
    targetWidth = scaled.width;
    targetHeight = scaled.height;
    scaleFactor *= pixelScale;
    wasClamped = true;
  }
  return { width: targetWidth, height: targetHeight, scaleFactor, wasClamped };
}

View File

@@ -0,0 +1,52 @@
import type { PipelineStep } from "@/lib/image-pipeline/contracts";
// Supported output encodings and their MIME types.
export const RENDER_FORMAT_TO_MIME = {
png: "image/png",
jpeg: "image/jpeg",
webp: "image/webp",
} as const;
/** How the output size is chosen: source size, doubled, or explicit pixels. */
export type RenderResolution = "original" | "2x" | "custom";
export type RenderFormat = keyof typeof RENDER_FORMAT_TO_MIME;
/** User-facing render settings. */
export type RenderOptions = {
resolution: RenderResolution;
// Required when resolution is "custom"; ignored otherwise.
customSize?: {
width: number;
height: number;
};
format: RenderFormat;
// Only used when format is "jpeg"; defaults to 0.92, clamped to 0..1.
jpegQuality?: number;
};
/** Optional safety clamps on the output size (see render-size defaults). */
export type RenderSizeLimits = {
maxDimension?: number;
maxPixels?: number;
};
/** Output dimensions after resolution + limit clamping. */
export type ResolvedRenderSize = {
width: number;
height: number;
// Combined scale applied by clamping, relative to the requested target size.
scaleFactor: number;
wasClamped: boolean;
};
/** Inputs for a full-resolution render. */
export type RenderFullOptions = {
sourceUrl: string;
steps: readonly PipelineStep[];
render: RenderOptions;
limits?: RenderSizeLimits;
};
/** Encoded output plus metadata about the render that produced it. */
export type RenderFullResult = {
blob: Blob;
width: number;
height: number;
mimeType: string;
format: RenderFormat;
// JPEG quality actually used, or null for formats without a quality knob.
quality: number | null;
sizeBytes: number;
sourceWidth: number;
sourceHeight: number;
wasSizeClamped: boolean;
};

View File

@@ -0,0 +1,35 @@
// Memoizes in-flight and completed bitmap decodes per source URL; failed
// loads are evicted so callers can retry.
const imageBitmapCache = new Map<string, Promise<ImageBitmap>>();
/**
 * Fetches and decodes the render source image, deduplicating concurrent
 * requests for the same URL through the shared cache. Throws for a blank
 * URL or when `createImageBitmap` is unavailable in this environment.
 */
export async function loadSourceBitmap(sourceUrl: string): Promise<ImageBitmap> {
  if (!sourceUrl || sourceUrl.trim().length === 0) {
    throw new Error("Render sourceUrl is required.");
  }
  if (typeof createImageBitmap !== "function") {
    throw new Error("ImageBitmap is not available in this environment.");
  }
  const pending = imageBitmapCache.get(sourceUrl);
  if (pending) {
    return await pending;
  }
  const load = async (): Promise<ImageBitmap> => {
    const response = await fetch(sourceUrl);
    if (!response.ok) {
      throw new Error(`Render source failed: ${response.status}`);
    }
    const blob = await response.blob();
    return await createImageBitmap(blob);
  };
  const promise = load();
  imageBitmapCache.set(sourceUrl, promise);
  try {
    return await promise;
  } catch (error) {
    // Drop the failed promise so the next call retries instead of replaying
    // the cached rejection.
    imageBitmapCache.delete(sourceUrl);
    throw error;
  }
}