// lemonspace_app/lib/image-pipeline/source-loader.ts
// (original file listing metadata: 433 lines, 11 KiB, TypeScript)
// Maximum number of source bitmap cache entries kept in memory at once.
export const SOURCE_BITMAP_CACHE_MAX_ENTRIES = 32;

// One cache slot: the shared decode promise, the decoded bitmap once it
// resolves, and a flag set when the entry has been released (bitmap closed).
type CacheEntry = {
  promise: Promise<ImageBitmap>;
  bitmap?: ImageBitmap;
  released?: boolean;
};

// Module-level cache keyed by source URL. Map insertion order doubles as LRU
// order: entries are re-inserted on access and evicted from the front.
const imageBitmapCache = new Map<string, CacheEntry>();

type LoadSourceBitmapOptions = {
  signal?: AbortSignal;
};

// A two-layer "mixer" composition: an overlay image blended onto a base
// image. overlayX/Y/Width/Height are ratios of the base dimensions (0..1);
// opacity is a percentage (0..100) — see normalizeCompositionOpacity.
type RenderSourceComposition = {
  kind: "mixer";
  baseUrl: string;
  overlayUrl: string;
  blendMode: "normal" | "multiply" | "screen" | "overlay";
  opacity: number;
  overlayX: number;
  overlayY: number;
  overlayWidth: number;
  overlayHeight: number;
};

// Either a plain sourceUrl or a sourceComposition must be provided.
type LoadRenderSourceBitmapOptions = {
  sourceUrl?: string;
  sourceComposition?: RenderSourceComposition;
  signal?: AbortSignal;
};
/**
 * Raises an AbortError when the given signal has already been aborted.
 * A no-op for undefined or non-aborted signals.
 */
function throwIfAborted(signal: AbortSignal | undefined): void {
  if (!signal?.aborted) {
    return;
  }
  throw new DOMException("The operation was aborted.", "AbortError");
}
/**
 * Closes an ImageBitmap when it exposes a close() method; tolerates
 * undefined bitmaps and environments whose ImageBitmap lacks close().
 */
function closeBitmap(bitmap: ImageBitmap | undefined): void {
  if (!bitmap || typeof bitmap.close !== "function") {
    return;
  }
  bitmap.close();
}
/**
 * Removes a cache entry by URL, marking it released and closing its decoded
 * bitmap (if any). The released flag is set before closing so an in-flight
 * decode can notice its result is no longer wanted.
 */
function deleteCacheEntry(sourceUrl: string): void {
  const entry = imageBitmapCache.get(sourceUrl);
  if (entry) {
    entry.released = true;
    imageBitmapCache.delete(sourceUrl);
    closeBitmap(entry.bitmap);
  }
}
// Moves an entry to the back of the Map's insertion order, marking it the
// most recently used slot for LRU eviction purposes.
function touchCacheEntry(sourceUrl: string, entry: CacheEntry): void {
  imageBitmapCache.delete(sourceUrl);
  imageBitmapCache.set(sourceUrl, entry);
}
/**
 * Evicts least-recently-used decoded entries until the cache is back under
 * SOURCE_BITMAP_CACHE_MAX_ENTRIES.
 *
 * Entries are visited in Map insertion order (oldest first). In-flight
 * entries (no bitmap yet) and the optionally excluded URL are skipped, so
 * the cache may legitimately stay over the limit when only such entries
 * remain.
 *
 * @param excludeSourceUrl URL that must never be evicted (the entry that
 *   triggered this eviction pass).
 */
function evictIfNeeded(excludeSourceUrl?: string): void {
  // Single ordered pass instead of re-materializing the entire entry list
  // on every eviction (previously O(n^2)). Deleting entries during Map
  // iteration is well-defined in JS: remaining entries are still visited.
  for (const [sourceUrl, entry] of imageBitmapCache) {
    if (imageBitmapCache.size <= SOURCE_BITMAP_CACHE_MAX_ENTRIES) {
      return;
    }
    if (sourceUrl !== excludeSourceUrl && entry.bitmap) {
      deleteCacheEntry(sourceUrl);
    }
  }
}
/**
 * Heuristically decides whether a URL points at a video resource, either by
 * file extension on the path or by the pexels-video proxy API route.
 */
function isLikelyVideoUrl(sourceUrl: string): boolean {
  const base = typeof window !== "undefined" ? window.location.origin : "http://localhost";
  let pathname: string;
  try {
    pathname = new URL(sourceUrl, base).pathname.toLowerCase();
  } catch {
    // Unparseable URL: fall back to matching the raw string.
    return /\.(mp4|webm|ogg|ogv|mov|m4v)(?:\?|$)/i.test(sourceUrl);
  }
  if (pathname.includes("/api/pexels-video")) {
    return true;
  }
  return /\.(mp4|webm|ogg|ogv|mov|m4v)$/.test(pathname);
}
/**
 * Decodes the first frame of a video blob into an ImageBitmap.
 *
 * Outside the DOM (no `document`), falls back to createImageBitmap on the
 * blob directly. In the DOM, loads the blob into a detached <video> element,
 * waits for the first frame (`loadeddata`), and captures it.
 *
 * @throws Error when the video element fails to decode the blob.
 */
async function decodeVideoFrameBitmap(blob: Blob): Promise<ImageBitmap> {
  if (typeof document === "undefined") {
    return await createImageBitmap(blob);
  }
  const video = document.createElement("video");
  video.preload = "auto";
  video.muted = true;
  video.playsInline = true;
  const objectUrl = URL.createObjectURL(blob);
  video.src = objectUrl;
  try {
    await new Promise<void>((resolve, reject) => {
      video.onloadeddata = () => resolve();
      video.onerror = () => reject(new Error("Render source video decode failed."));
      video.load();
    });
    return await createImageBitmap(video);
  } finally {
    // Detach the handlers before cleanup: the load() call below (with the
    // src removed) fires an error event that would otherwise invoke the
    // stale onerror against the already-settled promise.
    video.onloadeddata = null;
    video.onerror = null;
    video.pause();
    video.removeAttribute("src");
    video.load();
    URL.revokeObjectURL(objectUrl);
  }
}
/**
 * Turns a fetch Response into an ImageBitmap, routing video payloads through
 * the video-frame decoder. Video detection checks the response content-type
 * header, the blob's own type, and finally the URL shape.
 */
async function decodeBitmapFromResponse(sourceUrl: string, response: Response): Promise<ImageBitmap> {
  const contentType = response.headers?.get("content-type")?.toLowerCase() ?? "";
  const blob = await response.blob();
  const looksLikeVideo =
    contentType.startsWith("video/") ||
    blob.type.startsWith("video/") ||
    isLikelyVideoUrl(sourceUrl);
  return looksLikeVideo
    ? await decodeVideoFrameBitmap(blob)
    : await createImageBitmap(blob);
}
/**
 * Empties the bitmap cache, releasing every entry and closing all decoded
 * bitmaps. Keys are snapshotted first because deletion mutates the Map.
 */
export function clearSourceBitmapCache(): void {
  const sourceUrls = Array.from(imageBitmapCache.keys());
  for (const sourceUrl of sourceUrls) {
    deleteCacheEntry(sourceUrl);
  }
}
// Returns the cached (or newly started) decode promise for a source URL.
// The entry is inserted into the cache synchronously — before the fetch
// settles — so concurrent callers share a single in-flight decode.
function getOrCreateSourceBitmapPromise(sourceUrl: string): Promise<ImageBitmap> {
  const cached = imageBitmapCache.get(sourceUrl);
  if (cached) {
    // Cache hit: bump to most-recently-used and share the promise.
    touchCacheEntry(sourceUrl, cached);
    return cached.promise;
  }
  // Placeholder promise so the entry object exists before the async work
  // below (which closes over `entry`) starts; replaced a few lines down.
  const entry: CacheEntry = {
    promise: Promise.resolve(undefined as never),
  };
  const promise = (async () => {
    const response = await fetch(sourceUrl);
    if (!response.ok) {
      throw new Error(`Render source failed: ${response.status}`);
    }
    const bitmap = await decodeBitmapFromResponse(sourceUrl, response);
    if (entry.released || imageBitmapCache.get(sourceUrl) !== entry) {
      // Entry was released or superseded while decoding: free the bitmap.
      // NOTE(review): the closed bitmap is still returned to awaiting
      // callers — confirm callers tolerate a closed ImageBitmap here.
      closeBitmap(bitmap);
      return bitmap;
    }
    entry.bitmap = bitmap;
    // Evict older decoded entries, but never the one just populated.
    evictIfNeeded(sourceUrl);
    return bitmap;
  })();
  entry.promise = promise;
  imageBitmapCache.set(sourceUrl, entry);
  // On failure, drop the entry (only if it is still ours) so a later call
  // can retry the fetch instead of re-awaiting a rejected promise.
  void promise.catch(() => {
    if (imageBitmapCache.get(sourceUrl) === entry) {
      imageBitmapCache.delete(sourceUrl);
    }
  });
  return promise;
}
/**
 * Awaits `promise`, but rejects with an AbortError as soon as `signal`
 * aborts — without cancelling the underlying work, which may still be
 * shared with other callers.
 */
async function awaitWithLocalAbort<T>(
  promise: Promise<T>,
  signal: AbortSignal | undefined,
): Promise<T> {
  throwIfAborted(signal);
  if (signal === undefined) {
    return await promise;
  }
  const makeAbortError = () =>
    new DOMException("The operation was aborted.", "AbortError");
  return await new Promise<T>((resolve, reject) => {
    const onAbort = () => {
      signal.removeEventListener("abort", onAbort);
      reject(makeAbortError());
    };
    signal.addEventListener("abort", onAbort, { once: true });
    promise.then(
      (value) => {
        signal.removeEventListener("abort", onAbort);
        // The abort may have fired after the promise settled; honor it.
        if (signal.aborted) {
          reject(makeAbortError());
        } else {
          resolve(value);
        }
      },
      (error) => {
        signal.removeEventListener("abort", onAbort);
        reject(error);
      },
    );
  });
}
/**
 * Public entry point: resolves a source URL to a (cached) ImageBitmap.
 *
 * @throws Error when the URL is blank or ImageBitmap is unsupported.
 * @throws DOMException (AbortError) when options.signal aborts first.
 */
export async function loadSourceBitmap(
  sourceUrl: string,
  options: LoadSourceBitmapOptions = {},
): Promise<ImageBitmap> {
  if (!sourceUrl || !sourceUrl.trim()) {
    throw new Error("Render sourceUrl is required.");
  }
  if (typeof createImageBitmap !== "function") {
    throw new Error("ImageBitmap is not available in this environment.");
  }
  throwIfAborted(options.signal);
  const shared = getOrCreateSourceBitmapPromise(sourceUrl);
  return await awaitWithLocalAbort(shared, options.signal);
}
/**
 * Allocates a drawing surface: an HTMLCanvasElement in the DOM, otherwise an
 * OffscreenCanvas when available.
 *
 * @throws Error when neither canvas flavor exists in this environment.
 */
function createWorkingCanvas(
  width: number,
  height: number,
): HTMLCanvasElement | OffscreenCanvas {
  if (typeof document !== "undefined") {
    const element = document.createElement("canvas");
    element.width = width;
    element.height = height;
    return element;
  }
  if (typeof OffscreenCanvas === "undefined") {
    throw new Error("Canvas rendering is not available in this environment.");
  }
  return new OffscreenCanvas(width, height);
}
/**
 * Maps a mixer blend mode to the matching canvas composite operation;
 * "normal" becomes canvas "source-over", the rest share their names.
 */
function mixerBlendModeToCompositeOperation(
  blendMode: RenderSourceComposition["blendMode"],
): GlobalCompositeOperation {
  return blendMode === "normal" ? "source-over" : blendMode;
}
/**
 * Converts a 0-100 percentage opacity into a 0-1 alpha, clamping the range
 * and treating non-finite input (NaN, Infinity) as fully opaque.
 */
function normalizeCompositionOpacity(value: number): number {
  if (!Number.isFinite(value)) {
    return 1;
  }
  const clamped = Math.min(100, Math.max(0, value));
  return clamped / 100;
}
/**
 * Returns `value` when it is a finite number, otherwise `fallback`.
 */
function normalizeRatio(value: number, fallback: number): number {
  return Number.isFinite(value) ? value : fallback;
}
/**
 * Clamps a mixer overlay's placement ratios to sane bounds: position within
 * [0, 0.9] and size within [0.1, 1], never extending past the base's far
 * edge. Non-finite inputs fall back to position 0 and full size.
 */
function normalizeMixerRect(source: RenderSourceComposition): {
  x: number;
  y: number;
  width: number;
  height: number;
} {
  const clampPosition = (value: number): number =>
    Math.max(0, Math.min(0.9, normalizeRatio(value, 0)));
  const clampSize = (value: number, remaining: number): number =>
    Math.max(0.1, Math.min(1, normalizeRatio(value, 1), remaining));
  const x = clampPosition(source.overlayX);
  const y = clampPosition(source.overlayY);
  const width = clampSize(source.overlayWidth, 1 - x);
  const height = clampSize(source.overlayHeight, 1 - y);
  return { x, y, width, height };
}
/**
 * Computes the crop of a source image that mimics CSS `object-fit: cover`
 * for the given destination: the largest centered region whose aspect ratio
 * matches the destination's. Degenerate sizes or non-finite aspect ratios
 * yield the uncropped source.
 */
function computeObjectCoverSourceRect(args: {
  sourceWidth: number;
  sourceHeight: number;
  destinationWidth: number;
  destinationHeight: number;
}): {
  sourceX: number;
  sourceY: number;
  sourceWidth: number;
  sourceHeight: number;
} {
  const { sourceWidth, sourceHeight, destinationWidth, destinationHeight } = args;
  const uncropped = {
    sourceX: 0,
    sourceY: 0,
    sourceWidth,
    sourceHeight,
  };
  const hasArea =
    sourceWidth > 0 &&
    sourceHeight > 0 &&
    destinationWidth > 0 &&
    destinationHeight > 0;
  if (!hasArea) {
    return uncropped;
  }
  const sourceAspect = sourceWidth / sourceHeight;
  const destinationAspect = destinationWidth / destinationHeight;
  if (!Number.isFinite(sourceAspect) || !Number.isFinite(destinationAspect)) {
    return uncropped;
  }
  if (sourceAspect > destinationAspect) {
    // Source is wider than the destination: crop the sides, keep full height.
    const croppedWidth = sourceHeight * destinationAspect;
    return {
      sourceX: (sourceWidth - croppedWidth) / 2,
      sourceY: 0,
      sourceWidth: croppedWidth,
      sourceHeight,
    };
  }
  // Source is taller (or equal): crop top and bottom, keep full width.
  const croppedHeight = sourceWidth / destinationAspect;
  return {
    sourceX: 0,
    sourceY: (sourceHeight - croppedHeight) / 2,
    sourceWidth,
    sourceHeight: croppedHeight,
  };
}
/**
 * Renders a mixer composition: draws the base image full-frame, then blends
 * the overlay (cropped object-fit-cover style) into its normalized
 * rectangle using the requested blend mode and opacity, and snapshots the
 * canvas as a new ImageBitmap.
 *
 * @throws Error when a 2D context cannot be created.
 */
async function loadMixerCompositionBitmap(
  sourceComposition: RenderSourceComposition,
  signal?: AbortSignal,
): Promise<ImageBitmap> {
  const [base, overlay] = await Promise.all([
    loadSourceBitmap(sourceComposition.baseUrl, { signal }),
    loadSourceBitmap(sourceComposition.overlayUrl, { signal }),
  ]);
  throwIfAborted(signal);
  const canvas = createWorkingCanvas(base.width, base.height);
  const context = canvas.getContext("2d", { willReadFrequently: true });
  if (!context) {
    throw new Error("Render composition could not create a 2D context.");
  }
  // Base layer fills the whole canvas.
  context.clearRect(0, 0, base.width, base.height);
  context.drawImage(base, 0, 0, base.width, base.height);
  // Overlay rectangle in canvas pixels, from normalized (0..1) ratios.
  const rect = normalizeMixerRect(sourceComposition);
  const destX = rect.x * base.width;
  const destY = rect.y * base.height;
  const destW = rect.width * base.width;
  const destH = rect.height * base.height;
  const crop = computeObjectCoverSourceRect({
    sourceWidth: overlay.width,
    sourceHeight: overlay.height,
    destinationWidth: destW,
    destinationHeight: destH,
  });
  context.globalCompositeOperation = mixerBlendModeToCompositeOperation(
    sourceComposition.blendMode,
  );
  context.globalAlpha = normalizeCompositionOpacity(sourceComposition.opacity);
  context.drawImage(
    overlay,
    crop.sourceX,
    crop.sourceY,
    crop.sourceWidth,
    crop.sourceHeight,
    destX,
    destY,
    destW,
    destH,
  );
  // Restore default compositing state before snapshotting.
  context.globalCompositeOperation = "source-over";
  context.globalAlpha = 1;
  return await createImageBitmap(canvas);
}
/**
 * Resolves a render source — either a direct URL or a mixer composition —
 * into an ImageBitmap.
 *
 * @throws Error for unsupported composition kinds or a missing source.
 */
export async function loadRenderSourceBitmap(
  options: LoadRenderSourceBitmapOptions,
): Promise<ImageBitmap> {
  const { sourceComposition, sourceUrl, signal } = options;
  if (sourceComposition) {
    if (sourceComposition.kind !== "mixer") {
      throw new Error(`Unsupported source composition '${sourceComposition.kind}'.`);
    }
    return await loadMixerCompositionBitmap(sourceComposition, signal);
  }
  if (!sourceUrl) {
    throw new Error("Render source is required.");
  }
  return await loadSourceBitmap(sourceUrl, { signal });
}