Implement internationalization support across components

- Integrated `next-intl` for toast messages and locale handling in various components, including `Providers`, `CanvasUserMenu`, and `CreditOverview`.
- Replaced hard-coded UI strings with `next-intl` translation keys so all user-facing text can be localized.
- Updated `RootLayout` to dynamically set the language attribute based on the user's locale.
- Ensured consistent user feedback through localized toast messages in actions such as sign-out, canvas operations, and billing notifications.
This commit is contained in:
2026-04-01 18:16:52 +02:00
parent 6ce1d4a82e
commit 79d9092d43
44 changed files with 1385 additions and 507 deletions

View File

@@ -2,16 +2,16 @@
import { useCallback, useState } from "react";
import { useRouter } from "next/navigation";
import { useTranslations } from "next-intl";
import { Handle, Position, useReactFlow, type NodeProps, type Node } from "@xyflow/react";
import { useAction } from "convex/react";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
import BaseNodeWrapper from "./base-node-wrapper";
import { DEFAULT_MODEL_ID, getModel } from "@/lib/ai-models";
import { classifyError, type AiErrorCategory } from "@/lib/ai-errors";
import { classifyError, type ErrorType } from "@/lib/ai-errors";
import { DEFAULT_ASPECT_RATIO } from "@/lib/image-formats";
import { toast } from "@/lib/toast";
import { msg } from "@/lib/toast-messages";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import {
Loader2,
@@ -59,6 +59,7 @@ export default function AiImageNode({
data,
selected,
}: NodeProps<AiImageNode>) {
const t = useTranslations('toasts');
const nodeData = data as AiImageNodeData;
const { getEdges, getNode } = useReactFlow();
const { status: syncStatus } = useCanvasSync();
@@ -135,17 +136,17 @@ export default function AiImageNode({
aspectRatio: nodeData.aspectRatio ?? DEFAULT_ASPECT_RATIO,
}),
{
loading: msg.ai.generating.title,
success: msg.ai.generationQueued.title,
error: msg.ai.generationFailed.title,
loading: t('ai.generating'),
success: t('ai.generationQueued'),
error: t('ai.generationFailed'),
description: {
success: msg.ai.generationQueuedDesc,
error: msg.ai.creditsNotCharged,
success: t('ai.generationQueuedDesc'),
error: t('ai.creditsNotCharged'),
},
},
);
} catch (err) {
setLocalError(err instanceof Error ? err.message : msg.ai.generationFailed.title);
setLocalError(err instanceof Error ? err.message : t('ai.generationFailed'));
} finally {
setIsGenerating(false);
}
@@ -154,16 +155,16 @@ export default function AiImageNode({
const modelName =
getModel(nodeData.model ?? DEFAULT_MODEL_ID)?.name ?? "AI";
const renderErrorIcon = (category: AiErrorCategory) => {
switch (category) {
case "insufficient_credits":
const renderErrorIcon = (type: ErrorType) => {
switch (type) {
case "insufficientCredits":
return <Coins className="h-8 w-8 text-amber-500" />;
case "rate_limited":
case "rateLimited":
case "timeout":
return <Clock3 className="h-8 w-8 text-amber-500" />;
case "content_policy":
case "contentPolicy":
return <ShieldAlert className="h-8 w-8 text-destructive" />;
case "network":
case "networkError":
return <WifiOff className="h-8 w-8 text-destructive" />;
default:
return <AlertCircle className="h-8 w-8 text-destructive" />;
@@ -226,15 +227,10 @@ export default function AiImageNode({
{status === "error" && !isLoading && (
<div className="absolute inset-0 z-10 flex flex-col items-center justify-center gap-3 bg-muted">
{renderErrorIcon(classifiedError.category)}
{renderErrorIcon(classifiedError.type)}
<p className="px-4 text-center text-xs font-medium text-destructive">
{classifiedError.message}
{classifiedError.rawMessage}
</p>
{classifiedError.detail && (
<p className="px-6 text-center text-[10px] text-muted-foreground">
{classifiedError.detail}
</p>
)}
{classifiedError.creditsNotCharged && (
<p className="px-6 text-center text-[10px] text-muted-foreground">
Credits not charged

View File

@@ -3,13 +3,13 @@
import { useCallback, useState } from "react";
import { Handle, Position, type NodeProps } from "@xyflow/react";
import { useAction } from "convex/react";
import { useTranslations } from "next-intl";
import { Download, Loader2 } from "lucide-react";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
import { useDebouncedCallback } from "@/hooks/use-debounced-callback";
import BaseNodeWrapper from "./base-node-wrapper";
import { toast } from "@/lib/toast";
import { msg } from "@/lib/toast-messages";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
interface FrameNodeData {
@@ -19,6 +19,7 @@ interface FrameNodeData {
}
export default function FrameNode({ id, data, selected, width, height }: NodeProps) {
const t = useTranslations('toasts');
const nodeData = data as FrameNodeData;
const { queueNodeDataUpdate, status } = useCanvasSync();
const exportFrame = useAction(api.export.exportFrame);
@@ -54,23 +55,23 @@ export default function FrameNode({ id, data, selected, width, height }: NodePro
try {
const result = await exportFrame({ frameNodeId: id as Id<"nodes"> });
const fileLabel = `${label.trim() || "frame"}.png`;
toast.action(msg.export.frameExported.title, {
toast.action(t('export.frameExported'), {
description: fileLabel,
label: msg.export.download,
label: t('export.download'),
onClick: () => {
window.open(result.url, "_blank", "noopener,noreferrer");
},
successLabel: msg.export.downloaded,
successLabel: t('export.downloaded'),
type: "success",
});
} catch (error) {
const m = error instanceof Error ? error.message : "";
if (m.includes("No images found")) {
toast.error(msg.export.frameEmpty.title, msg.export.frameEmpty.desc);
setExportError(msg.export.frameEmpty.desc);
toast.error(t('export.frameEmptyTitle'), t('export.frameEmptyDesc'));
setExportError(t('export.frameEmptyDesc'));
} else {
toast.error(msg.export.exportFailed.title, m || undefined);
setExportError(m || msg.export.exportFailed.title);
toast.error(t('export.exportFailed'), m || undefined);
setExportError(m || t('export.exportFailed'));
}
} finally {
setIsExporting(false);

View File

@@ -9,11 +9,11 @@ import {
type DragEvent,
} from "react";
import { Handle, Position, type NodeProps, type Node } from "@xyflow/react";
import { useTranslations } from "next-intl";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
import BaseNodeWrapper from "./base-node-wrapper";
import { toast } from "@/lib/toast";
import { msg } from "@/lib/toast-messages";
import { computeMediaNodeSize } from "@/lib/canvas-utils";
import { useCanvasSync } from "@/components/canvas/canvas-sync-context";
import { useMutation } from "convex/react";
@@ -73,6 +73,7 @@ export default function ImageNode({
width,
height,
}: NodeProps<ImageNode>) {
const t = useTranslations('toasts');
const generateUploadUrl = useMutation(api.storage.generateUploadUrl);
const { queueNodeDataUpdate, queueNodeResize, status } = useCanvasSync();
const fileInputRef = useRef<HTMLInputElement>(null);
@@ -121,17 +122,17 @@ export default function ImageNode({
const uploadFile = useCallback(
async (file: File) => {
if (!ALLOWED_IMAGE_TYPES.has(file.type)) {
const { title, desc } = msg.canvas.uploadFormatError(
file.type || file.name.split(".").pop() || "—",
toast.error(
t('canvas.uploadFailed'),
t('canvas.uploadFormatError', { format: file.type || file.name.split(".").pop() || "—" }),
);
toast.error(title, desc);
return;
}
if (file.size > MAX_IMAGE_BYTES) {
const { title, desc } = msg.canvas.uploadSizeError(
Math.round(MAX_IMAGE_BYTES / (1024 * 1024)),
toast.error(
t('canvas.uploadFailed'),
t('canvas.uploadSizeError', { maxMb: Math.round(MAX_IMAGE_BYTES / (1024 * 1024)) }),
);
toast.error(title, desc);
return;
}
if (status.isOffline) {
@@ -188,11 +189,11 @@ export default function ImageNode({
});
}
toast.success(msg.canvas.imageUploaded.title);
toast.success(t('canvas.imageUploaded'));
} catch (err) {
console.error("Upload failed:", err);
toast.error(
msg.canvas.uploadFailed.title,
t('canvas.uploadFailed'),
err instanceof Error ? err.message : undefined,
);
} finally {

View File

@@ -10,6 +10,7 @@ import {
type Node,
} from "@xyflow/react";
import { useAction } from "convex/react";
import { useTranslations } from "next-intl";
import { useAuthQuery } from "@/hooks/use-auth-query";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
@@ -38,7 +39,6 @@ import {
import { Sparkles, Loader2, Coins } from "lucide-react";
import { useRouter } from "next/navigation";
import { toast } from "@/lib/toast";
import { msg } from "@/lib/toast-messages";
import { classifyError } from "@/lib/ai-errors";
type PromptNodeData = {
@@ -57,6 +57,7 @@ export default function PromptNode({
data,
selected,
}: NodeProps<PromptNode>) {
const t = useTranslations('toasts');
const nodeData = data as PromptNodeData;
const router = useRouter();
const { getEdges, getNode } = useReactFlow();
@@ -166,13 +167,9 @@ export default function PromptNode({
}
if (availableCredits !== null && !hasEnoughCredits) {
const { title, desc } = msg.ai.insufficientCredits(
creditCost,
availableCredits,
);
toast.action(title, {
description: desc,
label: msg.billing.topUp,
toast.action(t('ai.insufficientCreditsTitle'), {
description: t('ai.insufficientCreditsDesc', { needed: creditCost, available: availableCredits }),
label: t('billing.topUp'),
onClick: () => router.push("/settings/billing"),
type: "warning",
});
@@ -256,30 +253,30 @@ export default function PromptNode({
aspectRatio,
}),
{
loading: msg.ai.generating.title,
success: msg.ai.generationQueued.title,
error: msg.ai.generationFailed.title,
loading: t('ai.generating'),
success: t('ai.generationQueued'),
error: t('ai.generationFailed'),
description: {
success: msg.ai.generationQueuedDesc,
error: msg.ai.creditsNotCharged,
success: t('ai.generationQueuedDesc'),
error: t('ai.creditsNotCharged'),
},
},
);
} catch (err) {
const classified = classifyError(err);
if (classified.category === "daily_cap") {
if (classified.type === "dailyCap") {
toast.error(
msg.billing.dailyLimitReached(0).title,
t('billing.dailyLimitReachedTitle'),
"Morgen stehen wieder Generierungen zur Verfügung.",
);
} else if (classified.category === "concurrency") {
} else if (classified.type === "concurrency") {
toast.warning(
msg.ai.concurrentLimitReached.title,
msg.ai.concurrentLimitReached.desc,
t('ai.concurrentLimitReachedTitle'),
t('ai.concurrentLimitReachedDesc'),
);
} else {
setError(classified.message || msg.ai.generationFailed.title);
setError(classified.rawMessage || t('ai.generationFailed'));
}
} finally {
setIsGenerating(false);