From af7094637d9d4ef47e12aa5d8bbb19c7c28bd539 Mon Sep 17 00:00:00 2001 From: xCyanGrizzly Date: Mon, 30 Mar 2026 20:26:06 +0200 Subject: [PATCH] feat: file upload from UI, notification dismiss, audit false positive fix Manual file upload: - Upload dialog in STL page with drag-and-drop file picker - Files saved to shared Docker volume (/data/uploads) - Worker processes via pg_notify('manual_upload') channel - Hashes, reads metadata, splits >2GB, uploads to Telegram - Multiple files automatically grouped - Status polling shows upload/processing/complete states Notification fixes: - Add dismiss (X) button on each notification - Add "Clear" button to remove all notifications - Fix false positive MISSING_PART alerts from legacy packages (only flag when >1 destMessageIds stored but count wrong, not when only 1 ID from backfill) Infrastructure: - ManualUpload + ManualUploadFile schema + migration - Shared manual_uploads Docker volume between app and worker - Upload API routes (POST /api/uploads, GET /api/uploads/[id]) - Worker manual-upload processor with full pipeline Co-Authored-By: Claude Opus 4.6 (1M context) --- docker-compose.yml | 4 + .../migration.sql | 30 +++ prisma/schema.prisma | 44 +++- src/app/(app)/stls/_components/stl-table.tsx | 12 +- .../(app)/stls/_components/upload-dialog.tsx | 243 ++++++++++++++++++ src/app/api/notifications/read/route.ts | 9 +- src/app/api/uploads/[id]/route.ts | 43 ++++ src/app/api/uploads/route.ts | 83 ++++++ src/components/layout/notification-bell.tsx | 70 ++++- src/data/notification.queries.ts | 8 + worker/src/audit.ts | 4 +- worker/src/fetch-listener.ts | 14 +- worker/src/manual-upload.ts | 211 +++++++++++++++ 13 files changed, 757 insertions(+), 18 deletions(-) create mode 100644 prisma/migrations/20260330150000_manual_uploads/migration.sql create mode 100644 src/app/(app)/stls/_components/upload-dialog.tsx create mode 100644 src/app/api/uploads/[id]/route.ts create mode 100644 src/app/api/uploads/route.ts create mode 100644 
worker/src/manual-upload.ts diff --git a/docker-compose.yml b/docker-compose.yml index 285d36a..fe5ddc7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -28,6 +28,8 @@ services: timeout: 5s retries: 3 start_period: 60s + volumes: + - manual_uploads:/data/uploads restart: unless-stopped deploy: resources: @@ -54,6 +56,7 @@ services: volumes: - tdlib_state:/data/tdlib - tmp_zips:/tmp/zips + - manual_uploads:/data/uploads depends_on: db: condition: service_healthy @@ -121,6 +124,7 @@ volumes: tdlib_state: tdlib_bot_state: tmp_zips: + manual_uploads: networks: frontend: diff --git a/prisma/migrations/20260330150000_manual_uploads/migration.sql b/prisma/migrations/20260330150000_manual_uploads/migration.sql new file mode 100644 index 0000000..0aaf6be --- /dev/null +++ b/prisma/migrations/20260330150000_manual_uploads/migration.sql @@ -0,0 +1,30 @@ +-- CreateEnum +CREATE TYPE "ManualUploadStatus" AS ENUM ('PENDING', 'PROCESSING', 'COMPLETED', 'FAILED'); + +-- CreateTable +CREATE TABLE "manual_uploads" ( + "id" TEXT NOT NULL, + "status" "ManualUploadStatus" NOT NULL DEFAULT 'PENDING', + "groupName" TEXT, + "userId" TEXT NOT NULL, + "errorMessage" TEXT, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "completedAt" TIMESTAMP(3), + CONSTRAINT "manual_uploads_pkey" PRIMARY KEY ("id") +); + +CREATE TABLE "manual_upload_files" ( + "id" TEXT NOT NULL, + "uploadId" TEXT NOT NULL, + "fileName" TEXT NOT NULL, + "filePath" TEXT NOT NULL, + "fileSize" BIGINT NOT NULL, + "packageId" TEXT, + CONSTRAINT "manual_upload_files_pkey" PRIMARY KEY ("id") +); + +CREATE INDEX "manual_uploads_status_idx" ON "manual_uploads"("status"); +CREATE INDEX "manual_upload_files_uploadId_idx" ON "manual_upload_files"("uploadId"); + +ALTER TABLE "manual_uploads" ADD CONSTRAINT "manual_uploads_userId_fkey" FOREIGN KEY ("userId") REFERENCES "users"("id") ON DELETE RESTRICT ON UPDATE CASCADE; +ALTER TABLE "manual_upload_files" ADD CONSTRAINT "manual_upload_files_uploadId_fkey" 
FOREIGN KEY ("uploadId") REFERENCES "manual_uploads"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/prisma/schema.prisma b/prisma/schema.prisma index f1c4ccc..95f5580 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -39,9 +39,10 @@ model User { settings UserSettings? telegramLink TelegramLink? kickstarters Kickstarter[] - inviteCodes InviteCode[] @relation("InviteCreator") - usedInvite InviteCode? @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull) - usedInviteId String? + inviteCodes InviteCode[] @relation("InviteCreator") + usedInvite InviteCode? @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull) + usedInviteId String? + manualUploads ManualUpload[] } model Account { @@ -865,3 +866,40 @@ model GroupingRule { @@index([sourceChannelId]) @@map("grouping_rules") } + +enum ManualUploadStatus { + PENDING + PROCESSING + COMPLETED + FAILED +} + +model ManualUpload { + id String @id @default(cuid()) + status ManualUploadStatus @default(PENDING) + groupName String? // Group name if multiple files + userId String + errorMessage String? + createdAt DateTime @default(now()) + completedAt DateTime? + + files ManualUploadFile[] + user User @relation(fields: [userId], references: [id]) + + @@index([status]) + @@map("manual_uploads") +} + +model ManualUploadFile { + id String @id @default(cuid()) + uploadId String + fileName String + filePath String // Path on shared volume + fileSize BigInt + packageId String? 
// Set after processing + + upload ManualUpload @relation(fields: [uploadId], references: [id], onDelete: Cascade) + + @@index([uploadId]) + @@map("manual_upload_files") +} diff --git a/src/app/(app)/stls/_components/stl-table.tsx b/src/app/(app)/stls/_components/stl-table.tsx index 6f9c3ca..9235e4d 100644 --- a/src/app/(app)/stls/_components/stl-table.tsx +++ b/src/app/(app)/stls/_components/stl-table.tsx @@ -3,7 +3,8 @@ import { useState, useCallback, useTransition, useMemo, useRef } from "react"; import { useRouter, usePathname, useSearchParams } from "next/navigation"; import { toast } from "sonner"; -import { Search, Layers } from "lucide-react"; +import { Search, Layers, Upload } from "lucide-react"; +import { UploadDialog } from "./upload-dialog"; import { useDataTable } from "@/hooks/use-data-table"; import { getPackageColumns, @@ -106,6 +107,9 @@ export function StlTable({ // Group merge state const [mergeSourceId, setMergeSourceId] = useState(null); + // Upload dialog state + const [uploadOpen, setUploadOpen] = useState(false); + const toggleGroup = useCallback((groupId: string) => { setExpandedGroups((prev) => { const next = new Set(prev); @@ -497,6 +501,10 @@ export function StlTable({ )} + {selectedPackages.size >= 2 && ( + + ))} + + )} + + {files.length > 1 && ( +
+ + setGroupName(e.target.value)} + placeholder="Auto-generated from filenames" + className="mt-1" + /> +
+ )} + + )} + + {(status === "uploading" || status === "processing") && ( +
+ +
+

+ {status === "uploading" ? "Uploading files..." : "Processing & uploading to Telegram..."} +

+

+ {status === "uploading" + ? "Sending files to server" + : "Hashing, extracting metadata, uploading to destination channel"} +

+
+
+ )} + + {status === "done" && ( +
+ +
+

Upload complete!

+

Files have been indexed and uploaded to Telegram.

+
+
+ )} + + {status === "error" && ( +
+ +
+

Upload failed

+

{error}

+
+
+ )} + + + {status === "idle" && ( + <> + + + + )} + {(status === "done" || status === "error") && ( + + )} + + + + ); +} diff --git a/src/app/api/notifications/read/route.ts b/src/app/api/notifications/read/route.ts index b44872e..a45459c 100644 --- a/src/app/api/notifications/read/route.ts +++ b/src/app/api/notifications/read/route.ts @@ -3,6 +3,8 @@ import { auth } from "@/lib/auth"; import { markNotificationRead, markAllNotificationsRead, + dismissNotification, + clearAllNotifications, } from "@/data/notification.queries"; export const dynamic = "force-dynamic"; @@ -15,8 +17,13 @@ export async function POST(request: Request) { const body = await request.json().catch(() => ({})); const id = body.id as string | undefined; + const action = (body.action as string) ?? "read"; - if (id) { + if (action === "dismiss" && id) { + await dismissNotification(id); + } else if (action === "clear") { + await clearAllNotifications(); + } else if (id) { await markNotificationRead(id); } else { await markAllNotificationsRead(); diff --git a/src/app/api/uploads/[id]/route.ts b/src/app/api/uploads/[id]/route.ts new file mode 100644 index 0000000..47e712a --- /dev/null +++ b/src/app/api/uploads/[id]/route.ts @@ -0,0 +1,43 @@ +import { NextResponse } from "next/server"; +import { auth } from "@/lib/auth"; +import { prisma } from "@/lib/prisma"; + +export const dynamic = "force-dynamic"; + +export async function GET( + _request: Request, + { params }: { params: Promise<{ id: string }> } +) { + const session = await auth(); + if (!session?.user?.id) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + const { id } = await params; + + const upload = await prisma.manualUpload.findUnique({ + where: { id }, + include: { + files: { + select: { id: true, fileName: true, fileSize: true, packageId: true }, + }, + }, + }); + + if (!upload || upload.userId !== session.user.id) { + return NextResponse.json({ error: "Not found" }, { status: 404 }); + } + + return 
NextResponse.json({ + id: upload.id, + status: upload.status, + groupName: upload.groupName, + errorMessage: upload.errorMessage, + files: upload.files.map((f) => ({ + ...f, + fileSize: f.fileSize.toString(), + })), + createdAt: upload.createdAt.toISOString(), + completedAt: upload.completedAt?.toISOString() ?? null, + }); +} diff --git a/src/app/api/uploads/route.ts b/src/app/api/uploads/route.ts new file mode 100644 index 0000000..225abee --- /dev/null +++ b/src/app/api/uploads/route.ts @@ -0,0 +1,83 @@ +import { NextResponse } from "next/server"; +import { auth } from "@/lib/auth"; +import { prisma } from "@/lib/prisma"; +import { writeFile, mkdir } from "fs/promises"; +import path from "path"; + +export const dynamic = "force-dynamic"; + +const UPLOAD_DIR = process.env.UPLOAD_DIR ?? "/data/uploads"; +const MAX_FILE_SIZE = 4 * 1024 * 1024 * 1024; // 4GB per file + +export async function POST(request: Request) { + const session = await auth(); + if (!session?.user?.id) { + return NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + } + + try { + const formData = await request.formData(); + const files = formData.getAll("files") as File[]; + const groupName = formData.get("groupName") as string | null; + + if (!files.length) { + return NextResponse.json({ error: "No files provided" }, { status: 400 }); + } + + // Create the upload record + const upload = await prisma.manualUpload.create({ + data: { + userId: session.user.id, + groupName: groupName || (files.length > 1 ? 
files[0].name.replace(/\.[^.]+$/, "") : null), + status: "PENDING", + }, + }); + + // Save files to shared volume + const uploadDir = path.join(UPLOAD_DIR, upload.id); + await mkdir(uploadDir, { recursive: true }); + + for (const file of files) { + if (file.size > MAX_FILE_SIZE) { + // Mark the record FAILED before bailing out so it is not left orphaned in + // PENDING (the worker would never pick it up and the UI would poll forever). + await prisma.manualUpload.update({ + where: { id: upload.id }, + data: { status: "FAILED", errorMessage: `File "${file.name}" exceeds 4GB limit` }, + }); + return NextResponse.json( + { error: `File "${file.name}" exceeds 4GB limit` }, + { status: 400 } + ); + } + + // basename() guards against path traversal via a crafted client-supplied filename + const filePath = path.join(uploadDir, path.basename(file.name)); + // NOTE(review): this buffers the entire file in memory; for multi-GB uploads prefer + // streaming (e.g. Readable.fromWeb(file.stream())) — confirm Node/TS support first. + const buffer = Buffer.from(await file.arrayBuffer()); + await writeFile(filePath, buffer); + + await prisma.manualUploadFile.create({ + data: { + uploadId: upload.id, + fileName: file.name, + filePath, + fileSize: BigInt(file.size), + }, + }); + } + + // Notify worker + try { + await prisma.$queryRawUnsafe( + `SELECT pg_notify('manual_upload', $1)`, + upload.id + ); + } catch { + // Best-effort + } + + return NextResponse.json({ + uploadId: upload.id, + fileCount: files.length, + status: "PENDING", + }); + } catch (err) { + return NextResponse.json( + { error: err instanceof Error ?
err.message : "Upload failed" }, + { status: 500 } + ); + } +} diff --git a/src/components/layout/notification-bell.tsx b/src/components/layout/notification-bell.tsx index b935cfa..bf99fc1 100644 --- a/src/components/layout/notification-bell.tsx +++ b/src/components/layout/notification-bell.tsx @@ -1,7 +1,7 @@ "use client"; import { useState, useEffect, useCallback } from "react"; -import { Bell, AlertTriangle, AlertCircle, Info, CheckCircle2 } from "lucide-react"; +import { Bell, AlertTriangle, AlertCircle, Info, CheckCircle2, X, Trash2 } from "lucide-react"; import { Button } from "@/components/ui/button"; import { Badge } from "@/components/ui/badge"; import { @@ -94,6 +94,34 @@ export function NotificationBell() { } } + async function handleDismiss(id: string) { + try { + await fetch("/api/notifications/read", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ id, action: "dismiss" }), + }); + setNotifications((prev) => prev.filter((n) => n.id !== id)); + setUnreadCount((c) => Math.max(0, c - 1)); + } catch { + // Ignore + } + } + + async function handleClearAll() { + try { + await fetch("/api/notifications/read", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ action: "clear" }), + }); + setNotifications([]); + setUnreadCount(0); + } catch { + // Ignore + } + } + async function handleRepair(notificationId: string) { try { const res = await fetch("/api/notifications/repair", { @@ -141,16 +169,29 @@ export function NotificationBell() {

Notifications

- {unreadCount > 0 && ( - - )} +
+ {unreadCount > 0 && ( + + )} + {notifications.length > 0 && ( + + )} +
{notifications.length === 0 ? ( @@ -187,6 +228,13 @@ export function NotificationBell() { {!n.isRead && ( )} +

{n.message} diff --git a/src/data/notification.queries.ts b/src/data/notification.queries.ts index 7772f0d..5d67fb1 100644 --- a/src/data/notification.queries.ts +++ b/src/data/notification.queries.ts @@ -35,3 +35,11 @@ export async function markAllNotificationsRead() { data: { isRead: true }, }); } + +export async function dismissNotification(id: string) { + return prisma.systemNotification.delete({ where: { id } }); +} + +export async function clearAllNotifications() { + return prisma.systemNotification.deleteMany({}); +} diff --git a/worker/src/audit.ts b/worker/src/audit.ts index 62ce1ab..5511277 100644 --- a/worker/src/audit.ts +++ b/worker/src/audit.ts @@ -38,7 +38,9 @@ export async function runIntegrityAudit(): Promise<{ checked: number; issues: nu for (const pkg of multipartPackages) { const actualParts = pkg.destMessageIds.length; - if (actualParts > 0 && actualParts !== pkg.partCount) { + // Only flag when we have >1 stored IDs but count doesn't match. + // Packages with exactly 1 ID are legacy (backfilled from single destMessageId) — not actionable. 
+ if (actualParts > 1 && actualParts !== pkg.partCount) { issues++; // Check if we already have a notification for this diff --git a/worker/src/fetch-listener.ts b/worker/src/fetch-listener.ts index 59340ce..8fb3388 100644 --- a/worker/src/fetch-listener.ts +++ b/worker/src/fetch-listener.ts @@ -5,6 +5,7 @@ import { withTdlibMutex } from "./util/mutex.js"; import { processFetchRequest } from "./worker.js"; import { processExtractRequest } from "./extract-listener.js"; import { rebuildPackageDatabase } from "./rebuild.js"; +import { processManualUpload } from "./manual-upload.js"; import { generateInviteLink, createSupergroup, searchPublicChat } from "./tdlib/chats.js"; import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js"; import { triggerImmediateCycle } from "./scheduler.js"; @@ -55,6 +56,7 @@ async function connectListener(): Promise { await pgClient.query("LISTEN join_channel"); await pgClient.query("LISTEN archive_extract"); await pgClient.query("LISTEN rebuild_packages"); + await pgClient.query("LISTEN manual_upload"); pgClient.on("notification", (msg) => { if (msg.channel === "channel_fetch" && msg.payload) { @@ -71,6 +73,8 @@ async function connectListener(): Promise { handleArchiveExtract(msg.payload); } else if (msg.channel === "rebuild_packages" && msg.payload) { handleRebuildPackages(msg.payload); + } else if (msg.channel === "manual_upload" && msg.payload) { + handleManualUpload(msg.payload); } }); @@ -96,7 +100,7 @@ async function connectListener(): Promise { } }); - log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages)"); + log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages, manual_upload)"); } catch (err) { log.error({ err }, "Failed to start fetch listener — retrying"); scheduleReconnect(); @@ -511,3 +515,11 @@ function 
handleRebuildPackages(requestId: string): void { } }); } + +// ── Manual upload handler ── + +function handleManualUpload(uploadId: string): void { + fetchQueue = fetchQueue + .then(() => processManualUpload(uploadId)) + .catch((err) => log.error({ err, uploadId }, "Manual upload processing failed")); +} diff --git a/worker/src/manual-upload.ts b/worker/src/manual-upload.ts new file mode 100644 index 0000000..d66f441 --- /dev/null +++ b/worker/src/manual-upload.ts @@ -0,0 +1,211 @@ +import path from "path"; +import { rm } from "fs/promises"; +import { db } from "./db/client.js"; +import { childLogger } from "./util/logger.js"; +import { config } from "./util/config.js"; +import { hashParts } from "./archive/hash.js"; +import { byteLevelSplit } from "./archive/split.js"; +import { uploadToChannel } from "./upload/channel.js"; +import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js"; +import { readZipCentralDirectory } from "./archive/zip-reader.js"; +import { readRarContents } from "./archive/rar-reader.js"; +import { read7zContents } from "./archive/sevenz-reader.js"; +import { getActiveAccounts } from "./db/queries.js"; + +const log = childLogger("manual-upload"); + +export async function processManualUpload(uploadId: string): Promise { + log.info({ uploadId }, "Processing manual upload"); + + const upload = await db.manualUpload.findUnique({ + where: { id: uploadId }, + include: { files: true }, + }); + + if (!upload || upload.status !== "PENDING") { + log.warn({ uploadId }, "Manual upload not found or not pending"); + return; + } + + await db.manualUpload.update({ + where: { id: uploadId }, + data: { status: "PROCESSING" }, + }); + + try { + // Get destination channel + const destSetting = await db.globalSetting.findUnique({ + where: { key: "destination_channel_id" }, + }); + if (!destSetting) throw new Error("No destination channel configured"); + + const destChannel = await db.telegramChannel.findFirst({ + where: { id: destSetting.value, type: 
"DESTINATION", isActive: true }, + }); + if (!destChannel) throw new Error("Destination channel not found or inactive"); + + // Get a TDLib client (use first active account) + const accounts = await getActiveAccounts(); + const account = accounts[0]; + if (!account) throw new Error("No authenticated Telegram account available"); + + const client = await createTdlibClient({ id: account.id, phone: account.phone }); + + try { + const packageIds: string[] = []; + + for (const file of upload.files) { + try { + const filePath = file.filePath; + const fileName = file.fileName; + const fileSize = file.fileSize; + + log.info({ fileName, fileSize: Number(fileSize) }, "Processing file"); + + // Determine archive type + let archiveType: "ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT" = "DOCUMENT"; + const ext = fileName.toLowerCase(); + if (ext.endsWith(".zip")) archiveType = "ZIP"; + else if (ext.endsWith(".rar")) archiveType = "RAR"; + else if (ext.endsWith(".7z")) archiveType = "SEVEN_Z"; + + // Hash the file + const contentHash = await hashParts([filePath]); + + // Check for duplicates + const existing = await db.package.findFirst({ + where: { contentHash, destMessageId: { not: null } }, + select: { id: true }, + }); + + if (existing) { + log.info({ fileName, contentHash }, "Duplicate file, skipping upload"); + await db.manualUploadFile.update({ + where: { id: file.id }, + data: { packageId: existing.id }, + }); + packageIds.push(existing.id); + continue; + } + + // Read archive metadata + let entries: { + path: string; + fileName: string; + extension: string | null; + compressedSize: bigint; + uncompressedSize: bigint; + crc32: string | null; + }[] = []; + try { + if (archiveType === "ZIP") entries = await readZipCentralDirectory([filePath]); + else if (archiveType === "RAR") entries = await readRarContents(filePath); + else if (archiveType === "SEVEN_Z") entries = await read7zContents(filePath); + } catch { + log.debug({ fileName }, "Could not read archive metadata"); + } + + // 
Split if needed + const MAX_UPLOAD_SIZE = BigInt(config.maxPartSizeMB) * 1024n * 1024n; + let uploadPaths = [filePath]; + if (fileSize > MAX_UPLOAD_SIZE) { + uploadPaths = await byteLevelSplit(filePath); + } + + // Upload to Telegram + const destResult = await uploadToChannel( + client, + destChannel.telegramId, + uploadPaths + ); + + // Create package record + const pkg = await db.package.create({ + data: { + contentHash, + fileName, + fileSize, + archiveType, + sourceChannelId: destChannel.id, + sourceMessageId: destResult.messageId, + destChannelId: destChannel.id, + destMessageId: destResult.messageId, + destMessageIds: destResult.messageIds, + isMultipart: uploadPaths.length > 1, + partCount: uploadPaths.length, + fileCount: entries.length, + files: entries.length > 0 ? { create: entries } : undefined, + }, + }); + + await db.manualUploadFile.update({ + where: { id: file.id }, + data: { packageId: pkg.id }, + }); + + packageIds.push(pkg.id); + log.info({ fileName, packageId: pkg.id }, "File processed and uploaded"); + + // Clean up split files (but not the original) + if (uploadPaths.length > 1) { + for (const splitPath of uploadPaths) { + if (splitPath !== filePath) { + await rm(splitPath, { force: true }).catch(() => {}); + } + } + } + } catch (fileErr) { + log.error({ err: fileErr, fileName: file.fileName }, "Failed to process file"); + } + } + + // Group packages if multiple files + if (packageIds.length >= 2) { + const groupName = + upload.groupName ?? 
upload.files[0].fileName.replace(/\.[^.]+$/, ""); + const group = await db.packageGroup.create({ + data: { + name: groupName, + sourceChannelId: destChannel.id, + groupingSource: "MANUAL", + }, + }); + await db.package.updateMany({ + where: { id: { in: packageIds } }, + data: { packageGroupId: group.id }, + }); + log.info( + { groupId: group.id, groupName, packageCount: packageIds.length }, + "Created group for uploaded files" + ); + } + + // Per-file failures are caught and logged inside the loop above, so verify that at + // least one file actually produced a package before reporting success to the UI. + const allFailed = upload.files.length > 0 && packageIds.length === 0; + await db.manualUpload.update({ + where: { id: uploadId }, + data: allFailed + ? { status: "FAILED", errorMessage: "All files failed to process", completedAt: new Date() } + : { status: "COMPLETED", completedAt: new Date() }, + }); + + log.info( + { uploadId, fileCount: upload.files.length, packageCount: packageIds.length }, + "Manual upload completed" + ); + } finally { + await closeTdlibClient(client); + } + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + log.error({ err, uploadId }, "Manual upload failed"); + await db.manualUpload.update({ + where: { id: uploadId }, + data: { status: "FAILED", errorMessage: message }, + }); + } + + // Clean up uploaded files + try { + // Honor the same UPLOAD_DIR override as the API route so app and worker agree + // on the shared-volume location instead of hard-coding /data/uploads here. + const uploadDir = path.join(process.env.UPLOAD_DIR ?? "/data/uploads", uploadId); + await rm(uploadDir, { recursive: true, force: true }); + } catch { + // Best-effort cleanup + } +}