2 Commits

Author SHA1 Message Date
be4daf950b fix: correct User table reference in manual_uploads migration
All checks were successful
continuous-integration/drone/push Build is passing
The FK referenced "users" but the actual table is "User" (no @@map in Prisma schema).

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-30 21:29:55 +02:00
af7094637d feat: file upload from UI, notification dismiss, audit false positive fix
Manual file upload:
- Upload dialog in STL page with drag-and-drop file picker
- Files saved to shared Docker volume (/data/uploads)
- Worker processes via pg_notify('manual_upload') channel
- Hashes, reads metadata, splits >2GB, uploads to Telegram
- Multiple files automatically grouped
- Status polling shows upload/processing/complete states

Notification fixes:
- Add dismiss (X) button on each notification
- Add "Clear" button to remove all notifications
- Fix false positive MISSING_PART alerts from legacy packages
  (only flag when >1 destMessageIds stored but count wrong,
  not when only 1 ID from backfill)

Infrastructure:
- ManualUpload + ManualUploadFile schema + migration
- Shared manual_uploads Docker volume between app and worker
- Upload API routes (POST /api/uploads, GET /api/uploads/[id])
- Worker manual-upload processor with full pipeline

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-30 20:26:06 +02:00
13 changed files with 757 additions and 18 deletions

View File

@@ -28,6 +28,8 @@ services:
timeout: 5s timeout: 5s
retries: 3 retries: 3
start_period: 60s start_period: 60s
volumes:
- manual_uploads:/data/uploads
restart: unless-stopped restart: unless-stopped
deploy: deploy:
resources: resources:
@@ -54,6 +56,7 @@ services:
volumes: volumes:
- tdlib_state:/data/tdlib - tdlib_state:/data/tdlib
- tmp_zips:/tmp/zips - tmp_zips:/tmp/zips
- manual_uploads:/data/uploads
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy
@@ -121,6 +124,7 @@ volumes:
tdlib_state: tdlib_state:
tdlib_bot_state: tdlib_bot_state:
tmp_zips: tmp_zips:
manual_uploads:
networks: networks:
frontend: frontend:

View File

@@ -0,0 +1,30 @@
-- CreateEnum
-- Lifecycle states of a manual upload batch (mirrors the Prisma enum
-- ManualUploadStatus in schema.prisma).
CREATE TYPE "ManualUploadStatus" AS ENUM ('PENDING', 'PROCESSING', 'COMPLETED', 'FAILED');
-- CreateTable
-- One row per upload batch submitted from the UI; the worker advances
-- "status" while processing. Individual files live in "manual_upload_files".
CREATE TABLE "manual_uploads" (
    "id" TEXT NOT NULL,
    "status" "ManualUploadStatus" NOT NULL DEFAULT 'PENDING',
    "groupName" TEXT,
    "userId" TEXT NOT NULL,
    "errorMessage" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "completedAt" TIMESTAMP(3),
    CONSTRAINT "manual_uploads_pkey" PRIMARY KEY ("id")
);
-- One row per file in a batch; "filePath" points at the shared Docker volume
-- and "packageId" is filled in once the worker has created a Package for it.
CREATE TABLE "manual_upload_files" (
    "id" TEXT NOT NULL,
    "uploadId" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "filePath" TEXT NOT NULL,
    "fileSize" BIGINT NOT NULL,
    "packageId" TEXT,
    CONSTRAINT "manual_upload_files_pkey" PRIMARY KEY ("id")
);
-- The worker polls/filters by status; the status API joins files by uploadId.
CREATE INDEX "manual_uploads_status_idx" ON "manual_uploads"("status");
CREATE INDEX "manual_upload_files_uploadId_idx" ON "manual_upload_files"("uploadId");
-- Note: the user table is "User" (the Prisma model has no @@map), not "users".
ALTER TABLE "manual_uploads" ADD CONSTRAINT "manual_uploads_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- Deleting a batch removes its file rows automatically.
ALTER TABLE "manual_upload_files" ADD CONSTRAINT "manual_upload_files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "manual_uploads"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -42,6 +42,7 @@ model User {
inviteCodes InviteCode[] @relation("InviteCreator") inviteCodes InviteCode[] @relation("InviteCreator")
usedInvite InviteCode? @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull) usedInvite InviteCode? @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull)
usedInviteId String? usedInviteId String?
manualUploads ManualUpload[]
} }
model Account { model Account {
@@ -865,3 +866,40 @@ model GroupingRule {
@@index([sourceChannelId]) @@index([sourceChannelId])
@@map("grouping_rules") @@map("grouping_rules")
} }
// Lifecycle states of a manual (UI-initiated) upload batch.
enum ManualUploadStatus {
  PENDING
  PROCESSING
  COMPLETED
  FAILED
}
// A batch of files uploaded from the UI. The worker is signalled via
// pg_notify('manual_upload') and advances status as it processes the batch.
model ManualUpload {
  id           String             @id @default(cuid())
  status       ManualUploadStatus @default(PENDING)
  groupName    String?            // Group name if multiple files
  userId       String             // Uploader (FK to User)
  errorMessage String?            // Set when processing fails
  createdAt    DateTime           @default(now())
  completedAt  DateTime?          // Set when status reaches COMPLETED
  files        ManualUploadFile[]
  user         User               @relation(fields: [userId], references: [id])
  @@index([status])
  @@map("manual_uploads")
}
// One file within a batch; deleted along with its parent batch (Cascade).
model ManualUploadFile {
  id        String  @id @default(cuid())
  uploadId  String
  fileName  String
  filePath  String  // Path on shared volume
  fileSize  BigInt
  packageId String  ? // Set after processing
  upload    ManualUpload @relation(fields: [uploadId], references: [id], onDelete: Cascade)
  @@index([uploadId])
  @@map("manual_upload_files")
}

View File

@@ -3,7 +3,8 @@
import { useState, useCallback, useTransition, useMemo, useRef } from "react"; import { useState, useCallback, useTransition, useMemo, useRef } from "react";
import { useRouter, usePathname, useSearchParams } from "next/navigation"; import { useRouter, usePathname, useSearchParams } from "next/navigation";
import { toast } from "sonner"; import { toast } from "sonner";
import { Search, Layers } from "lucide-react"; import { Search, Layers, Upload } from "lucide-react";
import { UploadDialog } from "./upload-dialog";
import { useDataTable } from "@/hooks/use-data-table"; import { useDataTable } from "@/hooks/use-data-table";
import { import {
getPackageColumns, getPackageColumns,
@@ -106,6 +107,9 @@ export function StlTable({
// Group merge state // Group merge state
const [mergeSourceId, setMergeSourceId] = useState<string | null>(null); const [mergeSourceId, setMergeSourceId] = useState<string | null>(null);
// Upload dialog state
const [uploadOpen, setUploadOpen] = useState(false);
const toggleGroup = useCallback((groupId: string) => { const toggleGroup = useCallback((groupId: string) => {
setExpandedGroups((prev) => { setExpandedGroups((prev) => {
const next = new Set(prev); const next = new Set(prev);
@@ -497,6 +501,10 @@ export function StlTable({
</Select> </Select>
)} )}
<DataTableViewOptions table={table} /> <DataTableViewOptions table={table} />
<Button variant="outline" size="sm" className="h-9" onClick={() => setUploadOpen(true)}>
<Upload className="mr-2 h-4 w-4" />
Upload Files
</Button>
{selectedPackages.size >= 2 && ( {selectedPackages.size >= 2 && (
<Button <Button
variant="outline" variant="outline"
@@ -587,6 +595,8 @@ export function StlTable({
</DialogContent> </DialogContent>
</Dialog> </Dialog>
<UploadDialog open={uploadOpen} onOpenChange={setUploadOpen} />
{/* Hidden file input for group preview upload (Task 12) */} {/* Hidden file input for group preview upload (Task 12) */}
<input <input
ref={previewInputRef} ref={previewInputRef}

View File

@@ -0,0 +1,243 @@
"use client";
import { useState, useRef, useTransition, useEffect } from "react";
import { Upload, File, X, Loader2, CheckCircle2, AlertCircle } from "lucide-react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
interface UploadDialogProps {
open: boolean;
onOpenChange: (open: boolean) => void;
}
/**
 * Human-readable file size for the upload list, using binary units
 * (1 KB = 1024 B). GB gets one decimal; MB/KB are whole numbers.
 * Fix: values under 1 KB used to render as "0 KB"; they now show raw bytes.
 */
function formatSize(bytes: number): string {
  const KB = 1024;
  const MB = 1024 * KB;
  const GB = 1024 * MB;
  if (bytes >= GB) return `${(bytes / GB).toFixed(1)} GB`;
  if (bytes >= MB) return `${(bytes / MB).toFixed(0)} MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(0)} KB`;
  return `${bytes} B`;
}
type UploadStatus = "idle" | "uploading" | "processing" | "done" | "error";
/**
 * Modal dialog for uploading archive files from the STL page.
 *
 * State machine: "idle" (file picking) → "uploading" (POST /api/uploads) →
 * "processing" (poll GET /api/uploads/{id} every 3s until the server reports
 * COMPLETED or FAILED) → "done" | "error".
 *
 * NOTE(review): the drop zone's copy advertises drag & drop, but no
 * drag/drop event handlers are attached — only click-to-select works.
 * Confirm whether drop support was intended.
 */
export function UploadDialog({ open, onOpenChange }: UploadDialogProps) {
  const [files, setFiles] = useState<File[]>([]);
  const [groupName, setGroupName] = useState("");
  const [status, setStatus] = useState<UploadStatus>("idle");
  const [error, setError] = useState<string | null>(null);
  const [isPending, startTransition] = useTransition();
  const fileInputRef = useRef<HTMLInputElement>(null);
  // Handle of the 3s status-poll interval; cleared on close/unmount below.
  const pollRef = useRef<ReturnType<typeof setInterval> | null>(null);
  // Reset transient state each time the dialog (re)opens; the cleanup stops
  // any in-flight poller when the dialog closes or the component unmounts.
  useEffect(() => {
    if (open) {
      setFiles([]);
      setGroupName("");
      setStatus("idle");
      setError(null);
    }
    return () => {
      if (pollRef.current) clearInterval(pollRef.current);
    };
  }, [open]);
  // Replaces (does not append to) the current selection.
  function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
    if (e.target.files) {
      setFiles(Array.from(e.target.files));
    }
  }
  function removeFile(index: number) {
    setFiles((prev) => prev.filter((_, i) => i !== index));
  }
  // POST the selection, then poll the upload's status until it resolves.
  function handleUpload() {
    if (files.length === 0) return;
    startTransition(async () => {
      setStatus("uploading");
      setError(null);
      try {
        const formData = new FormData();
        for (const file of files) {
          formData.append("files", file);
        }
        if (groupName.trim()) {
          formData.append("groupName", groupName.trim());
        }
        const res = await fetch("/api/uploads", {
          method: "POST",
          body: formData,
        });
        const data = await res.json();
        if (!res.ok) {
          setStatus("error");
          setError(data.error ?? "Upload failed");
          return;
        }
        setStatus("processing");
        // Poll for completion
        pollRef.current = setInterval(async () => {
          try {
            const statusRes = await fetch(`/api/uploads/${data.uploadId}`);
            const statusData = await statusRes.json();
            if (statusData.status === "COMPLETED") {
              setStatus("done");
              // `files` is the closure captured when the upload started.
              toast.success(`${files.length} file(s) uploaded and indexed`);
              if (pollRef.current) clearInterval(pollRef.current);
            } else if (statusData.status === "FAILED") {
              setStatus("error");
              setError(statusData.errorMessage ?? "Processing failed");
              if (pollRef.current) clearInterval(pollRef.current);
            }
          } catch {
            // Keep polling
          }
        }, 3000);
        // Stop polling after 10 minutes; a batch still "processing" at that
        // point is optimistically shown as done so the dialog can't spin
        // forever. NOTE(review): this timeout is never cancelled on close.
        setTimeout(() => {
          if (pollRef.current) {
            clearInterval(pollRef.current);
            pollRef.current = null;
            setStatus((s) => s === "processing" ? "done" : s);
          }
        }, 600_000);
      } catch {
        setStatus("error");
        setError("Network error");
      }
    });
  }
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-lg">
        <DialogHeader>
          <DialogTitle>Upload Files</DialogTitle>
          <DialogDescription>
            Upload archive files to be processed and indexed. Multiple files will be automatically grouped.
          </DialogDescription>
        </DialogHeader>
        {/* Idle: file picker, selected-file list, optional group name. */}
        {status === "idle" && (
          <div className="space-y-4">
            <div
              className="border-2 border-dashed rounded-lg p-8 text-center cursor-pointer hover:border-primary/50 transition-colors"
              onClick={() => fileInputRef.current?.click()}
            >
              <Upload className="h-8 w-8 mx-auto mb-2 text-muted-foreground" />
              <p className="text-sm text-muted-foreground">
                Click to select files or drag &amp; drop
              </p>
              <p className="text-xs text-muted-foreground mt-1">
                ZIP, RAR, 7Z files up to 4GB each
              </p>
              <input
                ref={fileInputRef}
                type="file"
                multiple
                accept=".zip,.rar,.7z,.pdf,.stl"
                onChange={handleFileChange}
                className="hidden"
              />
            </div>
            {files.length > 0 && (
              <div className="space-y-2">
                {files.map((file, i) => (
                  <div key={i} className="flex items-center gap-2 p-2 rounded bg-muted/30">
                    <File className="h-4 w-4 shrink-0 text-muted-foreground" />
                    <span className="text-sm flex-1 truncate">{file.name}</span>
                    <span className="text-xs text-muted-foreground">{formatSize(file.size)}</span>
                    <button onClick={() => removeFile(i)} className="p-0.5 hover:text-destructive">
                      <X className="h-3.5 w-3.5" />
                    </button>
                  </div>
                ))}
              </div>
            )}
            {/* Group name only makes sense for multi-file batches. */}
            {files.length > 1 && (
              <div>
                <Label htmlFor="groupName" className="text-sm">Group Name (optional)</Label>
                <Input
                  id="groupName"
                  value={groupName}
                  onChange={(e) => setGroupName(e.target.value)}
                  placeholder="Auto-generated from filenames"
                  className="mt-1"
                />
              </div>
            )}
          </div>
        )}
        {/* In-flight: same spinner panel for both upload and processing. */}
        {(status === "uploading" || status === "processing") && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-muted/30 border">
            <Loader2 className="h-6 w-6 animate-spin text-primary" />
            <div>
              <p className="text-sm font-medium">
                {status === "uploading" ? "Uploading files..." : "Processing & uploading to Telegram..."}
              </p>
              <p className="text-xs text-muted-foreground mt-0.5">
                {status === "uploading"
                  ? "Sending files to server"
                  : "Hashing, extracting metadata, uploading to destination channel"}
              </p>
            </div>
          </div>
        )}
        {status === "done" && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-green-500/10 border border-green-500/20">
            <CheckCircle2 className="h-6 w-6 text-green-500" />
            <div>
              <p className="text-sm font-medium text-green-500">Upload complete!</p>
              <p className="text-xs text-muted-foreground">Files have been indexed and uploaded to Telegram.</p>
            </div>
          </div>
        )}
        {status === "error" && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-destructive/10 border border-destructive/20">
            <AlertCircle className="h-6 w-6 text-destructive" />
            <div>
              <p className="text-sm font-medium text-destructive">Upload failed</p>
              <p className="text-xs text-muted-foreground">{error}</p>
            </div>
          </div>
        )}
        <DialogFooter>
          {status === "idle" && (
            <>
              <Button variant="outline" onClick={() => onOpenChange(false)}>Cancel</Button>
              <Button onClick={handleUpload} disabled={files.length === 0 || isPending}>
                <Upload className="h-4 w-4 mr-1" />
                Upload {files.length > 0 ? `(${files.length})` : ""}
              </Button>
            </>
          )}
          {(status === "done" || status === "error") && (
            <Button variant="outline" onClick={() => onOpenChange(false)}>Close</Button>
          )}
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}

View File

@@ -3,6 +3,8 @@ import { auth } from "@/lib/auth";
import { import {
markNotificationRead, markNotificationRead,
markAllNotificationsRead, markAllNotificationsRead,
dismissNotification,
clearAllNotifications,
} from "@/data/notification.queries"; } from "@/data/notification.queries";
export const dynamic = "force-dynamic"; export const dynamic = "force-dynamic";
@@ -15,8 +17,13 @@ export async function POST(request: Request) {
const body = await request.json().catch(() => ({})); const body = await request.json().catch(() => ({}));
const id = body.id as string | undefined; const id = body.id as string | undefined;
const action = (body.action as string) ?? "read";
if (id) { if (action === "dismiss" && id) {
await dismissNotification(id);
} else if (action === "clear") {
await clearAllNotifications();
} else if (id) {
await markNotificationRead(id); await markNotificationRead(id);
} else { } else {
await markAllNotificationsRead(); await markAllNotificationsRead();

View File

@@ -0,0 +1,43 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
export const dynamic = "force-dynamic";
/**
 * GET /api/uploads/[id] — status snapshot for one manual upload batch.
 * Returns 401 when unauthenticated and 404 when the batch does not exist
 * or belongs to a different user (same response for both, by design).
 */
export async function GET(
  _request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const authSession = await auth();
  const viewerId = authSession?.user?.id;
  if (!viewerId) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }
  const { id } = await params;
  const upload = await prisma.manualUpload.findUnique({
    where: { id },
    include: {
      files: {
        select: { id: true, fileName: true, fileSize: true, packageId: true },
      },
    },
  });
  // A missing record and someone else's record look identical to the caller.
  if (upload === null || upload.userId !== viewerId) {
    return NextResponse.json({ error: "Not found" }, { status: 404 });
  }
  // BigInt is not JSON-serializable, so fileSize goes out as a string.
  const serializedFiles = upload.files.map((f) => ({
    ...f,
    fileSize: f.fileSize.toString(),
  }));
  return NextResponse.json({
    id: upload.id,
    status: upload.status,
    groupName: upload.groupName,
    errorMessage: upload.errorMessage,
    files: serializedFiles,
    createdAt: upload.createdAt.toISOString(),
    completedAt: upload.completedAt?.toISOString() ?? null,
  });
}

View File

@@ -0,0 +1,83 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import { writeFile, mkdir } from "fs/promises";
import path from "path";
export const dynamic = "force-dynamic";
const UPLOAD_DIR = process.env.UPLOAD_DIR ?? "/data/uploads";
const MAX_FILE_SIZE = 4 * 1024 * 1024 * 1024; // 4GB per file
/**
 * POST /api/uploads — accept one or more files from the upload dialog.
 *
 * Stages the files under UPLOAD_DIR/<uploadId>/, records a ManualUpload plus
 * one ManualUploadFile per file, then signals the worker over the
 * pg_notify('manual_upload') channel. Responds { uploadId, fileCount, status }.
 *
 * Fixes vs. previous version:
 *  - file names are sanitized with path.basename(), closing a path-traversal
 *    hole where a client-supplied name like "../../x" escaped the upload dir;
 *  - size validation happens BEFORE the DB record is created or anything is
 *    written, so an oversized file no longer leaves an orphaned PENDING row
 *    and partially written files behind;
 *  - a mid-loop write/insert failure now marks the record FAILED instead of
 *    leaving it PENDING forever.
 *
 * Responses: 401 unauthenticated, 400 validation error, 500 unexpected.
 */
export async function POST(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }
  try {
    const formData = await request.formData();
    const files = formData.getAll("files") as File[];
    const groupName = formData.get("groupName") as string | null;
    if (!files.length) {
      return NextResponse.json({ error: "No files provided" }, { status: 400 });
    }
    // Validate every file up front — nothing is created or written until the
    // whole batch passes.
    for (const file of files) {
      if (file.size > MAX_FILE_SIZE) {
        return NextResponse.json(
          { error: `File "${file.name}" exceeds 4GB limit` },
          { status: 400 }
        );
      }
    }
    // Create the upload record
    const upload = await prisma.manualUpload.create({
      data: {
        userId: session.user.id,
        groupName: groupName || (files.length > 1 ? files[0].name.replace(/\.[^.]+$/, "") : null),
        status: "PENDING",
      },
    });
    // Save files to shared volume under a per-upload directory.
    const uploadDir = path.join(UPLOAD_DIR, upload.id);
    await mkdir(uploadDir, { recursive: true });
    try {
      for (const file of files) {
        // basename() strips any directory components from the client-supplied
        // name so the write cannot escape uploadDir.
        const safeName = path.basename(file.name);
        const filePath = path.join(uploadDir, safeName);
        // NOTE(review): this buffers the entire file (up to 4GB) in memory;
        // consider streaming file.stream() to disk if that proves a problem.
        const buffer = Buffer.from(await file.arrayBuffer());
        await writeFile(filePath, buffer);
        await prisma.manualUploadFile.create({
          data: {
            uploadId: upload.id,
            fileName: safeName,
            filePath,
            fileSize: BigInt(file.size),
          },
        });
      }
    } catch (writeErr) {
      // Don't leave the record stuck in PENDING if staging failed partway.
      await prisma.manualUpload
        .update({
          where: { id: upload.id },
          data: {
            status: "FAILED",
            errorMessage: writeErr instanceof Error ? writeErr.message : "File write failed",
          },
        })
        .catch(() => {});
      throw writeErr;
    }
    // Notify worker
    try {
      await prisma.$queryRawUnsafe(
        `SELECT pg_notify('manual_upload', $1)`,
        upload.id
      );
    } catch {
      // Best-effort. NOTE(review): if NOTIFY fails the batch stays PENDING —
      // confirm the worker also sweeps for stale PENDING uploads.
    }
    return NextResponse.json({
      uploadId: upload.id,
      fileCount: files.length,
      status: "PENDING",
    });
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Upload failed" },
      { status: 500 }
    );
  }
}

View File

@@ -1,7 +1,7 @@
"use client"; "use client";
import { useState, useEffect, useCallback } from "react"; import { useState, useEffect, useCallback } from "react";
import { Bell, AlertTriangle, AlertCircle, Info, CheckCircle2 } from "lucide-react"; import { Bell, AlertTriangle, AlertCircle, Info, CheckCircle2, X, Trash2 } from "lucide-react";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
import { import {
@@ -94,6 +94,34 @@ export function NotificationBell() {
} }
} }
async function handleDismiss(id: string) {
try {
await fetch("/api/notifications/read", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ id, action: "dismiss" }),
});
setNotifications((prev) => prev.filter((n) => n.id !== id));
setUnreadCount((c) => Math.max(0, c - 1));
} catch {
// Ignore
}
}
async function handleClearAll() {
try {
await fetch("/api/notifications/read", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ action: "clear" }),
});
setNotifications([]);
setUnreadCount(0);
} catch {
// Ignore
}
}
async function handleRepair(notificationId: string) { async function handleRepair(notificationId: string) {
try { try {
const res = await fetch("/api/notifications/repair", { const res = await fetch("/api/notifications/repair", {
@@ -141,6 +169,7 @@ export function NotificationBell() {
<PopoverContent className="w-96 p-0" align="end"> <PopoverContent className="w-96 p-0" align="end">
<div className="flex items-center justify-between border-b px-4 py-3"> <div className="flex items-center justify-between border-b px-4 py-3">
<h3 className="text-sm font-semibold">Notifications</h3> <h3 className="text-sm font-semibold">Notifications</h3>
<div className="flex items-center gap-1">
{unreadCount > 0 && ( {unreadCount > 0 && (
<Button <Button
variant="ghost" variant="ghost"
@@ -151,6 +180,18 @@ export function NotificationBell() {
Mark all read Mark all read
</Button> </Button>
)} )}
{notifications.length > 0 && (
<Button
variant="ghost"
size="sm"
className="h-7 text-xs text-muted-foreground"
onClick={handleClearAll}
>
<Trash2 className="h-3 w-3 mr-1" />
Clear
</Button>
)}
</div>
</div> </div>
<ScrollArea className="max-h-[400px]"> <ScrollArea className="max-h-[400px]">
{notifications.length === 0 ? ( {notifications.length === 0 ? (
@@ -187,6 +228,13 @@ export function NotificationBell() {
{!n.isRead && ( {!n.isRead && (
<span className="h-2 w-2 rounded-full bg-primary shrink-0" /> <span className="h-2 w-2 rounded-full bg-primary shrink-0" />
)} )}
<button
className="ml-auto shrink-0 p-0.5 rounded hover:bg-muted text-muted-foreground hover:text-foreground"
onClick={(e) => { e.stopPropagation(); handleDismiss(n.id); }}
title="Dismiss"
>
<X className="h-3 w-3" />
</button>
</div> </div>
<p className="text-xs text-muted-foreground line-clamp-2 mt-0.5"> <p className="text-xs text-muted-foreground line-clamp-2 mt-0.5">
{n.message} {n.message}

View File

@@ -35,3 +35,11 @@ export async function markAllNotificationsRead() {
data: { isRead: true }, data: { isRead: true },
}); });
} }
/** Permanently delete a single notification (backs the dismiss "X" button). */
export async function dismissNotification(id: string) {
  const removed = await prisma.systemNotification.delete({ where: { id } });
  return removed;
}
/** Delete every notification (backs the "Clear" button in the bell popover). */
export async function clearAllNotifications() {
  const result = await prisma.systemNotification.deleteMany({});
  return result;
}

View File

@@ -38,7 +38,9 @@ export async function runIntegrityAudit(): Promise<{ checked: number; issues: nu
for (const pkg of multipartPackages) { for (const pkg of multipartPackages) {
const actualParts = pkg.destMessageIds.length; const actualParts = pkg.destMessageIds.length;
if (actualParts > 0 && actualParts !== pkg.partCount) { // Only flag when we have >1 stored IDs but count doesn't match.
// Packages with exactly 1 ID are legacy (backfilled from single destMessageId) — not actionable.
if (actualParts > 1 && actualParts !== pkg.partCount) {
issues++; issues++;
// Check if we already have a notification for this // Check if we already have a notification for this

View File

@@ -5,6 +5,7 @@ import { withTdlibMutex } from "./util/mutex.js";
import { processFetchRequest } from "./worker.js"; import { processFetchRequest } from "./worker.js";
import { processExtractRequest } from "./extract-listener.js"; import { processExtractRequest } from "./extract-listener.js";
import { rebuildPackageDatabase } from "./rebuild.js"; import { rebuildPackageDatabase } from "./rebuild.js";
import { processManualUpload } from "./manual-upload.js";
import { generateInviteLink, createSupergroup, searchPublicChat } from "./tdlib/chats.js"; import { generateInviteLink, createSupergroup, searchPublicChat } from "./tdlib/chats.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js"; import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import { triggerImmediateCycle } from "./scheduler.js"; import { triggerImmediateCycle } from "./scheduler.js";
@@ -55,6 +56,7 @@ async function connectListener(): Promise<void> {
await pgClient.query("LISTEN join_channel"); await pgClient.query("LISTEN join_channel");
await pgClient.query("LISTEN archive_extract"); await pgClient.query("LISTEN archive_extract");
await pgClient.query("LISTEN rebuild_packages"); await pgClient.query("LISTEN rebuild_packages");
await pgClient.query("LISTEN manual_upload");
pgClient.on("notification", (msg) => { pgClient.on("notification", (msg) => {
if (msg.channel === "channel_fetch" && msg.payload) { if (msg.channel === "channel_fetch" && msg.payload) {
@@ -71,6 +73,8 @@ async function connectListener(): Promise<void> {
handleArchiveExtract(msg.payload); handleArchiveExtract(msg.payload);
} else if (msg.channel === "rebuild_packages" && msg.payload) { } else if (msg.channel === "rebuild_packages" && msg.payload) {
handleRebuildPackages(msg.payload); handleRebuildPackages(msg.payload);
} else if (msg.channel === "manual_upload" && msg.payload) {
handleManualUpload(msg.payload);
} }
}); });
@@ -96,7 +100,7 @@ async function connectListener(): Promise<void> {
} }
}); });
log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages)"); log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages, manual_upload)");
} catch (err) { } catch (err) {
log.error({ err }, "Failed to start fetch listener — retrying"); log.error({ err }, "Failed to start fetch listener — retrying");
scheduleReconnect(); scheduleReconnect();
@@ -511,3 +515,11 @@ function handleRebuildPackages(requestId: string): void {
} }
}); });
} }
// ── Manual upload handler ──
// Chains the upload onto the shared fetch queue so work stays serialized
// with the other TDLib-using jobs; failures are logged, never rethrown.
function handleManualUpload(uploadId: string): void {
  const run = () => processManualUpload(uploadId);
  const onError = (err: unknown) =>
    log.error({ err, uploadId }, "Manual upload processing failed");
  fetchQueue = fetchQueue.then(run).catch(onError);
}

211
worker/src/manual-upload.ts Normal file
View File

@@ -0,0 +1,211 @@
import path from "path";
import { rm } from "fs/promises";
import { db } from "./db/client.js";
import { childLogger } from "./util/logger.js";
import { config } from "./util/config.js";
import { hashParts } from "./archive/hash.js";
import { byteLevelSplit } from "./archive/split.js";
import { uploadToChannel } from "./upload/channel.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import { readZipCentralDirectory } from "./archive/zip-reader.js";
import { readRarContents } from "./archive/rar-reader.js";
import { read7zContents } from "./archive/sevenz-reader.js";
import { getActiveAccounts } from "./db/queries.js";
const log = childLogger("manual-upload");
/**
 * Process a manual upload batch end-to-end: hash each file, skip duplicates,
 * read archive metadata (best-effort), split oversized files, upload to the
 * destination channel, create Package rows, group multi-file batches, update
 * the ManualUpload status, and finally remove the staging directory.
 *
 * Fixes vs. previous version:
 *  - a batch where EVERY file fails is now marked FAILED (it used to be
 *    reported COMPLETED with zero packages, showing false success in the UI);
 *  - partial failures are surfaced via errorMessage on the COMPLETED row;
 *  - staging-dir cleanup derives the directory from the stored filePath
 *    instead of hard-coding "/data/uploads" (the API route honors UPLOAD_DIR,
 *    so the hard-coded path could miss the real directory).
 */
export async function processManualUpload(uploadId: string): Promise<void> {
  log.info({ uploadId }, "Processing manual upload");
  const upload = await db.manualUpload.findUnique({
    where: { id: uploadId },
    include: { files: true },
  });
  // PENDING-only guard makes duplicate notifications harmless.
  if (!upload || upload.status !== "PENDING") {
    log.warn({ uploadId }, "Manual upload not found or not pending");
    return;
  }
  await db.manualUpload.update({
    where: { id: uploadId },
    data: { status: "PROCESSING" },
  });
  try {
    // Resolve the configured destination channel.
    const destSetting = await db.globalSetting.findUnique({
      where: { key: "destination_channel_id" },
    });
    if (!destSetting) throw new Error("No destination channel configured");
    const destChannel = await db.telegramChannel.findFirst({
      where: { id: destSetting.value, type: "DESTINATION", isActive: true },
    });
    if (!destChannel) throw new Error("Destination channel not found or inactive");
    // Get a TDLib client (use first active account)
    const accounts = await getActiveAccounts();
    const account = accounts[0];
    if (!account) throw new Error("No authenticated Telegram account available");
    const client = await createTdlibClient({ id: account.id, phone: account.phone });
    try {
      const packageIds: string[] = [];
      // Names of files that failed; surfaced in errorMessage on completion.
      const failedFiles: string[] = [];
      for (const file of upload.files) {
        try {
          const filePath = file.filePath;
          const fileName = file.fileName;
          const fileSize = file.fileSize;
          log.info({ fileName, fileSize: Number(fileSize) }, "Processing file");
          // Archive type is inferred from the extension only.
          let archiveType: "ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT" = "DOCUMENT";
          const ext = fileName.toLowerCase();
          if (ext.endsWith(".zip")) archiveType = "ZIP";
          else if (ext.endsWith(".rar")) archiveType = "RAR";
          else if (ext.endsWith(".7z")) archiveType = "SEVEN_Z";
          // Hash the file
          const contentHash = await hashParts([filePath]);
          // Dedupe: if identical content already reached the destination,
          // link this upload file to the existing package and skip the upload.
          const existing = await db.package.findFirst({
            where: { contentHash, destMessageId: { not: null } },
            select: { id: true },
          });
          if (existing) {
            log.info({ fileName, contentHash }, "Duplicate file, skipping upload");
            await db.manualUploadFile.update({
              where: { id: file.id },
              data: { packageId: existing.id },
            });
            packageIds.push(existing.id);
            continue;
          }
          // Read archive metadata (best-effort — an unreadable archive is
          // still uploaded, just without a file listing).
          let entries: {
            path: string;
            fileName: string;
            extension: string | null;
            compressedSize: bigint;
            uncompressedSize: bigint;
            crc32: string | null;
          }[] = [];
          try {
            if (archiveType === "ZIP") entries = await readZipCentralDirectory([filePath]);
            else if (archiveType === "RAR") entries = await readRarContents(filePath);
            else if (archiveType === "SEVEN_Z") entries = await read7zContents(filePath);
          } catch {
            log.debug({ fileName }, "Could not read archive metadata");
          }
          // Split if the file exceeds the configured per-part size.
          const MAX_UPLOAD_SIZE = BigInt(config.maxPartSizeMB) * 1024n * 1024n;
          let uploadPaths = [filePath];
          if (fileSize > MAX_UPLOAD_SIZE) {
            uploadPaths = await byteLevelSplit(filePath);
          }
          // Upload to Telegram
          const destResult = await uploadToChannel(
            client,
            destChannel.telegramId,
            uploadPaths
          );
          // Create the package record; destination doubles as source since
          // the file never existed in a Telegram source channel.
          const pkg = await db.package.create({
            data: {
              contentHash,
              fileName,
              fileSize,
              archiveType,
              sourceChannelId: destChannel.id,
              sourceMessageId: destResult.messageId,
              destChannelId: destChannel.id,
              destMessageId: destResult.messageId,
              destMessageIds: destResult.messageIds,
              isMultipart: uploadPaths.length > 1,
              partCount: uploadPaths.length,
              fileCount: entries.length,
              files: entries.length > 0 ? { create: entries } : undefined,
            },
          });
          await db.manualUploadFile.update({
            where: { id: file.id },
            data: { packageId: pkg.id },
          });
          packageIds.push(pkg.id);
          log.info({ fileName, packageId: pkg.id }, "File processed and uploaded");
          // Clean up split files (but not the original)
          if (uploadPaths.length > 1) {
            for (const splitPath of uploadPaths) {
              if (splitPath !== filePath) {
                await rm(splitPath, { force: true }).catch(() => {});
              }
            }
          }
        } catch (fileErr) {
          failedFiles.push(file.fileName);
          log.error({ err: fileErr, fileName: file.fileName }, "Failed to process file");
        }
      }
      // If nothing made it through, the batch as a whole failed — bail into
      // the outer catch so the record is marked FAILED, not COMPLETED.
      if (upload.files.length > 0 && packageIds.length === 0) {
        throw new Error(`All ${upload.files.length} file(s) failed to process`);
      }
      // Group packages if multiple files
      if (packageIds.length >= 2) {
        const groupName =
          upload.groupName ?? upload.files[0].fileName.replace(/\.[^.]+$/, "");
        const group = await db.packageGroup.create({
          data: {
            name: groupName,
            sourceChannelId: destChannel.id,
            groupingSource: "MANUAL",
          },
        });
        await db.package.updateMany({
          where: { id: { in: packageIds } },
          data: { packageGroupId: group.id },
        });
        log.info(
          { groupId: group.id, groupName, packageCount: packageIds.length },
          "Created group for uploaded files"
        );
      }
      await db.manualUpload.update({
        where: { id: uploadId },
        data: {
          status: "COMPLETED",
          completedAt: new Date(),
          // Surface partial failures without failing the whole batch.
          errorMessage:
            failedFiles.length > 0 ? `Failed files: ${failedFiles.join(", ")}` : null,
        },
      });
      log.info(
        { uploadId, fileCount: upload.files.length, packageCount: packageIds.length },
        "Manual upload completed"
      );
    } finally {
      await closeTdlibClient(client);
    }
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    log.error({ err, uploadId }, "Manual upload failed");
    await db.manualUpload.update({
      where: { id: uploadId },
      data: { status: "FAILED", errorMessage: message },
    });
  }
  // Clean up the staging directory. Derive it from the stored filePath so the
  // API's UPLOAD_DIR override is respected; fall back to the default volume.
  try {
    const uploadDir =
      upload.files.length > 0
        ? path.dirname(upload.files[0].filePath)
        : path.join("/data/uploads", uploadId);
    await rm(uploadDir, { recursive: true, force: true });
  } catch {
    // Best-effort cleanup
  }
}