Mirror of https://github.com/xCyanGrizzly/DragonsStash.git
Synced 2026-05-11 06:11:15 +00:00

Compare commits: 9bc9271f11...main (53 commits)
| SHA1 |
|---|
| 59038889ae |
| 77c26adb31 |
| 35cce3151c |
| d6c82ede1e |
| 7e48131f67 |
| a79cb4749b |
| e9017fc518 |
| 4f59d19ac2 |
| 579276ee2d |
| b48cc510a4 |
| 614c8e5b74 |
| 3019c23f70 |
| 436a576085 |
| f454303352 |
| e29bd79d66 |
| 61e61d0085 |
| 925d916a3c |
| 27bacaf24c |
| be4daf950b |
| af7094637d |
| f4aa9d9a2f |
| 7f9a03d4ee |
| 2c46ab0843 |
| 9e78cc5d19 |
| 194c87a256 |
| 718007446f |
| 527aca7c25 |
| a4156b2ac6 |
| d50c68f67c |
| f6e7f5ed3c |
| e7f213eec4 |
| 20b7d28fdf |
| 21663fc29e |
| 218ccb9282 |
| b632533f54 |
| 4baf5aad83 |
| ad7790c07b |
| e4398caebe |
| 6eb7129637 |
| d6386209be |
| fe28c31b9e |
| 55bdf3c890 |
| 5506c7d91b |
| 5a3550fa10 |
| ad3d42a997 |
| dd0d246a77 |
| dcc1c97053 |
| 71c3228e44 |
| 094001f9f7 |
| 0faacc214b |
| d53e581623 |
| 780e6200d8 |
| 9642adaba7 |
.gitignore (vendored, 1 addition)

@@ -54,3 +54,4 @@ src/generated
 # temp files
 nul
 tmpclaude-*
+.worktrees/
@@ -10,7 +10,10 @@ import {
   getSubscriptions,
   addSubscription,
   removeSubscription,
+  getGroupById,
+  searchGroups,
 } from "./db/queries.js";
+import { db } from "./db/client.js";
 import { sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";

 const log = childLogger("commands");
@@ -78,6 +81,12 @@ export async function handleMessage(msg: IncomingMessage): Promise<void> {
     case "/status":
       await handleStatus(chatId, userId);
       break;
+    case "/group":
+      await handleGroup(chatId, args);
+      break;
+    case "/sendgroup":
+      await handleSendGroup(chatId, userId, args);
+      break;
     default:
       await sendTextMessage(
         chatId,
@@ -117,6 +126,8 @@ async function handleStart(
     `/search <query> — Search packages`,
     `/latest [n] — Show latest packages`,
     `/package <id> — Package details`,
+    `/group <id or name> — View group info and package list`,
+    `/sendgroup <id> — Send all packages in a group to yourself`,
     `/link <code> — Link your Telegram to your web account`,
     `/subscribe <keyword> — Get notified for new packages`,
     `/subscriptions — View your subscriptions`,
@@ -136,6 +147,8 @@ async function handleHelp(chatId: bigint): Promise<void> {
     `/search <query> — Search by filename or creator`,
     `/latest [n] — Show n most recent packages (default: 5)`,
     `/package <id> — View package details and file list`,
+    `/group <id or name> — View group info and package list`,
+    `/sendgroup <id> — Send all packages in a group to yourself`,
     ``,
     `🔗 <b>Account Linking</b>`,
     `/link <code> — Link Telegram to your web account`,
@@ -432,6 +445,168 @@ async function handleStatus(chatId: bigint, userId: bigint): Promise<void> {
   }
 }

+async function handleGroup(chatId: bigint, query: string): Promise<void> {
+  if (!query) {
+    await sendTextMessage(
+      chatId,
+      "Usage: /group <id or name>\n\nProvide a group ID (starts with 'c') or a name to search.",
+      "textParseModeHTML"
+    );
+    return;
+  }
+
+  const trimmed = query.trim();
+
+  // If it looks like a cuid (starts with 'c', ~25 chars), look up by ID directly
+  if (/^c[a-z0-9]{20,}$/i.test(trimmed)) {
+    const group = await getGroupById(trimmed);
+    if (!group) {
+      await sendTextMessage(chatId, "Group not found.", "textParseModeHTML");
+      return;
+    }
+
+    const packageLines = group.packages.slice(0, 20).map((pkg, i) => {
+      const size = formatSize(pkg.fileSize);
+      return `  ${i + 1}. <b>${escapeHtml(pkg.fileName)}</b> (${size}, ${pkg.fileCount} files) — <code>${pkg.id}</code>`;
+    });
+    const more = group.packages.length > 20
+      ? `\n  ... and ${group.packages.length - 20} more`
+      : "";
+
+    const response = [
+      `📦 <b>Group: ${escapeHtml(group.name)}</b>`,
+      ``,
+      `Packages: ${group.packages.length}`,
+      `ID: <code>${group.id}</code>`,
+      ``,
+      `<b>Contents:</b>`,
+      ...packageLines,
+      more,
+      ``,
+      `Use /sendgroup ${group.id} to receive all packages.`,
+    ]
+      .filter((l) => l !== "")
+      .join("\n");
+
+    await sendTextMessage(chatId, response, "textParseModeHTML");
+    return;
+  }
+
+  // Otherwise search by name
+  const groups = await searchGroups(trimmed, 5);
+
+  if (groups.length === 0) {
+    await sendTextMessage(
+      chatId,
+      `No groups found matching "<b>${escapeHtml(trimmed)}</b>".`,
+      "textParseModeHTML"
+    );
+    return;
+  }
+
+  const lines = groups.map(
+    (g, i) =>
+      `${i + 1}. <b>${escapeHtml(g.name)}</b> — ${g._count.packages} package(s)\n   ID: <code>${g.id}</code>`
+  );
+
+  const response = [
+    `🔍 <b>Groups matching "${escapeHtml(trimmed)}":</b>`,
+    ``,
+    ...lines,
+    ``,
+    `Use /group <id> for full details.`,
+  ].join("\n");
+
+  await sendTextMessage(chatId, response, "textParseModeHTML");
+}
+
+async function handleSendGroup(
+  chatId: bigint,
+  userId: bigint,
+  args: string
+): Promise<void> {
+  if (!args) {
+    await sendTextMessage(
+      chatId,
+      "Usage: /sendgroup <group-id>",
+      "textParseModeHTML"
+    );
+    return;
+  }
+
+  const groupId = args.trim();
+  const group = await getGroupById(groupId);
+
+  if (!group) {
+    await sendTextMessage(chatId, "Group not found.", "textParseModeHTML");
+    return;
+  }
+
+  // Require account linking
+  const link = await findLinkByTelegramUserId(userId);
+  if (!link) {
+    await sendTextMessage(
+      chatId,
+      "You must link your account before receiving packages.\nUse /link <code> to connect.",
+      "textParseModeHTML"
+    );
+    return;
+  }
+
+  // Only send packages that have been uploaded to the destination channel
+  const sendable = group.packages.filter(
+    (pkg) => pkg.destChannelId && pkg.destMessageId
+  );
+
+  if (sendable.length === 0) {
+    await sendTextMessage(
+      chatId,
+      `No packages in group "<b>${escapeHtml(group.name)}</b>" are ready to send yet.`,
+      "textParseModeHTML"
+    );
+    return;
+  }
+
+  // Create a BotSendRequest for each sendable package
+  const requests = await Promise.all(
+    sendable.map((pkg) =>
+      db.botSendRequest.create({
+        data: {
+          packageId: pkg.id,
+          telegramLinkId: link.id,
+          requestedByUserId: link.userId,
+          status: "PENDING",
+        },
+      })
+    )
+  );
+
+  // Fire pg_notify for each request so the send listener picks them up
+  for (const req of requests) {
+    await db.$queryRawUnsafe(
+      `SELECT pg_notify('bot_send', $1)`,
+      req.id
+    ).catch(() => {
+      // Best-effort — the bot also processes PENDING requests on its send queue
+    });
+  }
+
+  await sendTextMessage(
+    chatId,
+    [
+      `✅ <b>Queued ${requests.length} package(s) from "${escapeHtml(group.name)}"</b>`,
+      ``,
+      `You'll receive each archive shortly. Use /package <id> to check individual packages.`,
+    ].join("\n"),
+    "textParseModeHTML"
+  );
+
+  log.info(
+    { groupId, packageCount: requests.length, userId: userId.toString() },
+    "Group send queued"
+  );
+}
+
 function escapeHtml(text: string): string {
   return text
     .replace(/&/g, "&amp;")
@@ -53,7 +53,52 @@ export async function createTelegramLink(
 // ── Package search ──

 export async function searchPackages(query: string, limit = 10) {
-  const packages = await db.package.findMany({
+  // Try full-text search first
+  if (query.length >= 3) {
+    const tsQuery = query
+      .trim()
+      .split(/\s+/)
+      .filter((w) => w.length >= 2)
+      .map((w) => w.replace(/[^a-zA-Z0-9]/g, ""))
+      .filter(Boolean)
+      .join(" & ");
+
+    if (tsQuery) {
+      try {
+        const ftsResults = await db.$queryRawUnsafe<{ id: string }[]>(
+          `SELECT id FROM packages
+           WHERE "searchVector" @@ to_tsquery('english', $1)
+           ORDER BY ts_rank("searchVector", to_tsquery('english', $1)) DESC
+           LIMIT $2`,
+          tsQuery,
+          limit
+        );
+
+        if (ftsResults.length > 0) {
+          return db.package.findMany({
+            where: { id: { in: ftsResults.map((r) => r.id) } },
+            orderBy: { indexedAt: "desc" },
+            select: {
+              id: true,
+              fileName: true,
+              fileSize: true,
+              archiveType: true,
+              fileCount: true,
+              creator: true,
+              indexedAt: true,
+              destChannelId: true,
+              destMessageId: true,
+            },
+          });
+        }
+      } catch {
+        // FTS failed — fall back to ILIKE
+      }
+    }
+  }
+
+  // Fallback: ILIKE search
+  return db.package.findMany({
     where: {
       OR: [
         { fileName: { contains: query, mode: "insensitive" } },
@@ -74,7 +119,44 @@ export async function searchPackages(query: string, limit = 10) {
       destMessageId: true,
     },
   });
-  return packages;
 }

+// ── Group queries ──
+
+export async function getGroupById(groupId: string) {
+  return db.packageGroup.findUnique({
+    where: { id: groupId },
+    include: {
+      packages: {
+        orderBy: { indexedAt: "desc" },
+        select: {
+          id: true,
+          fileName: true,
+          fileSize: true,
+          archiveType: true,
+          fileCount: true,
+          creator: true,
+          destChannelId: true,
+          destMessageId: true,
+        },
+      },
+    },
+  });
+}
+
+export async function searchGroups(query: string, limit = 5) {
+  return db.packageGroup.findMany({
+    where: {
+      name: { contains: query, mode: "insensitive" },
+    },
+    orderBy: { createdAt: "desc" },
+    take: limit,
+    select: {
+      id: true,
+      name: true,
+      _count: { select: { packages: true } },
+    },
+  });
+}
+
 export async function getLatestPackages(limit = 5) {
@@ -122,6 +204,9 @@ export async function getPendingSendRequest(requestId: string) {
       archiveType: true,
       destChannelId: true,
       destMessageId: true,
+      destMessageIds: true,
+      isMultipart: true,
+      partCount: true,
       previewData: true,
       sourceChannel: { select: { title: true, telegramId: true } },
     },
@@ -7,7 +7,7 @@ import {
   findMatchingSubscriptions,
   getGlobalDestinationChannel,
 } from "./db/queries.js";
-import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
+import { copyMessageToUser, copyMultipleMessagesToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
 import { sleep } from "./util/flood-wait.js";

 const log = childLogger("send-listener");
@@ -154,11 +154,25 @@ async function processSendRequest(requestId: string): Promise<void> {
   }

   // Forward the actual archive file(s) from destination channel
+  const messageIds = pkg.destMessageIds as bigint[] | undefined;
+  if (messageIds && messageIds.length > 1) {
+    log.info(
+      { requestId, parts: messageIds.length },
+      "Sending multi-part archive"
+    );
+    await copyMultipleMessagesToUser(
+      destChannel.telegramId,
+      messageIds,
+      targetUserId
+    );
+  } else {
+    // Single part or legacy (no destMessageIds populated)
+    await copyMessageToUser(
+      destChannel.telegramId,
+      pkg.destMessageId,
+      targetUserId
+    );
+  }

   await updateSendRequest(requestId, "SENT");
   log.info({ requestId }, "Send request completed successfully");
@@ -121,6 +121,25 @@ export async function copyMessageToUser(
   }, fileName);
 }

+/**
+ * Send multiple document messages from a channel to a user's DM.
+ * Used for multi-part archives where each part is a separate Telegram message.
+ * Sends parts sequentially with a small delay to avoid rate limits.
+ */
+export async function copyMultipleMessagesToUser(
+  fromChatId: bigint,
+  messageIds: bigint[],
+  toUserId: bigint
+): Promise<void> {
+  for (let i = 0; i < messageIds.length; i++) {
+    await copyMessageToUser(fromChatId, messageIds[i], toUserId);
+    // Small delay between parts to avoid rate limits
+    if (i < messageIds.length - 1) {
+      await new Promise((resolve) => setTimeout(resolve, 1000));
+    }
+  }
+}
+
 /**
  * Send a message and wait for Telegram to confirm delivery.
  * Returns when updateMessageSendSucceeded fires for the temp message.
@@ -28,6 +28,8 @@ services:
       timeout: 5s
       retries: 3
       start_period: 60s
+    volumes:
+      - manual_uploads:/data/uploads
     restart: unless-stopped
     deploy:
       resources:
@@ -54,6 +56,7 @@ services:
     volumes:
       - tdlib_state:/data/tdlib
       - tmp_zips:/tmp/zips
+      - manual_uploads:/data/uploads
     depends_on:
       db:
         condition: service_healthy
@@ -121,6 +124,7 @@ volumes:
   tdlib_state:
   tdlib_bot_state:
   tmp_zips:
+  manual_uploads:

 networks:
   frontend:
docs/superpowers/plans/2026-03-25-package-grouping.md (new file, 1343 lines; diff suppressed because it is too large)

@@ -0,0 +1,964 @@
# Multi-Part Send Fix & Kickstarter Package Linking

> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.

**Goal:** Fix multi-part package forwarding so all archive parts reach the user, and add UI to link STL packages to kickstarters with "send all" capability.

**Architecture:** Two independent subsystems. (A) Store all destination message IDs when the worker uploads multi-part archives, then have the bot forward every part. (B) Add a package-linker dialog in the kickstarter UI using the existing `linkPackages` action, plus a "send all" action that queues every linked package.

**Tech Stack:** Prisma (schema + migration), TypeScript worker/bot services, Next.js App Router (server actions + React client components), shadcn/ui, TanStack Table.

---

## File Map

### Subsystem A — Multi-Part Send Fix

| Action | File | Responsibility |
|--------|------|----------------|
| Modify | `prisma/schema.prisma` | Add `destMessageIds BigInt[]` to Package |
| Create | `prisma/migrations/<ts>_add_dest_message_ids/migration.sql` | Migration SQL |
| Modify | `worker/src/upload/channel.ts` | Return all message IDs from `uploadToChannel` |
| Modify | `worker/src/db/queries.ts` | Add `destMessageIds` to `CreatePackageInput` and `createPackageWithFiles` |
| Modify | `worker/src/worker.ts` | Pass all message IDs when creating a package |
| Modify | `bot/src/db/queries.ts` | Include `destMessageIds` in `getPendingSendRequest` |
| Modify | `bot/src/send-listener.ts` | Forward all parts, not just the first |

### Subsystem B — Kickstarter Package Linking UI

| Action | File | Responsibility |
|--------|------|----------------|
| Create | `src/app/(app)/kickstarters/_components/package-linker-dialog.tsx` | Dialog with package search + selection for linking |
| Modify | `src/app/(app)/kickstarters/_components/kickstarter-columns.tsx` | Add "Link Packages" and "Send All" actions to the row menu |
| Modify | `src/app/(app)/kickstarters/_components/kickstarter-table.tsx` | Wire up new dialogs + state |
| Modify | `src/app/(app)/kickstarters/actions.ts` | Add `sendAllKickstarterPackages` action |
| Modify | `src/data/kickstarter.queries.ts` | Add query to search packages for linking |

---

## Task 1: Add `destMessageIds` to Prisma Schema + Migration

**Files:**
- Modify: `prisma/schema.prisma:470-471`
- Create: migration SQL

- [ ] **Step 1: Add field to schema**

In `prisma/schema.prisma`, add `destMessageIds` after `destMessageId`:

```prisma
destMessageId  BigInt?
destMessageIds BigInt[] @default([])
```

- [ ] **Step 2: Create migration SQL manually**

Create the migration directory and SQL file. The migration adds the column with a default and backfills existing rows by copying `destMessageId` into the array where it's non-null:

```sql
-- AlterTable
ALTER TABLE "packages" ADD COLUMN "destMessageIds" BIGINT[] DEFAULT ARRAY[]::BIGINT[];

-- Backfill: copy existing destMessageId into the array
UPDATE "packages"
SET "destMessageIds" = ARRAY["destMessageId"]
WHERE "destMessageId" IS NOT NULL;
```

- [ ] **Step 3: Apply migration to database**

```bash
# -i is required so the redirected file reaches psql's stdin
docker exec -i dragonsstash-db psql -U dragons -d dragonsstash -f - < migration.sql
```

- [ ] **Step 4: Regenerate Prisma client**

Use the app container (which has node/prisma) to regenerate:

```bash
docker exec dragonsstash npx prisma generate
```

Or, if running locally with node: `npx prisma generate`

- [ ] **Step 5: Commit**

```bash
git add prisma/schema.prisma prisma/migrations/
git commit -m "feat: add destMessageIds field to Package for multi-part forwarding"
```

---

## Task 2: Worker — Return All Message IDs from Upload

**Files:**
- Modify: `worker/src/upload/channel.ts:10-12,25-74`

- [ ] **Step 1: Update UploadResult interface**

In `worker/src/upload/channel.ts`, change the interface to include all IDs:

```typescript
export interface UploadResult {
  messageId: bigint;
  messageIds: bigint[];
}
```

- [ ] **Step 2: Collect all message IDs in uploadToChannel**

Replace the upload loop to track all message IDs:

```typescript
export async function uploadToChannel(
  client: Client,
  chatId: bigint,
  filePaths: string[],
  caption?: string
): Promise<UploadResult> {
  const allMessageIds: bigint[] = [];

  for (let i = 0; i < filePaths.length; i++) {
    const filePath = filePaths[i];
    const fileCaption = i === 0 && caption ? caption : undefined;

    const fileName = path.basename(filePath);
    let fileSizeMB = 0;
    try {
      const s = await stat(filePath);
      fileSizeMB = Math.round(s.size / (1024 * 1024));
    } catch {
      // Non-critical
    }

    log.info(
      { chatId: Number(chatId), fileName, sizeMB: fileSizeMB, part: i + 1, total: filePaths.length },
      "Uploading file to channel"
    );

    const serverMsgId = await sendWithRetry(client, chatId, filePath, fileCaption, fileName, fileSizeMB);
    allMessageIds.push(serverMsgId);

    // Rate limit delay between uploads
    if (i < filePaths.length - 1) {
      await sleep(config.apiDelayMs);
    }
  }

  if (allMessageIds.length === 0) {
    throw new Error("Upload failed: no messages sent");
  }

  log.info(
    { chatId: Number(chatId), messageId: Number(allMessageIds[0]), files: filePaths.length },
    "All uploads confirmed by Telegram"
  );

  return { messageId: allMessageIds[0], messageIds: allMessageIds };
}
```

- [ ] **Step 3: Commit**

```bash
git add worker/src/upload/channel.ts
git commit -m "feat: return all message IDs from uploadToChannel for multi-part"
```

---

## Task 3: Worker — Store All Message IDs in Database

**Files:**
- Modify: `worker/src/db/queries.ts:104-155`
- Modify: `worker/src/worker.ts:1056-1086`

- [ ] **Step 1: Add destMessageIds to CreatePackageInput**

In `worker/src/db/queries.ts`, add the field to the interface:

```typescript
export interface CreatePackageInput {
  // ... existing fields ...
  destMessageId?: bigint;
  destMessageIds?: bigint[];
  // ... rest ...
}
```

- [ ] **Step 2: Store destMessageIds in createPackageWithFiles**

In the `db.package.create` call inside `createPackageWithFiles`, add:

```typescript
destMessageIds: input.destMessageIds ?? (input.destMessageId ? [input.destMessageId] : []),
```

- [ ] **Step 3: Pass messageIds from worker pipeline**

In `worker/src/worker.ts`, the upload section (around lines 1068-1085) currently does:

```typescript
destResult = await uploadToChannel(client, destChannelTelegramId, uploadPaths);
```

After this, when calling `createPackageWithFiles`, add `destMessageIds`:

```typescript
const pkg = await createPackageWithFiles({
  // ... existing fields ...
  destMessageId: destResult.messageId,
  destMessageIds: destResult.messageIds,
  // ... rest ...
});
```

- [ ] **Step 4: Commit**

```bash
git add worker/src/db/queries.ts worker/src/worker.ts
git commit -m "feat: store all multi-part message IDs in package record"
```

---

## Task 4: Bot — Forward All Parts

**Files:**
- Modify: `bot/src/db/queries.ts:110-132`
- Modify: `bot/src/send-listener.ts:105-169`
- Modify: `bot/src/tdlib/client.ts:66-122`

- [ ] **Step 1: Include destMessageIds in bot query**

In `bot/src/db/queries.ts`, add `destMessageIds` to the `getPendingSendRequest` select:

```typescript
package: {
  select: {
    id: true,
    fileName: true,
    fileSize: true,
    fileCount: true,
    creator: true,
    tags: true,
    archiveType: true,
    destChannelId: true,
    destMessageId: true,
    destMessageIds: true, // <-- ADD THIS
    isMultipart: true,    // <-- ADD THIS (for logging)
    partCount: true,      // <-- ADD THIS (for logging)
    previewData: true,
    sourceChannel: { select: { title: true, telegramId: true } },
  },
},
```

- [ ] **Step 2: Add copyMultipleMessagesToUser helper**

In `bot/src/tdlib/client.ts`, add a new export after `copyMessageToUser`:

```typescript
/**
 * Send multiple document messages from a channel to a user's DM.
 * Used for multi-part archives where each part is a separate Telegram message.
 * Sends parts sequentially with a small delay to avoid rate limits.
 */
export async function copyMultipleMessagesToUser(
  fromChatId: bigint,
  messageIds: bigint[],
  toUserId: bigint
): Promise<void> {
  for (let i = 0; i < messageIds.length; i++) {
    await copyMessageToUser(fromChatId, messageIds[i], toUserId);
    // Small delay between parts to avoid rate limits
    if (i < messageIds.length - 1) {
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }
  }
}
```

- [ ] **Step 3: Update processSendRequest to forward all parts**

In `bot/src/send-listener.ts`, update the import to include the new function:

```typescript
import { copyMessageToUser, copyMultipleMessagesToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
```

Then replace the single `copyMessageToUser` call (around line 157) with logic that forwards all parts:

```typescript
// Forward the actual archive file(s) from the destination channel
const messageIds = pkg.destMessageIds as bigint[] | undefined;
if (messageIds && messageIds.length > 1) {
  log.info(
    { requestId, parts: messageIds.length },
    "Sending multi-part archive"
  );
  await copyMultipleMessagesToUser(
    destChannel.telegramId,
    messageIds,
    targetUserId
  );
} else {
  // Single part or legacy (no destMessageIds populated)
  await copyMessageToUser(
    destChannel.telegramId,
    pkg.destMessageId,
    targetUserId
  );
}
```

- [ ] **Step 4: Commit**

```bash
git add bot/src/db/queries.ts bot/src/send-listener.ts bot/src/tdlib/client.ts
git commit -m "feat: forward all parts of multi-part archives via bot"
```

---

## Task 5: Rebuild & Deploy Worker + Bot

- [ ] **Step 1: Rebuild worker image**

```bash
docker compose -f docker-compose.dev.yml build worker
docker tag dragonsstash-worker:latest git.samagsteribbe.nl/admin/dragonsstash-worker:latest
docker compose -p dragonsstash -f /opt/stacks/DragonsStash/docker-compose.yml up -d worker
```

- [ ] **Step 2: Rebuild bot image**

```bash
docker compose -f docker-compose.dev.yml build bot
docker tag dragonsstash-bot:latest git.samagsteribbe.nl/admin/dragonsstash-bot:latest
docker compose -p dragonsstash -f /opt/stacks/DragonsStash/docker-compose.yml up -d bot
```

- [ ] **Step 3: Verify bot startup**

```bash
docker logs dragonsstash-bot --tail=20
```

Expected: the bot starts cleanly and logs "Send listener started".

---

## Task 6: Kickstarter — Package Search Query

**Files:**
- Modify: `src/data/kickstarter.queries.ts`

- [ ] **Step 1: Add searchPackagesForLinking query**

Append to `src/data/kickstarter.queries.ts`:

```typescript
export async function searchPackagesForLinking(query: string, limit = 20) {
  if (!query || query.length < 2) return [];

  return prisma.package.findMany({
    where: {
      OR: [
        { fileName: { contains: query, mode: "insensitive" } },
        { creator: { contains: query, mode: "insensitive" } },
      ],
    },
    orderBy: { indexedAt: "desc" },
    take: limit,
    select: {
      id: true,
      fileName: true,
      fileSize: true,
      archiveType: true,
      creator: true,
      fileCount: true,
    },
  });
}

export async function getLinkedPackageIds(kickstarterId: string): Promise<string[]> {
  const links = await prisma.kickstarterPackage.findMany({
    where: { kickstarterId },
    select: { packageId: true },
  });
  return links.map((l) => l.packageId);
}
```

- [ ] **Step 2: Commit**

```bash
git add src/data/kickstarter.queries.ts
git commit -m "feat: add package search query for kickstarter linking"
```

---

## Task 7: Kickstarter — Package Linker Dialog Component

**Files:**
- Create: `src/app/(app)/kickstarters/_components/package-linker-dialog.tsx`

- [ ] **Step 1: Create the package linker dialog**

This component provides a search input to find packages and checkboxes to select/deselect them. It calls the existing `linkPackages` action on save.

```tsx
"use client";

import { useState, useTransition, useCallback, useEffect } from "react";
import { Search, Package, X, Loader2 } from "lucide-react";
import { toast } from "sonner";
import { linkPackages } from "../actions";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Badge } from "@/components/ui/badge";
import { Checkbox } from "@/components/ui/checkbox";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { ScrollArea } from "@/components/ui/scroll-area";

interface PackageResult {
  id: string;
  fileName: string;
  fileSize: bigint;
  archiveType: string;
  creator: string | null;
  fileCount: number;
}

interface PackageLinkerDialogProps {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  kickstarterId: string;
  kickstarterName: string;
  initialPackageIds: string[];
}

function formatSize(bytes: bigint | number): string {
  const b = Number(bytes);
  if (b >= 1024 * 1024 * 1024) return `${(b / (1024 * 1024 * 1024)).toFixed(1)} GB`;
  if (b >= 1024 * 1024) return `${(b / (1024 * 1024)).toFixed(0)} MB`;
  return `${(b / 1024).toFixed(0)} KB`;
}

export function PackageLinkerDialog({
  open,
  onOpenChange,
  kickstarterId,
  kickstarterName,
  initialPackageIds,
}: PackageLinkerDialogProps) {
  const [isPending, startTransition] = useTransition();
  const [searchQuery, setSearchQuery] = useState("");
  const [searchResults, setSearchResults] = useState<PackageResult[]>([]);
  const [isSearching, setIsSearching] = useState(false);
  const [selectedIds, setSelectedIds] = useState<Set<string>>(new Set(initialPackageIds));

  // Reset state when dialog opens
  useEffect(() => {
    if (open) {
      setSelectedIds(new Set(initialPackageIds));
      setSearchQuery("");
      setSearchResults([]);
    }
  }, [open, initialPackageIds]);

  const doSearch = useCallback(async (query: string) => {
    if (query.length < 2) {
      setSearchResults([]);
      return;
    }
    setIsSearching(true);
    try {
      const res = await fetch(`/api/packages/search?q=${encodeURIComponent(query)}&limit=20`);
      if (res.ok) {
        const data = await res.json();
        setSearchResults(data.packages ?? []);
      }
    } catch {
      // Ignore search errors
    } finally {
      setIsSearching(false);
    }
  }, []);

  // Debounced search
  useEffect(() => {
    const timer = setTimeout(() => doSearch(searchQuery), 300);
    return () => clearTimeout(timer);
  }, [searchQuery, doSearch]);

  function togglePackage(id: string) {
    setSelectedIds((prev) => {
      const next = new Set(prev);
      if (next.has(id)) next.delete(id);
      else next.add(id);
      return next;
    });
  }

  function handleSave() {
    startTransition(async () => {
      const result = await linkPackages(kickstarterId, Array.from(selectedIds));
      if (result.success) {
        toast.success(`Linked ${selectedIds.size} package(s) to "${kickstarterName}"`);
        onOpenChange(false);
      } else {
        toast.error(result.error);
      }
    });
  }

  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-lg">
        <DialogHeader>
          <DialogTitle>Link Packages</DialogTitle>
          <DialogDescription>
            Search and select STL packages to link to “{kickstarterName}”.
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-3">
          {/* Selected count */}
          {selectedIds.size > 0 && (
            <div className="flex items-center gap-2 text-sm text-muted-foreground">
              <Package className="h-4 w-4" />
              {selectedIds.size} package(s) selected
              <Button
                variant="ghost"
                size="sm"
                className="h-6 px-2 text-xs"
                onClick={() => setSelectedIds(new Set())}
              >
                Clear all
              </Button>
            </div>
          )}

          {/* Search input */}
          <div className="relative">
            <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
            <Input
              placeholder="Search packages by name or creator..."
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              className="pl-9"
              autoFocus
            />
            {isSearching && (
              <Loader2 className="absolute right-2.5 top-2.5 h-4 w-4 animate-spin text-muted-foreground" />
            )}
          </div>

          {/* Results */}
          <ScrollArea className="h-[300px] rounded-md border">
            <div className="p-2 space-y-1">
              {searchResults.length === 0 && searchQuery.length >= 2 && !isSearching && (
                <p className="text-sm text-muted-foreground text-center py-8">
                  No packages found
                </p>
              )}
              {searchQuery.length < 2 && (
                <p className="text-sm text-muted-foreground text-center py-8">
                  Type at least 2 characters to search
                </p>
              )}
              {searchResults.map((pkg) => (
                <label
                  key={pkg.id}
                  className="flex items-center gap-3 p-2 rounded-md hover:bg-muted/50 cursor-pointer"
                >
                  <Checkbox
                    checked={selectedIds.has(pkg.id)}
                    onCheckedChange={() => togglePackage(pkg.id)}
                  />
                  <div className="flex-1 min-w-0">
                    <p className="text-sm font-medium truncate">{pkg.fileName}</p>
                    <div className="flex items-center gap-2 text-xs text-muted-foreground">
                      {pkg.creator && <span>{pkg.creator}</span>}
                      <span>{formatSize(pkg.fileSize)}</span>
                      <Badge variant="outline" className="text-[10px] h-4 px-1">
                        {pkg.archiveType}
                      </Badge>
                      {pkg.fileCount > 0 && <span>{pkg.fileCount} files</span>}
                    </div>
                  </div>
                  {selectedIds.has(pkg.id) && (
                    <X className="h-3.5 w-3.5 text-muted-foreground shrink-0" />
                  )}
                </label>
              ))}
            </div>
          </ScrollArea>
        </div>

        <DialogFooter>
          <Button variant="outline" onClick={() => onOpenChange(false)}>
            Cancel
          </Button>
          <Button onClick={handleSave} disabled={isPending}>
            {isPending ? <Loader2 className="h-4 w-4 animate-spin mr-1" /> : null}
            Save ({selectedIds.size})
          </Button>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
```

- [ ] **Step 2: Commit**

```bash
git add src/app/(app)/kickstarters/_components/package-linker-dialog.tsx
git commit -m "feat: add package linker dialog for kickstarters"
```

---

## Task 8: Package Search API Route

**Files:**
- Create: `src/app/api/packages/search/route.ts`

- [ ] **Step 1: Create the API route**

The package linker dialog needs a client-side fetch for debounced search. Create a lightweight API route:

```typescript
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { searchPackagesForLinking } from "@/data/kickstarter.queries";

export const dynamic = "force-dynamic";

export async function GET(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const { searchParams } = new URL(request.url);
  const query = searchParams.get("q") ?? "";
  const limit = Math.min(Number(searchParams.get("limit") ?? "20"), 50);

  const packages = await searchPackagesForLinking(query, limit);

  // Serialize BigInt for JSON
  const serialized = packages.map((p) => ({
    ...p,
    fileSize: p.fileSize.toString(),
  }));

  return NextResponse.json({ packages: serialized });
}
```

- [ ] **Step 2: Commit**

```bash
git add src/app/api/packages/search/route.ts
git commit -m "feat: add package search API route for kickstarter linking"
```

---

## Task 9: Kickstarter — Send All Packages Action

**Files:**
- Modify: `src/app/(app)/kickstarters/actions.ts`

- [ ] **Step 1: Add sendAllKickstarterPackages action**

Append to `src/app/(app)/kickstarters/actions.ts`:

```typescript
export async function sendAllKickstarterPackages(
  kickstarterId: string
): Promise<ActionResult<{ queued: number }>> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const telegramLink = await prisma.telegramLink.findUnique({
      where: { userId: session.user.id },
    });

    if (!telegramLink) {
      return { success: false, error: "No linked Telegram account. Link one in Settings." };
    }

    const kickstarter = await prisma.kickstarter.findFirst({
      where: { id: kickstarterId, userId: session.user.id },
      select: {
        packages: {
          select: {
            package: {
              select: { id: true, destChannelId: true, destMessageId: true, fileName: true },
            },
          },
        },
      },
    });

    if (!kickstarter) {
      return { success: false, error: "Kickstarter not found" };
    }

    const sendablePackages = kickstarter.packages
      .map((lnk) => lnk.package)
      .filter((p) => p.destChannelId && p.destMessageId);

    if (sendablePackages.length === 0) {
      return { success: false, error: "No linked packages are available for sending" };
    }

    let queued = 0;
    for (const pkg of sendablePackages) {
      const existing = await prisma.botSendRequest.findFirst({
        where: {
          packageId: pkg.id,
          telegramLinkId: telegramLink.id,
          status: { in: ["PENDING", "SENDING"] },
        },
      });

      if (!existing) {
        const sendRequest = await prisma.botSendRequest.create({
          data: {
            packageId: pkg.id,
            telegramLinkId: telegramLink.id,
            requestedByUserId: session.user.id,
            status: "PENDING",
          },
        });

        try {
          await prisma.$queryRawUnsafe(
            `SELECT pg_notify('bot_send', $1)`,
            sendRequest.id
          );
        } catch {
          // Best-effort
        }

        queued++;
      }
    }

    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: { queued } };
  } catch {
    return { success: false, error: "Failed to send packages" };
  }
}
```

- [ ] **Step 2: Commit**

```bash
git add src/app/(app)/kickstarters/actions.ts
git commit -m "feat: add sendAllKickstarterPackages action"
```

---

## Task 10: Kickstarter Table — Wire Up Link & Send Actions

**Files:**
- Modify: `src/app/(app)/kickstarters/_components/kickstarter-columns.tsx`
- Modify: `src/app/(app)/kickstarters/_components/kickstarter-table.tsx`

- [ ] **Step 1: Add actions to column menu**

In `kickstarter-columns.tsx`, add `Link2` and `Send` imports from lucide-react, add `onLinkPackages` and `onSendAll` to the props, and add menu items:

```typescript
import { MoreHorizontal, Pencil, Trash2, ExternalLink, Link2, Send } from "lucide-react";

// Update interface:
interface KickstarterColumnsProps {
  onEdit: (kickstarter: KickstarterRow) => void;
  onDelete: (id: string) => void;
  onLinkPackages: (kickstarter: KickstarterRow) => void;
  onSendAll: (kickstarter: KickstarterRow) => void;
}
```

In the actions column dropdown, add between Edit and the separator:

```tsx
<DropdownMenuItem onClick={() => onLinkPackages(row.original)}>
  <Link2 className="mr-2 h-3.5 w-3.5" />
  Link Packages
</DropdownMenuItem>
{row.original._count.packages > 0 && (
  <DropdownMenuItem onClick={() => onSendAll(row.original)}>
    <Send className="mr-2 h-3.5 w-3.5" />
    Send All ({row.original._count.packages})
  </DropdownMenuItem>
)}
```

Update the function signature to destructure the new props:

```typescript
export function getKickstarterColumns({
  onEdit,
  onDelete,
  onLinkPackages,
  onSendAll,
}: KickstarterColumnsProps): ColumnDef<KickstarterRow, unknown>[] {
```

- [ ] **Step 2: Wire up state in kickstarter-table.tsx**

Add imports and state for the new dialogs:

```typescript
import { PackageLinkerDialog } from "./package-linker-dialog";
import { sendAllKickstarterPackages } from "../actions";

// Inside KickstarterTable:
const [linkTarget, setLinkTarget] = useState<KickstarterRow | null>(null);
const [sendAllTarget, setSendAllTarget] = useState<KickstarterRow | null>(null);
```

Update the columns call:

```typescript
const columns = getKickstarterColumns({
  onEdit: (kickstarter) => {
    setEditKickstarter(kickstarter);
    setModalOpen(true);
  },
  onDelete: (id) => setDeleteId(id),
  onLinkPackages: (kickstarter) => setLinkTarget(kickstarter),
  onSendAll: (kickstarter) => {
    startTransition(async () => {
      const result = await sendAllKickstarterPackages(kickstarter.id);
      if (result.success) {
        toast.success(`Queued ${result.data!.queued} package(s) for delivery`);
      } else {
        toast.error(result.error);
      }
    });
  },
});
```

Add the `PackageLinkerDialog` before the closing `</div>` of the component's return:

```tsx
{linkTarget && (
  <PackageLinkerDialog
    open={!!linkTarget}
    onOpenChange={(open) => !open && setLinkTarget(null)}
    kickstarterId={linkTarget.id}
    kickstarterName={linkTarget.name}
    initialPackageIds={[]}
  />
)}
```

Note: `initialPackageIds` is `[]` because the table doesn't fetch linked packages. The dialog would start empty but preserve selections during the session. For a better UX, we fetch the linked IDs when the dialog opens — see step 3.

- [ ] **Step 3: Fetch initial linked packages when dialog opens**

To populate the dialog with already-linked packages, add an API route or use a server action. The simplest approach: modify the `PackageLinkerDialog` to fetch the linked IDs on mount.

In `package-linker-dialog.tsx`, extend the `useEffect` that runs when `open` changes:

```typescript
useEffect(() => {
  if (open) {
    setSearchQuery("");
    setSearchResults([]);
    // Fetch currently linked packages
    fetch(`/api/packages/linked?kickstarterId=${kickstarterId}`)
      .then((res) => res.json())
      .then((data) => {
        if (data.packageIds) {
          setSelectedIds(new Set(data.packageIds));
        }
      })
      .catch(() => {});
  }
}, [open, kickstarterId]);
```

Create the API route at `src/app/api/packages/linked/route.ts`:

```typescript
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { getLinkedPackageIds } from "@/data/kickstarter.queries";

export const dynamic = "force-dynamic";

export async function GET(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const { searchParams } = new URL(request.url);
  const kickstarterId = searchParams.get("kickstarterId");
  if (!kickstarterId) {
    return NextResponse.json({ error: "kickstarterId required" }, { status: 400 });
  }

  const packageIds = await getLinkedPackageIds(kickstarterId);
  return NextResponse.json({ packageIds });
}
```

- [ ] **Step 4: Commit**

```bash
git add src/app/(app)/kickstarters/_components/ src/app/api/packages/
git commit -m "feat: wire up package linking and send-all in kickstarter table"
```

---

## Task 11: Rebuild & Deploy App

- [ ] **Step 1: Rebuild app image**

```bash
docker compose build app  # or the equivalent for the production compose
docker tag dragonsstash:latest git.samagsteribbe.nl/admin/dragonsstash:latest
docker compose -p dragonsstash -f /opt/stacks/DragonsStash/docker-compose.yml up -d app
```

- [ ] **Step 2: Verify app startup**

```bash
docker logs dragonsstash --tail=20
```

Expected: the app starts cleanly and the health check passes.

- [ ] **Step 3: Manual test**

1. Go to the Kickstarters tab
2. Open a kickstarter's row menu → "Link Packages"
3. Search for a package, select it, save
4. Verify the package count column updates
5. Use "Send All" to queue all linked packages for Telegram delivery
docs/superpowers/plans/2026-03-30-grouping-audit-report.md (new file, 472 lines)

@@ -0,0 +1,472 @@
# Dragonstash Grouping System Audit & Enhancement Report

## Appendix: Real-World Failure Cases (2026-03-29/30)

These skipped packages reveal two concrete issues:

### Issue A: `WORKER_MAX_ZIP_SIZE_MB` was 4 GB — blocking all large multipart archives

| File | Parts | Total Size | Status |
|------|-------|------------|--------|
| DM-Stash - Guide to Tharador - Complete STL | 19 | 70.5 GB | SIZE_LIMIT |
| DM-Stash - 2023-05 - Greywinds All-in | 16 | 58.9 GB | SIZE_LIMIT |
| Axolote Gaming - Castle of the Vampire Lord | 10 | 18 GB | SIZE_LIMIT |
| Dungeon Blocks - THE ULTIMATE DUNGEON | 5 | 7.6 GB | SIZE_LIMIT |
| Dungeon Blocks - The Toxic sewer | 4 | 6.2 GB | SIZE_LIMIT |
| Soulmist | 4 | 6.3 GB | SIZE_LIMIT |
| Medieval Town PT1 | 3 | 5.7 GB | SIZE_LIMIT |
| Knight Models - Game Of Thrones | 3 | 5.5 GB | SIZE_LIMIT |
| Dungeon Blocks - The Lost Cave | 3 | 4.9 GB | SIZE_LIMIT |
| El Miniaturista 2025-05 Fulgrim Part II and III | 5 | 4.7 GB | SIZE_LIMIT |

**Root cause:** The production env had `WORKER_MAX_ZIP_SIZE_MB=4096`. The default in code is 204800 (200 GB), but docker-compose.yml defaulted to 4096.

**Fix applied:** Raised to 204800 in `/opt/stacks/DragonsStash/.env` and restarted the worker. These archives will be retried on the next ingestion cycle. The worker downloads parts individually (each under 2-4 GB), concatenates them, and re-splits at 1950 MiB for upload. Peak temp disk usage for the 70.5 GB archive is ~211 GB, roughly three copies on disk at once (the downloaded parts, the concatenated file, and the re-split parts), against 353 GB available.

**Code fix:** `MAX_PART_SIZE` is now configurable via the `MAX_PART_SIZE_MB` env var (it was hardcoded at 1950). Set it to 3900 for Telegram Premium accounts to avoid unnecessary splitting.
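
A minimal sketch of that code fix, assuming the worker reads plain env vars the way `WORKER_MAX_ZIP_SIZE_MB` is read; the variable name `MAX_PART_SIZE_MB` comes from this report, the surrounding code is illustrative:

```typescript
// Part size for split uploads, in MiB. Defaults to the non-Premium-safe 1950;
// operators on a Telegram Premium account can set MAX_PART_SIZE_MB=3900.
const MIB = 1024 * 1024;
const parsed = Number(process.env.MAX_PART_SIZE_MB);
const maxPartSizeMb = Number.isFinite(parsed) && parsed > 0 ? parsed : 1950;

export const MAX_PART_SIZE = maxPartSizeMb * MIB; // bytes
```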

### Issue B: Download failure at 98% (DE1-Supported.7z)

| File | Size | Error |
|------|------|-------|
| DE1-Supported.7z | 1.9 GB | Download stopped unexpectedly at 2043674624/2078338541 bytes (98%) |

**Root cause:** The download stalled near completion, and there was no retry mechanism.

**Fix applied:** Earlier in this session, download retry logic was added (max 3 retries, with `cancelDownloadFile` before each retry). This file will be retried automatically on the next ingestion cycle.

---

## Deliverable 1: Audit Report — Current State

### 1.1 Grouping Signal Stack (Current)

The system currently uses exactly **one automatic grouping signal**:

| Priority | Signal | Status | Location |
|----------|--------|--------|----------|
| 1 | `mediaAlbumId` | Implemented | `worker/src/grouping.ts:26-33` |
| 2 | Manual override | Implemented | `src/lib/telegram/queries.ts:606-639` |

**How it works:**
- `processAlbumGroups()` in `worker/src/grouping.ts` groups indexed packages by `mediaAlbumId` (filtering out "0" and null)
- For albums with 2+ members: it creates a `PackageGroup`, links the packages, and assigns a name from the album photo caption or the first filename
- Manual grouping via the UI: select 2+ packages, enter a name, and the group is created in `createManualGroup()` (see the sketch after this list)
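
A condensed sketch of that album pass; the types and helpers here are illustrative, not the actual `worker/src/grouping.ts` code:

```typescript
interface IndexedPackage {
  id: string;
  fileName: string;
  mediaAlbumId: string | null;
  albumCaption: string | null; // caption of the album photo, if any
}

function clusterByAlbum(packages: IndexedPackage[]): Map<string, IndexedPackage[]> {
  const clusters = new Map<string, IndexedPackage[]>();
  for (const pkg of packages) {
    // "0" and null both mean "not part of an album", so they are filtered out
    if (!pkg.mediaAlbumId || pkg.mediaAlbumId === "0") continue;
    const members = clusters.get(pkg.mediaAlbumId) ?? [];
    members.push(pkg);
    clusters.set(pkg.mediaAlbumId, members);
  }
  // Only albums with 2+ members become PackageGroups
  for (const [albumId, members] of clusters) {
    if (members.length < 2) clusters.delete(albumId);
  }
  return clusters;
}

// Group name falls back from the album photo caption to the first filename
function groupName(members: IndexedPackage[]): string {
  return members.find((m) => m.albumCaption)?.albumCaption ?? members[0].fileName;
}
```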

**What does NOT exist:**
- No `message_thread_id` (forum topic) scoping
- No project/month pattern extraction from filenames
- No creator/sender grouping
- No time-window + sender clustering
- No reply chain analysis
- No ZIP internal path prefix matching
- No caption fuzzy matching
- No staging queue for ungrouped files

### 1.2 Multipart Archive Detection (`worker/src/archive/multipart.ts`)

This is a **separate system** from display grouping. `groupArchiveSets()` groups Telegram messages into `ArchiveSet[]` based on filename patterns:

- `.zip.001`, `.zip.002` → ZIP_NUMBERED
- `.z01`, `.z02`, `.zip` → ZIP_LEGACY
- `.part1.rar`, `.part2.rar` → RAR_PART
- `.r00`, `.r01`, `.rar` → RAR_LEGACY

These are grouped by a `format:baseName.toLowerCase()` key. This is about **reassembling split archives**, not UI grouping; an `ArchiveSet` becomes a single `Package` in the database.
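
An illustrative classifier for those patterns (the real `groupArchiveSets()` may differ in detail; note that a lone `.zip` with no `.z01` siblings is just a single-part archive, which the real code has to resolve at grouping time):

```typescript
type MultipartFormat = "ZIP_NUMBERED" | "ZIP_LEGACY" | "RAR_PART" | "RAR_LEGACY";

function classifyPart(fileName: string): { format: MultipartFormat; baseName: string } | null {
  const lower = fileName.toLowerCase();
  let m: RegExpMatchArray | null;
  if ((m = lower.match(/^(.+)\.zip\.(\d{3})$/))) return { format: "ZIP_NUMBERED", baseName: m[1] };
  if ((m = lower.match(/^(.+)\.(z\d{2}|zip)$/))) return { format: "ZIP_LEGACY", baseName: m[1] };
  if ((m = lower.match(/^(.+)\.part(\d+)\.rar$/))) return { format: "RAR_PART", baseName: m[1] };
  if ((m = lower.match(/^(.+)\.(r\d{2}|rar)$/))) return { format: "RAR_LEGACY", baseName: m[1] };
  return null;
}

// Parts sharing the same `${format}:${baseName}` key belong to one ArchiveSet
const setKey = (p: { format: MultipartFormat; baseName: string }) => `${p.format}:${p.baseName}`;
```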

### 1.3 TDLib Ingestion Handler

**Pipeline in `worker/src/worker.ts:801-1197`:**
```
processOneArchiveSet():
  1. Early skip check (source message ID)
  2. Size guard (maxZipSizeMB)
  3. Download all parts
  4. Compute SHA-256 hash
  5. Check hash dedup
  6. Read archive metadata
  7. Split/repack if needed
  8. Upload to destination
  9. Download preview
  10. Extract fallback preview
  11. Resolve creator
  12. Index in database
  13. Cleanup temp files
```

**Post-indexing:** `processAlbumGroups()` is called once per channel/topic scan to create album-based groups.

**Gaps:**
- Messages are never "dropped" silently — failures go to the `SkippedPackage` table with a reason
- The watermark only advances past successfully processed sets (failed sets block advancement)
- No messages are missed within a channel, but there is no audit to verify completeness after the fact

### 1.4 Hash Verification

**What IS verified:**

| Check | Where | When |
|-------|-------|------|
| Download file size | `download.ts:verifyAndMove()` | After each file download |
| SHA-256 content hash | `worker.ts:952` | After download; used for dedup (see the sketch below) |
| Telegram upload confirmation | `channel.ts:updateMessageSendSucceeded` | Waits for server ACK |
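
The dedup hash in pipeline step 4 amounts to a streaming SHA-256 over the downloaded archive (for a multi-part set, the parts in order). A sketch, not the actual `worker.ts` code:

```typescript
import { createHash } from "node:crypto";
import { createReadStream } from "node:fs";

// Stream the file through SHA-256 so multi-GB archives never load into memory
export function sha256File(filePath: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const hash = createHash("sha256");
    createReadStream(filePath)
      .on("data", (chunk) => hash.update(chunk))
      .on("error", reject)
      .on("end", () => resolve(hash.digest("hex")));
  });
}
```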

**What is NOT verified:**

| Gap | Impact |
|-----|--------|
| No hash after upload | Can't detect Telegram-side corruption |
| No hash after split | Split files could be silently corrupted |
| CRC-32 extracted but never checked | ZIP/RAR per-file integrity not validated |
| No end-to-end hash | Split files have a different hash than the original |
| No periodic audit job | Stale/missing data is never detected |

### 1.5 File Size Limit

| Setting | Value | Configurable? | Location |
|---------|-------|---------------|----------|
| `MAX_PART_SIZE` | 1950 MiB | **Hardcoded** | `worker/src/archive/split.ts:14` |
| `MAX_UPLOAD_SIZE` | 1950 MiB | **Hardcoded** | `worker/src/worker.ts:1023` |
| `maxZipSizeMB` | 200 GB | `WORKER_MAX_ZIP_SIZE_MB` env var | `worker/src/util/config.ts:6` |

The 1950 MiB limit is deliberately below 2 GiB to avoid TDLib's `FILE_PARTS_INVALID` error. There is **no Premium awareness** — all accounts are treated as non-Premium.

### 1.6 Search Implementation

- **No fuzzy search** — uses Prisma's `contains` with `mode: "insensitive"` (translates to PostgreSQL `ILIKE`)
- **No full-text search infrastructure** — no `tsvector`, no GiST/GIN indexes
- **Indexes:** B-tree on `fileName`, `creator`, `archiveType`, `indexedAt`, plus `PackageFile.fileName` and `extension`
- Search works for substring matching but won't match typos or similar names (see the sketch after this list)
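
Reconstructed shape of that search (a sketch assuming a shared `prisma` client; the fields match the Package model used elsewhere in this report). Prisma's `contains` compiles to a leading-wildcard `ILIKE '%query%'`, which the B-tree indexes above cannot accelerate and which never matches typos:

```typescript
import { prisma } from "@/lib/prisma"; // assumed shared client path

export async function searchPackages(query: string, limit = 10) {
  return prisma.package.findMany({
    where: {
      OR: [
        { fileName: { contains: query, mode: "insensitive" } },
        { creator: { contains: query, mode: "insensitive" } },
      ],
    },
    orderBy: { indexedAt: "desc" },
    take: limit,
  });
}
```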

### 1.7 Notification Infrastructure

- **pg_notify channels:** `bot_send`, `new_package` (bot), plus 7 worker channels (a listener sketch follows this list)
- **Bot subscriptions:** pattern match (case-insensitive substring) on `fileName` and `creator`
- **UI notifications:** Sonner toasts (ephemeral only)
- **No persistent notification store** — no database model for notifications
- **No notification UI panel** in the web app
- **No alerts for:** grouping conflicts, hash mismatches, missing parts, or upload failures (beyond the SkippedPackage table)
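
For reference, consuming one of those channels looks roughly like this with the `pg` driver (a sketch; the bot's actual listener wiring may differ):

```typescript
import { Client } from "pg";

export async function listenForSendRequests(
  connectionString: string,
  onRequest: (requestId: string) => void
): Promise<void> {
  // LISTEN is per-connection state, so this connection must stay open
  const client = new Client({ connectionString });
  await client.connect();
  await client.query("LISTEN bot_send");
  client.on("notification", (msg) => {
    // pg_notify('bot_send', <request id>) arrives as msg.payload
    if (msg.channel === "bot_send" && msg.payload) onRequest(msg.payload);
  });
}
```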

---

## Deliverable 2: Revised Grouping Signal Stack

### Recommended Implementation Plan

I recommend an **incremental approach** — implement signals in phases, starting with the highest-value, lowest-risk ones.

### Phase 1: Foundation (Required Before Other Signals)

#### Signal 9: Manual Override Persistence
**Status:** Partially implemented. Manual groups exist but don't influence future auto-grouping.

**Implementation:**
- Add a `groupingSource` field to `PackageGroup`: `"ALBUM" | "MANUAL" | "AUTO_PATTERN" | "AUTO_TIME" | "AUTO_REPLY" | "AUTO_ZIP" | "AUTO_CAPTION"`
- Manual groups already persist. What's missing is the **training feedback** where a manual grouping teaches the system to auto-group similar future files.
- This requires a `GroupingRule` model (see the schema diff below) that stores learned patterns from manual overrides.

#### Ungrouped Staging Queue
**Implementation:**
- After ingestion, packages without a `packageGroupId` are naturally "ungrouped"
- Add a filter/tab to the STL page: "Ungrouped", showing packages where `packageGroupId IS NULL`
- No schema change needed — just a query filter

### Phase 2: High-Value Automatic Signals

#### Signal 1: `mediaAlbumId` (Already Implemented)
No changes needed. This is working correctly.

#### Signal 2: `message_thread_id` Forum Topic Scoping
**Status:** Already used for scan scoping (the worker scans by topic), but not used as a grouping signal.

**Implementation:**
- `sourceTopicId` is already stored on `Package` (schema line 469)
- Use it as a **scoping constraint** for all other signals: time-window, caption matching, etc. apply only within the same topic
- No additional schema changes needed

#### Signal 5: Time Window + Sender Grouping
**Implementation:**
- After album grouping, find ungrouped packages from the same source channel + topic
- Within a configurable window (default 5 min), cluster by proximity
- Since we don't have a `sender_id` from the source channel (TDLib's `searchChatMessages` doesn't return it for channels), this becomes **time-window within topic/channel** (see the sketch after this list)
- New config: `AUTO_GROUP_TIME_WINDOW_MINUTES` (default: 5)
|
||||
|
||||
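
A minimal sketch of the clustering step, assuming each scanned package carries its source message date (names are illustrative, not the final API):

```typescript
// Single-linkage time clustering: a package joins the current cluster when
// its gap to the previous message is within the window; otherwise it starts
// a new cluster.
export function clusterByTimeWindow<T extends { sourceMessageDate: Date }>(
  packages: T[],
  windowMinutes = 5
): T[][] {
  const windowMs = windowMinutes * 60_000;
  const sorted = [...packages].sort(
    (a, b) => a.sourceMessageDate.getTime() - b.sourceMessageDate.getTime()
  );
  const clusters: T[][] = [];
  for (const pkg of sorted) {
    const current = clusters[clusters.length - 1];
    const prev = current ? current[current.length - 1] : undefined;
    if (
      current &&
      prev &&
      pkg.sourceMessageDate.getTime() - prev.sourceMessageDate.getTime() <= windowMs
    ) {
      current.push(pkg);
    } else {
      clusters.push([pkg]);
    }
  }
  // Singletons stay ungrouped; only clusters of 2+ become groups.
  return clusters.filter((c) => c.length >= 2);
}
```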

#### Signal 3: Project/Month Pattern Extraction

**Implementation:**

- Extract date patterns from filenames/captions: `YYYY-MM`, `YYYY_MM`, `MonthName Year`
- Extract project slugs: the common prefix before a separator (e.g., "ProjectName - File1.zip" and "ProjectName - File2.zip")
- Group packages with matching patterns from the same channel
- This should run as a **post-processing pass** after time-window grouping, merging small time-window groups that share a pattern (see the sketch after this list)
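
Illustrative extractors for the two pattern families; the real pass would normalize the results and likely combine several extractors:

```typescript
// Hypothetical helpers covering the patterns named above.
const MONTH_NAMES =
  "January|February|March|April|May|June|July|August|September|October|November|December";

const DATE_PATTERNS = [
  /\b20\d{2}[-_](?:0[1-9]|1[0-2])\b/, // 2026-05 or 2026_05
  new RegExp(`\\b(?:${MONTH_NAMES})\\s+20\\d{2}\\b`, "i"), // "May 2026"
];

export function extractDateKey(text: string): string | null {
  for (const re of DATE_PATTERNS) {
    const m = text.match(re);
    if (m) return m[0].toLowerCase().replace("_", "-");
  }
  return null;
}

// "ProjectName - File1.zip" -> "projectname"
export function extractProjectSlug(fileName: string): string | null {
  const m = fileName.match(/^(.+?)\s*-\s+/);
  return m ? m[1].trim().toLowerCase() : null;
}
```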

#### Signal 4: Creator Grouping

**Implementation:**

- The `creator` field is already extracted from filenames and stored per-package
- Within a channel, if multiple ungrouped packages have the same `creator` and were indexed within the same ingestion run, auto-group them
- Lower priority than time-window (might create overly broad groups)

### Phase 3: Advanced Signals

#### Signal 6: Reply Chain

**Implementation:**

- TDLib messages have `reply_to_message_id`, but this isn't currently captured during scanning
- Would need to modify `getChannelMessages()` in `download.ts` to extract `reply_to_message_id`
- Then: if message B replies to message A, and both are archives, group them
- **Moderate complexity**, deferred to Phase 3

#### Signal 7: ZIP Internal Path Prefix

**Implementation:**

- We already have `PackageFile.path` stored for each file inside an archive
- After indexing, find the common root folder across all files (see the sketch after this list)
- If two packages share the same root prefix and the same channel, suggest grouping
- This is a **post-hoc analysis** that could run as a background job
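
A sketch of the root-folder derivation, assuming `/`-separated paths as stored in `PackageFile.path`:

```typescript
// A single shared top-level folder is a usable grouping key; mixed roots
// mean the archive has no meaningful common prefix.
export function commonRootFolder(paths: string[]): string | null {
  if (paths.length === 0) return null;
  const roots = new Set(paths.map((p) => p.split("/")[0]));
  if (roots.size !== 1) return null;
  const [root] = roots;
  return root || null; // empty string = files at archive root, no key
}

// Packages in the same channel whose commonRootFolder() values match
// become grouping *suggestions*, not automatic merges.
```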

#### Signal 8: Caption Fuzzy Match

**Implementation:**

- Currently, captions from source messages are NOT stored (only photo captions for preview matching)
- Would need to capture `msg.content?.caption?.text` during scanning and store it on `Package`
- Then: fuzzy-match captions from nearby messages in the same channel
- **Requires schema change + scan modification**, deferred to Phase 3

---

## Deliverable 3: Schema Diff

All changes are **additive** — no columns dropped, no types changed.

```prisma
// ── PackageGroup additions ──
model PackageGroup {
  // ... existing fields ...
  groupingSource GroupingSource @default(MANUAL) // NEW: how this group was created
}

// NEW enum
enum GroupingSource {
  ALBUM        // From Telegram mediaAlbumId
  MANUAL       // User-created via UI
  AUTO_PATTERN // Filename/date pattern matching
  AUTO_TIME    // Time-window clustering
  AUTO_REPLY   // Reply chain
  AUTO_ZIP     // ZIP path prefix
  AUTO_CAPTION // Caption fuzzy match
}

// ── Package additions ──
model Package {
  // ... existing fields ...
  sourceCaption String? // NEW: caption text from source Telegram message
}

// ── New model: GroupingRule (training from manual overrides) ──
model GroupingRule {
  id               String         @id @default(cuid())
  sourceChannelId  String
  pattern          String         // Regex or glob pattern learned from manual grouping
  signalType       GroupingSource // Which signal this rule applies to
  confidence       Float          @default(1.0)
  createdAt        DateTime       @default(now())
  createdByGroupId String?        // The manual group that spawned this rule

  sourceChannel TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)

  @@index([sourceChannelId])
  @@map("grouping_rules")
}

// ── New model: SystemNotification ──
model SystemNotification {
  id        String               @id @default(cuid())
  type      NotificationType
  severity  NotificationSeverity @default(INFO)
  title     String
  message   String
  context   Json?                // Structured data: packageId, groupId, sourceMessageId, etc.
  isRead    Boolean              @default(false)
  createdAt DateTime             @default(now())

  @@index([isRead, createdAt])
  @@index([type])
  @@map("system_notifications")
}

enum NotificationType {
  HASH_MISMATCH
  MISSING_PART
  UPLOAD_FAILED
  DOWNLOAD_FAILED
  GROUPING_CONFLICT
  INTEGRITY_AUDIT
}

enum NotificationSeverity {
  INFO
  WARNING
  ERROR
}

// ── Config additions (worker/src/util/config.ts) ──
// maxPartSizeMB: parseInt(process.env.MAX_PART_SIZE_MB ?? "1950", 10)
// autoGroupTimeWindowMinutes: parseInt(process.env.AUTO_GROUP_TIME_WINDOW_MINUTES ?? "5", 10)
// telegramPremium: process.env.TELEGRAM_PREMIUM === "true"
```

**Migration notes:**

- All new fields are optional or have defaults — zero risk to existing data
- `GroupingSource` enum added with `@default(MANUAL)` — existing groups unaffected
- `GroupingRule` and `SystemNotification` are new tables — no impact on existing data
- Backfill: set `groupingSource = ALBUM` for groups where `mediaAlbumId IS NOT NULL`

---

## Deliverable 4: Notification Contract

### Event Shape

```typescript
interface SystemNotificationEvent {
  type: NotificationType;
  severity: "INFO" | "WARNING" | "ERROR";
  title: string;
  message: string;
  context: {
    packageId?: string;
    groupId?: string;
    sourceChannelId?: string;
    sourceMessageId?: bigint;
    fileName?: string;
    partNumber?: number;
    totalParts?: number;
    expectedHash?: string;
    actualHash?: string;
    reason?: string;
  };
}
```

### Where Notifications Fire

| Event | Where | Trigger |
|-------|-------|---------|
| `HASH_MISMATCH` | `worker/src/worker.ts` after split | SHA-256 of concatenated split parts != original hash |
| `MISSING_PART` | Periodic audit job (new) | Group has `partCount > 1` but fewer than `partCount` dest messages exist |
| `UPLOAD_FAILED` | `worker/src/worker.ts` catch block | Upload fails after all retries exhausted |
| `DOWNLOAD_FAILED` | `worker/src/worker.ts` catch block | Download fails after all retries |
| `GROUPING_CONFLICT` | Auto-grouping pass (new) | Two signals suggest different groups for the same package |
| `INTEGRITY_AUDIT` | Periodic job (new) | Scheduled check finds inconsistencies |

### Delivery

1. **Database:** Always persisted to the `SystemNotification` table
2. **pg_notify:** `SELECT pg_notify('system_notification', jsonPayload)` for real-time delivery (see the sketch below)
3. **Web UI:** Notification bell/panel that polls or listens for new notifications
4. **Telegram (optional):** Forward critical notifications to the admin via the bot
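
A minimal emit helper tying steps 1 and 2 together, sketched against the generated Prisma client and the `SystemNotification` model above (the payload shape is illustrative):

```typescript
import {
  PrismaClient,
  Prisma,
  NotificationType,
  NotificationSeverity,
} from "@prisma/client";

const prisma = new PrismaClient();

// Persist first, then notify: a listener that misses the NOTIFY can always
// re-read unread rows from the table, so no event is lost.
export async function emitSystemNotification(input: {
  type: NotificationType;
  severity?: NotificationSeverity;
  title: string;
  message: string;
  context?: Prisma.InputJsonValue;
}) {
  const row = await prisma.systemNotification.create({
    data: { severity: "INFO", ...input },
  });
  // Keep the pg_notify payload small; consumers fetch the full row by id.
  const payload = JSON.stringify({ id: row.id, type: row.type, severity: row.severity });
  await prisma.$executeRaw`SELECT pg_notify('system_notification', ${payload})`;
  return row;
}
```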

---

## Deliverable 5: Feature Flag Plan

### Runtime Configuration (Environment Variables)

| Flag | Type | Default | Purpose |
|------|------|---------|---------|
| `TELEGRAM_PREMIUM` | boolean | `false` | Enable the 4 GB upload limit |
| `MAX_PART_SIZE_MB` | number | `1950` | Split threshold in MiB (overrides the hardcoded value) |
| `AUTO_GROUP_ENABLED` | boolean | `false` | Enable automatic grouping beyond albums |
| `AUTO_GROUP_TIME_WINDOW_MINUTES` | number | `5` | Time-window clustering threshold |
| `AUTO_GROUP_PATTERN_ENABLED` | boolean | `false` | Enable filename/date pattern grouping |
| `INTEGRITY_AUDIT_ENABLED` | boolean | `false` | Enable the periodic integrity audit |
| `INTEGRITY_AUDIT_INTERVAL_HOURS` | number | `24` | How often to run the audit |

### Premium Mode Behavior

When `TELEGRAM_PREMIUM=true`:

1. `MAX_PART_SIZE_MB` defaults to `3900` (safely under 4 GiB) instead of `1950`
2. Files under 4 GB: uploaded as-is (no splitting)
3. Files over 4 GB: split using the existing `byteLevelSplit()` at the new threshold
4. The existing split/rejoin logic is **kept as a fallback** — never removed
5. `isMultipart` and `partCount` continue to track the actual upload state

### Implementation in `split.ts`

```typescript
// Replace the hardcoded constant with a config-driven value:
const MAX_PART_SIZE = BigInt(config.maxPartSizeMB) * 1024n * 1024n;
```

And in `config.ts`:

```typescript
maxPartSizeMB: parseInt(
  process.env.MAX_PART_SIZE_MB ??
    (process.env.TELEGRAM_PREMIUM === "true" ? "3900" : "1950"),
  10
),
```

### Rollout Strategy

1. **All flags default to off** — zero behavior change on deploy
2. Enable `TELEGRAM_PREMIUM` first (simple, well understood)
3. Enable `AUTO_GROUP_ENABLED` on a **per-channel basis** (see test plan) before enabling it globally
4. Enable `INTEGRITY_AUDIT_ENABLED` after manual validation
5. Enable pattern-based grouping last (highest complexity)

---

## Deliverable 6: Test Plan

### Phase 0: Pre-Implementation Validation

Before touching any code, verify the current system baseline:

1. **Pick one test channel** with known content (a mix of albums, single files, and multipart archives)
2. Run an ingestion cycle and record the number of packages, groups, and skipped archives
3. Verify all album-based groups are correct
4. Note any ungrouped files that "should" be grouped
5. This becomes the **regression baseline**

### Phase 1: Premium Mode Testing

1. Set `TELEGRAM_PREMIUM=true` and `MAX_PART_SIZE_MB=3900`
2. Manually upload a 3 GB test file to a source channel
3. Trigger ingestion — verify it uploads as a single message (not split)
4. Manually upload a 5 GB test file
5. Trigger ingestion — verify it splits at the ~3.9 GB threshold
6. Verify `isMultipart`, `partCount`, and `destMessageIds` are correct
7. Send the package via the bot — verify all parts arrive

### Phase 2: Time-Window Grouping Testing

1. Enable `AUTO_GROUP_ENABLED=true` on the test channel only
2. Post 3 files to the channel within 2 minutes (no album)
3. Trigger ingestion — verify they auto-group
4. Post 2 files 10 minutes apart
5. Trigger ingestion — verify they stay ungrouped
6. Manually group them — verify a `GroupingRule` is created
7. Post similar files — verify auto-grouping kicks in

### Phase 3: Manual QA via API

Add a **test endpoint** (dev-only) that accepts a fake message payload and runs it through the grouping pipeline without hitting Telegram:

```
POST /api/dev/test-grouping
Body: { messages: [...], channelId: "..." }
Response: { suggestedGroups: [...] }
```

This allows testing grouping logic against crafted scenarios without waiting for real Telegram messages.

### Phase 4: Integrity Audit Testing

1. Enable `INTEGRITY_AUDIT_ENABLED=true`
2. Manually corrupt a record (set a wrong `contentHash` in the DB)
3. Run the audit — verify a `HASH_MISMATCH` notification is created
4. Delete one `destMessageId` from a multipart package's `destMessageIds`
5. Run the audit — verify a `MISSING_PART` notification is created
6. Check that the notification UI shows both

### Regression Checks After Each Phase

- Re-run ingestion on the test channel — same number of packages/groups as the baseline
- Search for known filenames — still returns correct results
- Send a package via the bot — still delivers correctly
- Album groups unchanged
- Manual groups unchanged

@@ -0,0 +1,67 @@

# Grouping Phase 1: Foundation + Time-Window Grouping

> **For agentic workers:** Use superpowers:subagent-driven-development to implement this plan.

**Goal:** Add grouping infrastructure (schema, enums, notifications model), an ungrouped staging queue in the UI, and time-window auto-grouping as the first automatic signal beyond album grouping.

**Architecture:** Schema changes lay the foundation. The ungrouped tab is a query filter. Time-window grouping runs as a post-processing pass after album grouping in the worker pipeline.

**Tech Stack:** Prisma schema + migration, worker TypeScript, Next.js App Router.

---

## Task 1: Schema Migration

**Files:**

- Modify: `prisma/schema.prisma`
- Create: migration SQL

Add:

1. `GroupingSource` enum: `ALBUM`, `MANUAL`, `AUTO_TIME`, `AUTO_PATTERN`, `AUTO_REPLY`, `AUTO_ZIP`, `AUTO_CAPTION`
2. `groupingSource GroupingSource @default(MANUAL)` on `PackageGroup`
3. `SystemNotification` model with `type`, `severity`, `title`, `message`, `context` (Json), `isRead`
4. `NotificationType` enum: `HASH_MISMATCH`, `MISSING_PART`, `UPLOAD_FAILED`, `DOWNLOAD_FAILED`, `GROUPING_CONFLICT`, `INTEGRITY_AUDIT`
5. `NotificationSeverity` enum: `INFO`, `WARNING`, `ERROR`

Backfill: `UPDATE package_groups SET "groupingSource" = 'ALBUM' WHERE "mediaAlbumId" IS NOT NULL`

---

## Task 2: Ungrouped Staging Tab in STL Page

**Files:**

- Modify: `src/lib/telegram/queries.ts` — add a `listUngroupedPackages()` query
- Modify: `src/app/(app)/stls/page.tsx` — add tab parameter support
- Modify: `src/app/(app)/stls/_components/stl-table.tsx` — add an "Ungrouped" tab

Add a tab next to the existing "Skipped" tab that shows packages where `packageGroupId IS NULL`. It uses the existing `PackageListItem` type and table rendering. This gives users a clear view of files that need manual grouping.

---

## Task 3: Time-Window Auto-Grouping in Worker

**Files:**

- Create: `worker/src/grouping.ts` — add `processTimeWindowGroups()` after the existing `processAlbumGroups()`
- Modify: `worker/src/worker.ts` — call time-window grouping after album grouping
- Modify: `worker/src/util/config.ts` — add an `autoGroupTimeWindowMinutes` config value

After album grouping completes, find the remaining ungrouped packages from the same channel scan. Cluster packages whose `sourceMessageId` timestamps are within the configured window (default 5 minutes). Create groups for clusters of 2+ with `groupingSource = AUTO_TIME` and a name derived from the common filename prefix or the first file's base name. A sketch of the pass follows.
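
This sketch assumes the `clusterByTimeWindow()` helper from the design notes above and the existing Prisma client; the module path and field names are illustrative:

```typescript
import { PrismaClient } from "@prisma/client";
import { clusterByTimeWindow } from "./timeWindow"; // hypothetical module holding the clustering sketch

// Group name: longest common filename prefix, trimmed of trailing separators.
function commonPrefix(names: string[]): string {
  let prefix = names[0] ?? "";
  for (const name of names.slice(1)) {
    while (prefix && !name.startsWith(prefix)) prefix = prefix.slice(0, -1);
  }
  return prefix.replace(/[\s\-_.]+$/, "");
}

export async function processTimeWindowGroups(
  prisma: PrismaClient,
  channelId: string,
  scanned: { id: string; fileName: string; sourceMessageDate: Date }[],
  windowMinutes: number
): Promise<void> {
  for (const cluster of clusterByTimeWindow(scanned, windowMinutes)) {
    const name =
      commonPrefix(cluster.map((p) => p.fileName)) || cluster[0].fileName;
    const group = await prisma.packageGroup.create({
      data: { name, sourceChannelId: channelId, groupingSource: "AUTO_TIME" },
    });
    // Only claim packages no other signal has grouped in the meantime.
    await prisma.package.updateMany({
      where: { id: { in: cluster.map((p) => p.id) }, packageGroupId: null },
      data: { packageGroupId: group.id },
    });
  }
}
```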

---

## Task 4: Hash Verification After Split

**Files:**

- Modify: `worker/src/worker.ts` — add a hash re-check after concat+split
- Modify: `worker/src/archive/hash.ts` — no changes needed; reuse `hashParts`

After `concatenateFiles()` + `byteLevelSplit()`, re-hash the split parts and compare against the original `contentHash`. On mismatch, log an error and create a `SystemNotification` (once that table exists). This closes the integrity gap identified in the audit. A sketch of the check follows.
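
Conceptually, `hashParts` streams every part in order through one SHA-256 context, so the parts' concatenation can be compared byte-for-byte against the original hash. A sketch of that shape (the existing helper may differ in detail):

```typescript
import { createHash } from "node:crypto";
import { createReadStream } from "node:fs";

// Hash all parts, in order, as one logical stream: the concatenation of the
// split parts must produce the same SHA-256 as the original archive.
export async function hashPartsSketch(partPaths: string[]): Promise<string> {
  const hash = createHash("sha256");
  for (const partPath of partPaths) {
    await new Promise<void>((resolve, reject) => {
      createReadStream(partPath)
        .on("data", (chunk) => hash.update(chunk))
        .on("end", resolve)
        .on("error", reject);
    });
  }
  return hash.digest("hex");
}

// In worker.ts, after byteLevelSplit():
//   if ((await hashPartsSketch(parts)) !== contentHash) {
//     // log the error and emit a HASH_MISMATCH SystemNotification
//   }
```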

---

## Task 5: Build & Deploy

Rebuild the worker and app images. Deploy. Verify:

- Worker logs show `maxPartSizeMB` and the new `autoGroupTimeWindowMinutes` in the config
- The Ungrouped tab is visible in the STL page
- Previously-skipped large archives begin processing

**File:** `docs/superpowers/plans/2026-05-02-worker-improvements.md` (new file, 1000 lines; diff suppressed because it is too large)

@@ -0,0 +1,241 @@

# Design: Search Match Indicators, Size Limit Increase, Skipped/Failed Files Overview

**Date:** 2026-03-24
**Status:** Approved

## Overview

Three related improvements to the STL packages system:

1. **Search match indicators** — Show which internal files matched a search query, with highlighted files in the drawer
2. **Size limit increase** — Raise the ingestion limit from 4 GB to 200 GB so large multipart archives aren't skipped
3. **Skipped/failed files overview** — Track and display archives that were skipped or failed, with retry capability

---

## Feature 1: Size Limit Increase

### Change

`worker/src/util/config.ts` line 6 — change the default from `"4096"` to `"204800"`.

A one-line change. The split/upload pipeline already handles arbitrary sizes. The 2 GB per-part Telegram API limit is a separate hard-coded constant and stays as-is.

### Impact

- Archives up to 200 GB will now be attempted
- Multipart archives where individual parts are under 2 GB (but the total exceeds 4 GB) will no longer be skipped — these upload directly without any splitting
- Single files over 2 GB are automatically split into 2 GB parts (existing behavior)
- Temp disk usage during processing can now reach up to ~200 GB per archive

---

## Feature 2: Search Match Indicators

### Backend Changes

**File:** `src/lib/telegram/queries.ts` — `searchPackages()`

When `searchIn` is `"files"` or `"both"`, change the PackageFile query from `distinct` to a **grouped count**:

```typescript
// Current: findMany with select: { packageId }, distinct: ["packageId"]
// New: groupBy packageId with _count
const fileMatches = await prisma.packageFile.groupBy({
  by: ["packageId"],
  where: {
    OR: [
      { fileName: { contains: q, mode: "insensitive" } },
      { path: { contains: q, mode: "insensitive" } },
    ],
  },
  _count: { _all: true },
});
```

This returns `{ packageId: string, _count: { _all: number } }[]`.

**File:** `src/lib/telegram/types.ts` — `PackageListItem`

Add two fields:

- `matchedFileCount: number` — how many files inside matched (0 if the package matched by name only)
- `matchedByContent: boolean` — true if any files inside matched

Note: `PackageRow` in `package-columns.tsx` mirrors `PackageListItem` and must also receive the two new fields. A sketch of the fold from the grouped counts into these fields follows.
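
A minimal sketch, assuming `fileMatches` from the `groupBy` above and `packages` from the main package query:

```typescript
// Fold the grouped counts into the list items the table consumes.
const countByPackageId = new Map(
  fileMatches.map((m) => [m.packageId, m._count._all])
);

const items = packages.map((pkg) => ({
  ...pkg,
  matchedFileCount: countByPackageId.get(pkg.id) ?? 0,
  matchedByContent: countByPackageId.has(pkg.id),
}));
```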

### Frontend Changes

**File:** `src/app/(app)/stls/page.tsx`

Pass the search term to `StlTable` as a new prop.

**File:** `src/app/(app)/stls/_components/stl-table.tsx`

Pass the search term to columns via TanStack Table column meta.

**File:** `src/app/(app)/stls/_components/package-columns.tsx`

When search is active and `matchedByContent` is true, render a clickable badge below the filename, e.g., "3 file matches". Clicking opens the `PackageFilesDrawer` with a `highlightTerm` prop set to the search term.

**File:** `src/app/(app)/stls/_components/package-files-drawer.tsx`

- Accept an optional `highlightTerm: string` prop
- Render the full file tree as normal (all files visible)
- Files whose `fileName` or `path` case-insensitively contains `highlightTerm` get a subtle highlight (amber/yellow background on the row; see the predicate sketch after this list)
- Auto-expand folders that contain highlighted files
- The drawer's own search input remains independent
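
A minimal sketch of the highlight predicate, with an assumed file shape:

```typescript
interface DrawerFile {
  fileName: string;
  path: string;
}

// Case-insensitive substring check, mirroring the backend's ILIKE matching.
export function isHighlighted(file: DrawerFile, highlightTerm?: string): boolean {
  if (!highlightTerm) return false;
  const term = highlightTerm.toLowerCase();
  return (
    file.fileName.toLowerCase().includes(term) ||
    file.path.toLowerCase().includes(term)
  );
}

// A folder auto-expands when any descendant file isHighlighted().
```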

### Data Flow

1. User types a search term in the STL table search input
2. The URL updates with `?search=value`, and the page reloads
3. `page.tsx` calls `searchPackages()` with `searchIn: "both"`
4. The query returns packages with `matchedFileCount` and `matchedByContent`
5. The table renders an "N file matches" badge on content-matched rows
6. User clicks the badge -> the drawer opens with the full tree, matching files highlighted
7. Folders containing matches are auto-expanded

---

## Feature 3: Skipped/Failed Files Overview

### Database Schema

New model in `prisma/schema.prisma`:

```prisma
enum SkipReason {
  SIZE_LIMIT
  DOWNLOAD_FAILED
  EXTRACT_FAILED
  UPLOAD_FAILED
}

model SkippedPackage {
  id              String          @id @default(cuid())
  fileName        String
  fileSize        BigInt
  reason          SkipReason
  errorMessage    String?
  sourceChannelId String
  sourceChannel   TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)
  sourceMessageId BigInt
  sourceTopicId   BigInt?
  isMultipart     Boolean         @default(false)
  partCount       Int             @default(1)
  accountId       String
  account         TelegramAccount @relation(fields: [accountId], references: [id], onDelete: Cascade)
  createdAt       DateTime        @default(now())

  @@unique([sourceChannelId, sourceMessageId])
  @@index([reason])
  @@index([accountId])
  @@map("skipped_packages")
}
```

Reverse relations must be added to the `TelegramChannel` and `TelegramAccount` models:

```prisma
// In TelegramChannel:
skippedPackages SkippedPackage[]

// In TelegramAccount:
skippedPackages SkippedPackage[]
```

### Worker Changes

**File:** `worker/src/worker.ts`

Extend the `PipelineContext` interface to include `accountId` (derived from the ingestion run's account).

At each skip/failure point, upsert a `SkippedPackage` record:

- **Size limit skip** (line 784): reason `SIZE_LIMIT`, no error message
- **Download failure** (catch in the download loop): reason `DOWNLOAD_FAILED` + error text
- **Extract/metadata failure** (catch in extract): reason `EXTRACT_FAILED` + error text
- **Upload failure** (catch in upload): reason `UPLOAD_FAILED` + error text

On **successful ingestion** of a package, delete any existing `SkippedPackage` with the same `(sourceChannelId, sourceMessageId)` — so successful retries clean up after themselves.

**File:** `worker/src/db/queries.ts`

Add functions (a sketch of the upsert follows):

- `upsertSkippedPackage(data)` — create or update a skip record
- `deleteSkippedPackage(sourceChannelId, sourceMessageId)` — remove on success
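
A minimal sketch of the upsert, keyed on the compound unique so a repeated failure updates the existing row instead of duplicating it (field names per the schema above):

```typescript
import { PrismaClient, SkipReason } from "@prisma/client";

const prisma = new PrismaClient();

export async function upsertSkippedPackage(data: {
  fileName: string;
  fileSize: bigint;
  reason: SkipReason;
  errorMessage?: string;
  sourceChannelId: string;
  sourceMessageId: bigint;
  sourceTopicId?: bigint;
  isMultipart?: boolean;
  partCount?: number;
  accountId: string;
}) {
  return prisma.skippedPackage.upsert({
    where: {
      // Prisma's generated name for @@unique([sourceChannelId, sourceMessageId])
      sourceChannelId_sourceMessageId: {
        sourceChannelId: data.sourceChannelId,
        sourceMessageId: data.sourceMessageId,
      },
    },
    create: data,
    update: { reason: data.reason, errorMessage: data.errorMessage },
  });
}
```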

### Retry Mechanism

Retrying a skipped package (sketched after this list):

1. Delete the `SkippedPackage` record
2. Find the `AccountChannelMap` record using both `accountId` and `sourceChannelId`, then reset its `lastProcessedMessageId` to `sourceMessageId - 1` (only if that is less than the current watermark)
3. If `sourceTopicId` is non-null, also reset the corresponding `TopicProgress.lastProcessedMessageId` for that topic
4. The next ingestion cycle picks up the message and re-attempts processing
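
A sketch of the single-package retry under those rules; the `AccountChannelMap` field names are assumptions, and the transaction keeps the record deletion and watermark reset consistent:

```typescript
import { PrismaClient } from "@prisma/client";

export async function retrySkippedPackage(prisma: PrismaClient, id: string) {
  await prisma.$transaction(async (tx) => {
    const skipped = await tx.skippedPackage.delete({ where: { id } });
    const rewindTo = skipped.sourceMessageId - 1n;
    // Only move the watermark backwards, never forwards.
    await tx.accountChannelMap.updateMany({
      where: {
        accountId: skipped.accountId,
        channelId: skipped.sourceChannelId, // field name assumed
        lastProcessedMessageId: { gt: rewindTo },
      },
      data: { lastProcessedMessageId: rewindTo },
    });
    // A non-null sourceTopicId would reset TopicProgress the same way.
  });
}
```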
For "Retry All" (e.g., all `SIZE_LIMIT` skips after raising the limit):
|
||||
- Delete all matching `SkippedPackage` records
|
||||
- For each affected (account, channel) pair, reset `AccountChannelMap` watermark to the minimum `sourceMessageId - 1` among deleted records
|
||||
- For each affected (account, channel, topic) triple, reset `TopicProgress` watermark similarly
|
||||
|
||||
**Note on behavioral distinction:** `DOWNLOAD_FAILED`, `EXTRACT_FAILED`, and `UPLOAD_FAILED` archives already naturally retry because the worker does not advance the watermark past failed sets. The `SkippedPackage` record provides visibility into these failures. The explicit retry/watermark reset is only strictly needed for `SIZE_LIMIT` skips (where the watermark does advance past the skipped message). The UI should present both types but the retry button is most impactful for `SIZE_LIMIT` skips.
|
||||
|
||||
**Performance note:** "Retry All" can cause the worker to re-scan large message ranges. The existing dedup logic (`packageExistsBySourceMessage`) ensures already-ingested packages are skipped quickly, but there is a scanning cost proportional to the number of messages between the reset watermark and the current position.
|
||||
|
||||

### Frontend Changes

**File:** `src/app/(app)/stls/_components/stl-table.tsx`

Add a "Skipped / Failed" tab alongside the main packages table.

**New file:** `src/app/(app)/stls/_components/skipped-packages-tab.tsx`

Table columns:

- **fileName** — archive name
- **fileSize** — formatted size
- **reason** — color-coded badge: `SIZE_LIMIT` (yellow), `DOWNLOAD_FAILED` (red), `EXTRACT_FAILED` (red), `UPLOAD_FAILED` (red)
- **errorMessage** — truncated, with an expandable tooltip/popover for the full text
- **channel** — source channel title
- **createdAt** — when the skip/failure was recorded

Actions:

- **Retry** button per row — a server action that deletes the record + resets the watermark
- **Retry All** button in the header — bulk retry, filterable by reason

**File:** `src/app/(app)/stls/page.tsx`

Fetch the skipped packages count (for the tab badge) alongside the existing queries.

**File:** `src/data/` or `src/lib/telegram/queries.ts`

Add query functions:

- `listSkippedPackages(options)` — paginated list with a reason filter
- `countSkippedPackages()` — for the tab badge
- `retrySkippedPackage(id)` — delete record + reset watermark
- `retryAllSkippedPackages(reason?)` — bulk retry

**File:** `src/app/(app)/stls/actions.ts`

Add server actions:

- `retrySkippedPackageAction(id)`
- `retryAllSkippedPackagesAction(reason?)`

---

## Files to Create/Modify

### Create

- `src/app/(app)/stls/_components/skipped-packages-tab.tsx` — skipped packages table UI
- Prisma migration for the `SkippedPackage` model

### Modify

- `worker/src/util/config.ts` — raise the default max size
- `worker/src/worker.ts` — record skips/failures, clean up on success
- `worker/src/db/queries.ts` — add skip record CRUD functions
- `prisma/schema.prisma` — add the `SkippedPackage` model and `SkipReason` enum
- `src/lib/telegram/queries.ts` — modify `searchPackages()` for match counts, add skipped package queries
- `src/lib/telegram/types.ts` — add `matchedFileCount`/`matchedByContent` to `PackageListItem`, add skipped package types
- `src/app/(app)/stls/page.tsx` — pass search term, fetch skipped count, add tab
- `src/app/(app)/stls/_components/stl-table.tsx` — accept search prop, render tabs
- `src/app/(app)/stls/_components/package-columns.tsx` — render match badge
- `src/app/(app)/stls/_components/package-files-drawer.tsx` — accept `highlightTerm`, highlight matching files, auto-expand matched folders
- `src/app/(app)/stls/actions.ts` — add retry server actions

**File:** `docs/superpowers/specs/2026-03-25-package-grouping-design.md` (new file, 246 lines)

@@ -0,0 +1,246 @@

# Package Grouping Design

## Overview

Add the ability to group related packages that were posted together in a Telegram channel (e.g., "DUNGEON BLOCKS - Colossal Dungeon" with 6 separate archive files). Groups appear as collapsible rows in the STL files table, with support for both automatic detection via Telegram album IDs and manual grouping through the UI.

## Goals

- Automatically detect and group files posted together in Telegram (same `media_album_id`)
- Display groups as collapsed rows in the STL table with aggregated metadata
- Allow manual grouping/ungrouping of packages via the UI
- Support editable group names and preview images
- Enable "Send All" to deliver every package in a group via the bot

## Non-Goals

- Merging grouped packages into a single Package record (each stays independent)
- Time-proximity heuristics for grouping (too error-prone)
- Grouping across different source channels

---

## Data Model

### New `PackageGroup` Table

```prisma
model PackageGroup {
  id              String   @id @default(cuid())
  name            String
  mediaAlbumId    String?
  sourceChannelId String
  previewData     Bytes?
  createdAt       DateTime @default(now())
  updatedAt       DateTime @updatedAt

  packages      Package[]
  sourceChannel TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)

  @@unique([mediaAlbumId, sourceChannelId])
  @@index([sourceChannelId])
  @@map("package_groups")
}
```

### Package Model Changes

Add optional group membership:

```prisma
model Package {
  // ... existing fields ...
  packageGroupId String?
  packageGroup   PackageGroup? @relation(fields: [packageGroupId], references: [id], onDelete: SetNull)

  @@index([packageGroupId])
}
```

### TelegramChannel Model Changes

Add a back-relation for the new `PackageGroup` model:

```prisma
model TelegramChannel {
  // ... existing fields and relations ...
  packageGroups PackageGroup[]
}
```

### Key Decisions

- `mediaAlbumId` is `String?` (TDLib int64, stringified) — only used for dedup lookups, avoids BigInt complexity
- `@@unique([mediaAlbumId, sourceChannelId])` prevents duplicate album-derived groups when re-scanning. PostgreSQL treats NULLs as distinct in unique constraints, so manually-created groups (with `mediaAlbumId = null`) are not constrained by this — which is the correct behavior
- Idempotency for album groups uses `findFirst({ where: { mediaAlbumId, sourceChannelId } })` + conditional `create`, not `upsert`, because Prisma does not support `upsert` on compound unique keys with nullable fields
- `onDelete: SetNull` on `Package.packageGroup` means dissolving a group automatically unlinks all members
- `onDelete: Cascade` on `PackageGroup.sourceChannel` means deleting a channel cleans up its groups
- `sourceTopicId` is omitted from `PackageGroup` — it can be inferred from member packages, and manual groups may span topics
- `@@map("package_groups")` follows the project's snake_case table naming convention
- `previewData` stores JPEG thumbnail bytes directly on the group (same pattern as Package)

---

## Worker Changes

### TelegramMessage Interface

Add an optional `mediaAlbumId` field:

```typescript
export interface TelegramMessage {
  id: bigint;
  fileName: string;
  fileId: string;
  fileSize: bigint;
  date: Date;
  mediaAlbumId?: string; // Absent or "0" when not part of an album
}
```

The field is optional to minimize call-site changes. The grouping step treats `undefined` and `"0"` equivalently as "not part of an album."

### TelegramPhoto Interface

Add an optional `mediaAlbumId` field:

```typescript
export interface TelegramPhoto {
  id: bigint;
  date: Date;
  caption: string;
  fileId: string;
  fileSize: number;
  mediaAlbumId?: string; // For album-to-preview correlation
}
```

### Channel Scanning

In `getChannelMessages()`, read `media_album_id` from the TDLib message object (already present in TDLib responses, just not captured today). Add `media_album_id?: string` to the `TdMessage` interface and pass it through to both `TelegramMessage` and `TelegramPhoto`.

The document pass and the photo pass already run as separate loops over `searchChatMessages`. Both loops capture `media_album_id` independently. Correlation happens at grouping time: album photos are matched to album documents by comparing their `mediaAlbumId` values, not at scan time.

### Group Creation (Post-Processing)

After each scan cycle's packages are individually processed (downloaded, hashed, uploaded, indexed), a post-processing step handles grouping (sketched after this list):

1. Collect all packages from the current scan batch that share the same non-zero `mediaAlbumId`
2. For each distinct `mediaAlbumId`, check whether a `PackageGroup` already exists via `findFirst({ where: { mediaAlbumId, sourceChannelId } })`
3. If no group exists, create one:
   - **Name:** the caption of the first message in the album (falls back to the first file's base name)
   - **Preview:** find a `TelegramPhoto` from the scan's `photos[]` array with the same `mediaAlbumId`. If found, download it via `downloadPhotoThumbnail`. If not, the group starts with no preview (one can be added in the UI later)
4. Link all member packages via an idempotent `updateMany` — it sets `packageGroupId` on all packages whose `sourceMessageId` is in the album's message set. This handles both newly-indexed packages and previously-indexed ones that were created in an earlier partial scan (e.g., if one package failed and was retried later)

The per-package pipeline is unchanged — each file is still downloaded, hashed, deduped, split, uploaded, and indexed independently. Grouping is a layer on top.
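
A sketch of that pass, assuming the existing Prisma client (`findFirst` + conditional `create`, then idempotent linking, per the key decisions above):

```typescript
import { PrismaClient } from "@prisma/client";

export async function ensureAlbumGroup(
  prisma: PrismaClient,
  sourceChannelId: string,
  mediaAlbumId: string,
  albumMessageIds: bigint[],
  name: string
) {
  // findFirst + conditional create instead of upsert: the compound unique
  // key contains a nullable column, which Prisma's upsert cannot target.
  let group = await prisma.packageGroup.findFirst({
    where: { mediaAlbumId, sourceChannelId },
  });
  if (!group) {
    group = await prisma.packageGroup.create({
      data: { name, mediaAlbumId, sourceChannelId },
    });
  }
  // Idempotent linking: re-running after a partial scan is harmless.
  await prisma.package.updateMany({
    where: { sourceChannelId, sourceMessageId: { in: albumMessageIds } },
    data: { packageGroupId: group.id },
  });
  return group;
}
```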

---

## Query Layer

### Paginated Listing with Groups

The STL table shows "display items" — either a group (collapsed) or a standalone package. Pagination operates on display items, so a group occupies exactly one slot regardless of member count.

**Two-step query approach** (handles filters correctly):

**Step 1 — Find matching display item IDs:**

```sql
-- Find all group IDs and standalone package IDs where at least one member matches filters
SELECT DISTINCT COALESCE(p."packageGroupId", p.id) AS display_id,
       CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END AS display_type,
       MAX(p."indexedAt") AS sort_date
FROM packages p
LEFT JOIN package_groups pg ON pg.id = p."packageGroupId"
WHERE 1=1
  -- Optional filters applied here (creator, tags, search text, channelId)
GROUP BY COALESCE(p."packageGroupId", p.id),
         CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END
ORDER BY sort_date DESC
LIMIT $1 OFFSET $2
```

**Step 2 — Fetch full data:**

For groups on the current page, fetch all member packages (including those that didn't match the filters — the group appears because at least one member matched, but the expanded view shows all members). For standalone packages, fetch the full package data.

**Count query** (for the pagination total):

```sql
SELECT COUNT(*) FROM (
  SELECT DISTINCT COALESCE(p."packageGroupId", p.id)
  FROM packages p
  WHERE 1=1
  -- Same filters as step 1
) AS display_items
```

### Group Row Aggregates

Computed in the step 2 fetch: total file size (sum), total file count (sum), combined tags (array union), and the member package count per group. These populate the collapsed group row.

### Search

`searchPackages` adds `PackageGroup.name` to the search targets via a `LEFT JOIN` to `package_groups`. If any package in a group matches by name/file content, or the group name matches, the whole group appears.

### Filtering

Creator/tag filters apply to member packages. A group appears if any member matches the filter. The group row shows aggregates of all members (not just the matching ones).

### New Query Functions

| Function | Purpose |
|----------|---------|
| `listDisplayItems(page, limit, filters)` | Two-step paginated query returning groups + standalone packages |
| `getDisplayItemCount(filters)` | Count of display items for the pagination total |
| `getPackageGroup(groupId)` | Group metadata + all member packages |
| `updatePackageGroupName(groupId, name)` | Rename a group |
| `updatePackageGroupPreview(groupId, previewData)` | Replace a group preview |
| `addPackagesToGroup(packageIds, groupId)` | Manual grouping — add to an existing group |
| `removePackageFromGroup(packageId)` | Ungroup a single package |
| `createManualGroup(name, packageIds)` | Create a new group from the UI |
| `dissolveGroup(groupId)` | Ungroup all members, delete the group record |

For manual grouping of packages that already belong to different groups: the UI links the selected packages to the target group, then dissolves any source groups left empty (groups whose members were all moved). Non-selected members of source groups remain in their original group.

---

## UI Changes

### STL Table — Group Rows

- **Collapsed (default):** Single row showing the preview thumbnail, group name (editable inline), archive type badge ("Mixed" if heterogeneous), combined size, combined file count, combined tags (editable), source channel, latest `indexedAt`, and actions
- **Expanded:** Chevron toggle reveals member packages as indented sub-rows with their existing columns and per-package actions
- The chevron icon on the left of the row toggles expand/collapse

**Loading strategy:** Member packages for all groups on the current page are prefetched in a single batched query during the step 2 fetch. This makes expand/collapse instant (no on-demand loading) and avoids per-row loading states.

### Group Row Actions

- **Send All** — Queues bot send requests for every package in the group. Checks for existing PENDING/SENDING requests per package to avoid duplicates.
- **View Files** — Opens the file drawer showing all member packages' files, separated by package name headers
- **Dissolve Group** — Ungroups all members (confirmation required)

### Individual Package Actions (Within a Group)

- Existing: Send, View Files
- New: "Remove from group" in the dropdown menu

### Manual Grouping

- Checkbox selection column on package rows
- When 2+ packages are selected, a "Group Selected" button appears in the table toolbar
- Prompts for a group name, then creates the group
- If selected packages belong to existing groups, those packages are moved to the new group. Source groups that become empty are automatically dissolved.

### Preview Editing

- Click the group's preview thumbnail to upload a replacement image
- Same upload flow as individual packages (existing component reuse)

### No Changes To

- Skipped/failed packages tab
- Package detail drawer internals
- Search UI (just broader matching behind the scenes)

**File:** `docs/superpowers/specs/2026-05-02-worker-improvements-design.md` (new file, 184 lines)

@@ -0,0 +1,184 @@

# Worker Improvements Design

**Date:** 2026-05-02
**Status:** Approved
**Scope:** Dragon's Stash Telegram ingestion worker

## Problem Statement

Three issues to address:

1. **Double-uploads**: The same archive occasionally appears twice in the destination Telegram channel. Root causes: (a) the worker crashes between `uploadToChannel()` confirming success and `createPackageWithFiles()` writing to the DB — with no DB record, `recoverIncompleteUploads()` can't detect the orphaned Telegram message, and the next cycle re-uploads; (b) two accounts scanning the same source channel can both pass the hash dedup check before either creates a DB record, racing to upload the same file.

2. **Sequential account processing**: Both Telegram accounts are processed one after another via `withTdlibMutex`, even though TDLib fully supports multiple concurrent clients in the same process (each with a separate `databaseDirectory` and `filesDirectory`). This halves throughput unnecessarily.

3. **Premium upload limit not used**: The Premium account can upload up to 4 GB per file, but `MAX_UPLOAD_SIZE` is hardcoded at ~1,950 MB. This causes unnecessary file splitting and expensive repack operations for files that could upload directly.

## Solution Overview

Three targeted changes, no architectural overhaul:

1. Two-phase DB write + hash advisory lock (fixes double-uploads)
2. Remove the TDLib mutex from the scheduler loop (enables parallel accounts)
3. Per-account `maxUploadSize` from `getMe().is_premium` (enables 4 GB for Premium)

---

## Section 1: Double-Upload Fix

### 1a. Two-Phase DB Write

**Current flow:**

```
uploadToChannel() → preview download → metadata extraction → createPackageWithFiles()
```

If the worker crashes anywhere between upload confirmation and `createPackageWithFiles()`, no DB record exists. `recoverIncompleteUploads()` only checks packages with an existing `destMessageId` in the DB — it cannot find an orphaned Telegram message with no corresponding row.

**New flow:**

```
uploadToChannel()
  → createPackageStub()          ← minimal record, destMessageId set immediately
  → preview download
  → metadata extraction
  → updatePackageWithMetadata()  ← adds file list, preview, creator, tags
```

`createPackageStub()` writes: `contentHash`, `fileName`, `fileSize`, `archiveType`, `sourceChannelId`, `sourceMessageId`, `destChannelId`, `destMessageId`, `isMultipart`, `partCount`, `ingestionRunId`. The file list and preview are left empty.

If the worker crashes after the stub is written:

- `recoverIncompleteUploads()` finds the record (it has a `destMessageId`), verifies the Telegram message exists, and keeps it.
- Next cycle: `packageExistsByHash()` returns true → skips the re-upload.
- The stub has `fileCount = 0` and no file listing. The UI shows "metadata pending" rather than failing silently.

Stubs with `fileCount = 0` are valid deliverable packages (the bot can still send the file). Backfilling metadata on stubs is out of scope for this change — the crash case is rare and the stub is functional.

### 1b. Hash Advisory Lock

**The race (two accounts, shared source channel):**

```
Worker A: packageExistsByHash(X) → false (no record yet)
Worker B: packageExistsByHash(X) → false (no record yet)
Worker A: uploads file → destMessageId_A
Worker B: uploads file → destMessageId_B   ← duplicate Telegram message
Worker A: createPackageStub() → succeeds (contentHash @unique satisfied)
Worker B: createPackageStub() → fails unique constraint on contentHash
```

Result: two Telegram messages, one DB record. Worker B's upload is wasted.

**Fix:** Before calling `uploadToChannel()`, acquire a PostgreSQL session advisory lock keyed on the content hash:

```sql
SELECT pg_try_advisory_lock(hash_bigint)
```

where `hash_bigint` is the first 8 bytes of the SHA-256 content hash interpreted as a signed bigint.

- `pg_try_advisory_lock` is non-blocking. If another worker holds the lock (same file, shared channel), it returns `false` → treat as a duplicate, skip.
- After acquiring the lock, **re-run `packageExistsByHash()`** before uploading. This catches the case where another worker finished and released the lock between the first check and this one — without the re-check, the current worker would proceed to re-upload.
- The lock is session-scoped: released automatically on DB session end. No manual cleanup needed on crash.
- The lock is released explicitly after `createPackageStub()` completes (or on any error path).

**Implementation location:** New helpers `tryAcquireHashLock(contentHash)` / `releaseHashLock(contentHash)` in `worker/src/db/locks.ts`, reusing the existing DB client pattern. A sketch follows.
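
A sketch of those helpers on a dedicated `pg` connection. Advisory locks are session-scoped, so acquire and release must run on the same connection; the connection setup here is an assumption about the existing client pattern:

```typescript
import { Client } from "pg";

// Dedicated connection for advisory locks; assumes DATABASE_URL is set.
const lockClient = new Client({ connectionString: process.env.DATABASE_URL });
const ready = lockClient.connect();

// First 8 bytes of the SHA-256 hex digest as a signed 64-bit integer.
// Collisions are astronomically unlikely and would only cost a skipped
// upload, never a duplicate.
function hashLockKey(contentHash: string): bigint {
  return BigInt.asIntN(64, BigInt("0x" + contentHash.slice(0, 16)));
}

export async function tryAcquireHashLock(contentHash: string): Promise<boolean> {
  await ready;
  const res = await lockClient.query(
    "SELECT pg_try_advisory_lock($1) AS locked",
    [hashLockKey(contentHash).toString()]
  );
  return res.rows[0].locked === true;
}

export async function releaseHashLock(contentHash: string): Promise<void> {
  await ready;
  await lockClient.query("SELECT pg_advisory_unlock($1)", [
    hashLockKey(contentHash).toString(),
  ]);
}
```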

---

## Section 2: Parallel Account Processing

### Current Constraint

`withTdlibMutex` in `scheduler.ts` serializes all TDLib operations across accounts. This was a conservative guard, but TDLib explicitly supports multiple concurrent clients in the same process, provided each has its own `databaseDirectory` and `filesDirectory`.

The codebase already satisfies this requirement:

```typescript
// worker/src/tdlib/client.ts
const dbPath = path.join(config.tdlibStateDir, account.id);
const client = createClient({
  databaseDirectory: dbPath,
  filesDirectory: path.join(dbPath, "files"),
});
```

Each account gets `<TDLIB_STATE_DIR>/<account.id>/` — fully isolated.

### Change

Replace the sequential `for` loop in `scheduler.ts` with `Promise.allSettled()`:

```typescript
// Before
for (const account of accounts) {
  await withTdlibMutex(`ingest:${account.phone}`, () => runWorkerForAccount(account));
}

// After
await Promise.allSettled(accounts.map((account) => runWorkerForAccount(account)));
```

The per-account PostgreSQL advisory lock in `db/locks.ts` already prevents any account from being processed twice simultaneously. `Promise.allSettled()` ensures one account's failure doesn't abort the other.

The `withTdlibMutex` wrapper can be removed from the ingest path entirely. The auth path (`authenticateAccount`) should also run in parallel but may remain guarded if TDLib auth flows have ordering dependencies — verify during implementation.

**No Docker Compose changes needed.** Both accounts run in the same container.

### Speed Limit Notifications

TDLib fires `updateSpeedLimitNotification` when an account's upload or download speed is throttled (non-Premium accounts). Log this event at `warn` level in the client update handler so it's visible in logs without being actionable.

---

## Section 3: Per-Account Premium Upload Limit

### Premium Detection

After successful authentication, call `getMe()` and read `is_premium: bool` from the returned `user` object. Store this on `TelegramAccount.isPremium` (a new boolean field, default `false`, updated on each successful auth).

```typescript
const me = await client.invoke({ _: 'getMe' }) as { is_premium?: boolean };
await updateAccountPremiumStatus(account.id, me.is_premium ?? false);
```

### Upload Size Limits

| Account type | `maxUploadSize` | Effect |
|---|---|---|
| Premium | 3,950 MB | Parts ≤ 3.95 GB upload as-is; repack only for parts > 3.95 GB (extremely rare) |
| Non-Premium | 1,950 MB | Current behavior unchanged |

Pass `maxUploadSize` into `processOneArchiveSet()` as a parameter (currently hardcoded as `MAX_UPLOAD_SIZE` at `worker.ts:1023` and in `archive/split.ts`).

The `hasOversizedPart` check and the `byteLevelSplit` call both use this value, so the repack step is effectively eliminated for Premium accounts in practice — no separate "skip repack" flag is needed.

### Migration

```prisma
model TelegramAccount {
  // ... existing fields
  isPremium Boolean @default(false)
}
```

One migration, one new query `updateAccountPremiumStatus(accountId, isPremium)`.

---

## Files to Change

| File | Change |
|---|---|
| `prisma/schema.prisma` | Add `isPremium Boolean @default(false)` to `TelegramAccount` |
| `worker/src/db/queries.ts` | Add `updateAccountPremiumStatus()`, `createPackageStub()`, `updatePackageWithMetadata()` |
| `worker/src/db/locks.ts` | Add `tryAcquireHashLock()`, `releaseHashLock()` |
| `worker/src/tdlib/client.ts` | Call `getMe()` after auth, return `isPremium` from `createTdlibClient()` |
| `worker/src/worker.ts` | Two-phase write, hash lock acquire/release, pass `maxUploadSize` per account |
| `worker/src/archive/split.ts` | Accept a `maxPartSize` parameter instead of a hardcoded constant |
| `worker/src/scheduler.ts` | Replace the sequential loop with `Promise.allSettled()`, remove `withTdlibMutex` from the ingest path |

---

## What Is Explicitly Out of Scope

- Backfilling metadata on stub records (rare crash case, functional without it)
- Download pre-fetching / pipeline parallelism within one account
- Two separate worker containers (a single container is sufficient)
- Bot or app changes (worker-only)

@@ -0,0 +1,35 @@

-- CreateEnum
CREATE TYPE "SkipReason" AS ENUM ('SIZE_LIMIT', 'DOWNLOAD_FAILED', 'EXTRACT_FAILED', 'UPLOAD_FAILED');

-- CreateTable
CREATE TABLE "skipped_packages" (
    "id" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "fileSize" BIGINT NOT NULL,
    "reason" "SkipReason" NOT NULL,
    "errorMessage" TEXT,
    "sourceChannelId" TEXT NOT NULL,
    "sourceMessageId" BIGINT NOT NULL,
    "sourceTopicId" BIGINT,
    "isMultipart" BOOLEAN NOT NULL DEFAULT false,
    "partCount" INTEGER NOT NULL DEFAULT 1,
    "accountId" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "skipped_packages_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "skipped_packages_sourceChannelId_sourceMessageId_key" ON "skipped_packages"("sourceChannelId", "sourceMessageId");

-- CreateIndex
CREATE INDEX "skipped_packages_reason_idx" ON "skipped_packages"("reason");

-- CreateIndex
CREATE INDEX "skipped_packages_accountId_idx" ON "skipped_packages"("accountId");

-- AddForeignKey
ALTER TABLE "skipped_packages" ADD CONSTRAINT "skipped_packages_sourceChannelId_fkey" FOREIGN KEY ("sourceChannelId") REFERENCES "telegram_channels"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "skipped_packages" ADD CONSTRAINT "skipped_packages_accountId_fkey" FOREIGN KEY ("accountId") REFERENCES "telegram_accounts"("id") ON DELETE CASCADE ON UPDATE CASCADE;

@@ -0,0 +1,30 @@

-- AlterTable
ALTER TABLE "packages" ADD COLUMN "packageGroupId" TEXT;

-- CreateTable
CREATE TABLE "package_groups" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "mediaAlbumId" TEXT,
    "sourceChannelId" TEXT NOT NULL,
    "previewData" BYTEA,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "package_groups_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "package_groups_sourceChannelId_idx" ON "package_groups"("sourceChannelId");

-- CreateIndex
CREATE UNIQUE INDEX "package_groups_mediaAlbumId_sourceChannelId_key" ON "package_groups"("mediaAlbumId", "sourceChannelId");

-- CreateIndex
CREATE INDEX "packages_packageGroupId_idx" ON "packages"("packageGroupId");

-- AddForeignKey
ALTER TABLE "packages" ADD CONSTRAINT "packages_packageGroupId_fkey" FOREIGN KEY ("packageGroupId") REFERENCES "package_groups"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "package_groups" ADD CONSTRAINT "package_groups_sourceChannelId_fkey" FOREIGN KEY ("sourceChannelId") REFERENCES "telegram_channels"("id") ON DELETE CASCADE ON UPDATE CASCADE;

@@ -0,0 +1,7 @@

-- AlterTable
ALTER TABLE "packages" ADD COLUMN "destMessageIds" BIGINT[] DEFAULT ARRAY[]::BIGINT[];

-- Backfill: copy existing destMessageId into the array
UPDATE "packages"
SET "destMessageIds" = ARRAY["destMessageId"]
WHERE "destMessageId" IS NOT NULL;

@@ -0,0 +1,32 @@

-- CreateEnum GroupingSource
CREATE TYPE "GroupingSource" AS ENUM ('ALBUM', 'MANUAL', 'AUTO_TIME', 'AUTO_PATTERN', 'AUTO_REPLY', 'AUTO_ZIP', 'AUTO_CAPTION');

-- CreateEnum NotificationType
CREATE TYPE "NotificationType" AS ENUM ('HASH_MISMATCH', 'MISSING_PART', 'UPLOAD_FAILED', 'DOWNLOAD_FAILED', 'GROUPING_CONFLICT', 'INTEGRITY_AUDIT');

-- CreateEnum NotificationSeverity
CREATE TYPE "NotificationSeverity" AS ENUM ('INFO', 'WARNING', 'ERROR');

-- AlterTable: add groupingSource to package_groups
ALTER TABLE "package_groups" ADD COLUMN "groupingSource" "GroupingSource" NOT NULL DEFAULT 'MANUAL';

-- Backfill: mark album-based groups
UPDATE "package_groups" SET "groupingSource" = 'ALBUM' WHERE "mediaAlbumId" IS NOT NULL;

-- CreateTable: system_notifications
CREATE TABLE "system_notifications" (
    "id" TEXT NOT NULL,
    "type" "NotificationType" NOT NULL,
    "severity" "NotificationSeverity" NOT NULL DEFAULT 'INFO',
    "title" TEXT NOT NULL,
    "message" TEXT NOT NULL,
    "context" JSONB,
    "isRead" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "system_notifications_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "system_notifications_isRead_createdAt_idx" ON "system_notifications"("isRead", "createdAt");
CREATE INDEX "system_notifications_type_idx" ON "system_notifications"("type");

@@ -0,0 +1,3 @@

-- AlterTable: add sourceCaption and replyToMessageId to packages
ALTER TABLE "packages" ADD COLUMN "sourceCaption" TEXT;
ALTER TABLE "packages" ADD COLUMN "replyToMessageId" BIGINT;
@@ -0,0 +1,47 @@
|
||||
-- AlterTable: add autoGroupEnabled to telegram_channels
|
||||
ALTER TABLE "telegram_channels" ADD COLUMN "autoGroupEnabled" BOOLEAN NOT NULL DEFAULT true;
|
||||
|
||||
-- CreateTable: grouping_rules
|
||||
CREATE TABLE "grouping_rules" (
|
||||
"id" TEXT NOT NULL,
|
||||
"sourceChannelId" TEXT NOT NULL,
|
||||
"pattern" TEXT NOT NULL,
|
||||
"signalType" "GroupingSource" NOT NULL,
|
||||
"confidence" DOUBLE PRECISION NOT NULL DEFAULT 1.0,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"createdByGroupId" TEXT,
|
||||
|
||||
CONSTRAINT "grouping_rules_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "grouping_rules_sourceChannelId_idx" ON "grouping_rules"("sourceChannelId");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "grouping_rules" ADD CONSTRAINT "grouping_rules_sourceChannelId_fkey" FOREIGN KEY ("sourceChannelId") REFERENCES "telegram_channels"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- Full-text search: add tsvector column and GIN index
|
||||
ALTER TABLE "packages" ADD COLUMN IF NOT EXISTS "searchVector" tsvector;
|
||||
|
||||
UPDATE "packages" SET "searchVector" = to_tsvector('english',
|
||||
coalesce("fileName", '') || ' ' || coalesce("creator", '') || ' ' || coalesce("sourceCaption", '')
|
||||
) WHERE "searchVector" IS NULL;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "packages_search_vector_idx" ON "packages" USING GIN ("searchVector");
|
||||
|
||||
-- Trigger to auto-update searchVector on insert/update
|
||||
CREATE OR REPLACE FUNCTION packages_search_vector_update() RETURNS trigger AS $$
|
||||
BEGIN
|
||||
NEW."searchVector" := to_tsvector('english',
|
||||
coalesce(NEW."fileName", '') || ' ' || coalesce(NEW."creator", '') || ' ' || coalesce(NEW."sourceCaption", '')
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
DROP TRIGGER IF EXISTS packages_search_vector_trigger ON "packages";
|
||||
CREATE TRIGGER packages_search_vector_trigger
|
||||
BEFORE INSERT OR UPDATE OF "fileName", "creator", "sourceCaption"
|
||||
ON "packages"
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION packages_search_vector_update();
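
As a reading aid, here is a minimal sketch of the query shape this column and index enable, written against the app's Prisma client. The helper name, the choice of websearch_to_tsquery, and the LIMIT are illustrative assumptions, not code from this changeset; only the table, column, and 'english' config come from the migration above.

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Hypothetical helper: match against the GIN-indexed "searchVector" and rank
// results. The 'english' config must match the trigger above, otherwise the
// stored vectors and the query tokenize differently and nothing matches.
async function searchPackages(term: string) {
  return prisma.$queryRaw`
    SELECT "id", "fileName", "creator",
           ts_rank("searchVector", websearch_to_tsquery('english', ${term})) AS rank
    FROM "packages"
    WHERE "searchVector" @@ websearch_to_tsquery('english', ${term})
    ORDER BY rank DESC
    LIMIT 20
  `;
}

Because the trigger fires BEFORE INSERT OR UPDATE of the three source columns, callers never have to maintain "searchVector" themselves.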

@@ -0,0 +1,30 @@
-- CreateEnum
CREATE TYPE "ManualUploadStatus" AS ENUM ('PENDING', 'PROCESSING', 'COMPLETED', 'FAILED');

-- CreateTable
CREATE TABLE "manual_uploads" (
    "id" TEXT NOT NULL,
    "status" "ManualUploadStatus" NOT NULL DEFAULT 'PENDING',
    "groupName" TEXT,
    "userId" TEXT NOT NULL,
    "errorMessage" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "completedAt" TIMESTAMP(3),
    CONSTRAINT "manual_uploads_pkey" PRIMARY KEY ("id")
);

CREATE TABLE "manual_upload_files" (
    "id" TEXT NOT NULL,
    "uploadId" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "filePath" TEXT NOT NULL,
    "fileSize" BIGINT NOT NULL,
    "packageId" TEXT,
    CONSTRAINT "manual_upload_files_pkey" PRIMARY KEY ("id")
);

CREATE INDEX "manual_uploads_status_idx" ON "manual_uploads"("status");
CREATE INDEX "manual_upload_files_uploadId_idx" ON "manual_upload_files"("uploadId");

ALTER TABLE "manual_uploads" ADD CONSTRAINT "manual_uploads_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
ALTER TABLE "manual_upload_files" ADD CONSTRAINT "manual_upload_files_uploadId_fkey" FOREIGN KEY ("uploadId") REFERENCES "manual_uploads"("id") ON DELETE CASCADE ON UPDATE CASCADE;

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "telegram_accounts" ADD COLUMN "isPremium" BOOLEAN NOT NULL DEFAULT false;

@@ -42,6 +42,7 @@ model User {
  inviteCodes    InviteCode[]  @relation("InviteCreator")
  usedInvite     InviteCode?   @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull)
  usedInviteId   String?
  manualUploads  ManualUpload[]
}

model Account {
@@ -405,6 +406,7 @@ model TelegramAccount {
  isActive    Boolean    @default(true)
  authState   AuthState  @default(PENDING)
  authCode    String?
  isPremium   Boolean    @default(false)
  lastSeenAt  DateTime?
  createdAt   DateTime   @default(now())
  updatedAt   DateTime   @updatedAt
@@ -412,6 +414,7 @@ model TelegramAccount {
  channelMaps      AccountChannelMap[]
  ingestionRuns    IngestionRun[]
  fetchRequests    ChannelFetchRequest[]
  skippedPackages  SkippedPackage[]

  @@index([isActive])
  @@map("telegram_accounts")
@@ -428,8 +431,13 @@ model TelegramChannel {
  createdAt  DateTime  @default(now())
  updatedAt  DateTime  @updatedAt

  autoGroupEnabled  Boolean  @default(true)

  accountMaps      AccountChannelMap[]
  packages         Package[]
  skippedPackages  SkippedPackage[]
  packageGroups    PackageGroup[]
  groupingRules    GroupingRule[]

  @@index([type, isActive])
  @@index([category])
@@ -466,16 +474,21 @@ model Package {
  sourceTopicId     BigInt?
  destChannelId     String?
  destMessageId     BigInt?
  destMessageIds    BigInt[]  @default([])
  isMultipart       Boolean   @default(false)
  partCount         Int       @default(1)
  fileCount         Int       @default(0)
  tags              String[]  @default([])
  sourceCaption     String?   // Caption text from source Telegram message
  replyToMessageId  BigInt?   // reply_to_message_id from source message (for reply chain grouping)
  previewData       Bytes?    // JPEG thumbnail from nearby Telegram photo (stored as raw bytes)
  previewMsgId      BigInt?   // Telegram message ID of the matched photo
  packageGroupId    String?
  indexedAt         DateTime  @default(now())
  createdAt         DateTime  @default(now())

  sourceChannel   TelegramChannel  @relation(fields: [sourceChannelId], references: [id])
  packageGroup    PackageGroup?    @relation(fields: [packageGroupId], references: [id], onDelete: SetNull)
  files           PackageFile[]
  ingestionRun    IngestionRun?    @relation(fields: [ingestionRunId], references: [id])
  ingestionRunId  String?
@@ -489,6 +502,7 @@ model Package {
  @@index([indexedAt])
  @@index([archiveType])
  @@index([creator])
  @@index([packageGroupId])
  @@map("packages")
}

@@ -510,6 +524,24 @@ model PackageFile {
  @@map("package_files")
}

model PackageGroup {
  id               String          @id @default(cuid())
  name             String
  mediaAlbumId     String?
  sourceChannelId  String
  groupingSource   GroupingSource  @default(MANUAL)
  previewData      Bytes?
  createdAt        DateTime        @default(now())
  updatedAt        DateTime        @updatedAt

  packages       Package[]
  sourceChannel  TelegramChannel  @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)

  @@unique([mediaAlbumId, sourceChannelId])
  @@index([sourceChannelId])
  @@map("package_groups")
}

model IngestionRun {
  id         String  @id @default(cuid())
  accountId  String
@@ -686,6 +718,39 @@ model ArchiveExtractRequest {
  @@map("archive_extract_requests")
}

// ───────────────────────────────────────
// Skipped/Failed Archives
// ───────────────────────────────────────

enum SkipReason {
  SIZE_LIMIT
  DOWNLOAD_FAILED
  EXTRACT_FAILED
  UPLOAD_FAILED
}

model SkippedPackage {
  id               String           @id @default(cuid())
  fileName         String
  fileSize         BigInt
  reason           SkipReason
  errorMessage     String?
  sourceChannelId  String
  sourceChannel    TelegramChannel  @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)
  sourceMessageId  BigInt
  sourceTopicId    BigInt?
  isMultipart      Boolean          @default(false)
  partCount        Int              @default(1)
  accountId        String
  account          TelegramAccount  @relation(fields: [accountId], references: [id], onDelete: Cascade)
  createdAt        DateTime         @default(now())

  @@unique([sourceChannelId, sourceMessageId])
  @@index([reason])
  @@index([accountId])
  @@map("skipped_packages")
}

// ───────────────────────────────────────
// Purchased Kickstarters
// ───────────────────────────────────────
@@ -745,3 +810,97 @@ model KickstarterPackage {
  @@id([kickstarterId, packageId])
  @@map("kickstarter_packages")
}

// ── Grouping & Notifications ──

enum GroupingSource {
  ALBUM
  MANUAL
  AUTO_TIME
  AUTO_PATTERN
  AUTO_REPLY
  AUTO_ZIP
  AUTO_CAPTION
}

enum NotificationType {
  HASH_MISMATCH
  MISSING_PART
  UPLOAD_FAILED
  DOWNLOAD_FAILED
  GROUPING_CONFLICT
  INTEGRITY_AUDIT
}

enum NotificationSeverity {
  INFO
  WARNING
  ERROR
}

model SystemNotification {
  id        String                @id @default(cuid())
  type      NotificationType
  severity  NotificationSeverity  @default(INFO)
  title     String
  message   String
  context   Json?
  isRead    Boolean               @default(false)
  createdAt DateTime              @default(now())

  @@index([isRead, createdAt])
  @@index([type])
  @@map("system_notifications")
}
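
For a sense of how this model is meant to be used, here is a sketch of raising a notification through the generated Prisma client. The reportHashMismatch helper and its arguments are hypothetical; only the model and enums come from the schema above.

import { PrismaClient, NotificationType, NotificationSeverity } from "@prisma/client";

const prisma = new PrismaClient();

// Hypothetical helper: record an integrity failure so it surfaces in the
// unread feed, which the @@index([isRead, createdAt]) above is built for.
async function reportHashMismatch(packageId: string, expected: string, actual: string) {
  await prisma.systemNotification.create({
    data: {
      type: NotificationType.HASH_MISMATCH,
      severity: NotificationSeverity.ERROR,
      title: "Hash mismatch detected",
      message: `Package ${packageId} failed hash verification`,
      context: { packageId, expected, actual },
    },
  });
}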

model GroupingRule {
  id                String          @id @default(cuid())
  sourceChannelId   String
  pattern           String          // Regex or keyword pattern learned from manual grouping
  signalType        GroupingSource  // Which grouping signal this rule applies to
  confidence        Float           @default(1.0)
  createdAt         DateTime        @default(now())
  createdByGroupId  String?         // The manual group that spawned this rule

  sourceChannel  TelegramChannel  @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)

  @@index([sourceChannelId])
  @@map("grouping_rules")
}

enum ManualUploadStatus {
  PENDING
  PROCESSING
  COMPLETED
  FAILED
}

model ManualUpload {
  id            String              @id @default(cuid())
  status        ManualUploadStatus  @default(PENDING)
  groupName     String?             // Group name if multiple files
  userId        String
  errorMessage  String?
  createdAt     DateTime            @default(now())
  completedAt   DateTime?

  files  ManualUploadFile[]
  user   User                @relation(fields: [userId], references: [id])

  @@index([status])
  @@map("manual_uploads")
}

model ManualUploadFile {
  id         String   @id @default(cuid())
  uploadId   String
  fileName   String
  filePath   String   // Path on shared volume
  fileSize   BigInt
  packageId  String?  // Set after processing

  upload  ManualUpload  @relation(fields: [uploadId], references: [id], onDelete: Cascade)

  @@index([uploadId])
  @@map("manual_upload_files")
}
@@ -1,7 +1,7 @@
 "use client";

 import { type ColumnDef } from "@tanstack/react-table";
-import { MoreHorizontal, Pencil, Trash2, ExternalLink } from "lucide-react";
+import { MoreHorizontal, Pencil, Trash2, ExternalLink, Link2, Send } from "lucide-react";
 import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
 import { Badge } from "@/components/ui/badge";
 import { Button } from "@/components/ui/button";
@@ -32,6 +32,8 @@ export interface KickstarterRow {
 interface KickstarterColumnsProps {
   onEdit: (kickstarter: KickstarterRow) => void;
   onDelete: (id: string) => void;
+  onLinkPackages: (kickstarter: KickstarterRow) => void;
+  onSendAll: (kickstarter: KickstarterRow) => void;
 }

 const deliveryConfig: Record<string, { label: string; className: string }> = {
@@ -63,6 +65,8 @@ const paymentConfig: Record<string, { label: string; className: string }> = {
 export function getKickstarterColumns({
   onEdit,
   onDelete,
+  onLinkPackages,
+  onSendAll,
 }: KickstarterColumnsProps): ColumnDef<KickstarterRow, unknown>[] {
   return [
     {
@@ -170,6 +174,16 @@ export function getKickstarterColumns({
             <Pencil className="mr-2 h-3.5 w-3.5" />
             Edit
           </DropdownMenuItem>
+          <DropdownMenuItem onClick={() => onLinkPackages(row.original)}>
+            <Link2 className="mr-2 h-3.5 w-3.5" />
+            Link Packages
+          </DropdownMenuItem>
+          {row.original._count.packages > 0 && (
+            <DropdownMenuItem onClick={() => onSendAll(row.original)}>
+              <Send className="mr-2 h-3.5 w-3.5" />
+              Send All ({row.original._count.packages})
+            </DropdownMenuItem>
+          )}
           <DropdownMenuSeparator />
           <DropdownMenuItem
             onClick={() => onDelete(row.original.id)}

@@ -7,7 +7,8 @@ import { toast } from "sonner";
 import { useDataTable } from "@/hooks/use-data-table";
 import { getKickstarterColumns, type KickstarterRow } from "./kickstarter-columns";
 import { KickstarterModal } from "./kickstarter-modal";
-import { deleteKickstarter } from "../actions";
+import { PackageLinkerDialog } from "./package-linker-dialog";
+import { deleteKickstarter, sendAllKickstarterPackages } from "../actions";
 import { DataTable } from "@/components/shared/data-table";
 import { DataTablePagination } from "@/components/shared/data-table-pagination";
 import { DataTableViewOptions } from "@/components/shared/data-table-view-options";
@@ -50,6 +51,7 @@ export function KickstarterTable({
   const [modalOpen, setModalOpen] = useState(false);
   const [editKickstarter, setEditKickstarter] = useState<KickstarterRow | undefined>();
   const [deleteId, setDeleteId] = useState<string | null>(null);
+  const [linkTarget, setLinkTarget] = useState<KickstarterRow | null>(null);

   const [searchValue, setSearchValue] = useState(searchParams.get("search") ?? "");

@@ -88,6 +90,17 @@ export function KickstarterTable({
       setModalOpen(true);
     },
     onDelete: (id) => setDeleteId(id),
+    onLinkPackages: (kickstarter) => setLinkTarget(kickstarter),
+    onSendAll: (kickstarter) => {
+      startTransition(async () => {
+        const result = await sendAllKickstarterPackages(kickstarter.id);
+        if (result.success) {
+          toast.success(`Queued ${result.data!.queued} package(s) for delivery`);
+        } else {
+          toast.error(result.error);
+        }
+      });
+    },
   });

   const { table } = useDataTable({ data, columns, pageCount });
@@ -188,6 +201,15 @@ export function KickstarterTable({
         onConfirm={handleDelete}
         isLoading={isPending}
       />
+
+      {linkTarget && (
+        <PackageLinkerDialog
+          open={!!linkTarget}
+          onOpenChange={(open) => !open && setLinkTarget(null)}
+          kickstarterId={linkTarget.id}
+          kickstarterName={linkTarget.name}
+        />
+      )}
     </div>
   );
 }
src/app/(app)/kickstarters/_components/package-linker-dialog.tsx (new file, 211 lines)
@@ -0,0 +1,211 @@
"use client";

import { useState, useTransition, useCallback, useEffect } from "react";
import { Search, Package, X, Loader2 } from "lucide-react";
import { toast } from "sonner";
import { linkPackages } from "../actions";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Badge } from "@/components/ui/badge";
import { Checkbox } from "@/components/ui/checkbox";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { ScrollArea } from "@/components/ui/scroll-area";

interface PackageResult {
  id: string;
  fileName: string;
  fileSize: string;
  archiveType: string;
  creator: string | null;
  fileCount: number;
}

interface PackageLinkerDialogProps {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  kickstarterId: string;
  kickstarterName: string;
}

function formatSize(bytes: string | number): string {
  const b = Number(bytes);
  if (b >= 1024 * 1024 * 1024) return `${(b / (1024 * 1024 * 1024)).toFixed(1)} GB`;
  if (b >= 1024 * 1024) return `${(b / (1024 * 1024)).toFixed(0)} MB`;
  return `${(b / 1024).toFixed(0)} KB`;
}

export function PackageLinkerDialog({
  open,
  onOpenChange,
  kickstarterId,
  kickstarterName,
}: PackageLinkerDialogProps) {
  const [isPending, startTransition] = useTransition();
  const [searchQuery, setSearchQuery] = useState("");
  const [searchResults, setSearchResults] = useState<PackageResult[]>([]);
  const [isSearching, setIsSearching] = useState(false);
  const [selectedIds, setSelectedIds] = useState<Set<string>>(new Set());

  // Fetch currently linked packages when dialog opens
  useEffect(() => {
    if (open) {
      setSearchQuery("");
      setSearchResults([]);
      fetch(`/api/packages/linked?kickstarterId=${kickstarterId}`)
        .then((res) => res.json())
        .then((data) => {
          if (data.packageIds) {
            setSelectedIds(new Set(data.packageIds));
          }
        })
        .catch(() => {});
    }
  }, [open, kickstarterId]);

  const doSearch = useCallback(async (query: string) => {
    if (query.length < 2) {
      setSearchResults([]);
      return;
    }
    setIsSearching(true);
    try {
      const res = await fetch(`/api/packages/search?q=${encodeURIComponent(query)}&limit=20`);
      if (res.ok) {
        const data = await res.json();
        setSearchResults(data.packages ?? []);
      }
    } catch {
      // Ignore search errors
    } finally {
      setIsSearching(false);
    }
  }, []);

  // Debounced search
  useEffect(() => {
    const timer = setTimeout(() => doSearch(searchQuery), 300);
    return () => clearTimeout(timer);
  }, [searchQuery, doSearch]);

  function togglePackage(id: string) {
    setSelectedIds((prev) => {
      const next = new Set(prev);
      if (next.has(id)) next.delete(id);
      else next.add(id);
      return next;
    });
  }

  function handleSave() {
    startTransition(async () => {
      const result = await linkPackages(kickstarterId, Array.from(selectedIds));
      if (result.success) {
        toast.success(`Linked ${selectedIds.size} package(s) to "${kickstarterName}"`);
        onOpenChange(false);
      } else {
        toast.error(result.error);
      }
    });
  }

  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-lg">
        <DialogHeader>
          <DialogTitle>Link Packages</DialogTitle>
          <DialogDescription>
            Search and select STL packages to link to “{kickstarterName}”.
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-3">
          {selectedIds.size > 0 && (
            <div className="flex items-center gap-2 text-sm text-muted-foreground">
              <Package className="h-4 w-4" />
              {selectedIds.size} package(s) selected
              <Button
                variant="ghost"
                size="sm"
                className="h-6 px-2 text-xs"
                onClick={() => setSelectedIds(new Set())}
              >
                Clear all
              </Button>
            </div>
          )}

          <div className="relative">
            <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
            <Input
              placeholder="Search packages by name or creator..."
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              className="pl-9"
              autoFocus
            />
            {isSearching && (
              <Loader2 className="absolute right-2.5 top-2.5 h-4 w-4 animate-spin text-muted-foreground" />
            )}
          </div>

          <ScrollArea className="h-[300px] rounded-md border">
            <div className="p-2 space-y-1">
              {searchResults.length === 0 && searchQuery.length >= 2 && !isSearching && (
                <p className="text-sm text-muted-foreground text-center py-8">
                  No packages found
                </p>
              )}
              {searchQuery.length < 2 && (
                <p className="text-sm text-muted-foreground text-center py-8">
                  Type at least 2 characters to search
                </p>
              )}
              {searchResults.map((pkg) => (
                <label
                  key={pkg.id}
                  className="flex items-center gap-3 p-2 rounded-md hover:bg-muted/50 cursor-pointer"
                >
                  <Checkbox
                    checked={selectedIds.has(pkg.id)}
                    onCheckedChange={() => togglePackage(pkg.id)}
                  />
                  <div className="flex-1 min-w-0">
                    <p className="text-sm font-medium truncate">{pkg.fileName}</p>
                    <div className="flex items-center gap-2 text-xs text-muted-foreground">
                      {pkg.creator && <span>{pkg.creator}</span>}
                      <span>{formatSize(pkg.fileSize)}</span>
                      <Badge variant="outline" className="text-[10px] h-4 px-1">
                        {pkg.archiveType}
                      </Badge>
                      {pkg.fileCount > 0 && <span>{pkg.fileCount} files</span>}
                    </div>
                  </div>
                  {selectedIds.has(pkg.id) && (
                    <X className="h-3.5 w-3.5 text-muted-foreground shrink-0" />
                  )}
                </label>
              ))}
            </div>
          </ScrollArea>
        </div>

        <DialogFooter>
          <Button variant="outline" onClick={() => onOpenChange(false)}>
            Cancel
          </Button>
          <Button onClick={handleSave} disabled={isPending}>
            {isPending ? <Loader2 className="h-4 w-4 animate-spin mr-1" /> : null}
            Save ({selectedIds.size})
          </Button>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}

@@ -146,3 +146,83 @@ export async function linkPackages(
     return { success: false, error: "Failed to link packages" };
   }
 }
+
+export async function sendAllKickstarterPackages(
+  kickstarterId: string
+): Promise<ActionResult<{ queued: number }>> {
+  const session = await auth();
+  if (!session?.user?.id) return { success: false, error: "Unauthorized" };
+
+  try {
+    const telegramLink = await prisma.telegramLink.findUnique({
+      where: { userId: session.user.id },
+    });
+
+    if (!telegramLink) {
+      return { success: false, error: "No linked Telegram account. Link one in Settings." };
+    }
+
+    const kickstarter = await prisma.kickstarter.findFirst({
+      where: { id: kickstarterId, userId: session.user.id },
+      select: {
+        packages: {
+          select: {
+            package: {
+              select: { id: true, destChannelId: true, destMessageId: true, fileName: true },
+            },
+          },
+        },
+      },
+    });
+
+    if (!kickstarter) {
+      return { success: false, error: "Kickstarter not found" };
+    }
+
+    const sendablePackages = kickstarter.packages
+      .map((lnk) => lnk.package)
+      .filter((p) => p.destChannelId && p.destMessageId);
+
+    if (sendablePackages.length === 0) {
+      return { success: false, error: "No linked packages are available for sending" };
+    }
+
+    let queued = 0;
+    for (const pkg of sendablePackages) {
+      const existing = await prisma.botSendRequest.findFirst({
+        where: {
+          packageId: pkg.id,
+          telegramLinkId: telegramLink.id,
+          status: { in: ["PENDING", "SENDING"] },
+        },
+      });
+
+      if (!existing) {
+        const sendRequest = await prisma.botSendRequest.create({
+          data: {
+            packageId: pkg.id,
+            telegramLinkId: telegramLink.id,
+            requestedByUserId: session.user.id,
+            status: "PENDING",
+          },
+        });
+
+        try {
+          await prisma.$queryRawUnsafe(
+            `SELECT pg_notify('bot_send', $1)`,
+            sendRequest.id
+          );
+        } catch {
+          // Best-effort
+        }
+
+        queued++;
+      }
+    }
+
+    revalidatePath(REVALIDATE_PATH);
+    return { success: true, data: { queued } };
+  } catch {
+    return { success: false, error: "Failed to send packages" };
+  }
+}
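
The pg_notify('bot_send', …) call above is only half of the hand-off; some worker has to be listening on that channel. Here is a minimal sketch of the consumer side using the pg driver. The worker and deliverSendRequest are assumptions; only the channel name and the id payload come from the code above.

import { Client } from "pg";

// Hypothetical worker: wake on each published BotSendRequest id instead of polling.
async function listenForSendRequests(deliverSendRequest: (id: string) => Promise<void>) {
  const client = new Client({ connectionString: process.env.DATABASE_URL });
  await client.connect();
  await client.query("LISTEN bot_send");
  client.on("notification", (msg) => {
    // msg.payload is the BotSendRequest id passed to pg_notify above.
    if (msg.channel === "bot_send" && msg.payload) {
      void deliverSendRequest(msg.payload);
    }
  });
}

Because the notify is wrapped in a best-effort try/catch, a consumer like this would presumably also sweep PENDING rows periodically as a fallback.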

@@ -1,10 +1,11 @@
 "use client";

 import { type ColumnDef } from "@tanstack/react-table";
-import { FileArchive, Eye } from "lucide-react";
+import { FileArchive, Eye, ChevronRight, Layers, Ungroup, Send, ImagePlus, GitMerge } from "lucide-react";
 import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
 import { Badge } from "@/components/ui/badge";
 import { Button } from "@/components/ui/button";
+import { Checkbox } from "@/components/ui/checkbox";
 import { SendToTelegramButton } from "./send-to-telegram-button";

 export interface PackageRow {
@@ -23,15 +24,57 @@ export interface PackageRow {
     id: string;
     title: string;
   };
+  matchedFileCount: number;
+  matchedByContent: boolean;
+  packageGroupId?: string | null;
 }

+export interface GroupHeaderRow {
+  _rowType: "group";
+  id: string;
+  name: string;
+  hasPreview: boolean;
+  totalFileSize: string;
+  totalFileCount: number;
+  packageCount: number;
+  combinedTags: string[];
+  archiveTypes: ("ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT")[];
+  latestIndexedAt: string;
+  sourceChannel: { id: string; title: string };
+  _expanded: boolean;
+}
+
+export interface PackageTableRow extends PackageRow {
+  _rowType: "package";
+  _groupId: string | null;
+  _isGroupMember: boolean;
+}
+
+export type StlTableRow = GroupHeaderRow | PackageTableRow;
+
+function isGroupRow(row: StlTableRow): row is GroupHeaderRow {
+  return row._rowType === "group";
+}
+
 interface PackageColumnsProps {
   onViewFiles: (pkg: PackageRow) => void;
   onSetCreator: (pkg: PackageRow) => void;
   onSetTags: (pkg: PackageRow) => void;
+  searchTerm: string;
+  onToggleGroup: (groupId: string) => void;
+  onRenameGroup: (groupId: string, currentName: string) => void;
+  onDissolveGroup: (groupId: string) => void;
+  onSendAllInGroup: (groupId: string) => void;
+  onRemoveFromGroup: (packageId: string) => void;
+  onGroupPreviewUpload: (groupId: string) => void;
+  selectedPackages: Set<string>;
+  onToggleSelect: (packageId: string) => void;
+  mergeSourceId: string | null;
+  onStartMerge: (groupId: string) => void;
+  onCompleteMerge: (targetGroupId: string) => void;
 }

-function formatBytes(bytesStr: string): string {
+export function formatBytes(bytesStr: string): string {
   const bytes = Number(bytesStr);
   if (bytes === 0) return "0 B";
   const k = 1024;
@@ -58,96 +101,257 @@ function PreviewCell({ pkg }: { pkg: PackageRow }) {
   );
 }

+function GroupPreviewCell({
+  group,
+  onUpload,
+}: {
+  group: GroupHeaderRow;
+  onUpload: (groupId: string) => void;
+}) {
+  if (group.hasPreview) {
+    return (
+      <button
+        className="relative group/preview cursor-pointer"
+        onClick={() => onUpload(group.id)}
+        title="Click to change preview image"
+      >
+        <img
+          src={`/api/groups/${group.id}/preview`}
+          alt=""
+          className="h-9 w-9 rounded-md object-cover bg-muted"
+          loading="lazy"
+        />
+        <div className="absolute inset-0 flex items-center justify-center rounded-md bg-black/50 opacity-0 group-hover/preview:opacity-100 transition-opacity">
+          <ImagePlus className="h-3.5 w-3.5 text-white" />
+        </div>
+      </button>
+    );
+  }
+  return (
+    <button
+      className="flex h-9 w-9 items-center justify-center rounded-md bg-muted hover:bg-muted/80 transition-colors cursor-pointer"
+      onClick={() => onUpload(group.id)}
+      title="Click to add preview image"
+    >
+      <Layers className="h-4 w-4 text-muted-foreground" />
+    </button>
+  );
+}
+
 export function getPackageColumns({
   onViewFiles,
   onSetCreator,
   onSetTags,
-}: PackageColumnsProps): ColumnDef<PackageRow, unknown>[] {
+  searchTerm,
+  onToggleGroup,
+  onRenameGroup,
+  onDissolveGroup,
+  onSendAllInGroup,
+  onRemoveFromGroup,
+  onGroupPreviewUpload,
+  selectedPackages,
+  onToggleSelect,
+  mergeSourceId,
+  onStartMerge,
+  onCompleteMerge,
+}: PackageColumnsProps): ColumnDef<StlTableRow, unknown>[] {
   return [
+    {
+      id: "select",
+      header: "",
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) return null;
+        return (
+          <Checkbox
+            checked={selectedPackages.has(data.id)}
+            onCheckedChange={() => onToggleSelect(data.id)}
+            aria-label="Select package"
+            className="translate-y-[2px]"
+          />
+        );
+      },
+      enableHiding: false,
+      enableSorting: false,
+      size: 32,
+    },
     {
       id: "preview",
       header: "",
-      cell: ({ row }) => <PreviewCell pkg={row.original} />,
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return (
+            <div className="flex items-center gap-1">
+              <button
+                className="shrink-0 p-0.5 cursor-pointer"
+                onClick={() => onToggleGroup(data.id)}
+                aria-label={data._expanded ? "Collapse group" : "Expand group"}
+              >
+                <ChevronRight
+                  className={`h-4 w-4 text-muted-foreground transition-transform ${
+                    data._expanded ? "rotate-90" : ""
+                  }`}
+                />
+              </button>
+              <GroupPreviewCell group={data} onUpload={onGroupPreviewUpload} />
+            </div>
+          );
+        }
+        return (
+          <div className={data._isGroupMember ? "pl-5" : ""}>
+            <PreviewCell pkg={data} />
+          </div>
+        );
+      },
       enableHiding: false,
       enableSorting: false,
-      size: 52,
+      size: 72,
     },
     {
       accessorKey: "fileName",
       header: ({ column }) => <DataTableColumnHeader column={column} title="File Name" />,
-      cell: ({ row }) => (
-        <div className="flex items-center gap-2 min-w-0">
-          <span className="font-medium truncate max-w-[300px]">{row.original.fileName}</span>
-          {row.original.isMultipart && (
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return (
+            <div className="min-w-0">
+              <div className="flex items-center gap-2">
+                <button
+                  className="font-semibold truncate max-w-[300px] cursor-pointer hover:underline text-left"
+                  onClick={() => onRenameGroup(data.id, data.name)}
+                  title="Click to rename group"
+                >
+                  {data.name}
+                </button>
+                <Badge variant="secondary" className="text-[10px] shrink-0">
+                  {data.packageCount} pkg{data.packageCount !== 1 ? "s" : ""}
+                </Badge>
+              </div>
+            </div>
+          );
+        }
+        return (
+          <div className="min-w-0">
+            <div className="flex items-center gap-2">
+              <span className="font-medium truncate max-w-[300px]">{data.fileName}</span>
+              {data.isMultipart && (
                 <Badge variant="outline" className="text-[10px] shrink-0">
                   Multi
                 </Badge>
               )}
             </div>
-        </div>
-      ),
+            {searchTerm && data.matchedByContent && (
+              <button
+                className="text-[11px] text-amber-500 hover:text-amber-400 hover:underline cursor-pointer mt-0.5"
+                onClick={() => onViewFiles(data)}
+              >
+                {data.matchedFileCount.toLocaleString()} file match{data.matchedFileCount !== 1 ? "es" : ""}
+              </button>
+            )}
+          </div>
+        );
+      },
       enableHiding: false,
     },
     {
       accessorKey: "archiveType",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Type" />,
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          const types = data.archiveTypes;
+          if (types.length === 1) {
+            return (
               <Badge variant="secondary" className="text-[10px]">
-                {row.original.archiveType}
+                {types[0]}
               </Badge>
-      ),
+            );
+          }
+          return (
+            <Badge variant="secondary" className="text-[10px]">
+              Mixed
+            </Badge>
+          );
+        }
+        return (
+          <Badge variant="secondary" className="text-[10px]">
+            {data.archiveType}
+          </Badge>
+        );
+      },
     },
     {
       accessorKey: "fileSize",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Size" />,
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        const size = isGroupRow(data) ? data.totalFileSize : data.fileSize;
+        return (
           <span className="text-sm text-muted-foreground">
-            {formatBytes(row.original.fileSize)}
+            {formatBytes(size)}
           </span>
-      ),
+        );
+      },
     },
     {
       accessorKey: "fileCount",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Files" />,
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        const count = isGroupRow(data) ? data.totalFileCount : data.fileCount;
+        return (
           <span className="text-sm">
-            {row.original.fileCount.toLocaleString()}
+            {count.toLocaleString()}
          </span>
-      ),
+        );
+      },
     },
     {
       accessorKey: "creator",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Creator" />,
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
+        }
+        return (
           <button
             className="text-sm text-muted-foreground truncate max-w-[160px] block hover:text-foreground hover:underline cursor-pointer text-left"
-            onClick={() => onSetCreator(row.original)}
+            onClick={() => onSetCreator(data)}
             title="Click to edit creator"
           >
-            {row.original.creator || "\u2014"}
+            {data.creator || "\u2014"}
           </button>
-      ),
+        );
+      },
     },
     {
       id: "tags",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Tags" />,
       cell: ({ row }) => {
-        const tags = row.original.tags;
+        const data = row.original;
+        const tags = isGroupRow(data) ? data.combinedTags : data.tags;
         if (tags.length === 0) {
+          if (isGroupRow(data)) {
+            return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
+          }
           return (
             <button
               className="text-sm text-muted-foreground hover:text-foreground cursor-pointer"
-              onClick={() => onSetTags(row.original)}
+              onClick={() => onSetTags(data)}
               title="Click to add tags"
             >
               {"\u2014"}
             </button>
           );
         }
+        const clickHandler = isGroupRow(data) ? undefined : () => onSetTags(data as PackageTableRow);
         return (
           <button
-            className="flex flex-wrap gap-1 cursor-pointer"
-            onClick={() => onSetTags(row.original)}
-            title="Click to edit tags"
+            className={`flex flex-wrap gap-1 ${clickHandler ? "cursor-pointer" : "cursor-default"}`}
+            onClick={clickHandler}
+            title={clickHandler ? "Click to edit tags" : undefined}
           >
             {tags.map((tag) => (
               <Badge
@@ -161,7 +365,10 @@ export function getPackageColumns({
           </button>
         );
       },
-      accessorFn: (row) => row.tags.join(", "),
+      accessorFn: (row) => {
+        if (isGroupRow(row)) return row.combinedTags.join(", ");
+        return row.tags.join(", ");
+      },
     },
     {
       id: "channel",
@@ -176,31 +383,95 @@ export function getPackageColumns({
     {
       accessorKey: "indexedAt",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Indexed" />,
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        const date = isGroupRow(data) ? data.latestIndexedAt : data.indexedAt;
+        return (
           <span className="text-sm text-muted-foreground">
-            {new Date(row.original.indexedAt).toLocaleDateString()}
+            {new Date(date).toLocaleDateString()}
           </span>
-      ),
+        );
+      },
     },
     {
       id: "actions",
-      cell: ({ row }) => (
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          const isMergeSource = mergeSourceId === data.id;
+          const canMergeHere = mergeSourceId !== null && mergeSourceId !== data.id;
+          return (
+            <div className="flex items-center gap-0.5">
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onSendAllInGroup(data.id)}
+                title="Send all packages in group"
+              >
+                <Send className="h-4 w-4" />
+              </Button>
+              <Button
+                variant="ghost"
+                size="icon"
+                className={`h-8 w-8 ${isMergeSource ? "text-amber-500 bg-amber-500/10 hover:bg-amber-500/20" : ""}`}
+                onClick={() => onStartMerge(data.id)}
+                title={isMergeSource ? "Cancel merge (this group is the merge source)" : "Start merge — mark this group as merge source"}
+              >
+                <GitMerge className="h-4 w-4" />
+              </Button>
+              {canMergeHere && (
+                <Button
+                  variant="ghost"
+                  size="icon"
+                  className="h-8 w-8 text-primary bg-primary/10 hover:bg-primary/20"
+                  onClick={() => onCompleteMerge(data.id)}
+                  title="Merge source group into this group"
+                >
+                  <Layers className="h-4 w-4" />
+                </Button>
+              )}
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onDissolveGroup(data.id)}
+                title="Dissolve group"
+              >
+                <Ungroup className="h-4 w-4" />
+              </Button>
+            </div>
+          );
+        }
+        return (
           <div className="flex items-center gap-0.5">
             <SendToTelegramButton
-              packageId={row.original.id}
-              packageName={row.original.fileName}
+              packageId={data.id}
+              packageName={data.fileName}
               variant="icon"
             />
             <Button
               variant="ghost"
               size="icon"
               className="h-8 w-8"
-              onClick={() => onViewFiles(row.original)}
+              onClick={() => onViewFiles(data)}
             >
               <Eye className="h-4 w-4" />
             </Button>
+            {data._isGroupMember && (
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onRemoveFromGroup(data.id)}
+                title="Remove from group"
+              >
+                <Ungroup className="h-3.5 w-3.5" />
+              </Button>
+            )}
           </div>
-      ),
+        );
+      },
       enableHiding: false,
     },
   ];
@@ -52,6 +52,7 @@ interface PackageFilesDrawerProps {
   pkg: PackageRow | null;
   open: boolean;
   onOpenChange: (open: boolean) => void;
+  highlightTerm?: string;
 }

 function formatBytes(bytesStr: string): string {
@@ -81,6 +82,15 @@ function getExtBadgeClass(ext: string | null): string {
   return EXTENSION_COLORS[ext.toLowerCase()] ?? "bg-zinc-500/15 text-zinc-400 border-zinc-500/30";
 }

+function fileMatchesHighlight(file: FileItem, term: string): boolean {
+  if (!term) return false;
+  const lower = term.toLowerCase();
+  return (
+    file.fileName.toLowerCase().includes(lower) ||
+    file.path.toLowerCase().includes(lower)
+  );
+}
+
 /**
  * Build a tree structure from flat file paths.
  */
@@ -120,11 +130,13 @@ function TreeNodeView({
   depth,
   search,
   defaultOpen,
+  highlightTerm,
 }: {
   node: TreeNode;
   depth: number;
   search: string;
   defaultOpen: boolean;
+  highlightTerm?: string;
 }) {
   const [open, setOpen] = useState(defaultOpen);

@@ -137,10 +149,22 @@ function TreeNodeView({
     });
   }, [node.children]);

-  // If searching, force all open
+  const hasHighlightedDescendant = useMemo(() => {
+    if (!highlightTerm) return false;
+    function check(n: TreeNode): boolean {
+      if (n.file && fileMatchesHighlight(n.file, highlightTerm!)) return true;
+      for (const child of n.children.values()) {
+        if (check(child)) return true;
+      }
+      return false;
+    }
+    return check(node);
+  }, [node, highlightTerm]);
+
+  // If searching or has highlighted descendants, force all open
   useEffect(() => {
-    if (search) setOpen(true);
-  }, [search]);
+    if (search || hasHighlightedDescendant) setOpen(true);
+  }, [search, hasHighlightedDescendant]);

   if (node.isFolder && node.children.size > 0) {
     return (
@@ -177,6 +201,7 @@ function TreeNodeView({
             depth={depth + 1}
             search={search}
             defaultOpen={depth < 1} // Auto-expand first 2 levels
+            highlightTerm={highlightTerm}
           />
         ))}
       </div>
@@ -185,9 +210,15 @@ function TreeNodeView({

   // File node
   if (node.file) {
+    const isHighlighted = highlightTerm ? fileMatchesHighlight(node.file, highlightTerm) : false;
     return (
       <div
-        className="flex items-center gap-2 rounded-md px-1 py-1 hover:bg-muted/50 transition-colors"
+        className={cn(
+          "flex items-center gap-2 rounded-md px-1 py-1 transition-colors",
+          isHighlighted
+            ? "bg-amber-500/15 hover:bg-amber-500/20"
+            : "hover:bg-muted/50"
+        )}
         style={{ paddingLeft: `${Math.max(0, depth) * 16 + 4}px` }}
       >
         <FileText className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
@@ -223,7 +254,7 @@ function countFiles(node: TreeNode): number {

 const PAGE_SIZE = 100;

-export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDrawerProps) {
+export function PackageFilesDrawer({ pkg, open, onOpenChange, highlightTerm }: PackageFilesDrawerProps) {
   const [files, setFiles] = useState<FileItem[]>([]);
   const [total, setTotal] = useState(0);
   const [loading, setLoading] = useState(false);
@@ -471,16 +502,24 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
                   depth={0}
                   search={search}
                   defaultOpen={true}
+                  highlightTerm={highlightTerm}
                 />
               ))}
             </>
           ) : (
             <>
               {/* Flat list for archives without folders */}
-              {filtered.map((file) => (
+              {filtered.map((file) => {
+                const isHighlighted = highlightTerm ? fileMatchesHighlight(file, highlightTerm) : false;
+                return (
                   <div
                     key={file.id}
-                    className="flex items-center gap-3 rounded-md px-2 py-1.5 hover:bg-muted/50 transition-colors"
+                    className={cn(
+                      "flex items-center gap-3 rounded-md px-2 py-1.5 transition-colors",
+                      isHighlighted
+                        ? "bg-amber-500/15 hover:bg-amber-500/20"
+                        : "hover:bg-muted/50"
+                    )}
                   >
                     <FileText className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
                     <div className="min-w-0 flex-1">
@@ -500,7 +539,8 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
                       {formatBytes(file.uncompressedSize)}
                     </span>
                   </div>
-              ))}
+                );
+              })}
             </>
           )}
src/app/(app)/stls/_components/skipped-columns.tsx (new file, 135 lines)
@@ -0,0 +1,135 @@
"use client";

import { type ColumnDef } from "@tanstack/react-table";
import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { RotateCw } from "lucide-react";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";

export interface SkippedRow {
  id: string;
  fileName: string;
  fileSize: string;
  reason: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED";
  errorMessage: string | null;
  sourceChannel: { id: string; title: string };
  isMultipart: boolean;
  partCount: number;
  createdAt: string;
}

function formatBytes(bytesStr: string): string {
  const bytes = Number(bytesStr);
  if (bytes === 0) return "0 B";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB", "TB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
}

const REASON_LABELS: Record<SkippedRow["reason"], { label: string; variant: "default" | "destructive" | "outline" | "secondary" }> = {
  SIZE_LIMIT: { label: "Size Limit", variant: "secondary" },
  DOWNLOAD_FAILED: { label: "Download Failed", variant: "destructive" },
  EXTRACT_FAILED: { label: "Extract Failed", variant: "destructive" },
  UPLOAD_FAILED: { label: "Upload Failed", variant: "destructive" },
};

export function getSkippedColumns({
  onRetry,
}: {
  onRetry: (row: SkippedRow) => void;
}): ColumnDef<SkippedRow, unknown>[] {
  return [
    {
      accessorKey: "fileName",
      header: ({ column }) => <DataTableColumnHeader column={column} title="File Name" />,
      cell: ({ row }) => (
        <div className="flex items-center gap-2 min-w-0">
          <span className="font-medium truncate max-w-[300px]">{row.original.fileName}</span>
          {row.original.isMultipart && (
            <Badge variant="outline" className="text-[10px] shrink-0">
              {row.original.partCount} parts
            </Badge>
          )}
        </div>
      ),
      enableHiding: false,
    },
    {
      accessorKey: "fileSize",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Size" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {formatBytes(row.original.fileSize)}
        </span>
      ),
    },
    {
      accessorKey: "reason",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Reason" />,
      cell: ({ row }) => {
        const { label, variant } = REASON_LABELS[row.original.reason];
        return <Badge variant={variant} className="text-[10px]">{label}</Badge>;
      },
    },
    {
      accessorKey: "errorMessage",
      header: "Error",
      cell: ({ row }) => {
        const msg = row.original.errorMessage;
        if (!msg) return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
        return (
          <Tooltip>
            <TooltipTrigger asChild>
              <span className="text-sm text-muted-foreground truncate max-w-[200px] block cursor-help">
                {msg}
              </span>
            </TooltipTrigger>
            <TooltipContent className="max-w-sm">
              <p className="text-xs break-all">{msg}</p>
            </TooltipContent>
          </Tooltip>
        );
      },
    },
    {
      id: "channel",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Source" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground truncate max-w-[160px] block">
          {row.original.sourceChannel.title}
        </span>
      ),
      accessorFn: (row) => row.sourceChannel.title,
    },
    {
      accessorKey: "createdAt",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Skipped" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {new Date(row.original.createdAt).toLocaleDateString()}
        </span>
      ),
    },
    {
      id: "actions",
      cell: ({ row }) => (
        <Button
          variant="ghost"
          size="icon"
          className="h-8 w-8"
          onClick={() => onRetry(row.original)}
          title="Retry this package"
        >
          <RotateCw className="h-4 w-4" />
        </Button>
      ),
      enableHiding: false,
    },
  ];
}
src/app/(app)/stls/_components/skipped-packages-tab.tsx (new file, 77 lines)
@@ -0,0 +1,77 @@
"use client";

import { useTransition } from "react";
import { useRouter } from "next/navigation";
import { toast } from "sonner";
import { RotateCw } from "lucide-react";
import { useDataTable } from "@/hooks/use-data-table";
import { getSkippedColumns, type SkippedRow } from "./skipped-columns";
import { DataTable } from "@/components/shared/data-table";
import { DataTablePagination } from "@/components/shared/data-table-pagination";
import { Button } from "@/components/ui/button";
import { retrySkippedPackageAction, retryAllSkippedPackagesAction } from "../actions";

interface SkippedPackagesTabProps {
  data: SkippedRow[];
  pageCount: number;
  totalCount: number;
}

export function SkippedPackagesTab({
  data,
  pageCount,
  totalCount,
}: SkippedPackagesTabProps) {
  const router = useRouter();
  const [isPending, startTransition] = useTransition();

  const columns = getSkippedColumns({
    onRetry: (row) => {
      startTransition(async () => {
        const result = await retrySkippedPackageAction(row.id);
        if (result.success) {
          toast.success(`"${row.fileName}" queued for retry`);
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
  });

  const { table } = useDataTable({ data, columns, pageCount });

  return (
    <div className="space-y-4">
      {totalCount > 0 && (
        <div className="flex justify-end">
          <Button
            variant="outline"
            size="sm"
            className="gap-1.5"
            disabled={isPending}
            onClick={() => {
              startTransition(async () => {
                const result = await retryAllSkippedPackagesAction();
                if (result.success) {
                  toast.success(`All ${totalCount} skipped packages queued for retry`);
                  router.refresh();
                } else {
                  toast.error(result.error);
                }
              });
            }}
          >
            <RotateCw className="h-3.5 w-3.5" />
            Retry All ({totalCount})
          </Button>
        </div>
      )}
      <DataTable
        table={table}
        emptyMessage="No skipped or failed packages."
      />
      <DataTablePagination table={table} totalCount={totalCount} />
    </div>
  );
}
@@ -1,18 +1,27 @@
 "use client";

-import { useState, useCallback, useTransition } from "react";
+import { useState, useCallback, useTransition, useMemo, useRef } from "react";
 import { useRouter, usePathname, useSearchParams } from "next/navigation";
 import { toast } from "sonner";
-import { Search } from "lucide-react";
+import { Search, Layers, Upload } from "lucide-react";
+import { UploadDialog } from "./upload-dialog";
 import { useDataTable } from "@/hooks/use-data-table";
-import { getPackageColumns, type PackageRow } from "./package-columns";
+import {
+  getPackageColumns,
+  type PackageRow,
+  type StlTableRow,
+  type PackageTableRow,
+  type GroupHeaderRow,
+} from "./package-columns";
 import { PackageFilesDrawer } from "./package-files-drawer";
 import { IngestionStatus } from "./ingestion-status";
+import { SkippedPackagesTab } from "./skipped-packages-tab";
 import { DataTable } from "@/components/shared/data-table";
 import { DataTablePagination } from "@/components/shared/data-table-pagination";
 import { DataTableViewOptions } from "@/components/shared/data-table-view-options";
 import { PageHeader } from "@/components/shared/page-header";
 import { Input } from "@/components/ui/input";
 import { Button } from "@/components/ui/button";
 import {
   Select,
   SelectContent,
@@ -20,15 +29,43 @@ import {
   SelectTrigger,
   SelectValue,
 } from "@/components/ui/select";
-import type { IngestionAccountStatus } from "@/lib/telegram/types";
-import { updatePackageCreator, updatePackageTags } from "../actions";
+import {
+  Dialog,
+  DialogContent,
+  DialogDescription,
+  DialogFooter,
+  DialogHeader,
+  DialogTitle,
+} from "@/components/ui/dialog";
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { Badge } from "@/components/ui/badge";
+import type { DisplayItem, IngestionAccountStatus, PackageListItem } from "@/lib/telegram/types";
+import type { SkippedRow } from "./skipped-columns";
+import {
+  updatePackageCreator,
+  updatePackageTags,
+  renameGroupAction,
+  dissolveGroupAction,
+  createGroupAction,
+  removeFromGroupAction,
+  sendAllInGroupAction,
+  updateGroupPreviewAction,
+  mergeGroupsAction,
+} from "../actions";

 interface StlTableProps {
-  data: PackageRow[];
+  data: DisplayItem[];
   pageCount: number;
   totalCount: number;
   ingestionStatus: IngestionAccountStatus[];
   availableTags: string[];
+  searchTerm: string;
+  skippedData: SkippedRow[];
+  skippedPageCount: number;
+  skippedTotalCount: number;
+  ungroupedData: PackageListItem[];
+  ungroupedPageCount: number;
+  ungroupedTotalCount: number;
 }

 export function StlTable({
@@ -37,6 +74,13 @@ export function StlTable({
   totalCount,
   ingestionStatus,
   availableTags,
+  searchTerm,
+  skippedData,
+  skippedPageCount,
+  skippedTotalCount,
+  ungroupedData,
+  ungroupedPageCount,
+  ungroupedTotalCount,
 }: StlTableProps) {
   const router = useRouter();
   const pathname = usePathname();
@@ -46,6 +90,94 @@ export function StlTable({
   const [viewPkg, setViewPkg] = useState<PackageRow | null>(null);
   const [, startTransition] = useTransition();

+  // Group expansion state
+  const [expandedGroups, setExpandedGroups] = useState<Set<string>>(new Set());
+
+  // Package selection state (for manual grouping)
+  const [selectedPackages, setSelectedPackages] = useState<Set<string>>(new Set());
+
+  // Create group dialog state
+  const [createGroupOpen, setCreateGroupOpen] = useState(false);
+  const [groupName, setGroupName] = useState("");
+
+  // Group preview upload ref
+  const previewInputRef = useRef<HTMLInputElement>(null);
+  const [uploadGroupId, setUploadGroupId] = useState<string | null>(null);
+
+  // Group merge state
+  const [mergeSourceId, setMergeSourceId] = useState<string | null>(null);
+
+  // Upload dialog state
+  const [uploadOpen, setUploadOpen] = useState(false);
+
+  const toggleGroup = useCallback((groupId: string) => {
+    setExpandedGroups((prev) => {
+      const next = new Set(prev);
+      if (next.has(groupId)) {
+        next.delete(groupId);
+      } else {
+        next.add(groupId);
+      }
+      return next;
+    });
+  }, []);
+
+  const toggleSelect = useCallback((packageId: string) => {
+    setSelectedPackages((prev) => {
+      const next = new Set(prev);
+      if (next.has(packageId)) {
+        next.delete(packageId);
+      } else {
+        next.add(packageId);
+      }
+      return next;
+    });
+  }, []);
+
+  // Flatten DisplayItem[] into StlTableRow[] based on expansion state
+  const tableRows: StlTableRow[] = useMemo(() => {
+    const rows: StlTableRow[] = [];
+    for (const item of data) {
+      if (item.type === "package") {
+        rows.push({
+          ...item.data,
+          _rowType: "package" as const,
+          _groupId: null,
+          _isGroupMember: false,
+        });
+      } else {
+        const group = item.data;
+        const isExpanded = expandedGroups.has(group.id);
+        rows.push({
+          _rowType: "group" as const,
+          id: group.id,
+          name: group.name,
+          hasPreview: group.hasPreview,
+          totalFileSize: group.totalFileSize,
+          totalFileCount: group.totalFileCount,
+          packageCount: group.packageCount,
+          combinedTags: group.combinedTags,
+          archiveTypes: group.archiveTypes,
+          latestIndexedAt: group.latestIndexedAt,
+          sourceChannel: group.sourceChannel,
+          _expanded: isExpanded,
+        });
+        if (isExpanded) {
+          for (const pkg of group.packages) {
+            rows.push({
+              ...pkg,
+              _rowType: "package" as const,
+              _groupId: group.id,
+              _isGroupMember: true,
+              packageGroupId: group.id,
+            });
+          }
+        }
+      }
+    }
+    return rows;
+  }, [data, expandedGroups]);
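
DisplayItem itself is imported from "@/lib/telegram/types" and its definition is not part of this diff; from the way tableRows consumes it, it is presumably a discriminated union along these lines (a reconstruction for reading purposes, not the repo's actual definition):

// Hypothetical shape inferred from the flattening logic above.
type DisplayItem =
  | { type: "package"; data: PackageRow }
  | { type: "group"; data: GroupListItem };

interface GroupListItem {
  id: string;
  name: string;
  hasPreview: boolean;
  totalFileSize: string;
  totalFileCount: number;
  packageCount: number;
  combinedTags: string[];
  archiveTypes: ("ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT")[];
  latestIndexedAt: string;
  sourceChannel: { id: string; title: string };
  packages: PackageRow[];
}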

  const updateSearch = useCallback(
    (value: string) => {
      setSearchValue(value);
@@ -75,8 +207,179 @@ export function StlTable({
    [router, pathname, searchParams]
  );

  const activeTab = searchParams.get("tab") ?? "packages";

  const updateTab = useCallback(
    (value: string) => {
      const params = new URLSearchParams(searchParams.toString());
      if (value === "packages") {
        params.delete("tab");
      } else {
        params.set("tab", value);
      }
      params.set("page", "1");
      router.push(`${pathname}?${params.toString()}`, { scroll: false });
    },
    [router, pathname, searchParams]
  );

  const handleRenameGroup = useCallback(
    (groupId: string, currentName: string) => {
      const value = prompt("Enter group name:", currentName);
      if (value === null || value.trim() === currentName) return;
      startTransition(async () => {
        const result = await renameGroupAction(groupId, value);
        if (result.success) {
          toast.success(`Group renamed to "${value.trim()}"`);
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleDissolveGroup = useCallback(
    (groupId: string) => {
      if (!confirm("Dissolve this group? Packages will become standalone items.")) return;
      startTransition(async () => {
        const result = await dissolveGroupAction(groupId);
        if (result.success) {
          toast.success("Group dissolved");
          setExpandedGroups((prev) => {
            const next = new Set(prev);
            next.delete(groupId);
            return next;
          });
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleSendAllInGroup = useCallback(
    (groupId: string) => {
      if (!confirm("Send all packages in this group to your Telegram?")) return;
      startTransition(async () => {
        const result = await sendAllInGroupAction(groupId);
        if (result.success) {
          toast.success("Group packages queued for sending");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleRemoveFromGroup = useCallback(
    (packageId: string) => {
      startTransition(async () => {
        const result = await removeFromGroupAction(packageId);
        if (result.success) {
          toast.success("Package removed from group");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleCreateGroup = useCallback(() => {
    if (selectedPackages.size < 2) return;
    setGroupName("");
    setCreateGroupOpen(true);
  }, [selectedPackages.size]);

  const submitCreateGroup = useCallback(() => {
    if (!groupName.trim() || selectedPackages.size < 2) return;
    const ids = Array.from(selectedPackages);
    startTransition(async () => {
      const result = await createGroupAction(groupName, ids);
      if (result.success) {
        toast.success(`Group "${groupName.trim()}" created`);
        setSelectedPackages(new Set());
        setCreateGroupOpen(false);
        router.refresh();
      } else {
        toast.error(result.error);
      }
    });
  }, [groupName, selectedPackages, router]);

  // Group preview upload handler (Task 12)
  const handleGroupPreviewUpload = useCallback((groupId: string) => {
    setUploadGroupId(groupId);
    // Trigger file input after state update
    setTimeout(() => {
      previewInputRef.current?.click();
    }, 0);
  }, []);

  const handlePreviewFileChange = useCallback(
    (e: React.ChangeEvent<HTMLInputElement>) => {
      const file = e.target.files?.[0];
      if (!file || !uploadGroupId) return;

      const formData = new FormData();
      formData.append("file", file);

      startTransition(async () => {
        const result = await updateGroupPreviewAction(uploadGroupId, formData);
        if (result.success) {
          toast.success("Group preview updated");
          router.refresh();
        } else {
          toast.error(result.error);
        }
        setUploadGroupId(null);
      });

      // Reset input so the same file can be selected again
      e.target.value = "";
    },
    [uploadGroupId, router]
  );

  const handleStartMerge = useCallback((groupId: string) => {
    setMergeSourceId((prev) => {
      if (prev === groupId) {
        toast.info("Merge cancelled");
        return null;
      }
      toast.info("Merge source selected — click the merge-here button on the target group");
      return groupId;
    });
  }, []);
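
  // Merging is a two-click flow: the first click marks a source group (a
  // second click on the same group cancels), and handleMergeGroups below
  // completes the move once the target group's merge-here button is clicked.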

  const handleMergeGroups = useCallback(
    (targetGroupId: string) => {
      if (!mergeSourceId) return;
      const sourceId = mergeSourceId;
      startTransition(async () => {
        const result = await mergeGroupsAction(targetGroupId, sourceId);
        if (result.success) {
          toast.success("Groups merged successfully");
          setMergeSourceId(null);
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [mergeSourceId, router]
  );

  const columns = getPackageColumns({
    onViewFiles: (pkg) => setViewPkg(pkg),
    searchTerm,
    onSetCreator: (pkg) => {
      const value = prompt("Enter creator name:", pkg.creator ?? "");
      if (value === null) return;
@@ -107,9 +410,37 @@ export function StlTable({
        }
      });
    },
    onToggleGroup: toggleGroup,
    onRenameGroup: handleRenameGroup,
    onDissolveGroup: handleDissolveGroup,
    onSendAllInGroup: handleSendAllInGroup,
    onRemoveFromGroup: handleRemoveFromGroup,
    onGroupPreviewUpload: handleGroupPreviewUpload,
    selectedPackages,
    onToggleSelect: toggleSelect,
    mergeSourceId,
    onStartMerge: handleStartMerge,
    onCompleteMerge: handleMergeGroups,
  });

-  const { table } = useDataTable({ data, columns, pageCount });
+  const { table } = useDataTable({ data: tableRows, columns, pageCount });

  const ungroupedRows: StlTableRow[] = useMemo(
    () =>
      ungroupedData.map((pkg) => ({
        ...pkg,
        _rowType: "package" as const,
        _groupId: null,
        _isGroupMember: false,
      })),
    [ungroupedData]
  );

  const { table: ungroupedTable } = useDataTable({
    data: ungroupedRows,
    columns,
    pageCount: ungroupedPageCount,
  });

  const activeTag = searchParams.get("tag") ?? "";

@@ -122,6 +453,28 @@ export function StlTable({
        <IngestionStatus initialStatus={ingestionStatus} />
      </PageHeader>

      <Tabs value={activeTab} onValueChange={updateTab}>
        <TabsList>
          <TabsTrigger value="packages">Packages</TabsTrigger>
          <TabsTrigger value="skipped" className="gap-1.5">
            Skipped / Failed
            {skippedTotalCount > 0 && (
              <Badge variant="secondary" className="text-[10px] ml-1">
                {skippedTotalCount}
              </Badge>
            )}
          </TabsTrigger>
          <TabsTrigger value="ungrouped" className="gap-1.5">
            Ungrouped
            {ungroupedTotalCount > 0 && (
              <Badge variant="secondary" className="h-5 px-1.5 text-[10px]">
                {ungroupedTotalCount}
              </Badge>
            )}
          </TabsTrigger>
        </TabsList>

        <TabsContent value="packages" className="space-y-4">
          <div className="flex flex-wrap items-center gap-2">
            <div className="relative flex-1 min-w-[200px] max-w-sm">
              <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
@@ -148,13 +501,58 @@ export function StlTable({
              </Select>
            )}
            <DataTableViewOptions table={table} />
            <Button variant="outline" size="sm" className="h-9" onClick={() => setUploadOpen(true)}>
              <Upload className="mr-2 h-4 w-4" />
              Upload Files
            </Button>
            {selectedPackages.size >= 2 && (
              <Button
                variant="outline"
                size="sm"
                className="h-9 gap-1.5"
                onClick={handleCreateGroup}
              >
                <Layers className="h-3.5 w-3.5" />
                Group {selectedPackages.size} Selected
              </Button>
            )}
            {selectedPackages.size > 0 && selectedPackages.size < 2 && (
              <span className="text-xs text-muted-foreground">
                Select at least 2 packages to group
              </span>
            )}
          </div>

          <DataTable
            table={table}
            emptyMessage="No packages found. Archives will appear here after ingestion."
            rowClassName={(row) => {
              const data = row.original as StlTableRow;
              if (data._rowType === "group") {
                return "bg-muted/30 border-border";
              }
              if (data._rowType === "package" && (data as PackageTableRow)._isGroupMember) {
                return "bg-muted/10";
              }
              return "";
            }}
          />
          <DataTablePagination table={table} totalCount={totalCount} />
        </TabsContent>

        <TabsContent value="skipped">
          <SkippedPackagesTab
            data={skippedData}
            pageCount={skippedPageCount}
            totalCount={skippedTotalCount}
          />
        </TabsContent>

        <TabsContent value="ungrouped" className="space-y-4">
          <DataTable table={ungroupedTable} emptyMessage="All packages are grouped!" />
          <DataTablePagination table={ungroupedTable} totalCount={ungroupedTotalCount} />
        </TabsContent>
      </Tabs>

      <PackageFilesDrawer
        pkg={viewPkg}
@@ -162,6 +560,50 @@ export function StlTable({
        onOpenChange={(open) => {
          if (!open) setViewPkg(null);
        }}
        highlightTerm={searchTerm}
      />

      {/* Create Group Dialog */}
      <Dialog open={createGroupOpen} onOpenChange={setCreateGroupOpen}>
        <DialogContent className="sm:max-w-md">
          <DialogHeader>
            <DialogTitle>Create Package Group</DialogTitle>
            <DialogDescription>
              Group {selectedPackages.size} selected packages together. Enter a name for the group.
            </DialogDescription>
          </DialogHeader>
          <div className="py-4">
            <Input
              placeholder="Group name..."
              value={groupName}
              onChange={(e) => setGroupName(e.target.value)}
              onKeyDown={(e) => {
                if (e.key === "Enter") submitCreateGroup();
              }}
              autoFocus
            />
          </div>
          <DialogFooter>
            <Button variant="outline" onClick={() => setCreateGroupOpen(false)}>
              Cancel
            </Button>
            <Button onClick={submitCreateGroup} disabled={!groupName.trim()}>
              <Layers className="h-4 w-4 mr-1" />
              Create Group
            </Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>

      <UploadDialog open={uploadOpen} onOpenChange={setUploadOpen} />

      {/* Hidden file input for group preview upload (Task 12) */}
      <input
        ref={previewInputRef}
        type="file"
        accept="image/jpeg,image/png,image/webp"
        className="hidden"
        onChange={handlePreviewFileChange}
      />
    </div>
  );
243  src/app/(app)/stls/_components/upload-dialog.tsx  Normal file
@@ -0,0 +1,243 @@
"use client";
|
||||
|
||||
import { useState, useRef, useTransition, useEffect } from "react";
|
||||
import { Upload, File, X, Loader2, CheckCircle2, AlertCircle } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
|
||||
interface UploadDialogProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
}
|
||||
|
||||
function formatSize(bytes: number): string {
|
||||
if (bytes >= 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024 * 1024)).toFixed(1)} GB`;
|
||||
if (bytes >= 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(0)} MB`;
|
||||
return `${(bytes / 1024).toFixed(0)} KB`;
|
||||
}
|
||||
|
||||
type UploadStatus = "idle" | "uploading" | "processing" | "done" | "error";
|
||||
|
||||
export function UploadDialog({ open, onOpenChange }: UploadDialogProps) {
|
||||
const [files, setFiles] = useState<File[]>([]);
|
||||
const [groupName, setGroupName] = useState("");
|
||||
const [status, setStatus] = useState<UploadStatus>("idle");
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [isPending, startTransition] = useTransition();
|
||||
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||
const pollRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setFiles([]);
|
||||
setGroupName("");
|
||||
setStatus("idle");
|
||||
setError(null);
|
||||
}
|
||||
return () => {
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
};
|
||||
}, [open]);
|
||||
|
||||
function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
|
||||
if (e.target.files) {
|
||||
setFiles(Array.from(e.target.files));
|
||||
}
|
||||
}
|
||||
|
||||
function removeFile(index: number) {
|
||||
setFiles((prev) => prev.filter((_, i) => i !== index));
|
||||
}
|
||||
|
||||
function handleUpload() {
|
||||
if (files.length === 0) return;
|
||||
|
||||
startTransition(async () => {
|
||||
setStatus("uploading");
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const formData = new FormData();
|
||||
for (const file of files) {
|
||||
formData.append("files", file);
|
||||
}
|
||||
if (groupName.trim()) {
|
||||
formData.append("groupName", groupName.trim());
|
||||
}
|
||||
|
||||
const res = await fetch("/api/uploads", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
});
|
||||
|
||||
const data = await res.json();
|
||||
if (!res.ok) {
|
||||
setStatus("error");
|
||||
setError(data.error ?? "Upload failed");
|
||||
return;
|
||||
}
|
||||
|
||||
setStatus("processing");
|
||||
|
||||
// Poll for completion
|
||||
pollRef.current = setInterval(async () => {
|
||||
try {
|
||||
const statusRes = await fetch(`/api/uploads/${data.uploadId}`);
|
||||
const statusData = await statusRes.json();
|
||||
|
||||
if (statusData.status === "COMPLETED") {
|
||||
setStatus("done");
|
||||
toast.success(`${files.length} file(s) uploaded and indexed`);
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
} else if (statusData.status === "FAILED") {
|
||||
setStatus("error");
|
||||
setError(statusData.errorMessage ?? "Processing failed");
|
||||
if (pollRef.current) clearInterval(pollRef.current);
|
||||
}
|
||||
} catch {
|
||||
// Keep polling
|
||||
}
|
||||
}, 3000);
|
||||
|
||||
// Stop polling after 10 minutes
|
||||
setTimeout(() => {
|
||||
if (pollRef.current) {
|
||||
clearInterval(pollRef.current);
|
||||
pollRef.current = null;
|
||||
setStatus((s) => s === "processing" ? "done" : s);
|
||||
}
|
||||
}, 600_000);
|
||||
} catch {
|
||||
setStatus("error");
|
||||
setError("Network error");
|
||||
}
|
||||
});
|
||||
}
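
  // The 3s poll plus the 10-minute cap above bounds how long a stalled worker
  // can hold the dialog in "processing"; after the cap it optimistically flips
  // to "done" rather than spinning forever. A server-sent status stream could
  // replace the polling, at the cost of more plumbing.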

  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-lg">
        <DialogHeader>
          <DialogTitle>Upload Files</DialogTitle>
          <DialogDescription>
            Upload archive files to be processed and indexed. Multiple files will be automatically grouped.
          </DialogDescription>
        </DialogHeader>

        {status === "idle" && (
          <div className="space-y-4">
            <div
              className="border-2 border-dashed rounded-lg p-8 text-center cursor-pointer hover:border-primary/50 transition-colors"
              onClick={() => fileInputRef.current?.click()}
            >
              <Upload className="h-8 w-8 mx-auto mb-2 text-muted-foreground" />
              <p className="text-sm text-muted-foreground">
                Click to select files or drag & drop
              </p>
              <p className="text-xs text-muted-foreground mt-1">
                ZIP, RAR, 7Z files up to 4GB each
              </p>
              <input
                ref={fileInputRef}
                type="file"
                multiple
                accept=".zip,.rar,.7z,.pdf,.stl"
                onChange={handleFileChange}
                className="hidden"
              />
            </div>

            {files.length > 0 && (
              <div className="space-y-2">
                {files.map((file, i) => (
                  <div key={i} className="flex items-center gap-2 p-2 rounded bg-muted/30">
                    <File className="h-4 w-4 shrink-0 text-muted-foreground" />
                    <span className="text-sm flex-1 truncate">{file.name}</span>
                    <span className="text-xs text-muted-foreground">{formatSize(file.size)}</span>
                    <button onClick={() => removeFile(i)} className="p-0.5 hover:text-destructive">
                      <X className="h-3.5 w-3.5" />
                    </button>
                  </div>
                ))}
              </div>
            )}

            {files.length > 1 && (
              <div>
                <Label htmlFor="groupName" className="text-sm">Group Name (optional)</Label>
                <Input
                  id="groupName"
                  value={groupName}
                  onChange={(e) => setGroupName(e.target.value)}
                  placeholder="Auto-generated from filenames"
                  className="mt-1"
                />
              </div>
            )}
          </div>
        )}

        {(status === "uploading" || status === "processing") && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-muted/30 border">
            <Loader2 className="h-6 w-6 animate-spin text-primary" />
            <div>
              <p className="text-sm font-medium">
                {status === "uploading" ? "Uploading files..." : "Processing & uploading to Telegram..."}
              </p>
              <p className="text-xs text-muted-foreground mt-0.5">
                {status === "uploading"
                  ? "Sending files to server"
                  : "Hashing, extracting metadata, uploading to destination channel"}
              </p>
            </div>
          </div>
        )}

        {status === "done" && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-green-500/10 border border-green-500/20">
            <CheckCircle2 className="h-6 w-6 text-green-500" />
            <div>
              <p className="text-sm font-medium text-green-500">Upload complete!</p>
              <p className="text-xs text-muted-foreground">Files have been indexed and uploaded to Telegram.</p>
            </div>
          </div>
        )}

        {status === "error" && (
          <div className="flex items-center gap-3 p-6 rounded-lg bg-destructive/10 border border-destructive/20">
            <AlertCircle className="h-6 w-6 text-destructive" />
            <div>
              <p className="text-sm font-medium text-destructive">Upload failed</p>
              <p className="text-xs text-muted-foreground">{error}</p>
            </div>
          </div>
        )}

        <DialogFooter>
          {status === "idle" && (
            <>
              <Button variant="outline" onClick={() => onOpenChange(false)}>Cancel</Button>
              <Button onClick={handleUpload} disabled={files.length === 0 || isPending}>
                <Upload className="h-4 w-4 mr-1" />
                Upload {files.length > 0 ? `(${files.length})` : ""}
              </Button>
            </>
          )}
          {(status === "done" || status === "error") && (
            <Button variant="outline" onClick={() => onOpenChange(false)}>Close</Button>
          )}
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
@@ -4,6 +4,14 @@ import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import type { ActionResult } from "@/types/api.types";
import { revalidatePath } from "next/cache";
import {
  updatePackageGroupName,
  updatePackageGroupPreview,
  createManualGroup,
  removePackageFromGroup,
  dissolveGroup,
  mergeGroups,
} from "@/lib/telegram/queries";

const ALLOWED_IMAGE_TYPES = [
  "image/jpeg",
@@ -177,3 +185,407 @@ export async function setPreviewFromExtract(
    return { success: false, error: "Failed to set preview from archive image" };
  }
}

export async function repairPackageAction(
  packageId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const pkg = await prisma.package.findUnique({
      where: { id: packageId },
      select: {
        id: true,
        fileName: true,
        sourceChannelId: true,
        sourceMessageId: true,
        destChannelId: true,
        destMessageId: true,
      },
    });

    if (!pkg) return { success: false, error: "Package not found" };

    // Clear the destination info so the worker re-processes it
    await prisma.package.update({
      where: { id: packageId },
      data: {
        destMessageId: null,
        destMessageIds: [],
        destChannelId: null,
      },
    });

    // Reset the channel watermark to before this message so worker picks it up
    await prisma.accountChannelMap.updateMany({
      where: {
        channelId: pkg.sourceChannelId,
        lastProcessedMessageId: { gte: pkg.sourceMessageId },
      },
      data: { lastProcessedMessageId: pkg.sourceMessageId - BigInt(1) },
    });

    // Mark related notifications as read
    await prisma.systemNotification.updateMany({
      where: {
        context: { path: ["packageId"], equals: packageId },
        isRead: false,
      },
      data: { isRead: true },
    });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to schedule repair" };
  }
}

export async function retrySkippedPackageAction(
  id: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const skipped = await prisma.skippedPackage.findUnique({
      where: { id },
    });
    if (!skipped) return { success: false, error: "Skipped package not found" };

    // Find the AccountChannelMap and reset watermark if needed
    const mapping = await prisma.accountChannelMap.findUnique({
      where: {
        accountId_channelId: {
          accountId: skipped.accountId,
          channelId: skipped.sourceChannelId,
        },
      },
    });

    if (mapping) {
      const targetId = skipped.sourceMessageId - BigInt(1);

      // Only reset if the watermark is past this message
      if (mapping.lastProcessedMessageId && mapping.lastProcessedMessageId >= skipped.sourceMessageId) {
        await prisma.accountChannelMap.update({
          where: { id: mapping.id },
          data: { lastProcessedMessageId: targetId },
        });
      }

      // Also reset TopicProgress if this was a forum topic message
      if (skipped.sourceTopicId) {
        const topicProgress = await prisma.topicProgress.findFirst({
          where: {
            accountChannelMapId: mapping.id,
            topicId: skipped.sourceTopicId,
          },
        });
        if (topicProgress && topicProgress.lastProcessedMessageId && topicProgress.lastProcessedMessageId >= skipped.sourceMessageId) {
          await prisma.topicProgress.update({
            where: { id: topicProgress.id },
            data: { lastProcessedMessageId: targetId },
          });
        }
      }
    }

    // Delete the skip record
    await prisma.skippedPackage.delete({ where: { id } });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to retry skipped package" };
  }
}
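
// Ingestion progress is a per-channel "watermark" (lastProcessedMessageId), so
// rewinding it to sourceMessageId - 1 is what makes the worker revisit the
// message on its next pass; there is no separate retry queue for this.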

export async function retryAllSkippedPackagesAction(
  reason?: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED"
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const where: Record<string, unknown> = {};
    if (reason) where.reason = reason;

    const skippedItems = await prisma.skippedPackage.findMany({ where });

    if (skippedItems.length === 0) {
      return { success: true, data: undefined };
    }

    // Group by (accountId, channelId) to find minimum messageId per channel
    const channelResets = new Map<
      string,
      { mappingKey: { accountId: string; channelId: string }; minMessageId: bigint; topicResets: Map<bigint, bigint> }
    >();

    for (const item of skippedItems) {
      const key = `${item.accountId}:${item.sourceChannelId}`;
      const existing = channelResets.get(key);
      const targetId = item.sourceMessageId - BigInt(1);

      if (!existing) {
        const topicResets = new Map<bigint, bigint>();
        if (item.sourceTopicId) {
          topicResets.set(item.sourceTopicId, targetId);
        }
        channelResets.set(key, {
          mappingKey: { accountId: item.accountId, channelId: item.sourceChannelId },
          minMessageId: targetId,
          topicResets,
        });
      } else {
        if (targetId < existing.minMessageId) {
          existing.minMessageId = targetId;
        }
        if (item.sourceTopicId) {
          const existingTopic = existing.topicResets.get(item.sourceTopicId);
          if (!existingTopic || targetId < existingTopic) {
            existing.topicResets.set(item.sourceTopicId, targetId);
          }
        }
      }
    }

    // Reset watermarks
    for (const reset of channelResets.values()) {
      const mapping = await prisma.accountChannelMap.findUnique({
        where: { accountId_channelId: reset.mappingKey },
      });
      if (!mapping) continue;

      if (mapping.lastProcessedMessageId && mapping.lastProcessedMessageId > reset.minMessageId) {
        await prisma.accountChannelMap.update({
          where: { id: mapping.id },
          data: { lastProcessedMessageId: reset.minMessageId },
        });
      }

      // Reset topic progress
      for (const [topicId, targetId] of reset.topicResets) {
        const topicProgress = await prisma.topicProgress.findFirst({
          where: { accountChannelMapId: mapping.id, topicId },
        });
        if (topicProgress && topicProgress.lastProcessedMessageId && topicProgress.lastProcessedMessageId > targetId) {
          await prisma.topicProgress.update({
            where: { id: topicProgress.id },
            data: { lastProcessedMessageId: targetId },
          });
        }
      }
    }

    // Delete all matching skip records
    await prisma.skippedPackage.deleteMany({ where });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to retry skipped packages" };
  }
}
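
// Rewinding each channel to the *minimum* skipped message ID makes the worker
// re-scan everything after that point, including messages that already
// succeeded (assumption: the ingest pass dedupes those). In exchange it issues
// one watermark write per (account, channel) instead of one per skip record.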

export async function renameGroupAction(
  groupId: string,
  name: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  if (!name.trim()) {
    return { success: false, error: "Group name cannot be empty" };
  }

  try {
    await updatePackageGroupName(groupId, name);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to rename group" };
  }
}

export async function dissolveGroupAction(
  groupId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await dissolveGroup(groupId);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to dissolve group" };
  }
}

export async function createGroupAction(
  name: string,
  packageIds: string[]
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  if (!name.trim()) {
    return { success: false, error: "Group name cannot be empty" };
  }
  if (packageIds.length < 2) {
    return { success: false, error: "At least 2 packages are required to create a group" };
  }

  try {
    await createManualGroup(name, packageIds);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to create group";
    return { success: false, error: message };
  }
}

export async function removeFromGroupAction(
  packageId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await removePackageFromGroup(packageId);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to remove package from group" };
  }
}

export async function updateGroupPreviewAction(
  groupId: string,
  formData: FormData
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const file = formData.get("file");
  if (!(file instanceof File)) {
    return { success: false, error: "No file provided" };
  }

  if (!ALLOWED_IMAGE_TYPES.includes(file.type as (typeof ALLOWED_IMAGE_TYPES)[number])) {
    return { success: false, error: "Only JPG, PNG, and WebP images are accepted" };
  }

  if (file.size > MAX_IMAGE_SIZE) {
    return { success: false, error: "Image must be smaller than 2 MB" };
  }

  try {
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    await updatePackageGroupPreview(groupId, buffer);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to upload group preview image" };
  }
}

export async function mergeGroupsAction(
  targetGroupId: string,
  sourceGroupId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  if (targetGroupId === sourceGroupId) {
    return { success: false, error: "Cannot merge a group with itself" };
  }

  try {
    await mergeGroups(targetGroupId, sourceGroupId);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to merge groups" };
  }
}

export async function sendAllInGroupAction(
  groupId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const telegramLink = await prisma.telegramLink.findUnique({
      where: { userId: session.user.id },
    });

    if (!telegramLink) {
      return { success: false, error: "No linked Telegram account. Link one in Settings." };
    }

    const group = await prisma.packageGroup.findUnique({
      where: { id: groupId },
      select: {
        packages: {
          select: { id: true, destChannelId: true, destMessageId: true, fileName: true },
        },
      },
    });

    if (!group) {
      return { success: false, error: "Group not found" };
    }

    const sendablePackages = group.packages.filter(
      (p) => p.destChannelId && p.destMessageId
    );

    if (sendablePackages.length === 0) {
      return { success: false, error: "No packages in this group have been uploaded to a destination channel" };
    }

    let queued = 0;
    for (const pkg of sendablePackages) {
      // Only create if no existing PENDING/SENDING request for this package+link combo
      const existing = await prisma.botSendRequest.findFirst({
        where: {
          packageId: pkg.id,
          telegramLinkId: telegramLink.id,
          status: { in: ["PENDING", "SENDING"] },
        },
      });

      if (!existing) {
        const sendRequest = await prisma.botSendRequest.create({
          data: {
            packageId: pkg.id,
            telegramLinkId: telegramLink.id,
            requestedByUserId: session.user.id,
            status: "PENDING",
          },
        });

        // Notify the bot via pg_notify
        try {
          await prisma.$queryRawUnsafe(
            `SELECT pg_notify('bot_send', $1)`,
            sendRequest.id
          );
        } catch {
          // Best-effort — the bot also polls periodically
        }

        queued++;
      }
    }

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to send group packages" };
  }
}
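
// Delivery uses a durable row plus a wake-up hint: BotSendRequest is the record
// of truth and pg_notify only nudges the bot. If the NOTIFY is lost, the bot's
// periodic poll still finds the PENDING row, which is why the try/catch above
// can safely swallow notification failures.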

@@ -1,7 +1,8 @@
import { auth } from "@/lib/auth";
import { redirect } from "next/navigation";
-import { listPackages, searchPackages, getIngestionStatus, getAllPackageTags } from "@/lib/telegram/queries";
+import { listDisplayItems, searchPackages, getIngestionStatus, getAllPackageTags, listSkippedPackages, countSkippedPackages, listUngroupedPackages, countUngroupedPackages } from "@/lib/telegram/queries";
import { StlTable } from "./_components/stl-table";
import type { DisplayItem, PackageListItem } from "@/lib/telegram/types";

interface Props {
  searchParams: Promise<Record<string, string | string[] | undefined>>;
@@ -20,9 +21,10 @@ export default async function StlFilesPage({ searchParams }: Props) {
  const search = (params.search as string) ?? "";
  const creator = (params.creator as string) || undefined;
  const tag = (params.tag as string) || undefined;
  const tab = (params.tab as string) ?? "packages";

-  // Fetch packages, ingestion status, and available tags in parallel
-  const [result, ingestionStatus, availableTags] = await Promise.all([
+  // Fetch packages, ingestion status, tags, and skipped count in parallel
+  const [result, ingestionStatus, availableTags, skippedCount, ungroupedCount] = await Promise.all([
    search
      ? searchPackages({
          query: search,
@@ -30,7 +32,7 @@ export default async function StlFilesPage({ searchParams }: Props) {
          limit: perPage,
          searchIn: "both",
        })
-      : listPackages({
+      : listDisplayItems({
          page,
          limit: perPage,
          creator,
@@ -40,15 +42,39 @@ export default async function StlFilesPage({ searchParams }: Props) {
        }),
    getIngestionStatus(),
    getAllPackageTags(),
    countSkippedPackages(),
    countUngroupedPackages(),
  ]);

  // For search results, wrap as DisplayItem[]; for non-search, already DisplayItem[]
  const displayItems: DisplayItem[] = search
    ? (result as { items: PackageListItem[] }).items.map((item) => ({ type: "package" as const, data: item }))
    : (result as { items: DisplayItem[] }).items;

  // Fetch skipped packages only if on that tab
  const skippedResult = tab === "skipped"
    ? await listSkippedPackages({ page, limit: perPage })
    : null;

  // Fetch ungrouped packages only if on that tab
  const ungroupedResult = tab === "ungrouped"
    ? await listUngroupedPackages({ page, limit: perPage })
    : null;

  return (
    <StlTable
-      data={result.items}
+      data={displayItems}
      pageCount={result.pagination.totalPages}
      totalCount={result.pagination.total}
      ingestionStatus={ingestionStatus}
      availableTags={availableTags}
      searchTerm={search}
      skippedData={skippedResult?.items ?? []}
      skippedPageCount={skippedResult?.pagination.totalPages ?? 0}
      skippedTotalCount={skippedCount}
      ungroupedData={ungroupedResult?.items ?? []}
      ungroupedPageCount={ungroupedResult?.pagination.totalPages ?? 0}
      ungroupedTotalCount={ungroupedCount}
    />
  );
}
@@ -291,10 +291,25 @@ export async function setChannelCategory(
  if (!admin.success) return admin;

  try {
    const existing = await prisma.telegramChannel.findUnique({
      where: { id },
      select: { category: true },
    });
    if (!existing) return { success: false, error: "Channel not found" };

    const oldCategory = existing.category;
    const newCategory = category?.trim() || null;

    await prisma.telegramChannel.update({
      where: { id },
-      data: { category: category?.trim() || null },
+      data: { category: newCategory },
    });

    // Retroactively re-tag packages from this channel when category changes
    if (oldCategory !== newCategory && newCategory) {
      await retagChannelPackages(id, oldCategory, newCategory);
    }

    revalidatePath("/telegram");
    return { success: true, data: undefined };
  } catch {
@@ -302,6 +317,50 @@ export async function setChannelCategory(
  }
}

export async function retagChannelPackages(
  channelId: string,
  oldCategory: string | null,
  newCategory: string
): Promise<ActionResult<{ updated: number }>> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    // Find packages from this channel that have the old category tag (or no category tag)
    const packages = await prisma.package.findMany({
      where: { sourceChannelId: channelId },
      select: { id: true, tags: true },
    });

    let updated = 0;
    for (const pkg of packages) {
      const tags = [...pkg.tags];
      // Remove old category tag if present
      if (oldCategory) {
        const idx = tags.indexOf(oldCategory);
        if (idx !== -1) tags.splice(idx, 1);
      }
      // Add new category tag if not already present
      if (!tags.includes(newCategory)) {
        tags.push(newCategory);
      }
      // Only update if tags actually changed
      if (JSON.stringify(tags) !== JSON.stringify(pkg.tags)) {
        await prisma.package.update({
          where: { id: pkg.id },
          data: { tags },
        });
        updated++;
      }
    }

    revalidatePath("/stls");
    return { success: true, data: { updated } };
  } catch {
    return { success: false, error: "Failed to re-tag packages" };
  }
}
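
// Tags are diffed via JSON.stringify, so order matters: the old category is
// spliced out in place and the new one appended, leaving unaffected packages
// byte-identical so their writes are skipped. Changed packages are still
// updated one query each, since every row's tags array can differ.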

export async function setChannelType(
  id: string,
  type: "SOURCE" | "DESTINATION"

36  src/app/api/groups/[id]/preview/route.ts  Normal file
@@ -0,0 +1,36 @@
import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";

export async function GET(
  request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const authResult = await authenticateApiRequest(request);
  if ("error" in authResult) return authResult.error;

  const { id } = await params;

  const group = await prisma.packageGroup.findUnique({
    where: { id },
    select: { previewData: true },
  });

  if (!group || !group.previewData) {
    return new NextResponse(null, { status: 404 });
  }

  const buffer =
    group.previewData instanceof Buffer
      ? group.previewData
      : Buffer.from(group.previewData);

  return new NextResponse(buffer, {
    status: 200,
    headers: {
      "Content-Type": "image/jpeg",
      "Content-Length": String(buffer.length),
      "Cache-Control": "public, max-age=3600, immutable",
    },
  });
}
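
// The Content-Type above is always image/jpeg, although group previews may be
// uploaded as PNG or WebP; most clients sniff the actual bytes, but persisting
// the MIME type next to previewData would make the header exact.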

33  src/app/api/notifications/read/route.ts  Normal file
@@ -0,0 +1,33 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import {
  markNotificationRead,
  markAllNotificationsRead,
  dismissNotification,
  clearAllNotifications,
} from "@/data/notification.queries";

export const dynamic = "force-dynamic";

export async function POST(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const body = await request.json().catch(() => ({}));
  const id = body.id as string | undefined;
  const action = (body.action as string) ?? "read";

  if (action === "dismiss" && id) {
    await dismissNotification(id);
  } else if (action === "clear") {
    await clearAllNotifications();
  } else if (id) {
    await markNotificationRead(id);
  } else {
    await markAllNotificationsRead();
  }

  return NextResponse.json({ success: true });
}
43  src/app/api/notifications/repair/route.ts  Normal file
@@ -0,0 +1,43 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

export async function POST(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const body = await request.json().catch(() => ({}));
  const notificationId = body.notificationId as string;
  if (!notificationId) {
    return NextResponse.json({ error: "notificationId required" }, { status: 400 });
  }

  const notification = await prisma.systemNotification.findUnique({
    where: { id: notificationId },
  });

  if (!notification) {
    return NextResponse.json({ error: "Notification not found" }, { status: 404 });
  }

  const context = notification.context as Record<string, unknown> | null;
  const packageId = context?.packageId as string | undefined;

  if (!packageId) {
    return NextResponse.json({ error: "Notification has no associated package" }, { status: 400 });
  }

  // Import and call the repair action
  const { repairPackageAction } = await import("@/app/(app)/stls/actions");
  const result = await repairPackageAction(packageId);

  if (!result.success) {
    return NextResponse.json({ error: result.error }, { status: 500 });
  }

  return NextResponse.json({ success: true });
}
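
// repairPackageAction is imported lazily inside the handler (assumption: to
// keep the server-action module out of this route's top-level import graph);
// the action performs its own auth check, so the bell UI and the /stls page
// share a single repair code path.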

27  src/app/api/notifications/route.ts  Normal file
@@ -0,0 +1,27 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import {
  getRecentNotifications,
  getUnreadNotificationCount,
} from "@/data/notification.queries";

export const dynamic = "force-dynamic";

export async function GET() {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const [notifications, unreadCount] = await Promise.all([
    getRecentNotifications(30),
    getUnreadNotificationCount(),
  ]);

  const serialized = notifications.map((n) => ({
    ...n,
    createdAt: n.createdAt.toISOString(),
  }));

  return NextResponse.json({ notifications: serialized, unreadCount });
}
21  src/app/api/packages/linked/route.ts  Normal file
@@ -0,0 +1,21 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { getLinkedPackageIds } from "@/data/kickstarter.queries";

export const dynamic = "force-dynamic";

export async function GET(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const { searchParams } = new URL(request.url);
  const kickstarterId = searchParams.get("kickstarterId");
  if (!kickstarterId) {
    return NextResponse.json({ error: "kickstarterId required" }, { status: 400 });
  }

  const packageIds = await getLinkedPackageIds(kickstarterId);
  return NextResponse.json({ packageIds });
}
26  src/app/api/packages/search/route.ts  Normal file
@@ -0,0 +1,26 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { searchPackagesForLinking } from "@/data/kickstarter.queries";

export const dynamic = "force-dynamic";

export async function GET(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const { searchParams } = new URL(request.url);
  const query = searchParams.get("q") ?? "";
  const limit = Math.min(Number(searchParams.get("limit") ?? "20"), 50);

  const packages = await searchPackagesForLinking(query, limit);

  // Serialize BigInt for JSON
  const serialized = packages.map((p) => ({
    ...p,
    fileSize: p.fileSize.toString(),
  }));

  return NextResponse.json({ packages: serialized });
}
43  src/app/api/uploads/[id]/route.ts  Normal file
@@ -0,0 +1,43 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

export async function GET(
  _request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  const { id } = await params;

  const upload = await prisma.manualUpload.findUnique({
    where: { id },
    include: {
      files: {
        select: { id: true, fileName: true, fileSize: true, packageId: true },
      },
    },
  });

  if (!upload || upload.userId !== session.user.id) {
    return NextResponse.json({ error: "Not found" }, { status: 404 });
  }

  return NextResponse.json({
    id: upload.id,
    status: upload.status,
    groupName: upload.groupName,
    errorMessage: upload.errorMessage,
    files: upload.files.map((f) => ({
      ...f,
      fileSize: f.fileSize.toString(),
    })),
    createdAt: upload.createdAt.toISOString(),
    completedAt: upload.completedAt?.toISOString() ?? null,
  });
}
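
// A foreign upload ID returns 404 rather than 403, so the response does not
// reveal whether the ID exists at all (assumption: this is intentional, given
// the combined not-found/ownership check above).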

83  src/app/api/uploads/route.ts  Normal file
@@ -0,0 +1,83 @@
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import { writeFile, mkdir } from "fs/promises";
import path from "path";

export const dynamic = "force-dynamic";

const UPLOAD_DIR = process.env.UPLOAD_DIR ?? "/data/uploads";
const MAX_FILE_SIZE = 4 * 1024 * 1024 * 1024; // 4GB per file

export async function POST(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  try {
    const formData = await request.formData();
    const files = formData.getAll("files") as File[];
    const groupName = formData.get("groupName") as string | null;

    if (!files.length) {
      return NextResponse.json({ error: "No files provided" }, { status: 400 });
    }

    // Create the upload record
    const upload = await prisma.manualUpload.create({
      data: {
        userId: session.user.id,
        groupName: groupName || (files.length > 1 ? files[0].name.replace(/\.[^.]+$/, "") : null),
        status: "PENDING",
      },
    });

    // Save files to shared volume
    const uploadDir = path.join(UPLOAD_DIR, upload.id);
    await mkdir(uploadDir, { recursive: true });

    for (const file of files) {
      if (file.size > MAX_FILE_SIZE) {
        return NextResponse.json(
          { error: `File "${file.name}" exceeds 4GB limit` },
          { status: 400 }
        );
      }

      const filePath = path.join(uploadDir, file.name);
      const buffer = Buffer.from(await file.arrayBuffer());
      await writeFile(filePath, buffer);

      await prisma.manualUploadFile.create({
        data: {
          uploadId: upload.id,
          fileName: file.name,
          filePath,
          fileSize: BigInt(file.size),
        },
      });
    }

    // Notify worker
    try {
      await prisma.$queryRawUnsafe(
        `SELECT pg_notify('manual_upload', $1)`,
        upload.id
      );
    } catch {
      // Best-effort
    }

    return NextResponse.json({
      uploadId: upload.id,
      fileCount: files.length,
      status: "PENDING",
    });
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Upload failed" },
      { status: 500 }
    );
  }
}
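
// Each file is buffered fully in memory via file.arrayBuffer() before the
// write; with the 4GB per-file cap that can be a very large allocation, so a
// streaming write may be worth considering if memory pressure shows up. Note
// also that the size check runs per file after the ManualUpload row exists, so
// an oversized file leaves a PENDING record (and earlier files) behind.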

@@ -6,6 +6,7 @@ import { Button } from "@/components/ui/button";
import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet";
import { UserMenu } from "./user-menu";
import { MobileSidebar } from "./mobile-sidebar";
import { NotificationBell } from "./notification-bell";

const routeTitles: Record<string, string> = {
  "/dashboard": "Dashboard",
@@ -38,7 +39,8 @@ export function Header() {

      <h1 className="text-lg font-semibold">{title}</h1>

-      <div className="ml-auto">
+      <div className="ml-auto flex items-center gap-1">
        <NotificationBell />
        <UserMenu />
      </div>
    </header>

268  src/components/layout/notification-bell.tsx  Normal file
@@ -0,0 +1,268 @@
"use client";
|
||||
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { Bell, AlertTriangle, AlertCircle, Info, CheckCircle2, X, Trash2 } from "lucide-react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import {
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverTrigger,
|
||||
} from "@/components/ui/popover";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
import { toast } from "sonner";
|
||||
|
||||
interface Notification {
|
||||
id: string;
|
||||
type: string;
|
||||
severity: "INFO" | "WARNING" | "ERROR";
|
||||
title: string;
|
||||
message: string;
|
||||
isRead: boolean;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
const severityIcon = {
|
||||
INFO: Info,
|
||||
WARNING: AlertTriangle,
|
||||
ERROR: AlertCircle,
|
||||
};
|
||||
|
||||
const severityColor = {
|
||||
INFO: "text-blue-400",
|
||||
WARNING: "text-orange-400",
|
||||
ERROR: "text-red-400",
|
||||
};
|
||||
|
||||
export function NotificationBell() {
|
||||
const [notifications, setNotifications] = useState<Notification[]>([]);
|
||||
const [unreadCount, setUnreadCount] = useState(0);
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
const fetchNotifications = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetch("/api/notifications");
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
setNotifications(data.notifications ?? []);
|
||||
setUnreadCount(data.unreadCount ?? 0);
|
||||
}
|
||||
} catch {
|
||||
// Ignore fetch errors
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Poll every 30 seconds + on mount
|
||||
useEffect(() => {
|
||||
fetchNotifications();
|
||||
const interval = setInterval(fetchNotifications, 30_000);
|
||||
return () => clearInterval(interval);
|
||||
}, [fetchNotifications]);
|
||||
|
||||
// Refresh when popover opens
|
||||
useEffect(() => {
|
||||
if (open) fetchNotifications();
|
||||
}, [open, fetchNotifications]);
|
||||
|
||||
async function handleMarkAllRead() {
|
||||
try {
|
||||
await fetch("/api/notifications/read", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({}),
|
||||
});
|
||||
setNotifications((prev) => prev.map((n) => ({ ...n, isRead: true })));
|
||||
setUnreadCount(0);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function handleMarkRead(id: string) {
|
||||
try {
|
||||
await fetch("/api/notifications/read", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ id }),
|
||||
});
|
||||
setNotifications((prev) =>
|
||||
prev.map((n) => (n.id === id ? { ...n, isRead: true } : n))
|
||||
);
|
||||
setUnreadCount((c) => Math.max(0, c - 1));
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function handleDismiss(id: string) {
|
||||
try {
|
||||
await fetch("/api/notifications/read", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ id, action: "dismiss" }),
|
||||
});
|
||||
setNotifications((prev) => prev.filter((n) => n.id !== id));
|
||||
setUnreadCount((c) => Math.max(0, c - 1));
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function handleClearAll() {
|
||||
try {
|
||||
await fetch("/api/notifications/read", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ action: "clear" }),
|
||||
});
|
||||
setNotifications([]);
|
||||
setUnreadCount(0);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRepair(notificationId: string) {
|
||||
try {
|
||||
const res = await fetch("/api/notifications/repair", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ notificationId }),
|
||||
});
|
||||
if (res.ok) {
|
||||
toast.success("Repair scheduled — package will be re-processed on next cycle");
|
||||
fetchNotifications();
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
function formatTime(iso: string): string {
|
||||
const d = new Date(iso);
|
||||
const now = new Date();
|
||||
const diffMs = now.getTime() - d.getTime();
|
||||
const diffMin = Math.floor(diffMs / 60_000);
|
||||
if (diffMin < 1) return "just now";
|
||||
if (diffMin < 60) return `${diffMin}m ago`;
|
||||
const diffHr = Math.floor(diffMin / 60);
|
||||
if (diffHr < 24) return `${diffHr}h ago`;
|
||||
const diffDay = Math.floor(diffHr / 24);
|
||||
return `${diffDay}d ago`;
|
||||
}
|
||||
|
||||
return (
|
||||
<Popover open={open} onOpenChange={setOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant="ghost" size="icon" className="relative h-9 w-9">
|
||||
<Bell className="h-4 w-4" />
|
||||
{unreadCount > 0 && (
|
||||
<Badge
|
||||
variant="destructive"
|
||||
className="absolute -top-1 -right-1 h-4 min-w-4 px-1 text-[10px] leading-none"
|
||||
>
|
||||
{unreadCount > 99 ? "99+" : unreadCount}
|
||||
</Badge>
|
||||
)}
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-96 p-0" align="end">
|
||||
<div className="flex items-center justify-between border-b px-4 py-3">
|
||||
<h3 className="text-sm font-semibold">Notifications</h3>
|
||||
<div className="flex items-center gap-1">
|
||||
{unreadCount > 0 && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-7 text-xs"
|
||||
onClick={handleMarkAllRead}
|
||||
>
|
||||
Mark all read
|
||||
</Button>
|
||||
)}
|
||||
{notifications.length > 0 && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-7 text-xs text-muted-foreground"
|
||||
onClick={handleClearAll}
|
||||
>
|
||||
<Trash2 className="h-3 w-3 mr-1" />
|
||||
Clear
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<ScrollArea className="max-h-[400px]">
|
||||
{notifications.length === 0 ? (
|
||||
<div className="flex flex-col items-center justify-center py-8 text-muted-foreground">
|
||||
<CheckCircle2 className="h-8 w-8 mb-2 opacity-50" />
|
||||
<p className="text-sm">All clear!</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="divide-y">
|
||||
{notifications.map((n) => {
|
||||
const Icon = severityIcon[n.severity] ?? Info;
|
||||
const color = severityColor[n.severity] ?? "text-muted-foreground";
|
||||
return (
|
||||
<div
|
||||
key={n.id}
|
||||
className={`flex w-full gap-3 px-4 py-3 text-left hover:bg-muted/50 transition-colors ${
|
||||
!n.isRead ? "bg-muted/20" : ""
|
||||
}`}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onClick={() => !n.isRead && handleMarkRead(n.id)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" || e.key === " ") {
|
||||
if (!n.isRead) handleMarkRead(n.id);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<Icon className={`h-4 w-4 mt-0.5 shrink-0 ${color}`} />
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2">
|
||||
<p className={`text-sm truncate ${!n.isRead ? "font-medium" : ""}`}>
|
||||
{n.title}
|
||||
</p>
|
||||
{!n.isRead && (
|
||||
<span className="h-2 w-2 rounded-full bg-primary shrink-0" />
|
||||
)}
|
||||
<button
|
||||
className="ml-auto shrink-0 p-0.5 rounded hover:bg-muted text-muted-foreground hover:text-foreground"
|
||||
onClick={(e) => { e.stopPropagation(); handleDismiss(n.id); }}
|
||||
title="Dismiss"
|
||||
>
|
||||
<X className="h-3 w-3" />
|
||||
</button>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground line-clamp-2 mt-0.5">
|
||||
{n.message}
|
||||
</p>
|
||||
<p className="text-[10px] text-muted-foreground mt-1">
|
||||
{formatTime(n.createdAt)}
|
||||
</p>
|
||||
{(n.type === "MISSING_PART" || n.type === "HASH_MISMATCH") && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs mt-1"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleRepair(n.id);
|
||||
}}
|
||||
>
|
||||
Repair
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</ScrollArea>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,6 @@
"use client";

- import { type Table as TanStackTable, flexRender } from "@tanstack/react-table";
+ import { type Table as TanStackTable, type Row, flexRender } from "@tanstack/react-table";
import {
  Table,
  TableBody,
@@ -10,13 +10,15 @@ import {
  TableRow,
} from "@/components/ui/table";
import { EmptyState } from "./empty-state";
+ import { cn } from "@/lib/utils";

interface DataTableProps<TData> {
  table: TanStackTable<TData>;
  emptyMessage?: string;
+ rowClassName?: (row: Row<TData>) => string;
}

- export function DataTable<TData>({ table, emptyMessage }: DataTableProps<TData>) {
+ export function DataTable<TData>({ table, emptyMessage, rowClassName }: DataTableProps<TData>) {
  return (
    <div className="rounded-md border border-border">
      <Table>
@@ -36,7 +38,10 @@ export function DataTable<TData>({ table, emptyMessage }: DataTableProps<TData>)
        <TableBody>
          {table.getRowModel().rows?.length ? (
            table.getRowModel().rows.map((row) => (
-             <TableRow key={row.id} className="h-10 border-border hover:bg-muted/50">
+             <TableRow
+               key={row.id}
+               className={cn("h-10 border-border hover:bg-muted/50", rowClassName?.(row))}
+             >
                {row.getVisibleCells().map((cell) => (
                  <TableCell key={cell.id} className="py-1.5 text-sm">
                    {flexRender(cell.column.columnDef.cell, cell.getContext())}
@@ -95,3 +95,34 @@ export async function getKickstarterHosts() {
    include: { _count: { select: { kickstarters: true } } },
  });
}

export async function searchPackagesForLinking(query: string, limit = 20) {
  if (!query || query.length < 2) return [];

  return prisma.package.findMany({
    where: {
      OR: [
        { fileName: { contains: query, mode: "insensitive" } },
        { creator: { contains: query, mode: "insensitive" } },
      ],
    },
    orderBy: { indexedAt: "desc" },
    take: limit,
    select: {
      id: true,
      fileName: true,
      fileSize: true,
      archiveType: true,
      creator: true,
      fileCount: true,
    },
  });
}

export async function getLinkedPackageIds(kickstarterId: string): Promise<string[]> {
  const links = await prisma.kickstarterPackage.findMany({
    where: { kickstarterId },
    select: { packageId: true },
  });
  return links.map((l) => l.packageId);
}

45 src/data/notification.queries.ts Normal file
@@ -0,0 +1,45 @@
import { prisma } from "@/lib/prisma";

export async function getUnreadNotificationCount(): Promise<number> {
  return prisma.systemNotification.count({
    where: { isRead: false },
  });
}

export async function getRecentNotifications(limit = 20) {
  return prisma.systemNotification.findMany({
    orderBy: { createdAt: "desc" },
    take: limit,
    select: {
      id: true,
      type: true,
      severity: true,
      title: true,
      message: true,
      isRead: true,
      createdAt: true,
    },
  });
}

export async function markNotificationRead(id: string) {
  return prisma.systemNotification.update({
    where: { id },
    data: { isRead: true },
  });
}

export async function markAllNotificationsRead() {
  return prisma.systemNotification.updateMany({
    where: { isRead: false },
    data: { isRead: true },
  });
}

export async function dismissNotification(id: string) {
  return prisma.systemNotification.delete({ where: { id } });
}

export async function clearAllNotifications() {
  return prisma.systemNotification.deleteMany({});
}
@@ -4,6 +4,9 @@ import type {
  PackageDetail,
  PackageFileItem,
  IngestionAccountStatus,
  SkippedPackageItem,
  DisplayItem,
  PackageGroupRow,
} from "./types";

export async function listPackages(options: {
@@ -57,6 +60,8 @@ export async function listPackages(options: {
    tags: pkg.tags,
    indexedAt: pkg.indexedAt.toISOString(),
    sourceChannel: pkg.sourceChannel,
    matchedFileCount: 0,
    matchedByContent: false,
  }));

  return {
@@ -70,6 +75,177 @@ export async function listPackages(options: {
  };
}

export async function listDisplayItems(options: {
  page: number;
  limit: number;
  channelId?: string;
  creator?: string;
  tag?: string;
  sortBy: "indexedAt" | "fileName" | "fileSize";
  order: "asc" | "desc";
}): Promise<{ items: DisplayItem[]; pagination: { page: number; limit: number; total: number; totalPages: number } }> {
  const { page, limit, channelId, creator, tag, sortBy, order } = options;

  // Build WHERE clause fragments for raw SQL
  const conditions: string[] = [];
  const params: unknown[] = [];
  let paramIdx = 1;

  if (channelId) {
    conditions.push(`p."sourceChannelId" = $${paramIdx++}`);
    params.push(channelId);
  }
  if (creator) {
    conditions.push(`p."creator" = $${paramIdx++}`);
    params.push(creator);
  }
  if (tag) {
    conditions.push(`$${paramIdx++} = ANY(p."tags")`);
    params.push(tag);
  }

  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";

  const sortCol = sortBy === "fileName" ? `"fileName"` : sortBy === "fileSize" ? `"fileSize"` : `"indexedAt"`;
  const sortDir = order === "asc" ? "ASC" : "DESC";

  // Step 1: Count display items
  const countResult = await prisma.$queryRawUnsafe<[{ count: bigint }]>(
    `SELECT COUNT(*) AS count FROM (
       SELECT DISTINCT COALESCE(p."packageGroupId", p."id") AS display_id
       FROM packages p
       ${whereClause}
     ) AS display_items`,
    ...params
  );
  const total = Number(countResult[0].count);

  // Step 2: Get display item IDs for this page
  const limitParam = paramIdx++;
  const offsetParam = paramIdx++;
  const displayRows = await prisma.$queryRawUnsafe<
    { display_id: string; display_type: string }[]
  >(
    `SELECT
       COALESCE(p."packageGroupId", p."id") AS display_id,
       CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END AS display_type,
       MAX(p.${sortCol}) AS sort_value
     FROM packages p
     ${whereClause}
     GROUP BY COALESCE(p."packageGroupId", p."id"),
       CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END
     ORDER BY sort_value ${sortDir}
     LIMIT $${limitParam} OFFSET $${offsetParam}`,
    ...params, limit, (page - 1) * limit
  );

  // Step 3: Fetch full data
  const groupIds = displayRows.filter((r) => r.display_type === "group").map((r) => r.display_id);
  const packageIds = displayRows.filter((r) => r.display_type === "package").map((r) => r.display_id);

  const standalonePackages = packageIds.length > 0
    ? await prisma.package.findMany({
        where: { id: { in: packageIds } },
        select: {
          id: true, fileName: true, fileSize: true, contentHash: true,
          archiveType: true, fileCount: true, isMultipart: true,
          indexedAt: true, creator: true, tags: true, previewData: true,
          sourceChannel: { select: { id: true, title: true } },
        },
      })
    : [];

  const groups = groupIds.length > 0
    ? await prisma.packageGroup.findMany({
        where: { id: { in: groupIds } },
        select: {
          id: true, name: true, previewData: true,
          sourceChannel: { select: { id: true, title: true } },
          packages: {
            select: {
              id: true, fileName: true, fileSize: true, contentHash: true,
              archiveType: true, fileCount: true, isMultipart: true,
              indexedAt: true, creator: true, tags: true, previewData: true,
              sourceChannel: { select: { id: true, title: true } },
            },
            orderBy: { indexedAt: "desc" },
          },
        },
      })
    : [];

  // Build DisplayItem array in the original sort order
  const packageMap = new Map(standalonePackages.map((p) => [p.id, p]));
  const groupMap = new Map(groups.map((g) => [g.id, g]));

  const items: DisplayItem[] = displayRows.map((row) => {
    if (row.display_type === "package") {
      const pkg = packageMap.get(row.display_id)!;
      return {
        type: "package" as const,
        data: {
          id: pkg.id,
          fileName: pkg.fileName,
          fileSize: pkg.fileSize.toString(),
          contentHash: pkg.contentHash,
          archiveType: pkg.archiveType,
          fileCount: pkg.fileCount,
          isMultipart: pkg.isMultipart,
          hasPreview: pkg.previewData !== null,
          creator: pkg.creator,
          tags: pkg.tags,
          indexedAt: pkg.indexedAt.toISOString(),
          sourceChannel: pkg.sourceChannel,
          matchedFileCount: 0,
          matchedByContent: false,
        },
      };
    } else {
      const grp = groupMap.get(row.display_id)!;
      const allTags = [...new Set(grp.packages.flatMap((p) => p.tags))];
      const archiveTypes = [...new Set(grp.packages.map((p) => p.archiveType))] as PackageGroupRow["archiveTypes"];
      return {
        type: "group" as const,
        data: {
          id: grp.id,
          name: grp.name,
          hasPreview: grp.previewData !== null,
          totalFileSize: grp.packages.reduce((sum, p) => sum + p.fileSize, BigInt(0)).toString(),
          totalFileCount: grp.packages.reduce((sum, p) => sum + p.fileCount, 0),
          packageCount: grp.packages.length,
          combinedTags: allTags,
          archiveTypes,
          latestIndexedAt: grp.packages.length > 0
            ? grp.packages[0].indexedAt.toISOString()
            : new Date().toISOString(),
          sourceChannel: grp.sourceChannel,
          packages: grp.packages.map((pkg) => ({
            id: pkg.id,
            fileName: pkg.fileName,
            fileSize: pkg.fileSize.toString(),
            contentHash: pkg.contentHash,
            archiveType: pkg.archiveType,
            fileCount: pkg.fileCount,
            isMultipart: pkg.isMultipart,
            hasPreview: pkg.previewData !== null,
            creator: pkg.creator,
            tags: pkg.tags,
            indexedAt: pkg.indexedAt.toISOString(),
            sourceChannel: pkg.sourceChannel,
            matchedFileCount: 0,
            matchedByContent: false,
          })),
        },
      };
    }
  });

  return {
    items,
    pagination: { page, limit, total, totalPages: Math.ceil(total / limit) },
  };
}

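The COALESCE(p."packageGroupId", p."id") identity in the raw SQL above is the key design choice here: grouped packages collapse into one display row keyed by their group ID, while ungrouped packages stand alone keyed by their own ID, so a single paginated GROUP BY query covers both row kinds without a UNION or two round trips.
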
export async function getPackageById(
  id: string
): Promise<PackageDetail | null> {
@@ -106,6 +282,8 @@ export async function getPackageById(
    partCount: pkg.partCount,
    indexedAt: pkg.indexedAt.toISOString(),
    sourceChannel: pkg.sourceChannel,
    matchedFileCount: 0,
    matchedByContent: false,
    destChannel,
    destMessageId: pkg.destMessageId?.toString() ?? null,
    sourceMessageId: pkg.sourceMessageId.toString(),
@@ -162,6 +340,30 @@ export async function listPackageFiles(options: {
  };
}

async function fullTextSearchPackageIds(query: string, limit: number): Promise<string[]> {
  // Convert user query to tsquery — handle multi-word by joining with &
  const tsQuery = query
    .trim()
    .split(/\s+/)
    .filter((w) => w.length >= 2)
    .map((w) => w.replace(/[^a-zA-Z0-9]/g, ""))
    .filter(Boolean)
    .join(" & ");

  if (!tsQuery) return [];

  const results = await prisma.$queryRawUnsafe<{ id: string }[]>(
    `SELECT id FROM packages
     WHERE "searchVector" @@ to_tsquery('english', $1)
     ORDER BY ts_rank("searchVector", to_tsquery('english', $1)) DESC
     LIMIT $2`,
    tsQuery,
    limit
  );

  return results.map((r) => r.id);
}

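As a worked illustration of the sanitisation above (hypothetical input): the query Dragon Stash (v2)! splits into ["Dragon", "Stash", "(v2)!"], punctuation is stripped to ["Dragon", "Stash", "v2"], and the words are joined into the tsquery Dragon & Stash & v2, which to_tsquery('english', …) then lowercases and stems before matching against "searchVector".
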
export async function searchPackages(options: {
  query: string;
  page: number;
@@ -171,23 +373,38 @@ export async function searchPackages(options: {
  const q = options.query;

  if (options.searchIn === "files" || options.searchIn === "both") {
-   // Search in package files, return parent packages
-   const fileMatches = await prisma.packageFile.findMany({
+   // Get per-package file match counts
+   const fileMatches = await prisma.packageFile.groupBy({
+     by: ["packageId"],
      where: {
        OR: [
          { fileName: { contains: q, mode: "insensitive" } },
          { path: { contains: q, mode: "insensitive" } },
        ],
      },
-     select: { packageId: true },
-     distinct: ["packageId"],
+     _count: { _all: true },
    });

-   const packageIds = fileMatches.map((f) => f.packageId);
+   const fileMatchMap = new Map(
+     fileMatches.map((m) => [m.packageId, m._count._all])
+   );
+   const fileMatchedIds = fileMatches.map((f) => f.packageId);
+
+   // Try full-text search first (better ranking, handles word stemming)
+   let ftsPackageNameIds: string[] = [];
+   if (options.searchIn === "both" && q.length >= 3) {
+     try {
+       ftsPackageNameIds = await fullTextSearchPackageIds(q, 200);
+     } catch {
+       // FTS failed — fall back to ILIKE below
+     }
+   }

    const packageNameIds =
      options.searchIn === "both"
-       ? (
+       ? ftsPackageNameIds.length > 0
+         ? ftsPackageNameIds
+         : (
            await prisma.package.findMany({
              where: { fileName: { contains: q, mode: "insensitive" } },
              select: { id: true },
@@ -195,7 +412,16 @@ export async function searchPackages(options: {
          ).map((p) => p.id)
        : [];

-   const allIds = [...new Set([...packageIds, ...packageNameIds])];
+   // Also match by group name
+   const groupNameMatches = await prisma.package.findMany({
+     where: {
+       packageGroup: { name: { contains: q, mode: "insensitive" } },
+     },
+     select: { id: true },
+   });
+   const groupMatchedIds = groupNameMatches.map((p) => p.id);
+
+   const allIds = [...new Set([...fileMatchedIds, ...packageNameIds, ...groupMatchedIds])];

    const [items, total] = await Promise.all([
      prisma.package.findMany({
@@ -234,6 +460,8 @@ export async function searchPackages(options: {
      tags: pkg.tags,
      indexedAt: pkg.indexedAt.toISOString(),
      sourceChannel: pkg.sourceChannel,
+     matchedFileCount: fileMatchMap.get(pkg.id) ?? 0,
+     matchedByContent: fileMatchMap.has(pkg.id),
    }));

    return {
@@ -329,3 +557,275 @@ export async function getIngestionStatus(): Promise<IngestionAccountStatus[]> {

  return statuses;
}

export async function listSkippedPackages(options: {
  page: number;
  limit: number;
  reason?: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED";
}) {
  const where: Record<string, unknown> = {};
  if (options.reason) where.reason = options.reason;

  const [items, total] = await Promise.all([
    prisma.skippedPackage.findMany({
      where,
      orderBy: { createdAt: "desc" },
      skip: (options.page - 1) * options.limit,
      take: options.limit,
      include: {
        sourceChannel: { select: { id: true, title: true } },
      },
    }),
    prisma.skippedPackage.count({ where }),
  ]);

  const mapped: SkippedPackageItem[] = items.map((s) => ({
    id: s.id,
    fileName: s.fileName,
    fileSize: s.fileSize.toString(),
    reason: s.reason,
    errorMessage: s.errorMessage,
    sourceChannel: s.sourceChannel,
    sourceMessageId: s.sourceMessageId.toString(),
    isMultipart: s.isMultipart,
    partCount: s.partCount,
    createdAt: s.createdAt.toISOString(),
  }));

  return {
    items: mapped,
    pagination: {
      page: options.page,
      limit: options.limit,
      total,
      totalPages: Math.ceil(total / options.limit),
    },
  };
}

export async function countSkippedPackages(): Promise<number> {
  return prisma.skippedPackage.count();
}

export async function listUngroupedPackages(options: {
  page: number;
  limit: number;
}) {
  const { page, limit } = options;
  const skip = (page - 1) * limit;

  const where = { packageGroupId: null, destMessageId: { not: null } };

  const [items, total] = await Promise.all([
    prisma.package.findMany({
      where,
      orderBy: { indexedAt: "desc" },
      skip,
      take: limit,
      select: {
        id: true,
        fileName: true,
        fileSize: true,
        archiveType: true,
        creator: true,
        fileCount: true,
        isMultipart: true,
        partCount: true,
        tags: true,
        indexedAt: true,
        previewData: true,
        sourceChannel: { select: { id: true, title: true } },
      },
    }),
    prisma.package.count({ where }),
  ]);

  return {
    items: items.map((p) => ({
      id: p.id,
      fileName: p.fileName,
      fileSize: p.fileSize.toString(),
      contentHash: "",
      archiveType: p.archiveType,
      creator: p.creator,
      fileCount: p.fileCount,
      isMultipart: p.isMultipart,
      partCount: p.partCount,
      tags: p.tags,
      indexedAt: p.indexedAt.toISOString(),
      hasPreview: !!p.previewData,
      sourceChannel: p.sourceChannel,
      matchedFileCount: 0,
      matchedByContent: false,
    })),
    pagination: {
      total,
      totalPages: Math.ceil(total / limit),
      page,
      limit,
    },
  };
}

export async function countUngroupedPackages(): Promise<number> {
  return prisma.package.count({
    where: { packageGroupId: null, destMessageId: { not: null } },
  });
}

export async function getPackageGroup(groupId: string) {
  return prisma.packageGroup.findUnique({
    where: { id: groupId },
    select: {
      id: true, name: true, previewData: true, mediaAlbumId: true,
      sourceChannelId: true, createdAt: true,
      sourceChannel: { select: { id: true, title: true } },
      packages: {
        select: {
          id: true, fileName: true, fileSize: true, archiveType: true,
          fileCount: true, creator: true, tags: true,
        },
        orderBy: { indexedAt: "desc" },
      },
    },
  });
}

export async function updatePackageGroupName(groupId: string, name: string) {
  return prisma.packageGroup.update({
    where: { id: groupId },
    data: { name: name.trim() },
  });
}

export async function updatePackageGroupPreview(groupId: string, previewData: Buffer) {
  return prisma.packageGroup.update({
    where: { id: groupId },
    data: { previewData: new Uint8Array(previewData) },
  });
}

export async function createManualGroup(name: string, packageIds: string[]) {
  // Verify all packages belong to the same channel
  const pkgs = await prisma.package.findMany({
    where: { id: { in: packageIds } },
    select: { sourceChannelId: true },
  });
  if (pkgs.length === 0) {
    throw new Error("No matching packages found");
  }
  const channelIds = new Set(pkgs.map((p) => p.sourceChannelId));
  if (channelIds.size > 1) {
    throw new Error("Cannot group packages from different channels");
  }

  const firstPkg = pkgs[0];
  const group = await prisma.packageGroup.create({
    data: {
      name: name.trim(),
      sourceChannelId: firstPkg.sourceChannelId,
    },
  });

  await prisma.package.updateMany({
    where: { id: { in: packageIds } },
    data: { packageGroupId: group.id },
  });

  // Learn a grouping rule from the manual override
  try {
    const linkedPkgs = await prisma.package.findMany({
      where: { id: { in: packageIds } },
      select: { fileName: true, creator: true },
    });

    // Extract the common filename pattern
    const fileNames = linkedPkgs.map((p) => p.fileName);
    let pattern = "";
    if (fileNames.length > 1) {
      // Find longest common prefix
      let prefix = fileNames[0];
      for (let i = 1; i < fileNames.length; i++) {
        while (!fileNames[i].startsWith(prefix)) {
          prefix = prefix.slice(0, -1);
          if (!prefix) break;
        }
      }
      const trimmed = prefix.replace(/[\s\-_.(]+$/, "");
      if (trimmed.length >= 4) {
        pattern = trimmed;
      }
    }

    // Fall back to shared creator
    if (!pattern) {
      const creators = [...new Set(linkedPkgs.map((p) => p.creator).filter(Boolean))];
      if (creators.length === 1 && creators[0]) {
        pattern = creators[0];
      }
    }

    if (pattern) {
      await prisma.groupingRule.create({
        data: {
          sourceChannelId: firstPkg.sourceChannelId,
          pattern,
          signalType: "MANUAL",
          createdByGroupId: group.id,
        },
      });
    }
  } catch {
    // Best-effort — don't fail the group creation if rule learning fails
  }

  // Clean up empty groups left behind
  await prisma.packageGroup.deleteMany({
    where: { packages: { none: {} }, id: { not: group.id } },
  });

  return group;
}

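To illustrate the rule learning above with hypothetical filenames: manually grouping MegaPack - Vol 1.rar and MegaPack - Vol 2.rar yields the longest common prefix "MegaPack - Vol " (trailing spaces, dashes, underscores, dots, and opening parentheses are trimmed off), leaving "MegaPack - Vol". That is at least 4 characters, so it is stored as a MANUAL GroupingRule and can be matched against future ungrouped uploads on the same channel.
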
export async function addPackagesToGroup(packageIds: string[], groupId: string) {
  await prisma.package.updateMany({
    where: { id: { in: packageIds } },
    data: { packageGroupId: groupId },
  });
  await prisma.packageGroup.deleteMany({
    where: { packages: { none: {} } },
  });
}

export async function removePackageFromGroup(packageId: string) {
  const pkg = await prisma.package.findUniqueOrThrow({
    where: { id: packageId },
    select: { packageGroupId: true },
  });
  if (!pkg.packageGroupId) return;
  await prisma.package.update({
    where: { id: packageId },
    data: { packageGroupId: null },
  });
  await prisma.packageGroup.deleteMany({
    where: { id: pkg.packageGroupId, packages: { none: {} } },
  });
}

export async function dissolveGroup(groupId: string) {
  await prisma.package.updateMany({
    where: { packageGroupId: groupId },
    data: { packageGroupId: null },
  });
  await prisma.packageGroup.delete({ where: { id: groupId } });
}

export async function mergeGroups(targetGroupId: string, sourceGroupId: string) {
  // Move all packages from source group to target group
  await prisma.package.updateMany({
    where: { packageGroupId: sourceGroupId },
    data: { packageGroupId: targetGroupId },
  });
  // Delete the now-empty source group
  await prisma.packageGroup.delete({ where: { id: sourceGroupId } });
}

@@ -14,6 +14,8 @@ export interface PackageListItem {
    id: string;
    title: string;
  };
  matchedFileCount: number;
  matchedByContent: boolean;
}

export interface PackageDetail extends PackageListItem {
@@ -40,6 +42,22 @@ export interface PackageFileItem {
  crc32: string | null;
}

export interface SkippedPackageItem {
  id: string;
  fileName: string;
  fileSize: string;
  reason: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED";
  errorMessage: string | null;
  sourceChannel: {
    id: string;
    title: string;
  };
  sourceMessageId: string;
  isMultipart: boolean;
  partCount: number;
  createdAt: string;
}

export interface PaginatedResponse<T> {
  items: T[];
  pagination: {
@@ -50,6 +68,24 @@ export interface PaginatedResponse<T> {
  };
}

export interface PackageGroupRow {
  id: string;
  name: string;
  hasPreview: boolean;
  totalFileSize: string;
  totalFileCount: number;
  packageCount: number;
  combinedTags: string[];
  archiveTypes: ("ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT")[];
  latestIndexedAt: string;
  sourceChannel: { id: string; title: string };
  packages: PackageListItem[];
}

export type DisplayItem =
  | { type: "package"; data: PackageListItem }
  | { type: "group"; data: PackageGroupRow };

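DisplayItem is a discriminated union, so consumers narrow on the type field before touching the variant-specific data. A minimal sketch (the helper name is illustrative, not from this diff):

function displayTitle(item: DisplayItem): string {
  // Narrowing on the discriminant gives typed access to each variant
  return item.type === "group" ? item.data.name : item.data.fileName;
}
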
export interface IngestionAccountStatus {
  id: string;
  displayName: string | null;

@@ -10,6 +10,9 @@ export interface TelegramMessage {
  fileId: string;
  fileSize: bigint;
  date: Date;
  mediaAlbumId?: string;
  replyToMessageId?: bigint; // NEW
  caption?: string; // NEW
}

export interface ArchiveSet {

@@ -3,27 +3,37 @@ import { stat } from "fs/promises";
import path from "path";
import { pipeline } from "stream/promises";
import { childLogger } from "../util/logger.js";
+ import { config } from "../util/config.js";

const log = childLogger("split");

- /** 2GB in bytes — Telegram's file size limit */
- const MAX_PART_SIZE = 2n * 1024n * 1024n * 1024n;
+ /**
+  * Maximum part size for Telegram upload. Configurable via MAX_PART_SIZE_MB env var.
+  * Default: 1950 MiB (safely under 2GB non-Premium limit).
+  * Premium: set to 3900 MiB (safely under 4GB Premium limit).
+  *
+  * At exactly 2/4 GiB, TDLib's internal 512KB chunking can exceed Telegram's
+  * 4000-part threshold, causing FILE_PARTS_INVALID errors.
+  */
+ const MAX_PART_SIZE = BigInt(config.maxPartSizeMB) * 1024n * 1024n;

/**
- * Split a file into ≤2GB parts using byte-level splitting.
- * Returns paths to the split parts. If the file is already ≤2GB, returns the original path.
+ * Split a file into parts using byte-level splitting.
+ * Returns paths to the split parts. If the file fits in one part, returns the original path.
+ * Pass maxPartSize to override the global default (e.g., 3950 MiB for Premium accounts).
 */
- export async function byteLevelSplit(filePath: string): Promise<string[]> {
+ export async function byteLevelSplit(filePath: string, maxPartSize?: bigint): Promise<string[]> {
+   const effectiveMax = maxPartSize ?? MAX_PART_SIZE;
  const stats = await stat(filePath);
  const fileSize = BigInt(stats.size);

- if (fileSize <= MAX_PART_SIZE) {
+ if (fileSize <= effectiveMax) {
    return [filePath];
  }

  const dir = path.dirname(filePath);
  const baseName = path.basename(filePath);
- const partSize = Number(MAX_PART_SIZE);
+ const partSize = Number(effectiveMax);
  const totalParts = Math.ceil(Number(fileSize) / partSize);
  const parts: string[] = [];
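
The rest of the split loop is elided in this hunk. A minimal usage sketch of the new signature (file path and override value are illustrative; the module path is assumed):

import { byteLevelSplit } from "./archive/split.js"; // module path assumed

// Split with the configured default (MAX_PART_SIZE_MB):
const parts = await byteLevelSplit("/tmp/DragonsStash.7z");

// Or override per call, e.g. for a Premium account (illustrative value):
const premiumParts = await byteLevelSplit("/tmp/DragonsStash.7z", 3900n * 1024n * 1024n);
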
119 worker/src/audit.ts Normal file
@@ -0,0 +1,119 @@
import { db } from "./db/client.js";
import { childLogger } from "./util/logger.js";

const log = childLogger("audit");

/**
 * Periodic integrity audit: checks all packages for consistency.
 * Creates SystemNotification records for any issues found.
 *
 * Checks performed:
 * 1. Multipart completeness: destMessageIds.length should match partCount
 * 2. Missing destination: packages with destChannelId but no destMessageId
 */
export async function runIntegrityAudit(): Promise<{ checked: number; issues: number }> {
  log.info("Starting integrity audit");

  let checked = 0;
  let issues = 0;

  // Check 1: Multipart packages with wrong number of destination message IDs
  const multipartPackages = await db.package.findMany({
    where: {
      isMultipart: true,
      partCount: { gt: 1 },
      destMessageId: { not: null },
    },
    select: {
      id: true,
      fileName: true,
      partCount: true,
      destMessageIds: true,
      sourceChannelId: true,
      sourceChannel: { select: { title: true } },
    },
  });

  checked += multipartPackages.length;

  for (const pkg of multipartPackages) {
    const actualParts = pkg.destMessageIds.length;
    // Only flag when we have >1 stored IDs but count doesn't match.
    // Packages with exactly 1 ID are legacy (backfilled from single destMessageId) — not actionable.
    if (actualParts > 1 && actualParts !== pkg.partCount) {
      issues++;

      // Check if we already have a notification for this
      const existing = await db.systemNotification.findFirst({
        where: {
          type: "MISSING_PART",
          context: { path: ["packageId"], equals: pkg.id },
        },
        select: { id: true },
      });

      if (!existing) {
        await db.systemNotification.create({
          data: {
            type: "MISSING_PART",
            severity: "WARNING",
            title: `Incomplete multipart: ${pkg.fileName}`,
            message: `Expected ${pkg.partCount} parts but only ${actualParts} destination message IDs stored`,
            context: {
              packageId: pkg.id,
              fileName: pkg.fileName,
              expectedParts: pkg.partCount,
              actualParts,
              sourceChannelId: pkg.sourceChannelId,
              channelTitle: pkg.sourceChannel.title,
            },
          },
        });

        log.warn(
          { packageId: pkg.id, fileName: pkg.fileName, expected: pkg.partCount, actual: actualParts },
          "Multipart package has mismatched part count"
        );
      }
    }
  }

  // Check 2: Packages with dest channel but no dest message (orphaned index)
  const orphanedCount = await db.package.count({
    where: {
      destChannelId: { not: null },
      destMessageId: null,
    },
  });

  if (orphanedCount > 0) {
    issues++;

    const existing = await db.systemNotification.findFirst({
      where: {
        type: "INTEGRITY_AUDIT",
        context: { path: ["check"], equals: "orphaned_index" },
        createdAt: { gte: new Date(Date.now() - 24 * 60 * 60 * 1000) },
      },
      select: { id: true },
    });

    if (!existing) {
      await db.systemNotification.create({
        data: {
          type: "INTEGRITY_AUDIT",
          severity: "INFO",
          title: `${orphanedCount} packages with missing destination message`,
          message: `Found ${orphanedCount} packages that have a destination channel set but no destination message ID. These may be from interrupted uploads.`,
          context: {
            check: "orphaned_index",
            count: orphanedCount,
          },
        },
      });
    }
  }

  log.info({ checked, issues }, "Integrity audit complete");
  return { checked, issues };
}
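How the audit gets triggered is not shown in this compare view. A minimal sketch, assuming it is wired to a timer alongside the scan cycle (the interval and call site are hypothetical):

import { runIntegrityAudit } from "./audit.js";

// Hypothetical scheduling: run once a day; the real trigger may differ.
setInterval(() => {
  runIntegrityAudit().catch((err) => console.error("integrity audit failed", err));
}, 24 * 60 * 60 * 1000);
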
@@ -5,7 +5,14 @@ import { config } from "../util/config.js";

const pool = new pg.Pool({
  connectionString: config.databaseUrl,
- max: 5,
+ // Pool needs headroom for: 2 account advisory locks (held for entire cycle),
+ // up to 2 concurrent hash locks, plus Prisma operations from both accounts.
+ // Previously max=5 caused pool exhaustion and indefinite hangs.
+ max: 15,
+ // Prevent pool.connect() from blocking forever when pool is exhausted.
+ // Throws an error after 30s so the operation can fail and retry instead of
+ // silently hanging for hours (as happened with the Turnbase.7z stall).
+ connectionTimeoutMillis: 30_000,
});

const adapter = new PrismaPg(pool);

@@ -79,3 +79,66 @@ export async function releaseLock(accountId: string): Promise<void> {
    client.release();
  }
}

/**
 * Derive a lock ID for a content hash. Prefixes with "hash:" so the resulting
 * 32-bit integer does not collide with account advisory lock IDs.
 */
function contentHashToLockId(contentHash: string): number {
  return hashToLockId(`hash:${contentHash}`);
}

/**
 * Acquire a per-content-hash advisory lock before uploading.
 * Prevents two concurrent workers from uploading the same archive
 * when both scan a shared source channel.
 *
 * Returns true if acquired (proceed with upload).
 * Returns false if already held (another worker is handling this archive — skip).
 *
 * MUST be released via releaseHashLock() after createPackageStub() completes,
 * including on all error paths (use try/finally).
 */
export async function tryAcquireHashLock(contentHash: string): Promise<boolean> {
  const lockId = contentHashToLockId(contentHash);
  const client = await pool.connect();
  try {
    const result = await client.query<{ pg_try_advisory_lock: boolean }>(
      "SELECT pg_try_advisory_lock($1)",
      [lockId]
    );
    const acquired = result.rows[0]?.pg_try_advisory_lock ?? false;
    if (acquired) {
      heldConnections.set(`hash:${contentHash}`, client);
      log.debug({ hash: contentHash.slice(0, 16), lockId }, "Hash lock acquired");
      return true;
    } else {
      client.release();
      log.debug({ hash: contentHash.slice(0, 16), lockId }, "Hash lock held by another worker — skipping");
      return false;
    }
  } catch (err) {
    client.release();
    throw err;
  }
}

/**
 * Release the per-content-hash advisory lock.
 * Call after createPackageStub() completes (or on any error path).
 */
export async function releaseHashLock(contentHash: string): Promise<void> {
  const lockId = contentHashToLockId(contentHash);
  const client = heldConnections.get(`hash:${contentHash}`);
  if (!client) {
    log.warn({ hash: contentHash.slice(0, 16) }, "No held connection for hash lock release");
    return;
  }
  try {
    await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
    log.debug({ hash: contentHash.slice(0, 16) }, "Hash lock released");
  } finally {
    heldConnections.delete(`hash:${contentHash}`);
    client.release();
  }
}

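A minimal sketch of the acquire/release contract documented above (the work inside the try block is a stand-in for the real upload pipeline):

const acquired = await tryAcquireHashLock(contentHash);
if (!acquired) {
  return; // another worker already owns this archive — skip it
}
try {
  // ...upload the archive and call createPackageStub(...) here...
} finally {
  // Released on success and on every error path, as the doc comment requires.
  await releaseHashLock(contentHash);
}
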
@@ -62,6 +62,117 @@ export async function packageExistsByHash(contentHash: string) {
  return pkg !== null;
}

/**
 * Find an already-uploaded package by content hash.
 * Used to detect orphaned uploads — files that reached Telegram
 * but whose package record was created from a previous successful run.
 */
export async function getUploadedPackageByHash(contentHash: string) {
  return db.package.findFirst({
    where: { contentHash, destMessageId: { not: null }, destChannelId: { not: null } },
    select: { destChannelId: true, destMessageId: true, destMessageIds: true },
  });
}

export interface CreatePackageStubInput {
  contentHash: string;
  fileName: string;
  fileSize: bigint;
  archiveType: ArchiveType;
  sourceChannelId: string;
  sourceMessageId: bigint;
  sourceTopicId?: bigint | null;
  destChannelId: string;
  destMessageId: bigint;
  destMessageIds: bigint[];
  isMultipart: boolean;
  partCount: number;
  ingestionRunId: string;
  creator?: string | null;
  tags?: string[];
}

/**
 * Write a minimal Package record immediately after Telegram confirms the upload.
 * Call this before preview/metadata extraction so recoverIncompleteUploads() can
 * detect and verify the package if the worker crashes mid-metadata.
 *
 * Follow with updatePackageWithMetadata() once file entries and preview are ready.
 */
export async function createPackageStub(
  input: CreatePackageStubInput
): Promise<{ id: string }> {
  const pkg = await db.package.create({
    data: {
      contentHash: input.contentHash,
      fileName: input.fileName,
      fileSize: input.fileSize,
      archiveType: input.archiveType,
      sourceChannelId: input.sourceChannelId,
      sourceMessageId: input.sourceMessageId,
      sourceTopicId: input.sourceTopicId ?? undefined,
      destChannelId: input.destChannelId,
      destMessageId: input.destMessageId,
      destMessageIds: input.destMessageIds,
      isMultipart: input.isMultipart,
      partCount: input.partCount,
      fileCount: 0,
      ingestionRunId: input.ingestionRunId,
      creator: input.creator ?? undefined,
      tags: input.tags?.length ? input.tags : undefined,
    },
    select: { id: true },
  });

  try {
    await db.$queryRawUnsafe(
      `SELECT pg_notify('new_package', $1)`,
      JSON.stringify({
        packageId: pkg.id,
        fileName: input.fileName,
        creator: input.creator ?? null,
        tags: input.tags ?? [],
      })
    );
  } catch {
    // Best-effort
  }

  return pkg;
}

/**
 * Update a stub Package with file entries and preview after metadata extraction.
 * Called as Phase 2 of the two-phase write after createPackageStub().
 */
export async function updatePackageWithMetadata(
  packageId: string,
  input: {
    files: {
      path: string;
      fileName: string;
      extension: string | null;
      compressedSize: bigint;
      uncompressedSize: bigint;
      crc32: string | null;
    }[];
    previewData?: Buffer | null;
    previewMsgId?: bigint | null;
  }
): Promise<void> {
  await db.package.update({
    where: { id: packageId },
    data: {
      fileCount: input.files.length,
      previewData: input.previewData ? new Uint8Array(input.previewData) : undefined,
      previewMsgId: input.previewMsgId ?? undefined,
      files: {
        create: input.files,
      },
    },
  });
}

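A sketch of the two-phase write these helpers describe. All field values are illustrative, and the surrounding variables (contentHash, sourceChannelId, extractedFiles, and so on) are assumed to be in scope at the real call site:

// Phase 1 — persist the upload result the moment Telegram confirms it:
const { id } = await createPackageStub({
  contentHash,
  fileName: "DragonsStash.part1.rar", // illustrative
  fileSize: 1_500_000_000n,
  archiveType: "RAR",
  sourceChannelId,
  sourceMessageId,
  destChannelId,
  destMessageId,
  destMessageIds: [destMessageId],
  isMultipart: false,
  partCount: 1,
  ingestionRunId,
});

// Phase 2 — attach file entries and preview once extraction finishes:
await updatePackageWithMetadata(id, {
  files: extractedFiles, // PackageFile rows built from the archive listing
  previewData,
  previewMsgId,
});
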
/**
 * Check if a package already exists for a given source message ID
 * AND was successfully uploaded to the destination (destMessageId is set).
@@ -99,6 +210,7 @@ export interface CreatePackageInput {
  sourceTopicId?: bigint | null;
  destChannelId?: string;
  destMessageId?: bigint;
  destMessageIds?: bigint[];
  isMultipart: boolean;
  partCount: number;
  ingestionRunId: string;
@@ -106,6 +218,8 @@ export interface CreatePackageInput {
  tags?: string[];
  previewData?: Buffer | null;
  previewMsgId?: bigint | null;
  sourceCaption?: string | null;
  replyToMessageId?: bigint | null;
  files: {
    path: string;
    fileName: string;
@@ -128,6 +242,7 @@ export async function createPackageWithFiles(input: CreatePackageInput) {
  sourceTopicId: input.sourceTopicId ?? undefined,
  destChannelId: input.destChannelId,
  destMessageId: input.destMessageId,
  destMessageIds: input.destMessageIds ?? (input.destMessageId ? [input.destMessageId] : []),
  isMultipart: input.isMultipart,
  partCount: input.partCount,
  fileCount: input.files.length,
@@ -136,6 +251,8 @@ export async function createPackageWithFiles(input: CreatePackageInput) {
  tags: input.tags && input.tags.length > 0 ? input.tags : undefined,
  previewData: input.previewData ? new Uint8Array(input.previewData) : undefined,
  previewMsgId: input.previewMsgId ?? undefined,
  sourceCaption: input.sourceCaption ?? undefined,
  replyToMessageId: input.replyToMessageId ?? undefined,
  files: {
    create: input.files,
  },
@@ -290,6 +407,16 @@ export async function updateAccountAuthState(
  });
}

export async function updateAccountPremiumStatus(
  accountId: string,
  isPremium: boolean
): Promise<void> {
  await db.telegramAccount.update({
    where: { id: accountId },
    data: { isPremium },
  });
}

export async function getAccountAuthCode(accountId: string) {
  const account = await db.telegramAccount.findUnique({
    where: { id: accountId },
@@ -473,3 +600,146 @@ export async function resetPackageDestination(packageId: string) {
    data: { destChannelId: null, destMessageId: null },
  });
}

export async function upsertSkippedPackage(data: {
  fileName: string;
  fileSize: bigint;
  reason: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED";
  errorMessage?: string;
  sourceChannelId: string;
  sourceMessageId: bigint;
  sourceTopicId?: bigint | null;
  isMultipart: boolean;
  partCount: number;
  accountId: string;
}) {
  return db.skippedPackage.upsert({
    where: {
      sourceChannelId_sourceMessageId: {
        sourceChannelId: data.sourceChannelId,
        sourceMessageId: data.sourceMessageId,
      },
    },
    update: {
      reason: data.reason,
      errorMessage: data.errorMessage ?? null,
      fileName: data.fileName,
      fileSize: data.fileSize,
      createdAt: new Date(),
    },
    create: {
      fileName: data.fileName,
      fileSize: data.fileSize,
      reason: data.reason,
      errorMessage: data.errorMessage ?? null,
      sourceChannelId: data.sourceChannelId,
      sourceMessageId: data.sourceMessageId,
      sourceTopicId: data.sourceTopicId ?? null,
      isMultipart: data.isMultipart,
      partCount: data.partCount,
      accountId: data.accountId,
    },
  });
}

export async function deleteSkippedPackage(
  sourceChannelId: string,
  sourceMessageId: bigint
) {
  return db.skippedPackage.deleteMany({
    where: { sourceChannelId, sourceMessageId },
  });
}

export async function createOrFindPackageGroup(input: {
  mediaAlbumId: string;
  sourceChannelId: string;
  name: string;
  previewData?: Buffer | null;
}): Promise<string> {
  // findFirst + conditional create (Prisma doesn't support upsert on nullable compound unique)
  const existing = await db.packageGroup.findFirst({
    where: {
      mediaAlbumId: input.mediaAlbumId,
      sourceChannelId: input.sourceChannelId,
    },
    select: { id: true },
  });

  if (existing) return existing.id;

  try {
    const group = await db.packageGroup.create({
      data: {
        mediaAlbumId: input.mediaAlbumId,
        sourceChannelId: input.sourceChannelId,
        name: input.name,
        previewData: input.previewData ? new Uint8Array(input.previewData) : undefined,
      },
    });
    return group.id;
  } catch (err) {
    // Handle race condition: another process created the group between our findFirst and create
    if (err instanceof Error && err.message.includes("Unique constraint")) {
      const raced = await db.packageGroup.findFirst({
        where: { mediaAlbumId: input.mediaAlbumId, sourceChannelId: input.sourceChannelId },
        select: { id: true },
      });
      if (raced) return raced.id;
    }
    throw err;
  }
}

export async function linkPackagesToGroup(
  packageIds: string[],
  groupId: string
): Promise<void> {
  await db.package.updateMany({
    where: { id: { in: packageIds } },
    data: { packageGroupId: groupId },
  });
}

export async function createTimeWindowGroup(input: {
  sourceChannelId: string;
  name: string;
  packageIds: string[];
}): Promise<string> {
  const group = await db.packageGroup.create({
    data: {
      sourceChannelId: input.sourceChannelId,
      name: input.name,
      groupingSource: "AUTO_TIME",
    },
  });

  await db.package.updateMany({
    where: { id: { in: input.packageIds } },
    data: { packageGroupId: group.id },
  });

  return group.id;
}

export async function createAutoGroup(input: {
  sourceChannelId: string;
  name: string;
  packageIds: string[];
  groupingSource: "ALBUM" | "MANUAL" | "AUTO_TIME" | "AUTO_PATTERN" | "AUTO_ZIP" | "AUTO_CAPTION" | "AUTO_REPLY";
}): Promise<string> {
  const group = await db.packageGroup.create({
    data: {
      sourceChannelId: input.sourceChannelId,
      name: input.name,
      groupingSource: input.groupingSource,
    },
  });

  await db.package.updateMany({
    where: { id: { in: input.packageIds } },
    data: { packageGroupId: group.id },
  });

  return group.id;
}

@@ -101,16 +101,14 @@ export async function processExtractRequest(requestId: string): Promise<void> {
  try {
    await mkdir(tempDir, { recursive: true });

    // Wrap the entire TDLib session in the mutex so no other TDLib
    // operation can run concurrently (TDLib is single-session).
-   await withTdlibMutex("extract", async () => {
    const accounts = await getActiveAccounts();
    if (accounts.length === 0) {
      throw new Error("No authenticated Telegram accounts available");
    }

    const account = accounts[0];
-   const client = await createTdlibClient({ id: account.id, phone: account.phone });
+   await withTdlibMutex(account.phone, "extract", async () => {
+     const { client } = await createTdlibClient({ id: account.id, phone: account.phone });

      try {
        // Load chat list so TDLib can find the dest channel

@@ -5,6 +5,7 @@ import { withTdlibMutex } from "./util/mutex.js";
import { processFetchRequest } from "./worker.js";
import { processExtractRequest } from "./extract-listener.js";
import { rebuildPackageDatabase } from "./rebuild.js";
+ import { processManualUpload } from "./manual-upload.js";
import { generateInviteLink, createSupergroup, searchPublicChat } from "./tdlib/chats.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import { triggerImmediateCycle } from "./scheduler.js";
@@ -13,6 +14,7 @@ import {
  getGlobalSetting,
  setGlobalSetting,
  getActiveAccounts,
+ getChannelFetchRequest,
  upsertChannel,
  ensureAccountChannelLink,
  updateFetchRequestStatus,
@@ -55,6 +57,7 @@ async function connectListener(): Promise<void> {
  await pgClient.query("LISTEN join_channel");
  await pgClient.query("LISTEN archive_extract");
  await pgClient.query("LISTEN rebuild_packages");
+ await pgClient.query("LISTEN manual_upload");

  pgClient.on("notification", (msg) => {
    if (msg.channel === "channel_fetch" && msg.payload) {
@@ -71,6 +74,8 @@ async function connectListener(): Promise<void> {
      handleArchiveExtract(msg.payload);
    } else if (msg.channel === "rebuild_packages" && msg.payload) {
      handleRebuildPackages(msg.payload);
+   } else if (msg.channel === "manual_upload" && msg.payload) {
+     handleManualUpload(msg.payload);
    }
  });

@@ -96,7 +101,7 @@ async function connectListener(): Promise<void> {
    }
  });

- log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages)");
+ log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger, join_channel, archive_extract, rebuild_packages, manual_upload)");
} catch (err) {
  log.error({ err }, "Failed to start fetch listener — retrying");
  scheduleReconnect();
@@ -129,7 +134,9 @@ let fetchQueue: Promise<void> = Promise.resolve();
function handleChannelFetch(requestId: string): void {
  fetchQueue = fetchQueue.then(async () => {
    try {
-     await withTdlibMutex("fetch-channels", () =>
+     const request = await getChannelFetchRequest(requestId);
+     const key = request?.account?.phone ?? "global";
+     await withTdlibMutex(key, "fetch-channels", () =>
        processFetchRequest(requestId)
      );
    } catch (err) {
@@ -143,22 +150,20 @@ function handleChannelFetch(requestId: string): void {
function handleGenerateInvite(channelId: string): void {
  fetchQueue = fetchQueue.then(async () => {
    try {
-     await withTdlibMutex("generate-invite", async () => {
+     const accounts = await getActiveAccounts();
+     if (accounts.length === 0) {
+       log.warn("No authenticated accounts to generate invite link");
+       return;
+     }
+     const account = accounts[0];
+     await withTdlibMutex(account.phone, "generate-invite", async () => {
        const destChannel = await getGlobalDestinationChannel();
        if (!destChannel || destChannel.id !== channelId) {
          log.warn({ channelId }, "Destination channel mismatch, skipping invite generation");
          return;
        }

-       // Use the first available authenticated account to generate the link
-       const accounts = await getActiveAccounts();
-       if (accounts.length === 0) {
-         log.warn("No authenticated accounts to generate invite link");
-         return;
-       }
-
-       const account = accounts[0];
-       const client = await createTdlibClient({ id: account.id, phone: account.phone });
+       const { client } = await createTdlibClient({ id: account.id, phone: account.phone });

        try {
          const link = await generateInviteLink(client, destChannel.telegramId);
@@ -183,7 +188,13 @@ function handleCreateDestination(payload: string): void {
      const parsed = JSON.parse(payload) as { requestId: string; title: string };
      requestId = parsed.requestId;

-     await withTdlibMutex("create-destination", async () => {
+     const accounts = await getActiveAccounts();
+     if (accounts.length === 0) {
+       throw new Error("No authenticated accounts available to create the group");
+     }
+     const account = accounts[0];
+
+     await withTdlibMutex(account.phone, "create-destination", async () => {
        const { db } = await import("./db/client.js");

        // Mark the request as in-progress
@@ -192,14 +203,7 @@ function handleCreateDestination(payload: string): void {
          data: { status: "IN_PROGRESS" },
        });

-       // Use the first available authenticated account
-       const accounts = await getActiveAccounts();
-       if (accounts.length === 0) {
-         throw new Error("No authenticated accounts available to create the group");
-       }
-
-       const account = accounts[0];
-       const client = await createTdlibClient({ id: account.id, phone: account.phone });
+       const { client } = await createTdlibClient({ id: account.id, phone: account.phone });

        try {
          // Create the supergroup via TDLib
@@ -324,16 +328,16 @@ function handleJoinChannel(payload: string): void {
      const parsed = JSON.parse(payload) as { requestId: string; input: string; accountId: string };
      requestId = parsed.requestId;

-     await withTdlibMutex("join-channel", async () => {
-       await updateFetchRequestStatus(requestId!, "IN_PROGRESS");

      const accounts = await getActiveAccounts();
      const account = accounts.find((a) => a.id === parsed.accountId) ?? accounts[0];
      if (!account) {
        throw new Error("No authenticated accounts available");
      }

-     const client = await createTdlibClient({ id: account.id, phone: account.phone });
+     await withTdlibMutex(account.phone, "join-channel", async () => {
+       await updateFetchRequestStatus(requestId!, "IN_PROGRESS");

+       const { client } = await createTdlibClient({ id: account.id, phone: account.phone });

        try {
          const linkInfo = parseTelegramInput(parsed.input);
@@ -503,7 +507,12 @@ function handleIngestionTrigger(): void {
function handleRebuildPackages(requestId: string): void {
  fetchQueue = fetchQueue.then(async () => {
    try {
-     await withTdlibMutex("rebuild-packages", () =>
+     const accounts = await getActiveAccounts();
+     if (accounts.length === 0) {
+       log.warn("No authenticated accounts to rebuild packages");
+       return;
+     }
+     await withTdlibMutex(accounts[0].phone, "rebuild-packages", () =>
        rebuildPackageDatabase(requestId)
      );
    } catch (err) {
@@ -511,3 +520,11 @@ function handleRebuildPackages(requestId: string): void {
    }
  });
}

+ // ── Manual upload handler ──
+
+ function handleManualUpload(uploadId: string): void {
+   fetchQueue = fetchQueue
+     .then(() => processManualUpload(uploadId))
+     .catch((err) => log.error({ err, uploadId }, "Manual upload processing failed"));
+ }

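The recurring change across these listener hunks is that withTdlibMutex moves from a fixed global key ("extract", "fetch-channels", and so on) to the account's phone number as the first argument, so TDLib work for different accounts can proceed concurrently while operations on the same account still serialize against each other.
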
668
worker/src/grouping.ts
Normal file
668
worker/src/grouping.ts
Normal file
@@ -0,0 +1,668 @@
|
||||
import type { Client } from "tdl";
import type { TelegramPhoto } from "./preview/match.js";
import { downloadPhotoThumbnail } from "./tdlib/download.js";
import { createOrFindPackageGroup, linkPackagesToGroup, createTimeWindowGroup, createAutoGroup } from "./db/queries.js";
import { config } from "./util/config.js";
import { childLogger } from "./util/logger.js";
import { db } from "./db/client.js";

const log = childLogger("grouping");

export interface IndexedPackageRef {
  packageId: string;
  sourceMessageId: bigint;
  mediaAlbumId?: string;
}

/**
 * After a scan cycle's packages are individually indexed, detect album groups
 * and create PackageGroup records linking the members.
 */
export async function processAlbumGroups(
  client: Client,
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[],
  photos: TelegramPhoto[]
): Promise<void> {
  // Group indexed packages by mediaAlbumId
  const albumMap = new Map<string, IndexedPackageRef[]>();
  for (const pkg of indexedPackages) {
    if (!pkg.mediaAlbumId || pkg.mediaAlbumId === "0") continue;
    const group = albumMap.get(pkg.mediaAlbumId) ?? [];
    group.push(pkg);
    albumMap.set(pkg.mediaAlbumId, group);
  }

  if (albumMap.size === 0) return;

  log.info({ albumCount: albumMap.size }, "Detected album groups to process");

  for (const [albumId, members] of albumMap) {
    if (members.length < 2) continue;

    try {
      // Find the first package's fileName for the group name fallback
      const firstPkg = await db.package.findFirst({
        where: { id: { in: members.map((m) => m.packageId) } },
        orderBy: { sourceMessageId: "asc" },
        select: { id: true, fileName: true },
      });

      // Try to find a caption from the album's photo message
      const albumPhoto = photos.find((p) => p.mediaAlbumId === albumId);
      const groupName = albumPhoto?.caption || firstPkg?.fileName || "Unnamed Group";

      // Download preview from album photo if available
      let previewData: Buffer | null = null;
      if (albumPhoto) {
        previewData = await downloadPhotoThumbnail(client, albumPhoto.fileId);
      }

      const groupId = await createOrFindPackageGroup({
        mediaAlbumId: albumId,
        sourceChannelId,
        name: groupName,
        previewData,
      });

      // Idempotent link — safe to re-run if some packages were indexed in prior scans
      const packageIds = members.map((m) => m.packageId);
      await linkPackagesToGroup(packageIds, groupId);

      log.info(
        { albumId, groupId, groupName, memberCount: packageIds.length },
        "Linked packages to album group"
      );
    } catch (err) {
      log.warn({ albumId, err }, "Failed to create album group — packages still indexed individually");
    }
  }
}

/**
 * Apply learned GroupingRules from manual overrides.
 * For each rule, find ungrouped packages whose fileName contains the pattern.
 */
export async function processRuleBasedGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const rules = await db.groupingRule.findMany({
    where: { sourceChannelId },
    orderBy: { confidence: "desc" },
  });

  if (rules.length === 0) return;

  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
    },
    select: { id: true, fileName: true, creator: true },
  });

  if (ungrouped.length < 2) return;

  for (const rule of rules) {
    const matches = ungrouped.filter((pkg) => {
      const lower = rule.pattern.toLowerCase();
      return pkg.fileName.toLowerCase().includes(lower) ||
        (pkg.creator && pkg.creator.toLowerCase().includes(lower));
    });

    if (matches.length < 2) continue;

    // Check if any are already grouped (by a previous rule in this loop)
    const stillUngrouped = await db.package.findMany({
      where: {
        id: { in: matches.map((m) => m.id) },
        packageGroupId: null,
      },
      select: { id: true },
    });

    if (stillUngrouped.length < 2) continue;

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name: rule.pattern,
        packageIds: stillUngrouped.map((m) => m.id),
        groupingSource: "MANUAL",
      });

      log.info(
        { groupId, ruleId: rule.id, pattern: rule.pattern, memberCount: stillUngrouped.length },
        "Applied learned grouping rule"
      );
    } catch (err) {
      log.warn({ err, ruleId: rule.id }, "Failed to apply grouping rule");
    }
  }
}

/**
 * After album grouping, cluster remaining ungrouped packages from the same channel
 * that were posted within a configurable time window.
 * Only groups packages that were just indexed in this scan cycle (the `indexedPackages` list).
 */
export async function processTimeWindowGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  if (config.autoGroupTimeWindowMinutes <= 0) return;

  // Find which of the just-indexed packages are still ungrouped
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
    },
    orderBy: { sourceMessageId: "asc" },
    select: {
      id: true,
      fileName: true,
      sourceMessageId: true,
      indexedAt: true,
    },
  });

  if (ungrouped.length < 2) return;

  const windowMs = config.autoGroupTimeWindowMinutes * 60 * 1000;

  // Cluster by time proximity: walk through sorted list, start new cluster when gap > window
  const clusters: typeof ungrouped[] = [];
  let current: typeof ungrouped = [ungrouped[0]];

  for (let i = 1; i < ungrouped.length; i++) {
    const prev = current[current.length - 1];
    const gap = Math.abs(ungrouped[i].indexedAt.getTime() - prev.indexedAt.getTime());

    if (gap <= windowMs) {
      current.push(ungrouped[i]);
    } else {
      clusters.push(current);
      current = [ungrouped[i]];
    }
  }
  clusters.push(current);

  // Create groups for clusters with 2+ packages
  for (const cluster of clusters) {
    if (cluster.length < 2) continue;

    // Derive group name from common filename prefix
    const name = findCommonPrefix(cluster.map((p) => p.fileName)) || cluster[0].fileName;

    try {
      const groupId = await createTimeWindowGroup({
        sourceChannelId,
        name,
        packageIds: cluster.map((p) => p.id),
      });

      log.info(
        { groupId, name, memberCount: cluster.length },
        "Created time-window group"
      );
    } catch (err) {
      log.warn({ err, clusterSize: cluster.length }, "Failed to create time-window group");
    }
  }
}

/**
 * Group ungrouped packages that share a date pattern (YYYY-MM, YYYY_MM, etc.)
 * or project slug extracted from their filenames.
 */
export async function processPatternGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
    },
    select: { id: true, fileName: true },
  });

  if (ungrouped.length < 2) return;

  // Group by extracted pattern
  const patternMap = new Map<string, typeof ungrouped>();
  for (const pkg of ungrouped) {
    const pattern = extractPattern(pkg.fileName);
    if (!pattern) continue;
    const group = patternMap.get(pattern) ?? [];
    group.push(pkg);
    patternMap.set(pattern, group);
  }

  for (const [pattern, members] of patternMap) {
    if (members.length < 2) continue;

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name: pattern,
        packageIds: members.map((m) => m.id),
        groupingSource: "AUTO_PATTERN",
      });

      log.info(
        { groupId, pattern, memberCount: members.length },
        "Created pattern-based group"
      );
    } catch (err) {
      log.warn({ err, pattern }, "Failed to create pattern group");
    }
  }
}

/**
 * Extract a grouping pattern from a filename.
 * Matches: YYYY-MM, YYYY_MM, "Month Year", or a project prefix before common separators.
 * Returns null if no usable pattern found.
 */
function extractPattern(fileName: string): string | null {
  // Strip extension for matching
  const name = fileName.replace(/\.(zip|rar|7z|pdf|stl)(\.\d+)?$/i, "");

  // Match YYYY-MM or YYYY_MM patterns
  const dateMatch = name.match(/(\d{4})[\-_](\d{2})/);
  if (dateMatch) {
    return `${dateMatch[1]}-${dateMatch[2]}`;
  }

  // Match "Month Year" patterns (e.g., "January 2025", "Jan 2025")
  const months = "(?:jan(?:uary)?|feb(?:ruary)?|mar(?:ch)?|apr(?:il)?|may|jun(?:e)?|jul(?:y)?|aug(?:ust)?|sep(?:tember)?|oct(?:ober)?|nov(?:ember)?|dec(?:ember)?)";
  const monthYearMatch = name.match(new RegExp(`(${months})\\s*(\\d{4})`, "i"));
  if (monthYearMatch) {
    const monthStr = monthYearMatch[1].toLowerCase().slice(0, 3);
    const monthNum = ["jan","feb","mar","apr","may","jun","jul","aug","sep","oct","nov","dec"].indexOf(monthStr) + 1;
    if (monthNum > 0) {
      return `${monthYearMatch[2]}-${String(monthNum).padStart(2, "0")}`;
    }
  }

  // Match project prefix: text before " - ", " – ", or "(". Must be at least 5 chars.
  const prefixMatch = name.match(/^(.{5,}?)(?:\s*[\-–]\s|\s*\()/);
  if (prefixMatch) {
    return prefixMatch[1].trim();
  }

  return null;
}

/**
 * Group ungrouped packages that share the same creator within a channel.
 * Only groups if there are 3+ packages from the same creator (to avoid
 * over-grouping when a creator only has a couple files).
 */
export async function processCreatorGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
      creator: { not: null },
    },
    select: { id: true, fileName: true, creator: true },
  });

  if (ungrouped.length < 3) return;

  // Group by creator
  const creatorMap = new Map<string, typeof ungrouped>();
  for (const pkg of ungrouped) {
    if (!pkg.creator) continue;
    const key = pkg.creator.toLowerCase();
    const group = creatorMap.get(key) ?? [];
    group.push(pkg);
    creatorMap.set(key, group);
  }

  for (const [, members] of creatorMap) {
    if (members.length < 3) continue;

    const creatorName = members[0].creator!;
    const name = findCommonPrefix(members.map((m) => m.fileName)) || creatorName;

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name,
        packageIds: members.map((m) => m.id),
        groupingSource: "AUTO_PATTERN",
      });

      log.info(
        { groupId, creator: creatorName, memberCount: members.length },
        "Created creator-based group"
      );
    } catch (err) {
      log.warn({ err, creator: creatorName }, "Failed to create creator group");
    }
  }
}

/**
 * Group ungrouped packages that share the same root folder inside their archives.
 * E.g., if two packages both contain files under "ProjectX/", they're likely related.
 * Only considers packages with 3+ files (to avoid false positives from flat archives).
 */
export async function processZipPathGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  // Find ungrouped packages that have indexed files
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
      fileCount: { gte: 3 },
    },
    select: {
      id: true,
      fileName: true,
      files: {
        select: { path: true },
        take: 50,
      },
    },
  });

  if (ungrouped.length < 2) return;

  // Extract the dominant root folder for each package
  const packageRoots = new Map<string, { id: string; fileName: string }[]>();

  for (const pkg of ungrouped) {
    const root = extractRootFolder(pkg.files.map((f) => f.path));
    if (!root) continue;

    const key = root.toLowerCase();
    const group = packageRoots.get(key) ?? [];
    group.push({ id: pkg.id, fileName: pkg.fileName });
    packageRoots.set(key, group);
  }

  // Create groups for roots shared by 2+ packages
  for (const [root, members] of packageRoots) {
    if (members.length < 2) continue;

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name: root,
        packageIds: members.map((m) => m.id),
        groupingSource: "AUTO_ZIP",
      });

      log.info(
        { groupId, rootFolder: root, memberCount: members.length },
        "Created ZIP path prefix group"
      );
    } catch (err) {
      log.warn({ err, rootFolder: root }, "Failed to create ZIP path group");
    }
  }
}

/**
 * Group ungrouped packages that reply to the same root message.
 * If message B and C both reply to message A, they're grouped together.
 */
export async function processReplyChainGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
      replyToMessageId: { not: null },
    },
    select: {
      id: true,
      fileName: true,
      replyToMessageId: true,
    },
  });

  if (ungrouped.length < 2) return;

  // Group by replyToMessageId
  const replyMap = new Map<string, typeof ungrouped>();
  for (const pkg of ungrouped) {
    if (!pkg.replyToMessageId) continue;
    const key = pkg.replyToMessageId.toString();
    const group = replyMap.get(key) ?? [];
    group.push(pkg);
    replyMap.set(key, group);
  }

  for (const [replyId, members] of replyMap) {
    if (members.length < 2) continue;

    const name = findCommonPrefix(members.map((m) => m.fileName)) || members[0].fileName;

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name,
        packageIds: members.map((m) => m.id),
        groupingSource: "AUTO_REPLY" as const,
      });

      log.info(
        { groupId, replyToMessageId: replyId, memberCount: members.length },
        "Created reply-chain group"
      );
    } catch (err) {
      log.warn({ err, replyToMessageId: replyId }, "Failed to create reply-chain group");
    }
  }
}

/**
 * Group ungrouped packages with similar captions from the same channel.
 * Uses normalized caption comparison — two captions match if they share
 * the same significant words (ignoring common words and file extensions).
 */
export async function processCaptionGroups(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const ungrouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: null,
      sourceCaption: { not: null },
    },
    select: {
      id: true,
      fileName: true,
      sourceCaption: true,
    },
  });

  if (ungrouped.length < 2) return;

  // Group by normalized caption key
  const captionMap = new Map<string, typeof ungrouped>();
  for (const pkg of ungrouped) {
    if (!pkg.sourceCaption) continue;
    const key = normalizeCaptionKey(pkg.sourceCaption);
    if (!key) continue;
    const group = captionMap.get(key) ?? [];
    group.push(pkg);
    captionMap.set(key, group);
  }

  for (const [, members] of captionMap) {
    if (members.length < 2) continue;

    const name = members[0].sourceCaption!.slice(0, 80);

    try {
      const groupId = await createAutoGroup({
        sourceChannelId,
        name,
        packageIds: members.map((m) => m.id),
        groupingSource: "AUTO_CAPTION" as const,
      });

      log.info(
        { groupId, memberCount: members.length },
        "Created caption-match group"
      );
    } catch (err) {
      log.warn({ err }, "Failed to create caption group");
    }
  }
}

/**
 * Normalize a caption for grouping: lowercase, strip extensions and numbers,
 * extract significant words (3+ chars), sort, and join.
 * Two captions with the same key are considered a match.
 */
function normalizeCaptionKey(caption: string): string | null {
  const stripped = caption
    .toLowerCase()
    .replace(/\.(zip|rar|7z|stl|pdf|obj|gcode)(\.\d+)?/gi, "")
    .replace(/[^a-z0-9\s]/g, " ");

  const words = stripped
    .split(/\s+/)
    .filter((w) => w.length >= 3)
    .filter((w) => !["the", "and", "for", "with", "from", "part", "file", "files"].includes(w));

  if (words.length < 2) return null;

  return words.sort().join(" ");
}

/**
 * Extract the dominant root folder from a list of archive file paths.
 * Returns the first path segment that appears in >50% of files.
 * Returns null for flat archives or archives with no common root.
 */
function extractRootFolder(paths: string[]): string | null {
  if (paths.length === 0) return null;

  // Count first path segments
  const segmentCounts = new Map<string, number>();
  for (const p of paths) {
    // Normalize separators and get first segment
    const normalized = p.replace(/\\/g, "/");
    const firstSlash = normalized.indexOf("/");
    if (firstSlash <= 0) continue; // Skip root-level files
    const segment = normalized.slice(0, firstSlash);
    // Skip common noise folders
    if (segment === "__MACOSX" || segment === ".DS_Store" || segment === "Thumbs.db") continue;
    segmentCounts.set(segment, (segmentCounts.get(segment) ?? 0) + 1);
  }

  if (segmentCounts.size === 0) return null;

  // Find the most common segment
  let maxSegment = "";
  let maxCount = 0;
  for (const [seg, count] of segmentCounts) {
    if (count > maxCount) {
      maxSegment = seg;
      maxCount = count;
    }
  }

  // Must appear in >50% of files and be at least 3 chars
  if (maxCount < paths.length * 0.5 || maxSegment.length < 3) return null;

  return maxSegment;
}

/**
 * Detect packages that could have been grouped differently.
 * Checks if any grouped package's filename matches a GroupingRule
 * that would place it in a different group.
 */
export async function detectGroupingConflicts(
  sourceChannelId: string,
  indexedPackages: IndexedPackageRef[]
): Promise<void> {
  const rules = await db.groupingRule.findMany({
    where: { sourceChannelId },
  });
  if (rules.length === 0) return;

  const grouped = await db.package.findMany({
    where: {
      id: { in: indexedPackages.map((p) => p.packageId) },
      packageGroupId: { not: null },
    },
    select: {
      id: true,
      fileName: true,
      packageGroupId: true,
      packageGroup: { select: { name: true, groupingSource: true } },
    },
  });

  for (const pkg of grouped) {
    for (const rule of rules) {
      if (pkg.fileName.toLowerCase().includes(rule.pattern.toLowerCase())) {
        // Check if the rule's source group is different from current group
        if (rule.createdByGroupId && rule.createdByGroupId !== pkg.packageGroupId) {
          try {
            await db.systemNotification.create({
              data: {
                type: "GROUPING_CONFLICT",
                severity: "INFO",
                title: `Potential grouping conflict: ${pkg.fileName}`,
                message: `Grouped by ${pkg.packageGroup?.groupingSource ?? "unknown"} into "${pkg.packageGroup?.name}", but also matches rule "${rule.pattern}" from a different manual group`,
                context: {
                  packageId: pkg.id,
                  fileName: pkg.fileName,
                  currentGroupId: pkg.packageGroupId,
                  matchedRuleId: rule.id,
                  matchedPattern: rule.pattern,
                },
              },
            });
          } catch {
            // Best-effort
          }
          break; // One notification per package
        }
      }
    }
  }
}

/**
 * Find the longest common prefix among a list of filenames,
 * trimming trailing separators and partial words.
 */
function findCommonPrefix(names: string[]): string {
  if (names.length === 0) return "";
  if (names.length === 1) return names[0];

  let prefix = names[0];
  for (let i = 1; i < names.length; i++) {
    while (!names[i].startsWith(prefix)) {
      prefix = prefix.slice(0, -1);
      if (prefix.length === 0) return "";
    }
  }

  // Trim trailing separators and partial words
  const trimmed = prefix.replace(/[\s\-_.(]+$/, "");
  return trimmed.length >= 3 ? trimmed : "";
}
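The call site that wires these passes together is not part of this compare view. The sketch below shows one plausible ordering, from strongest signal (Telegram albums) to weakest (time windows), with conflict detection last; the ordering and function name are assumptions, not the repository's actual code:

// Hypothetical orchestration — the actual wiring lives outside this diff.
import type { Client } from "tdl";
import {
  processAlbumGroups, processRuleBasedGroups, processReplyChainGroups,
  processZipPathGroups, processCaptionGroups, processPatternGroups,
  processCreatorGroups, processTimeWindowGroups, detectGroupingConflicts,
  type IndexedPackageRef,
} from "./grouping.js";
import type { TelegramPhoto } from "./preview/match.js";

async function runGroupingPasses(
  client: Client,
  channelId: string,
  indexed: IndexedPackageRef[],
  photos: TelegramPhoto[]
): Promise<void> {
  await processAlbumGroups(client, channelId, indexed, photos); // strongest signal
  await processRuleBasedGroups(channelId, indexed);             // learned manual rules
  await processReplyChainGroups(channelId, indexed);
  await processZipPathGroups(channelId, indexed);
  await processCaptionGroups(channelId, indexed);
  await processPatternGroups(channelId, indexed);
  await processCreatorGroups(channelId, indexed);
  await processTimeWindowGroups(channelId, indexed);            // weakest signal
  await detectGroupingConflicts(channelId, indexed);
}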
@@ -27,6 +27,33 @@ async function main(): Promise<void> {
  await cleanupTempDir();
  await markStaleRunsAsFailed();

  // Release any advisory locks orphaned by a previous worker instance.
  // When Docker kills a container, PostgreSQL may keep the session alive
  // (zombie connections), holding advisory locks that block the new worker.
  try {
    const result = await pool.query(`
      SELECT pid, state, left(query, 80) as query, age(clock_timestamp(), state_change) as idle_time
      FROM pg_stat_activity
      WHERE datname = current_database()
        AND pid != pg_backend_pid()
        AND state = 'idle'
        AND query LIKE '%pg_try_advisory_lock%'
        AND state_change < clock_timestamp() - interval '5 minutes'
    `);
    for (const row of result.rows) {
      log.warn(
        { pid: row.pid, idleTime: row.idle_time, query: row.query },
        "Terminating stale advisory lock session from previous worker"
      );
      await pool.query("SELECT pg_terminate_backend($1)", [row.pid]);
    }
    if (result.rows.length > 0) {
      log.info({ terminated: result.rows.length }, "Cleaned up stale advisory lock sessions");
    }
  } catch (err) {
    log.warn({ err }, "Failed to clean up stale advisory locks (non-fatal)");
  }

  // Verify destination messages exist for all "uploaded" packages.
  // Resets any packages whose dest message is missing so they get re-processed.
  await recoverIncompleteUploads();
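The cleanup above targets idle sessions whose last query acquired pg_try_advisory_lock. For reference, a minimal sketch of the acquire/release pair such a session runs (the lock key is hypothetical):

// Hypothetical acquire/release pair — the kind of session the startup sweep repairs.
const LOCK_KEY = 42; // application-chosen advisory lock key (assumed)
const { rows } = await pool.query("SELECT pg_try_advisory_lock($1) AS acquired", [LOCK_KEY]);
if (rows[0].acquired) {
  try {
    // ... critical section: only one worker runs this at a time ...
  } finally {
    await pool.query("SELECT pg_advisory_unlock($1)", [LOCK_KEY]);
  }
}
// If the process dies between acquire and release, the lock is held until the
// PostgreSQL session dies — which is exactly the zombie the sweep terminates.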
211
worker/src/manual-upload.ts
Normal file
@@ -0,0 +1,211 @@
import path from "path";
|
||||
import { rm } from "fs/promises";
|
||||
import { db } from "./db/client.js";
|
||||
import { childLogger } from "./util/logger.js";
|
||||
import { config } from "./util/config.js";
|
||||
import { hashParts } from "./archive/hash.js";
|
||||
import { byteLevelSplit } from "./archive/split.js";
|
||||
import { uploadToChannel } from "./upload/channel.js";
|
||||
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
|
||||
import { readZipCentralDirectory } from "./archive/zip-reader.js";
|
||||
import { readRarContents } from "./archive/rar-reader.js";
|
||||
import { read7zContents } from "./archive/sevenz-reader.js";
|
||||
import { getActiveAccounts } from "./db/queries.js";
|
||||
|
||||
const log = childLogger("manual-upload");
|
||||
|
||||
export async function processManualUpload(uploadId: string): Promise<void> {
|
||||
log.info({ uploadId }, "Processing manual upload");
|
||||
|
||||
const upload = await db.manualUpload.findUnique({
|
||||
where: { id: uploadId },
|
||||
include: { files: true },
|
||||
});
|
||||
|
||||
if (!upload || upload.status !== "PENDING") {
|
||||
log.warn({ uploadId }, "Manual upload not found or not pending");
|
||||
return;
|
||||
}
|
||||
|
||||
await db.manualUpload.update({
|
||||
where: { id: uploadId },
|
||||
data: { status: "PROCESSING" },
|
||||
});
|
||||
|
||||
try {
|
||||
// Get destination channel
|
||||
const destSetting = await db.globalSetting.findUnique({
|
||||
where: { key: "destination_channel_id" },
|
||||
});
|
||||
if (!destSetting) throw new Error("No destination channel configured");
|
||||
|
||||
const destChannel = await db.telegramChannel.findFirst({
|
||||
where: { id: destSetting.value, type: "DESTINATION", isActive: true },
|
||||
});
|
||||
if (!destChannel) throw new Error("Destination channel not found or inactive");
|
||||
|
||||
// Get a TDLib client (use first active account)
|
||||
const accounts = await getActiveAccounts();
|
||||
const account = accounts[0];
|
||||
if (!account) throw new Error("No authenticated Telegram account available");
|
||||
|
||||
const { client } = await createTdlibClient({ id: account.id, phone: account.phone });
|
||||
|
||||
try {
|
||||
const packageIds: string[] = [];
|
||||
|
||||
for (const file of upload.files) {
|
||||
try {
|
||||
const filePath = file.filePath;
|
||||
const fileName = file.fileName;
|
||||
const fileSize = file.fileSize;
|
||||
|
||||
log.info({ fileName, fileSize: Number(fileSize) }, "Processing file");
|
||||
|
||||
// Determine archive type
|
||||
let archiveType: "ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT" = "DOCUMENT";
|
||||
const ext = fileName.toLowerCase();
|
||||
if (ext.endsWith(".zip")) archiveType = "ZIP";
|
||||
else if (ext.endsWith(".rar")) archiveType = "RAR";
|
||||
else if (ext.endsWith(".7z")) archiveType = "SEVEN_Z";
|
||||
|
||||
// Hash the file
|
||||
const contentHash = await hashParts([filePath]);
|
||||
|
||||
// Check for duplicates
|
||||
const existing = await db.package.findFirst({
|
||||
where: { contentHash, destMessageId: { not: null } },
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
if (existing) {
|
||||
log.info({ fileName, contentHash }, "Duplicate file, skipping upload");
|
||||
await db.manualUploadFile.update({
|
||||
where: { id: file.id },
|
||||
data: { packageId: existing.id },
|
||||
});
|
||||
packageIds.push(existing.id);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Read archive metadata
|
||||
let entries: {
|
||||
path: string;
|
||||
fileName: string;
|
||||
extension: string | null;
|
||||
compressedSize: bigint;
|
||||
uncompressedSize: bigint;
|
||||
crc32: string | null;
|
||||
}[] = [];
|
||||
try {
|
||||
if (archiveType === "ZIP") entries = await readZipCentralDirectory([filePath]);
|
||||
else if (archiveType === "RAR") entries = await readRarContents(filePath);
|
||||
else if (archiveType === "SEVEN_Z") entries = await read7zContents(filePath);
|
||||
} catch {
|
||||
log.debug({ fileName }, "Could not read archive metadata");
|
||||
}
|
||||
|
||||
// Split if needed
|
||||
const MAX_UPLOAD_SIZE = BigInt(config.maxPartSizeMB) * 1024n * 1024n;
|
||||
let uploadPaths = [filePath];
|
||||
if (fileSize > MAX_UPLOAD_SIZE) {
|
||||
uploadPaths = await byteLevelSplit(filePath);
|
||||
}
|
||||
|
||||
// Upload to Telegram
|
||||
const destResult = await uploadToChannel(
|
||||
client,
|
||||
destChannel.telegramId,
|
||||
uploadPaths
|
||||
);
|
||||
|
||||
// Create package record
|
||||
const pkg = await db.package.create({
|
||||
data: {
|
||||
contentHash,
|
||||
fileName,
|
||||
fileSize,
|
||||
archiveType,
|
||||
sourceChannelId: destChannel.id,
|
||||
sourceMessageId: destResult.messageId,
|
||||
destChannelId: destChannel.id,
|
||||
destMessageId: destResult.messageId,
|
||||
destMessageIds: destResult.messageIds,
|
||||
isMultipart: uploadPaths.length > 1,
|
||||
partCount: uploadPaths.length,
|
||||
fileCount: entries.length,
|
||||
files: entries.length > 0 ? { create: entries } : undefined,
|
||||
},
|
||||
});
|
||||
|
||||
await db.manualUploadFile.update({
|
||||
where: { id: file.id },
|
||||
data: { packageId: pkg.id },
|
||||
});
|
||||
|
||||
packageIds.push(pkg.id);
|
||||
log.info({ fileName, packageId: pkg.id }, "File processed and uploaded");
|
||||
|
||||
// Clean up split files (but not the original)
|
||||
if (uploadPaths.length > 1) {
|
||||
for (const splitPath of uploadPaths) {
|
||||
if (splitPath !== filePath) {
|
||||
await rm(splitPath, { force: true }).catch(() => {});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (fileErr) {
|
||||
log.error({ err: fileErr, fileName: file.fileName }, "Failed to process file");
|
||||
}
|
||||
}
|
||||
|
||||
// Group packages if multiple files
|
||||
if (packageIds.length >= 2) {
|
||||
const groupName =
|
||||
upload.groupName ?? upload.files[0].fileName.replace(/\.[^.]+$/, "");
|
||||
const group = await db.packageGroup.create({
|
||||
data: {
|
||||
name: groupName,
|
||||
sourceChannelId: destChannel.id,
|
||||
groupingSource: "MANUAL",
|
||||
},
|
||||
});
|
||||
await db.package.updateMany({
|
||||
where: { id: { in: packageIds } },
|
||||
data: { packageGroupId: group.id },
|
||||
});
|
||||
log.info(
|
||||
{ groupId: group.id, groupName, packageCount: packageIds.length },
|
||||
"Created group for uploaded files"
|
||||
);
|
||||
}
|
||||
|
||||
await db.manualUpload.update({
|
||||
where: { id: uploadId },
|
||||
data: { status: "COMPLETED", completedAt: new Date() },
|
||||
});
|
||||
|
||||
log.info(
|
||||
{ uploadId, fileCount: upload.files.length, packageCount: packageIds.length },
|
||||
"Manual upload completed"
|
||||
);
|
||||
} finally {
|
||||
await closeTdlibClient(client);
|
||||
}
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
log.error({ err, uploadId }, "Manual upload failed");
|
||||
await db.manualUpload.update({
|
||||
where: { id: uploadId },
|
||||
data: { status: "FAILED", errorMessage: message },
|
||||
});
|
||||
}
|
||||
|
||||
// Clean up uploaded files
|
||||
try {
|
||||
const uploadDir = path.join("/data/uploads", uploadId);
|
||||
await rm(uploadDir, { recursive: true, force: true });
|
||||
} catch {
|
||||
// Best-effort cleanup
|
||||
}
|
||||
}
|
||||
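A worked example of the split threshold used above, assuming the default maxPartSizeMB of 1950:

// 1950 MiB = 1950 * 1024 * 1024 = 2_044_723_200 bytes, just under Telegram's
// 2 GiB non-Premium limit. A 3 GiB file exceeds it, so byteLevelSplit runs.
const MAX_UPLOAD_SIZE = 1950n * 1024n * 1024n; // 2_044_723_200n
const threeGiB = 3n * 1024n * 1024n * 1024n;   // 3_221_225_472n
console.log(threeGiB > MAX_UPLOAD_SIZE);       // true → file is byte-split first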
@@ -10,6 +10,7 @@ export interface TelegramPhoto {
  /** The smallest photo size available — used as thumbnail. */
  fileId: string;
  fileSize: number;
  mediaAlbumId?: string;
}

export interface ArchiveRef {
@@ -63,7 +63,7 @@ export async function rebuildPackageDatabase(
  }

  const account = accounts[0];
  const client = await createTdlibClient({
  const { client } = await createTdlibClient({
    id: account.id,
    phone: account.phone,
  });
@@ -63,7 +63,7 @@ export async function recoverIncompleteUploads(): Promise<void> {
  let client: Client | undefined;

  try {
    client = await createTdlibClient({ id: account.id, phone: account.phone });
    ({ client } = await createTdlibClient({ id: account.id, phone: account.phone }));

    // Load the chat list so TDLib can resolve chat IDs
    try {
@@ -1,8 +1,9 @@
import { config } from "./util/config.js";
import { childLogger } from "./util/logger.js";
import { withTdlibMutex } from "./util/mutex.js";
import { withTdlibMutex, forceReleaseMutex } from "./util/mutex.js";
import { getActiveAccounts, getPendingAccounts } from "./db/queries.js";
import { runWorkerForAccount, authenticateAccount } from "./worker.js";
import { runIntegrityAudit } from "./audit.js";

const log = childLogger("scheduler");

@@ -23,8 +24,8 @@ const CYCLE_TIMEOUT_MS = (parseInt(process.env.WORKER_CYCLE_TIMEOUT_MINUTES ?? "
 * 1. Authenticate any PENDING accounts (triggers SMS code flow + auto-fetch channels)
 * 2. Process all active AUTHENTICATED accounts for ingestion
 *
 * All TDLib operations are wrapped in the mutex to ensure only one client
 * runs at a time (also shared with the fetch listener for on-demand requests).
 * Each account's TDLib operations are wrapped in a per-key mutex so different
 * accounts run concurrently while the same account is still serialized.
 *
 * The cycle has a configurable timeout (WORKER_CYCLE_TIMEOUT_MINUTES, default 4h).
 * Once the timeout elapses, no new accounts will be started but any in-progress
@@ -54,7 +55,7 @@ async function runCycle(): Promise<void> {
      log.warn("Cycle timeout reached during authentication phase, stopping");
      break;
    }
    await withTdlibMutex(`auth:${account.phone}`, () =>
    await withTdlibMutex(account.phone, `auth:${account.phone}`, () =>
      authenticateAccount(account)
    );
  }
@@ -70,23 +71,54 @@ async function runCycle(): Promise<void> {

    log.info({ accountCount: accounts.length }, "Processing accounts");

    for (const account of accounts) {
      if (Date.now() - cycleStart > CYCLE_TIMEOUT_MS) {
        log.warn(
          { elapsed: Math.round((Date.now() - cycleStart) / 60_000), timeoutMinutes: CYCLE_TIMEOUT_MS / 60_000 },
          "Cycle timeout reached, skipping remaining accounts"
        );
        break;
      }
      await withTdlibMutex(`ingest:${account.phone}`, () =>
    const results = await Promise.allSettled(
      accounts.map((account) => {
        let timer: ReturnType<typeof setTimeout>;
        return Promise.race([
          withTdlibMutex(account.phone, `ingest:${account.phone}`, () =>
            runWorkerForAccount(account)
          ),
          new Promise<never>((_, reject) => {
            timer = setTimeout(
              () => reject(new Error(`Account ${account.phone} ingestion timed out after ${CYCLE_TIMEOUT_MS / 60_000}min`)),
              CYCLE_TIMEOUT_MS
            );
          }),
        ]).finally(() => clearTimeout(timer));
      })
    );

    for (let i = 0; i < results.length; i++) {
      if (results[i].status === "rejected") {
        const reason = (results[i] as PromiseRejectedResult).reason;
        log.error(
          { phone: accounts[i].phone, err: reason },
          "Account ingestion failed"
        );
        // If the cycle timed out, force-release the mutex so the next cycle
        // (or other operations like fetch-channels) can proceed immediately
        // instead of waiting 30 minutes for the mutex timeout.
        const errMsg = reason instanceof Error ? reason.message : String(reason);
        if (errMsg.includes("timed out") || errMsg.includes("mutex wait timeout")) {
          forceReleaseMutex(accounts[i].phone);
        }
      }
    }

    log.info(
      { elapsed: Math.round((Date.now() - cycleStart) / 1000) },
      "Ingestion cycle complete"
    );

    // Run integrity audit after all accounts are processed
    try {
      const auditResult = await runIntegrityAudit();
      if (auditResult.issues > 0) {
        log.info({ ...auditResult }, "Integrity audit found issues");
      }
    } catch (auditErr) {
      log.warn({ err: auditErr }, "Integrity audit failed");
    }
  } catch (err) {
    log.error({ err }, "Ingestion cycle failed");
  } finally {
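The per-account timeout above is a Promise.race against a timer that is always cleared in finally; extracted as a standalone helper, the pattern looks like this (helper name hypothetical):

// Standalone sketch of the race-with-timeout pattern used per account above.
function withTimeout<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return Promise.race([
    work,
    new Promise<never>((_, reject) => {
      timer = setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms);
    }),
  ]).finally(() => clearTimeout(timer)); // always clear the timer, win or lose
}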
@@ -6,6 +6,7 @@ import { childLogger } from "../util/logger.js";
import {
  updateAccountAuthState,
  getAccountAuthCode,
  updateAccountPremiumStatus,
} from "../db/queries.js";

const log = childLogger("tdlib-client");
@@ -27,7 +28,7 @@ interface AccountConfig {
 */
export async function createTdlibClient(
  account: AccountConfig
): Promise<Client> {
): Promise<{ client: Client; isPremium: boolean }> {
  const dbPath = path.join(config.tdlibStateDir, account.id);

  const client = createClient({
@@ -78,7 +79,30 @@ export async function createTdlibClient(

    await updateAccountAuthState(account.id, "AUTHENTICATED");
    log.info({ accountId: account.id }, "TDLib client authenticated");
    return client;

    let isPremium = false;
    try {
      const me = await client.invoke({ _: "getMe" }) as { is_premium?: boolean };
      isPremium = me.is_premium ?? false;
      await updateAccountPremiumStatus(account.id, isPremium);
      log.info({ accountId: account.id, isPremium }, "Account Premium status detected");
    } catch (err) {
      log.warn({ err, accountId: account.id }, "Could not detect Premium status, defaulting to false");
    }

    client.on("update", (update: unknown) => {
      const u = update as { _?: string; is_upload?: boolean };
      if (u?._ === "updateSpeedLimitNotification") {
        log.warn(
          { accountId: account.id, isUpload: u.is_upload },
          u.is_upload
            ? "Upload speed limited by Telegram (account is not Premium)"
            : "Download speed limited by Telegram (account is not Premium)"
        );
      }
    });

    return { client, isPremium };
  } catch (err) {
    log.error({ err, accountId: account.id }, "TDLib authentication failed");
    await updateAccountAuthState(account.id, "EXPIRED");
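Every call site in this change set adapts to the new return shape by destructuring. A caller that wants to size parts by Premium status might look like the following (the part-size mapping mirrors the config comment and is an assumption, not code from this diff):

// Call-site sketch for the new { client, isPremium } return shape.
const { client, isPremium } = await createTdlibClient({ id: account.id, phone: account.phone });
const partSizeMB = isPremium ? 3900 : 1950; // assumed mapping, per the config docs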
@@ -2,13 +2,16 @@ import type { Client } from "tdl";
import { readFile, rename, copyFile, unlink, stat } from "fs/promises";
import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js";
import { withFloodWait } from "../util/retry.js";
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
import { isArchiveAttachment } from "../archive/detect.js";
import type { TelegramMessage } from "../archive/multipart.js";
import type { TelegramPhoto } from "../preview/match.js";

const log = childLogger("download");

/** Maximum retry attempts for stalled/failed downloads */
const MAX_DOWNLOAD_RETRIES = 3;

/** Maximum number of pages to scan per channel/topic to prevent infinite loops */
export const MAX_SCAN_PAGES = 5000;

@@ -35,6 +38,8 @@ interface TdPhotoSize {
interface TdMessage {
  id: number;
  date: number;
  media_album_id?: string;
  reply_to_message_id?: number;
  content: {
    _: string;
    document?: {
@@ -74,6 +79,8 @@ export interface ChannelScanResult {
  archives: TelegramMessage[];
  photos: TelegramPhoto[];
  totalScanned: number;
  /** Highest message ID seen during scan (for watermark, even when no archives found). */
  maxScannedMessageId: bigint | null;
}

export type ScanProgressCallback = (messagesScanned: number) => void;
@@ -153,6 +160,7 @@ export async function getChannelMessages(
  const archives: TelegramMessage[] = [];
  const photos: TelegramPhoto[] = [];
  const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
  let maxScannedMessageId: bigint | null = null;

  // Open the chat so TDLib can access it
  try {
@@ -199,6 +207,12 @@ export async function getChannelMessages(

    totalScanned += result.messages.length;

    // Track highest message ID (first message in batch = newest, since results are newest-first)
    const batchMaxId = BigInt(result.messages[0].id);
    if (maxScannedMessageId === null || batchMaxId > maxScannedMessageId) {
      maxScannedMessageId = batchMaxId;
    }

    for (const msg of result.messages) {
      // Check for archive documents
      const doc = msg.content?.document;
@@ -211,6 +225,9 @@ export async function getChannelMessages(
          fileId: String(doc.document.id),
          fileSize: BigInt(doc.document.size),
          date: new Date(msg.date * 1000),
          mediaAlbumId: msg.media_album_id && msg.media_album_id !== "0" ? msg.media_album_id : undefined,
          replyToMessageId: msg.reply_to_message_id ? BigInt(msg.reply_to_message_id) : undefined,
          caption: msg.content?.caption?.text || undefined,
        });
        continue;
      }
@@ -227,6 +244,7 @@ export async function getChannelMessages(
          caption,
          fileId: String(smallest.photo.id),
          fileSize: smallest.photo.size || smallest.photo.expected_size,
          mediaAlbumId: msg.media_album_id && msg.media_album_id !== "0" ? msg.media_album_id : undefined,
        });
      }
    }
@@ -237,6 +255,11 @@ export async function getChannelMessages(
    fromMessageId = result.messages[result.messages.length - 1].id;
    if (result.messages.length < Math.min(limit, 100)) break;

    // Early exit: searchChatMessages returns newest-first. Once the oldest
    // message on this page is at or below the boundary, all remaining pages
    // are even older — no new messages exist, stop scanning immediately.
    if (boundary && fromMessageId <= boundary) break;

    await sleep(config.apiDelayMs);
  }
}
@@ -257,6 +280,7 @@ export async function getChannelMessages(
    archives: archives.reverse(),
    photos: photos.reverse(),
    totalScanned,
    maxScannedMessageId,
  };
}

@@ -350,14 +374,86 @@ export async function downloadFile(
    isComplete: false,
  });

  for (let attempt = 0; attempt <= MAX_DOWNLOAD_RETRIES; attempt++) {
    try {
      return await downloadFileAttempt(client, numericId, fileId, destPath, totalBytes, fileName, onProgress);
    } catch (err) {
      const isLastAttempt = attempt >= MAX_DOWNLOAD_RETRIES;

      // Rate limit from Telegram
      const waitSeconds = extractFloodWaitSeconds(err);
      if (waitSeconds !== null && !isLastAttempt) {
        const jitter = 1000 + Math.random() * 4000;
        const waitMs = waitSeconds * 1000 + jitter;
        log.warn(
          { fileName, attempt: attempt + 1, maxRetries: MAX_DOWNLOAD_RETRIES, waitSeconds },
          `Download rate-limited — sleeping ${waitSeconds}s before retry`
        );
        await cancelDownload(client, numericId);
        await sleep(waitMs);
        continue;
      }

      // Stall, timeout, or unexpected stop — cancel and retry
      const errMsg = err instanceof Error ? err.message : "";
      if (
        (errMsg.includes("stalled") || errMsg.includes("timed out") || errMsg.includes("stopped unexpectedly")) &&
        !isLastAttempt
      ) {
        log.warn(
          { fileName, attempt: attempt + 1, maxRetries: MAX_DOWNLOAD_RETRIES },
          "Download failed — cancelling and retrying"
        );
        await cancelDownload(client, numericId);
        await sleep(5_000);
        continue;
      }

      throw err;
    }
  }
  throw new Error(`Download failed after ${MAX_DOWNLOAD_RETRIES} retries for ${fileName}`);
}

/**
 * Cancel an active TDLib download so it can be retried cleanly.
 */
async function cancelDownload(client: Client, fileId: number): Promise<void> {
  try {
    await client.invoke({
      _: "cancelDownloadFile",
      file_id: fileId,
      only_if_pending: false,
    });
    log.debug({ fileId }, "Cancelled TDLib download for retry");
  } catch {
    // Best-effort
  }
}

/**
 * Single download attempt with progress tracking, stall detection, and verification.
 */
async function downloadFileAttempt(
  client: Client,
  numericId: number,
  fileId: string,
  destPath: string,
  totalBytes: number,
  fileName: string,
  onProgress?: ProgressCallback
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    let lastLoggedPercent = 0;
    let settled = false;
    let downloadStarted = false; // True once TDLib reports is_downloading_active
    let lastProgressBytes = 0;
    let lastProgressTime = Date.now();

    // Timeout: 10 minutes per GB, minimum 5 minutes
    // Timeout: 20 minutes per GB, minimum 15 minutes
    const timeoutMs = Math.max(
      5 * 60_000,
      (totalBytes / (1024 * 1024 * 1024)) * 10 * 60_000
      15 * 60_000,
      (totalBytes / (1024 * 1024 * 1024)) * 20 * 60_000
    );
    const timer = setTimeout(() => {
      if (!settled) {
@@ -371,6 +467,23 @@ export async function downloadFile(
      }
    }, timeoutMs);

    // Stall detection: no progress for 5 minutes after download started → reject
    const STALL_TIMEOUT_MS = 5 * 60_000;
    const stallChecker = setInterval(() => {
      if (settled || !downloadStarted) return;
      const stallMs = Date.now() - lastProgressTime;
      if (stallMs >= STALL_TIMEOUT_MS) {
        settled = true;
        cleanup();
        reject(
          new Error(
            `Download stalled for ${fileName} — no progress for ${Math.round(stallMs / 60_000)}min ` +
            `(${lastProgressBytes}/${totalBytes} bytes)`
          )
        );
      }
    }, 30_000);

    // Listen for file update events to track progress
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const handleUpdate = (update: any) => {
@@ -382,6 +495,17 @@ export async function downloadFile(
        const percent =
          totalBytes > 0 ? Math.round((downloaded / totalBytes) * 100) : 0;

        // Track whether the download has actually started
        if (file.local.is_downloading_active) {
          downloadStarted = true;
        }

        // Reset stall timer when bytes advance
        if (downloaded > lastProgressBytes) {
          lastProgressBytes = downloaded;
          lastProgressTime = Date.now();
        }

        // Log at every 10% increment
        if (percent >= lastLoggedPercent + 10) {
          lastLoggedPercent = percent - (percent % 10);
@@ -412,8 +536,11 @@ export async function downloadFile(
        }
      }

      // Download stopped without completing (network error, cancelled, etc.)
      // Download stopped without completing — only if it had actually started.
      // TDLib may emit an initial updateFile with is_downloading_active=false
      // before the download begins; ignoring that prevents false positives.
      if (
        downloadStarted &&
        !file.local.is_downloading_active &&
        !file.local.is_downloading_completed
      ) {
@@ -432,6 +559,7 @@ export async function downloadFile(

    const cleanup = () => {
      clearTimeout(timer);
      clearInterval(stallChecker);
      client.off("update", handleUpdate);
    };
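extractFloodWaitSeconds is imported from ../util/retry.js, but its body is not included in this compare view. A plausible sketch of what it does, offered as an assumption rather than the actual implementation:

// Assumed sketch — the real util/retry.ts implementation is not in this diff.
function extractFloodWaitSeconds(err: unknown): number | null {
  const msg = err instanceof Error ? err.message : String(err);
  const match = msg.match(/FLOOD_WAIT_(\d+)/i) ?? msg.match(/retry after (\d+)/i);
  return match ? parseInt(match[1], 10) : null;
}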
@@ -178,6 +178,7 @@ export async function getTopicMessages(
  const archives: TelegramMessage[] = [];
  const photos: TelegramPhoto[] = [];
  const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
  let maxScannedMessageId: bigint | null = null;

  let currentFromId = 0;
  let totalScanned = 0;
@@ -201,6 +202,7 @@ export async function getTopicMessages(
    messages?: {
      id: number;
      date: number;
      media_album_id?: string;
      content: {
        _: string;
        document?: {
@@ -238,6 +240,12 @@ export async function getTopicMessages(

    totalScanned += result.messages.length;

    // Track highest message ID (first message = newest, since results are newest-first)
    const batchMaxId = BigInt(result.messages[0].id);
    if (maxScannedMessageId === null || batchMaxId > maxScannedMessageId) {
      maxScannedMessageId = batchMaxId;
    }

    for (const msg of result.messages) {
      // Check for archive documents
      const doc = msg.content?.document;
@@ -248,6 +256,7 @@ export async function getTopicMessages(
          fileId: String(doc.document.id),
          fileSize: BigInt(doc.document.size),
          date: new Date(msg.date * 1000),
          mediaAlbumId: msg.media_album_id && msg.media_album_id !== "0" ? msg.media_album_id : undefined,
        });
        continue;
      }
@@ -263,6 +272,7 @@ export async function getTopicMessages(
          caption,
          fileId: String(smallest.photo.id),
          fileSize: smallest.photo.size || smallest.photo.expected_size,
          mediaAlbumId: msg.media_album_id && msg.media_album_id !== "0" ? msg.media_album_id : undefined,
        });
      }
    }
@@ -299,6 +309,7 @@ export async function getTopicMessages(
    archives: archives.reverse(),
    photos: photos.reverse(),
    totalScanned,
    maxScannedMessageId,
  };
}
@@ -3,12 +3,25 @@ import { stat } from "fs/promises";
|
||||
import type { Client } from "tdl";
|
||||
import { config } from "../util/config.js";
|
||||
import { childLogger } from "../util/logger.js";
|
||||
import { withFloodWait } from "../util/retry.js";
|
||||
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
|
||||
|
||||
const log = childLogger("upload");
|
||||
|
||||
/**
|
||||
* Custom error class to distinguish upload stalls from other errors.
|
||||
* When consecutive stalls occur, the caller can use this signal to
|
||||
* recreate the TDLib client (whose event stream may have degraded).
|
||||
*/
|
||||
export class UploadStallError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "UploadStallError";
|
||||
}
|
||||
}
|
||||
|
||||
export interface UploadResult {
|
||||
messageId: bigint;
|
||||
messageIds: bigint[];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -28,7 +41,7 @@ export async function uploadToChannel(
|
||||
filePaths: string[],
|
||||
caption?: string
|
||||
): Promise<UploadResult> {
|
||||
let firstMessageId: bigint | null = null;
|
||||
const allMessageIds: bigint[] = [];
|
||||
|
||||
for (let i = 0; i < filePaths.length; i++) {
|
||||
const filePath = filePaths[i];
|
||||
@@ -49,11 +62,9 @@ export async function uploadToChannel(
|
||||
"Uploading file to channel"
|
||||
);
|
||||
|
||||
const serverMsgId = await sendAndWaitForUpload(client, chatId, filePath, fileCaption, fileName, fileSizeMB);
|
||||
const serverMsgId = await sendWithRetry(client, chatId, filePath, fileCaption, fileName, fileSizeMB);
|
||||
|
||||
if (i === 0) {
|
||||
firstMessageId = serverMsgId;
|
||||
}
|
||||
allMessageIds.push(serverMsgId);
|
||||
|
||||
// Rate limit delay between uploads
|
||||
if (i < filePaths.length - 1) {
|
||||
@@ -61,16 +72,76 @@ export async function uploadToChannel(
|
||||
}
|
||||
}
|
||||
|
||||
if (firstMessageId === null) {
|
||||
if (allMessageIds.length === 0) {
|
||||
throw new Error("Upload failed: no messages sent");
|
||||
}
|
||||
|
||||
log.info(
|
||||
{ chatId: Number(chatId), messageId: Number(firstMessageId), files: filePaths.length },
|
||||
{ chatId: Number(chatId), messageId: Number(allMessageIds[0]), files: filePaths.length },
|
||||
"All uploads confirmed by Telegram"
|
||||
);
|
||||
|
||||
return { messageId: firstMessageId };
|
||||
return { messageId: allMessageIds[0], messageIds: allMessageIds };
|
||||
}
|
||||
|
||||
/**
|
||||
* Retry wrapper for sendAndWaitForUpload.
|
||||
* Handles:
|
||||
* - Rate limits (429 / FLOOD_WAIT) from updateMessageSendFailed — waits and retries
|
||||
* - Stall / timeout — retries with a cooldown
|
||||
*/
|
||||
const MAX_UPLOAD_RETRIES = 3;
|
||||
|
||||
async function sendWithRetry(
|
||||
client: Client,
|
||||
chatId: bigint,
|
||||
filePath: string,
|
||||
caption: string | undefined,
|
||||
fileName: string,
|
||||
fileSizeMB: number
|
||||
): Promise<bigint> {
|
||||
for (let attempt = 0; attempt <= MAX_UPLOAD_RETRIES; attempt++) {
|
||||
try {
|
||||
return await sendAndWaitForUpload(client, chatId, filePath, caption, fileName, fileSizeMB);
|
||||
} catch (err) {
|
||||
const isLastAttempt = attempt >= MAX_UPLOAD_RETRIES;
|
||||
|
||||
// Rate limit from Telegram (429 / FLOOD_WAIT / "retry after N")
|
||||
const waitSeconds = extractFloodWaitSeconds(err);
|
||||
if (waitSeconds !== null && !isLastAttempt) {
|
||||
const jitter = 1000 + Math.random() * 4000;
|
||||
const waitMs = waitSeconds * 1000 + jitter;
|
||||
log.warn(
|
||||
{ fileName, attempt: attempt + 1, maxRetries: MAX_UPLOAD_RETRIES, waitSeconds },
|
||||
`Upload rate-limited — sleeping ${waitSeconds}s before retry`
|
||||
);
|
||||
await sleep(waitMs);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Stall or timeout — retry with a cooldown
|
||||
const errMsg = err instanceof Error ? err.message : "";
|
||||
if (errMsg.includes("stalled") || errMsg.includes("timed out")) {
|
||||
if (!isLastAttempt) {
|
||||
log.warn(
|
||||
{ fileName, attempt: attempt + 1, maxRetries: MAX_UPLOAD_RETRIES },
|
||||
"Upload stalled/timed out — retrying"
|
||||
);
|
||||
await sleep(10_000);
|
||||
continue;
|
||||
}
|
||||
// All stall retries exhausted — throw UploadStallError so the caller
|
||||
// knows the TDLib client's event stream is likely degraded and can
|
||||
// recreate the client before continuing.
|
||||
throw new UploadStallError(
|
||||
`Upload stalled after ${MAX_UPLOAD_RETRIES} retries for ${fileName}`
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
throw new Error(`Upload failed after ${MAX_UPLOAD_RETRIES} retries for ${fileName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -93,11 +164,14 @@ async function sendAndWaitForUpload(
|
||||
let settled = false;
|
||||
let lastLoggedPercent = 0;
|
||||
let tempMsgId: number | null = null;
|
||||
let uploadStarted = false;
|
||||
let lastProgressBytes = 0;
|
||||
let lastProgressTime = Date.now();
|
||||
|
||||
// Timeout: 10 minutes per GB, minimum 10 minutes
|
||||
// Timeout: 20 minutes per GB, minimum 15 minutes
|
||||
const timeoutMs = Math.max(
|
||||
10 * 60_000,
|
||||
(fileSizeMB / 1024) * 10 * 60_000
|
||||
15 * 60_000,
|
||||
(fileSizeMB / 1024) * 20 * 60_000
|
||||
);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
@@ -112,13 +186,39 @@ async function sendAndWaitForUpload(
|
||||
}
|
||||
}, timeoutMs);
|
||||
|
||||
// Stall detection: no progress for 3 minutes after upload started → reject
|
||||
// (reduced from 5min — once data is fully sent, confirmation should arrive quickly;
|
||||
// a 3min silence strongly indicates a degraded TDLib event stream)
|
||||
const STALL_TIMEOUT_MS = 3 * 60_000;
|
||||
const stallChecker = setInterval(() => {
|
||||
if (settled || !uploadStarted) return;
|
||||
const stallMs = Date.now() - lastProgressTime;
|
||||
if (stallMs >= STALL_TIMEOUT_MS) {
|
||||
settled = true;
|
||||
cleanup();
|
||||
reject(
|
||||
new Error(
|
||||
`Upload stalled for ${fileName} — no progress for ${Math.round(stallMs / 60_000)}min`
|
||||
)
|
||||
);
|
||||
}
|
||||
}, 30_000);
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const handleUpdate = (update: any) => {
|
||||
// Track upload progress via updateFile events
|
||||
if (update?._ === "updateFile") {
|
||||
const file = update.file;
|
||||
if (file?.remote?.is_uploading_active && file.expected_size > 0) {
|
||||
uploadStarted = true;
|
||||
|
||||
const uploaded = file.remote.uploaded_size ?? 0;
|
||||
|
||||
// Only reset stall timer when bytes actually advance
|
||||
if (uploaded > lastProgressBytes) {
|
||||
lastProgressBytes = uploaded;
|
||||
lastProgressTime = Date.now();
|
||||
}
|
||||
const total = file.expected_size;
|
||||
const percent = Math.round((uploaded / total) * 100);
|
||||
if (percent >= lastLoggedPercent + 20) {
|
||||
@@ -157,7 +257,9 @@ async function sendAndWaitForUpload(
      settled = true;
      cleanup();
      const errorMsg = update.error?.message ?? "Unknown upload error";
-     reject(new Error(`Upload failed for ${fileName}: ${errorMsg}`));
+     const error = new Error(`Upload failed for ${fileName}: ${errorMsg}`);
+     (error as Error & { code?: number }).code = update.error?.code;
+     reject(error);
    }
  }
}
@@ -165,6 +267,7 @@ async function sendAndWaitForUpload(

  const cleanup = () => {
    clearTimeout(timer);
    clearInterval(stallChecker);
    client.off("update", handleUpdate);
  };

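Attaching the numeric Telegram error code lets callers branch on it instead of string-matching messages. A small illustration (the flood-wait check is an example, not behavior from this diff):

    // Distinguish flood-wait (429) from hard failures using the attached code:
    function isFloodWait(err: unknown): boolean {
      return (err as Error & { code?: number })?.code === 429;
    }
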
@@ -3,10 +3,15 @@ export const config = {
  workerIntervalMinutes: parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10),
  tempDir: process.env.WORKER_TEMP_DIR ?? "/tmp/zips",
  tdlibStateDir: process.env.TDLIB_STATE_DIR ?? "/data/tdlib",
- maxZipSizeMB: parseInt(process.env.WORKER_MAX_ZIP_SIZE_MB ?? "4096", 10),
+ maxZipSizeMB: parseInt(process.env.WORKER_MAX_ZIP_SIZE_MB ?? "204800", 10),
  logLevel: (process.env.LOG_LEVEL ?? "info") as "debug" | "info" | "warn" | "error",
  telegramApiId: parseInt(process.env.TELEGRAM_API_ID ?? "0", 10),
  telegramApiHash: process.env.TELEGRAM_API_HASH ?? "",
  /** Maximum file part size for Telegram upload (in MiB). Default 1950 (under 2GB non-Premium limit).
   * Set to 3900 for Premium accounts (under 4GB limit). */
  maxPartSizeMB: parseInt(process.env.MAX_PART_SIZE_MB ?? "1950", 10),
  /** Time window for auto-grouping ungrouped packages from the same channel (minutes). 0 = disabled. */
  autoGroupTimeWindowMinutes: parseInt(process.env.AUTO_GROUP_TIME_WINDOW_MINUTES ?? "5", 10),
  /** Maximum jitter added to scheduler interval (in minutes) */
  jitterMinutes: 5,
  /** Maximum time span for multipart archive parts (in hours). 0 = no limit. */

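A sanity check on the new maxPartSizeMB default — the same MiB-to-bytes arithmetic runWorkerForAccount uses below — confirming 1950 MiB stays under Telegram's 2 GiB non-Premium cap:

    const partBytes = 1950n * 1024n * 1024n;   // 2_044_723_200
    const twoGiB = 2n * 1024n * 1024n * 1024n; // 2_147_483_648
    console.log(partBytes < twoGiB);           // true — roughly 98 MiB of headroom
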
@@ -2,39 +2,66 @@ import { childLogger } from "./logger.js";

const log = childLogger("mutex");

- let locked = false;
- let holder = "";
- const queue: Array<{ resolve: () => void; reject: (err: Error) => void; label: string }> = [];

/**
 * Maximum time to wait for the TDLib mutex (ms).
 * If the mutex is not available within this time, the operation is rejected.
 * Default: 30 minutes (long enough for large downloads, short enough to detect hangs).
 */
const MUTEX_WAIT_TIMEOUT_MS = 30 * 60 * 1000;

const locks = new Map<string, boolean>();
const holders = new Map<string, string>();
const queues = new Map<
  string,
  Array<{ resolve: () => void; reject: (err: Error) => void; label: string }>
>();

/**
-  * Ensures only one TDLib client runs at a time across the entire worker process.
-  * Both the scheduler (auth, ingestion) and the fetch listener acquire this
-  * before creating any TDLib client.
+  * Force-release a stuck mutex.
+  * This should only be called when the holder is known to be stuck (e.g. after
+  * a cycle timeout). It releases the lock and lets the next queued waiter proceed.
 */
export function forceReleaseMutex(key: string): void {
  if (!locks.has(key)) return;

  const holder = holders.get(key);
  log.warn({ key, holder }, "Force-releasing stuck TDLib mutex");

  locks.delete(key);
  holders.delete(key);
  const next = queues.get(key)?.shift();
  if (next) {
    log.info({ key, next: next.label }, "TDLib mutex force-released to next waiter");
    next.resolve();
  } else {
    queues.delete(key);
    log.info({ key }, "TDLib mutex force-released (no waiters)");
  }
}

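A hedged sketch of the intended call site — the cycle-timeout trigger and its duration are assumptions for illustration, not code from this change:

    // Hypothetical watchdog: if an ingestion cycle wedges, force-release the
    // account's mutex so queued waiters can proceed.
    const CYCLE_TIMEOUT_MS = 2 * 60 * 60 * 1000;
    const watchdog = setTimeout(() => forceReleaseMutex(account.phone), CYCLE_TIMEOUT_MS);
    try {
      await withTdlibMutex(account.phone, "ingest", () => runWorkerForAccount(account));
    } finally {
      clearTimeout(watchdog);
    }
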
/**
+  * Ensures only one TDLib operation runs at a time FOR THE SAME KEY.
+  * Different keys run concurrently — this allows two accounts to ingest in parallel
+  * while still preventing concurrent use of the same account's TDLib state dir.
 *
 * Includes a wait timeout to prevent indefinite blocking if the current holder hangs.
+  * key: the account phone number for account-specific ops (auth, ingest),
+  *      or 'global' for ops that don't belong to a specific account.
+  * label: human-readable name for logging.
 */
export async function withTdlibMutex<T>(
  key: string,
  label: string,
  fn: () => Promise<T>
): Promise<T> {
- if (locked) {
-   log.info({ waiting: label, holder }, "Waiting for TDLib mutex");
+ if (locks.get(key)) {
+   log.info({ waiting: label, key, holder: holders.get(key) }, "Waiting for TDLib mutex");
    await new Promise<void>((resolve, reject) => {
      const timer = setTimeout(() => {
-       const idx = queue.indexOf(entry);
+       const q = queues.get(key) ?? [];
+       const idx = q.indexOf(entry);
        if (idx !== -1) {
-         queue.splice(idx, 1);
-         reject(new Error(
+         q.splice(idx, 1);
+         reject(
+           new Error(
              `TDLib mutex wait timeout after ${MUTEX_WAIT_TIMEOUT_MS / 60_000}min ` +
-             `(waiting: ${label}, holder: ${holder})`
-         ));
+             `(waiting: ${label}, key: ${key}, holder: ${holders.get(key)})`
+           )
+         );
        }
      }, MUTEX_WAIT_TIMEOUT_MS);

@@ -46,25 +73,28 @@ export async function withTdlibMutex<T>(
        reject,
        label,
      };
-     queue.push(entry);

+     if (!queues.has(key)) queues.set(key, []);
+     queues.get(key)!.push(entry);
    });
  }

- locked = true;
- holder = label;
- log.debug({ label }, "TDLib mutex acquired");
+ locks.set(key, true);
+ holders.set(key, label);
+ log.debug({ key, label }, "TDLib mutex acquired");

  try {
    return await fn();
  } finally {
-   locked = false;
-   holder = "";
-   const next = queue.shift();
+   locks.delete(key);
+   holders.delete(key);
+   const next = queues.get(key)?.shift();
    if (next) {
-     log.debug({ next: next.label }, "TDLib mutex releasing to next waiter");
+     log.debug({ key, next: next.label }, "TDLib mutex releasing to next waiter");
      next.resolve();
    } else {
-     log.debug({ label }, "TDLib mutex released");
+     queues.delete(key);
+     log.debug({ key, label }, "TDLib mutex released");
    }
  }
}

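What keying buys, in one usage sketch (phone numbers and callbacks are placeholders):

    await Promise.all([
      withTdlibMutex("+15550000001", "ingest", async () => { /* account A cycle */ }),
      withTdlibMutex("+15550000002", "ingest", async () => { /* account B runs in parallel */ }),
      withTdlibMutex("+15550000001", "auth", async () => { /* queues behind A's ingest */ }),
    ]);
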
@@ -2,13 +2,14 @@ import path from "path";
import { unlink, readdir, mkdir, rm } from "fs/promises";
import { config } from "./util/config.js";
import { childLogger } from "./util/logger.js";
- import { tryAcquireLock, releaseLock } from "./db/locks.js";
+ import { tryAcquireLock, releaseLock, tryAcquireHashLock, releaseHashLock } from "./db/locks.js";
import {
  getSourceChannelMappings,
  getGlobalDestinationChannel,
  packageExistsByHash,
  packageExistsBySourceMessage,
- createPackageWithFiles,
+ createPackageStub,
+ updatePackageWithMetadata,
  createIngestionRun,
  completeIngestionRun,
  failIngestionRun,
@@ -26,6 +27,9 @@ import {
  getExistingChannelsByTelegramId,
  getAccountById,
  deleteOrphanedPackageByHash,
  getUploadedPackageByHash,
  upsertSkippedPackage,
  deleteSkippedPackage,
} from "./db/queries.js";
import type { ActivityUpdate } from "./db/queries.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
@@ -43,7 +47,9 @@ import { readZipCentralDirectory } from "./archive/zip-reader.js";
import { readRarContents } from "./archive/rar-reader.js";
import { read7zContents } from "./archive/sevenz-reader.js";
import { byteLevelSplit, concatenateFiles } from "./archive/split.js";
- import { uploadToChannel } from "./upload/channel.js";
+ import { uploadToChannel, UploadStallError } from "./upload/channel.js";
import { processAlbumGroups, processRuleBasedGroups, processTimeWindowGroups, processPatternGroups, processCreatorGroups, processZipPathGroups, processReplyChainGroups, processCaptionGroups, detectGroupingConflicts, type IndexedPackageRef } from "./grouping.js";
import { db } from "./db/client.js";
import type { TelegramAccount, TelegramChannel } from "@prisma/client";
import type { Client } from "tdl";

@@ -68,10 +74,10 @@ export async function authenticateAccount(

  let client: Client | undefined;
  try {
-   client = await createTdlibClient({
+   client = (await createTdlibClient({
      id: account.id,
      phone: account.phone,
-   });
+   })).client;
    aLog.info("Authentication successful");

    // Auto-fetch channels and create a fetch request result
@@ -126,7 +132,7 @@ export async function processFetchRequest(requestId: string): Promise<void> {
  await updateFetchRequestStatus(requestId, "IN_PROGRESS");
  aLog.info({ accountId: request.accountId }, "Processing fetch request");

- const client = await createTdlibClient({
+ const { client } = await createTdlibClient({
    id: request.account.id,
    phone: request.account.phone,
  });
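All call sites now destructure the same result, so the assumed return shape of createTdlibClient after this change is roughly:

    // Inferred from the destructuring at the call sites (not shown in this diff):
    interface CreateTdlibClientResult {
      client: Client;     // the tdl client instance
      isPremium: boolean; // picks the 2 GB vs ~4 GB upload ceiling below
    }
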
@@ -279,6 +285,8 @@ function createThrottledActivityUpdater(runId: string, minIntervalMs = 2000) {
interface PipelineContext {
  client: Client;
  runId: string;
  accountId: string;
  accountPhone: string;
  channelTitle: string;
  channel: TelegramChannel;
  destChannelTelegramId: bigint;
@@ -295,6 +303,9 @@ interface PipelineContext {
  /** Forum topic ID (null for non-forum). */
  sourceTopicId: bigint | null;
  accountLog: ReturnType<typeof childLogger>;
  maxUploadSize: bigint;
  /** How many consecutive upload stalls have occurred (resets on success). */
  consecutiveStalls: number;
}

/**
@@ -330,10 +341,14 @@ export async function runWorkerForAccount(
    currentStep: "connecting",
  });

- const client = await createTdlibClient({
+ // Use let so the client can be replaced on TDLib recreation after stalls
+ let { client, isPremium } = await createTdlibClient({
    id: account.id,
    phone: account.phone,
  });
  const maxUploadSize = isPremium
    ? 3950n * 1024n * 1024n
    : BigInt(config.maxPartSizeMB) * 1024n * 1024n;

  // Load all chats into TDLib's local cache using loadChats (the recommended API).
  // Without this, getChat/searchChatMessages fail with "Chat not found".
@@ -436,6 +451,8 @@ export async function runWorkerForAccount(
  const pipelineCtx: PipelineContext = {
    client,
    runId: activeRunId,
    accountId: account.id,
    accountPhone: account.phone,
    channelTitle: channel.title,
    channel,
    destChannelTelegramId: destChannel.telegramId,
@@ -445,6 +462,8 @@ export async function runWorkerForAccount(
    topicCreator: null,
    sourceTopicId: null,
    accountLog,
    maxUploadSize,
    consecutiveStalls: 0,
  };

  if (forum) {
@@ -519,6 +538,15 @@ export async function runWorkerForAccount(
          { channelId: channel.id, topic: topic.name, totalScanned: scanResult.totalScanned },
          "No new archives in topic"
        );
        // Still advance topic watermark so we don't re-scan these messages next cycle
        if (scanResult.maxScannedMessageId) {
          await upsertTopicProgress(
            mapping.id,
            topic.topicId,
            topic.name,
            scanResult.maxScannedMessageId
          );
        }
        continue;
      }

@@ -533,14 +561,17 @@ export async function runWorkerForAccount(
      pipelineCtx.channelTitle = `${channel.title} › ${topic.name}`;

      const maxProcessedId = await processArchiveSets(pipelineCtx, scanResult, run.id, progress?.lastProcessedMessageId);
      // Sync client back in case it was recreated during upload stall recovery
      client = pipelineCtx.client;

-     // Only advance progress to the highest successfully processed message
-     if (maxProcessedId) {
+     // Advance progress: use archive watermark if available, fall back to scan watermark
+     const topicWatermark = maxProcessedId ?? scanResult.maxScannedMessageId;
+     if (topicWatermark) {
        await upsertTopicProgress(
          mapping.id,
          topic.topicId,
          topic.name,
-         maxProcessedId
+         topicWatermark
        );
      }
    } catch (topicErr) {
@@ -590,6 +621,11 @@ export async function runWorkerForAccount(

    if (scanResult.archives.length === 0) {
      accountLog.info({ channelId: channel.id, title: channel.title, totalScanned: scanResult.totalScanned }, "No new archives in channel");
      // Still advance watermark to highest scanned message so we don't
      // re-scan these messages next cycle
      if (scanResult.maxScannedMessageId) {
        await updateLastProcessedMessage(mapping.id, scanResult.maxScannedMessageId);
      }
      continue;
    }

@@ -604,10 +640,13 @@ export async function runWorkerForAccount(
    pipelineCtx.channelTitle = channel.title;

    const maxProcessedId = await processArchiveSets(pipelineCtx, scanResult, run.id, mapping.lastProcessedMessageId);
    // Sync client back in case it was recreated during upload stall recovery
    client = pipelineCtx.client;

-   // Only advance progress to the highest successfully processed message
-   if (maxProcessedId) {
-     await updateLastProcessedMessage(mapping.id, maxProcessedId);
+   // Advance progress: use archive watermark if available, fall back to scan watermark
+   const channelWatermark = maxProcessedId ?? scanResult.maxScannedMessageId;
+   if (channelWatermark) {
+     await updateLastProcessedMessage(mapping.id, channelWatermark);
    }
  }
} catch (channelErr) {
@@ -639,6 +678,20 @@ export async function runWorkerForAccount(
  }
}

/**
 * Infer the SkipReason from an error message so the UI shows the correct badge.
 */
function inferSkipReason(errMsg: string): "DOWNLOAD_FAILED" | "UPLOAD_FAILED" | "EXTRACT_FAILED" {
  const lower = errMsg.toLowerCase();
  if (lower.includes("upload") || lower.includes("too many requests") || lower.includes("retry after") || lower.includes("send")) {
    return "UPLOAD_FAILED";
  }
  if (lower.includes("extract") || lower.includes("metadata") || lower.includes("central directory") || lower.includes("archive")) {
    return "EXTRACT_FAILED";
  }
  return "DOWNLOAD_FAILED";
}

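How the heuristics classify a few representative messages:

    inferSkipReason("Too Many Requests: retry after 30");     // "UPLOAD_FAILED"
    inferSkipReason("central directory not found in part 1"); // "EXTRACT_FAILED"
    inferSkipReason("connection reset while fetching file");  // "DOWNLOAD_FAILED"
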
/**
 * Process a scan result through the archive pipeline:
 * group → download → hash → dedup → metadata → split → upload → preview → index.
@@ -703,10 +756,11 @@ async function processArchiveSets(

  // Track the highest message ID that was successfully processed
  let maxProcessedId: bigint | null = null;
  const indexedPackageRefs: IndexedPackageRef[] = [];

  for (let setIdx = 0; setIdx < archiveSets.length; setIdx++) {
    try {
-     await processOneArchiveSet(
+     const packageId = await processOneArchiveSet(
        ctx,
        archiveSets[setIdx],
        setIdx,
@@ -715,6 +769,15 @@ async function processArchiveSets(
        ingestionRunId
      );

      if (packageId) {
        const firstPart = archiveSets[setIdx].parts[0];
        indexedPackageRefs.push({
          packageId,
          sourceMessageId: firstPart.id,
          mediaAlbumId: firstPart.mediaAlbumId,
        });
      }

      // Set completed (ingested or confirmed duplicate) — advance watermark
      const setMaxId = archiveSets[setIdx].parts.reduce(
        (max, p) => (p.id > max ? p.id : max),
@@ -723,13 +786,147 @@ async function processArchiveSets(
      if (setMaxId > (maxProcessedId ?? 0n)) {
        maxProcessedId = setMaxId;
      }

      // Reset stall counter on any successful upload
      ctx.consecutiveStalls = 0;
    } catch (setErr) {
      // If a set fails, do NOT advance the watermark past it
      accountLog.warn(
        { err: setErr, baseName: archiveSets[setIdx].baseName },
        "Archive set failed, watermark will not advance past this set"
      );

      // ── TDLib client recreation on repeated upload stalls ──
      // When the TDLib event stream degrades, uploads complete (bytes sent)
      // but confirmations never arrive. Retrying with the same broken client
      // is futile. Recreate the client to get a fresh connection.
      if (setErr instanceof UploadStallError) {
        ctx.consecutiveStalls++;
        accountLog.warn(
          { consecutiveStalls: ctx.consecutiveStalls },
          "Upload stall detected — TDLib event stream may be degraded"
        );

        // After 1 stalled set (= 3 failed retry attempts already), recreate the client
        if (ctx.consecutiveStalls >= 1) {
          accountLog.info("Recreating TDLib client after consecutive upload stalls");
          try {
            await closeTdlibClient(ctx.client);
          } catch (closeErr) {
            accountLog.warn({ err: closeErr }, "Error closing stale TDLib client");
          }

          try {
            const { client: newClient } = await createTdlibClient({
              id: ctx.accountId,
              phone: ctx.accountPhone,
            });
            ctx.client = newClient;

            // Reload chats so the new client can access channels
            try {
              for (let page = 0; page < 500; page++) {
                await newClient.invoke({
                  _: "loadChats",
                  chat_list: { _: "chatListMain" },
                  limit: 100,
                });
              }
            } catch {
              // 404 = all loaded (expected)
            }

            ctx.consecutiveStalls = 0;
            accountLog.info("TDLib client recreated successfully — continuing ingestion");
          } catch (recreateErr) {
            accountLog.error(
              { err: recreateErr },
              "Failed to recreate TDLib client — aborting remaining uploads"
            );
            break;
          }
        }
      }

      // Record the failure for visibility in the UI
      try {
        const archiveSet = archiveSets[setIdx];
        const totalSize = archiveSet.parts.reduce((sum, p) => sum + p.fileSize, 0n);
        const errMsg = setErr instanceof Error ? setErr.message : String(setErr);
        await upsertSkippedPackage({
          fileName: archiveSet.parts[0].fileName,
          fileSize: totalSize,
          reason: inferSkipReason(errMsg),
          errorMessage: errMsg,
          sourceChannelId: ctx.channel.id,
          sourceMessageId: archiveSet.parts[0].id,
          sourceTopicId: ctx.sourceTopicId,
          isMultipart: archiveSet.isMultipart,
          partCount: archiveSet.parts.length,
          accountId: ctx.accountId,
        });
        // Also create a persistent notification
        await db.systemNotification.create({
          data: {
            type: inferSkipReason(errMsg) === "UPLOAD_FAILED" ? "UPLOAD_FAILED" : "DOWNLOAD_FAILED",
            severity: "WARNING",
            title: `Failed to process ${archiveSet.parts[0].fileName}`,
            message: errMsg,
            context: {
              fileName: archiveSet.parts[0].fileName,
              sourceChannelId: ctx.channel.id,
              sourceMessageId: Number(archiveSet.parts[0].id),
              channelTitle: ctx.channelTitle,
              reason: inferSkipReason(errMsg),
            },
          },
        });
      } catch {
        // Best-effort — don't fail the run if skip recording fails
      }
    }
  }

  // Post-processing: group packages by Telegram album ID
  if (indexedPackageRefs.length > 0) {
    await processAlbumGroups(
      ctx.client,
      channel.id,
      indexedPackageRefs,
      scanResult.photos
    );

    // Auto-grouping passes (gated by per-channel flag)
    const channelRecord = await db.telegramChannel.findUnique({
      where: { id: channel.id },
      select: { autoGroupEnabled: true },
    });

    if (channelRecord?.autoGroupEnabled !== false) {
      // Learned rule-based grouping (from manual overrides)
      await processRuleBasedGroups(channel.id, indexedPackageRefs);

      // Time-window grouping for remaining ungrouped packages
      await processTimeWindowGroups(channel.id, indexedPackageRefs);

      // Pattern-based grouping (date patterns, project slugs)
      await processPatternGroups(channel.id, indexedPackageRefs);

      // Creator-based grouping (3+ files from same creator)
      await processCreatorGroups(channel.id, indexedPackageRefs);

      // ZIP path prefix grouping (shared root folder inside archives)
      await processZipPathGroups(channel.id, indexedPackageRefs);

      // Reply chain grouping (messages replying to same root)
      await processReplyChainGroups(channel.id, indexedPackageRefs);

      // Caption fuzzy match grouping
      await processCaptionGroups(channel.id, indexedPackageRefs);
    }

    // Check for potential grouping conflicts
    await detectGroupingConflicts(channel.id, indexedPackageRefs);
  }

  return maxProcessedId;
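The shape of the refs handed to the grouping passes, inferred from the push above (the canonical type lives in ./grouping.js; field types are assumptions):

    type IndexedPackageRef = {
      packageId: string;
      sourceMessageId: bigint;
      mediaAlbumId: bigint | null; // Telegram media_album_id, null when not part of an album
    };
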
@@ -745,7 +942,7 @@ async function processOneArchiveSet(
  totalSets: number,
  previewMatches: Map<string, { id: bigint; fileId: string }>,
  ingestionRunId: string
- ): Promise<void> {
+ ): Promise<string | null> {
  const {
    client, runId, channelTitle, channel,
    destChannelTelegramId, destChannelId,
@@ -775,7 +972,7 @@ async function processOneArchiveSet(
      totalFiles: totalSets,
      zipsDuplicate: counters.zipsDuplicate,
    });
-   return;
+   return null;
  }

  // ── Size guard: skip archives that exceed WORKER_MAX_ZIP_SIZE_MB ──
@@ -798,7 +995,18 @@ async function processOneArchiveSet(
      currentFileNum: setIdx + 1,
      totalFiles: totalSets,
    });
-   return;
+   await upsertSkippedPackage({
+     fileName: archiveName,
+     fileSize: totalArchiveSize,
+     reason: "SIZE_LIMIT",
+     sourceChannelId: channel.id,
+     sourceMessageId: archiveSet.parts[0].id,
+     sourceTopicId: ctx.sourceTopicId,
+     isMultipart: archiveSet.isMultipart,
+     partCount: archiveSet.parts.length,
+     accountId: ctx.accountId,
+   });
+   return null;
  }

  const tempPaths: string[] = [];
@@ -904,7 +1112,36 @@ async function processOneArchiveSet(
      totalFiles: totalSets,
      zipsDuplicate: counters.zipsDuplicate,
    });
-   return;
+   return null;
  }

  // ── Hash lock: prevent concurrent workers racing on shared-channel archives ──
  const hashLockAcquired = await tryAcquireHashLock(contentHash);
  if (!hashLockAcquired) {
    counters.zipsDuplicate++;
    accountLog.info(
      { fileName: archiveName, hash: contentHash.slice(0, 16) },
      "Hash lock held by another worker — skipping concurrent duplicate"
    );
    return null;
  }

  let entries: { path: string; fileName: string; extension: string | null; compressedSize: bigint; uncompressedSize: bigint; crc32: string | null }[] = [];
  let creator: string | null = null;
  const tags: string[] = [];
  let stub: { id: string } | null = null;

  try {
    // Re-check after acquiring lock: another worker may have finished between
    // the first check above and this point.
    const existsAfterLock = await packageExistsByHash(contentHash);
    if (existsAfterLock) {
      counters.zipsDuplicate++;
      accountLog.debug(
        { fileName: archiveName, hash: contentHash.slice(0, 16) },
        "Duplicate detected after acquiring hash lock — skipping"
      );
      return null;
    }

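The only property the pipeline depends on is that acquisition is atomic: when two workers race on the same hash, exactly one wins. One way to get that from the Prisma client already imported as db — the hashLock model here is hypothetical, not part of this diff:

    // Sketch: a unique constraint on `hash` makes the insert the atomic test.
    async function tryAcquireHashLockSketch(hash: string): Promise<boolean> {
      try {
        await db.hashLock.create({ data: { hash } });
        return true;  // row inserted — lock acquired
      } catch {
        return false; // unique violation — another worker holds it
      }
    }
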
    // ── Reading metadata ──
@@ -917,7 +1154,6 @@ async function processOneArchiveSet(
      totalFiles: totalSets,
    });

-   let entries: { path: string; fileName: string; extension: string | null; compressedSize: bigint; uncompressedSize: bigint; crc32: string | null }[] = [];
    try {
      if (archiveSet.type === "ZIP") {
        entries = await readZipCentralDirectory(tempPaths);
@@ -949,7 +1185,7 @@ async function processOneArchiveSet(
      (sum, p) => sum + p.fileSize,
      0n
    );
-   const MAX_UPLOAD_SIZE = 2n * 1024n * 1024n * 1024n;
+   const MAX_UPLOAD_SIZE = ctx.maxUploadSize;
    const hasOversizedPart = archiveSet.parts.some((p) => p.fileSize > MAX_UPLOAD_SIZE);

    if (hasOversizedPart) {
@@ -964,7 +1200,7 @@ async function processOneArchiveSet(
      });
      const concatPath = path.join(setDir, `${archiveSet.baseName}.concat`);
      await concatenateFiles(tempPaths, concatPath);
-     splitPaths = await byteLevelSplit(concatPath);
+     splitPaths = await byteLevelSplit(concatPath, ctx.maxUploadSize);
      uploadPaths = splitPaths;
      // Clean up the concat intermediate file
      await unlink(concatPath).catch(() => {});
@@ -978,11 +1214,65 @@ async function processOneArchiveSet(
        currentFileNum: setIdx + 1,
        totalFiles: totalSets,
      });
-     splitPaths = await byteLevelSplit(tempPaths[0]);
+     splitPaths = await byteLevelSplit(tempPaths[0], ctx.maxUploadSize);
      uploadPaths = splitPaths;
    }

    // ── Hash verification after split ──
    // If we split/repacked, verify the split parts hash matches the original
    if (splitPaths.length > 0) {
      const splitHash = await hashParts(splitPaths);
      if (splitHash !== contentHash) {
        accountLog.error(
          { fileName: archiveName, originalHash: contentHash, splitHash, parts: splitPaths.length },
          "Hash mismatch after split — file may be corrupted"
        );
        // Record notification for visibility
        try {
          await db.systemNotification.create({
            data: {
              type: "HASH_MISMATCH",
              severity: "ERROR",
              title: `Hash mismatch after splitting ${archiveName}`,
              message: `Expected ${contentHash.slice(0, 16)}… but got ${splitHash.slice(0, 16)}… after splitting into ${splitPaths.length} parts`,
              context: {
                fileName: archiveName,
                originalHash: contentHash,
                splitHash,
                partCount: splitPaths.length,
                sourceChannelId: channel.id,
              },
            },
          });
        } catch {
          // Best-effort notification
        }
        throw new Error(`Hash mismatch after split for ${archiveName}: expected ${contentHash}, got ${splitHash}`);
      }
      accountLog.debug(
        { fileName: archiveName, hash: contentHash.slice(0, 16), parts: splitPaths.length },
        "Split hash verified — matches original"
      );
    }

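This check only works because byteLevelSplit is a pure byte-level cut: streaming the parts, in order, through one digest must reproduce the original file's hash. A sketch of what hashParts is assumed to do (SHA-256 and the helper name are assumptions):

    import { createHash } from "crypto";
    import { createReadStream } from "fs";

    async function hashPartsSketch(paths: string[]): Promise<string> {
      const hash = createHash("sha256");
      for (const p of paths) {
        // Parts must be consumed in order for the digest to match the original.
        await new Promise<void>((resolve, reject) => {
          createReadStream(p)
            .on("data", (chunk) => hash.update(chunk))
            .on("end", () => resolve())
            .on("error", reject);
        });
      }
      return hash.digest("hex");
    }
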
    // ── Uploading ──
    // Check if a prior run already uploaded this file (orphaned upload scenario:
    // file reached Telegram but DB write failed or worker crashed before indexing)
    const existingUpload = await getUploadedPackageByHash(contentHash);
    let destResult: { messageId: bigint; messageIds: bigint[] };

    if (existingUpload && existingUpload.destMessageId) {
      accountLog.info(
        { fileName: archiveName, destMessageId: Number(existingUpload.destMessageId) },
        "Reusing existing upload (file already on destination channel)"
      );
      destResult = {
        messageId: existingUpload.destMessageId,
        messageIds: existingUpload.destMessageIds?.length
          ? (existingUpload.destMessageIds as bigint[])
          : [existingUpload.destMessageId],
      };
    } else {
      const uploadLabel = uploadPaths.length > 1
        ? ` (${uploadPaths.length} parts)`
        : "";
@@ -995,13 +1285,82 @@ async function processOneArchiveSet(
        totalFiles: totalSets,
      });

-     const destResult = await uploadToChannel(
+     destResult = await uploadToChannel(
        client,
        destChannelTelegramId,
        uploadPaths
      );
    }

    // ── Post-upload integrity check ──
    // Verify the files on disk still match before we index
    if (uploadPaths.length > 0 && !existingUpload) {
      try {
        const postUploadHash = await hashParts(uploadPaths);
        if (splitPaths.length > 0) {
          // Split files — hash should match the split hash (already verified above)
          // No additional check needed since we verified split hash = original hash
        } else if (postUploadHash !== contentHash) {
          accountLog.error(
            { fileName: archiveName, originalHash: contentHash, postUploadHash },
            "Hash changed between hashing and upload — possible disk corruption"
          );
          await db.systemNotification.create({
            data: {
              type: "HASH_MISMATCH",
              severity: "ERROR",
              title: `Post-upload hash mismatch: ${archiveName}`,
              message: `Hash changed between download and upload. Original: ${contentHash.slice(0, 16)}…, post-upload: ${postUploadHash.slice(0, 16)}…`,
              context: { fileName: archiveName, originalHash: contentHash, postUploadHash, sourceChannelId: channel.id },
            },
          });
        }
      } catch {
        // Best-effort — don't fail the ingestion
      }
    }

    // ── Phase 1: Stub record — persisted immediately after upload ──
    await deleteOrphanedPackageByHash(contentHash);

    creator =
      topicCreator ??
      extractCreatorFromFileName(archiveName) ??
      extractCreatorFromChannelTitle(channelTitle) ??
      null;

    if (channel.category) {
      tags.push(channel.category);
    }

    stub = await createPackageStub({
      contentHash,
      fileName: archiveName,
      fileSize: totalSize,
      archiveType: archiveSet.type === "7Z" ? "SEVEN_Z" : archiveSet.type,
      sourceChannelId: channel.id,
      sourceMessageId: archiveSet.parts[0].id,
      sourceTopicId,
      destChannelId,
      destMessageId: destResult.messageId,
      destMessageIds: destResult.messageIds,
      isMultipart: archiveSet.parts.length > 1 || uploadPaths.length > 1,
      partCount: uploadPaths.length,
      ingestionRunId,
      creator,
      tags,
    });

    counters.zipsIngested++;
    await deleteSkippedPackage(channel.id, archiveSet.parts[0].id);
  } finally {
    await releaseHashLock(contentHash);
  }

  if (!stub) return null;

  // ── Preview thumbnail ──
  // (moved here from before stub creation — lock is released, preview doesn't need it)
  let previewData: Buffer | null = null;
  let previewMsgId: bigint | null = null;
  const matchedPhoto = previewMatches.get(archiveSet.baseName);
@@ -1015,8 +1374,6 @@ async function processOneArchiveSet(
      totalFiles: totalSets,
    });
    previewData = await downloadPhotoThumbnail(client, matchedPhoto.fileId);
-   // Only set previewMsgId if we actually got the image data —
-   // otherwise the UI thinks there's a preview but the API returns 404
    if (previewData) {
      previewMsgId = matchedPhoto.id;
    }
@@ -1039,13 +1396,7 @@ async function processOneArchiveSet(
    }
  }

- // ── Resolve creator: topic name > filename extraction > channel title > null ──
- const creator = topicCreator
-   ?? extractCreatorFromFileName(archiveName)
-   ?? extractCreatorFromChannelTitle(channelTitle)
-   ?? null;

- // ── Indexing ──
+ // ── Phase 2: Update stub with file entries and preview ──
  await updateRunActivity(runId, {
    currentActivity: `Saving metadata for ${archiveName} (${entries.length} files)`,
    currentStep: "indexing",
@@ -1055,38 +1406,12 @@ async function processOneArchiveSet(
    totalFiles: totalSets,
  });

- // Clean up any orphaned record (same hash but no dest upload) before creating
- await deleteOrphanedPackageByHash(contentHash);

- // Auto-inherit source channel category as initial tag
- const tags: string[] = [];
- if (channel.category) {
-   tags.push(channel.category);
- }

- await createPackageWithFiles({
-   contentHash,
-   fileName: archiveName,
-   fileSize: totalSize,
-   archiveType: archiveSet.type === "7Z" ? "SEVEN_Z" : archiveSet.type,
-   sourceChannelId: channel.id,
-   sourceMessageId: archiveSet.parts[0].id,
-   sourceTopicId,
-   destChannelId,
-   destMessageId: destResult.messageId,
-   isMultipart:
-     archiveSet.parts.length > 1 || uploadPaths.length > 1,
-   partCount: uploadPaths.length,
-   ingestionRunId,
-   creator,
-   tags,
+ await updatePackageWithMetadata(stub.id, {
-   files: entries,
    previewData,
    previewMsgId,
+   files: entries,
  });

- counters.zipsIngested++;

  await updateRunActivity(runId, {
    currentActivity: `Ingested ${archiveName} (${entries.length} files indexed)`,
    currentStep: "complete",
@@ -1101,6 +1426,8 @@ async function processOneArchiveSet(
    { fileName: archiveName, contentHash, fileCount: entries.length, creator },
    "Archive ingested"
  );

  return stub.id;
} finally {
  // ALWAYS delete temp files and the set directory
  await deleteFiles([...tempPaths, ...splitPaths]);