Mirror of https://github.com/xCyanGrizzly/DragonsStash.git (synced 2026-05-11 06:11:15 +00:00)

Compare commits: 108 commits on copilot/fi... (head 527aca7c25)

| SHA1 |
|---|
| 527aca7c25 |
| a4156b2ac6 |
| d50c68f67c |
| f6e7f5ed3c |
| e7f213eec4 |
| 20b7d28fdf |
| 21663fc29e |
| 218ccb9282 |
| b632533f54 |
| 4baf5aad83 |
| ad7790c07b |
| e4398caebe |
| 6eb7129637 |
| d6386209be |
| fe28c31b9e |
| 55bdf3c890 |
| 5506c7d91b |
| 5a3550fa10 |
| ad3d42a997 |
| dd0d246a77 |
| dcc1c97053 |
| 71c3228e44 |
| 094001f9f7 |
| 0faacc214b |
| d53e581623 |
| 780e6200d8 |
| 9642adaba7 |
| 9bc9271f11 |
| bd358a134b |
| 1425db8774 |
| aef76828ef |
| 29e95f780c |
| 5fd341dfc4 |
| e2dd3bb9d0 |
| ccf6f9000d |
| a4c264a144 |
| f4488a079f |
| 729f296232 |
| a48f9c24a7 |
| 84bb167ce6 |
| 7cd84dbf02 |
| c00fc528ac |
| 1fc2d3e1ae |
| ab558e00f5 |
| bf093cdfca |
| a90f653314 |
| 9ac66e9d7d |
| 36a7e3d5f4 |
| 53a76a8136 |
| ba3d3a6040 |
| fe7a548fef |
| 4a44374bb7 |
| c7eb077e0d |
| 031a4687fb |
| 30fb96b3f9 |
| 9a077a3648 |
| 2ceba66313 |
| 036dadcb21 |
| 541ae0c614 |
| b7a76fd932 |
| b75b0e1f91 |
| 50e7e02b2d |
| dea419b778 |
| 053eeed6be |
| d5725bd52e |
| 48726b9122 |
| 1b8df48768 |
| 726f55a943 |
| b08140b4f9 |
| 761d5e0790 |
| d7bbb7587e |
| 2763de2711 |
| 6926df9a2c |
| 651e9e6bdd |
| 8d508d5a86 |
| 2bb3caf7d9 |
| 8d95752106 |
| 22419106c1 |
| e45de85c69 |
| 71a2e6a5e8 |
| 1436b630e2 |
| 43af23d3be |
| 49b82a352b |
| 2e242912af |
| 9adbdb2a77 |
| ad71346468 |
| e19a80897d |
| 22da4dfad2 |
| 22bcacf3bd |
| 15da57b8c0 |
| 8f1a912ccb |
| 81b65912aa |
| 5eb2cf05b9 |
| f73d06b3d9 |
| cac3d518e1 |
| 987167de0c |
| 4f331d5411 |
| 8088a86feb |
| b53934ebf2 |
| 464c86b32a |
| fc00fb6f2e |
| 0c0c9c7f23 |
| 82d5fc1812 |
| 9120f0fb5d |
| 5d88f9beb3 |
| 3704708970 |
| 0c789eabd6 |
| 9a88914f11 |

.claude/settings.json (new file, +5 lines)

@@ -0,0 +1,5 @@
{
  "enabledPlugins": {
    "superpowers@superpowers-marketplace": true
  }
}

@@ -83,7 +83,13 @@
     "Bash(git -C /mnt/c/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash log --oneline -10)",
     "Bash(git -C \"C:/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash\" status --short)",
     "Bash(timeout:*)",
-    "mcp__Claude_Preview__preview_start"
+    "mcp__Claude_Preview__preview_start",
+    "Bash(cat:*)",
+    "Bash(grep:*)",
+    "Bash(wait:*)",
+    "WebSearch",
+    "Bash(SKILL_CREATOR_PATH=\"C:\\\\Users\\\\A00963355\\\\.claude\\\\plugins\\\\cache\\\\claude-plugins-official\\\\skill-creator\\\\d5c15b861cd2\\\\skills\\\\skill-creator\" && WORKSPACE=\"C:\\\\Users\\\\A00963355\\\\OneDrive - Amaris Zorggroep\\\\Documents\\\\VScodeProjects\\\\DragonsStash\\\\.claude\\\\skills\\\\tdlib-telegram-workspace\\\\iteration-1\" && python \"$SKILL_CREATOR_PATH/eval-viewer/generate_review.py\" \"$WORKSPACE\" --skill-name \"tdlib-telegram\" --benchmark \"$WORKSPACE/benchmark.json\" --static \"$WORKSPACE/review.html\" 2>&1)",
+    "Bash(start:*)"
   ]
 }
}

@@ -0,0 +1,46 @@
{
  "skill_name": "tdlib-telegram",
  "iteration": 1,
  "configs": [
    {
      "name": "with_skill",
      "pass_rate": {"mean": 1.0, "stddev": 0.0},
      "tokens": {"mean": 53200, "stddev": 14800},
      "time_seconds": {"mean": 123.5, "stddev": 16.7}
    },
    {
      "name": "without_skill",
      "pass_rate": {"mean": 0.857, "stddev": 0.134},
      "tokens": {"mean": 56467, "stddev": 12100},
      "time_seconds": {"mean": 156.4, "stddev": 39.7}
    }
  ],
  "delta": {
    "pass_rate": "+14.3%",
    "tokens": "-5.8%",
    "time": "-21.0%"
  },
  "evals": [
    {
      "name": "broadcast-to-all-users",
      "with_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 35365, "time_seconds": 107.6},
      "without_skill": {"pass_rate": 0.6, "passed": 3, "total": 5, "tokens": 69214, "time_seconds": 200.2}
    },
    {
      "name": "flood-wait-during-scan",
      "with_skill": {"pass_rate": 1.0, "passed": 4, "total": 4, "tokens": 63079, "time_seconds": 140.9},
      "without_skill": {"pass_rate": 1.0, "passed": 4, "total": 4, "tokens": 45601, "time_seconds": 122.3}
    },
    {
      "name": "download-and-reupload-file",
      "with_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 61157, "time_seconds": 122.1},
      "without_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 54587, "time_seconds": 146.7}
    }
  ],
  "analyst_notes": [
    "The skill's biggest impact was on Eval 1 (broadcast): the baseline MISSED both withFloodWait retry wrapping and inter-message delay — the two most critical patterns for avoiding rate limits during bulk sends. This is exactly the kind of bug the skill is designed to prevent.",
    "Eval 2 (FLOOD_WAIT debugging) was a near-tie. Both versions correctly diagnosed the problem and proposed adaptive backoff. The skill version was slightly more thorough: it added pagination-level retry with sleep(waitSec) instead of just re-throwing, meaning it can survive even after withFloodWait's retries are exhausted.",
    "Eval 3 (download/reupload) was also close. Both correctly composed existing primitives. The skill version was more explicit about WHY certain patterns matter (referencing the skill's documentation), which helps future maintainers understand the code.",
    "The skill version was faster on average (-21% time) and used fewer tokens (-5.8%), likely because the skill front-loaded the knowledge instead of requiring the agent to discover it by reading source files."
  ]
}

@@ -0,0 +1,12 @@
{
  "eval_id": 1,
  "eval_name": "broadcast-to-all-users",
  "prompt": "Add a new bot command /broadcast that sends a text message to ALL users who have a TelegramLink in the database. The admin triggers it from the web app. Add it to the bot's command handler and create an API endpoint that triggers it.",
  "assertions": [
    {"text": "uses_sequential_queue: Messages sent one at a time, no Promise.all or concurrent sends", "type": "required"},
    {"text": "uses_flood_wait_retry: Each send call wrapped in withFloodWait or equivalent FLOOD_WAIT-aware retry", "type": "required"},
    {"text": "has_per_message_delay: Includes a sleep/delay between individual sends", "type": "recommended"},
    {"text": "handles_per_user_errors: One failed send does not abort the entire broadcast loop", "type": "required"},
    {"text": "has_api_endpoint: Creates an API route or server action to trigger the broadcast from the web app", "type": "required"}
  ]
}

@@ -0,0 +1,10 @@
{
  "run_id": "eval-1-with_skill",
  "expectations": [
    {"text": "uses_sequential_queue", "passed": true, "evidence": "Uses a for...of loop iterating links one at a time. No Promise.all. Comment: '~20 msgs/sec — well under the 30 msgs/sec global bot limit'"},
    {"text": "uses_flood_wait_retry", "passed": true, "evidence": "Each send wrapped in withFloodWait(): 'await withFloodWait(() => sendTextMessage(link.telegramUserId, data.message, \"textParseModeHTML\"))'. Created a new bot/src/util/flood-wait.ts with full extractFloodWaitSeconds + retry logic."},
    {"text": "has_per_message_delay", "passed": true, "evidence": "await sleep(50) after each send — explicitly commented as '~20 msgs/sec'"},
    {"text": "handles_per_user_errors", "passed": true, "evidence": "try/catch around each send with 'failed++' counter, continues to next user"},
    {"text": "has_api_endpoint", "passed": true, "evidence": "Created src/app/api/telegram/bot/broadcast/route.ts with admin auth, 4096-char validation, pg_notify('bot_broadcast')"}
  ]
}

@@ -0,0 +1,340 @@
# /broadcast Implementation Plan

## Approach

The broadcast feature has two parts:

1. **API endpoint** (`POST /api/telegram/bot/broadcast`) — called from the web app by an admin. It accepts a `message` string, looks up ALL `TelegramLink` records in the database, and triggers the bot to send the message to each linked user via `pg_notify`.
2. **Bot-side handler** — a new `bot_broadcast` pg_notify channel listener in `send-listener.ts` that receives the broadcast payload and sequentially sends the text message to every linked Telegram user.

The `/broadcast` bot command itself is not a user-facing Telegram command (regular users should not be able to trigger it). It is triggered exclusively through the admin API endpoint.

## Skill Patterns Applied

- **Sequential Send Queue** (from skill): Never fire concurrent sends to multiple users. The broadcast iterates users sequentially with `await sleep(50)` between sends (~20 msgs/sec, well under the 30 msgs/sec global bot limit).
- **FLOOD_WAIT handling** (from skill): Every `sendTextMessage` call is wrapped with `withFloodWait()`, which extracts the wait duration from errors and retries with jitter.
- **Anti-pattern avoidance**: No `Promise.all(users.map(...))` — that would instantly hit the 30 msgs/sec global limit (see the sketch below).
- **Message text length limit**: The API endpoint validates that the broadcast message does not exceed 4,096 characters (Telegram's limit, per the skill).
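
For contrast, here is a minimal sketch of the two loop shapes. It assumes the `withFloodWait` and `sleep` helpers from File 1 below; `sendOne` is a hypothetical stand-in for the wrapped `sendTextMessage` call:

```typescript
import { withFloodWait, sleep } from "./util/flood-wait.js";

type Link = { telegramUserId: bigint };

// `sendOne` is a hypothetical single-send callback; the real loop
// (File 3d below) calls sendTextMessage directly.
async function broadcastShapes(links: Link[], sendOne: (l: Link) => Promise<void>) {
  // ANTI-PATTERN: concurrent fan-out. Fires every send at once and
  // instantly exceeds the 30 msgs/sec global bot limit:
  //   await Promise.all(links.map((link) => sendOne(link)));

  // Correct shape: sequential, retried, paced.
  for (const link of links) {
    await withFloodWait(() => sendOne(link)); // auto-retry on FLOOD_WAIT
    await sleep(50); // ~20 msgs/sec pacing
  }
}
```
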
---

## File 1: `bot/src/util/flood-wait.ts` (NEW)

Extracted from the skill's recommended FLOOD_WAIT pattern so it can be reused by both the existing send logic and the new broadcast logic.

```typescript
import { childLogger } from "./logger.js";

const log = childLogger("flood-wait");

function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/**
 * Extract the mandatory wait duration (in seconds) from a Telegram
 * FLOOD_WAIT error. Returns null when the error is not rate-limit related.
 */
export function extractFloodWaitSeconds(err: unknown): number | null {
  const message = err instanceof Error ? err.message : String(err);

  // Pattern 1: FLOOD_WAIT_30
  const flood = message.match(/FLOOD_WAIT_(\d+)/i);
  if (flood) return parseInt(flood[1], 10);

  // Pattern 2: "retry after 30"
  const retry = message.match(/retry after (\d+)/i);
  if (retry) return parseInt(retry[1], 10);

  // Pattern 3: HTTP 429 without explicit seconds
  if (String((err as any)?.code) === "429") return 30;

  return null; // Not a rate limit error
}

/**
 * Wrap any async Telegram operation with automatic FLOOD_WAIT retry.
 * Adds random jitter (1-5 s) to prevent thundering-herd retries.
 */
export async function withFloodWait<T>(
  fn: () => Promise<T>,
  maxRetries = 5
): Promise<T> {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (err) {
      const wait = extractFloodWaitSeconds(err);
      if (wait === null || attempt >= maxRetries) throw err;

      const jitter = 1000 + Math.random() * 4000;
      log.warn(
        { wait, attempt, jitter: Math.round(jitter) },
        "FLOOD_WAIT received — backing off"
      );
      await sleep(wait * 1000 + jitter);
    }
  }
  throw new Error("Unreachable");
}

export { sleep };
```

---

## File 2: `bot/src/db/queries.ts` (MODIFIED — add one function)

Add this function at the bottom of the existing file, after the `getGlobalDestinationChannel` function:

```typescript
// ── Broadcast ──

/**
 * Fetch ALL TelegramLink records (users who linked their Telegram account).
 * Used by the broadcast feature to send a message to every linked user.
 */
export async function getAllTelegramLinks() {
  return db.telegramLink.findMany({
    select: {
      telegramUserId: true,
      telegramName: true,
    },
  });
}
```

---

## File 3: `bot/src/send-listener.ts` (MODIFIED — add broadcast channel)

Add the `bot_broadcast` channel to the existing listener. The changes are:

### 3a. Add imports for the new query and flood-wait utility

At the top of the file, update the imports:

```typescript
import {
  getPendingSendRequest,
  updateSendRequest,
  findMatchingSubscriptions,
  getGlobalDestinationChannel,
  getAllTelegramLinks, // ← NEW
} from "./db/queries.js";
import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
import { withFloodWait, sleep } from "./util/flood-wait.js"; // ← NEW
```

### 3b. Subscribe to the new pg_notify channel

Inside `connectListener()`, after the existing LISTEN statements, add:

```typescript
await pgClient.query("LISTEN bot_broadcast");
```
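
To sanity-check the new channel locally, a one-off script can fire a notification by hand. This is a hypothetical manual test, assuming the standard `pg` package (which the listener already uses) and a `DATABASE_URL` environment variable:

```typescript
import { Client } from "pg";

// Fire a test broadcast notification; the running send-listener
// should pick it up on the bot_broadcast channel.
async function testBroadcastChannel(): Promise<void> {
  const client = new Client({ connectionString: process.env.DATABASE_URL });
  await client.connect();
  // pg_notify(channel, payload) is the function form of NOTIFY.
  await client.query("SELECT pg_notify('bot_broadcast', $1)", [
    JSON.stringify({ message: "Hello from a test broadcast" }),
  ]);
  await client.end();
}

testBroadcastChannel().catch(console.error);
```
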
### 3c. Add the notification handler

Inside the `pgClient.on("notification", ...)` callback, add the new branch:

```typescript
pgClient.on("notification", (msg) => {
  if (msg.channel === "bot_send" && msg.payload) {
    handleBotSend(msg.payload);
  } else if (msg.channel === "new_package" && msg.payload) {
    handleNewPackage(msg.payload);
  } else if (msg.channel === "bot_broadcast" && msg.payload) { // ← NEW
    handleBroadcast(msg.payload);
  }
});
```

Update the log message:

```typescript
log.info("Send listener started (bot_send, new_package, bot_broadcast)");
```

### 3d. Add the broadcast handler function

Add this at the bottom of the file (before the existing `escapeHtml` helper):

```typescript
// ── bot_broadcast handler ──

/**
 * Handle a broadcast request. The payload is a JSON string:
 * { message: string }
 *
 * Sends the message to every user who has a TelegramLink.
 * Uses a sequential loop with a 50 ms delay between sends (~20 msgs/sec)
 * to stay well under Telegram's 30 msgs/sec global bot limit.
 * Each send is wrapped with withFloodWait to automatically retry on
 * rate-limit errors.
 */
async function handleBroadcast(payload: string): Promise<void> {
  try {
    const data = JSON.parse(payload) as { message: string };
    if (!data.message) {
      log.warn("Broadcast payload missing message — ignoring");
      return;
    }

    const links = await getAllTelegramLinks();
    if (links.length === 0) {
      log.info("Broadcast requested but no linked users found");
      return;
    }

    log.info(
      { recipientCount: links.length },
      "Starting broadcast to all linked users"
    );

    let sent = 0;
    let failed = 0;

    for (const link of links) {
      try {
        await withFloodWait(() =>
          sendTextMessage(link.telegramUserId, data.message, "textParseModeHTML")
        );
        sent++;
      } catch (err) {
        failed++;
        log.warn(
          { err, telegramUserId: link.telegramUserId.toString() },
          "Broadcast send failed for user"
        );
      }
      // ~20 msgs/sec — well under the 30 msgs/sec global bot limit
      await sleep(50);
    }

    log.info({ sent, failed, total: links.length }, "Broadcast completed");
  } catch (err) {
    log.error({ err, payload }, "Failed to process broadcast");
  }
}
```

---

## File 4: `src/app/api/telegram/bot/broadcast/route.ts` (NEW)

This is the Next.js API endpoint that the admin triggers from the web app.

```typescript
import { NextResponse } from "next/server";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

/**
 * POST /api/telegram/bot/broadcast
 * Send a text message to ALL users who have a linked Telegram account.
 *
 * Body: { message: string }
 *
 * Admin-only. The actual sending is done by the bot process — this endpoint
 * simply validates input and fires a pg_notify('bot_broadcast', ...) signal.
 */
export async function POST(request: Request) {
  const session = await auth();
  if (!session?.user?.id) {
    return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
  }

  // Only admins can broadcast
  if (session.user.role !== "ADMIN") {
    return NextResponse.json(
      { error: "Only admins can send broadcast messages" },
      { status: 403 }
    );
  }

  let body: { message?: string };
  try {
    body = await request.json();
  } catch {
    return NextResponse.json({ error: "Invalid JSON body" }, { status: 400 });
  }

  if (!body.message || typeof body.message !== "string") {
    return NextResponse.json(
      { error: "message is required and must be a string" },
      { status: 400 }
    );
  }

  const trimmed = body.message.trim();
  if (trimmed.length === 0) {
    return NextResponse.json(
      { error: "message must not be empty" },
      { status: 400 }
    );
  }

  // Telegram message text limit is 4096 characters
  if (trimmed.length > 4096) {
    return NextResponse.json(
      { error: "message exceeds Telegram's 4096-character limit" },
      { status: 400 }
    );
  }

  // Count how many users will receive the broadcast
  const recipientCount = await prisma.telegramLink.count();
  if (recipientCount === 0) {
    return NextResponse.json(
      { error: "No users have linked Telegram accounts" },
      { status: 400 }
    );
  }

  // Notify the bot via pg_notify
  const payload = JSON.stringify({ message: trimmed });
  try {
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('bot_broadcast', $1)`,
      payload
    );
  } catch (err) {
    return NextResponse.json(
      { error: "Failed to queue broadcast — bot notification channel unavailable" },
      { status: 500 }
    );
  }

  return NextResponse.json({
    status: "QUEUED",
    recipientCount,
    message: `Broadcast queued for ${recipientCount} user(s)`,
  });
}
```
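
As a usage sketch, the admin web app would trigger this endpoint like so (hypothetical message text; assumes the admin is signed in so the session cookie accompanies the request):

```typescript
// Client-side trigger for the broadcast endpoint defined above.
const res = await fetch("/api/telegram/bot/broadcast", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ message: "Server maintenance tonight at 22:00." }),
});
const data = await res.json();
// On success: { status: "QUEUED", recipientCount: N, message: "Broadcast queued for N user(s)" }
console.log(data);
```
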
---

## Summary of Changes

| File | Action | Description |
|------|--------|-------------|
| `bot/src/util/flood-wait.ts` | NEW | Reusable FLOOD_WAIT retry + sleep utilities (from skill) |
| `bot/src/db/queries.ts` | MODIFIED | Add `getAllTelegramLinks()` query |
| `bot/src/send-listener.ts` | MODIFIED | Subscribe to `bot_broadcast` channel; add `handleBroadcast()` with sequential send loop |
| `src/app/api/telegram/bot/broadcast/route.ts` | NEW | Admin-only API endpoint that validates input and fires `pg_notify` |

## Skill Patterns Used

1. **Sequential Send Queue**: The broadcast loop sends messages one at a time with `await sleep(50)` between each, keeping throughput at ~20 msgs/sec — safely under the 30 msgs/sec bot global limit documented in the skill.

2. **FLOOD_WAIT handling with `withFloodWait()`**: Every `sendTextMessage` call is wrapped so that if Telegram returns a `FLOOD_WAIT_X` error, the bot automatically waits the required duration plus random jitter (1-5 s) before retrying. This prevents the entire broadcast from failing due to a temporary rate limit.

3. **Message text length validation**: The API endpoint rejects messages over 4,096 characters, which is the `message_text_length_max` limit documented in the skill.

4. **No concurrent sends (anti-pattern avoidance)**: The skill explicitly warns against `Promise.all(users.map(...))` for bulk sends. The implementation uses a sequential `for...of` loop instead.

5. **pg_notify pattern**: Follows the existing project convention (seen in `bot_send` and `new_package`) where the web app fires a `pg_notify` and the bot's `send-listener.ts` picks it up — keeping the two processes decoupled.

@@ -0,0 +1,5 @@
{
  "total_tokens": 35365,
  "duration_ms": 107616,
  "total_duration_seconds": 107.6
}

@@ -0,0 +1,10 @@
{
  "run_id": "eval-1-without_skill",
  "expectations": [
    {"text": "uses_sequential_queue", "passed": true, "evidence": "Uses for...of loop over links, sends one at a time. No Promise.all."},
    {"text": "uses_flood_wait_retry", "passed": false, "evidence": "No withFloodWait wrapper. Bare sendTextMessage calls in both send-listener handleBroadcast and commands handleBroadcastCommand. No flood-wait.ts utility created."},
    {"text": "has_per_message_delay", "passed": false, "evidence": "No sleep/delay between sends in either the send-listener handler or the command handler."},
    {"text": "handles_per_user_errors", "passed": true, "evidence": "try/catch per user with sent++/failed++ counters, continues to next user."},
    {"text": "has_api_endpoint", "passed": true, "evidence": "Created src/app/api/telegram/bot/broadcast/route.ts with admin auth, 4096-char limit, pg_notify."}
  ]
}

File diff suppressed because it is too large

@@ -0,0 +1,5 @@
{
  "total_tokens": 69214,
  "duration_ms": 200249,
  "total_duration_seconds": 200.2
}

@@ -0,0 +1,11 @@
{
  "eval_id": 2,
  "eval_name": "flood-wait-during-scan",
  "prompt": "The worker keeps crashing with 'FLOOD_WAIT_35' errors when scanning a source channel that has about 10,000 messages. It happens during the getChannelMessages pagination loop. How do I fix this?",
  "assertions": [
    {"text": "identifies_retry_as_fix: Recommends wrapping pagination calls in FLOOD_WAIT-aware retry logic", "type": "required"},
    {"text": "recommends_pagination_delay: Mentions adding or increasing sleep between pages", "type": "required"},
    {"text": "respects_retry_after: Fix extracts and uses the actual wait duration from the error, not a hardcoded value", "type": "required"},
    {"text": "does_not_reduce_frequency_as_primary_fix: Does not suggest scanning less often as the main solution", "type": "required"}
  ]
}

@@ -0,0 +1,9 @@
{
  "run_id": "eval-2-with_skill",
  "expectations": [
    {"text": "identifies_retry_as_fix", "passed": true, "evidence": "Identifies that withFloodWait already exists but the pagination loop needs its own adaptive backoff layer. Adds extractFloodWaitSeconds at pagination level."},
    {"text": "recommends_pagination_delay", "passed": true, "evidence": "Keeps existing sleep(config.apiDelayMs) and makes it adaptive: doubles on FLOOD_WAIT, relaxes by 0.8x on success."},
    {"text": "respects_retry_after", "passed": true, "evidence": "Extracts actual wait duration: 'await sleep(waitSec * 1000 + jitter)' using extractFloodWaitSeconds. Explicitly cites skill rule: 'Always respect the wait duration.'"},
    {"text": "does_not_reduce_frequency_as_primary_fix", "passed": true, "evidence": "Primary fix is adaptive backoff at pagination level, not reducing scan frequency. Does not suggest lowering WORKER_INTERVAL_MINUTES."}
  ]
}

@@ -0,0 +1,378 @@
# Diagnosis: FLOOD_WAIT_35 During Channel Scanning

## Root Cause

The `getChannelMessages` function in `worker/src/tdlib/download.ts` (line 146) paginates through channel history using `getChatHistory` with a fixed 1-second delay between pages (`config.apiDelayMs = 1000`). While each individual `getChatHistory` call is wrapped in `invokeWithTimeout`, which delegates to `withFloodWait` for retry logic, the pagination loop itself has a fundamental problem for large channels:

**A 10,000-message channel at 100 messages/page = 100 consecutive API calls.** With only a 1-second delay between pages, this sustained burst of ~1 req/sec for 100+ seconds eventually triggers Telegram's dynamic rate limiter. Once `FLOOD_WAIT_35` fires, `withFloodWait` does correctly sleep for 35+ seconds and retry that single call -- but the loop immediately resumes its 1-second cadence, triggering another FLOOD_WAIT shortly after. After 5 retries (`config.maxRetries = 5`), the function throws and the worker crashes.

The same issue exists in `getTopicMessages` in `worker/src/tdlib/topics.ts` (line 170), which uses identical pagination logic.

## Why the Current Code Is Almost Right But Not Quite

The codebase already has the right building blocks:

1. `withFloodWait` in `worker/src/util/retry.ts` correctly extracts wait durations and adds jitter -- this matches the skill's recommended FLOOD_WAIT handler pattern exactly.
2. `invokeWithTimeout` in `worker/src/tdlib/download.ts` wraps every TDLib call through `withFloodWait`.
3. There is a 1-second inter-page delay (`config.apiDelayMs`).

**The gap:** After a FLOOD_WAIT recovery, the pagination loop does not back off its inter-page delay. It goes right back to 1-second spacing, which is what triggers repeated FLOOD_WAITs until the retry budget is exhausted.

## The Fix

Apply **adaptive backoff** to the pagination delay: when a FLOOD_WAIT is encountered during scanning, increase the inter-page delay for subsequent pages. This prevents the "recover then immediately re-trigger" cycle.
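
Distilled to its core, the backoff rule is a clamped multiplicative increase/decrease on the inter-page delay. A minimal standalone sketch (the `AdaptiveDelay` name is illustrative; both fixes below inline the same arithmetic rather than introducing a class):

```typescript
// Minimal sketch of the adaptive inter-page delay used in both fixes.
// Doubles (capped at 30 s) when a FLOOD_WAIT survives the retry wrapper,
// relaxes by 0.8x back toward the base delay after each clean page.
class AdaptiveDelay {
  private current: number;
  constructor(private baseDelayMs: number, private maxDelayMs = 30_000) {
    this.current = baseDelayMs;
  }
  onFloodWait(): void {
    this.current = Math.min(this.maxDelayMs, this.current * 2);
  }
  onSuccess(): void {
    this.current = Math.max(this.baseDelayMs, Math.floor(this.current * 0.8));
  }
  get ms(): number {
    return this.current;
  }
}
```
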
### Fix 1: Add adaptive delay to `getChannelMessages` (`worker/src/tdlib/download.ts`)

Replace lines 146-250 with:

```typescript
/**
 * Fetch messages from a channel, stopping once we've scanned past the
 * last-processed boundary (with one page of lookback for multipart safety).
 * Collects both archive attachments AND photo messages (for preview matching).
 * Returns messages in chronological order (oldest first).
 *
 * When `lastProcessedMessageId` is null (first run), scans everything.
 * The worker applies a post-grouping filter to skip fully-processed sets,
 * and keeps `packageExistsBySourceMessage` as a safety net.
 *
 * Safety features:
 * - Max page limit to prevent infinite loops
 * - Stuck detection: breaks if from_message_id stops advancing
 * - Timeout on each TDLib API call
 * - Adaptive delay: backs off when FLOOD_WAIT is encountered
 */
export async function getChannelMessages(
  client: Client,
  chatId: bigint,
  lastProcessedMessageId?: bigint | null,
  limit = 100,
  onProgress?: ScanProgressCallback
): Promise<ChannelScanResult> {
  const archives: TelegramMessage[] = [];
  const photos: TelegramPhoto[] = [];
  const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;

  let currentFromId = 0;
  let totalScanned = 0;
  let pageCount = 0;
  let currentDelay = config.apiDelayMs; // starts at 1000ms, adapts on FLOOD_WAIT

  // eslint-disable-next-line no-constant-condition
  while (true) {
    if (pageCount >= MAX_SCAN_PAGES) {
      log.warn(
        { chatId: chatId.toString(), pageCount, totalScanned },
        "Hit max page limit for channel scan, stopping"
      );
      break;
    }
    pageCount++;

    const previousFromId = currentFromId;

    let result: { messages: TdMessage[] };
    try {
      result = await invokeWithTimeout<{ messages: TdMessage[] }>(client, {
        _: "getChatHistory",
        chat_id: Number(chatId),
        from_message_id: currentFromId,
        offset: 0,
        limit: Math.min(limit, 100),
        only_local: false,
      });
    } catch (err) {
      // If invokeWithTimeout exhausted its retries on FLOOD_WAIT, check if
      // we can recover at the pagination level by increasing the delay further.
      const waitSec = extractFloodWaitSeconds(err);
      if (waitSec !== null) {
        // The retry wrapper already slept; bump the inter-page delay to
        // prevent the next page from immediately re-triggering.
        currentDelay = Math.min(currentDelay * 2, 30_000);
        log.warn(
          { chatId: chatId.toString(), newDelay: currentDelay, totalScanned },
          "FLOOD_WAIT persisted after retries — increasing inter-page delay and retrying"
        );
        // Sleep the full flood wait duration + jitter before continuing
        const jitter = 1000 + Math.random() * 4000;
        await sleep(waitSec * 1000 + jitter);
        continue; // retry this page with the new delay
      }
      throw err; // non-rate-limit error — propagate
    }

    // Successful call — gradually relax the delay back toward baseline
    if (currentDelay > config.apiDelayMs) {
      currentDelay = Math.max(config.apiDelayMs, Math.floor(currentDelay * 0.8));
    }

    if (!result.messages || result.messages.length === 0) break;

    totalScanned += result.messages.length;

    for (const msg of result.messages) {
      // Check for archive documents
      const doc = msg.content?.document;
      if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
        archives.push({
          id: BigInt(msg.id),
          fileName: doc.file_name,
          fileId: String(doc.document.id),
          fileSize: BigInt(doc.document.size),
          date: new Date(msg.date * 1000),
        });
        continue;
      }

      // Check for photo messages (potential previews)
      const photo = msg.content?.photo;
      const caption = msg.content?.caption?.text ?? "";
      if (photo?.sizes && photo.sizes.length > 0) {
        const smallest = photo.sizes[0];
        photos.push({
          id: BigInt(msg.id),
          date: new Date(msg.date * 1000),
          caption,
          fileId: String(smallest.photo.id),
          fileSize: smallest.photo.size || smallest.photo.expected_size,
        });
      }
    }

    // Report scanning progress after each page
    onProgress?.(totalScanned);

    currentFromId = result.messages[result.messages.length - 1].id;

    // Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
    if (currentFromId === previousFromId) {
      log.warn(
        { chatId: chatId.toString(), currentFromId, totalScanned },
        "Pagination stuck (from_message_id not advancing), breaking"
      );
      break;
    }

    // Stop scanning once we've gone past the boundary (this page is the lookback)
    if (boundary && currentFromId < boundary) break;

    if (result.messages.length < Math.min(limit, 100)) break;

    // Rate limit delay — adaptive based on FLOOD_WAIT history
    await sleep(currentDelay);
  }

  log.info(
    { chatId: chatId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
    "Channel scan complete"
  );

  // Reverse to chronological order (oldest first) so worker processes old→new
  return {
    archives: archives.reverse(),
    photos: photos.reverse(),
    totalScanned,
  };
}
```

You will also need to add the import for `extractFloodWaitSeconds` at the top of `download.ts`:

```typescript
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
```

### Fix 2: Apply the same pattern to `getTopicMessages` (`worker/src/tdlib/topics.ts`)

The same adaptive delay logic should be applied to the `getTopicMessages` function. Add the import:

```typescript
import { extractFloodWaitSeconds } from "../util/retry.js";
```

Then apply the same changes to the pagination loop (the structure is identical):

```typescript
export async function getTopicMessages(
  client: Client,
  chatId: bigint,
  topicId: bigint,
  lastProcessedMessageId?: bigint | null,
  limit = 100,
  onProgress?: ScanProgressCallback
): Promise<ChannelScanResult> {
  const archives: TelegramMessage[] = [];
  const photos: TelegramPhoto[] = [];
  const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;

  let currentFromId = 0;
  let totalScanned = 0;
  let pageCount = 0;
  let currentDelay = config.apiDelayMs;

  // eslint-disable-next-line no-constant-condition
  while (true) {
    if (pageCount >= MAX_SCAN_PAGES) {
      log.warn(
        { chatId: chatId.toString(), topicId: topicId.toString(), pageCount, totalScanned },
        "Hit max page limit for topic scan, stopping"
      );
      break;
    }
    pageCount++;

    const previousFromId = currentFromId;

    let result: {
      messages?: {
        id: number;
        date: number;
        content: {
          _: string;
          document?: {
            file_name?: string;
            document?: {
              id: number;
              size: number;
            };
          };
          photo?: {
            sizes?: {
              type: string;
              photo: { id: number; size: number; expected_size: number };
              width: number;
              height: number;
            }[];
          };
          caption?: { text?: string };
        };
      }[];
    };

    try {
      result = await invokeWithTimeout(client, {
        _: "searchChatMessages",
        chat_id: Number(chatId),
        query: "",
        message_thread_id: Number(topicId),
        from_message_id: currentFromId,
        offset: 0,
        limit: Math.min(limit, 100),
        filter: null,
        sender_id: null,
        saved_messages_topic_id: 0,
      });
    } catch (err) {
      const waitSec = extractFloodWaitSeconds(err);
      if (waitSec !== null) {
        currentDelay = Math.min(currentDelay * 2, 30_000);
        log.warn(
          { chatId: chatId.toString(), topicId: topicId.toString(), newDelay: currentDelay, totalScanned },
          "FLOOD_WAIT persisted after retries — increasing inter-page delay and retrying"
        );
        const jitter = 1000 + Math.random() * 4000;
        await sleep(waitSec * 1000 + jitter);
        continue;
      }
      throw err;
    }

    // Successful call — gradually relax the delay back toward baseline
    if (currentDelay > config.apiDelayMs) {
      currentDelay = Math.max(config.apiDelayMs, Math.floor(currentDelay * 0.8));
    }

    if (!result.messages || result.messages.length === 0) break;

    totalScanned += result.messages.length;

    for (const msg of result.messages) {
      const doc = msg.content?.document;
      if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
        archives.push({
          id: BigInt(msg.id),
          fileName: doc.file_name,
          fileId: String(doc.document.id),
          fileSize: BigInt(doc.document.size),
          date: new Date(msg.date * 1000),
        });
        continue;
      }

      const photo = msg.content?.photo;
      const caption = msg.content?.caption?.text ?? "";
      if (photo?.sizes && photo.sizes.length > 0) {
        const smallest = photo.sizes[0];
        photos.push({
          id: BigInt(msg.id),
          date: new Date(msg.date * 1000),
          caption,
          fileId: String(smallest.photo.id),
          fileSize: smallest.photo.size || smallest.photo.expected_size,
        });
      }
    }

    onProgress?.(totalScanned);

    currentFromId = result.messages[result.messages.length - 1].id;

    if (currentFromId === previousFromId) {
      log.warn(
        { chatId: chatId.toString(), topicId: topicId.toString(), currentFromId, totalScanned },
        "Topic pagination stuck (from_message_id not advancing), breaking"
      );
      break;
    }

    if (boundary && currentFromId < boundary) break;

    if (result.messages.length < Math.min(limit, 100)) break;

    await sleep(currentDelay);
  }

  log.info(
    { chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
    "Topic scan complete"
  );

  return {
    archives: archives.reverse(),
    photos: photos.reverse(),
    totalScanned,
  };
}
```

## Skill Patterns Applied

### 1. FLOOD_WAIT Handling (Skill: "The Right Way to Handle It")

The existing `withFloodWait` and `extractFloodWaitSeconds` in `worker/src/util/retry.ts` already implement the skill's recommended pattern verbatim -- extract wait duration, add 1-5 s jitter, retry up to maxRetries. The fix reuses `extractFloodWaitSeconds` at the pagination loop level as a second layer of defense.

### 2. Paginated Scanning with Delay (Skill: "Pattern: Paginated Scanning with Delay")

The skill states: *"When reading channel history or enumerating topics, always add a delay between pages"* and shows a 1-second delay example. The existing code has this (`config.apiDelayMs = 1000`). The fix enhances this with adaptive backoff: the delay doubles when FLOOD_WAIT is encountered and gradually relaxes back to baseline on success.

### 3. Non-rate-limit Errors Should Fail Fast (Skill: "Key Rules")

The skill states: *"Non-rate-limit errors should fail fast. Only retry on FLOOD_WAIT, not on other errors."* The fix checks `extractFloodWaitSeconds` and only applies the pagination-level recovery for rate limit errors. All other errors propagate immediately via `throw err`.

### 4. Always Respect the Wait Duration (Skill: "Key Rules")

The skill states: *"Always respect the wait duration. Never retry before retry_after expires."* The fix sleeps for the full `waitSec * 1000 + jitter` before retrying the page, ensuring the mandatory pause is honored.

### 5. Add Jitter (Skill: "Key Rules")

The skill states: *"Add jitter. Without it, multiple clients retry simultaneously and trigger another FLOOD_WAIT."* Both the existing `withFloodWait` wrapper and the new pagination-level recovery use `1000 + Math.random() * 4000` jitter, consistent with the skill's recommendation.

## Files Affected

- `worker/src/tdlib/download.ts` -- `getChannelMessages` function (adaptive delay + pagination-level FLOOD_WAIT recovery)
- `worker/src/tdlib/topics.ts` -- `getTopicMessages` function (same fix)

## Summary

The crash happens because the pagination loop fires 100+ consecutive `getChatHistory` calls at 1-second intervals. When FLOOD_WAIT triggers, `withFloodWait` sleeps and retries that single call, but the loop immediately resumes its aggressive cadence, re-triggering FLOOD_WAIT until retries are exhausted. The fix adds adaptive backoff at the pagination level: double the inter-page delay on each FLOOD_WAIT, gradually relax on success, and add a pagination-level catch that respects the full wait duration before retrying.

@@ -0,0 +1,5 @@
{
  "total_tokens": 63079,
  "duration_ms": 140851,
  "total_duration_seconds": 140.9
}

@@ -0,0 +1,9 @@
{
  "run_id": "eval-2-without_skill",
  "expectations": [
    {"text": "identifies_retry_as_fix", "passed": true, "evidence": "Correctly identifies that invokeWithTimeout already wraps calls in withFloodWait. Adds adaptive backoff at pagination level."},
    {"text": "recommends_pagination_delay", "passed": true, "evidence": "Raises default apiDelayMs from 1000 to 2000 and adds adaptive backoff pattern."},
    {"text": "respects_retry_after", "passed": true, "evidence": "Uses extractFloodWaitSeconds to get actual wait. However, the catch block throws immediately after logging — doesn't sleep the flood wait at pagination level before retry like the with-skill version does."},
    {"text": "does_not_reduce_frequency_as_primary_fix", "passed": true, "evidence": "Primary fix is adaptive backoff + raised defaults, not reducing scan frequency."}
  ]
}

@@ -0,0 +1,406 @@
# Diagnosis: FLOOD_WAIT_35 Crashes During Channel Message Pagination

## Root Cause

The `getChannelMessages` function in `worker/src/tdlib/download.ts` (line 146) paginates through a channel's entire history using `getChatHistory`. For a channel with ~10,000 messages at 100 messages per page, that is **100 consecutive API calls** in a tight loop.

The current rate-limit mitigation has three problems:

1. **The inter-page delay is too low.** `config.apiDelayMs` is hardcoded to **1000ms** (1 second) in `worker/src/util/config.ts:15`. Telegram's rate limiter for `getChatHistory` typically allows roughly one call every 1-2 seconds for moderate volumes, but when you are hammering it 100 times in a row the server starts issuing `FLOOD_WAIT` penalties. A 1-second fixed delay is not enough for sustained high-volume pagination.

2. **The retry budget is too small for sustained scanning.** At `download.ts:174` the pagination call goes through `invokeWithTimeout`, which **does** wrap the call with `withFloodWait`, so the retry logic is present. However, the retry wrapper in `worker/src/util/retry.ts` has `maxRetries` set to **5** (from config). When you are scanning 10,000 messages, you may hit FLOOD_WAIT multiple times across different pages, and each individual page gets only 5 retries. If Telegram escalates the wait time (e.g., FLOOD_WAIT_35 means "wait 35 seconds"), the retry logic does handle it -- but the fixed 1-second inter-page delay is too aggressive, causing FLOOD_WAIT errors to pile up on nearly every page in the latter half of the scan. Eventually one page exhausts its 5 retries and the entire scan crashes.

3. **No adaptive/exponential backoff between pages.** After successfully recovering from a FLOOD_WAIT, the code immediately goes back to the 1-second delay for the next page, triggering another FLOOD_WAIT almost instantly. There is no mechanism to slow down after being rate-limited.

## The Fix

The fix has three parts:

### Part 1: Make `apiDelayMs` configurable and increase the default

**File: `worker/src/util/config.ts`**

```typescript
export const config = {
  databaseUrl: process.env.DATABASE_URL ?? "",
  workerIntervalMinutes: parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10),
  tempDir: process.env.WORKER_TEMP_DIR ?? "/tmp/zips",
  tdlibStateDir: process.env.TDLIB_STATE_DIR ?? "/data/tdlib",
  maxZipSizeMB: parseInt(process.env.WORKER_MAX_ZIP_SIZE_MB ?? "4096", 10),
  logLevel: (process.env.LOG_LEVEL ?? "info") as "debug" | "info" | "warn" | "error",
  telegramApiId: parseInt(process.env.TELEGRAM_API_ID ?? "0", 10),
  telegramApiHash: process.env.TELEGRAM_API_HASH ?? "",
  /** Maximum jitter added to scheduler interval (in minutes) */
  jitterMinutes: 5,
  /** Maximum time span for multipart archive parts (in hours). 0 = no limit. */
  multipartTimeoutHours: parseInt(process.env.MULTIPART_TIMEOUT_HOURS ?? "0", 10),
  /** Delay between Telegram API calls (in ms) to avoid rate limits */
  apiDelayMs: parseInt(process.env.WORKER_API_DELAY_MS ?? "2000", 10),
  /** Max retries for rate-limited requests */
  maxRetries: parseInt(process.env.WORKER_MAX_RETRIES ?? "10", 10),
} as const;
```

Changes: default `apiDelayMs` raised from 1000 to **2000**, `maxRetries` raised from 5 to **10**, both now configurable via environment variables.
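
A quick sketch of how the overrides resolve (the values are hypothetical): unset variables fall back to the new defaults, so operators can tune the scan pace per deployment without a rebuild.

```typescript
// With WORKER_API_DELAY_MS=3000 and WORKER_MAX_RETRIES=12 exported in the
// worker's environment, the parsed values change; unset vars use defaults.
const apiDelayMs = parseInt(process.env.WORKER_API_DELAY_MS ?? "2000", 10);
const maxRetries = parseInt(process.env.WORKER_MAX_RETRIES ?? "10", 10);
console.log({ apiDelayMs, maxRetries }); // { apiDelayMs: 2000, maxRetries: 10 } when unset
```
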
### Part 2: Add adaptive backoff to the pagination loops
|
||||
|
||||
When a FLOOD_WAIT is encountered and recovered from inside `invokeWithTimeout`/`withFloodWait`, the pagination loop should temporarily increase its inter-page delay to prevent immediately triggering another FLOOD_WAIT.
|
||||
|
||||
**File: `worker/src/tdlib/download.ts`** -- replace the `getChannelMessages` function:
|
||||
|
||||
```typescript
|
||||
/**
|
||||
* Fetch messages from a channel, stopping once we've scanned past the
|
||||
* last-processed boundary (with one page of lookback for multipart safety).
|
||||
* Collects both archive attachments AND photo messages (for preview matching).
|
||||
* Returns messages in chronological order (oldest first).
|
||||
*
|
||||
* When `lastProcessedMessageId` is null (first run), scans everything.
|
||||
* The worker applies a post-grouping filter to skip fully-processed sets,
|
||||
* and keeps `packageExistsBySourceMessage` as a safety net.
|
||||
*
|
||||
* Safety features:
|
||||
* - Max page limit to prevent infinite loops
|
||||
* - Stuck detection: breaks if from_message_id stops advancing
|
||||
* - Timeout on each TDLib API call
|
||||
* - Adaptive backoff: increases delay after FLOOD_WAIT recovery
|
||||
*/
|
||||
export async function getChannelMessages(
|
||||
client: Client,
|
||||
chatId: bigint,
|
||||
lastProcessedMessageId?: bigint | null,
|
||||
limit = 100,
|
||||
onProgress?: ScanProgressCallback
|
||||
): Promise<ChannelScanResult> {
|
||||
const archives: TelegramMessage[] = [];
|
||||
const photos: TelegramPhoto[] = [];
|
||||
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||
|
||||
let currentFromId = 0;
|
||||
let totalScanned = 0;
|
||||
let pageCount = 0;
|
||||
|
||||
// Adaptive delay: starts at config value, increases after FLOOD_WAIT recovery
|
||||
let currentDelayMs = config.apiDelayMs;
|
||||
const MAX_DELAY_MS = 30_000; // Cap at 30 seconds between pages
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (pageCount >= MAX_SCAN_PAGES) {
|
||||
log.warn(
|
||||
{ chatId: chatId.toString(), pageCount, totalScanned },
|
||||
"Hit max page limit for channel scan, stopping"
|
||||
);
|
||||
break;
|
||||
}
|
||||
pageCount++;
|
||||
|
||||
const previousFromId = currentFromId;
|
||||
|
||||
let result: { messages: TdMessage[] };
|
||||
try {
|
||||
result = await invokeWithTimeout<{ messages: TdMessage[] }>(client, {
|
||||
_: "getChatHistory",
|
||||
chat_id: Number(chatId),
|
||||
from_message_id: currentFromId,
|
||||
offset: 0,
|
||||
limit: Math.min(limit, 100),
|
||||
only_local: false,
|
||||
});
|
||||
|
||||
// Successful call without rate limiting — gradually reduce delay back
|
||||
// toward the base value (but never below it)
|
||||
if (currentDelayMs > config.apiDelayMs) {
|
||||
currentDelayMs = Math.max(
|
||||
config.apiDelayMs,
|
||||
Math.floor(currentDelayMs * 0.8)
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
// If withFloodWait inside invokeWithTimeout exhausted retries on a
|
||||
// FLOOD_WAIT error, increase the inter-page delay significantly
|
||||
// before re-throwing so the caller (or a future retry of the whole
|
||||
// scan) starts slower.
|
||||
const floodSeconds = extractFloodWaitSeconds(err);
|
||||
if (floodSeconds !== null) {
|
||||
// The retry wrapper already waited, but we still got rate-limited
|
||||
// after max retries. Double the inter-page delay for next time.
|
||||
currentDelayMs = Math.min(MAX_DELAY_MS, currentDelayMs * 2);
|
||||
log.warn(
|
||||
{
|
||||
chatId: chatId.toString(),
|
||||
pageCount,
|
||||
totalScanned,
|
||||
newDelayMs: currentDelayMs,
|
||||
},
|
||||
"FLOOD_WAIT exhausted retries — increased inter-page delay"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!result.messages || result.messages.length === 0) break;
|
||||
|
||||
totalScanned += result.messages.length;
|
||||
|
||||
for (const msg of result.messages) {
|
||||
// Check for archive documents
|
||||
const doc = msg.content?.document;
|
||||
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||
archives.push({
|
||||
id: BigInt(msg.id),
|
||||
fileName: doc.file_name,
|
||||
fileId: String(doc.document.id),
|
||||
fileSize: BigInt(doc.document.size),
|
||||
date: new Date(msg.date * 1000),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for photo messages (potential previews)
|
||||
const photo = msg.content?.photo;
|
||||
const caption = msg.content?.caption?.text ?? "";
|
||||
if (photo?.sizes && photo.sizes.length > 0) {
|
||||
const smallest = photo.sizes[0];
|
||||
photos.push({
|
||||
id: BigInt(msg.id),
|
||||
date: new Date(msg.date * 1000),
|
||||
caption,
|
||||
fileId: String(smallest.photo.id),
|
||||
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Report scanning progress after each page
|
||||
onProgress?.(totalScanned);
|
||||
|
||||
currentFromId = result.messages[result.messages.length - 1].id;
|
||||
|
||||
// Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
|
||||
if (currentFromId === previousFromId) {
|
||||
log.warn(
|
||||
{ chatId: chatId.toString(), currentFromId, totalScanned },
|
||||
"Pagination stuck (from_message_id not advancing), breaking"
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
||||
if (boundary && currentFromId < boundary) break;
|
||||
|
||||
if (result.messages.length < Math.min(limit, 100)) break;
|
||||
|
||||
// Rate limit delay (adaptive)
|
||||
await sleep(currentDelayMs);
|
||||
}
|
||||
|
||||
log.info(
|
||||
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||
"Channel scan complete"
|
||||
);
|
||||
|
||||
// Reverse to chronological order (oldest first) so worker processes old→new
|
||||
return {
|
||||
archives: archives.reverse(),
|
||||
photos: photos.reverse(),
|
||||
totalScanned,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
You also need to add the import for `extractFloodWaitSeconds` at the top of `download.ts`:
|
||||
|
||||
```typescript
|
||||
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
|
||||
```
|
||||
|
||||
### Part 3: Apply the same adaptive backoff to `getTopicMessages` in `topics.ts`
|
||||
|
||||
The same pattern should be applied to `worker/src/tdlib/topics.ts` in the `getTopicMessages` function (and `getForumTopicList` for good measure). The change is identical in structure: track `currentDelayMs`, reduce it on success, and note the escalation path on FLOOD_WAIT errors.
|
||||
|
||||
**File: `worker/src/tdlib/topics.ts`** -- add the import and apply to both pagination loops. Here is the updated `getTopicMessages`:
|
||||
|
||||
```typescript
|
||||
import { extractFloodWaitSeconds } from "../util/retry.js";
|
||||
|
||||
// ... (existing code) ...
|
||||
|
||||
export async function getTopicMessages(
|
||||
client: Client,
|
||||
chatId: bigint,
|
||||
topicId: bigint,
|
||||
lastProcessedMessageId?: bigint | null,
|
||||
limit = 100,
|
||||
onProgress?: ScanProgressCallback
|
||||
): Promise<ChannelScanResult> {
|
||||
const archives: TelegramMessage[] = [];
|
||||
const photos: TelegramPhoto[] = [];
|
||||
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||
|
||||
let currentFromId = 0;
|
||||
let totalScanned = 0;
|
||||
let pageCount = 0;
|
||||
|
||||
// Adaptive delay
|
||||
let currentDelayMs = config.apiDelayMs;
|
||||
const MAX_DELAY_MS = 30_000;
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (pageCount >= MAX_SCAN_PAGES) {
|
||||
log.warn(
|
||||
{ chatId: chatId.toString(), topicId: topicId.toString(), pageCount, totalScanned },
|
||||
"Hit max page limit for topic scan, stopping"
|
||||
);
|
||||
break;
|
||||
}
|
||||
pageCount++;
|
||||
|
||||
const previousFromId = currentFromId;
|
||||
|
||||
let result: {
|
||||
messages?: {
|
||||
id: number;
|
||||
date: number;
|
||||
content: {
|
||||
_: string;
|
||||
document?: {
|
||||
file_name?: string;
|
||||
document?: { id: number; size: number };
|
||||
};
|
||||
photo?: {
|
||||
sizes?: {
|
||||
type: string;
|
||||
photo: { id: number; size: number; expected_size: number };
|
||||
width: number;
|
||||
height: number;
|
||||
}[];
|
||||
};
|
||||
caption?: { text?: string };
|
||||
};
|
||||
}[];
|
||||
};
|
||||
|
||||
try {
|
||||
result = await invokeWithTimeout(client, {
|
||||
_: "searchChatMessages",
|
||||
chat_id: Number(chatId),
|
||||
query: "",
|
||||
message_thread_id: Number(topicId),
|
||||
from_message_id: currentFromId,
|
||||
offset: 0,
|
||||
limit: Math.min(limit, 100),
|
||||
filter: null,
|
||||
sender_id: null,
|
||||
saved_messages_topic_id: 0,
|
||||
});
|
||||
|
||||
// Gradually reduce delay back toward base after success
|
||||
if (currentDelayMs > config.apiDelayMs) {
|
||||
currentDelayMs = Math.max(
|
||||
config.apiDelayMs,
|
||||
Math.floor(currentDelayMs * 0.8)
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
const floodSeconds = extractFloodWaitSeconds(err);
|
||||
if (floodSeconds !== null) {
|
||||
currentDelayMs = Math.min(MAX_DELAY_MS, currentDelayMs * 2);
|
||||
log.warn(
|
||||
{
|
||||
chatId: chatId.toString(),
|
||||
topicId: topicId.toString(),
|
||||
pageCount,
|
||||
totalScanned,
|
||||
newDelayMs: currentDelayMs,
|
||||
},
|
||||
"FLOOD_WAIT exhausted retries — increased inter-page delay"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!result.messages || result.messages.length === 0) break;
|
||||
|
||||
totalScanned += result.messages.length;
|
||||
|
||||
for (const msg of result.messages) {
|
||||
const doc = msg.content?.document;
|
||||
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||
archives.push({
|
||||
id: BigInt(msg.id),
|
||||
fileName: doc.file_name,
|
||||
fileId: String(doc.document.id),
|
||||
fileSize: BigInt(doc.document.size),
|
||||
date: new Date(msg.date * 1000),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const photo = msg.content?.photo;
|
||||
const caption = msg.content?.caption?.text ?? "";
|
||||
if (photo?.sizes && photo.sizes.length > 0) {
|
||||
const smallest = photo.sizes[0];
|
||||
photos.push({
|
||||
id: BigInt(msg.id),
|
||||
date: new Date(msg.date * 1000),
|
||||
caption,
|
||||
fileId: String(smallest.photo.id),
|
||||
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
onProgress?.(totalScanned);
|
||||
|
||||
currentFromId = result.messages[result.messages.length - 1].id;
|
||||
|
||||
if (currentFromId === previousFromId) {
|
||||
log.warn(
|
||||
{ chatId: chatId.toString(), topicId: topicId.toString(), currentFromId, totalScanned },
|
||||
"Topic pagination stuck (from_message_id not advancing), breaking"
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
if (boundary && currentFromId < boundary) break;
|
||||
|
||||
if (result.messages.length < Math.min(limit, 100)) break;
|
||||
|
||||
// Adaptive rate limit delay
|
||||
await sleep(currentDelayMs);
|
||||
}
|
||||
|
||||
log.info(
|
||||
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||
"Topic scan complete"
|
||||
);
|
||||
|
||||
return {
|
||||
archives: archives.reverse(),
|
||||
photos: photos.reverse(),
|
||||
totalScanned,
|
||||
};
|
||||
}
|
||||
```

## Summary of Changes

| File | Change | Why |
|---|---|---|
| `worker/src/util/config.ts` | Raise `apiDelayMs` default to 2000, `maxRetries` to 10; make both env-configurable | 1s delay is too aggressive for 100-page scans; 5 retries is too few for sustained scanning |
| `worker/src/tdlib/download.ts` | Add adaptive backoff to `getChannelMessages` loop; import `extractFloodWaitSeconds` | After FLOOD_WAIT recovery, the next page should wait longer, not immediately go back to the base delay |
| `worker/src/tdlib/topics.ts` | Same adaptive backoff in `getTopicMessages` and `getForumTopicList` | Same vulnerability exists in topic scanning |

## Approach Explanation

The core insight is that `FLOOD_WAIT_35` is Telegram telling the client "you are calling me too fast, wait 35 seconds." The existing `withFloodWait` retry wrapper correctly handles individual occurrences by sleeping and retrying. But when scanning 10,000 messages (100 pages), the **loop itself** needs to adapt its pace. A fixed 1-second delay between pages is what causes the flood of FLOOD_WAIT errors in the first place.

The adaptive backoff strategy (sketched below):
- **On success:** gradually decrease the delay back toward the base value (multiply by 0.8), so scanning speeds back up once the rate limit pressure eases.
- **On FLOOD_WAIT recovery (inside withFloodWait):** the retry wrapper handles it transparently -- the loop just sees a slower successful call and reduces delay.
- **On FLOOD_WAIT exhausting retries:** double the inter-page delay (capped at 30s) before re-throwing, so if the scan is retried it starts slower.
- **Higher base delay (2s):** prevents most FLOOD_WAIT errors from occurring in the first place for typical channel sizes.
- **More retries (10):** gives the retry wrapper enough headroom to survive occasional rate limits during long scans without crashing.
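
A minimal sketch of just that pacing logic, pulled out of the scan loops above (it assumes the `sleep`, `extractFloodWaitSeconds`, and `config` helpers from this change; `hasMorePages` and `fetchPage` are placeholders):

```typescript
const MAX_DELAY_MS = 30_000;
let currentDelayMs = config.apiDelayMs; // base delay, now 2000ms

while (hasMorePages) {
  try {
    await fetchPage(); // any withFloodWait-wrapped TDLib call
    // Success: ease the delay back toward the base value.
    currentDelayMs = Math.max(config.apiDelayMs, Math.floor(currentDelayMs * 0.8));
  } catch (err) {
    // Retries exhausted on a rate limit: slow down before re-throwing,
    // so a retried scan starts at the gentler pace.
    if (extractFloodWaitSeconds(err) !== null) {
      currentDelayMs = Math.min(MAX_DELAY_MS, currentDelayMs * 2);
    }
    throw err;
  }
  await sleep(currentDelayMs);
}
```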

@@ -0,0 +1,5 @@
{
  "total_tokens": 45601,
  "duration_ms": 122326,
  "total_duration_seconds": 122.3
}
@@ -0,0 +1,12 @@
{
  "eval_id": 3,
  "eval_name": "download-and-reupload-file",
  "prompt": "Write a utility function for the worker that downloads a file from a source Telegram channel message and re-uploads it to the destination channel. It needs to handle files up to 2GB and work reliably in Docker.",
  "assertions": [
    {"text": "uses_flood_wait_retry: Download and upload calls wrapped in FLOOD_WAIT-aware retry logic", "type": "required"},
    {"text": "has_size_scaled_timeout: Timeout scales with file size, not a fixed constant", "type": "required"},
    {"text": "listener_before_action: Upload event listener attached BEFORE calling sendMessage", "type": "required"},
    {"text": "has_temp_cleanup: Temporary files cleaned up in a finally block", "type": "required"},
    {"text": "handles_2gb_limit: Acknowledges or handles the 2GB Telegram file size limit", "type": "recommended"}
  ]
}
@@ -0,0 +1,10 @@
{
  "run_id": "eval-3-with_skill",
  "expectations": [
    {"text": "uses_flood_wait_retry", "passed": true, "evidence": "Delegates to existing downloadFile and uploadToChannel which internally use withFloodWait. Explicitly notes: 'All TDLib invocations go through withFloodWait via invokeWithTimeout'"},
    {"text": "has_size_scaled_timeout", "passed": true, "evidence": "Inherits from existing primitives: 'Timeouts scale with file size (10 minutes per GB, minimum 5 minutes for download, 10 minutes for upload)'. Explicitly cited from skill."},
    {"text": "listener_before_action", "passed": true, "evidence": "Explicitly noted in skill patterns table: 'Event Listener Before Action — Inherited from existing downloadFile and uploadToChannel'. The skill pattern is referenced."},
    {"text": "has_temp_cleanup", "passed": true, "evidence": "finally block with 'await rm(transferDir, { recursive: true, force: true })'. Uses randomUUID for isolation."},
    {"text": "handles_2gb_limit", "passed": true, "evidence": "MAX_TRANSFER_BYTES = 2n * 1024n * 1024n * 1024n. Upfront validation with clear error message."}
  ]
}
@@ -0,0 +1,287 @@
# Download & Re-upload Utility for the Worker

## Approach

This utility function downloads a file from a source Telegram channel message and re-uploads it to a destination channel, returning the server-assigned message ID of the uploaded file. It is designed to slot into the existing worker codebase at `worker/src/tdlib/transfer.ts`.

The function composes the existing `downloadFile` (from `tdlib/download.ts`) and `uploadToChannel` (from `upload/channel.ts`) primitives, adding:

1. **Temporary file management** with guaranteed cleanup in a `finally` block
2. **Size-scaled timeouts** for files up to 2 GB (the TDLib user account limit)
3. **Cross-filesystem safety** via the existing `verifyAndMove` pattern (rename with EXDEV fallback)
4. **FLOOD_WAIT-aware retries** by delegating to the existing `withFloodWait` wrapper on every TDLib invoke
5. **Docker reliability**: uses `/tmp/zips` (the volume from the Dockerfile) for temp storage, ensures cleanup even on crash, and avoids holding large buffers in memory

## Skill Patterns Applied

| Skill Pattern | Where Applied |
|---|---|
| **FLOOD_WAIT handling** (`withFloodWait` + `extractFloodWaitSeconds`) | All TDLib invocations go through `withFloodWait` via `invokeWithTimeout` (download) and the upload internals |
| **Download/Upload Timeouts** (scale with file size: 10 min/GB, min 5-10 min) | Inherited from existing `downloadFile` and `sendAndWaitForUpload` |
| **Event Listener Before Action** | Inherited from existing `downloadFile` (subscribes to `updateFile` before calling `downloadFile` invoke) and `uploadToChannel` (subscribes before `sendMessage`) |
| **TDLib Client Lifecycle** (`finally` block for cleanup) | The caller is expected to manage the client; this utility manages temp files in its own `finally` |
| **File Size Limits** (User account TDLib: 2 GB upload/download) | Explicit 2 GB guard with clear error message |
| **Sequential Send Queue** / no concurrent sends | Single sequential download-then-upload, rate limit delay between steps |
| **BigInt Chat IDs** | Passes `Number(chatId)` to TDLib invoke calls (inherited from existing code) |
| **Docker Considerations** | Uses the mounted `/tmp/zips` volume, per-operation subdirectory, guaranteed cleanup |
| **Never bare `client.invoke()`** | All invocations go through `withFloodWait` wrappers |

## Full Implementation

File: `worker/src/tdlib/transfer.ts`

```typescript
import path from "path";
import { mkdir, rm, stat } from "fs/promises";
import { randomUUID } from "crypto";
import type { Client } from "tdl";
import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js";
import { downloadFile } from "./download.js";
import type { DownloadProgress } from "./download.js";
import { uploadToChannel } from "../upload/channel.js";

const log = childLogger("transfer");

/** Maximum file size TDLib user accounts can handle (2 GB). */
const MAX_TRANSFER_BYTES = 2n * 1024n * 1024n * 1024n;

export interface TransferOptions {
  /** TDLib file ID (numeric string) of the file to download. */
  fileId: string;
  /** Original file name from the source message. */
  fileName: string;
  /** Expected file size in bytes. */
  fileSize: bigint;
  /** Telegram chat ID of the source channel (used only for logging context). */
  sourceChatId: bigint;
  /** Telegram chat ID of the destination channel. */
  destChatId: bigint;
  /** Optional caption to attach to the uploaded message. */
  caption?: string;
  /** Optional callback for download progress updates. */
  onDownloadProgress?: (progress: DownloadProgress) => void;
}

export interface TransferResult {
  /** Server-assigned message ID of the uploaded file in the destination channel. */
  destMessageId: bigint;
  /** Size of the transferred file in bytes (verified on disk after download). */
  transferredBytes: number;
}

/**
 * Download a file from a source Telegram channel message and re-upload it
 * to a destination channel.
 *
 * This function:
 * 1. Validates the file size is within TDLib's 2 GB limit
 * 2. Downloads the file to a temporary directory (with progress tracking)
 * 3. Verifies the downloaded file's size matches the expected size
 * 4. Uploads the file to the destination channel
 * 5. Waits for Telegram's server-side upload confirmation
 * 6. Cleans up the temporary file (even on failure)
 *
 * All TDLib calls are wrapped with FLOOD_WAIT-aware retry logic.
 * Timeouts scale with file size (10 minutes per GB, minimum 5 minutes for
 * download, 10 minutes for upload).
 *
 * Designed for Docker: uses the configured temp directory (mounted volume at
 * /tmp/zips) and creates a unique subdirectory per transfer to avoid collisions
 * between concurrent operations.
 *
 * @throws Error if file exceeds 2 GB
 * @throws Error if download fails, times out, or size verification fails
 * @throws Error if upload fails or times out
 */
export async function downloadAndReupload(
  client: Client,
  options: TransferOptions
): Promise<TransferResult> {
  const {
    fileId,
    fileName,
    fileSize,
    sourceChatId,
    destChatId,
    caption,
    onDownloadProgress,
  } = options;

  // ── Validate file size ──
  if (fileSize > MAX_TRANSFER_BYTES) {
    throw new Error(
      `File "${fileName}" is ${Number(fileSize / (1024n * 1024n))}MB, ` +
        `which exceeds the TDLib user account limit of 2 GB`
    );
  }

  // ── Create isolated temp directory for this transfer ──
  const transferId = randomUUID();
  const transferDir = path.join(config.tempDir, `transfer_${transferId}`);
  await mkdir(transferDir, { recursive: true });

  const tempFilePath = path.join(transferDir, fileName);

  log.info(
    {
      fileId,
      fileName,
      fileSize: Number(fileSize),
      sourceChatId: Number(sourceChatId),
      destChatId: Number(destChatId),
      transferDir,
    },
    "Starting download-and-reupload transfer"
  );

  try {
    // ── Step 1: Download from source ──
    log.info({ fileId, fileName }, "Downloading file from source channel");

    await downloadFile(
      client,
      fileId,
      tempFilePath,
      fileSize,
      fileName,
      onDownloadProgress
    );

    // ── Step 2: Verify downloaded file ──
    const fileStats = await stat(tempFilePath);
    const actualBytes = fileStats.size;

    if (Number(fileSize) > 0 && actualBytes !== Number(fileSize)) {
      throw new Error(
        `Downloaded file size mismatch for "${fileName}": ` +
          `expected ${Number(fileSize)} bytes, got ${actualBytes} bytes`
      );
    }

    log.info(
      { fileName, bytes: actualBytes },
      "Download complete, starting upload to destination"
    );

    // ── Step 3: Upload to destination ──
    const uploadResult = await uploadToChannel(
      client,
      destChatId,
      [tempFilePath],
      caption
    );

    log.info(
      {
        fileName,
        destMessageId: Number(uploadResult.messageId),
        destChatId: Number(destChatId),
        bytes: actualBytes,
      },
      "Transfer complete: file uploaded to destination channel"
    );

    return {
      destMessageId: uploadResult.messageId,
      transferredBytes: actualBytes,
    };
  } catch (err) {
    log.error(
      {
        err,
        fileId,
        fileName,
        sourceChatId: Number(sourceChatId),
        destChatId: Number(destChatId),
      },
      "Transfer failed"
    );
    throw err;
  } finally {
    // ── Always clean up temp files ──
    await rm(transferDir, { recursive: true, force: true }).catch((cleanupErr) => {
      log.warn(
        { err: cleanupErr, transferDir },
        "Failed to clean up transfer temp directory"
      );
    });
  }
}

/**
 * Convenience wrapper that extracts the file info from a TDLib message object
 * and calls downloadAndReupload.
 *
 * Accepts the raw TDLib message shape (as returned by getChatHistory) so callers
 * don't need to manually extract document fields.
 *
 * @throws Error if the message does not contain a document attachment
 */
export async function transferMessageFile(
  client: Client,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  message: any,
  sourceChatId: bigint,
  destChatId: bigint,
  caption?: string,
  onDownloadProgress?: (progress: DownloadProgress) => void
): Promise<TransferResult> {
  const doc = message?.content?.document;
  if (!doc?.document) {
    throw new Error(
      `Message ${message?.id} does not contain a document attachment`
    );
  }

  return downloadAndReupload(client, {
    fileId: String(doc.document.id),
    fileName: doc.file_name ?? `file_${message.id}`,
    fileSize: BigInt(doc.document.size),
    sourceChatId,
    destChatId,
    caption,
    onDownloadProgress,
  });
}
```

## Usage Example

Within the worker pipeline (e.g., in `worker.ts` or a new route handler):

```typescript
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import { downloadAndReupload } from "./tdlib/transfer.js";

const client = await createTdlibClient({ id: account.id, phone: account.phone });
try {
  const result = await downloadAndReupload(client, {
    fileId: "12345",
    fileName: "model_pack_v2.zip",
    fileSize: 1_500_000_000n, // ~1.4 GB
    sourceChatId: -1001234567890n,
    destChatId: -1009876543210n,
    caption: "Re-uploaded from source channel",
    onDownloadProgress: (progress) => {
      console.log(`Download: ${progress.percent}%`);
    },
  });

  console.log(`Uploaded as message ${result.destMessageId}`);
} finally {
  await closeTdlibClient(client);
}
```

## Key Design Decisions

1. **Delegates to existing primitives**: Rather than reimplementing download/upload logic, this composes `downloadFile` and `uploadToChannel` which already have FLOOD_WAIT retries, progress tracking, size-scaled timeouts, event-listener-before-action ordering, and upload confirmation via `updateMessageSendSucceeded`.

2. **Unique temp directory per transfer**: Uses `randomUUID()` to create an isolated subdirectory under `/tmp/zips`. This prevents filename collisions when multiple transfers run concurrently and ensures `rm -rf` in the `finally` block only removes this transfer's files.

3. **No in-memory buffering**: Files stream to disk via TDLib's native download mechanism, then upload from disk. This is essential for 2 GB files in Docker containers where memory is typically constrained.

4. **Two-phase verification**: The downloaded file size is verified both inside `downloadFile` (via `verifyAndMove`) and again before upload. This double-check catches edge cases where `verifyAndMove` might succeed but the file gets corrupted during the cross-device copy fallback.

5. **`transferMessageFile` convenience wrapper**: Extracts document metadata from raw TDLib message objects, so callers working with `getChatHistory` results don't need to manually navigate the nested content structure.
@@ -0,0 +1,5 @@
{
  "total_tokens": 61157,
  "duration_ms": 122135,
  "total_duration_seconds": 122.1
}
@@ -0,0 +1,10 @@
{
  "run_id": "eval-3-without_skill",
  "expectations": [
    {"text": "uses_flood_wait_retry", "passed": true, "evidence": "Delegates to existing downloadFile and uploadToChannel which use withFloodWait internally. Notes 'Both are wrapped with withFloodWait internally'."},
    {"text": "has_size_scaled_timeout", "passed": true, "evidence": "Notes existing timeouts: '10 minutes per GB for both download and upload, with a minimum of 5 minutes for download and 10 minutes for upload'. Inherited from composed functions."},
    {"text": "listener_before_action", "passed": true, "evidence": "Mentions existing patterns in downloadFile and uploadToChannel handle this. But does NOT explicitly call it out as a critical concern or verify it."},
    {"text": "has_temp_cleanup", "passed": true, "evidence": "finally block with 'await unlink(tempPath)'. Uses tempSubdir for isolation."},
    {"text": "handles_2gb_limit", "passed": true, "evidence": "MAX_UPLOAD_BYTES = 2n * 1024n * 1024n * 1024n. Upfront validation with error directing to byteLevelSplit."}
  ]
}
@@ -0,0 +1,318 @@
# Download and Re-upload Utility for the DragonsStash Worker

## Approach

The utility function `downloadAndReupload` bridges a source Telegram channel message to a destination channel by:

1. **Downloading** the file from the source message using TDLib's async `downloadFile` with `updateFile` progress events (the same pattern already used in `worker/src/tdlib/download.ts`).
2. **Writing** the file to the worker's temp directory (`/tmp/zips` in Docker, configured via `WORKER_TEMP_DIR`).
3. **Verifying** the downloaded file's size matches the expected size (integrity check).
4. **Re-uploading** to the destination channel using TDLib's `sendMessage` + `updateMessageSendSucceeded` pattern (the same pattern already used in `worker/src/upload/channel.ts`).
5. **Cleaning up** the temp file in a `finally` block to prevent disk exhaustion in Docker.

### Docker Considerations

- Uses the existing `/tmp/zips` volume mount (declared in the worker Dockerfile) so large files don't fill the container's overlay filesystem.
- Handles cross-device rename (`EXDEV`) when moving files between TDLib's cache directory (`/data/tdlib`) and the temp directory, since these may be on different Docker volumes (see the sketch after this list).
- Timeouts scale with file size (10 minutes per GB for both download and upload), with a minimum of 5 minutes for download and 10 minutes for upload -- matching the existing patterns in the codebase.
- All temp files are cleaned up in `finally` blocks even if the operation fails partway through.
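
A minimal sketch of the EXDEV-safe move (the function name `moveAcrossVolumes` is illustrative; in the codebase the existing `verifyAndMove` plays this role):

```typescript
import { rename, copyFile, unlink } from "fs/promises";

// Illustrative EXDEV-safe move: rename() fails with EXDEV when source and
// destination sit on different filesystems (e.g., separate Docker volumes),
// so fall back to copy + unlink in that case.
async function moveAcrossVolumes(src: string, dest: string): Promise<void> {
  try {
    await rename(src, dest); // fast path: same filesystem
  } catch (err) {
    if ((err as NodeJS.ErrnoException).code !== "EXDEV") throw err;
    await copyFile(src, dest); // slow path: cross-device copy
    await unlink(src);
  }
}
```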

### 2GB File Support

- TDLib's `downloadFile` with `synchronous: false` handles files up to 2GB natively. The existing codebase already has this pattern working.
- Telegram's upload limit is 2GB per message. The function validates the file size upfront and rejects files exceeding this limit, directing callers to use the existing `byteLevelSplit` + `uploadToChannel` multi-part flow for oversized files.
- Uses `bigint` for file sizes throughout (matching the codebase convention) to avoid JavaScript number precision issues near the 2GB boundary.

## File Location

This utility would be placed at `worker/src/tdlib/reupload.ts`, alongside the existing `download.ts` module.

## Full Code

```typescript
// worker/src/tdlib/reupload.ts

import path from "path";
import { mkdir, unlink, stat } from "fs/promises";
import type { Client } from "tdl";
import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js";
import { downloadFile } from "./download.js";
import type { DownloadProgress } from "./download.js";
import { uploadToChannel } from "../upload/channel.js";
import type { UploadResult } from "../upload/channel.js";

const log = childLogger("reupload");

/** Maximum file size Telegram allows for a single upload (2 GB). */
const MAX_UPLOAD_BYTES = 2n * 1024n * 1024n * 1024n;

export interface ReuploadOptions {
  /** TDLib client instance (must be authenticated). */
  client: Client;
  /** Telegram file ID (numeric string) from the source message. */
  fileId: string;
  /** Original file name. */
  fileName: string;
  /** Expected file size in bytes. */
  fileSize: bigint;
  /** Telegram chat ID of the destination channel. */
  destChatId: bigint;
  /** Optional caption for the re-uploaded message. */
  caption?: string;
  /** Optional callback for download progress. */
  onDownloadProgress?: (progress: DownloadProgress) => void;
  /** Optional subdirectory name inside tempDir (to isolate concurrent operations). */
  tempSubdir?: string;
}

export interface ReuploadResult {
  /** Server-assigned message ID in the destination channel. */
  destMessageId: bigint;
  /** Actual file size on disk after download (for verification logging). */
  actualBytes: number;
}

/**
 * Download a file from a source Telegram channel message and re-upload it
 * to a destination channel.
 *
 * Flow:
 * 1. Validates file size is within Telegram's 2GB upload limit
 * 2. Downloads via TDLib async download with progress tracking
 * 3. Verifies file integrity (size match)
 * 4. Uploads to destination channel, waiting for server confirmation
 * 5. Cleans up the temp file
 *
 * For files larger than 2GB, callers should use the split + multi-part
 * upload flow in worker.ts instead.
 *
 * Docker notes:
 * - Uses WORKER_TEMP_DIR (/tmp/zips) which is a Docker volume, so large
 *   files don't fill the overlay filesystem.
 * - Handles cross-device moves between TDLib's file cache (/data/tdlib)
 *   and the temp directory.
 * - Temp files are always cleaned up, even on failure.
 *
 * @throws Error if fileSize exceeds 2GB (callers should split first)
 * @throws Error if download fails, times out, or produces a size mismatch
 * @throws Error if upload fails or times out
 */
export async function downloadAndReupload(
  opts: ReuploadOptions
): Promise<ReuploadResult> {
  const {
    client,
    fileId,
    fileName,
    fileSize,
    destChatId,
    caption,
    onDownloadProgress,
    tempSubdir,
  } = opts;

  // ── Validate: reject files that exceed Telegram's upload limit ──
  if (fileSize > MAX_UPLOAD_BYTES) {
    throw new Error(
      `File "${fileName}" is ${Number(fileSize / (1024n * 1024n))}MB, ` +
        `which exceeds Telegram's 2GB upload limit. ` +
        `Use byteLevelSplit + uploadToChannel for oversized files.`
    );
  }

  // ── Prepare temp directory ──
  const workDir = tempSubdir
    ? path.join(config.tempDir, tempSubdir)
    : config.tempDir;
  await mkdir(workDir, { recursive: true });

  const tempPath = path.join(workDir, fileName);

  log.info(
    {
      fileId,
      fileName,
      fileSize: Number(fileSize),
      destChatId: Number(destChatId),
      tempPath,
    },
    "Starting download-and-reupload"
  );

  try {
    // ── Step 1: Download from source ──
    //
    // Uses the existing downloadFile which:
    // - Starts an async TDLib download (priority 32)
    // - Listens for updateFile events with progress tracking
    // - Verifies file size after completion
    // - Moves from TDLib cache to destPath (handles EXDEV cross-device)
    // - Times out at max(5min, 10min/GB)
    log.info({ fileId, fileName }, "Downloading from source channel");

    await downloadFile(
      client,
      fileId,
      tempPath,
      fileSize,
      fileName,
      onDownloadProgress
    );

    // Extra safety: verify file exists and has the right size
    const fileStats = await stat(tempPath);
    const actualBytes = fileStats.size;
    const expectedBytes = Number(fileSize);

    if (expectedBytes > 0 && actualBytes !== expectedBytes) {
      throw new Error(
        `Downloaded file size mismatch for "${fileName}": ` +
          `expected ${expectedBytes} bytes, got ${actualBytes} bytes`
      );
    }

    log.info(
      { fileId, fileName, actualBytes },
      "Download complete, starting upload to destination"
    );

    // ── Step 2: Upload to destination channel ──
    //
    // Uses the existing uploadToChannel which:
    // - Sends via sendMessage with inputFileLocal
    // - Listens for updateMessageSendSucceeded for the real server message ID
    // - Handles FLOOD_WAIT rate limits automatically
    // - Times out at max(10min, 10min/GB)
    const uploadResult: UploadResult = await uploadToChannel(
      client,
      destChatId,
      [tempPath],
      caption
    );

    log.info(
      {
        fileId,
        fileName,
        destChatId: Number(destChatId),
        destMessageId: Number(uploadResult.messageId),
        actualBytes,
      },
      "Download-and-reupload completed successfully"
    );

    return {
      destMessageId: uploadResult.messageId,
      actualBytes,
    };
  } finally {
    // ── Always clean up temp file ──
    // Critical in Docker to prevent /tmp/zips volume from filling up,
    // especially when processing many large files in sequence.
    try {
      await unlink(tempPath);
      log.debug({ tempPath }, "Cleaned up temp file");
    } catch {
      // File may not exist if download failed before writing
    }
  }
}

/**
 * Convenience wrapper that downloads and re-uploads multiple files
 * (e.g., multipart archive parts) from a source channel to a destination.
 *
 * Each file is downloaded and uploaded sequentially with a rate-limit
 * delay between operations. Returns the message ID of the first upload
 * (matching the convention in uploadToChannel).
 *
 * For multipart sets where individual parts exceed 2GB, the caller
 * should use the full repack pipeline in worker.ts (concatenate +
 * byteLevelSplit) instead of this function.
 */
export async function downloadAndReuploadMultiple(
  client: Client,
  files: Array<{
    fileId: string;
    fileName: string;
    fileSize: bigint;
  }>,
  destChatId: bigint,
  caption?: string,
  onDownloadProgress?: (fileIndex: number, progress: DownloadProgress) => void,
  tempSubdir?: string
): Promise<ReuploadResult> {
  if (files.length === 0) {
    throw new Error("No files provided for download-and-reupload");
  }

  let firstResult: ReuploadResult | null = null;

  for (let i = 0; i < files.length; i++) {
    const file = files[i];

    // Only set caption on the first file (matching uploadToChannel convention)
    const fileCaption = i === 0 ? caption : undefined;

    const result = await downloadAndReupload({
      client,
      fileId: file.fileId,
      fileName: file.fileName,
      fileSize: file.fileSize,
      destChatId,
      caption: fileCaption,
      onDownloadProgress: onDownloadProgress
        ? (progress) => onDownloadProgress(i, progress)
        : undefined,
      tempSubdir,
    });

    if (i === 0) {
      firstResult = result;
    }

    // Rate limit delay between operations
    if (i < files.length - 1) {
      await new Promise((resolve) =>
        setTimeout(resolve, config.apiDelayMs)
      );
    }
  }

  return firstResult!;
}
```

## How It Fits Into the Existing Codebase

The function composes the two existing building blocks:

- **`downloadFile`** from `worker/src/tdlib/download.ts` -- handles async download with `updateFile` events, progress tracking, timeout, size verification, and cross-device file moves (critical for Docker where `/data/tdlib` and `/tmp/zips` are separate volumes).
- **`uploadToChannel`** from `worker/src/upload/channel.ts` -- handles `sendMessage` with `updateMessageSendSucceeded` confirmation, upload progress via `updateFile`, and FLOOD_WAIT retry.

Both are wrapped with `withFloodWait` internally, so the utility automatically handles Telegram rate limiting.

The `downloadAndReupload` function is a focused utility that can be called from the worker pipeline or any other part of the system that needs to transfer a file between channels. The existing `processOneArchiveSet` in `worker.ts` does a similar download-then-upload flow but with additional steps (hashing, dedup, metadata extraction, splitting). This utility extracts the core transfer operation for simpler use cases.

## Usage Example

```typescript
import { downloadAndReupload } from "./tdlib/reupload.js";

// Transfer a single file from source to destination
const result = await downloadAndReupload({
  client,
  fileId: "12345",
  fileName: "archive.zip",
  fileSize: 1_500_000_000n, // ~1.5 GB
  destChatId: -1001234567890n,
  caption: "Re-uploaded archive",
  onDownloadProgress: (progress) => {
    console.log(`Download: ${progress.percent}%`);
  },
  tempSubdir: `reupload_${Date.now()}`,
});

console.log(`Uploaded as message ${result.destMessageId}`);
```
@@ -0,0 +1,5 @@
{
  "total_tokens": 54587,
  "duration_ms": 146695,
  "total_duration_seconds": 146.7
}
1325  .claude/skills/tdlib-telegram-workspace/iteration-1/review.html  Normal file
File diff suppressed because one or more lines are too long

301  .claude/skills/tdlib-telegram/SKILL.md  Normal file
@@ -0,0 +1,301 @@
---
name: tdlib-telegram
description: >
  Reference guide for building Telegram integrations with TDLib (tdl/node).
  Covers rate limits, FLOOD_WAIT handling, file size constraints, bot vs user account
  differences, and safe code patterns. Use this skill whenever writing or modifying code
  that calls Telegram APIs via TDLib, the Bot API, or any Telegram client library — including
  sending messages, downloading/uploading files, scanning channels, forwarding messages,
  managing subscriptions, or handling notifications. Also use when debugging 429 errors,
  FLOOD_WAIT, or silent message drops.
---

# TDLib / Telegram Development Guide

This skill provides the rate limits, constraints, and patterns you need to write correct
Telegram integrations. The limits below come from official Telegram documentation and
well-established community findings (Telegram does not publish exact numbers for all limits).

## Telegram Rate Limits

These are approximate safe boundaries. Telegram's actual limits are dynamic and depend on
account age, history, and request type. The correct strategy is to respect these as guidelines
and always handle FLOOD_WAIT errors gracefully.

### Bot Accounts

| Operation | Limit | Notes |
|-----------|-------|-------|
| Messages to same chat | ~1 msg/sec | Bursts OK, sustained exceeds limit |
| Messages in a group | 20 msgs/min | Hard limit per group chat |
| Bulk notifications (different users) | ~30 msgs/sec | Global across all chats |
| Message edits in a group | ~20 edits/min | Community-observed |
| API requests (global) | ~30 req/sec | All request types combined |
| Paid broadcasts | up to 1000 msgs/sec | Requires Telegram Stars balance |

### User Accounts (TDLib)

| Operation | Limit | Notes |
|-----------|-------|-------|
| API requests (global) | ~30 req/sec | All request types combined |
| Messages in a group | ~20 msgs/min | Same as bot |
| Channel history reads | No published limit | But pagination + delay is essential |
| Joining groups | Very strict | FLOOD_WAIT often 30-300+ seconds |

### File Size Limits

| Context | Upload | Download |
|---------|--------|----------|
| Bot API (standard) | 50 MB | 20 MB |
| Bot API (local server) | 2,000 MB | 2,000 MB |
| User account (TDLib) | 2 GB | 2 GB |
| Premium user (TDLib) | 4 GB | 4 GB |

### Message & Content Limits

| Item | Limit |
|------|-------|
| Message text length | 4,096 chars |
| Media caption | 1,024 chars (4,096 premium) |
| Album / media group | 10 items max |
| Forwarded messages per request | `forwarded_message_count_max` (TDLib option) |
| Inline keyboard buttons | 100 entities |
| Formatting entities per message | 100 |
| Scheduled messages per chat | 100 |
| Bot commands | 100 max |

### Forum & Group Limits

| Item | Limit |
|------|-------|
| Topics per group | 1,000,000 |
| Topic title | 128 chars |
| Group members | 200,000 |
| Admins per group | 50 |
| Bots per group | 20 |
| Pinned topics | 5 |
## FLOOD_WAIT — How It Works

When you exceed rate limits, Telegram returns a `FLOOD_WAIT_X` error (or HTTP 429 with
`retry_after`). This is a **mandatory pause** — the value `X` is the number of seconds you
must wait before ANY request will succeed. It blocks the entire client, not just the
operation that triggered it.

### The Right Way to Handle It

```typescript
// Small sleep helper (the codebase keeps this in util/flood-wait.ts)
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Extract the wait duration from the error
function extractFloodWaitSeconds(err: unknown): number | null {
  const message = err instanceof Error ? err.message : String(err);

  // Pattern 1: FLOOD_WAIT_30
  const flood = message.match(/FLOOD_WAIT_(\d+)/i);
  if (flood) return parseInt(flood[1], 10);

  // Pattern 2: "retry after 30"
  const retry = message.match(/retry after (\d+)/i);
  if (retry) return parseInt(retry[1], 10);

  // Pattern 3: HTTP 429 without explicit seconds
  if (String((err as any)?.code) === "429") return 30;

  return null; // Not a rate limit error
}

// Wrap any TDLib call with automatic retry
async function withFloodWait<T>(fn: () => Promise<T>, maxRetries = 5): Promise<T> {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (err) {
      const wait = extractFloodWaitSeconds(err);
      if (wait === null || attempt >= maxRetries) throw err;

      // Add 1-5s jitter to prevent thundering herd
      const jitter = 1000 + Math.random() * 4000;
      await sleep(wait * 1000 + jitter);
    }
  }
  throw new Error("Unreachable");
}
```

### Key Rules

- **Always respect the wait duration.** Never retry before `retry_after` expires.
- **Add jitter.** Without it, multiple clients retry simultaneously and trigger another FLOOD_WAIT.
- **Non-rate-limit errors should fail fast.** Only retry on FLOOD_WAIT, not on other errors.
- **Don't artificially throttle below ~1 req/sec.** Telegram's own guidance (via grammY docs)
  is to send requests as fast as you need and handle 429 errors. Fixed low-frequency throttling
  wastes throughput without preventing floods.

## Code Patterns

### Pattern: Sequential Send Queue

When sending notifications to multiple users, use a sequential queue with a per-message delay.
Never fire concurrent sends — you will hit the 30 msg/sec global limit instantly.

```typescript
let sendQueue: Promise<void> = Promise.resolve();

function queueSend(chatId: bigint, text: string): void {
  sendQueue = sendQueue
    .then(() => withFloodWait(() => sendTextMessage(chatId, text)))
    .then(() => sleep(50)) // ~20 msgs/sec, well under 30 limit
    .catch((err) => log.error({ err, chatId }, "Send failed"));
}
```

### Pattern: Paginated Scanning with Delay

When reading channel history or enumerating topics, always add a delay between pages:

```typescript
while (hasMorePages) {
  const result = await invokeWithTimeout(client, { _: "getChatHistory", ... });
  processMessages(result.messages);

  if (result.messages.length < limit) break;

  await sleep(1000); // 1 second between pages — prevents FLOOD_WAIT on large channels
}
```

### Pattern: Event Listener Before Action

When waiting for TDLib async events (upload confirmation, download completion), always
attach the event listener BEFORE starting the operation. If you attach after, fast
operations can complete before the listener exists, causing the promise to hang forever.

```typescript
// CORRECT: listener first, then action
client.on("update", handleUpdate);
const tempMsg = await client.invoke({ _: "sendMessage", ... });
tempMsgId = tempMsg.id; // handler now knows which message to match

// WRONG: action first, then listener — race condition!
const tempMsg = await client.invoke({ _: "sendMessage", ... });
client.on("update", handleUpdate); // may miss updateMessageSendSucceeded
```

### Pattern: Download/Upload Timeouts

Scale timeouts with file size. TDLib downloads/uploads are asynchronous — without a timeout,
a stalled transfer hangs the entire pipeline.

```typescript
const timeoutMs = Math.max(
  10 * 60_000, // minimum 10 minutes
  (fileSizeMB / 1024) * 10 * 60_000 // 10 minutes per GB
);
```

### Pattern: TDLib Client Lifecycle

Always close TDLib clients in a `finally` block. Unclosed clients leak memory and file
descriptors, and can leave TDLib's internal database locked.

```typescript
const client = await createTdlibClient(account);
try {
  // ... use client ...
} finally {
  await closeTdlibClient(client);
}
```

## Anti-Patterns

### Never: Concurrent TDLib Sends Without Queue

```typescript
// BAD: fires all sends concurrently — will trigger FLOOD_WAIT immediately
await Promise.all(users.map((u) => sendTextMessage(u.chatId, msg)));

// GOOD: sequential with delay
for (const user of users) {
  await withFloodWait(() => sendTextMessage(user.chatId, msg));
  await sleep(50);
}
```

### Never: Bare client.invoke() Without Retry

Every `client.invoke()` call can return FLOOD_WAIT at any time. Bare calls will crash
on rate limits instead of retrying.

```typescript
// BAD: crashes on FLOOD_WAIT
await client.invoke({ _: "sendMessage", ... });

// GOOD: retries automatically
await withFloodWait(() => client.invoke({ _: "sendMessage", ... }));
```

### Never: Retry Without Respecting retry_after

```typescript
// BAD: fixed 1-second retry ignores Telegram's wait requirement
catch (err) { await sleep(1000); retry(); }

// GOOD: extract and respect the actual wait time
catch (err) {
  const wait = extractFloodWaitSeconds(err);
  const jitter = 1000 + Math.random() * 4000; // same jitter as withFloodWait
  if (wait !== null) await sleep(wait * 1000 + jitter);
  else throw err;
}
```

### Never: Ignore FLOOD_WAIT in Bots

Bot accounts get the same FLOOD_WAIT as user accounts. The bot API's 429 response
blocks ALL operations for the specified duration — not just the chat that triggered it.
A single unhandled flood in a notification loop can make the entire bot unresponsive.

## Bot vs User Account Differences

| Capability | Bot | User (TDLib) |
|-----------|-----|-------------|
| Read channel history | No (unless admin) | Yes |
| Send to users who haven't started bot | No | N/A |
| Join groups via invite link | No (must be added) | Yes |
| Forward messages (send_copy) | Yes | Yes |
| File upload limit | 50 MB (standard API) | 2 GB |
| File download limit | 20 MB (standard API) | 2 GB |
| Auth method | Bot token | Phone + SMS code |
| Rate limit profile | Same FLOOD_WAIT | Same FLOOD_WAIT |

## TDLib-Specific Notes

### BigInt Chat IDs

TDLib uses numeric chat IDs. Supergroups and channels use negative IDs (e.g., `-1001234567890`).
When passing to `client.invoke()`, convert with `Number(chatId)` — TDLib's JSON interface
doesn't handle BigInt. Be aware that very large IDs may lose precision with `Number()`,
though current Telegram IDs are within safe integer range.
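
For example (a minimal sketch; assumes an authenticated tdl `client`, and the guard is the safety check described above):

```typescript
// Chat IDs live as bigint in the database, but TDLib's JSON interface
// only accepts plain numbers.
const chatId: bigint = -1001234567890n;

// Guard against silent precision loss before converting.
if (chatId > BigInt(Number.MAX_SAFE_INTEGER) || chatId < BigInt(Number.MIN_SAFE_INTEGER)) {
  throw new Error(`Chat ID ${chatId} is outside Number's safe integer range`);
}

await client.invoke({ _: "getChat", chat_id: Number(chatId) });
```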

### TDLib Options (Runtime Queryable)

These are read-only values you can query at runtime via `getOption`:
- `message_text_length_max` — max message text length
- `message_caption_length_max` — max caption length
- `forwarded_message_count_max` — max forwards per request
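
A sketch of querying one (assuming an authenticated tdl client; TDLib wraps the result in an `optionValue*` object, with int64 values serialized as strings in the JSON interface):

```typescript
// Prefer the server-reported caption limit over a hardcoded 1024.
const opt = await client.invoke({
  _: "getOption",
  name: "message_caption_length_max",
});

// Expected shape: { _: "optionValueInteger", value: "1024" }
const maxCaption = opt._ === "optionValueInteger" ? Number(opt.value) : 1024;
```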

### Session State

TDLib persists session state to disk. Each account needs its own state directory.
Running two clients on the same state directory simultaneously will corrupt the database.
Use separate directories per account, and separate volumes in Docker for worker vs bot.
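
A sketch of the per-account layout (the directory paths are illustrative; `databaseDirectory` and `filesDirectory` are tdl client options):

```typescript
import * as tdl from "tdl";

// One isolated state directory per account — never share these
// between processes or services.
function createClientFor(accountId: string) {
  return tdl.createClient({
    apiId: Number(process.env.TELEGRAM_API_ID),
    apiHash: process.env.TELEGRAM_API_HASH!,
    databaseDirectory: `/data/tdlib/${accountId}/db`,
    filesDirectory: `/data/tdlib/${accountId}/files`,
  });
}
```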

## Docker Considerations

- **prebuilt-tdlib**: The `prebuilt-tdlib` npm package provides platform-specific TDLib
  binaries. Container base image must match (e.g., `node:20-bookworm-slim` for Debian x64).
- **Volumes**: Mount persistent volumes for TDLib state directories — losing state forces
  full re-authentication.
- **Graceful shutdown**: Wait for active operations to finish before closing DB connections.
  TDLib operations in flight will fail if the database pool is closed underneath them.
- **Health checks**: TDLib services don't expose HTTP — use database connectivity as the
  health signal instead (see the sketch below).
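
A minimal sketch of that health signal (the interval and exit behavior are illustrative; it assumes the worker's existing pg pool):

```typescript
import type { Pool } from "pg";

// No HTTP server to probe, so poll the database and let the container's
// restart policy take over if connectivity is lost.
export function startDbHealthCheck(pool: Pool, intervalMs = 30_000): NodeJS.Timeout {
  return setInterval(async () => {
    try {
      await pool.query("SELECT 1");
    } catch (err) {
      console.error("DB health check failed, exiting for restart", err);
      process.exit(1);
    }
  }, intervalMs);
}
```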

23  .claude/skills/tdlib-telegram/evals/evals.json  Normal file
@@ -0,0 +1,23 @@
{
  "skill_name": "tdlib-telegram",
  "evals": [
    {
      "id": 1,
      "prompt": "Add a new bot command /broadcast that sends a text message to ALL users who have a TelegramLink in the database. The admin triggers it from the web app. Add it to the bot's command handler and create an API endpoint that triggers it.",
      "expected_output": "Code that uses a sequential send queue with withFloodWait wrapping each sendTextMessage call, a delay between sends (~50ms), and does NOT use Promise.all or concurrent sends. Should handle errors per-user without stopping the broadcast.",
      "files": []
    },
    {
      "id": 2,
      "prompt": "The worker keeps crashing with 'FLOOD_WAIT_35' errors when scanning a source channel that has about 10,000 messages. It happens during the getChannelMessages pagination loop. How do I fix this?",
      "expected_output": "Diagnosis that the apiDelayMs between pages may be too low or the retry logic isn't wrapping the pagination calls. Should recommend ensuring all getChatHistory/searchChatMessages calls go through withFloodWait/invokeWithTimeout, and that sleep(config.apiDelayMs) exists between pages. Should NOT suggest reducing scan frequency as the primary fix.",
      "files": []
    },
    {
      "id": 3,
      "prompt": "Write a utility function for the worker that downloads a file from a source Telegram channel message and re-uploads it to the destination channel. It needs to handle files up to 2GB and work reliably in Docker.",
      "expected_output": "Code that: (1) wraps download in withFloodWait with size-scaled timeout, (2) attaches upload event listener BEFORE calling sendMessage, (3) uses temp directory with cleanup in finally block, (4) handles the 2GB Telegram limit correctly, (5) uses try/finally for client cleanup if applicable.",
      "files": []
    }
  ]
}
69  .drone.yml  Normal file
@@ -0,0 +1,69 @@
---
kind: pipeline
type: docker
name: build-and-deploy

trigger:
  branch: [main]
  event: [push]

steps:
  - name: build-app
    image: plugins/docker
    settings:
      repo: git.samagsteribbe.nl/admin/dragonsstash
      registry: git.samagsteribbe.nl
      dockerfile: Dockerfile
      tags:
        - latest
        - "${DRONE_COMMIT_SHA:0:8}"
      build_args:
        - NEXT_PUBLIC_APP_URL=https://dragonsstash.samagsteribbe.nl
      username:
        from_secret: gitea_username
      password:
        from_secret: gitea_password

  - name: build-worker
    image: plugins/docker
    depends_on: [clone]
    settings:
      repo: git.samagsteribbe.nl/admin/dragonsstash-worker
      registry: git.samagsteribbe.nl
      dockerfile: worker/Dockerfile
      tags:
        - latest
        - "${DRONE_COMMIT_SHA:0:8}"
      username:
        from_secret: gitea_username
      password:
        from_secret: gitea_password

  - name: build-bot
    image: plugins/docker
    depends_on: [clone]
    settings:
      repo: git.samagsteribbe.nl/admin/dragonsstash-bot
      registry: git.samagsteribbe.nl
      dockerfile: bot/Dockerfile
      tags:
        - latest
        - "${DRONE_COMMIT_SHA:0:8}"
      username:
        from_secret: gitea_username
      password:
        from_secret: gitea_password

  - name: deploy
    image: alpine
    depends_on: [build-app, build-worker, build-bot]
    environment:
      SSH_KEY:
        from_secret: ssh_key
    commands:
      - apk add --no-cache openssh-client
      - mkdir -p ~/.ssh
      - printf "%s" "$SSH_KEY" > ~/.ssh/id_ed25519
      - chmod 600 ~/.ssh/id_ed25519
      - ssh-keyscan -t ed25519 192.168.68.68 > ~/.ssh/known_hosts 2>/dev/null
      - ssh sam@192.168.68.68 "cd /opt/stacks/DragonsStash && docker compose pull && docker compose up -d"
@@ -13,6 +13,8 @@ AUTH_GITHUB_ID=""
AUTH_GITHUB_SECRET=""

# App
# APP_PORT controls the port the container listens on AND how it is exposed on the host.
# If you change APP_PORT, also update NEXT_PUBLIC_APP_URL to match.
NEXT_PUBLIC_APP_URL="http://localhost:3000"
APP_PORT=3000
1  .gitignore  vendored
@@ -18,6 +18,7 @@ worker/node_modules

# production
/build
worker/dist

# misc
.DS_Store
102  CLAUDE.md  Normal file
@@ -0,0 +1,102 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Project Overview

Dragon's Stash is a self-hosted inventory management system for 3D printing filament, SLA resin, miniature paints, and supplies. It includes an integrated Telegram archive worker that scans channels for ZIP/RAR archives, indexes their contents, and a bot that lets users search and receive packages via Telegram.

## Tech Stack

- **App**: Next.js 16 (App Router), TypeScript 5.9 (strict), Tailwind CSS 4, shadcn/ui
- **Database**: PostgreSQL 16+ via Prisma v7.4 with `@prisma/adapter-pg`
- **Auth**: Auth.js v5 (NextAuth) with credentials + optional GitHub OAuth
- **Worker**: TypeScript + TDLib (via `tdl`) for Telegram channel scanning
- **Bot**: TypeScript + TDLib for Telegram bot interface
- **Forms**: React Hook Form + Zod v4

## Commands

### App (root package.json)
```bash
npm run dev      # Next.js dev server with hot reload
npm run build    # Production build (standalone output)
npm run start    # Production server
npm run lint     # ESLint (next/core-web-vitals + TypeScript)
```

### Database
```bash
npm run db:generate   # Generate Prisma client
npm run db:migrate    # Run migrations (dev mode)
npm run db:push       # Push schema without migrations
npm run db:seed       # Seed database with test data
npm run db:studio     # Prisma Studio UI
npx prisma migrate dev --name <description>   # Create new migration
```

### Worker & Bot (each in their own directory)
```bash
cd worker && npm run dev     # Dev mode with tsx watch
cd worker && npm run build   # TypeScript compile to dist/
cd bot && npm run dev        # Dev mode with tsx watch
cd bot && npm run build      # TypeScript compile to dist/
```

### Dev Environment Setup
```bash
docker compose -f docker-compose.dev.yml up -d   # Start PostgreSQL + worker
npm run dev                                      # Run app locally
```

## Architecture

### Three-Service Design
The project is split into three independent services sharing one PostgreSQL database:
1. **App** (root `src/`): Next.js web UI for inventory management and Telegram admin
2. **Worker** (`worker/`): Scans Telegram source channels, processes archives, uploads to destination channel
3. **Bot** (`bot/`): Telegram bot for user search, package delivery, keyword subscriptions

Services communicate asynchronously via `pg_notify` (e.g., on-demand channel fetches, bot send requests).
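
A sketch of that pattern (the channel name and payload shape are illustrative; it uses the `pg` client's LISTEN/NOTIFY support):

```typescript
import pg from "pg";

// Listener side (e.g., the bot): subscribe to a channel and react to payloads.
const client = new pg.Client({ connectionString: process.env.DATABASE_URL });
await client.connect();
await client.query("LISTEN bot_send_request");
client.on("notification", (msg) => {
  const payload = msg.payload ? JSON.parse(msg.payload) : null;
  console.log("send request", payload?.requestId);
});

// Sender side (e.g., the app or worker): NOTIFY with a JSON payload.
await client.query("SELECT pg_notify('bot_send_request', $1)", [
  JSON.stringify({ requestId: "cuid123" }),
]);
```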

### App Source Layout (`src/`)
- `app/(auth)/` — Login/register pages (public)
- `app/(app)/` — Protected routes behind auth middleware (dashboard, filaments, resins, paints, supplies, vendors, locations, settings, stls, telegram, usage)
- `app/api/` — API routes (NextAuth, health check, bot endpoints)
- `data/` — Server-side Prisma query functions (`*.queries.ts`), one file per domain model
- `schemas/` — Zod validation schemas, one file per domain model
- `components/ui/` — shadcn/ui primitives
- `components/shared/` — Reusable business components (data-table, status-badge, color-swatch, stat-card, page-header)
- `components/layout/` — Sidebar and header
- `lib/` — Auth config, Prisma singleton, constants, utilities, Telegram query helpers
- `hooks/` — Custom React hooks (use-modal, use-debounce, use-current-user)
- `types/` — Shared TypeScript types

### Key Patterns
- **Server Components by default** — pages are async server components that fetch data directly. Only interactive components use `"use client"`.
- **Server Actions for mutations** — each page directory has an `actions.ts` file with create/update/delete actions (see the sketch after this list).
- **Data queries centralized** — all Prisma reads go through `src/data/*.queries.ts`, not inline in components.
- **Modal-based CRUD** — add/edit forms use dialog modals, not separate pages.
- **TanStack Table** with server-side pagination for all inventory tables.
- **All Prisma PKs use `cuid()`** string IDs.
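
A minimal sketch of the `actions.ts` shape (model and field names are illustrative, not the real schema):

```typescript
"use server";

import { revalidatePath } from "next/cache";
import { z } from "zod";
import { db } from "@/lib/db"; // Prisma singleton (import path illustrative)

const createFilamentSchema = z.object({
  name: z.string().min(1),
  color: z.string().min(1),
});

export async function createFilament(input: unknown) {
  // Validate on the server even if the form validated client-side.
  const data = createFilamentSchema.parse(input);
  await db.filament.create({ data });
  revalidatePath("/filaments"); // refresh the server-component page
}
```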

### Worker Pipeline
1. Authenticate Telegram account via TDLib (SMS code flow, managed via admin UI)
2. Scan source channels for messages since `lastProcessedMessageId`
3. Detect archives (ZIP/RAR), group multipart sets, extract file listings
4. Hash for dedup, match preview images, extract creator from filename
5. Split files >2GB, upload to destination channel, track progress

### ESLint Scope
ESLint covers `src/` only. The `worker/`, `bot/`, `scripts/`, and `prisma/seed.ts` directories are excluded from linting.

## Docker Deployment

- `docker-compose.yml` — Production: app + worker + bot + db
- `docker-compose.dev.yml` — Dev: db + worker only (app runs locally)
- `docker-entrypoint.sh` — Runs migrations, optional seeding, then starts app
- Bot service uses Docker Compose profiles (`bot` or `full`) — not started by default

## Testing

No test framework is configured. Testing is manual.
15  Dockerfile
@@ -17,6 +17,8 @@ COPY --from=deps /app/node_modules ./node_modules
COPY . .

ENV NEXT_TELEMETRY_DISABLED=1
ARG NEXT_PUBLIC_APP_URL=http://localhost:3000
ENV NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL}
RUN npm run build

# --- Production image ---
@@ -30,19 +32,19 @@ RUN addgroup --system --gid 1001 nodejs && \
    adduser --system --uid 1001 nextjs

# Copy public assets
COPY --from=builder /app/public ./public

# Copy prisma schema + migrations for runtime migrate deploy
COPY --from=builder /app/prisma ./prisma
COPY --from=builder /app/prisma.config.ts ./prisma.config.ts
COPY --from=builder --chown=nextjs:nodejs /app/public ./public

# Copy standalone build output
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static

# Copy prisma schema + migrations for runtime migrate deploy
COPY --from=builder --chown=nextjs:nodejs /app/prisma ./prisma
COPY --from=builder --chown=nextjs:nodejs /app/prisma.config.ts ./prisma.config.ts

# Copy node_modules for prisma CLI (needed for migrate deploy at startup).
# Copying the full directory ensures all transitive dependencies are present.
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
# Recreate the .bin/prisma symlink so Node resolves __dirname to prisma/build/,
# where the WASM files live (COPY dereferences symlinks, breaking WASM resolution)
RUN mkdir -p ./node_modules/.bin && \
@@ -54,6 +56,7 @@ RUN chmod +x docker-entrypoint.sh

USER nextjs

# Default port — overridden at runtime by the PORT env var (set via docker-compose APP_PORT)
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"
13  README.md
@@ -125,18 +125,15 @@ docker compose up -d

The app will be available at [http://localhost:3000](http://localhost:3000).

### Adding Telegram Services
### Adding the Telegram Bot

The worker and bot run as optional profiles so `docker compose up` works with just the app + database:
The worker starts by default with `docker compose up`. The bot runs as an optional profile:

```bash
# App + DB + Telegram worker (needs TELEGRAM_API_ID + TELEGRAM_API_HASH in .env)
docker compose --profile telegram up -d

# App + DB + Worker + Bot (also needs BOT_TOKEN in .env)
docker compose --profile full up -d

# Or just the bot (alongside app + db)
# Or just the bot (alongside app + db + worker)
docker compose --profile bot up -d
```

@@ -297,5 +294,7 @@ curl http://localhost:3000/api/health
5. Open a Pull Request

## License

This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -21,7 +21,16 @@ export async function findLinkByUserId(userId: string) {
export async function validateLinkCode(code: string): Promise<string | null> {
  const key = `link_code:${code}`;
  const setting = await db.globalSetting.findUnique({ where: { key } });
  return setting?.value ?? null;
  if (!setting) return null;

  try {
    const parsed = JSON.parse(setting.value);
    if (parsed.expiresAt && new Date(parsed.expiresAt) < new Date()) return null;
    return parsed.userId ?? null;
  } catch {
    // Legacy format: value is the userId directly
    return setting.value;
  }
}

export async function deleteLinkCode(code: string): Promise<void> {
@@ -106,9 +115,15 @@ export async function getPendingSendRequest(requestId: string) {
      select: {
        id: true,
        fileName: true,
        fileSize: true,
        fileCount: true,
        creator: true,
        tags: true,
        archiveType: true,
        destChannelId: true,
        destMessageId: true,
        previewData: true,
        sourceChannel: { select: { title: true, telegramId: true } },
      },
    },
    telegramLink: true,
@@ -1,7 +1,7 @@
import { config } from "./util/config.js";
import { logger } from "./util/logger.js";
import { db, pool } from "./db/client.js";
import { createBotClient, closeBotClient, onBotUpdate } from "./tdlib/client.js";
import { createBotClient, closeBotClient, onBotUpdate, getUser } from "./tdlib/client.js";
import { startSendListener, stopSendListener } from "./send-listener.js";
import { handleMessage } from "./commands.js";
import { mkdir } from "fs/promises";
@@ -49,14 +49,27 @@ async function main(): Promise<void> {
      const userId = senderId.user_id as number;

      if (text && userId) {
        // Get user info for display name (async but fire-and-forget for perf)
        handleMessage({
          chatId: BigInt(chatId),
          userId: BigInt(userId),
          text,
          firstName: "User", // TDLib provides this via a separate getUser call
          username: undefined,
        }).catch((err) => {
        (async () => {
          let firstName = "User";
          let lastName: string | undefined;
          let username: string | undefined;
          try {
            const userInfo = await getUser(userId);
            firstName = userInfo.firstName;
            lastName = userInfo.lastName;
            username = userInfo.username;
          } catch {
            // Fall back to defaults if getUser fails
          }
          await handleMessage({
            chatId: BigInt(chatId),
            userId: BigInt(userId),
            text,
            firstName,
            lastName,
            username,
          });
        })().catch((err) => {
          log.error({ err, chatId, userId }, "Failed to handle message");
        });
      }
@@ -8,33 +8,83 @@ import {
  getGlobalDestinationChannel,
} from "./db/queries.js";
import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
import { sleep } from "./util/flood-wait.js";

const log = childLogger("send-listener");

let pgClient: pg.PoolClient | null = null;
let stopped = false;

/** Delay (ms) before attempting to reconnect after a connection loss. */
const RECONNECT_DELAY_MS = 5_000;

/**
 * Start listening for pg_notify signals:
 * - `bot_send` — payload = requestId → send a package to a user
 * - `new_package` — payload = JSON { packageId, fileName, creator } → notify subscribers
 *
 * If the underlying connection is lost, the listener automatically reconnects
 * so that pg_notify signals are never silently dropped.
 */
export async function startSendListener(): Promise<void> {
  pgClient = await pool.connect();
  await pgClient.query("LISTEN bot_send");
  await pgClient.query("LISTEN new_package");
  stopped = false;
  await connectListener();
}

  pgClient.on("notification", (msg) => {
    if (msg.channel === "bot_send" && msg.payload) {
      handleBotSend(msg.payload);
    } else if (msg.channel === "new_package" && msg.payload) {
      handleNewPackage(msg.payload);
async function connectListener(): Promise<void> {
  try {
    pgClient = await pool.connect();
    await pgClient.query("LISTEN bot_send");
    await pgClient.query("LISTEN new_package");

    pgClient.on("notification", (msg) => {
      if (msg.channel === "bot_send" && msg.payload) {
        handleBotSend(msg.payload);
      } else if (msg.channel === "new_package" && msg.payload) {
        handleNewPackage(msg.payload);
      }
    });

    // Reconnect automatically when the connection ends unexpectedly
    pgClient.on("end", () => {
      if (!stopped) {
        log.warn("Send listener connection lost — reconnecting");
        pgClient = null;
        scheduleReconnect();
      }
    });

    pgClient.on("error", (err) => {
      log.error({ err }, "Send listener connection error");
      if (!stopped && pgClient) {
        try {
          pgClient.release(true);
        } catch (releaseErr) {
          log.debug({ err: releaseErr }, "Failed to release pg client after error");
        }
        pgClient = null;
        scheduleReconnect();
      }
    });

    log.info("Send listener started (bot_send, new_package)");
  } catch (err) {
    log.error({ err }, "Failed to start send listener — retrying");
    scheduleReconnect();
  }
}

function scheduleReconnect(): void {
  if (stopped) return;
  setTimeout(() => {
    if (!stopped) {
      connectListener();
    }
  });

  log.info("Send listener started (bot_send, new_package)");
  }, RECONNECT_DELAY_MS);
}

export function stopSendListener(): void {
  stopped = true;
  if (pgClient) {
    pgClient.release();
    pgClient = null;
@@ -84,9 +134,22 @@ async function processSendRequest(requestId: string): Promise<void> {
    throw new Error("No global destination channel configured");
  }

  // Send preview if available
  // Send preview with rich caption if available
  if (pkg.previewData) {
    const caption = `📦 *${pkg.fileName}*\n\nSent from Dragon's Stash`;
    const lines: string[] = [];
    lines.push(`📦 *${escapeMarkdown(pkg.fileName)}*`);
    if (pkg.creator) lines.push(`👤 ${escapeMarkdown(pkg.creator)}`);
    if (pkg.fileCount > 0) lines.push(`📁 ${pkg.fileCount} files`);
    if (pkg.tags && pkg.tags.length > 0) {
      lines.push(`🏷️ ${pkg.tags.map((t: string) => escapeMarkdown(t)).join(", ")}`);
    }
    if (pkg.sourceChannel) {
      lines.push(`📡 Source: ${escapeMarkdown(pkg.sourceChannel.title)}`);
    }
    lines.push("");
    lines.push("_Sent from Dragon's Stash_");

    const caption = lines.join("\n");
    await sendPhotoMessage(targetUserId, Buffer.from(pkg.previewData), caption);
  }

@@ -114,6 +177,7 @@ async function handleNewPackage(payload: string): Promise<void> {
      packageId: string;
      fileName: string;
      creator: string | null;
      tags?: string[];
    };

    const subs = await findMatchingSubscriptions(data.fileName, data.creator);
@@ -133,12 +197,15 @@ async function handleNewPackage(payload: string): Promise<void> {
      userSubs.set(key, patterns);
    }

    const creator = data.creator ? ` by ${data.creator}` : "";
    const creator = data.creator ? ` by ${escapeHtml(data.creator)}` : "";
    for (const [telegramUserId, patterns] of userSubs) {
      const msg = [
        `🔔 <b>New package matching your subscriptions:</b>`,
        ``,
        `📦 <b>${escapeHtml(data.fileName)}</b>${creator}`,
        ...(data.tags && data.tags.length > 0
          ? [`🏷️ ${data.tags.map((t: string) => escapeHtml(t)).join(", ")}`]
          : []),
        ``,
        `Matched: ${patterns.map((p) => `"${escapeHtml(p)}"`).join(", ")}`,
        ``,
@@ -151,6 +218,9 @@ async function handleNewPackage(payload: string): Promise<void> {
          "Failed to notify subscriber"
        );
      });

      // Rate limit delay between notifications (~20 msgs/sec, under 30 msgs/sec bot limit)
      await sleep(50);
    }
  } catch (err) {
    log.error({ err, payload }, "Failed to process new_package notification");
@@ -160,3 +230,7 @@ async function handleNewPackage(payload: string): Promise<void> {
function escapeHtml(text: string): string {
  return text.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}

function escapeMarkdown(text: string): string {
  return text.replace(/([_*[\]()~`>#+\-=|{}.!\\])/g, "\\$1");
}
@@ -2,6 +2,7 @@ import tdl from "tdl";
import { getTdjson } from "prebuilt-tdlib";
import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js";
import { withFloodWait } from "../util/flood-wait.js";

const log = childLogger("tdlib-bot");

@@ -33,7 +34,7 @@ export async function createBotClient(): Promise<tdl.Client> {

  await client.login(() => ({
    type: "bot",
    token: config.botToken,
    getToken: () => Promise.resolve(config.botToken),
  }));

  log.info("Bot client authenticated successfully");
@@ -53,8 +54,14 @@ export async function closeBotClient(): Promise<void> {
}

/**
 * Forward a message from a channel to a user's DM.
 * Uses copyMessage to make it appear as sent by the bot.
 * Send a document from a channel to a user's DM.
 *
 * Instead of forwardMessages (unreliable for bot accounts with send_copy),
 * we fetch the original message to get the file's remote ID, then send a
 * new message with inputFileRemote. This is the documented reliable approach
 * for bots — the file is already on Telegram's servers so no re-upload is needed.
 *
 * Falls back to a plain forward (without send_copy) if getMessage fails.
 */
export async function copyMessageToUser(
  fromChatId: bigint,
@@ -62,18 +69,137 @@ export async function copyMessageToUser(
  toUserId: bigint
): Promise<void> {
  if (!client) throw new Error("Bot client not initialized");
  const c = client;

  // TDLib uses negative chat IDs for channels/supergroups
  // The telegramId from the DB is the raw Telegram ID; for channels it needs -100 prefix
  const fromChatIdNum = Number(-100n * 1n) + Number(fromChatId);
  log.info(
    { fromChatId: fromChatId.toString(), messageId: messageId.toString(), toUserId: toUserId.toString() },
    "Sending file to user"
  );

  await client.invoke({
    _: "forwardMessages",
    chat_id: Number(toUserId),
    from_chat_id: Number(fromChatId) > 0 ? -Number(fromChatId) : Number(fromChatId),
    message_ids: [Number(messageId)],
    send_copy: true,
    remove_caption: false,
  // Step 1: Get the original message to extract the file's remote ID
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let message: any;
  try {
    message = await withFloodWait(
      () => c.invoke({
        _: "getMessage",
        chat_id: Number(fromChatId),
        message_id: Number(messageId),
      }),
      "getMessage"
    );
  } catch (err) {
    log.error({ err, fromChatId: fromChatId.toString(), messageId: messageId.toString() }, "getMessage failed");
    throw new Error(`Cannot get source message: ${err instanceof Error ? err.message : String(err)}`);
  }

  // Step 2: Extract the document's remote file ID
  const doc = message?.content?.document;
  if (!doc?.document?.remote?.id) {
    log.error(
      { messageContent: message?.content?._, messageId: messageId.toString() },
      "Source message has no document with remote file ID"
    );
    throw new Error(`Source message is not a document or has no remote file ID (type: ${message?.content?._})`);
  }

  const remoteFileId: string = doc.document.remote.id;
  const fileName: string = doc.file_name ?? "file";
  const caption = message.content?.caption;

  log.info(
    { remoteFileId: remoteFileId.slice(0, 20) + "...", fileName, toUserId: toUserId.toString() },
    "Sending document via inputFileRemote"
  );

  // Step 3: Send the document to the user using the remote file ID
  // This doesn't require downloading — Telegram serves the existing file.
  await waitForSendConfirmation(c, Number(toUserId), {
    _: "inputMessageDocument",
    document: { _: "inputFileRemote", id: remoteFileId },
    caption: caption ?? undefined,
  }, fileName);
}

/**
 * Send a message and wait for Telegram to confirm delivery.
 * Returns when updateMessageSendSucceeded fires for the temp message.
 * Throws if updateMessageSendFailed fires or timeout is reached.
 */
async function waitForSendConfirmation(
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  c: any,
  chatId: number,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  inputMessageContent: any,
  label: string
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    let settled = false;
    let tempMsgId: number | null = null;

    const TIMEOUT_MS = 5 * 60_000;
    const timer = setTimeout(() => {
      if (!settled) {
        settled = true;
        cleanup();
        reject(new Error(`Send timed out after 5min for ${label}`));
      }
    }, TIMEOUT_MS);

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const handleUpdate = (update: any) => {
      if (update?._ === "updateMessageSendSucceeded") {
        if (tempMsgId !== null && update.old_message_id === tempMsgId) {
          if (!settled) {
            settled = true;
            cleanup();
            log.info({ tempMsgId, finalMsgId: update.message?.id, label }, "Send confirmed");
            resolve();
          }
        }
      }
      if (update?._ === "updateMessageSendFailed") {
        if (tempMsgId !== null && update.old_message_id === tempMsgId) {
          if (!settled) {
            settled = true;
            cleanup();
            const errorMsg = update.error?.message ?? "Unknown";
            const errorCode = update.error?.code ?? 0;
            log.error({ tempMsgId, errorCode, errorMsg, label }, "Send failed");
            reject(new Error(`Send failed for ${label}: [${errorCode}] ${errorMsg}`));
          }
        }
      }
    };

    const cleanup = () => {
      clearTimeout(timer);
      c.off("update", handleUpdate);
    };

    // Attach BEFORE sending to avoid race
    c.on("update", handleUpdate);

    withFloodWait(
      () => c.invoke({
        _: "sendMessage",
        chat_id: chatId,
        input_message_content: inputMessageContent,
      }),
      "sendMessage:copyToUser"
    )
      .then((result) => {
        tempMsgId = (result as { id: number }).id;
        log.debug({ tempMsgId, label }, "Message queued, waiting for confirmation");
      })
      .catch((err: Error) => {
        if (!settled) {
          settled = true;
          cleanup();
          reject(err);
        }
      });
  });
}

@@ -86,22 +212,31 @@ export async function sendTextMessage(
  parseMode: "textParseModeMarkdown" | "textParseModeHTML" = "textParseModeMarkdown"
): Promise<void> {
  if (!client) throw new Error("Bot client not initialized");
  const c = client;

  // Parse the text first
  const parsed = await client.invoke({
    _: "parseTextEntities",
    text,
    parse_mode: { _: parseMode, version: parseMode === "textParseModeMarkdown" ? 2 : 0 },
  });
  const parsed = await withFloodWait(
    () =>
      c.invoke({
        _: "parseTextEntities",
        text,
        parse_mode: { _: parseMode, version: parseMode === "textParseModeMarkdown" ? 2 : 0 },
      }),
    "parseTextEntities"
  );

  await client.invoke({
    _: "sendMessage",
    chat_id: Number(chatId),
    input_message_content: {
      _: "inputMessageText",
      text: parsed,
    },
  });
  await withFloodWait(
    () =>
      c.invoke({
        _: "sendMessage",
        chat_id: Number(chatId),
        input_message_content: {
          _: "inputMessageText",
          text: parsed,
        },
      }),
    "sendTextMessage"
  );
}

/**
@@ -113,6 +248,7 @@ export async function sendPhotoMessage(
  caption: string
): Promise<void> {
  if (!client) throw new Error("Bot client not initialized");
  const c = client;

  // Write the photo to a temp file
  const { writeFile, unlink } = await import("fs/promises");
@@ -122,28 +258,63 @@ export async function sendPhotoMessage(
  try {
    await writeFile(tempPath, photoData);

    const parsedCaption = await client.invoke({
      _: "parseTextEntities",
      text: caption,
      parse_mode: { _: "textParseModeMarkdown", version: 2 },
    });
    const parsedCaption = await withFloodWait(
      () =>
        c.invoke({
          _: "parseTextEntities",
          text: caption,
          parse_mode: { _: "textParseModeMarkdown", version: 2 },
        }),
      "parsePhotoCaption"
    );

    await client.invoke({
      _: "sendMessage",
      chat_id: Number(chatId),
      input_message_content: {
        _: "inputMessagePhoto",
        photo: { _: "inputFileLocal", path: tempPath },
        caption: parsedCaption,
        width: 0,
        height: 0,
      },
    });
    await withFloodWait(
      () =>
        c.invoke({
          _: "sendMessage",
          chat_id: Number(chatId),
          input_message_content: {
            _: "inputMessagePhoto",
            photo: { _: "inputFileLocal", path: tempPath },
            caption: parsedCaption,
            width: 0,
            height: 0,
          },
        }),
      "sendPhotoMessage"
    );
  } finally {
    await unlink(tempPath).catch(() => {});
  }
}

/**
 * Get basic info about a Telegram user (name, username).
 */
export async function getUser(
  userId: number
): Promise<{ firstName: string; lastName?: string; username?: string }> {
  if (!client) throw new Error("Bot client not initialized");
  const c = client;
  const user = (await withFloodWait(
    () =>
      c.invoke({
        _: "getUser",
        user_id: userId,
      }),
    "getUser"
  )) as {
    first_name?: string;
    last_name?: string;
    usernames?: { editable_username?: string };
  };
  return {
    firstName: user.first_name ?? "User",
    lastName: user.last_name || undefined,
    username: user.usernames?.editable_username || undefined,
  };
}

/**
 * Get updates from TDLib. The bot listens for new messages this way.
 */
60  bot/src/util/flood-wait.ts  Normal file
@@ -0,0 +1,60 @@
import { childLogger } from "./logger.js";

const log = childLogger("flood-wait");

function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

/**
 * Extract the mandatory wait duration (in seconds) from a Telegram
 * FLOOD_WAIT error. Returns null when the error is not rate-limit related.
 */
export function extractFloodWaitSeconds(err: unknown): number | null {
  const message = err instanceof Error ? err.message : String(err);

  // Pattern 1: FLOOD_WAIT_30
  const flood = message.match(/FLOOD_WAIT_(\d+)/i);
  if (flood) return parseInt(flood[1], 10);

  // Pattern 2: "retry after 30"
  const retry = message.match(/retry after (\d+)/i);
  if (retry) return parseInt(retry[1], 10);

  // Pattern 3: HTTP 429 without explicit seconds
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  if (String((err as any)?.code) === "429") return 30;

  return null;
}

/**
 * Wrap any async Telegram operation with automatic FLOOD_WAIT retry.
 * Adds random jitter (1-5s) to prevent thundering-herd retries.
 *
 * Non-rate-limit errors are re-thrown immediately (fail-fast).
 */
export async function withFloodWait<T>(
  fn: () => Promise<T>,
  context?: string,
  maxRetries = 5
): Promise<T> {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (err) {
      const wait = extractFloodWaitSeconds(err);
      if (wait === null || attempt >= maxRetries) throw err;

      const jitter = 1000 + Math.random() * 4000;
      log.warn(
        { context, wait, attempt: attempt + 1, maxRetries, jitter: Math.round(jitter) },
        "FLOOD_WAIT received — backing off"
      );
      await sleep(wait * 1000 + jitter);
    }
  }
  throw new Error("Unreachable");
}

export { sleep };
@@ -16,7 +16,6 @@ services:
      retries: 5

  worker:
    profiles: ["telegram", "full"]
    build:
      context: .
      dockerfile: worker/Dockerfile
@@ -5,23 +5,29 @@ services:
      dockerfile: Dockerfile
    pull_policy: never
    ports:
      - "${APP_PORT:-3000}:3000"
      - "${APP_PORT:-3000}:${APP_PORT:-3000}"
    environment:
      - DATABASE_URL=postgresql://${POSTGRES_USER:-dragons}:${POSTGRES_PASSWORD:-stash}@db:5432/${POSTGRES_DB:-dragonsstash}
      - AUTH_SECRET=${AUTH_SECRET:?Set AUTH_SECRET in .env}
      - AUTH_TRUST_HOST=true
      - AUTH_GITHUB_ID=${AUTH_GITHUB_ID:-}
      - AUTH_GITHUB_SECRET=${AUTH_GITHUB_SECRET:-}
      - NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000}
      - TELEGRAM_API_KEY=${TELEGRAM_API_KEY:-}
      - BOT_TOKEN=${BOT_TOKEN:-}
      - BOT_USERNAME=${BOT_USERNAME:-}
      - LOG_LEVEL=${LOG_LEVEL:-info}
      - WORKER_INTERVAL_MINUTES=${WORKER_INTERVAL_MINUTES:-60}
      - PORT=${APP_PORT:-3000}
    depends_on:
      db:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/api/health"]
      test: ["CMD-SHELL", "wget -q --spider http://localhost:$$PORT/api/health || exit 1"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 30s
      start_period: 60s
    restart: unless-stopped
    deploy:
      resources:
@@ -31,7 +37,6 @@ services:
      - frontend

  worker:
    profiles: ["telegram", "full"]
    build:
      context: .
      dockerfile: worker/Dockerfile
@@ -10,7 +10,10 @@ if [ "$AUTH_SECRET" = "change-me-to-a-random-secret-in-production" ] || [ -z "$A
fi

echo "Running database migrations..."
./node_modules/.bin/prisma migrate deploy
if ! ./node_modules/.bin/prisma migrate deploy; then
  echo "ERROR: Database migration failed. Check DATABASE_URL and database connectivity."
  exit 1
fi

if [ "$SEED_DATABASE" = "true" ]; then
  echo "Seeding database..."
File diff suppressed because it is too large — Load Diff

1343  docs/superpowers/plans/2026-03-25-package-grouping.md  Normal file
File diff suppressed because it is too large — Load Diff
@@ -0,0 +1,241 @@
# Design: Search Match Indicators, Size Limit Increase, Skipped/Failed Files Overview

**Date:** 2026-03-24
**Status:** Approved

## Overview

Three related improvements to the STL packages system:

1. **Search match indicators** — Show which internal files matched a search query, with highlighted files in the drawer
2. **Size limit increase** — Raise the ingestion limit from 4 GB to 200 GB so large multipart archives aren't skipped
3. **Skipped/failed files overview** — Track and display archives that were skipped or failed, with retry capability

---

## Feature 1: Size Limit Increase

### Change

`worker/src/util/config.ts` line 6 — change default from `"4096"` to `"204800"`.

One-line change. The split/upload pipeline already handles arbitrary sizes. The 2 GB per-part Telegram API limit is a separate hard-coded constant and stays as-is.
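
A minimal sketch of what that default looks like, assuming the limit is read from an env var with a fallback (the variable and export names here are assumptions, not the actual file contents):

```typescript
// Hedged sketch of the one-line change (env var and export names assumed).
// Only the fallback string changes: "4096" (4 GB) -> "204800" (200 GB).
export const maxArchiveSizeMb = parseInt(
  process.env.MAX_ARCHIVE_SIZE_MB ?? "204800",
  10
);
```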

### Impact

- Archives up to 200 GB will now be attempted
- Multipart archives where individual parts are under 2 GB (but total exceeds 4 GB) will no longer be skipped — these upload directly without any splitting
- Single files over 2 GB are automatically split into 2 GB parts (existing behavior)
- Temp disk usage during processing can now reach up to ~200 GB per archive

---

## Feature 2: Search Match Indicators

### Backend Changes

**File:** `src/lib/telegram/queries.ts` — `searchPackages()`

When `searchIn` is `"files"` or `"both"`, change the PackageFile query from `distinct` to a **grouped count**:

```typescript
// Current: findMany with select: { packageId }, distinct: ["packageId"]
// New: groupBy packageId with _count
const fileMatches = await prisma.packageFile.groupBy({
  by: ["packageId"],
  where: {
    OR: [
      { fileName: { contains: q, mode: "insensitive" } },
      { path: { contains: q, mode: "insensitive" } },
    ],
  },
  _count: { _all: true },
});
```

This returns `{ packageId: string, _count: { _all: number } }[]`.

Note: `PackageRow` in `package-columns.tsx` mirrors `PackageListItem` and must also receive the two new fields.

**File:** `src/lib/telegram/types.ts` — `PackageListItem`

Add two fields (populated as sketched below):
- `matchedFileCount: number` — how many files inside matched (0 if matched by package name only)
- `matchedByContent: boolean` — true if any files inside matched
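
A hedged sketch of folding the `groupBy` result into the list items; the surrounding `packages` array and its shape are assumptions:

```typescript
// Sketch: fold the grouped counts into PackageListItem (shapes assumed).
const countByPackage = new Map(
  fileMatches.map((m) => [m.packageId, m._count._all])
);

const items = packages.map((pkg) => ({
  ...pkg,
  matchedFileCount: countByPackage.get(pkg.id) ?? 0,
  matchedByContent: countByPackage.has(pkg.id),
}));
```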

### Frontend Changes

**File:** `src/app/(app)/stls/page.tsx`

Pass the search term to `StlTable` as a new prop.

**File:** `src/app/(app)/stls/_components/stl-table.tsx`

Pass search term to columns via TanStack Table column meta.

**File:** `src/app/(app)/stls/_components/package-columns.tsx`

When search is active and `matchedByContent` is true, render a clickable badge below the filename: e.g., "3 file matches". Clicking opens the `PackageFilesDrawer` with a `highlightTerm` prop set to the search term.

**File:** `src/app/(app)/stls/_components/package-files-drawer.tsx`

- Accept optional `highlightTerm: string` prop
- Render full file tree as normal (all files visible)
- Files whose `fileName` or `path` case-insensitively contains `highlightTerm` get a subtle highlight (amber/yellow background on the row) — see the predicate sketch after this list
- Auto-expand folders that contain highlighted files
- The drawer's own search input remains independent
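
A minimal sketch of the case-insensitive match predicate the drawer could use (the helper name is hypothetical; the prop name follows this design):

```typescript
// Sketch: does a file row get the highlight for the active highlightTerm?
function isHighlighted(
  file: { fileName: string; path: string },
  highlightTerm: string
): boolean {
  const t = highlightTerm.toLowerCase();
  return (
    file.fileName.toLowerCase().includes(t) ||
    file.path.toLowerCase().includes(t)
  );
}
```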

### Data Flow

1. User types search term in STL table search input
2. URL updates with `?search=value`, page reloads
3. `page.tsx` calls `searchPackages()` with `searchIn: "both"`
4. Query returns packages with `matchedFileCount` and `matchedByContent`
5. Table renders "N file matches" badge on content-matched rows
6. User clicks badge -> drawer opens with full tree, matching files highlighted
7. Folders containing matches auto-expanded

---

## Feature 3: Skipped/Failed Files Overview

### Database Schema

New model in `prisma/schema.prisma`:

```prisma
enum SkipReason {
  SIZE_LIMIT
  DOWNLOAD_FAILED
  EXTRACT_FAILED
  UPLOAD_FAILED
}

model SkippedPackage {
  id              String          @id @default(cuid())
  fileName        String
  fileSize        BigInt
  reason          SkipReason
  errorMessage    String?
  sourceChannelId String
  sourceChannel   TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)
  sourceMessageId BigInt
  sourceTopicId   BigInt?
  isMultipart     Boolean         @default(false)
  partCount      Int              @default(1)
  accountId       String
  account         TelegramAccount @relation(fields: [accountId], references: [id], onDelete: Cascade)
  createdAt       DateTime        @default(now())

  @@unique([sourceChannelId, sourceMessageId])
  @@index([reason])
  @@index([accountId])
  @@map("skipped_packages")
}
```

Reverse relations must be added to `TelegramChannel` and `TelegramAccount` models:

```prisma
// In TelegramChannel:
skippedPackages SkippedPackage[]

// In TelegramAccount:
skippedPackages SkippedPackage[]
```

### Worker Changes

**File:** `worker/src/worker.ts`

Extend `PipelineContext` interface to include `accountId` (derived from the ingestion run's account).

At each skip/failure point, upsert a `SkippedPackage` record:

- **Size limit skip** (line 784): reason `SIZE_LIMIT`, no error message
- **Download failure** (catch in download loop): reason `DOWNLOAD_FAILED` + error text
- **Extract/metadata failure** (catch in extract): reason `EXTRACT_FAILED` + error text
- **Upload failure** (catch in upload): reason `UPLOAD_FAILED` + error text

On **successful ingestion** of a package, delete any existing `SkippedPackage` with the same `(sourceChannelId, sourceMessageId)` — so successful retries clean up after themselves.

**File:** `worker/src/db/queries.ts`

Add functions (sketched below):
- `upsertSkippedPackage(data)` — create or update skip record
- `deleteSkippedPackage(sourceChannelId, sourceMessageId)` — remove on success
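
A hedged sketch of the two functions (the Prisma client name `db` and the exact `data` shape are assumptions; the compound key name follows Prisma's convention for `@@unique([sourceChannelId, sourceMessageId])`):

```typescript
// Sketch of the skip-record CRUD (shapes assumed).
export async function upsertSkippedPackage(data: {
  fileName: string;
  fileSize: bigint;
  reason: SkipReason;
  errorMessage?: string;
  sourceChannelId: string;
  sourceMessageId: bigint;
  accountId: string;
}) {
  return db.skippedPackage.upsert({
    where: {
      // Compound unique key generated from @@unique([sourceChannelId, sourceMessageId])
      sourceChannelId_sourceMessageId: {
        sourceChannelId: data.sourceChannelId,
        sourceMessageId: data.sourceMessageId,
      },
    },
    create: data,
    update: { reason: data.reason, errorMessage: data.errorMessage ?? null },
  });
}

export async function deleteSkippedPackage(
  sourceChannelId: string,
  sourceMessageId: bigint
): Promise<void> {
  await db.skippedPackage.deleteMany({
    where: { sourceChannelId, sourceMessageId },
  });
}
```

Note that `upsert` works here, unlike the nullable-compound-key cases elsewhere in this repo, because both key fields are non-nullable.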

### Retry Mechanism

Retrying a skipped package (sketched after these steps):
1. Delete the `SkippedPackage` record
2. Find the `AccountChannelMap` record using both `accountId` and `sourceChannelId`, then reset its `lastProcessedMessageId` to `sourceMessageId - 1` (only if that is less than the current watermark)
3. If `sourceTopicId` is non-null, also reset the corresponding `TopicProgress.lastProcessedMessageId` for that topic
4. The next ingestion cycle picks up the message and re-attempts processing
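
A hedged sketch of the single-package retry; the `AccountChannelMap`/`TopicProgress` field names are assumptions based on this design, not confirmed schema:

```typescript
// Sketch of retrySkippedPackage (field names assumed).
export async function retrySkippedPackage(id: string): Promise<void> {
  const skip = await db.skippedPackage.findUniqueOrThrow({ where: { id } });
  await db.$transaction(async (tx) => {
    await tx.skippedPackage.delete({ where: { id } });
    // Move the watermark backwards only — never forwards.
    await tx.accountChannelMap.updateMany({
      where: {
        accountId: skip.accountId,
        channelId: skip.sourceChannelId,
        lastProcessedMessageId: { gte: skip.sourceMessageId },
      },
      data: { lastProcessedMessageId: skip.sourceMessageId - 1n },
    });
    if (skip.sourceTopicId !== null) {
      await tx.topicProgress.updateMany({
        where: {
          accountId: skip.accountId,
          channelId: skip.sourceChannelId,
          topicId: skip.sourceTopicId,
          lastProcessedMessageId: { gte: skip.sourceMessageId },
        },
        data: { lastProcessedMessageId: skip.sourceMessageId - 1n },
      });
    }
  });
}
```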

For "Retry All" (e.g., all `SIZE_LIMIT` skips after raising the limit):
- Delete all matching `SkippedPackage` records
- For each affected (account, channel) pair, reset `AccountChannelMap` watermark to the minimum `sourceMessageId - 1` among deleted records
- For each affected (account, channel, topic) triple, reset `TopicProgress` watermark similarly

**Note on behavioral distinction:** `DOWNLOAD_FAILED`, `EXTRACT_FAILED`, and `UPLOAD_FAILED` archives already naturally retry because the worker does not advance the watermark past failed sets. The `SkippedPackage` record provides visibility into these failures. The explicit retry/watermark reset is only strictly needed for `SIZE_LIMIT` skips (where the watermark does advance past the skipped message). The UI should present both types but the retry button is most impactful for `SIZE_LIMIT` skips.

**Performance note:** "Retry All" can cause the worker to re-scan large message ranges. The existing dedup logic (`packageExistsBySourceMessage`) ensures already-ingested packages are skipped quickly, but there is a scanning cost proportional to the number of messages between the reset watermark and the current position.

### Frontend Changes

**File:** `src/app/(app)/stls/_components/stl-table.tsx`

Add a "Skipped / Failed" tab alongside the main packages table.

**New file:** `src/app/(app)/stls/_components/skipped-packages-tab.tsx`

Table columns:
- **fileName** — archive name
- **fileSize** — formatted size
- **reason** — color-coded badge: `SIZE_LIMIT` (yellow), `DOWNLOAD_FAILED` (red), `EXTRACT_FAILED` (red), `UPLOAD_FAILED` (red)
- **errorMessage** — truncated with expandable tooltip/popover for full text
- **channel** — source channel title
- **createdAt** — when the skip/failure was recorded

Actions:
- **Retry** button per row — server action that deletes record + resets watermark
- **Retry All** button in the header — bulk retry, filterable by reason

**File:** `src/app/(app)/stls/page.tsx`

Fetch skipped packages count (for tab badge) alongside existing queries.

**File:** `src/data/` or `src/lib/telegram/queries.ts`

Add query functions:
- `listSkippedPackages(options)` — paginated list with reason filter
- `countSkippedPackages()` — for tab badge
- `retrySkippedPackage(id)` — delete record + reset watermark
- `retryAllSkippedPackages(reason?)` — bulk retry

**File:** `src/app/(app)/stls/actions.ts`

Add server actions:
- `retrySkippedPackageAction(id)`
- `retryAllSkippedPackagesAction(reason?)`

---

## Files to Create/Modify

### Create
- `src/app/(app)/stls/_components/skipped-packages-tab.tsx` — skipped packages table UI
- Prisma migration for `SkippedPackage` model

### Modify
- `worker/src/util/config.ts` — raise default max size
- `worker/src/worker.ts` — record skips/failures, clean up on success
- `worker/src/db/queries.ts` — add skip record CRUD functions
- `prisma/schema.prisma` — add `SkippedPackage` model and `SkipReason` enum
- `src/lib/telegram/queries.ts` — modify `searchPackages()` for match counts, add skipped package queries
- `src/lib/telegram/types.ts` — add `matchedFileCount`/`matchedByContent` to `PackageListItem`, add skipped package types
- `src/app/(app)/stls/page.tsx` — pass search term, fetch skipped count, add tab
- `src/app/(app)/stls/_components/stl-table.tsx` — accept search prop, render tabs
- `src/app/(app)/stls/_components/package-columns.tsx` — render match badge
- `src/app/(app)/stls/_components/package-files-drawer.tsx` — accept highlightTerm, highlight matching files, auto-expand matched folders
- `src/app/(app)/stls/actions.ts` — add retry server actions
246  docs/superpowers/specs/2026-03-25-package-grouping-design.md  Normal file
@@ -0,0 +1,246 @@
# Package Grouping Design

## Overview

Add the ability to group related packages that were posted together in a Telegram channel (e.g., "DUNGEON BLOCKS - Colossal Dungeon" with 6 separate archive files). Groups appear as collapsible rows in the STL files table, with support for both automatic detection via Telegram album IDs and manual grouping through the UI.

## Goals

- Automatically detect and group files posted together in Telegram (same `media_album_id`)
- Display groups as collapsed rows in the STL table with aggregated metadata
- Allow manual grouping/ungrouping of packages via the UI
- Support editable group names and preview images
- Enable "Send All" to deliver every package in a group via the bot

## Non-Goals

- Merging grouped packages into a single Package record (each stays independent)
- Time-proximity heuristics for grouping (too error-prone)
- Grouping across different source channels

---

## Data Model

### New `PackageGroup` Table

```prisma
model PackageGroup {
  id              String   @id @default(cuid())
  name            String
  mediaAlbumId    String?
  sourceChannelId String
  previewData     Bytes?
  createdAt       DateTime @default(now())
  updatedAt       DateTime @updatedAt

  packages      Package[]
  sourceChannel TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)

  @@unique([mediaAlbumId, sourceChannelId])
  @@index([sourceChannelId])
  @@map("package_groups")
}
```

### Package Model Changes

Add optional group membership:

```prisma
model Package {
  // ... existing fields ...
  packageGroupId String?
  packageGroup   PackageGroup? @relation(fields: [packageGroupId], references: [id], onDelete: SetNull)

  @@index([packageGroupId])
}
```

### TelegramChannel Model Changes

Add back-relation for the new `PackageGroup` model:

```prisma
model TelegramChannel {
  // ... existing fields and relations ...
  packageGroups PackageGroup[]
}
```

### Key Decisions

- `mediaAlbumId` is `String?` (TDLib int64 stringified) — only used for dedup lookups, avoids BigInt complexity
- `@@unique([mediaAlbumId, sourceChannelId])` prevents duplicate album-derived groups when re-scanning. PostgreSQL treats NULLs as distinct in unique constraints, so manually-created groups (with `mediaAlbumId = null`) are not constrained by this — which is correct behavior
- Idempotency for album groups uses `findFirst({ where: { mediaAlbumId, sourceChannelId } })` + conditional `create`, not `upsert`, because Prisma does not support `upsert` on compound unique keys with nullable fields (see the sketch after this list)
- `onDelete: SetNull` on `Package.packageGroup` means dissolving a group automatically unlinks all members
- `onDelete: Cascade` on `PackageGroup.sourceChannel` means deleting a channel cleans up its groups
- `sourceTopicId` is omitted from `PackageGroup` — it can be inferred from member packages, and manual groups may span topics
- `@@map("package_groups")` follows the project's snake_case table naming convention
- `previewData` stores JPEG thumbnail bytes directly on the group (same pattern as Package)
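
A hedged sketch of that find-then-create idempotency (function and variable names are assumptions):

```typescript
// Sketch: idempotent album-group creation without upsert.
async function ensureAlbumGroup(
  mediaAlbumId: string,
  sourceChannelId: string,
  name: string
) {
  const existing = await db.packageGroup.findFirst({
    where: { mediaAlbumId, sourceChannelId },
  });
  if (existing) return existing;
  try {
    return await db.packageGroup.create({
      data: { mediaAlbumId, sourceChannelId, name },
    });
  } catch (err) {
    // A concurrent scan may have created the group between findFirst and
    // create; @@unique([mediaAlbumId, sourceChannelId]) rejects the duplicate,
    // so re-read and return the winner.
    const winner = await db.packageGroup.findFirst({
      where: { mediaAlbumId, sourceChannelId },
    });
    if (winner) return winner;
    throw err;
  }
}
```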

---

## Worker Changes

### TelegramMessage Interface

Add optional `mediaAlbumId` field:

```typescript
export interface TelegramMessage {
  id: bigint;
  fileName: string;
  fileId: string;
  fileSize: bigint;
  date: Date;
  mediaAlbumId?: string; // Absent or "0" when not part of an album
}
```

The field is optional to minimize call-site changes. The grouping step treats `undefined` and `"0"` equivalently as "not part of an album."

### TelegramPhoto Interface

Add optional `mediaAlbumId` field:

```typescript
export interface TelegramPhoto {
  id: bigint;
  date: Date;
  caption: string;
  fileId: string;
  fileSize: number;
  mediaAlbumId?: string; // For album-to-preview correlation
}
```

### Channel Scanning

In `getChannelMessages()`, read `media_album_id` from the TDLib message object (already present in TDLib responses, just not captured today). Add `media_album_id?: string` to the `TdMessage` interface and pass through to both `TelegramMessage` and `TelegramPhoto`.

The document pass and photo pass already run as separate loops over `searchChatMessages`. Both loops capture `media_album_id` independently. Correlation happens at grouping time: album photos are matched to album documents by comparing their `mediaAlbumId` values, not at scan time.

### Group Creation (Post-Processing)

After each scan cycle's packages are individually processed (downloaded, hashed, uploaded, indexed), a post-processing step handles grouping:

1. Collect all packages from the current scan batch that share the same non-zero `mediaAlbumId`
2. For each distinct `mediaAlbumId`, check if a `PackageGroup` already exists via `findFirst({ where: { mediaAlbumId, sourceChannelId } })`
3. If no group exists, create one:
   - **Name:** caption of the first message in the album (falls back to first file's base name)
   - **Preview:** find a `TelegramPhoto` from the scan's `photos[]` array with the same `mediaAlbumId`. If found, download via `downloadPhotoThumbnail`. If not, the group starts with no preview (can be added in UI later)
4. Link all member packages via an idempotent `updateMany` — sets `packageGroupId` on all packages whose `sourceMessageId` is in the album's message set. This handles both newly-indexed packages and previously-indexed ones that were created in an earlier partial scan (e.g., if one package failed and was retried later) — sketched below

The per-package pipeline is unchanged — each file is still downloaded, hashed, deduped, split, uploaded, and indexed independently. Grouping is a layer on top.
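
A hedged sketch of the step-4 linking (variable names assumed); because it only sets a foreign key by message set, re-running it is a no-op:

```typescript
// Sketch: link every package in the album's message set to the group.
await db.package.updateMany({
  where: {
    sourceChannelId,
    sourceMessageId: { in: albumMessageIds },
  },
  data: { packageGroupId: group.id },
});
```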

---

## Query Layer

### Paginated Listing with Groups

The STL table shows "display items" — either a group (collapsed) or a standalone package. Pagination operates on display items so that a group occupies exactly one slot regardless of member count.

**Two-step query approach** (handles filters correctly):

**Step 1 — Find matching display item IDs:**

```sql
-- Find all group IDs and standalone package IDs where at least one member matches filters
SELECT DISTINCT COALESCE(p."packageGroupId", p.id) AS display_id,
       CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END AS display_type,
       MAX(p."indexedAt") AS sort_date
FROM packages p
LEFT JOIN package_groups pg ON pg.id = p."packageGroupId"
WHERE 1=1
  -- Optional filters applied here (creator, tags, search text, channelId)
GROUP BY COALESCE(p."packageGroupId", p.id),
         CASE WHEN p."packageGroupId" IS NOT NULL THEN 'group' ELSE 'package' END
ORDER BY sort_date DESC
LIMIT $1 OFFSET $2
```

**Step 2 — Fetch full data:**

For groups on the current page, fetch all member packages (including those that didn't match filters — the group appears because at least one member matched, but the expanded view shows all members). For standalone packages, fetch the full package data.

**Count query** (for pagination total):

```sql
SELECT COUNT(*) FROM (
  SELECT DISTINCT COALESCE(p."packageGroupId", p.id)
  FROM packages p
  WHERE 1=1
    -- Same filters as step 1
) AS display_items
```

### Group Row Aggregates

Computed in the step 2 fetch: total file size (sum), total file count (sum), combined tags (array union), member package count per group. These populate the collapsed group row.
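
A minimal sketch of those aggregates, assuming the member shape matches the Package fields referenced in this design:

```typescript
// Sketch: aggregates for a collapsed group row (member shape assumed).
function groupAggregates(
  members: { fileSize: bigint; fileCount: number; tags: string[] }[]
) {
  return {
    totalSize: members.reduce((sum, m) => sum + m.fileSize, 0n),
    totalFiles: members.reduce((sum, m) => sum + m.fileCount, 0),
    tags: [...new Set(members.flatMap((m) => m.tags))], // array union
    memberCount: members.length,
  };
}
```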

### Search

`searchPackages` adds `PackageGroup.name` to search targets via a `LEFT JOIN` to `package_groups`. If any package in a group matches by name/file content, or the group name matches, the whole group appears.

### Filtering

Creator/tag filters apply to member packages. A group appears if any member matches the filter. The group row shows aggregates of all members (not just matching ones).

### New Query Functions

| Function | Purpose |
|----------|---------|
| `listDisplayItems(page, limit, filters)` | Two-step paginated query returning groups + standalone packages |
| `getDisplayItemCount(filters)` | Count of display items for pagination total |
| `getPackageGroup(groupId)` | Group metadata + all member packages |
| `updatePackageGroupName(groupId, name)` | Rename group |
| `updatePackageGroupPreview(groupId, previewData)` | Replace group preview |
| `addPackagesToGroup(packageIds, groupId)` | Manual grouping — add to existing group |
| `removePackageFromGroup(packageId)` | Ungroup single package |
| `createManualGroup(name, packageIds)` | Create new group from UI |
| `dissolveGroup(groupId)` | Ungroup all members, delete group record |

For manual grouping of packages that already belong to different groups: the UI links the selected packages to the target group, then dissolves any source groups left empty by the move. Non-selected members of source groups remain in their original group.

---

## UI Changes

### STL Table — Group Rows

- **Collapsed (default):** Single row showing preview thumbnail, group name (editable inline), archive type badge ("Mixed" if heterogeneous), combined size, combined file count, combined tags (editable), source channel, latest `indexedAt`, actions
- **Expanded:** Chevron toggle reveals member packages as indented sub-rows with their existing columns and per-package actions
- Chevron icon on the left of the row toggles expand/collapse

**Loading strategy:** Member packages for all groups on the current page are prefetched in a single batched query during the step 2 fetch. This means expand/collapse is instant (no on-demand loading) and avoids per-row loading states.

### Group Row Actions

- **Send All** — Queues bot send requests for every package in the group. Checks for existing PENDING/SENDING requests per package to avoid duplicates.
- **View Files** — Opens file drawer showing all member packages' files, separated by package name headers
- **Dissolve Group** — Ungroups all members (confirmation required)

### Individual Package Actions (Within a Group)

- Existing: Send, View Files
- New: "Remove from group" in dropdown menu

### Manual Grouping

- Checkbox selection column on package rows
- When 2+ packages selected, a "Group Selected" button appears in the table toolbar
- Prompts for a group name, creates the group
- If selected packages belong to existing groups, those packages are moved to the new group. Source groups that become empty are automatically dissolved.

### Preview Editing

- Click the group's preview thumbnail to upload a replacement image
- Same upload flow as individual packages (existing component reuse)

### No Changes To

- Skipped/failed packages tab
- Package detail drawer internals
- Search UI (just broader matching behind the scenes)
221  install.cmd  Normal file
@@ -0,0 +1,221 @@
@echo off
setlocal enabledelayedexpansion

REM Claude Code Windows CMD Bootstrap Script
REM Installs Claude Code for environments where PowerShell is not available

REM Parse command line argument
set "TARGET=%~1"
if "!TARGET!"=="" set "TARGET=latest"

REM Validate target parameter
if /i "!TARGET!"=="stable" goto :target_valid
if /i "!TARGET!"=="latest" goto :target_valid
echo !TARGET! | findstr /r "^[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*" >nul
if !ERRORLEVEL! equ 0 goto :target_valid

echo Usage: %0 [stable^|latest^|VERSION] >&2
echo Example: %0 1.0.58 >&2
exit /b 1

:target_valid

REM Check for 64-bit Windows
if /i "%PROCESSOR_ARCHITECTURE%"=="AMD64" goto :arch_valid
if /i "%PROCESSOR_ARCHITECTURE%"=="ARM64" goto :arch_valid
if /i "%PROCESSOR_ARCHITEW6432%"=="AMD64" goto :arch_valid
if /i "%PROCESSOR_ARCHITEW6432%"=="ARM64" goto :arch_valid

echo Claude Code does not support 32-bit Windows. Please use a 64-bit version of Windows. >&2
exit /b 1

:arch_valid

REM Set constants
set "GCS_BUCKET=https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases"
set "DOWNLOAD_DIR=%USERPROFILE%\.claude\downloads"
REM Use native ARM64 binary on ARM64 Windows, x64 otherwise
if /i "%PROCESSOR_ARCHITECTURE%"=="ARM64" (
    set "PLATFORM=win32-arm64"
) else (
    set "PLATFORM=win32-x64"
)

REM Create download directory
if not exist "!DOWNLOAD_DIR!" mkdir "!DOWNLOAD_DIR!"

REM Check for curl availability
curl --version >nul 2>&1
if !ERRORLEVEL! neq 0 (
    echo curl is required but not available. Please install curl or use PowerShell installer. >&2
    exit /b 1
)

REM Always download latest version (which has the most up-to-date installer)
call :download_file "!GCS_BUCKET!/latest" "!DOWNLOAD_DIR!\latest"
if !ERRORLEVEL! neq 0 (
    echo Failed to get latest version >&2
    exit /b 1
)

REM Read version from file
set /p VERSION=<"!DOWNLOAD_DIR!\latest"
del "!DOWNLOAD_DIR!\latest"

REM Download manifest
call :download_file "!GCS_BUCKET!/!VERSION!/manifest.json" "!DOWNLOAD_DIR!\manifest.json"
if !ERRORLEVEL! neq 0 (
    echo Failed to get manifest >&2
    exit /b 1
)

REM Extract checksum from manifest
call :parse_manifest "!DOWNLOAD_DIR!\manifest.json" "!PLATFORM!"
if !ERRORLEVEL! neq 0 (
    echo Platform !PLATFORM! not found in manifest >&2
    del "!DOWNLOAD_DIR!\manifest.json" 2>nul
    exit /b 1
)
del "!DOWNLOAD_DIR!\manifest.json"

REM Download binary
set "BINARY_PATH=!DOWNLOAD_DIR!\claude-!VERSION!-!PLATFORM!.exe"
call :download_file "!GCS_BUCKET!/!VERSION!/!PLATFORM!/claude.exe" "!BINARY_PATH!"
if !ERRORLEVEL! neq 0 (
    echo Failed to download binary >&2
    if exist "!BINARY_PATH!" del "!BINARY_PATH!"
    exit /b 1
)

REM Verify checksum
call :verify_checksum "!BINARY_PATH!" "!EXPECTED_CHECKSUM!"
if !ERRORLEVEL! neq 0 (
    echo Checksum verification failed >&2
    del "!BINARY_PATH!"
    exit /b 1
)

REM Run claude install to set up launcher and shell integration
echo Setting up Claude Code...
"!BINARY_PATH!" install "!TARGET!"
set "INSTALL_RESULT=!ERRORLEVEL!"

REM Clean up downloaded file
REM Wait a moment for any file handles to be released
timeout /t 1 /nobreak >nul 2>&1
del /f "!BINARY_PATH!" >nul 2>&1
if exist "!BINARY_PATH!" (
    echo Warning: Could not remove temporary file: !BINARY_PATH!
)

if !INSTALL_RESULT! neq 0 (
    echo Installation failed >&2
    exit /b 1
)

echo.
echo Installation complete^^!
echo.
exit /b 0

REM ============================================================================
REM SUBROUTINES
REM ============================================================================

:download_file
REM Downloads a file using curl
REM Args: %1=URL, %2=OutputPath
set "URL=%~1"
set "OUTPUT=%~2"

curl -fsSL "!URL!" -o "!OUTPUT!"
exit /b !ERRORLEVEL!

:parse_manifest
REM Parse JSON manifest to extract checksum for platform
REM Args: %1=ManifestPath, %2=Platform
set "MANIFEST_PATH=%~1"
set "PLATFORM_NAME=%~2"
set "EXPECTED_CHECKSUM="

REM Use findstr to find platform section, then look for checksum
set "FOUND_PLATFORM="
set "IN_PLATFORM_SECTION="

REM Read the manifest line by line
for /f "usebackq tokens=*" %%i in ("!MANIFEST_PATH!") do (
    set "LINE=%%i"

    REM Check if this line contains our platform
    echo !LINE! | findstr /c:"\"%PLATFORM_NAME%\":" >nul
    if !ERRORLEVEL! equ 0 (
        set "IN_PLATFORM_SECTION=1"
    )

    REM If we're in the platform section, look for checksum
    if defined IN_PLATFORM_SECTION (
        echo !LINE! | findstr /c:"\"checksum\":" >nul
        if !ERRORLEVEL! equ 0 (
            REM Extract checksum value
            for /f "tokens=2 delims=:" %%j in ("!LINE!") do (
                set "CHECKSUM_PART=%%j"
                REM Remove quotes, whitespace, and comma
                set "CHECKSUM_PART=!CHECKSUM_PART: =!"
                set "CHECKSUM_PART=!CHECKSUM_PART:"=!"
                set "CHECKSUM_PART=!CHECKSUM_PART:,=!"

                REM Check if it looks like a SHA256 (64 hex chars)
                if not "!CHECKSUM_PART!"=="" (
                    call :check_length "!CHECKSUM_PART!" 64
                    if !ERRORLEVEL! equ 0 (
                        set "EXPECTED_CHECKSUM=!CHECKSUM_PART!"
                        exit /b 0
                    )
                )
            )
        )

        REM Check if we've left the platform section (closing brace)
        echo !LINE! | findstr /c:"}" >nul
        if !ERRORLEVEL! equ 0 set "IN_PLATFORM_SECTION="
    )
)

if "!EXPECTED_CHECKSUM!"=="" exit /b 1
exit /b 0

:check_length
REM Check if string length equals expected length
REM Args: %1=String, %2=ExpectedLength
set "STR=%~1"
set "EXPECTED_LEN=%~2"
set "LEN=0"
:count_loop
if "!STR:~%LEN%,1!"=="" goto :count_done
set /a LEN+=1
goto :count_loop
:count_done
if %LEN%==%EXPECTED_LEN% exit /b 0
exit /b 1

:verify_checksum
REM Verify file checksum using certutil
REM Args: %1=FilePath, %2=ExpectedChecksum
set "FILE_PATH=%~1"
set "EXPECTED=%~2"

for /f "skip=1 tokens=*" %%i in ('certutil -hashfile "!FILE_PATH!" SHA256') do (
    set "ACTUAL=%%i"
    set "ACTUAL=!ACTUAL: =!"
    if "!ACTUAL!"=="CertUtil:Thecommandcompletedsuccessfully." goto :verify_done
    if "!ACTUAL!" neq "" (
        if /i "!ACTUAL!"=="!EXPECTED!" (
            exit /b 0
        ) else (
            exit /b 1
        )
    )
)

:verify_done
exit /b 1
2  package-lock.json  generated
@@ -49,7 +49,7 @@
        "ts-node": "^10.9.2",
        "tsx": "^4.21.0",
        "tw-animate-css": "^1.4.0",
        "typescript": "^5"
        "typescript": "5.9.3"
      }
    },
    "node_modules/@alloc/quick-lru": {
@@ -58,6 +58,6 @@
        "ts-node": "^10.9.2",
        "tsx": "^4.21.0",
        "tw-animate-css": "^1.4.0",
        "typescript": "^5"
        "typescript": "5.9.3"
      }
  }
@@ -0,0 +1,5 @@
|
||||
-- Promote all existing users to ADMIN (self-hosted: every user is an admin)
|
||||
UPDATE "User" SET "role" = 'ADMIN' WHERE "role" = 'USER';
|
||||
|
||||
-- Change the default role for new users to ADMIN
|
||||
ALTER TABLE "User" ALTER COLUMN "role" SET DEFAULT 'ADMIN';
|
||||
@@ -0,0 +1,3 @@
|
||||
-- Change the default for new channels to disabled (isActive = false).
|
||||
-- Existing channels are not affected — admins can manually enable/disable them.
|
||||
ALTER TABLE "telegram_channels" ALTER COLUMN "isActive" SET DEFAULT false;
|
||||
@@ -0,0 +1,21 @@
-- CreateTable
CREATE TABLE "invite_codes" (
    "id" TEXT NOT NULL,
    "code" VARCHAR(32) NOT NULL,
    "maxUses" INTEGER NOT NULL DEFAULT 1,
    "uses" INTEGER NOT NULL DEFAULT 0,
    "expiresAt" TIMESTAMP(3),
    "createdBy" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "invite_codes_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "invite_codes_code_key" ON "invite_codes"("code");

-- CreateIndex
CREATE INDEX "invite_codes_code_idx" ON "invite_codes"("code");

-- AddForeignKey
ALTER TABLE "invite_codes" ADD CONSTRAINT "invite_codes_createdBy_fkey" FOREIGN KEY ("createdBy") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
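The maxUses/uses columns imply that redemption must be atomic under concurrent registrations. The registration flow itself is not part of this diff, but a guarded update along these lines would consume one use safely (a sketch; code is an illustrative variable):

// Hypothetical sketch: $executeRaw returns the affected-row count, so a
// result of 0 means the code was unknown, expired, or already fully used.
const redeemed = await prisma.$executeRaw`
  UPDATE "invite_codes"
  SET "uses" = "uses" + 1
  WHERE "code" = ${code}
    AND "uses" < "maxUses"
    AND ("expiresAt" IS NULL OR "expiresAt" > NOW())
`;
if (redeemed === 0) throw new Error("Invite code is invalid, expired, or fully used");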
@@ -0,0 +1,3 @@
-- AlterEnum
ALTER TYPE "ArchiveType" ADD VALUE 'SEVEN_Z';
ALTER TYPE "ArchiveType" ADD VALUE 'DOCUMENT';
@@ -0,0 +1,5 @@
-- AlterTable
ALTER TABLE "telegram_channels" ADD COLUMN "category" VARCHAR(64);

-- CreateIndex
CREATE INDEX "telegram_channels_category_idx" ON "telegram_channels"("category");
@@ -0,0 +1,32 @@
-- CreateEnum
CREATE TYPE "ExtractStatus" AS ENUM ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED');

-- AlterTable
ALTER TABLE "User" ADD COLUMN "usedInviteId" TEXT;

-- CreateTable
CREATE TABLE "archive_extract_requests" (
    "id" TEXT NOT NULL,
    "packageId" TEXT NOT NULL,
    "filePath" VARCHAR(1024) NOT NULL,
    "status" "ExtractStatus" NOT NULL DEFAULT 'PENDING',
    "imageData" BYTEA,
    "contentType" VARCHAR(64),
    "error" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "archive_extract_requests_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "archive_extract_requests_packageId_filePath_idx" ON "archive_extract_requests"("packageId", "filePath");

-- CreateIndex
CREATE INDEX "archive_extract_requests_status_idx" ON "archive_extract_requests"("status");

-- AddForeignKey
ALTER TABLE "User" ADD CONSTRAINT "User_usedInviteId_fkey" FOREIGN KEY ("usedInviteId") REFERENCES "invite_codes"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "archive_extract_requests" ADD CONSTRAINT "archive_extract_requests_packageId_fkey" FOREIGN KEY ("packageId") REFERENCES "packages"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,10 @@
-- Add tags array column to packages
ALTER TABLE "packages" ADD COLUMN "tags" TEXT[] NOT NULL DEFAULT '{}';

-- Backfill: inherit source channel category as initial tag
UPDATE "packages" p
SET "tags" = ARRAY[c."category"]
FROM "telegram_channels" c
WHERE p."sourceChannelId" = c."id"
  AND c."category" IS NOT NULL
  AND c."category" != '';
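Since tags is a native text[] column, lookups can use Prisma's scalar-list filters, which compile to Postgres array operators. A sketch (the tag value and call site are illustrative, not from this changeset):

// Find packages carrying a given tag via the scalar-list "has" filter.
const tagged = await prisma.package.findMany({
  where: { tags: { has: "terrain" } },
});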
@@ -0,0 +1,50 @@
-- CreateEnum
CREATE TYPE "DeliveryStatus" AS ENUM ('NOT_DELIVERED', 'PARTIAL', 'DELIVERED');
CREATE TYPE "PaymentStatus" AS ENUM ('PAID', 'UNPAID');

-- CreateTable
CREATE TABLE "kickstarter_hosts" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "kickstarter_hosts_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "kickstarters" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "link" TEXT,
    "filesUrl" TEXT,
    "deliveryStatus" "DeliveryStatus" NOT NULL DEFAULT 'NOT_DELIVERED',
    "paymentStatus" "PaymentStatus" NOT NULL DEFAULT 'UNPAID',
    "notes" TEXT,
    "hostId" TEXT,
    "userId" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "kickstarters_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "kickstarter_packages" (
    "kickstarterId" TEXT NOT NULL,
    "packageId" TEXT NOT NULL,

    CONSTRAINT "kickstarter_packages_pkey" PRIMARY KEY ("kickstarterId","packageId")
);

-- CreateIndex
CREATE UNIQUE INDEX "kickstarter_hosts_name_key" ON "kickstarter_hosts"("name");
CREATE INDEX "kickstarters_hostId_idx" ON "kickstarters"("hostId");
CREATE INDEX "kickstarters_userId_idx" ON "kickstarters"("userId");
CREATE INDEX "kickstarters_deliveryStatus_idx" ON "kickstarters"("deliveryStatus");
CREATE INDEX "kickstarters_paymentStatus_idx" ON "kickstarters"("paymentStatus");

-- AddForeignKey
ALTER TABLE "kickstarters" ADD CONSTRAINT "kickstarters_hostId_fkey" FOREIGN KEY ("hostId") REFERENCES "kickstarter_hosts"("id") ON DELETE SET NULL ON UPDATE CASCADE;
ALTER TABLE "kickstarters" ADD CONSTRAINT "kickstarters_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "kickstarter_packages" ADD CONSTRAINT "kickstarter_packages_kickstarterId_fkey" FOREIGN KEY ("kickstarterId") REFERENCES "kickstarters"("id") ON DELETE CASCADE ON UPDATE CASCADE;
ALTER TABLE "kickstarter_packages" ADD CONSTRAINT "kickstarter_packages_packageId_fkey" FOREIGN KEY ("packageId") REFERENCES "packages"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -0,0 +1,35 @@
-- CreateEnum
CREATE TYPE "SkipReason" AS ENUM ('SIZE_LIMIT', 'DOWNLOAD_FAILED', 'EXTRACT_FAILED', 'UPLOAD_FAILED');

-- CreateTable
CREATE TABLE "skipped_packages" (
    "id" TEXT NOT NULL,
    "fileName" TEXT NOT NULL,
    "fileSize" BIGINT NOT NULL,
    "reason" "SkipReason" NOT NULL,
    "errorMessage" TEXT,
    "sourceChannelId" TEXT NOT NULL,
    "sourceMessageId" BIGINT NOT NULL,
    "sourceTopicId" BIGINT,
    "isMultipart" BOOLEAN NOT NULL DEFAULT false,
    "partCount" INTEGER NOT NULL DEFAULT 1,
    "accountId" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "skipped_packages_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "skipped_packages_sourceChannelId_sourceMessageId_key" ON "skipped_packages"("sourceChannelId", "sourceMessageId");

-- CreateIndex
CREATE INDEX "skipped_packages_reason_idx" ON "skipped_packages"("reason");

-- CreateIndex
CREATE INDEX "skipped_packages_accountId_idx" ON "skipped_packages"("accountId");

-- AddForeignKey
ALTER TABLE "skipped_packages" ADD CONSTRAINT "skipped_packages_sourceChannelId_fkey" FOREIGN KEY ("sourceChannelId") REFERENCES "telegram_channels"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "skipped_packages" ADD CONSTRAINT "skipped_packages_accountId_fkey" FOREIGN KEY ("accountId") REFERENCES "telegram_accounts"("id") ON DELETE CASCADE ON UPDATE CASCADE;
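The unique key on (sourceChannelId, sourceMessageId) makes recording a skip idempotent, so ingestion retries cannot double-log the same message. A worker-side sketch leaning on it (variable names are illustrative, not from this changeset):

// createMany with skipDuplicates compiles to INSERT ... ON CONFLICT DO NOTHING
// against the unique index created above.
await prisma.skippedPackage.createMany({
  data: [{ fileName, fileSize, reason: "SIZE_LIMIT", sourceChannelId, sourceMessageId, accountId }],
  skipDuplicates: true,
});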
@@ -0,0 +1,30 @@
-- AlterTable
ALTER TABLE "packages" ADD COLUMN "packageGroupId" TEXT;

-- CreateTable
CREATE TABLE "package_groups" (
    "id" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "mediaAlbumId" TEXT,
    "sourceChannelId" TEXT NOT NULL,
    "previewData" BYTEA,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "package_groups_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "package_groups_sourceChannelId_idx" ON "package_groups"("sourceChannelId");

-- CreateIndex
CREATE UNIQUE INDEX "package_groups_mediaAlbumId_sourceChannelId_key" ON "package_groups"("mediaAlbumId", "sourceChannelId");

-- CreateIndex
CREATE INDEX "packages_packageGroupId_idx" ON "packages"("packageGroupId");

-- AddForeignKey
ALTER TABLE "packages" ADD CONSTRAINT "packages_packageGroupId_fkey" FOREIGN KEY ("packageGroupId") REFERENCES "package_groups"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "package_groups" ADD CONSTRAINT "package_groups_sourceChannelId_fkey" FOREIGN KEY ("sourceChannelId") REFERENCES "telegram_channels"("id") ON DELETE CASCADE ON UPDATE CASCADE;
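The unique pair (mediaAlbumId, sourceChannelId) suggests packages posted as one Telegram media album are folded into a single group. An ingestion-side upsert against the generated compound key could look like this (a sketch; albumName and the surrounding flow are assumptions, and it presumes a non-null mediaAlbumId):

// Prisma exposes the compound unique as a `mediaAlbumId_sourceChannelId` where key.
const group = await prisma.packageGroup.upsert({
  where: { mediaAlbumId_sourceChannelId: { mediaAlbumId, sourceChannelId } },
  update: {},
  create: { name: albumName, mediaAlbumId, sourceChannelId },
});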
@@ -22,7 +22,7 @@ model User {
   emailVerified  DateTime?
   image          String?
   hashedPassword String?
-  role           Role      @default(USER)
+  role           Role      @default(ADMIN)
   createdAt      DateTime  @default(now())
   updatedAt      DateTime  @updatedAt

@@ -38,6 +38,10 @@ model User {
   tags         Tag[]
   settings     UserSettings?
   telegramLink TelegramLink?
+  kickstarters Kickstarter[]
+  inviteCodes  InviteCode[] @relation("InviteCreator")
+  usedInvite   InviteCode?  @relation("InviteUser", fields: [usedInviteId], references: [id], onDelete: SetNull)
+  usedInviteId String?
 }

 model Account {

@@ -376,6 +380,8 @@ enum ChannelRole {
 enum ArchiveType {
   ZIP
   RAR
+  SEVEN_Z
+  DOCUMENT
 }

 enum IngestionStatus {

@@ -403,9 +409,10 @@ model TelegramAccount {
   createdAt DateTime @default(now())
   updatedAt DateTime @updatedAt

-  channelMaps   AccountChannelMap[]
-  ingestionRuns IngestionRun[]
-  fetchRequests ChannelFetchRequest[]
+  channelMaps     AccountChannelMap[]
+  ingestionRuns   IngestionRun[]
+  fetchRequests   ChannelFetchRequest[]
+  skippedPackages SkippedPackage[]

   @@index([isActive])
   @@map("telegram_accounts")

@@ -417,14 +424,18 @@ model TelegramChannel {
   title     String
   type      ChannelType
   isForum   Boolean     @default(false)
-  isActive  Boolean     @default(true)
+  isActive  Boolean     @default(false)
+  category  String?     @db.VarChar(64)
   createdAt DateTime    @default(now())
   updatedAt DateTime    @updatedAt

-  accountMaps AccountChannelMap[]
-  packages    Package[]
+  accountMaps     AccountChannelMap[]
+  packages        Package[]
+  skippedPackages SkippedPackage[]
+  packageGroups   PackageGroup[]

   @@index([type, isActive])
+  @@index([category])
   @@map("telegram_channels")
 }

@@ -461,16 +472,21 @@ model Package {
   isMultipart    Boolean  @default(false)
   partCount      Int      @default(1)
   fileCount      Int      @default(0)
+  tags           String[] @default([])
   previewData    Bytes?   // JPEG thumbnail from nearby Telegram photo (stored as raw bytes)
   previewMsgId   BigInt?  // Telegram message ID of the matched photo
+  packageGroupId String?
   indexedAt      DateTime @default(now())
   createdAt      DateTime @default(now())

-  sourceChannel  TelegramChannel @relation(fields: [sourceChannelId], references: [id])
-  files          PackageFile[]
-  ingestionRun   IngestionRun?   @relation(fields: [ingestionRunId], references: [id])
-  ingestionRunId String?
-  sendRequests   BotSendRequest[]
+  sourceChannel    TelegramChannel @relation(fields: [sourceChannelId], references: [id])
+  packageGroup     PackageGroup?   @relation(fields: [packageGroupId], references: [id], onDelete: SetNull)
+  files            PackageFile[]
+  ingestionRun     IngestionRun?   @relation(fields: [ingestionRunId], references: [id])
+  ingestionRunId   String?
+  sendRequests     BotSendRequest[]
+  extractRequests  ArchiveExtractRequest[]
+  kickstarterLinks KickstarterPackage[]

   @@index([sourceChannelId])
   @@index([destChannelId])

@@ -478,6 +494,7 @@ model Package {
   @@index([indexedAt])
   @@index([archiveType])
   @@index([creator])
+  @@index([packageGroupId])
   @@map("packages")
 }

@@ -499,6 +516,23 @@ model PackageFile {
   @@map("package_files")
 }

+model PackageGroup {
+  id              String   @id @default(cuid())
+  name            String
+  mediaAlbumId    String?
+  sourceChannelId String
+  previewData     Bytes?
+  createdAt       DateTime @default(now())
+  updatedAt       DateTime @updatedAt
+
+  packages      Package[]
+  sourceChannel TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)
+
+  @@unique([mediaAlbumId, sourceChannelId])
+  @@index([sourceChannelId])
+  @@map("package_groups")
+}
+
 model IngestionRun {
   id        String @id @default(cuid())
   accountId String

@@ -554,6 +588,22 @@ model GlobalSetting {
   @@map("global_settings")
 }

+model InviteCode {
+  id        String    @id @default(cuid())
+  code      String    @unique @db.VarChar(32)
+  maxUses   Int       @default(1)
+  uses      Int       @default(0)
+  expiresAt DateTime?
+  createdBy String
+  createdAt DateTime  @default(now())
+
+  creator User   @relation("InviteCreator", fields: [createdBy], references: [id], onDelete: Cascade)
+  usedBy  User[] @relation("InviteUser")
+
+  @@index([code])
+  @@map("invite_codes")
+}
+
 model ChannelFetchRequest {
   id        String @id @default(cuid())
   accountId String

@@ -626,3 +676,128 @@ model BotSubscription {
   @@index([telegramUserId])
   @@map("bot_subscriptions")
 }
+
+// ───────────────────────────────────────
+// Archive image extraction (worker-mediated)
+// ───────────────────────────────────────
+
+enum ExtractStatus {
+  PENDING
+  IN_PROGRESS
+  COMPLETED
+  FAILED
+}
+
+/// A request for the worker to extract an image from an archive.
+/// The web app creates this, sends a pg_notify, and the worker
+/// downloads the archive, extracts the file, and writes the result.
+model ArchiveExtractRequest {
+  id          String        @id @default(cuid())
+  packageId   String
+  filePath    String        @db.VarChar(1024) // path within archive to extract
+  status      ExtractStatus @default(PENDING)
+  imageData   Bytes?        // extracted image bytes (JPEG/PNG/WebP)
+  contentType String?       @db.VarChar(64) // MIME type of extracted image
+  error       String?
+  createdAt   DateTime      @default(now())
+  updatedAt   DateTime      @updatedAt
+
+  package Package @relation(fields: [packageId], references: [id], onDelete: Cascade)
+
+  @@index([packageId, filePath])
+  @@index([status])
+  @@map("archive_extract_requests")
+}
+
+// ───────────────────────────────────────
+// Skipped/Failed Archives
+// ───────────────────────────────────────
+
+enum SkipReason {
+  SIZE_LIMIT
+  DOWNLOAD_FAILED
+  EXTRACT_FAILED
+  UPLOAD_FAILED
+}
+
+model SkippedPackage {
+  id              String          @id @default(cuid())
+  fileName        String
+  fileSize        BigInt
+  reason          SkipReason
+  errorMessage    String?
+  sourceChannelId String
+  sourceChannel   TelegramChannel @relation(fields: [sourceChannelId], references: [id], onDelete: Cascade)
+  sourceMessageId BigInt
+  sourceTopicId   BigInt?
+  isMultipart     Boolean         @default(false)
+  partCount       Int             @default(1)
+  accountId       String
+  account         TelegramAccount @relation(fields: [accountId], references: [id], onDelete: Cascade)
+  createdAt       DateTime        @default(now())
+
+  @@unique([sourceChannelId, sourceMessageId])
+  @@index([reason])
+  @@index([accountId])
+  @@map("skipped_packages")
+}
+
+// ───────────────────────────────────────
+// Purchased Kickstarters
+// ───────────────────────────────────────
+
+enum DeliveryStatus {
+  NOT_DELIVERED
+  PARTIAL
+  DELIVERED
+}
+
+enum PaymentStatus {
+  PAID
+  UNPAID
+}
+
+model KickstarterHost {
+  id        String   @id @default(cuid())
+  name      String   @unique
+  createdAt DateTime @default(now())
+
+  kickstarters Kickstarter[]
+
+  @@map("kickstarter_hosts")
+}
+
+model Kickstarter {
+  id             String         @id @default(cuid())
+  name           String
+  link           String?
+  filesUrl       String?
+  deliveryStatus DeliveryStatus @default(NOT_DELIVERED)
+  paymentStatus  PaymentStatus  @default(UNPAID)
+  notes          String?
+  hostId         String?
+  userId         String
+  createdAt      DateTime       @default(now())
+  updatedAt      DateTime       @updatedAt
+
+  host     KickstarterHost?     @relation(fields: [hostId], references: [id], onDelete: SetNull)
+  user     User                 @relation(fields: [userId], references: [id], onDelete: Cascade)
+  packages KickstarterPackage[]
+
+  @@index([hostId])
+  @@index([userId])
+  @@index([deliveryStatus])
+  @@index([paymentStatus])
+  @@map("kickstarters")
+}
+
+model KickstarterPackage {
+  kickstarterId String
+  packageId     String
+
+  kickstarter Kickstarter @relation(fields: [kickstarterId], references: [id], onDelete: Cascade)
+  package     Package     @relation(fields: [packageId], references: [id], onDelete: Cascade)
+
+  @@id([kickstarterId, packageId])
+  @@map("kickstarter_packages")
+}
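The doc comment on ArchiveExtractRequest describes a create-then-notify handshake with the worker. On the web side that pairs an ordinary create with pg_notify, roughly as below; the NOTIFY channel name is an assumption, since the worker half is not in this diff:

// Sketch of the enqueue half of the protocol described in the schema comment.
// "archive_extract" is a hypothetical channel name.
const request = await prisma.archiveExtractRequest.create({
  data: { packageId, filePath },
});
await prisma.$queryRaw`SELECT pg_notify('archive_extract', ${request.id})`;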
415 src/app/(app)/invites/_components/invite-manager.tsx Normal file
@@ -0,0 +1,415 @@
"use client";

import { useState, useTransition } from "react";
import { Copy, Link2, Plus, Trash2 } from "lucide-react";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from "@/components/ui/table";
import { Badge } from "@/components/ui/badge";
import {
  AlertDialog,
  AlertDialogAction,
  AlertDialogCancel,
  AlertDialogContent,
  AlertDialogDescription,
  AlertDialogFooter,
  AlertDialogHeader,
  AlertDialogTitle,
  AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import { createInviteCode, createBulkInviteCodes, deleteInviteCode } from "../actions";

type InviteUser = {
  id: string;
  name: string | null;
  email: string | null;
  createdAt: string;
};

type InviteCode = {
  id: string;
  code: string;
  maxUses: number;
  uses: number;
  expiresAt: string | null;
  createdAt: string;
  creator: { name: string | null };
  usedBy: InviteUser[];
};

export function InviteManager({
  inviteCodes,
  appUrl,
}: {
  inviteCodes: InviteCode[];
  appUrl: string;
}) {
  const [maxUses, setMaxUses] = useState(1);
  const [expiresInDays, setExpiresInDays] = useState(7);
  const [noExpiry, setNoExpiry] = useState(false);
  const [bulkCount, setBulkCount] = useState(5);
  const [isPending, startTransition] = useTransition();
  const [copiedId, setCopiedId] = useState<string | null>(null);
  const [copiedType, setCopiedType] = useState<"code" | "link" | null>(null);

  function handleCreate() {
    startTransition(async () => {
      await createInviteCode({
        maxUses,
        expiresInDays: noExpiry ? null : expiresInDays,
      });
    });
  }

  function handleBulkCreate() {
    startTransition(async () => {
      await createBulkInviteCodes({
        count: bulkCount,
        maxUses,
        expiresInDays: noExpiry ? null : expiresInDays,
      });
    });
  }

  function handleDelete(id: string) {
    startTransition(async () => {
      await deleteInviteCode(id);
    });
  }

  function copyToClipboard(text: string, id: string, type: "code" | "link") {
    navigator.clipboard.writeText(text);
    setCopiedId(id);
    setCopiedType(type);
    setTimeout(() => {
      setCopiedId(null);
      setCopiedType(null);
    }, 2000);
  }

  function getStatus(invite: InviteCode): "active" | "used" | "expired" {
    if (invite.uses >= invite.maxUses) return "used";
    if (invite.expiresAt && new Date(invite.expiresAt) < new Date()) return "expired";
    return "active";
  }

  function formatRelativeDate(dateStr: string) {
    const date = new Date(dateStr);
    const now = new Date();
    const diffMs = date.getTime() - now.getTime();
    const diffDays = Math.ceil(diffMs / (1000 * 60 * 60 * 24));

    if (diffDays < 0) return "Expired";
    if (diffDays === 0) return "Today";
    if (diffDays === 1) return "Tomorrow";
    return `${diffDays} days`;
  }

  const activeCount = inviteCodes.filter((i) => getStatus(i) === "active").length;
  const usedCount = inviteCodes.filter((i) => getStatus(i) === "used").length;

  return (
    <div className="max-w-5xl space-y-6">
      {/* Create Card */}
      <Card>
        <CardHeader>
          <CardTitle>Generate Invite Codes</CardTitle>
          <CardDescription>
            Create single or bulk invite codes to share with new users
          </CardDescription>
        </CardHeader>
        <CardContent className="space-y-4">
          <div className="flex flex-wrap items-end gap-4">
            <div className="space-y-2">
              <Label htmlFor="maxUses">Max Uses</Label>
              <Input
                id="maxUses"
                type="number"
                min={1}
                max={100}
                value={maxUses}
                onChange={(e) => setMaxUses(Number(e.target.value))}
                className="w-24"
              />
            </div>
            <div className="space-y-2">
              <Label htmlFor="expiresInDays">Expires in (days)</Label>
              <Input
                id="expiresInDays"
                type="number"
                min={1}
                max={365}
                value={expiresInDays}
                onChange={(e) => setExpiresInDays(Number(e.target.value))}
                disabled={noExpiry}
                className="w-24"
              />
            </div>
            <div className="flex items-center gap-2 pb-1">
              <Switch
                id="noExpiry"
                checked={noExpiry}
                onCheckedChange={setNoExpiry}
              />
              <Label htmlFor="noExpiry" className="text-sm">
                No expiry
              </Label>
            </div>
          </div>

          <div className="flex flex-wrap items-end gap-3 border-t pt-4">
            <Button onClick={handleCreate} disabled={isPending}>
              <Plus className="mr-2 h-4 w-4" />
              {isPending ? "Creating..." : "Create One"}
            </Button>

            <div className="flex items-end gap-2">
              <div className="space-y-2">
                <Label htmlFor="bulkCount">Count</Label>
                <Input
                  id="bulkCount"
                  type="number"
                  min={2}
                  max={25}
                  value={bulkCount}
                  onChange={(e) => setBulkCount(Number(e.target.value))}
                  className="w-20"
                />
              </div>
              <Button
                variant="secondary"
                onClick={handleBulkCreate}
                disabled={isPending}
              >
                <Plus className="mr-2 h-4 w-4" />
                {isPending ? "Creating..." : `Create ${bulkCount}`}
              </Button>
            </div>
          </div>
        </CardContent>
      </Card>

      {/* Codes Table */}
      <Card>
        <CardHeader>
          <CardTitle>Invite Codes</CardTitle>
          <CardDescription>
            {inviteCodes.length} total · {activeCount} active · {usedCount} fully used
          </CardDescription>
        </CardHeader>
        <CardContent>
          {inviteCodes.length === 0 ? (
            <p className="text-sm text-muted-foreground">
              No invite codes yet. Create one above.
            </p>
          ) : (
            <Table>
              <TableHeader>
                <TableRow>
                  <TableHead>Code</TableHead>
                  <TableHead>Status</TableHead>
                  <TableHead>Uses</TableHead>
                  <TableHead>Redeemed By</TableHead>
                  <TableHead>Expires</TableHead>
                  <TableHead>Created</TableHead>
                  <TableHead className="text-right">Actions</TableHead>
                </TableRow>
              </TableHeader>
              <TableBody>
                {inviteCodes.map((invite) => {
                  const status = getStatus(invite);
                  const isCopiedCode =
                    copiedId === invite.id && copiedType === "code";
                  const isCopiedLink =
                    copiedId === invite.id && copiedType === "link";

                  return (
                    <TableRow key={invite.id}>
                      <TableCell className="font-mono text-sm">
                        {invite.code}
                      </TableCell>
                      <TableCell>
                        <Badge
                          variant={
                            status === "active"
                              ? "default"
                              : status === "used"
                                ? "secondary"
                                : "destructive"
                          }
                        >
                          {status}
                        </Badge>
                      </TableCell>
                      <TableCell>
                        {invite.uses} / {invite.maxUses}
                      </TableCell>
                      <TableCell>
                        {invite.usedBy.length === 0 ? (
                          <span className="text-muted-foreground">--</span>
                        ) : (
                          <div className="space-y-0.5">
                            {invite.usedBy.map((user) => (
                              <Tooltip key={user.id}>
                                <TooltipTrigger asChild>
                                  <div className="text-sm cursor-default">
                                    {user.name ?? user.email ?? "Unknown"}
                                  </div>
                                </TooltipTrigger>
                                <TooltipContent>
                                  <div className="text-xs">
                                    {user.email && <div>{user.email}</div>}
                                    <div>
                                      Joined{" "}
                                      {new Date(user.createdAt).toLocaleDateString()}
                                    </div>
                                  </div>
                                </TooltipContent>
                              </Tooltip>
                            ))}
                          </div>
                        )}
                      </TableCell>
                      <TableCell>
                        {invite.expiresAt ? (
                          <Tooltip>
                            <TooltipTrigger asChild>
                              <span className="cursor-default">
                                {formatRelativeDate(invite.expiresAt)}
                              </span>
                            </TooltipTrigger>
                            <TooltipContent>
                              {new Date(invite.expiresAt).toLocaleString()}
                            </TooltipContent>
                          </Tooltip>
                        ) : (
                          <span className="text-muted-foreground">Never</span>
                        )}
                      </TableCell>
                      <TableCell>
                        <Tooltip>
                          <TooltipTrigger asChild>
                            <span className="cursor-default">
                              {new Date(invite.createdAt).toLocaleDateString()}
                            </span>
                          </TooltipTrigger>
                          <TooltipContent>
                            by {invite.creator.name ?? "Unknown"}
                          </TooltipContent>
                        </Tooltip>
                      </TableCell>
                      <TableCell className="text-right">
                        <div className="flex justify-end gap-1">
                          <Tooltip>
                            <TooltipTrigger asChild>
                              <Button
                                variant="outline"
                                size="sm"
                                onClick={() =>
                                  copyToClipboard(
                                    invite.code,
                                    invite.id,
                                    "code"
                                  )
                                }
                              >
                                <Copy className="h-3 w-3" />
                                {isCopiedCode && (
                                  <span className="ml-1">Copied!</span>
                                )}
                              </Button>
                            </TooltipTrigger>
                            <TooltipContent>Copy code</TooltipContent>
                          </Tooltip>

                          <Tooltip>
                            <TooltipTrigger asChild>
                              <Button
                                variant="outline"
                                size="sm"
                                onClick={() =>
                                  copyToClipboard(
                                    `${appUrl}/register?code=${invite.code}`,
                                    invite.id,
                                    "link"
                                  )
                                }
                                disabled={status !== "active"}
                              >
                                <Link2 className="h-3 w-3" />
                                {isCopiedLink && (
                                  <span className="ml-1">Copied!</span>
                                )}
                              </Button>
                            </TooltipTrigger>
                            <TooltipContent>Copy registration link</TooltipContent>
                          </Tooltip>

                          <AlertDialog>
                            <Tooltip>
                              <TooltipTrigger asChild>
                                <AlertDialogTrigger asChild>
                                  <Button
                                    variant="destructive"
                                    size="sm"
                                    disabled={isPending}
                                  >
                                    <Trash2 className="h-3 w-3" />
                                  </Button>
                                </AlertDialogTrigger>
                              </TooltipTrigger>
                              <TooltipContent>Delete code</TooltipContent>
                            </Tooltip>
                            <AlertDialogContent>
                              <AlertDialogHeader>
                                <AlertDialogTitle>
                                  Delete invite code?
                                </AlertDialogTitle>
                                <AlertDialogDescription>
                                  This will permanently delete the invite code{" "}
                                  <span className="font-mono font-semibold">
                                    {invite.code}
                                  </span>
                                  .{" "}
                                  {status === "active" &&
                                    "Anyone with this code will no longer be able to register."}
                                </AlertDialogDescription>
                              </AlertDialogHeader>
                              <AlertDialogFooter>
                                <AlertDialogCancel>Cancel</AlertDialogCancel>
                                <AlertDialogAction
                                  onClick={() => handleDelete(invite.id)}
                                >
                                  Delete
                                </AlertDialogAction>
                              </AlertDialogFooter>
                            </AlertDialogContent>
                          </AlertDialog>
                        </div>
                      </TableCell>
                    </TableRow>
                  );
                })}
              </TableBody>
            </Table>
          )}
        </CardContent>
      </Card>
    </div>
  );
}
96 src/app/(app)/invites/actions.ts Normal file
@@ -0,0 +1,96 @@
"use server";

import crypto from "crypto";
import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import type { ActionResult } from "@/types/api.types";
import { revalidatePath } from "next/cache";

export async function createInviteCode(input: {
  maxUses: number;
  expiresInDays: number | null;
}): Promise<ActionResult<{ code: string }>> {
  const session = await auth();
  if (!session?.user?.id || session.user.role !== "ADMIN") {
    return { success: false, error: "Unauthorized" };
  }

  const code = crypto.randomBytes(6).toString("hex");
  const expiresAt = input.expiresInDays
    ? new Date(Date.now() + input.expiresInDays * 24 * 60 * 60 * 1000)
    : null;

  await prisma.inviteCode.create({
    data: {
      code,
      maxUses: input.maxUses,
      expiresAt,
      createdBy: session.user.id,
    },
  });

  revalidatePath("/invites");
  return { success: true, data: { code } };
}

export async function createBulkInviteCodes(input: {
  count: number;
  maxUses: number;
  expiresInDays: number | null;
}): Promise<ActionResult<{ codes: string[] }>> {
  const session = await auth();
  if (!session?.user?.id || session.user.role !== "ADMIN") {
    return { success: false, error: "Unauthorized" };
  }

  if (input.count < 1 || input.count > 25) {
    return { success: false, error: "Can generate between 1 and 25 codes at a time" };
  }

  const expiresAt = input.expiresInDays
    ? new Date(Date.now() + input.expiresInDays * 24 * 60 * 60 * 1000)
    : null;

  const codes: string[] = [];

  await prisma.$transaction(async (tx) => {
    for (let i = 0; i < input.count; i++) {
      const code = crypto.randomBytes(6).toString("hex");
      codes.push(code);
      await tx.inviteCode.create({
        data: {
          code,
          maxUses: input.maxUses,
          expiresAt,
          createdBy: session.user.id,
        },
      });
    }
  });

  revalidatePath("/invites");
  return { success: true, data: { codes } };
}

export async function deleteInviteCode(id: string): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id || session.user.role !== "ADMIN") {
    return { success: false, error: "Unauthorized" };
  }

  await prisma.inviteCode.delete({ where: { id } });

  revalidatePath("/invites");
  return { success: true, data: undefined };
}

export async function getInviteCodes() {
  const codes = await prisma.inviteCode.findMany({
    orderBy: { createdAt: "desc" },
    include: {
      creator: { select: { name: true } },
      usedBy: { select: { id: true, name: true, email: true, createdAt: true } },
    },
  });
  return codes;
}
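createBulkInviteCodes issues one INSERT per code inside $transaction, which keeps the batch all-or-nothing at the cost of N round-trips. A single createMany would be equivalent for this table (a sketch, not the committed code):

// Batch variant of the loop above: one INSERT, still atomic.
const data = Array.from({ length: input.count }, () => ({
  code: crypto.randomBytes(6).toString("hex"),
  maxUses: input.maxUses,
  expiresAt,
  createdBy: session.user.id,
}));
await prisma.inviteCode.createMany({ data });
const codes = data.map((d) => d.code);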
26 src/app/(app)/invites/page.tsx Normal file
@@ -0,0 +1,26 @@
import { auth } from "@/lib/auth";
import { redirect } from "next/navigation";
import { PageHeader } from "@/components/shared/page-header";
import { getInviteCodes } from "./actions";
import { InviteManager } from "./_components/invite-manager";

export default async function InvitesPage() {
  const session = await auth();
  if (!session?.user?.id) redirect("/login");
  if (session.user.role !== "ADMIN") redirect("/dashboard");

  const inviteCodes = await getInviteCodes();

  return (
    <div className="space-y-6">
      <PageHeader
        title="Invite Codes"
        description="Manage invite codes for new user registration"
      />
      <InviteManager
        inviteCodes={JSON.parse(JSON.stringify(inviteCodes))}
        appUrl={process.env.NEXT_PUBLIC_APP_URL ?? ""}
      />
    </div>
  );
}
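The JSON.parse(JSON.stringify(...)) round-trip is what reconciles the server and client types: Prisma returns Date objects, JSON.stringify serializes them through Date.prototype.toJSON into ISO strings, and that is why invite-manager.tsx declares createdAt/expiresAt as string. In isolation:

// Self-contained illustration of the conversion performed in page.tsx above.
const row = { createdAt: new Date(), expiresAt: null as Date | null };
const plain: { createdAt: string; expiresAt: string | null } =
  JSON.parse(JSON.stringify(row));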
187 src/app/(app)/kickstarters/_components/kickstarter-columns.tsx Normal file
@@ -0,0 +1,187 @@
"use client";

import { type ColumnDef } from "@tanstack/react-table";
import { MoreHorizontal, Pencil, Trash2, ExternalLink } from "lucide-react";
import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
  DropdownMenu,
  DropdownMenuContent,
  DropdownMenuItem,
  DropdownMenuSeparator,
  DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";

export interface KickstarterRow {
  id: string;
  name: string;
  link: string | null;
  filesUrl: string | null;
  deliveryStatus: "NOT_DELIVERED" | "PARTIAL" | "DELIVERED";
  paymentStatus: "PAID" | "UNPAID";
  notes: string | null;
  hostId: string | null;
  userId: string;
  createdAt: Date;
  updatedAt: Date;
  host: { id: string; name: string } | null;
  _count: { packages: number };
}

interface KickstarterColumnsProps {
  onEdit: (kickstarter: KickstarterRow) => void;
  onDelete: (id: string) => void;
}

const deliveryConfig: Record<string, { label: string; className: string }> = {
  NOT_DELIVERED: {
    label: "Not Delivered",
    className: "bg-red-500/15 text-red-400 border-red-500/30",
  },
  PARTIAL: {
    label: "Partial",
    className: "bg-orange-500/15 text-orange-400 border-orange-500/30",
  },
  DELIVERED: {
    label: "Delivered",
    className: "bg-emerald-500/15 text-emerald-400 border-emerald-500/30",
  },
};

const paymentConfig: Record<string, { label: string; className: string }> = {
  PAID: {
    label: "Paid",
    className: "bg-emerald-500/15 text-emerald-400 border-emerald-500/30",
  },
  UNPAID: {
    label: "Unpaid",
    className: "bg-red-500/15 text-red-400 border-red-500/30",
  },
};

export function getKickstarterColumns({
  onEdit,
  onDelete,
}: KickstarterColumnsProps): ColumnDef<KickstarterRow, unknown>[] {
  return [
    {
      accessorKey: "name",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Name" />,
      cell: ({ row }) => (
        <div className="flex items-center gap-2">
          <span className="font-medium">{row.original.name}</span>
          {row.original.link && (
            <a
              href={row.original.link}
              target="_blank"
              rel="noopener noreferrer"
              className="text-primary hover:text-primary/80"
              onClick={(e) => e.stopPropagation()}
            >
              <ExternalLink className="h-3.5 w-3.5" />
            </a>
          )}
        </div>
      ),
      enableHiding: false,
    },
    {
      accessorKey: "host",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Host" />,
      cell: ({ row }) =>
        row.original.host ? (
          <span className="text-sm">{row.original.host.name}</span>
        ) : (
          <span className="text-muted-foreground">--</span>
        ),
    },
    {
      id: "files",
      header: "Files",
      cell: ({ row }) =>
        row.original.filesUrl ? (
          <a
            href={row.original.filesUrl}
            target="_blank"
            rel="noopener noreferrer"
            className="flex items-center gap-1 text-sm text-primary hover:underline"
            onClick={(e) => e.stopPropagation()}
          >
            <ExternalLink className="h-3 w-3" />
          </a>
        ) : (
          <span className="text-muted-foreground">--</span>
        ),
    },
    {
      accessorKey: "deliveryStatus",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Delivery" />,
      cell: ({ row }) => {
        const config = deliveryConfig[row.original.deliveryStatus];
        return (
          <Badge variant="outline" className={`text-[10px] font-medium ${config.className}`}>
            {config.label}
          </Badge>
        );
      },
    },
    {
      accessorKey: "paymentStatus",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Payment" />,
      cell: ({ row }) => {
        const config = paymentConfig[row.original.paymentStatus];
        return (
          <Badge variant="outline" className={`text-[10px] font-medium ${config.className}`}>
            {config.label}
          </Badge>
        );
      },
    },
    {
      id: "packages",
      header: "Packages",
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {row.original._count.packages}
        </span>
      ),
    },
    {
      accessorKey: "createdAt",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Created" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {new Date(row.original.createdAt).toLocaleDateString()}
        </span>
      ),
    },
    {
      id: "actions",
      cell: ({ row }) => (
        <DropdownMenu>
          <DropdownMenuTrigger asChild>
            <Button variant="ghost" size="icon" className="h-8 w-8">
              <MoreHorizontal className="h-4 w-4" />
            </Button>
          </DropdownMenuTrigger>
          <DropdownMenuContent align="end">
            <DropdownMenuItem onClick={() => onEdit(row.original)}>
              <Pencil className="mr-2 h-3.5 w-3.5" />
              Edit
            </DropdownMenuItem>
            <DropdownMenuSeparator />
            <DropdownMenuItem
              onClick={() => onDelete(row.original.id)}
              className="text-destructive focus:text-destructive"
            >
              <Trash2 className="mr-2 h-3.5 w-3.5" />
              Delete
            </DropdownMenuItem>
          </DropdownMenuContent>
        </DropdownMenu>
      ),
      enableHiding: false,
    },
  ];
}
301 src/app/(app)/kickstarters/_components/kickstarter-form.tsx Normal file
@@ -0,0 +1,301 @@
"use client";

import { useState, useTransition } from "react";
import { useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { toast } from "sonner";
import { Plus } from "lucide-react";
import { kickstarterSchema, type KickstarterInput } from "@/schemas/kickstarter.schema";
import { createKickstarter, updateKickstarter, createHost } from "../actions";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Textarea } from "@/components/ui/textarea";
import {
  Form,
  FormControl,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from "@/components/ui/form";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";

interface HostOption {
  id: string;
  name: string;
  _count: { kickstarters: number };
}

interface KickstarterFormProps {
  kickstarter?: {
    id: string;
    name: string;
    link: string | null;
    filesUrl: string | null;
    deliveryStatus: "NOT_DELIVERED" | "PARTIAL" | "DELIVERED";
    paymentStatus: "PAID" | "UNPAID";
    hostId: string | null;
    notes: string | null;
  };
  hosts: HostOption[];
  onSuccess: () => void;
}

export function KickstarterForm({ kickstarter, hosts, onSuccess }: KickstarterFormProps) {
  const [isPending, startTransition] = useTransition();
  const [hostList, setHostList] = useState(hosts);
  const [showNewHost, setShowNewHost] = useState(false);
  const [newHostName, setNewHostName] = useState("");
  const isEditing = !!kickstarter;

  const form = useForm<KickstarterInput>({
    resolver: zodResolver(kickstarterSchema),
    defaultValues: {
      name: kickstarter?.name ?? "",
      link: kickstarter?.link ?? "",
      filesUrl: kickstarter?.filesUrl ?? "",
      deliveryStatus: kickstarter?.deliveryStatus ?? "NOT_DELIVERED",
      paymentStatus: kickstarter?.paymentStatus ?? "UNPAID",
      hostId: kickstarter?.hostId ?? "",
      notes: kickstarter?.notes ?? "",
    },
  });

  function onSubmit(values: KickstarterInput) {
    startTransition(async () => {
      const result = isEditing
        ? await updateKickstarter(kickstarter!.id, values)
        : await createKickstarter(values);

      if (!result.success) {
        toast.error(result.error);
        return;
      }

      toast.success(isEditing ? "Kickstarter updated" : "Kickstarter created");
      form.reset();
      onSuccess();
    });
  }

  function handleAddHost() {
    if (!newHostName.trim()) return;
    startTransition(async () => {
      const result = await createHost({ name: newHostName.trim() });
      if (!result.success) {
        toast.error(result.error);
        return;
      }
      toast.success(`Host "${result.data!.name}" created`);
      setHostList((prev) => [
        ...prev,
        { id: result.data!.id, name: result.data!.name, _count: { kickstarters: 0 } },
      ]);
      form.setValue("hostId", result.data!.id);
      setNewHostName("");
      setShowNewHost(false);
    });
  }

  return (
    <Form {...form}>
      <form onSubmit={form.handleSubmit(onSubmit)} className="space-y-4">
        <FormField
          control={form.control}
          name="name"
          render={({ field }) => (
            <FormItem>
              <FormLabel>Name</FormLabel>
              <FormControl>
                <Input placeholder="Kickstarter name" {...field} />
              </FormControl>
              <FormMessage />
            </FormItem>
          )}
        />

        <FormField
          control={form.control}
          name="link"
          render={({ field }) => (
            <FormItem>
              <FormLabel>Link</FormLabel>
              <FormControl>
                <Input placeholder="https://kickstarter.com/..." {...field} />
              </FormControl>
              <FormMessage />
            </FormItem>
          )}
        />

        <FormField
          control={form.control}
          name="filesUrl"
          render={({ field }) => (
            <FormItem>
              <FormLabel>Files URL</FormLabel>
              <FormControl>
                <Input placeholder="https://drive.google.com/..." {...field} />
              </FormControl>
              <FormMessage />
            </FormItem>
          )}
        />

        <div className="grid grid-cols-2 gap-4">
          <FormField
            control={form.control}
            name="deliveryStatus"
            render={({ field }) => (
              <FormItem>
                <FormLabel>Delivery Status</FormLabel>
                <Select onValueChange={field.onChange} defaultValue={field.value}>
                  <FormControl>
                    <SelectTrigger>
                      <SelectValue placeholder="Select status" />
                    </SelectTrigger>
                  </FormControl>
                  <SelectContent>
                    <SelectItem value="NOT_DELIVERED">Not Delivered</SelectItem>
                    <SelectItem value="PARTIAL">Partial</SelectItem>
                    <SelectItem value="DELIVERED">Delivered</SelectItem>
                  </SelectContent>
                </Select>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="paymentStatus"
            render={({ field }) => (
              <FormItem>
                <FormLabel>Payment Status</FormLabel>
                <Select onValueChange={field.onChange} defaultValue={field.value}>
                  <FormControl>
                    <SelectTrigger>
                      <SelectValue placeholder="Select status" />
                    </SelectTrigger>
                  </FormControl>
                  <SelectContent>
                    <SelectItem value="PAID">Paid</SelectItem>
                    <SelectItem value="UNPAID">Unpaid</SelectItem>
                  </SelectContent>
                </Select>
                <FormMessage />
              </FormItem>
            )}
          />
        </div>

        <FormField
          control={form.control}
          name="hostId"
          render={({ field }) => (
            <FormItem>
              <FormLabel>Host</FormLabel>
              {!showNewHost ? (
                <div className="flex gap-2">
                  <Select
                    onValueChange={(v) => field.onChange(v === "none" ? "" : v)}
                    defaultValue={field.value || "none"}
                  >
                    <FormControl>
                      <SelectTrigger className="flex-1">
                        <SelectValue placeholder="Select host (optional)" />
                      </SelectTrigger>
                    </FormControl>
                    <SelectContent>
                      <SelectItem value="none">No Host</SelectItem>
                      {hostList.map((host) => (
                        <SelectItem key={host.id} value={host.id}>
                          {host.name}
                        </SelectItem>
                      ))}
                    </SelectContent>
                  </Select>
                  <Button
                    type="button"
                    variant="outline"
                    size="icon"
                    onClick={() => setShowNewHost(true)}
                  >
                    <Plus className="h-4 w-4" />
                  </Button>
                </div>
              ) : (
                <div className="flex gap-2">
                  <Input
                    placeholder="New host name"
                    value={newHostName}
                    onChange={(e) => setNewHostName(e.target.value)}
                    onKeyDown={(e) => {
                      if (e.key === "Enter") {
                        e.preventDefault();
                        handleAddHost();
                      }
                      if (e.key === "Escape") {
                        setShowNewHost(false);
                        setNewHostName("");
                      }
                    }}
                    autoFocus
                    className="flex-1"
                  />
                  <Button
                    type="button"
                    variant="outline"
                    size="sm"
                    onClick={handleAddHost}
                    disabled={isPending || !newHostName.trim()}
                  >
                    Add
                  </Button>
                  <Button
                    type="button"
                    variant="ghost"
                    size="sm"
                    onClick={() => {
                      setShowNewHost(false);
                      setNewHostName("");
                    }}
                  >
                    Cancel
                  </Button>
                </div>
              )}
              <FormMessage />
            </FormItem>
          )}
        />

        <FormField
          control={form.control}
          name="notes"
          render={({ field }) => (
            <FormItem>
              <FormLabel>Notes</FormLabel>
              <FormControl>
                <Textarea placeholder="Optional notes" rows={3} {...field} />
              </FormControl>
              <FormMessage />
            </FormItem>
          )}
        />

        <div className="flex justify-end gap-2">
          <Button type="submit" disabled={isPending}>
            {isPending ? "Saving..." : isEditing ? "Update" : "Create"}
          </Button>
        </div>
      </form>
    </Form>
  );
}
54 src/app/(app)/kickstarters/_components/kickstarter-modal.tsx Normal file
@@ -0,0 +1,54 @@
"use client";

import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { KickstarterForm } from "./kickstarter-form";

interface HostOption {
  id: string;
  name: string;
  _count: { kickstarters: number };
}

interface KickstarterModalProps {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  hosts: HostOption[];
  kickstarter?: {
    id: string;
    name: string;
    link: string | null;
    filesUrl: string | null;
    deliveryStatus: "NOT_DELIVERED" | "PARTIAL" | "DELIVERED";
    paymentStatus: "PAID" | "UNPAID";
    hostId: string | null;
    notes: string | null;
  };
}

export function KickstarterModal({ open, onOpenChange, hosts, kickstarter }: KickstarterModalProps) {
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-lg">
        <DialogHeader>
          <DialogTitle>{kickstarter ? "Edit Kickstarter" : "Add Kickstarter"}</DialogTitle>
          <DialogDescription>
            {kickstarter
              ? "Update the kickstarter details below."
              : "Track a new Kickstarter or crowdfunding campaign."}
          </DialogDescription>
        </DialogHeader>
        <KickstarterForm
          kickstarter={kickstarter}
          hosts={hosts}
          onSuccess={() => onOpenChange(false)}
        />
      </DialogContent>
    </Dialog>
  );
}
193 src/app/(app)/kickstarters/_components/kickstarter-table.tsx Normal file
@@ -0,0 +1,193 @@
"use client";

import { useState, useCallback, useTransition } from "react";
import { useRouter, usePathname, useSearchParams } from "next/navigation";
import { Plus, Search } from "lucide-react";
import { toast } from "sonner";
import { useDataTable } from "@/hooks/use-data-table";
import { getKickstarterColumns, type KickstarterRow } from "./kickstarter-columns";
import { KickstarterModal } from "./kickstarter-modal";
import { deleteKickstarter } from "../actions";
import { DataTable } from "@/components/shared/data-table";
import { DataTablePagination } from "@/components/shared/data-table-pagination";
import { DataTableViewOptions } from "@/components/shared/data-table-view-options";
import { DeleteDialog } from "@/components/shared/delete-dialog";
import { PageHeader } from "@/components/shared/page-header";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";

interface HostOption {
  id: string;
  name: string;
  _count: { kickstarters: number };
}

interface KickstarterTableProps {
  data: KickstarterRow[];
  pageCount: number;
  totalCount: number;
  hosts: HostOption[];
}

export function KickstarterTable({
  data,
  pageCount,
  totalCount,
  hosts,
}: KickstarterTableProps) {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();
  const [isPending, startTransition] = useTransition();

  const [modalOpen, setModalOpen] = useState(false);
  const [editKickstarter, setEditKickstarter] = useState<KickstarterRow | undefined>();
  const [deleteId, setDeleteId] = useState<string | null>(null);

  const [searchValue, setSearchValue] = useState(searchParams.get("search") ?? "");

  const updateSearch = useCallback(
    (value: string) => {
      setSearchValue(value);
      const params = new URLSearchParams(searchParams.toString());
      if (value) {
        params.set("search", value);
        params.set("page", "1");
      } else {
        params.delete("search");
      }
      router.push(`${pathname}?${params.toString()}`, { scroll: false });
    },
    [router, pathname, searchParams]
  );

  const updateFilter = useCallback(
    (key: string, value: string) => {
      const params = new URLSearchParams(searchParams.toString());
      if (value && value !== "all") {
        params.set(key, value);
        params.set("page", "1");
      } else {
        params.delete(key);
      }
      router.push(`${pathname}?${params.toString()}`, { scroll: false });
    },
    [router, pathname, searchParams]
  );

  const columns = getKickstarterColumns({
    onEdit: (kickstarter) => {
      setEditKickstarter(kickstarter);
      setModalOpen(true);
    },
    onDelete: (id) => setDeleteId(id),
  });

  const { table } = useDataTable({ data, columns, pageCount });

  const handleDelete = () => {
    if (!deleteId) return;
    startTransition(async () => {
      const result = await deleteKickstarter(deleteId);
      if (result.success) {
        toast.success("Kickstarter deleted");
        setDeleteId(null);
      } else {
        toast.error(result.error);
      }
    });
  };

  const activeDelivery = searchParams.get("delivery") ?? "";
  const activePayment = searchParams.get("payment") ?? "";
  const activeHost = searchParams.get("host") ?? "";

  return (
    <div className="space-y-4">
      <PageHeader title="Kickstarters" description="Track your crowdfunding campaigns and deliveries">
        <Button onClick={() => { setEditKickstarter(undefined); setModalOpen(true); }}>
          <Plus className="mr-2 h-4 w-4" />
          Add Kickstarter
        </Button>
      </PageHeader>

      <div className="flex flex-wrap items-center gap-2">
        <div className="relative flex-1 min-w-[200px] max-w-sm">
          <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
          <Input
            placeholder="Search kickstarters..."
            value={searchValue}
            onChange={(e) => updateSearch(e.target.value)}
            className="pl-9 h-9"
          />
        </div>
        <Select value={activeDelivery || "all"} onValueChange={(v) => updateFilter("delivery", v)}>
          <SelectTrigger className="w-[160px] h-9">
            <SelectValue placeholder="All Delivery" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Delivery</SelectItem>
            <SelectItem value="NOT_DELIVERED">Not Delivered</SelectItem>
            <SelectItem value="PARTIAL">Partial</SelectItem>
            <SelectItem value="DELIVERED">Delivered</SelectItem>
          </SelectContent>
        </Select>
        <Select value={activePayment || "all"} onValueChange={(v) => updateFilter("payment", v)}>
          <SelectTrigger className="w-[140px] h-9">
            <SelectValue placeholder="All Payment" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Payment</SelectItem>
            <SelectItem value="PAID">Paid</SelectItem>
            <SelectItem value="UNPAID">Unpaid</SelectItem>
          </SelectContent>
        </Select>
        {hosts.length > 0 && (
          <Select value={activeHost || "all"} onValueChange={(v) => updateFilter("host", v)}>
            <SelectTrigger className="w-[160px] h-9">
              <SelectValue placeholder="All Hosts" />
            </SelectTrigger>
            <SelectContent>
              <SelectItem value="all">All Hosts</SelectItem>
              {hosts.map((host) => (
                <SelectItem key={host.id} value={host.id}>
                  {host.name}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
        )}
        <DataTableViewOptions table={table} />
      </div>

      <DataTable table={table} emptyMessage="No kickstarters found. Add your first campaign!" />
      <DataTablePagination table={table} totalCount={totalCount} />

      <KickstarterModal
        open={modalOpen}
        onOpenChange={(open) => {
          setModalOpen(open);
          if (!open) setEditKickstarter(undefined);
        }}
        hosts={hosts}
        kickstarter={editKickstarter}
      />

      <DeleteDialog
        open={!!deleteId}
        onOpenChange={(open) => !open && setDeleteId(null)}
        title="Delete Kickstarter"
        description="This will permanently delete this kickstarter and unlink any associated packages."
        onConfirm={handleDelete}
        isLoading={isPending}
      />
    </div>
  );
}
148 src/app/(app)/kickstarters/actions.ts Normal file
@@ -0,0 +1,148 @@
"use server";

import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import { kickstarterSchema, kickstarterHostSchema } from "@/schemas/kickstarter.schema";
import { revalidatePath } from "next/cache";
import type { ActionResult } from "@/types/api.types";

const REVALIDATE_PATH = "/kickstarters";

export async function createKickstarter(
  input: unknown
): Promise<ActionResult<{ id: string }>> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const parsed = kickstarterSchema.safeParse(input);
  if (!parsed.success) return { success: false, error: "Validation failed" };

  try {
    const ks = await prisma.kickstarter.create({
      data: {
        name: parsed.data.name,
        link: parsed.data.link || null,
        filesUrl: parsed.data.filesUrl || null,
        deliveryStatus: parsed.data.deliveryStatus,
        paymentStatus: parsed.data.paymentStatus,
        hostId: parsed.data.hostId || null,
        notes: parsed.data.notes || null,
        userId: session.user.id,
      },
    });
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: { id: ks.id } };
  } catch {
    return { success: false, error: "Failed to create kickstarter" };
  }
}

export async function updateKickstarter(
  id: string,
  input: unknown
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const parsed = kickstarterSchema.safeParse(input);
  if (!parsed.success) return { success: false, error: "Validation failed" };

  const existing = await prisma.kickstarter.findFirst({
    where: { id, userId: session.user.id },
  });
  if (!existing) return { success: false, error: "Not found" };

  try {
    await prisma.kickstarter.update({
      where: { id },
      data: {
        name: parsed.data.name,
        link: parsed.data.link || null,
        filesUrl: parsed.data.filesUrl || null,
        deliveryStatus: parsed.data.deliveryStatus,
        paymentStatus: parsed.data.paymentStatus,
        hostId: parsed.data.hostId || null,
        notes: parsed.data.notes || null,
      },
    });
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update kickstarter" };
  }
}

export async function deleteKickstarter(id: string): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const existing = await prisma.kickstarter.findFirst({
    where: { id, userId: session.user.id },
  });
  if (!existing) return { success: false, error: "Not found" };

  try {
    await prisma.kickstarter.delete({ where: { id } });
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to delete kickstarter" };
  }
}

export async function createHost(
  input: unknown
): Promise<ActionResult<{ id: string; name: string }>> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const parsed = kickstarterHostSchema.safeParse(input);
  if (!parsed.success) return { success: false, error: "Validation failed" };

  try {
    const host = await prisma.kickstarterHost.create({
      data: { name: parsed.data.name },
    });
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: { id: host.id, name: host.name } };
  } catch (err: unknown) {
    if (
      err instanceof Error &&
      err.message.includes("Unique constraint")
    ) {
      return { success: false, error: "A host with that name already exists" };
    }
    return { success: false, error: "Failed to create host" };
  }
}

export async function linkPackages(
  kickstarterId: string,
  packageIds: string[]
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const existing = await prisma.kickstarter.findFirst({
    where: { id: kickstarterId, userId: session.user.id },
  });
  if (!existing) return { success: false, error: "Not found" };

  try {
    // Replace all linked packages
    await prisma.$transaction([
      prisma.kickstarterPackage.deleteMany({
        where: { kickstarterId },
      }),
      ...packageIds.map((packageId) =>
        prisma.kickstarterPackage.create({
          data: { kickstarterId, packageId },
        })
      ),
    ]);
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to link packages" };
  }
}
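Every action above returns the ActionResult type imported from "@/types/api.types", whose definition sits outside this diff. A minimal sketch of the discriminated union these return values imply — an inference from the call sites, not the repo's actual file:

type ActionResult<T = undefined> =
  | { success: true; data: T }          // e.g. { success: true, data: { id: ks.id } }
  | { success: false; error: string };  // e.g. { success: false, error: "Unauthorized" }

Narrowing on the success flag is what lets callers such as the kickstarter table's delete handler read result.error only in the failure branch.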
29  src/app/(app)/kickstarters/page.tsx  Normal file
@@ -0,0 +1,29 @@
import { auth } from "@/lib/auth";
import { redirect } from "next/navigation";
import { getKickstarters, getKickstarterHosts } from "@/data/kickstarter.queries";
import type { DataTableSearchParams } from "@/types/table.types";
import { KickstarterTable } from "./_components/kickstarter-table";

interface Props {
  searchParams: Promise<DataTableSearchParams & { delivery?: string; payment?: string; host?: string }>;
}

export default async function KickstartersPage({ searchParams }: Props) {
  const session = await auth();
  if (!session?.user?.id) redirect("/login");

  const params = await searchParams;
  const [{ data, pageCount, totalCount }, hosts] = await Promise.all([
    getKickstarters(session.user.id, params),
    getKickstarterHosts(),
  ]);

  return (
    <KickstarterTable
      data={data}
      pageCount={pageCount}
      totalCount={totalCount}
      hosts={hosts}
    />
  );
}
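DataTableSearchParams from "@/types/table.types" is likewise not included in this diff. Since its values come straight off the URL query string, a plausible shape is all-optional strings — a hypothetical sketch, not the repo's confirmed definition:

// Hypothetical sketch of the imported type; the actual definition may differ.
interface DataTableSearchParams {
  page?: string;
  perPage?: string;
  sort?: string;
  search?: string;
}

That would also explain why the page intersects it with { delivery?: string; payment?: string; host?: string } instead of typed enums: parsing and validation happen later, in the query layer.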
399  src/app/(app)/stls/_components/archive-preview-picker.tsx  Normal file
@@ -0,0 +1,399 @@
"use client";

import { useEffect, useState, useCallback, useRef, useTransition } from "react";
import {
  Image as ImageIcon,
  Loader2,
  Check,
  AlertCircle,
  ImageOff,
} from "lucide-react";
import {
  Dialog,
  DialogContent,
  DialogHeader,
  DialogTitle,
  DialogDescription,
} from "@/components/ui/dialog";
import { ScrollArea } from "@/components/ui/scroll-area";
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { toast } from "sonner";
import { setPreviewFromExtract } from "../actions";

interface ArchiveImage {
  id: string;
  path: string;
  fileName: string;
  extension: string | null;
  size: string;
}

interface ThumbnailState {
  status: "idle" | "loading" | "loaded" | "failed";
  requestId?: string;
  imageUrl?: string;
  error?: string;
}

interface ArchivePreviewPickerProps {
  packageId: string;
  packageName: string;
  open: boolean;
  onOpenChange: (open: boolean) => void;
  onPreviewSet?: () => void;
}

function formatBytes(bytesStr: string): string {
  const bytes = Number(bytesStr);
  if (bytes === 0) return "0 B";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
}

export function ArchivePreviewPicker({
  packageId,
  packageName,
  open,
  onOpenChange,
  onPreviewSet,
}: ArchivePreviewPickerProps) {
  const [images, setImages] = useState<ArchiveImage[]>([]);
  const [loading, setLoading] = useState(false);
  const [thumbnails, setThumbnails] = useState<Map<string, ThumbnailState>>(new Map());
  const [selectedPath, setSelectedPath] = useState<string | null>(null);
  const [isPending, startTransition] = useTransition();
  const pollTimers = useRef<Map<string, ReturnType<typeof setInterval>>>(new Map());
  // Track which paths have already been requested to avoid re-requesting
  const requestedPaths = useRef<Set<string>>(new Set());

  // Cleanup poll timers on unmount
  useEffect(() => {
    return () => {
      for (const timer of pollTimers.current.values()) {
        clearInterval(timer);
      }
    };
  }, []);

  // Fetch image list when opened
  useEffect(() => {
    if (!open) return;

    setImages([]);
    setThumbnails(new Map());
    setSelectedPath(null);
    requestedPaths.current.clear();

    // Clear any leftover poll timers
    for (const timer of pollTimers.current.values()) {
      clearInterval(timer);
    }
    pollTimers.current.clear();

    const fetchImages = async () => {
      setLoading(true);
      try {
        const res = await fetch(`/api/zips/${packageId}/images`);
        if (!res.ok) throw new Error("Failed to fetch images");
        const data = await res.json();
        setImages(data.images);
      } catch {
        toast.error("Failed to load archive images");
      } finally {
        setLoading(false);
      }
    };

    fetchImages();
  }, [open, packageId]);

  // Poll callback for a specific request
  const startPolling = useCallback(
    (filePath: string, requestId: string) => {
      // Clear any existing poll for this path
      const existing = pollTimers.current.get(filePath);
      if (existing) clearInterval(existing);

      const pollId = setInterval(async () => {
        try {
          const pollRes = await fetch(
            `/api/zips/${packageId}/extract/${requestId}`
          );
          if (!pollRes.ok) return;
          const pollData = await pollRes.json();

          if (pollData.status === "COMPLETED") {
            clearInterval(pollId);
            pollTimers.current.delete(filePath);
            setThumbnails((prev) => {
              const next = new Map(prev);
              next.set(filePath, {
                status: "loaded",
                requestId,
                imageUrl: `/api/zips/${packageId}/extract/${requestId}?image=true`,
              });
              return next;
            });
          } else if (pollData.status === "FAILED") {
            clearInterval(pollId);
            pollTimers.current.delete(filePath);
            setThumbnails((prev) => {
              const next = new Map(prev);
              next.set(filePath, {
                status: "failed",
                error: pollData.error || "Extraction failed",
              });
              return next;
            });
          }
        } catch {
          // Silently retry on network error
        }
      }, 2000);

      pollTimers.current.set(filePath, pollId);
    },
    [packageId]
  );

  // Request extraction for a specific image
  const requestThumbnail = useCallback(
    async (filePath: string) => {
      // Don't re-request if already in progress
      if (requestedPaths.current.has(filePath)) return;
      requestedPaths.current.add(filePath);

      setThumbnails((prev) => {
        const next = new Map(prev);
        next.set(filePath, { status: "loading" });
        return next;
      });

      try {
        const res = await fetch(`/api/zips/${packageId}/extract`, {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ filePath }),
        });

        if (!res.ok) {
          const err = await res.json();
          throw new Error(err.error || "Extract failed");
        }

        const data = await res.json();

        if (data.status === "COMPLETED") {
          setThumbnails((prev) => {
            const next = new Map(prev);
            next.set(filePath, {
              status: "loaded",
              requestId: data.requestId,
              imageUrl: `/api/zips/${packageId}/extract/${data.requestId}?image=true`,
            });
            return next;
          });
          return;
        }

        // Pending or in-progress: start polling
        setThumbnails((prev) => {
          const next = new Map(prev);
          next.set(filePath, { status: "loading", requestId: data.requestId });
          return next;
        });

        startPolling(filePath, data.requestId);
      } catch (err) {
        requestedPaths.current.delete(filePath);
        setThumbnails((prev) => {
          const next = new Map(prev);
          next.set(filePath, {
            status: "failed",
            error: err instanceof Error ? err.message : "Failed to extract",
          });
          return next;
        });
      }
    },
    [packageId, startPolling]
  );

  // Auto-request thumbnails for the first batch of images
  useEffect(() => {
    if (!open || images.length === 0) return;

    // Request the first 12 images automatically
    const toRequest = images.slice(0, 12);
    for (const img of toRequest) {
      requestThumbnail(img.path);
    }
    // Only trigger when images list changes, not on every requestThumbnail change
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [images, open]);

  // Handle selection confirmation
  const handleConfirm = () => {
    if (!selectedPath) return;
    const thumbState = thumbnails.get(selectedPath);
    if (!thumbState?.requestId) return;

    startTransition(async () => {
      const result = await setPreviewFromExtract(packageId, thumbState.requestId!);
      if (result.success) {
        toast.success("Preview updated from archive image");
        onOpenChange(false);
        onPreviewSet?.();
      } else {
        toast.error(result.error);
      }
    });
  };

  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent className="sm:max-w-2xl max-h-[80vh] flex flex-col gap-0 p-0">
        <DialogHeader className="px-6 pt-6 pb-4 border-b border-border space-y-1">
          <DialogTitle>Select Preview Image</DialogTitle>
          <DialogDescription className="text-sm">
            Choose an image from the archive to use as the preview for{" "}
            <span className="font-medium text-foreground">{packageName}</span>
          </DialogDescription>
        </DialogHeader>

        <ScrollArea className="flex-1 min-h-0">
          <div className="p-4">
            {loading ? (
              <div className="flex flex-col items-center justify-center gap-2 py-12">
                <Loader2 className="h-5 w-5 animate-spin text-muted-foreground" />
                <span className="text-sm text-muted-foreground">
                  Loading image list...
                </span>
              </div>
            ) : images.length === 0 ? (
              <div className="flex flex-col items-center justify-center gap-2 py-12">
                <ImageOff className="h-6 w-6 text-muted-foreground/50" />
                <span className="text-sm text-muted-foreground">
                  No images found in this archive
                </span>
              </div>
            ) : (
              <div className="grid grid-cols-3 sm:grid-cols-4 gap-3">
                {images.map((img) => {
                  const thumbState = thumbnails.get(img.path);
                  const isSelected = selectedPath === img.path;
                  const isLoaded = thumbState?.status === "loaded";
                  const isLoading = thumbState?.status === "loading";
                  const isFailed = thumbState?.status === "failed";

                  return (
                    <button
                      key={img.id}
                      type="button"
                      className={cn(
                        "relative aspect-square rounded-lg overflow-hidden border-2 transition-all",
                        "hover:border-primary/50 cursor-pointer group",
                        isSelected
                          ? "border-primary ring-2 ring-primary/30"
                          : "border-border",
                        isFailed && "opacity-60"
                      )}
                      onClick={() => {
                        if (isLoaded) {
                          setSelectedPath(img.path);
                        } else if (isFailed) {
                          // Allow retry on failed
                          requestedPaths.current.delete(img.path);
                          requestThumbnail(img.path);
                        } else if (!thumbState || thumbState.status === "idle") {
                          requestThumbnail(img.path);
                        }
                      }}
                      title={img.path}
                    >
                      {isLoaded && thumbState.imageUrl ? (
                        <img
                          src={thumbState.imageUrl}
                          alt={img.fileName}
                          className="h-full w-full object-cover"
                          loading="lazy"
                        />
                      ) : isLoading ? (
                        <div className="h-full w-full flex items-center justify-center bg-muted">
                          <Loader2 className="h-5 w-5 animate-spin text-muted-foreground" />
                        </div>
                      ) : isFailed ? (
                        <div className="h-full w-full flex flex-col items-center justify-center bg-muted gap-1">
                          <AlertCircle className="h-4 w-4 text-destructive" />
                          <span className="text-[10px] text-destructive px-1 text-center">
                            Click to retry
                          </span>
                        </div>
                      ) : (
                        <div className="h-full w-full flex items-center justify-center bg-muted">
                          <ImageIcon className="h-5 w-5 text-muted-foreground" />
                        </div>
                      )}

                      {/* Selection checkmark */}
                      {isSelected && (
                        <div className="absolute top-1.5 right-1.5 h-5 w-5 rounded-full bg-primary flex items-center justify-center">
                          <Check className="h-3 w-3 text-primary-foreground" />
                        </div>
                      )}

                      {/* File info overlay */}
                      <div className="absolute bottom-0 left-0 right-0 bg-black/60 px-1.5 py-1 opacity-0 group-hover:opacity-100 transition-opacity">
                        <p className="text-[10px] text-white truncate">
                          {img.fileName}
                        </p>
                        <p className="text-[9px] text-white/70">
                          {formatBytes(img.size)}
                        </p>
                      </div>
                    </button>
                  );
                })}
              </div>
            )}
          </div>
        </ScrollArea>

        {/* Footer */}
        {images.length > 0 && (
          <div className="px-6 py-4 border-t border-border flex items-center justify-between">
            <span className="text-sm text-muted-foreground">
              {images.length} image{images.length !== 1 ? "s" : ""} found
            </span>
            <div className="flex gap-2">
              <Button
                variant="outline"
                size="sm"
                onClick={() => onOpenChange(false)}
              >
                Cancel
              </Button>
              <Button
                size="sm"
                disabled={!selectedPath || isPending}
                onClick={handleConfirm}
              >
                {isPending ? (
                  <>
                    <Loader2 className="h-3.5 w-3.5 animate-spin mr-1" />
                    Setting...
                  </>
                ) : (
                  "Use as Preview"
                )}
              </Button>
            </div>
          </div>
        )}
      </DialogContent>
    </Dialog>
  );
}
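The picker's fetch calls imply an extraction API whose route handlers are not shown in this section. Reconstructed from the client code alone (field names inferred; treat this as an assumption rather than the server's actual contract):

// POST /api/zips/:packageId/extract            body: { filePath }  -> ExtractResponse
// GET  /api/zips/:packageId/extract/:requestId                     -> ExtractResponse
// GET  /api/zips/:packageId/extract/:requestId?image=true          -> extracted image bytes
interface ExtractResponse {
  requestId: string;
  status: "PENDING" | "IN_PROGRESS" | "COMPLETED" | "FAILED";
  error?: string;
}

A COMPLETED response short-circuits the 2-second polling loop immediately; FAILED clears the timer and marks the tile retryable; anything else keeps polling until the dialog unmounts and the cleanup effect clears the timers.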
@@ -1,10 +1,11 @@
 "use client";
 
 import { type ColumnDef } from "@tanstack/react-table";
-import { FileArchive, Eye, ImageIcon } from "lucide-react";
+import { FileArchive, Eye, ChevronRight, Layers, Ungroup, Send, ImagePlus } from "lucide-react";
 import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
 import { Badge } from "@/components/ui/badge";
 import { Button } from "@/components/ui/button";
+import { Checkbox } from "@/components/ui/checkbox";
 import { SendToTelegramButton } from "./send-to-telegram-button";
 
 export interface PackageRow {
@@ -12,23 +13,65 @@ export interface PackageRow {
   fileName: string;
   fileSize: string;
   contentHash: string;
-  archiveType: "ZIP" | "RAR";
+  archiveType: "ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT";
   fileCount: number;
   isMultipart: boolean;
   hasPreview: boolean;
   creator: string | null;
   tags: string[];
   indexedAt: string;
   sourceChannel: {
     id: string;
     title: string;
   };
   matchedFileCount: number;
   matchedByContent: boolean;
+  packageGroupId?: string | null;
 }
 
+export interface GroupHeaderRow {
+  _rowType: "group";
+  id: string;
+  name: string;
+  hasPreview: boolean;
+  totalFileSize: string;
+  totalFileCount: number;
+  packageCount: number;
+  combinedTags: string[];
+  archiveTypes: ("ZIP" | "RAR" | "SEVEN_Z" | "DOCUMENT")[];
+  latestIndexedAt: string;
+  sourceChannel: { id: string; title: string };
+  _expanded: boolean;
+}
+
+export interface PackageTableRow extends PackageRow {
+  _rowType: "package";
+  _groupId: string | null;
+  _isGroupMember: boolean;
+}
+
+export type StlTableRow = GroupHeaderRow | PackageTableRow;
+
+function isGroupRow(row: StlTableRow): row is GroupHeaderRow {
+  return row._rowType === "group";
+}
+
 interface PackageColumnsProps {
   onViewFiles: (pkg: PackageRow) => void;
+  onSetCreator: (pkg: PackageRow) => void;
+  onSetTags: (pkg: PackageRow) => void;
+  searchTerm: string;
+  onToggleGroup: (groupId: string) => void;
+  onRenameGroup: (groupId: string, currentName: string) => void;
+  onDissolveGroup: (groupId: string) => void;
+  onSendAllInGroup: (groupId: string) => void;
+  onRemoveFromGroup: (packageId: string) => void;
+  onGroupPreviewUpload: (groupId: string) => void;
+  selectedPackages: Set<string>;
+  onToggleSelect: (packageId: string) => void;
 }
 
-function formatBytes(bytesStr: string): string {
+export function formatBytes(bytesStr: string): string {
   const bytes = Number(bytesStr);
   if (bytes === 0) return "0 B";
   const k = 1024;
@@ -55,68 +98,271 @@ function PreviewCell({ pkg }: { pkg: PackageRow }) {
   );
 }
 
+function GroupPreviewCell({
+  group,
+  onUpload,
+}: {
+  group: GroupHeaderRow;
+  onUpload: (groupId: string) => void;
+}) {
+  if (group.hasPreview) {
+    return (
+      <button
+        className="relative group/preview cursor-pointer"
+        onClick={() => onUpload(group.id)}
+        title="Click to change preview image"
+      >
+        <img
+          src={`/api/groups/${group.id}/preview`}
+          alt=""
+          className="h-9 w-9 rounded-md object-cover bg-muted"
+          loading="lazy"
+        />
+        <div className="absolute inset-0 flex items-center justify-center rounded-md bg-black/50 opacity-0 group-hover/preview:opacity-100 transition-opacity">
+          <ImagePlus className="h-3.5 w-3.5 text-white" />
+        </div>
+      </button>
+    );
+  }
+  return (
+    <button
+      className="flex h-9 w-9 items-center justify-center rounded-md bg-muted hover:bg-muted/80 transition-colors cursor-pointer"
+      onClick={() => onUpload(group.id)}
+      title="Click to add preview image"
+    >
+      <Layers className="h-4 w-4 text-muted-foreground" />
+    </button>
+  );
+}
+
 export function getPackageColumns({
   onViewFiles,
-}: PackageColumnsProps): ColumnDef<PackageRow, unknown>[] {
+  onSetCreator,
+  onSetTags,
+  searchTerm,
+  onToggleGroup,
+  onRenameGroup,
+  onDissolveGroup,
+  onSendAllInGroup,
+  onRemoveFromGroup,
+  onGroupPreviewUpload,
+  selectedPackages,
+  onToggleSelect,
+}: PackageColumnsProps): ColumnDef<StlTableRow, unknown>[] {
   return [
+    {
+      id: "select",
+      header: "",
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) return null;
+        return (
+          <Checkbox
+            checked={selectedPackages.has(data.id)}
+            onCheckedChange={() => onToggleSelect(data.id)}
+            aria-label="Select package"
+            className="translate-y-[2px]"
+          />
+        );
+      },
+      enableHiding: false,
+      enableSorting: false,
+      size: 32,
+    },
     {
       id: "preview",
       header: "",
-      cell: ({ row }) => <PreviewCell pkg={row.original} />,
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return (
+            <div className="flex items-center gap-1">
+              <button
+                className="shrink-0 p-0.5 cursor-pointer"
+                onClick={() => onToggleGroup(data.id)}
+                aria-label={data._expanded ? "Collapse group" : "Expand group"}
+              >
+                <ChevronRight
+                  className={`h-4 w-4 text-muted-foreground transition-transform ${
+                    data._expanded ? "rotate-90" : ""
+                  }`}
+                />
+              </button>
+              <GroupPreviewCell group={data} onUpload={onGroupPreviewUpload} />
+            </div>
+          );
+        }
+        return (
+          <div className={data._isGroupMember ? "pl-5" : ""}>
+            <PreviewCell pkg={data} />
+          </div>
+        );
+      },
       enableHiding: false,
       enableSorting: false,
-      size: 52,
+      size: 72,
     },
     {
       accessorKey: "fileName",
       header: ({ column }) => <DataTableColumnHeader column={column} title="File Name" />,
-      cell: ({ row }) => (
-        <div className="flex items-center gap-2 min-w-0">
-          <span className="font-medium truncate max-w-[300px]">{row.original.fileName}</span>
-          {row.original.isMultipart && (
-            <Badge variant="outline" className="text-[10px] shrink-0">
-              Multi
-            </Badge>
-          )}
-        </div>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return (
+            <div className="min-w-0">
+              <div className="flex items-center gap-2">
+                <button
+                  className="font-semibold truncate max-w-[300px] cursor-pointer hover:underline text-left"
+                  onClick={() => onRenameGroup(data.id, data.name)}
+                  title="Click to rename group"
+                >
+                  {data.name}
+                </button>
+                <Badge variant="secondary" className="text-[10px] shrink-0">
+                  {data.packageCount} pkg{data.packageCount !== 1 ? "s" : ""}
+                </Badge>
+              </div>
+            </div>
+          );
+        }
+        return (
+          <div className="min-w-0">
+            <div className="flex items-center gap-2">
+              <span className="font-medium truncate max-w-[300px]">{data.fileName}</span>
+              {data.isMultipart && (
+                <Badge variant="outline" className="text-[10px] shrink-0">
+                  Multi
+                </Badge>
+              )}
+            </div>
+            {searchTerm && data.matchedByContent && (
+              <button
+                className="text-[11px] text-amber-500 hover:text-amber-400 hover:underline cursor-pointer mt-0.5"
+                onClick={() => onViewFiles(data)}
+              >
+                {data.matchedFileCount.toLocaleString()} file match{data.matchedFileCount !== 1 ? "es" : ""}
+              </button>
+            )}
+          </div>
+        );
+      },
       enableHiding: false,
     },
     {
       accessorKey: "archiveType",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Type" />,
-      cell: ({ row }) => (
-        <Badge variant="secondary" className="text-[10px]">
-          {row.original.archiveType}
-        </Badge>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          const types = data.archiveTypes;
+          if (types.length === 1) {
+            return (
+              <Badge variant="secondary" className="text-[10px]">
+                {types[0]}
+              </Badge>
+            );
+          }
+          return (
+            <Badge variant="secondary" className="text-[10px]">
+              Mixed
+            </Badge>
+          );
+        }
+        return (
+          <Badge variant="secondary" className="text-[10px]">
+            {data.archiveType}
+          </Badge>
+        );
+      },
     },
     {
       accessorKey: "fileSize",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Size" />,
-      cell: ({ row }) => (
-        <span className="text-sm text-muted-foreground">
-          {formatBytes(row.original.fileSize)}
-        </span>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        const size = isGroupRow(data) ? data.totalFileSize : data.fileSize;
+        return (
+          <span className="text-sm text-muted-foreground">
+            {formatBytes(size)}
+          </span>
+        );
+      },
     },
     {
       accessorKey: "fileCount",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Files" />,
-      cell: ({ row }) => (
-        <span className="text-sm">
-          {row.original.fileCount.toLocaleString()}
-        </span>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        const count = isGroupRow(data) ? data.totalFileCount : data.fileCount;
+        return (
+          <span className="text-sm">
+            {count.toLocaleString()}
+          </span>
+        );
+      },
     },
     {
       accessorKey: "creator",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Creator" />,
-      cell: ({ row }) => (
-        <span className="text-sm text-muted-foreground truncate max-w-[160px] block">
-          {row.original.creator ?? "\u2014"}
-        </span>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
+        }
+        return (
+          <button
+            className="text-sm text-muted-foreground truncate max-w-[160px] block hover:text-foreground hover:underline cursor-pointer text-left"
+            onClick={() => onSetCreator(data)}
+            title="Click to edit creator"
+          >
+            {data.creator || "\u2014"}
+          </button>
+        );
+      },
    },
+    {
+      id: "tags",
+      header: ({ column }) => <DataTableColumnHeader column={column} title="Tags" />,
+      cell: ({ row }) => {
+        const data = row.original;
+        const tags = isGroupRow(data) ? data.combinedTags : data.tags;
+        if (tags.length === 0) {
+          if (isGroupRow(data)) {
+            return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
+          }
+          return (
+            <button
+              className="text-sm text-muted-foreground hover:text-foreground cursor-pointer"
+              onClick={() => onSetTags(data)}
+              title="Click to add tags"
+            >
+              {"\u2014"}
+            </button>
+          );
+        }
+        const clickHandler = isGroupRow(data) ? undefined : () => onSetTags(data as PackageTableRow);
+        return (
+          <button
+            className={`flex flex-wrap gap-1 ${clickHandler ? "cursor-pointer" : "cursor-default"}`}
+            onClick={clickHandler}
+            title={clickHandler ? "Click to edit tags" : undefined}
+          >
+            {tags.map((tag) => (
+              <Badge
+                key={tag}
+                variant="outline"
+                className="text-[10px] bg-primary/5"
+              >
+                {tag}
+              </Badge>
+            ))}
+          </button>
+        );
+      },
+      accessorFn: (row) => {
+        if (isGroupRow(row)) return row.combinedTags.join(", ");
+        return row.tags.join(", ");
+      },
+    },
     {
       id: "channel",
@@ -131,31 +377,73 @@ export function getPackageColumns({
     {
       accessorKey: "indexedAt",
       header: ({ column }) => <DataTableColumnHeader column={column} title="Indexed" />,
-      cell: ({ row }) => (
-        <span className="text-sm text-muted-foreground">
-          {new Date(row.original.indexedAt).toLocaleDateString()}
-        </span>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        const date = isGroupRow(data) ? data.latestIndexedAt : data.indexedAt;
+        return (
+          <span className="text-sm text-muted-foreground">
+            {new Date(date).toLocaleDateString()}
+          </span>
+        );
+      },
     },
     {
       id: "actions",
-      cell: ({ row }) => (
-        <div className="flex items-center gap-0.5">
-          <SendToTelegramButton
-            packageId={row.original.id}
-            packageName={row.original.fileName}
-            variant="icon"
-          />
-          <Button
-            variant="ghost"
-            size="icon"
-            className="h-8 w-8"
-            onClick={() => onViewFiles(row.original)}
-          >
-            <Eye className="h-4 w-4" />
-          </Button>
-        </div>
-      ),
+      cell: ({ row }) => {
+        const data = row.original;
+        if (isGroupRow(data)) {
+          return (
+            <div className="flex items-center gap-0.5">
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onSendAllInGroup(data.id)}
+                title="Send all packages in group"
+              >
+                <Send className="h-4 w-4" />
+              </Button>
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onDissolveGroup(data.id)}
+                title="Dissolve group"
+              >
+                <Ungroup className="h-4 w-4" />
+              </Button>
+            </div>
+          );
+        }
+        return (
+          <div className="flex items-center gap-0.5">
+            <SendToTelegramButton
+              packageId={data.id}
+              packageName={data.fileName}
+              variant="icon"
+            />
+            <Button
+              variant="ghost"
+              size="icon"
+              className="h-8 w-8"
+              onClick={() => onViewFiles(data)}
+            >
+              <Eye className="h-4 w-4" />
+            </Button>
+            {data._isGroupMember && (
+              <Button
+                variant="ghost"
+                size="icon"
+                className="h-8 w-8"
+                onClick={() => onRemoveFromGroup(data.id)}
+                title="Remove from group"
+              >
+                <Ungroup className="h-3.5 w-3.5" />
+              </Button>
+            )}
+          </div>
+        );
+      },
      enableHiding: false,
     },
   ];
@@ -1,6 +1,7 @@
 "use client";
 
-import { useEffect, useState, useCallback, useMemo } from "react";
+import { useEffect, useState, useCallback, useMemo, useRef } from "react";
 import { toast } from "sonner";
 import {
   FileText,
   Folder,
@@ -9,6 +10,9 @@ import {
   Search,
   ChevronDown,
   ChevronRight,
+  Upload,
+  ImagePlus,
+  Images,
 } from "lucide-react";
 import {
   Dialog,
@@ -24,6 +28,8 @@ import { Button } from "@/components/ui/button";
 import { cn } from "@/lib/utils";
 import type { PackageRow } from "./package-columns";
 import { SendToTelegramButton } from "./send-to-telegram-button";
+import { uploadPackagePreview } from "../actions";
+import { ArchivePreviewPicker } from "./archive-preview-picker";
 
 interface FileItem {
   id: string;
@@ -46,6 +52,7 @@ interface PackageFilesDrawerProps {
   pkg: PackageRow | null;
   open: boolean;
   onOpenChange: (open: boolean) => void;
+  highlightTerm?: string;
 }
 
 function formatBytes(bytesStr: string): string {
@@ -75,6 +82,15 @@ function getExtBadgeClass(ext: string | null): string {
   return EXTENSION_COLORS[ext.toLowerCase()] ?? "bg-zinc-500/15 text-zinc-400 border-zinc-500/30";
 }
 
+function fileMatchesHighlight(file: FileItem, term: string): boolean {
+  if (!term) return false;
+  const lower = term.toLowerCase();
+  return (
+    file.fileName.toLowerCase().includes(lower) ||
+    file.path.toLowerCase().includes(lower)
+  );
+}
+
 /**
  * Build a tree structure from flat file paths.
  */
@@ -114,11 +130,13 @@ function TreeNodeView({
   depth,
   search,
   defaultOpen,
+  highlightTerm,
 }: {
   node: TreeNode;
   depth: number;
   search: string;
   defaultOpen: boolean;
+  highlightTerm?: string;
 }) {
   const [open, setOpen] = useState(defaultOpen);
 
@@ -131,10 +149,22 @@ function TreeNodeView({
     });
   }, [node.children]);
 
-  // If searching, force all open
+  const hasHighlightedDescendant = useMemo(() => {
+    if (!highlightTerm) return false;
+    function check(n: TreeNode): boolean {
+      if (n.file && fileMatchesHighlight(n.file, highlightTerm!)) return true;
+      for (const child of n.children.values()) {
+        if (check(child)) return true;
+      }
+      return false;
+    }
+    return check(node);
+  }, [node, highlightTerm]);
+
+  // If searching or has highlighted descendants, force all open
   useEffect(() => {
-    if (search) setOpen(true);
-  }, [search]);
+    if (search || hasHighlightedDescendant) setOpen(true);
+  }, [search, hasHighlightedDescendant]);
 
   if (node.isFolder && node.children.size > 0) {
     return (
@@ -171,6 +201,7 @@ function TreeNodeView({
             depth={depth + 1}
             search={search}
             defaultOpen={depth < 1} // Auto-expand first 2 levels
+            highlightTerm={highlightTerm}
           />
         ))}
       </div>
@@ -179,9 +210,15 @@ function TreeNodeView({
 
   // File node
   if (node.file) {
+    const isHighlighted = highlightTerm ? fileMatchesHighlight(node.file, highlightTerm) : false;
     return (
       <div
-        className="flex items-center gap-2 rounded-md px-1 py-1 hover:bg-muted/50 transition-colors"
+        className={cn(
+          "flex items-center gap-2 rounded-md px-1 py-1 transition-colors",
+          isHighlighted
+            ? "bg-amber-500/15 hover:bg-amber-500/20"
+            : "hover:bg-muted/50"
+        )}
         style={{ paddingLeft: `${Math.max(0, depth) * 16 + 4}px` }}
       >
         <FileText className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
@@ -217,13 +254,53 @@ function countFiles(node: TreeNode): number {
 
 const PAGE_SIZE = 100;
 
-export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDrawerProps) {
+export function PackageFilesDrawer({ pkg, open, onOpenChange, highlightTerm }: PackageFilesDrawerProps) {
   const [files, setFiles] = useState<FileItem[]>([]);
   const [total, setTotal] = useState(0);
   const [loading, setLoading] = useState(false);
   const [loadingMore, setLoadingMore] = useState(false);
   const [search, setSearch] = useState("");
   const [page, setPage] = useState(1);
+  const [uploading, setUploading] = useState(false);
+  const [localPreviewUrl, setLocalPreviewUrl] = useState<string | null>(null);
+  const [showPreviewPicker, setShowPreviewPicker] = useState(false);
+  const fileInputRef = useRef<HTMLInputElement>(null);
+
+  const handlePreviewUpload = useCallback(
+    async (e: React.ChangeEvent<HTMLInputElement>) => {
+      const file = e.target.files?.[0];
+      if (!file || !pkg) return;
+
+      // Reset file input so the same file can be re-selected
+      e.target.value = "";
+
+      setUploading(true);
+      try {
+        const formData = new FormData();
+        formData.append("file", file);
+        const result = await uploadPackagePreview(pkg.id, formData);
+        if (result.success) {
+          toast.success("Preview image uploaded");
+          // Show uploaded image immediately via local object URL
+          setLocalPreviewUrl(URL.createObjectURL(file));
+        } else {
+          toast.error(result.error);
+        }
+      } catch {
+        toast.error("Failed to upload preview image");
+      } finally {
+        setUploading(false);
+      }
+    },
+    [pkg]
+  );
+
+  // Clean up local preview URL when drawer closes or package changes
+  useEffect(() => {
+    return () => {
+      if (localPreviewUrl) URL.revokeObjectURL(localPreviewUrl);
+    };
+  }, [localPreviewUrl]);
 
   const fetchFiles = useCallback(
     async (pageNum: number, append: boolean) => {
@@ -258,6 +335,7 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
       setTotal(0);
       setSearch("");
       setPage(1);
+      setLocalPreviewUrl(null);
       fetchFiles(1, false);
     }
   }, [open, pkg, fetchFiles]);
@@ -288,17 +366,55 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
   }, [filtered]);
 
   return (
+    <>
     <Dialog open={open} onOpenChange={onOpenChange}>
       <DialogContent className="sm:max-w-2xl max-h-[80vh] flex flex-col gap-0 p-0">
         <DialogHeader className="px-6 pt-6 pb-4 border-b border-border space-y-3">
           {/* Preview image + title row */}
           <div className="flex gap-4">
-            {pkg?.hasPreview && (
-              <img
-                src={`/api/zips/${pkg.id}/preview`}
-                alt=""
-                className="h-20 w-20 rounded-lg object-cover bg-muted shrink-0"
-              />
+            {/* Preview image area with upload capability */}
+            <input
+              ref={fileInputRef}
+              type="file"
+              accept="image/jpeg,image/png,image/webp"
+              className="hidden"
+              onChange={handlePreviewUpload}
+            />
+            {(pkg?.hasPreview || localPreviewUrl) ? (
+              <button
+                type="button"
+                className="relative group h-20 w-20 shrink-0 rounded-lg overflow-hidden bg-muted"
+                onClick={() => fileInputRef.current?.click()}
+                disabled={uploading}
+                title="Click to replace preview image"
+              >
+                <img
+                  src={localPreviewUrl ?? `/api/zips/${pkg!.id}/preview`}
+                  alt=""
+                  className="h-full w-full object-cover"
+                />
+                <div className="absolute inset-0 bg-black/50 opacity-0 group-hover:opacity-100 transition-opacity flex items-center justify-center">
+                  {uploading ? (
+                    <Loader2 className="h-5 w-5 text-white animate-spin" />
+                  ) : (
+                    <Upload className="h-5 w-5 text-white" />
+                  )}
+                </div>
+              </button>
+            ) : (
+              <button
+                type="button"
+                className="flex h-20 w-20 shrink-0 items-center justify-center rounded-lg border border-dashed border-muted-foreground/30 bg-muted/50 hover:bg-muted hover:border-muted-foreground/50 transition-colors cursor-pointer"
+                onClick={() => fileInputRef.current?.click()}
+                disabled={uploading}
+                title="Upload preview image"
+              >
+                {uploading ? (
+                  <Loader2 className="h-5 w-5 text-muted-foreground animate-spin" />
+                ) : (
+                  <ImagePlus className="h-5 w-5 text-muted-foreground" />
+                )}
+              </button>
             )}
             <div className="min-w-0 flex-1">
              <DialogTitle className="truncate pr-8">
@@ -308,11 +424,36 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
               {total.toLocaleString()} file{total !== 1 ? "s" : ""} in archive
             </DialogDescription>
             {pkg && (
-              <div className="mt-2">
+              <div className="mt-2 flex items-center gap-2">
                 <SendToTelegramButton
                   packageId={pkg.id}
                   packageName={pkg.fileName}
                 />
+                {pkg.archiveType !== "DOCUMENT" && !pkg.isMultipart && (
+                  <Button
+                    variant="outline"
+                    size="sm"
+                    className="h-8 gap-1.5 text-xs"
+                    onClick={() => setShowPreviewPicker(true)}
+                  >
+                    <Images className="h-3.5 w-3.5" />
+                    Pick Preview
+                  </Button>
+                )}
+                <Button
+                  variant="outline"
+                  size="sm"
+                  className="h-8 gap-1.5 text-xs"
+                  onClick={() => fileInputRef.current?.click()}
+                  disabled={uploading}
+                >
+                  {uploading ? (
+                    <Loader2 className="h-3.5 w-3.5 animate-spin" />
+                  ) : (
+                    <Upload className="h-3.5 w-3.5" />
+                  )}
+                  Upload Preview
+                </Button>
               </div>
             )}
           </div>
@@ -361,36 +502,45 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
                     depth={0}
                     search={search}
                     defaultOpen={true}
+                    highlightTerm={highlightTerm}
                   />
                 ))}
               </>
             ) : (
               <>
                 {/* Flat list for archives without folders */}
-                {filtered.map((file) => (
-                  <div
-                    key={file.id}
-                    className="flex items-center gap-3 rounded-md px-2 py-1.5 hover:bg-muted/50 transition-colors"
-                  >
-                    <FileText className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
-                    <div className="min-w-0 flex-1">
-                      <p className="text-sm truncate" title={file.path}>
-                        {file.fileName}
-                      </p>
+                {filtered.map((file) => {
+                  const isHighlighted = highlightTerm ? fileMatchesHighlight(file, highlightTerm) : false;
+                  return (
+                    <div
+                      key={file.id}
+                      className={cn(
+                        "flex items-center gap-3 rounded-md px-2 py-1.5 transition-colors",
+                        isHighlighted
+                          ? "bg-amber-500/15 hover:bg-amber-500/20"
+                          : "hover:bg-muted/50"
+                      )}
+                    >
+                      <FileText className="h-3.5 w-3.5 shrink-0 text-muted-foreground" />
+                      <div className="min-w-0 flex-1">
+                        <p className="text-sm truncate" title={file.path}>
+                          {file.fileName}
+                        </p>
+                      </div>
-                    </div>
-                    {file.extension && (
-                      <Badge
-                        variant="outline"
-                        className={`text-[10px] shrink-0 ${getExtBadgeClass(file.extension)}`}
-                      >
-                        .{file.extension}
-                      </Badge>
-                    )}
-                    <span className="text-[11px] text-muted-foreground shrink-0 tabular-nums">
-                      {formatBytes(file.uncompressedSize)}
-                    </span>
-                  </div>
+                      {file.extension && (
+                        <Badge
+                          variant="outline"
+                          className={`text-[10px] shrink-0 ${getExtBadgeClass(file.extension)}`}
+                        >
+                          .{file.extension}
+                        </Badge>
+                      )}
+                      <span className="text-[11px] text-muted-foreground shrink-0 tabular-nums">
+                        {formatBytes(file.uncompressedSize)}
+                      </span>
+                    </div>
-                ))}
+                  );
+                })}
               </>
             )}
 
@@ -416,6 +566,22 @@ export function PackageFilesDrawer({ pkg, open, onOpenChange }: PackageFilesDraw
           </div>
         </ScrollArea>
       </DialogContent>
     </Dialog>
+
+      {/* Archive preview picker modal — rendered as sibling to avoid nested Dialog issues */}
+      {pkg && pkg.archiveType !== "DOCUMENT" && !pkg.isMultipart && (
+        <ArchivePreviewPicker
+          packageId={pkg.id}
+          packageName={pkg.fileName}
+          open={showPreviewPicker}
+          onOpenChange={setShowPreviewPicker}
+          onPreviewSet={() => {
+            // Refresh the preview by setting a cache-busting URL
+            setLocalPreviewUrl(`/api/zips/${pkg.id}/preview?t=${Date.now()}`);
+          }}
+        />
+      )}
+    </>
   );
 }
135  src/app/(app)/stls/_components/skipped-columns.tsx  Normal file
@@ -0,0 +1,135 @@
"use client";

import { type ColumnDef } from "@tanstack/react-table";
import { DataTableColumnHeader } from "@/components/shared/data-table-column-header";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { RotateCw } from "lucide-react";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";

export interface SkippedRow {
  id: string;
  fileName: string;
  fileSize: string;
  reason: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED";
  errorMessage: string | null;
  sourceChannel: { id: string; title: string };
  isMultipart: boolean;
  partCount: number;
  createdAt: string;
}

function formatBytes(bytesStr: string): string {
  const bytes = Number(bytesStr);
  if (bytes === 0) return "0 B";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB", "TB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${parseFloat((bytes / Math.pow(k, i)).toFixed(1))} ${sizes[i]}`;
}

const REASON_LABELS: Record<SkippedRow["reason"], { label: string; variant: "default" | "destructive" | "outline" | "secondary" }> = {
  SIZE_LIMIT: { label: "Size Limit", variant: "secondary" },
  DOWNLOAD_FAILED: { label: "Download Failed", variant: "destructive" },
  EXTRACT_FAILED: { label: "Extract Failed", variant: "destructive" },
  UPLOAD_FAILED: { label: "Upload Failed", variant: "destructive" },
};

export function getSkippedColumns({
  onRetry,
}: {
  onRetry: (row: SkippedRow) => void;
}): ColumnDef<SkippedRow, unknown>[] {
  return [
    {
      accessorKey: "fileName",
      header: ({ column }) => <DataTableColumnHeader column={column} title="File Name" />,
      cell: ({ row }) => (
        <div className="flex items-center gap-2 min-w-0">
          <span className="font-medium truncate max-w-[300px]">{row.original.fileName}</span>
          {row.original.isMultipart && (
            <Badge variant="outline" className="text-[10px] shrink-0">
              {row.original.partCount} parts
            </Badge>
          )}
        </div>
      ),
      enableHiding: false,
    },
    {
      accessorKey: "fileSize",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Size" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {formatBytes(row.original.fileSize)}
        </span>
      ),
    },
    {
      accessorKey: "reason",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Reason" />,
      cell: ({ row }) => {
        const { label, variant } = REASON_LABELS[row.original.reason];
        return <Badge variant={variant} className="text-[10px]">{label}</Badge>;
      },
    },
    {
      accessorKey: "errorMessage",
      header: "Error",
      cell: ({ row }) => {
        const msg = row.original.errorMessage;
        if (!msg) return <span className="text-sm text-muted-foreground">{"\u2014"}</span>;
        return (
          <Tooltip>
            <TooltipTrigger asChild>
              <span className="text-sm text-muted-foreground truncate max-w-[200px] block cursor-help">
                {msg}
              </span>
            </TooltipTrigger>
            <TooltipContent className="max-w-sm">
              <p className="text-xs break-all">{msg}</p>
            </TooltipContent>
          </Tooltip>
        );
      },
    },
    {
      id: "channel",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Source" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground truncate max-w-[160px] block">
          {row.original.sourceChannel.title}
        </span>
      ),
      accessorFn: (row) => row.sourceChannel.title,
    },
    {
      accessorKey: "createdAt",
      header: ({ column }) => <DataTableColumnHeader column={column} title="Skipped" />,
      cell: ({ row }) => (
        <span className="text-sm text-muted-foreground">
          {new Date(row.original.createdAt).toLocaleDateString()}
        </span>
      ),
    },
    {
      id: "actions",
      cell: ({ row }) => (
        <Button
          variant="ghost"
          size="icon"
          className="h-8 w-8"
          onClick={() => onRetry(row.original)}
          title="Retry this package"
        >
          <RotateCw className="h-4 w-4" />
        </Button>
      ),
      enableHiding: false,
    },
  ];
}
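formatBytes here is the same helper defined in the package and archive-picker components, extended with "TB". It divides by powers of 1024 and trims to one decimal, so for example:

// formatBytes("1536")       -> "1.5 KB"  (1536 / 1024)
// formatBytes("1073741824") -> "1 GB"    (exactly 1024^3)
// formatBytes("0")          -> "0 B"     (guarded before Math.log(0) would return -Infinity)

The string parameter is presumably because the sizes originate as BigInt database columns, which cannot cross the server/client boundary as JSON numbers; that is an inference, not something this diff states.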
77  src/app/(app)/stls/_components/skipped-packages-tab.tsx  Normal file
@@ -0,0 +1,77 @@
"use client";

import { useTransition } from "react";
import { useRouter } from "next/navigation";
import { toast } from "sonner";
import { RotateCw } from "lucide-react";
import { useDataTable } from "@/hooks/use-data-table";
import { getSkippedColumns, type SkippedRow } from "./skipped-columns";
import { DataTable } from "@/components/shared/data-table";
import { DataTablePagination } from "@/components/shared/data-table-pagination";
import { Button } from "@/components/ui/button";
import { retrySkippedPackageAction, retryAllSkippedPackagesAction } from "../actions";

interface SkippedPackagesTabProps {
  data: SkippedRow[];
  pageCount: number;
  totalCount: number;
}

export function SkippedPackagesTab({
  data,
  pageCount,
  totalCount,
}: SkippedPackagesTabProps) {
  const router = useRouter();
  const [isPending, startTransition] = useTransition();

  const columns = getSkippedColumns({
    onRetry: (row) => {
      startTransition(async () => {
        const result = await retrySkippedPackageAction(row.id);
        if (result.success) {
          toast.success(`"${row.fileName}" queued for retry`);
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
  });

  const { table } = useDataTable({ data, columns, pageCount });

  return (
    <div className="space-y-4">
      {totalCount > 0 && (
        <div className="flex justify-end">
          <Button
            variant="outline"
            size="sm"
            className="gap-1.5"
            disabled={isPending}
            onClick={() => {
              startTransition(async () => {
                const result = await retryAllSkippedPackagesAction();
                if (result.success) {
                  toast.success(`All ${totalCount} skipped packages queued for retry`);
                  router.refresh();
                } else {
                  toast.error(result.error);
                }
              });
            }}
          >
            <RotateCw className="h-3.5 w-3.5" />
            Retry All ({totalCount})
          </Button>
        </div>
      )}
      <DataTable
        table={table}
        emptyMessage="No skipped or failed packages."
      />
      <DataTablePagination table={table} totalCount={totalCount} />
    </div>
  );
}
@@ -1,24 +1,66 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useCallback } from "react";
|
||||
import { useState, useCallback, useTransition, useMemo, useRef } from "react";
|
||||
import { useRouter, usePathname, useSearchParams } from "next/navigation";
|
||||
import { Search, FileBox } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import { Search, Layers } from "lucide-react";
|
||||
import { useDataTable } from "@/hooks/use-data-table";
|
||||
import { getPackageColumns, type PackageRow } from "./package-columns";
|
||||
import {
|
||||
getPackageColumns,
|
||||
type PackageRow,
|
||||
type StlTableRow,
|
||||
type PackageTableRow,
|
||||
type GroupHeaderRow,
|
||||
} from "./package-columns";
|
||||
import { PackageFilesDrawer } from "./package-files-drawer";
|
||||
import { IngestionStatus } from "./ingestion-status";
|
||||
import { SkippedPackagesTab } from "./skipped-packages-tab";
|
||||
import { DataTable } from "@/components/shared/data-table";
|
||||
import { DataTablePagination } from "@/components/shared/data-table-pagination";
|
||||
import { DataTableViewOptions } from "@/components/shared/data-table-view-options";
|
||||
import { PageHeader } from "@/components/shared/page-header";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import type { IngestionAccountStatus } from "@/lib/telegram/types";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import type { DisplayItem, IngestionAccountStatus } from "@/lib/telegram/types";
|
||||
import type { SkippedRow } from "./skipped-columns";
|
||||
import {
|
||||
updatePackageCreator,
|
||||
updatePackageTags,
|
||||
renameGroupAction,
|
||||
dissolveGroupAction,
|
||||
createGroupAction,
|
||||
removeFromGroupAction,
|
||||
sendAllInGroupAction,
|
||||
updateGroupPreviewAction,
|
||||
} from "../actions";
|
||||
|
||||
interface StlTableProps {
|
||||
data: PackageRow[];
|
||||
data: DisplayItem[];
|
||||
pageCount: number;
|
||||
totalCount: number;
|
||||
ingestionStatus: IngestionAccountStatus[];
|
||||
availableTags: string[];
|
||||
searchTerm: string;
|
||||
skippedData: SkippedRow[];
|
||||
skippedPageCount: number;
|
||||
skippedTotalCount: number;
|
||||
}
|
||||
|
||||
export function StlTable({
|
||||
@@ -26,6 +68,11 @@ export function StlTable({
|
||||
pageCount,
|
||||
totalCount,
|
||||
ingestionStatus,
|
||||
availableTags,
|
||||
searchTerm,
|
||||
skippedData,
|
||||
skippedPageCount,
|
||||
skippedTotalCount,
|
||||
}: StlTableProps) {
|
||||
const router = useRouter();
|
||||
const pathname = usePathname();
|
||||
@@ -33,6 +80,89 @@ export function StlTable({
|
||||
|
||||
const [searchValue, setSearchValue] = useState(searchParams.get("search") ?? "");
|
||||
const [viewPkg, setViewPkg] = useState<PackageRow | null>(null);
|
||||
const [, startTransition] = useTransition();
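  // isPending is intentionally discarded here; the transition just keeps the UI responsive while server actions run.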

  // Group expansion state
  const [expandedGroups, setExpandedGroups] = useState<Set<string>>(new Set());

  // Package selection state (for manual grouping)
  const [selectedPackages, setSelectedPackages] = useState<Set<string>>(new Set());

  // Create group dialog state
  const [createGroupOpen, setCreateGroupOpen] = useState(false);
  const [groupName, setGroupName] = useState("");

  // Group preview upload ref
  const previewInputRef = useRef<HTMLInputElement>(null);
  const [uploadGroupId, setUploadGroupId] = useState<string | null>(null);

  const toggleGroup = useCallback((groupId: string) => {
    setExpandedGroups((prev) => {
      const next = new Set(prev);
      if (next.has(groupId)) {
        next.delete(groupId);
      } else {
        next.add(groupId);
      }
      return next;
    });
  }, []);

  const toggleSelect = useCallback((packageId: string) => {
    setSelectedPackages((prev) => {
      const next = new Set(prev);
      if (next.has(packageId)) {
        next.delete(packageId);
      } else {
        next.add(packageId);
      }
      return next;
    });
  }, []);

  // Flatten DisplayItem[] into StlTableRow[] based on expansion state
  const tableRows: StlTableRow[] = useMemo(() => {
    const rows: StlTableRow[] = [];
    for (const item of data) {
      if (item.type === "package") {
        rows.push({
          ...item.data,
          _rowType: "package" as const,
          _groupId: null,
          _isGroupMember: false,
        });
      } else {
        const group = item.data;
        const isExpanded = expandedGroups.has(group.id);
        rows.push({
          _rowType: "group" as const,
          id: group.id,
          name: group.name,
          hasPreview: group.hasPreview,
          totalFileSize: group.totalFileSize,
          totalFileCount: group.totalFileCount,
          packageCount: group.packageCount,
          combinedTags: group.combinedTags,
          archiveTypes: group.archiveTypes,
          latestIndexedAt: group.latestIndexedAt,
          sourceChannel: group.sourceChannel,
          _expanded: isExpanded,
        });
        if (isExpanded) {
          for (const pkg of group.packages) {
            rows.push({
              ...pkg,
              _rowType: "package" as const,
              _groupId: group.id,
              _isGroupMember: true,
              packageGroupId: group.id,
            });
          }
        }
      }
    }
    return rows;
  }, [data, expandedGroups]);
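  // Note: the flattening above runs entirely client-side, so expanding or collapsing a group never refetches from the server.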

  const updateSearch = useCallback(
    (value: string) => {
@@ -49,11 +179,207 @@ export function StlTable({
    [router, pathname, searchParams]
  );

  const updateTagFilter = useCallback(
    (value: string) => {
      const params = new URLSearchParams(searchParams.toString());
      if (value && value !== "all") {
        params.set("tag", value);
        params.set("page", "1");
      } else {
        params.delete("tag");
      }
      router.push(`${pathname}?${params.toString()}`, { scroll: false });
    },
    [router, pathname, searchParams]
  );

  const activeTab = searchParams.get("tab") ?? "packages";
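  // Deriving the active tab from the URL keeps it shareable and stable across refreshes.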

  const updateTab = useCallback(
    (value: string) => {
      const params = new URLSearchParams(searchParams.toString());
      if (value === "packages") {
        params.delete("tab");
      } else {
        params.set("tab", value);
      }
      params.set("page", "1");
      router.push(`${pathname}?${params.toString()}`, { scroll: false });
    },
    [router, pathname, searchParams]
  );

  const handleRenameGroup = useCallback(
    (groupId: string, currentName: string) => {
      const value = prompt("Enter group name:", currentName);
      if (value === null || value.trim() === currentName) return;
      startTransition(async () => {
        const result = await renameGroupAction(groupId, value);
        if (result.success) {
          toast.success(`Group renamed to "${value.trim()}"`);
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleDissolveGroup = useCallback(
    (groupId: string) => {
      if (!confirm("Dissolve this group? Packages will become standalone items.")) return;
      startTransition(async () => {
        const result = await dissolveGroupAction(groupId);
        if (result.success) {
          toast.success("Group dissolved");
          setExpandedGroups((prev) => {
            const next = new Set(prev);
            next.delete(groupId);
            return next;
          });
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleSendAllInGroup = useCallback(
    (groupId: string) => {
      if (!confirm("Send all packages in this group to your Telegram?")) return;
      startTransition(async () => {
        const result = await sendAllInGroupAction(groupId);
        if (result.success) {
          toast.success("Group packages queued for sending");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleRemoveFromGroup = useCallback(
    (packageId: string) => {
      startTransition(async () => {
        const result = await removeFromGroupAction(packageId);
        if (result.success) {
          toast.success("Package removed from group");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    [router]
  );

  const handleCreateGroup = useCallback(() => {
    if (selectedPackages.size < 2) return;
    setGroupName("");
    setCreateGroupOpen(true);
  }, [selectedPackages.size]);

  const submitCreateGroup = useCallback(() => {
    if (!groupName.trim() || selectedPackages.size < 2) return;
    const ids = Array.from(selectedPackages);
    startTransition(async () => {
      const result = await createGroupAction(groupName, ids);
      if (result.success) {
        toast.success(`Group "${groupName.trim()}" created`);
        setSelectedPackages(new Set());
        setCreateGroupOpen(false);
        router.refresh();
      } else {
        toast.error(result.error);
      }
    });
  }, [groupName, selectedPackages, router]);

  // Group preview upload handler (Task 12)
  const handleGroupPreviewUpload = useCallback((groupId: string) => {
    setUploadGroupId(groupId);
    // Trigger file input after state update
    setTimeout(() => {
      previewInputRef.current?.click();
    }, 0);
  }, []);

  const handlePreviewFileChange = useCallback(
    (e: React.ChangeEvent<HTMLInputElement>) => {
      const file = e.target.files?.[0];
      if (!file || !uploadGroupId) return;

      const formData = new FormData();
      formData.append("file", file);

      startTransition(async () => {
        const result = await updateGroupPreviewAction(uploadGroupId, formData);
        if (result.success) {
          toast.success("Group preview updated");
          router.refresh();
        } else {
          toast.error(result.error);
        }
        setUploadGroupId(null);
      });

      // Reset input so the same file can be selected again
      e.target.value = "";
    },
    [uploadGroupId, router]
  );

  const columns = getPackageColumns({
    onViewFiles: (pkg) => setViewPkg(pkg),
    searchTerm,
    onSetCreator: (pkg) => {
      const value = prompt("Enter creator name:", pkg.creator ?? "");
      if (value === null) return;
      startTransition(async () => {
        const result = await updatePackageCreator(pkg.id, value || null);
        if (result.success) {
          toast.success(value ? `Creator set to "${value}"` : "Creator removed");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    onSetTags: (pkg) => {
      const value = prompt(
        "Enter tags (comma-separated):",
        pkg.tags.join(", ")
      );
      if (value === null) return;
      const tags = value.split(",").map((t) => t.trim()).filter(Boolean);
      startTransition(async () => {
        const result = await updatePackageTags(pkg.id, tags);
        if (result.success) {
          toast.success(tags.length > 0 ? `Tags updated` : "Tags removed");
          router.refresh();
        } else {
          toast.error(result.error);
        }
      });
    },
    onToggleGroup: toggleGroup,
    onRenameGroup: handleRenameGroup,
    onDissolveGroup: handleDissolveGroup,
    onSendAllInGroup: handleSendAllInGroup,
    onRemoveFromGroup: handleRemoveFromGroup,
    onGroupPreviewUpload: handleGroupPreviewUpload,
    selectedPackages,
    onToggleSelect: toggleSelect,
  });

  const { table } = useDataTable({ data, columns, pageCount });
  const { table } = useDataTable({ data: tableRows, columns, pageCount });

  const activeTag = searchParams.get("tag") ?? "";

  return (
    <div className="space-y-4">
@@ -64,24 +390,89 @@ export function StlTable({
        <IngestionStatus initialStatus={ingestionStatus} />
      </PageHeader>

      <div className="flex flex-wrap items-center gap-2">
        <div className="relative flex-1 min-w-[200px] max-w-sm">
          <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
          <Input
            placeholder="Search packages or files..."
            value={searchValue}
            onChange={(e) => updateSearch(e.target.value)}
            className="pl-9 h-9"
          />
        </div>
        <DataTableViewOptions table={table} />
      </div>
      <Tabs value={activeTab} onValueChange={updateTab}>
        <TabsList>
          <TabsTrigger value="packages">Packages</TabsTrigger>
          <TabsTrigger value="skipped" className="gap-1.5">
            Skipped / Failed
            {skippedTotalCount > 0 && (
              <Badge variant="secondary" className="text-[10px] ml-1">
                {skippedTotalCount}
              </Badge>
            )}
          </TabsTrigger>
        </TabsList>

        <DataTable
          table={table}
          emptyMessage="No packages found. Archives will appear here after ingestion."
        />
        <DataTablePagination table={table} totalCount={totalCount} />
        <TabsContent value="packages" className="space-y-4">
          <div className="flex flex-wrap items-center gap-2">
            <div className="relative flex-1 min-w-[200px] max-w-sm">
              <Search className="absolute left-2.5 top-2.5 h-4 w-4 text-muted-foreground" />
              <Input
                placeholder="Search packages or files..."
                value={searchValue}
                onChange={(e) => updateSearch(e.target.value)}
                className="pl-9 h-9"
              />
            </div>
            {availableTags.length > 0 && (
              <Select value={activeTag || "all"} onValueChange={updateTagFilter}>
                <SelectTrigger className="w-[160px] h-9">
                  <SelectValue placeholder="All Tags" />
                </SelectTrigger>
                <SelectContent>
                  <SelectItem value="all">All Tags</SelectItem>
                  {availableTags.map((tag) => (
                    <SelectItem key={tag} value={tag}>
                      {tag}
                    </SelectItem>
                  ))}
                </SelectContent>
              </Select>
            )}
            <DataTableViewOptions table={table} />
            {selectedPackages.size >= 2 && (
              <Button
                variant="outline"
                size="sm"
                className="h-9 gap-1.5"
                onClick={handleCreateGroup}
              >
                <Layers className="h-3.5 w-3.5" />
                Group {selectedPackages.size} Selected
              </Button>
            )}
            {selectedPackages.size > 0 && selectedPackages.size < 2 && (
              <span className="text-xs text-muted-foreground">
                Select at least 2 packages to group
              </span>
            )}
          </div>

          <DataTable
            table={table}
            emptyMessage="No packages found. Archives will appear here after ingestion."
            rowClassName={(row) => {
              const data = row.original as StlTableRow;
              if (data._rowType === "group") {
                return "bg-muted/30 border-border";
              }
              if (data._rowType === "package" && (data as PackageTableRow)._isGroupMember) {
                return "bg-muted/10";
              }
              return "";
            }}
          />
          <DataTablePagination table={table} totalCount={totalCount} />
        </TabsContent>

        <TabsContent value="skipped">
          <SkippedPackagesTab
            data={skippedData}
            pageCount={skippedPageCount}
            totalCount={skippedTotalCount}
          />
        </TabsContent>
      </Tabs>

      <PackageFilesDrawer
        pkg={viewPkg}
@@ -89,6 +480,48 @@ export function StlTable({
        onOpenChange={(open) => {
          if (!open) setViewPkg(null);
        }}
        highlightTerm={searchTerm}
      />

      {/* Create Group Dialog */}
      <Dialog open={createGroupOpen} onOpenChange={setCreateGroupOpen}>
        <DialogContent className="sm:max-w-md">
          <DialogHeader>
            <DialogTitle>Create Package Group</DialogTitle>
            <DialogDescription>
              Group {selectedPackages.size} selected packages together. Enter a name for the group.
            </DialogDescription>
          </DialogHeader>
          <div className="py-4">
            <Input
              placeholder="Group name..."
              value={groupName}
              onChange={(e) => setGroupName(e.target.value)}
              onKeyDown={(e) => {
                if (e.key === "Enter") submitCreateGroup();
              }}
              autoFocus
            />
          </div>
          <DialogFooter>
            <Button variant="outline" onClick={() => setCreateGroupOpen(false)}>
              Cancel
            </Button>
            <Button onClick={submitCreateGroup} disabled={!groupName.trim()}>
              <Layers className="h-4 w-4 mr-1" />
              Create Group
            </Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>

      {/* Hidden file input for group preview upload (Task 12) */}
      <input
        ref={previewInputRef}
        type="file"
        accept="image/jpeg,image/png,image/webp"
        className="hidden"
        onChange={handlePreviewFileChange}
      />
    </div>
  );
514  src/app/(app)/stls/actions.ts  Normal file
@@ -0,0 +1,514 @@
"use server";

import { auth } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import type { ActionResult } from "@/types/api.types";
import { revalidatePath } from "next/cache";
import {
  updatePackageGroupName,
  updatePackageGroupPreview,
  createManualGroup,
  removePackageFromGroup,
  dissolveGroup,
} from "@/lib/telegram/queries";

const ALLOWED_IMAGE_TYPES = [
  "image/jpeg",
  "image/png",
  "image/webp",
] as const;
const MAX_IMAGE_SIZE = 2 * 1024 * 1024; // 2 MB

export async function updatePackageCreator(
  packageId: string,
  creator: string | null
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await prisma.package.update({
      where: { id: packageId },
      data: { creator: creator?.trim() || null },
    });
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update creator" };
  }
}

export async function uploadPackagePreview(
  packageId: string,
  formData: FormData
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const file = formData.get("file");
  if (!(file instanceof File)) {
    return { success: false, error: "No file provided" };
  }

  if (!ALLOWED_IMAGE_TYPES.includes(file.type as (typeof ALLOWED_IMAGE_TYPES)[number])) {
    return { success: false, error: "Only JPG, PNG, and WebP images are accepted" };
  }

  if (file.size > MAX_IMAGE_SIZE) {
    return { success: false, error: "Image must be smaller than 2 MB" };
  }

  try {
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);

    await prisma.package.update({
      where: { id: packageId },
      data: {
        previewData: buffer,
        // Set previewMsgId to 0 as sentinel so hasPreview checks work
        previewMsgId: BigInt(0),
      },
    });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to upload preview image" };
  }
}

export async function updatePackageTags(
  packageId: string,
  tags: string[]
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const cleaned = tags.map((t) => t.trim()).filter(Boolean);
    // Deduplicate
    const unique = [...new Set(cleaned)];
    await prisma.package.update({
      where: { id: packageId },
      data: { tags: unique },
    });
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update tags" };
  }
}

export async function bulkSetTags(
  packageIds: string[],
  tags: string[]
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const cleaned = [...new Set(tags.map((t) => t.trim()).filter(Boolean))];
    await prisma.package.updateMany({
      where: { id: { in: packageIds } },
      data: { tags: cleaned },
    });
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update tags" };
  }
}

export async function bulkSetCreator(
  packageIds: string[],
  creator: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await prisma.package.updateMany({
      where: { id: { in: packageIds } },
      data: { creator: creator.trim() },
    });
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update creators" };
  }
}

/**
 * Set a package's preview from an extracted archive image.
 * Reads the image data from a completed ArchiveExtractRequest.
 */
export async function setPreviewFromExtract(
  packageId: string,
  extractRequestId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const extractReq = await prisma.archiveExtractRequest.findUnique({
      where: { id: extractRequestId },
      select: { status: true, imageData: true, packageId: true },
    });

    if (!extractReq) {
      return { success: false, error: "Extract request not found" };
    }

    if (extractReq.packageId !== packageId) {
      return { success: false, error: "Extract request does not belong to this package" };
    }

    if (extractReq.status !== "COMPLETED" || !extractReq.imageData) {
      return { success: false, error: "Image extraction not yet completed" };
    }

    await prisma.package.update({
      where: { id: packageId },
      data: {
        previewData: extractReq.imageData,
        // Set previewMsgId to 0 as sentinel so hasPreview checks work
        // (original Telegram-matched previews have the actual message ID)
        previewMsgId: BigInt(0),
      },
    });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to set preview from archive image" };
  }
}

export async function retrySkippedPackageAction(
  id: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const skipped = await prisma.skippedPackage.findUnique({
      where: { id },
    });
    if (!skipped) return { success: false, error: "Skipped package not found" };

    // Find the AccountChannelMap and reset watermark if needed
    const mapping = await prisma.accountChannelMap.findUnique({
      where: {
        accountId_channelId: {
          accountId: skipped.accountId,
          channelId: skipped.sourceChannelId,
        },
      },
    });

    if (mapping) {
      const targetId = skipped.sourceMessageId - BigInt(1);
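      // Rewinding the watermark to one message before the skipped one makes the next sync pick it up again.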

      // Only reset if the watermark is past this message
      if (mapping.lastProcessedMessageId && mapping.lastProcessedMessageId >= skipped.sourceMessageId) {
        await prisma.accountChannelMap.update({
          where: { id: mapping.id },
          data: { lastProcessedMessageId: targetId },
        });
      }

      // Also reset TopicProgress if this was a forum topic message
      if (skipped.sourceTopicId) {
        const topicProgress = await prisma.topicProgress.findFirst({
          where: {
            accountChannelMapId: mapping.id,
            topicId: skipped.sourceTopicId,
          },
        });
        if (topicProgress && topicProgress.lastProcessedMessageId && topicProgress.lastProcessedMessageId >= skipped.sourceMessageId) {
          await prisma.topicProgress.update({
            where: { id: topicProgress.id },
            data: { lastProcessedMessageId: targetId },
          });
        }
      }
    }

    // Delete the skip record
    await prisma.skippedPackage.delete({ where: { id } });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to retry skipped package" };
  }
}

export async function retryAllSkippedPackagesAction(
  reason?: "SIZE_LIMIT" | "DOWNLOAD_FAILED" | "EXTRACT_FAILED" | "UPLOAD_FAILED"
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const where: Record<string, unknown> = {};
    if (reason) where.reason = reason;
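    // With no reason given, every skipped package is retried.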

    const skippedItems = await prisma.skippedPackage.findMany({ where });

    if (skippedItems.length === 0) {
      return { success: true, data: undefined };
    }

    // Group by (accountId, channelId) to find minimum messageId per channel
    const channelResets = new Map<
      string,
      {
        mappingKey: { accountId: string; channelId: string };
        minMessageId: bigint;
        topicResets: Map<bigint, bigint>;
      }
    >();

    for (const item of skippedItems) {
      const key = `${item.accountId}:${item.sourceChannelId}`;
      const existing = channelResets.get(key);
      const targetId = item.sourceMessageId - BigInt(1);

      if (!existing) {
        const topicResets = new Map<bigint, bigint>();
        if (item.sourceTopicId) {
          topicResets.set(item.sourceTopicId, targetId);
        }
        channelResets.set(key, {
          mappingKey: { accountId: item.accountId, channelId: item.sourceChannelId },
          minMessageId: targetId,
          topicResets,
        });
      } else {
        if (targetId < existing.minMessageId) {
          existing.minMessageId = targetId;
        }
        if (item.sourceTopicId) {
          const existingTopic = existing.topicResets.get(item.sourceTopicId);
          if (!existingTopic || targetId < existingTopic) {
            existing.topicResets.set(item.sourceTopicId, targetId);
          }
        }
      }
    }

    // Reset watermarks
    for (const reset of channelResets.values()) {
      const mapping = await prisma.accountChannelMap.findUnique({
        where: { accountId_channelId: reset.mappingKey },
      });
      if (!mapping) continue;

      if (mapping.lastProcessedMessageId && mapping.lastProcessedMessageId > reset.minMessageId) {
        await prisma.accountChannelMap.update({
          where: { id: mapping.id },
          data: { lastProcessedMessageId: reset.minMessageId },
        });
      }

      // Reset topic progress
      for (const [topicId, targetId] of reset.topicResets) {
        const topicProgress = await prisma.topicProgress.findFirst({
          where: { accountChannelMapId: mapping.id, topicId },
        });
        if (topicProgress && topicProgress.lastProcessedMessageId && topicProgress.lastProcessedMessageId > targetId) {
          await prisma.topicProgress.update({
            where: { id: topicProgress.id },
            data: { lastProcessedMessageId: targetId },
          });
        }
      }
    }

    // Delete all matching skip records
    await prisma.skippedPackage.deleteMany({ where });

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to retry skipped packages" };
  }
}

export async function renameGroupAction(
  groupId: string,
  name: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  if (!name.trim()) {
    return { success: false, error: "Group name cannot be empty" };
  }

  try {
    await updatePackageGroupName(groupId, name);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to rename group" };
  }
}

export async function dissolveGroupAction(
  groupId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await dissolveGroup(groupId);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to dissolve group" };
  }
}

export async function createGroupAction(
  name: string,
  packageIds: string[]
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  if (!name.trim()) {
    return { success: false, error: "Group name cannot be empty" };
  }
  if (packageIds.length < 2) {
    return { success: false, error: "At least 2 packages are required to create a group" };
  }

  try {
    await createManualGroup(name, packageIds);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch (err) {
    const message = err instanceof Error ? err.message : "Failed to create group";
    return { success: false, error: message };
  }
}

export async function removeFromGroupAction(
  packageId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    await removePackageFromGroup(packageId);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to remove package from group" };
  }
}

export async function updateGroupPreviewAction(
  groupId: string,
  formData: FormData
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  const file = formData.get("file");
  if (!(file instanceof File)) {
    return { success: false, error: "No file provided" };
  }

  if (!ALLOWED_IMAGE_TYPES.includes(file.type as (typeof ALLOWED_IMAGE_TYPES)[number])) {
    return { success: false, error: "Only JPG, PNG, and WebP images are accepted" };
  }

  if (file.size > MAX_IMAGE_SIZE) {
    return { success: false, error: "Image must be smaller than 2 MB" };
  }

  try {
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    await updatePackageGroupPreview(groupId, buffer);
    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to upload group preview image" };
  }
}

export async function sendAllInGroupAction(
  groupId: string
): Promise<ActionResult> {
  const session = await auth();
  if (!session?.user?.id) return { success: false, error: "Unauthorized" };

  try {
    const telegramLink = await prisma.telegramLink.findUnique({
      where: { userId: session.user.id },
    });

    if (!telegramLink) {
      return { success: false, error: "No linked Telegram account. Link one in Settings." };
    }

    const group = await prisma.packageGroup.findUnique({
      where: { id: groupId },
      select: {
        packages: {
          select: { id: true, destChannelId: true, destMessageId: true, fileName: true },
        },
      },
    });

    if (!group) {
      return { success: false, error: "Group not found" };
    }

    const sendablePackages = group.packages.filter(
      (p) => p.destChannelId && p.destMessageId
    );
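    // Only packages already mirrored to the destination channel can be forwarded by the bot.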

    if (sendablePackages.length === 0) {
      return { success: false, error: "No packages in this group have been uploaded to a destination channel" };
    }

    let queued = 0;
    for (const pkg of sendablePackages) {
      // Only create if no existing PENDING/SENDING request for this package+link combo
      const existing = await prisma.botSendRequest.findFirst({
        where: {
          packageId: pkg.id,
          telegramLinkId: telegramLink.id,
          status: { in: ["PENDING", "SENDING"] },
        },
      });

      if (!existing) {
        const sendRequest = await prisma.botSendRequest.create({
          data: {
            packageId: pkg.id,
            telegramLinkId: telegramLink.id,
            requestedByUserId: session.user.id,
            status: "PENDING",
          },
        });

        // Notify the bot via pg_notify
        try {
          await prisma.$queryRawUnsafe(
            `SELECT pg_notify('bot_send', $1)`,
            sendRequest.id
          );
        } catch {
          // Best-effort — the bot also polls periodically
        }

        queued++;
      }
    }

    revalidatePath("/stls");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to send group packages" };
  }
}
@@ -1,7 +1,8 @@
import { auth } from "@/lib/auth";
import { redirect } from "next/navigation";
import { listPackages, searchPackages, getIngestionStatus } from "@/lib/telegram/queries";
import { listDisplayItems, searchPackages, getIngestionStatus, getAllPackageTags, listSkippedPackages, countSkippedPackages } from "@/lib/telegram/queries";
import { StlTable } from "./_components/stl-table";
import type { DisplayItem, PackageListItem } from "@/lib/telegram/types";

interface Props {
  searchParams: Promise<Record<string, string | string[] | undefined>>;
@@ -19,9 +20,11 @@ export default async function StlFilesPage({ searchParams }: Props) {
  const order = (params.order as "asc" | "desc") ?? "desc";
  const search = (params.search as string) ?? "";
  const creator = (params.creator as string) || undefined;
  const tag = (params.tag as string) || undefined;
  const tab = (params.tab as string) ?? "packages";

  // Fetch packages and ingestion status in parallel
  const [result, ingestionStatus] = await Promise.all([
  // Fetch packages, ingestion status, tags, and skipped count in parallel
  const [result, ingestionStatus, availableTags, skippedCount] = await Promise.all([
    search
      ? searchPackages({
          query: search,
@@ -29,22 +32,40 @@ export default async function StlFilesPage({ searchParams }: Props) {
          limit: perPage,
          searchIn: "both",
        })
      : listPackages({
      : listDisplayItems({
          page,
          limit: perPage,
          creator,
          tag,
          sortBy: sort as "indexedAt" | "fileName" | "fileSize",
          order,
        }),
    getIngestionStatus(),
    getAllPackageTags(),
    countSkippedPackages(),
  ]);

  // For search results, wrap as DisplayItem[]; for non-search, already DisplayItem[]
  const displayItems: DisplayItem[] = search
    ? (result as { items: PackageListItem[] }).items.map((item) => ({ type: "package" as const, data: item }))
    : (result as { items: DisplayItem[] }).items;

  // Fetch skipped packages only if on that tab
  const skippedResult = tab === "skipped"
    ? await listSkippedPackages({ page, limit: perPage })
    : null;

  return (
    <StlTable
      data={result.items}
      data={displayItems}
      pageCount={result.pagination.totalPages}
      totalCount={result.pagination.total}
      ingestionStatus={ingestionStatus}
      availableTags={availableTags}
      searchTerm={search}
      skippedData={skippedResult?.items ?? []}
      skippedPageCount={skippedResult?.pagination.totalPages ?? 0}
      skippedTotalCount={skippedCount}
    />
  );
}

@@ -7,6 +7,8 @@ import {
  Power,
  ArrowDownToLine,
  ArrowUpFromLine,
  RefreshCcw,
  Tag,
} from "lucide-react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -23,12 +25,16 @@ interface ChannelColumnsProps {
  onToggleActive: (id: string) => void;
  onDelete: (id: string) => void;
  onSetType: (id: string, type: "SOURCE" | "DESTINATION") => void;
  onRescan: (id: string) => void;
  onSetCategory: (id: string, category: string | null) => void;
}

export function getChannelColumns({
  onToggleActive,
  onDelete,
  onSetType,
  onRescan,
  onSetCategory,
}: ChannelColumnsProps): ColumnDef<ChannelRow, unknown>[] {
  return [
    {
@@ -60,6 +66,18 @@ export function getChannelColumns({
        </Badge>
      ),
    },
    {
      accessorKey: "category",
      header: "Category",
      cell: ({ row }) => {
        const category = row.original.category;
        return category ? (
          <Badge variant="outline">{category}</Badge>
        ) : (
          <span className="text-xs text-muted-foreground">—</span>
        );
      },
    },
    {
      accessorKey: "isActive",
      header: "Status",
@@ -121,6 +139,23 @@ export function getChannelColumns({
          Set as Source
        </DropdownMenuItem>
      )}
      {row.original.type === "SOURCE" && (
        <DropdownMenuItem
          onClick={() => onRescan(row.original.id)}
        >
          <RefreshCcw className="mr-2 h-3.5 w-3.5" />
          Rescan Channel
        </DropdownMenuItem>
      )}
      <DropdownMenuItem
        onClick={() => {
          const cat = prompt("Enter category (e.g. STL, PDF, D&D, Cosplay):", row.original.category ?? "");
          if (cat !== null) onSetCategory(row.original.id, cat || null);
        }}
      >
        <Tag className="mr-2 h-3.5 w-3.5" />
        Set Category
      </DropdownMenuItem>
      <DropdownMenuItem
        onClick={() => onToggleActive(row.original.id)}
      >

@@ -21,7 +21,7 @@ import { ScrollArea } from "@/components/ui/scroll-area";
interface FetchedChannel {
  chatId: string;
  title: string;
  type: "channel" | "supergroup";
  type: string;
  isForum: boolean;
  memberCount: number | null;
  alreadyLinked: boolean;

@@ -2,26 +2,39 @@

import { useState, useTransition } from "react";
import { toast } from "sonner";
import { Download, Plus } from "lucide-react";
import { getChannelColumns } from "./channel-columns";
import { DestinationCard } from "./destination-card";
import { ChannelPickerDialog } from "./channel-picker-dialog";
import { JoinChannelDialog } from "./join-channel-dialog";
import {
  deleteChannel,
  toggleChannelActive,
  setChannelType,
  setChannelCategory,
  rescanChannel,
} from "../actions";
import { DataTable } from "@/components/shared/data-table";
import { DeleteDialog } from "@/components/shared/delete-dialog";
import type { ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
import { Button } from "@/components/ui/button";
import type { AccountRow, ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
import { useDataTable } from "@/hooks/use-data-table";

interface ChannelsTabProps {
  channels: ChannelRow[];
  globalDestination: GlobalDestination;
  accounts: AccountRow[];
}

export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
export function ChannelsTab({ channels, globalDestination, accounts }: ChannelsTabProps) {
  const [isPending, startTransition] = useTransition();
  const [deleteId, setDeleteId] = useState<string | null>(null);
  const [rescanId, setRescanId] = useState<string | null>(null);
  const [fetchChannelsAccountId, setFetchChannelsAccountId] = useState<string | null>(null);
  const [joinDialogOpen, setJoinDialogOpen] = useState(false);

  // Find the first authenticated account for "Fetch Channels"
  const authenticatedAccounts = accounts.filter((a) => a.authState === "AUTHENTICATED" && a.isActive);

  const columns = getChannelColumns({
    onToggleActive: (id) => {
@@ -39,6 +52,14 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
        else toast.error(result.error);
      });
    },
    onRescan: (id) => setRescanId(id),
    onSetCategory: (id, category) => {
      startTransition(async () => {
        const result = await setChannelCategory(id, category);
        if (result.success) toast.success(category ? `Category set to "${category}"` : "Category removed");
        else toast.error(result.error);
      });
    },
  });

  const { table } = useDataTable({
@@ -60,19 +81,59 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
    });
  };

  const handleRescan = () => {
    if (!rescanId) return;
    startTransition(async () => {
      const result = await rescanChannel(rescanId);
      if (result.success) {
        toast.success("Channel scan progress reset — it will be fully rescanned on the next sync");
        setRescanId(null);
      } else {
        toast.error(result.error);
      }
    });
  };

  const handleFetchChannels = () => {
    if (authenticatedAccounts.length > 0) {
      setFetchChannelsAccountId(authenticatedAccounts[0].id);
    } else {
      toast.error("No authenticated accounts available. Add and authenticate an account first.");
    }
  };

  return (
    <div className="space-y-4">
      <DestinationCard destination={globalDestination} />
      <DestinationCard destination={globalDestination} channels={channels} />

      <div className="flex items-center gap-2">
        <Button
          variant="outline"
          onClick={handleFetchChannels}
          disabled={authenticatedAccounts.length === 0}
        >
          <Download className="mr-2 h-4 w-4" />
          Fetch Channels
        </Button>
        <Button
          variant="outline"
          onClick={() => setJoinDialogOpen(true)}
          disabled={authenticatedAccounts.length === 0}
        >
          <Plus className="mr-2 h-4 w-4" />
          Add Channel
        </Button>
      </div>

      {channels.length > 0 && (
        <p className="text-xs text-muted-foreground">
          Source channels are added per-account via the "Fetch Channels" button on the Accounts tab.
          Channels discovered via "Fetch Channels" are automatically activated as sources.
        </p>
      )}
      <DataTable
        table={table}
        emptyMessage='No channels yet. Use "Fetch Channels" on an account to discover and add source channels.'
        emptyMessage='No channels yet. Click "Fetch Channels" above to discover and add source channels.'
      />

      <DeleteDialog
@@ -83,6 +144,29 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
        onConfirm={handleDelete}
        isLoading={isPending}
      />

      <DeleteDialog
        open={!!rescanId}
        onOpenChange={(open) => !open && setRescanId(null)}
        title="Rescan Channel"
        description="This will reset all scan progress for this channel. On the next sync the worker will re-process every message from the beginning. Packages that are already in the library will be skipped (deduplication by hash), but any missing files will be re-downloaded and re-uploaded. This may take a long time for large channels."
        confirmLabel="Rescan"
        onConfirm={handleRescan}
        isLoading={isPending}
      />

      <ChannelPickerDialog
        accountId={fetchChannelsAccountId}
        open={!!fetchChannelsAccountId}
        onOpenChange={(open) => {
          if (!open) setFetchChannelsAccountId(null);
        }}
      />

      <JoinChannelDialog
        open={joinDialogOpen}
        onOpenChange={setJoinDialogOpen}
      />
    </div>
  );
}

@@ -1,9 +1,21 @@
"use client";

import { useState, useEffect, useTransition } from "react";
import { Database, AlertTriangle, Link2, Plus, Loader2 } from "lucide-react";
import {
  Database,
  AlertTriangle,
  Link2,
  Plus,
  Loader2,
  ArrowRight,
  RefreshCw,
} from "lucide-react";
import { toast } from "sonner";
import { createDestinationViaWorker } from "../actions";
import {
  createDestinationViaWorker,
  setGlobalDestination,
  rebuildPackageDatabase,
} from "../actions";
import { Card, CardContent } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -17,10 +29,19 @@ import {
  DialogTitle,
  DialogFooter,
} from "@/components/ui/dialog";
import type { GlobalDestination } from "@/lib/telegram/admin-queries";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import type { GlobalDestination, ChannelRow } from "@/lib/telegram/admin-queries";

interface DestinationCardProps {
  destination: GlobalDestination;
  channels?: ChannelRow[];
}

type CreateState =
@@ -29,11 +50,34 @@ type CreateState =
  | { phase: "done"; title: string; telegramId: string }
  | { phase: "error"; message: string };

export function DestinationCard({ destination }: DestinationCardProps) {
type RebuildState =
  | { phase: "idle" }
  | { phase: "running"; requestId: string }
  | { phase: "done"; created: number; skipped: number; scanned: number }
  | { phase: "error"; message: string };

interface RebuildProgress {
  status: string;
  messagesScanned: number;
  documentsFound: number;
  packagesCreated: number;
  packagesSkipped: number;
  error?: string;
}

export function DestinationCard({ destination, channels = [] }: DestinationCardProps) {
  const [isPending, startTransition] = useTransition();
  const [createOpen, setCreateOpen] = useState(false);
  const [title, setTitle] = useState("dragonsstash db");
  const [createState, setCreateState] = useState<CreateState>({ phase: "idle" });
  const [selectedChannelId, setSelectedChannelId] = useState<string>("");
  const [rebuildState, setRebuildState] = useState<RebuildState>({ phase: "idle" });
  const [rebuildProgress, setRebuildProgress] = useState<RebuildProgress | null>(null);

  // Channels that can be assigned as destination (SOURCE channels only, exclude current destination)
  const assignableChannels = channels.filter(
    (c) => c.type === "SOURCE" && c.id !== destination?.id
  );

  // Poll for worker result when creating
  useEffect(() => {
@@ -90,6 +134,86 @@ export function DestinationCard({ destination }: DestinationCardProps) {
    return () => { mounted = false; };
  }, [createState]);

  // Poll for rebuild progress
  useEffect(() => {
    if (rebuildState.phase !== "running") return;

    let mounted = true;
    const requestId = rebuildState.requestId;

    const poll = async () => {
      for (let i = 0; i < 300; i++) {
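        // 300 iterations with a 2-second sleep: give up after roughly ten minutes.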
        await new Promise((r) => setTimeout(r, 2000));
        if (!mounted) return;

        try {
          const res = await fetch(
            `/api/telegram/worker-request?requestId=${requestId}`
          );
          if (!res.ok) continue;

          const data = await res.json();

          // Update live progress from resultJson
          if (data.result && typeof data.result === "object") {
            if (mounted) setRebuildProgress(data.result as RebuildProgress);
          }

          if (data.status === "COMPLETED" && data.result) {
            const result = data.result as RebuildProgress;
            if (mounted) {
              setRebuildState({
                phase: "done",
                created: result.packagesCreated,
                skipped: result.packagesSkipped,
                scanned: result.messagesScanned,
              });
              setRebuildProgress(null);
              toast.success(
                `Rebuild complete: ${result.packagesCreated} packages restored, ${result.packagesSkipped} skipped`
              );
            }
            return;
          } else if (data.status === "FAILED") {
            if (mounted) {
              setRebuildState({
                phase: "error",
                message: data.error || "Rebuild failed",
              });
              setRebuildProgress(null);
            }
            return;
          }
        } catch {
          // Network blip — keep polling
        }
      }

      if (mounted) {
        setRebuildState({ phase: "error", message: "Timed out waiting for rebuild" });
        setRebuildProgress(null);
      }
    };

    poll();
    return () => {
      mounted = false;
    };
  }, [rebuildState]);

  const handleRebuild = () => {
    startTransition(async () => {
      const result = await rebuildPackageDatabase();
      if (result.success) {
        setRebuildState({ phase: "running", requestId: result.data.requestId });
        setRebuildProgress(null);
        toast.info("Rebuild started — scanning destination channel...");
      } else {
        toast.error(result.error ?? "Failed to start rebuild");
      }
    });
  };

  const handleCreate = () => {
    if (!title.trim()) return;

@@ -103,6 +227,21 @@ export function DestinationCard({ destination }: DestinationCardProps) {
    });
  };

  const handleAssignExisting = () => {
    if (!selectedChannelId) return;

    startTransition(async () => {
      const result = await setGlobalDestination(selectedChannelId);
      if (result.success) {
        toast.success("Channel set as destination!");
        setCreateOpen(false);
        setSelectedChannelId("");
      } else {
        toast.error(result.error ?? "Failed to set destination");
      }
    });
  };

  const handleOpenChange = (open: boolean) => {
    setCreateOpen(open);
    if (!open) {
@@ -110,6 +249,7 @@ export function DestinationCard({ destination }: DestinationCardProps) {
      if (createState.phase !== "creating") {
        setCreateState({ phase: "idle" });
      }
      setSelectedChannelId("");
    }
  };

@@ -132,19 +272,23 @@ export function DestinationCard({ destination }: DestinationCardProps) {
          </div>
          <Button size="sm" onClick={() => setCreateOpen(true)}>
            <Plus className="mr-2 h-3.5 w-3.5" />
            Create Destination
            Set Destination
          </Button>
        </CardContent>
      </Card>

      <CreateDestinationDialog
      <DestinationDialog
        open={createOpen}
        onOpenChange={handleOpenChange}
        title={title}
        setTitle={setTitle}
        onSubmit={handleCreate}
        onSubmitCreate={handleCreate}
        createState={createState}
        isPending={isPending}
        assignableChannels={assignableChannels}
        selectedChannelId={selectedChannelId}
        setSelectedChannelId={setSelectedChannelId}
        onSubmitAssign={handleAssignExisting}
      />
    </>
  );
@@ -153,80 +297,171 @@ export function DestinationCard({ destination }: DestinationCardProps) {
  return (
    <>
      <Card>
        <CardContent className="flex items-center justify-between gap-4 py-4">
          <div className="flex items-center gap-3">
            <Database className="h-5 w-5 text-purple-500 shrink-0" />
            <div>
              <div className="flex items-center gap-2">
                <p className="text-sm font-medium">{destination.title}</p>
                <Badge
                  variant="outline"
                  className="bg-purple-500/10 text-purple-600 border-purple-500/20 text-[10px]"
                >
                  DESTINATION
                </Badge>
              </div>
              <div className="flex items-center gap-3 text-xs text-muted-foreground">
                <span>ID: {destination.telegramId}</span>
                {destination.inviteLink && (
                  <span className="flex items-center gap-1">
                    <Link2 className="h-3 w-3" />
                    Invite link active
                  </span>
                )}
        <CardContent className="py-4 space-y-3">
          <div className="flex items-center justify-between gap-4">
            <div className="flex items-center gap-3">
              <Database className="h-5 w-5 text-purple-500 shrink-0" />
              <div>
                <div className="flex items-center gap-2">
                  <p className="text-sm font-medium">{destination.title}</p>
                  <Badge
                    variant="outline"
                    className="bg-purple-500/10 text-purple-600 border-purple-500/20 text-[10px]"
                  >
                    DESTINATION
                  </Badge>
                </div>
                <div className="flex items-center gap-3 text-xs text-muted-foreground">
                  <span>ID: {destination.telegramId}</span>
                  {destination.inviteLink && (
                    <span className="flex items-center gap-1">
                      <Link2 className="h-3 w-3" />
                      Invite link active
                    </span>
                  )}
                </div>
              </div>
            </div>
            <div className="flex items-center gap-2">
              <Button
                variant="outline"
                size="sm"
                onClick={handleRebuild}
                disabled={isPending || rebuildState.phase === "running"}
                title="Scan destination channel and rebuild the package database"
              >
                {rebuildState.phase === "running" ? (
                  <Loader2 className="h-3.5 w-3.5 animate-spin mr-1.5" />
                ) : (
                  <RefreshCw className="h-3.5 w-3.5 mr-1.5" />
                )}
                Rebuild DB
              </Button>
              <Button
                variant="outline"
                size="sm"
                onClick={() => setCreateOpen(true)}
              >
                Change
              </Button>
            </div>
          </div>
          <Button
            variant="outline"
            size="sm"
            onClick={() => setCreateOpen(true)}
          >
            Change
          </Button>

          {/* Rebuild progress */}
          {rebuildState.phase === "running" && rebuildProgress && (
            <div className="border-t pt-3">
              <div className="flex items-center gap-2">
                <Loader2 className="h-3.5 w-3.5 animate-spin text-primary shrink-0" />
                <span className="text-xs text-muted-foreground">
                  Rebuilding package database...
                </span>
              </div>
              <div className="flex items-center gap-4 pl-6 mt-1 text-xs text-muted-foreground">
                <span>
                  <span className="text-foreground tabular-nums">
                    {rebuildProgress.messagesScanned}
                  </span>{" "}
                  messages scanned
                </span>
                <span>
                  <span className="text-foreground tabular-nums">
                    {rebuildProgress.documentsFound}
                  </span>{" "}
                  archives found
                </span>
                <span>
                  <span className="text-foreground tabular-nums">
                    {rebuildProgress.packagesCreated}
                  </span>{" "}
                  restored
                </span>
                <span>
                  <span className="text-foreground tabular-nums">
                    {rebuildProgress.packagesSkipped}
                  </span>{" "}
                  skipped
                </span>
              </div>
            </div>
          )}

          {rebuildState.phase === "done" && (
            <div className="border-t pt-3">
              <div className="flex items-center gap-2 text-xs text-emerald-500">
                <Database className="h-3.5 w-3.5 shrink-0" />
                <span>
                  Rebuild complete: {rebuildState.created} packages restored,{" "}
                  {rebuildState.skipped} skipped ({rebuildState.scanned} messages
                  scanned)
                </span>
              </div>
            </div>
          )}

          {rebuildState.phase === "error" && (
            <div className="border-t pt-3">
              <div className="flex items-center gap-2 text-xs text-red-500">
                <AlertTriangle className="h-3.5 w-3.5 shrink-0" />
                <span>Rebuild failed: {rebuildState.message}</span>
              </div>
            </div>
          )}
        </CardContent>
      </Card>

      <CreateDestinationDialog
      <DestinationDialog
        open={createOpen}
        onOpenChange={handleOpenChange}
        title={title}
        setTitle={setTitle}
        onSubmit={handleCreate}
        onSubmitCreate={handleCreate}
        createState={createState}
        isPending={isPending}
        assignableChannels={assignableChannels}
        selectedChannelId={selectedChannelId}
        setSelectedChannelId={setSelectedChannelId}
        onSubmitAssign={handleAssignExisting}
      />
    </>
  );
}

function CreateDestinationDialog({
function DestinationDialog({
  open,
  onOpenChange,
  title,
  setTitle,
  onSubmit,
  onSubmitCreate,
  createState,
  isPending,
  assignableChannels,
  selectedChannelId,
  setSelectedChannelId,
  onSubmitAssign,
}: {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  title: string;
  setTitle: (v: string) => void;
  onSubmit: () => void;
  onSubmitCreate: () => void;
  createState: CreateState;
  isPending: boolean;
  assignableChannels: ChannelRow[];
  selectedChannelId: string;
  setSelectedChannelId: (v: string) => void;
  onSubmitAssign: () => void;
}) {
  const isCreating = createState.phase === "creating";
  const hasAssignable = assignableChannels.length > 0;
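  // hasAssignable drives the default tab below and whether "Use Existing" is selectable.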
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="sm:max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Create Destination Channel</DialogTitle>
|
||||
<DialogTitle>Set Destination Channel</DialogTitle>
|
||||
<DialogDescription>
|
||||
A private Telegram group will be created automatically using one of
|
||||
your authenticated accounts. All accounts will write archives here.
|
||||
Choose an existing channel or create a new private group. All
|
||||
accounts will write archives to this destination.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
@@ -241,46 +476,111 @@ function CreateDestinationDialog({
          </p>
        </div>
      ) : (
        <div className="space-y-4">
          {createState.phase === "error" && (
            <div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
              <p className="text-sm text-destructive">{createState.message}</p>
        <Tabs defaultValue={hasAssignable ? "existing" : "create"} className="w-full">
          <TabsList className="grid w-full grid-cols-2">
            <TabsTrigger value="existing" disabled={!hasAssignable}>
              <ArrowRight className="mr-1.5 h-3.5 w-3.5" />
              Use Existing
            </TabsTrigger>
            <TabsTrigger value="create">
              <Plus className="mr-1.5 h-3.5 w-3.5" />
              Create New
            </TabsTrigger>
          </TabsList>

          <TabsContent value="existing" className="space-y-4 pt-2">
            {createState.phase === "error" && (
              <div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
                <p className="text-sm text-destructive">{createState.message}</p>
              </div>
            )}

            <div className="space-y-2">
              <Label>Select Channel</Label>
              <Select
                value={selectedChannelId}
                onValueChange={setSelectedChannelId}
              >
                <SelectTrigger>
                  <SelectValue placeholder="Pick a channel..." />
                </SelectTrigger>
                <SelectContent>
                  {assignableChannels.map((ch) => (
                    <SelectItem key={ch.id} value={ch.id}>
                      {ch.title}{" "}
                      <span className="text-muted-foreground text-xs">
                        ({ch.telegramId})
                      </span>
                    </SelectItem>
                  ))}
                </SelectContent>
              </Select>
              <p className="text-xs text-muted-foreground">
                The selected channel will become the destination. All accounts
                will be linked as writers automatically.
              </p>
            </div>
          )}

          <div className="space-y-2">
            <Label htmlFor="dest-title">Group Name</Label>
            <Input
              id="dest-title"
              placeholder="e.g. dragonsstash db"
              value={title}
              onChange={(e) => setTitle(e.target.value)}
            />
            <p className="text-xs text-muted-foreground">
              This will be the name of the Telegram group. You can rename it later in Telegram.
            </p>
          </div>
        </div>
            <DialogFooter>
              <Button
                variant="outline"
                onClick={() => onOpenChange(false)}
              >
                Cancel
              </Button>
              <Button
                onClick={onSubmitAssign}
                disabled={isPending || !selectedChannelId}
              >
                {isPending && (
                  <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                )}
                Set as Destination
              </Button>
            </DialogFooter>
          </TabsContent>

          <TabsContent value="create" className="space-y-4 pt-2">
            {createState.phase === "error" && (
              <div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
                <p className="text-sm text-destructive">{createState.message}</p>
              </div>
            )}

            <div className="space-y-2">
              <Label htmlFor="dest-title">Group Name</Label>
              <Input
                id="dest-title"
                placeholder="e.g. dragonsstash db"
                value={title}
                onChange={(e) => setTitle(e.target.value)}
              />
              <p className="text-xs text-muted-foreground">
                A new private Telegram group will be created using one of your
                authenticated accounts. You can rename it later in Telegram.
              </p>
            </div>

            <DialogFooter>
              <Button
                variant="outline"
                onClick={() => onOpenChange(false)}
              >
                Cancel
              </Button>
              <Button
                onClick={onSubmitCreate}
                disabled={isPending || !title.trim()}
              >
                {isPending && (
                  <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                )}
                Create Group
              </Button>
            </DialogFooter>
          </TabsContent>
        </Tabs>
      )}

      <DialogFooter>
        <Button
          variant="outline"
          onClick={() => onOpenChange(false)}
          disabled={isCreating}
        >
          Cancel
        </Button>
        <Button
          onClick={onSubmit}
          disabled={isPending || isCreating || !title.trim()}
        >
          {(isPending || isCreating) && (
            <Loader2 className="mr-2 h-4 w-4 animate-spin" />
          )}
          Create Group
        </Button>
      </DialogFooter>
    </DialogContent>
  </Dialog>
);

src/app/(app)/telegram/_components/join-channel-dialog.tsx (new file, 179 lines)
@@ -0,0 +1,179 @@
"use client";
|
||||
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { Loader2, Link as LinkIcon } from "lucide-react";
|
||||
import { toast } from "sonner";
|
||||
import { joinChannelByLink } from "../actions";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
DialogFooter,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
|
||||
interface JoinChannelDialogProps {
|
||||
open: boolean;
|
||||
onOpenChange: (open: boolean) => void;
|
||||
}
|
||||
|
||||
type JoinState =
|
||||
| { phase: "idle" }
|
||||
| { phase: "submitting"; requestId?: string }
|
||||
| { phase: "success"; title: string }
|
||||
| { phase: "error"; message: string };
|
||||
|
||||
export function JoinChannelDialog({
|
||||
open,
|
||||
onOpenChange,
|
||||
}: JoinChannelDialogProps) {
|
||||
const [input, setInput] = useState("");
|
||||
const [joinState, setJoinState] = useState<JoinState>({ phase: "idle" });
|
||||
|
||||
// Reset on close
|
||||
useEffect(() => {
|
||||
if (!open) {
|
||||
setInput("");
|
||||
setJoinState({ phase: "idle" });
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
const pollForResult = useCallback(async (requestId: string) => {
|
||||
for (let i = 0; i < 30; i++) {
|
||||
await new Promise((r) => setTimeout(r, 2000));
|
||||
|
||||
try {
|
||||
const res = await fetch(
|
||||
`/api/telegram/worker-request?requestId=${requestId}`
|
||||
);
|
||||
if (!res.ok) continue;
|
||||
|
||||
const data = await res.json();
|
||||
if (data.status === "COMPLETED") {
|
||||
const result = data.result;
|
||||
setJoinState({
|
||||
phase: "success",
|
||||
title: result?.title ?? "Unknown channel",
|
||||
});
|
||||
toast.success(`Channel "${result?.title}" added as source`);
|
||||
// Auto-close after short delay
|
||||
setTimeout(() => onOpenChange(false), 1500);
|
||||
return;
|
||||
} else if (data.status === "FAILED") {
|
||||
setJoinState({
|
||||
phase: "error",
|
||||
message: data.error || "Failed to join channel",
|
||||
});
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// Network error, keep polling
|
||||
}
|
||||
}
|
||||
|
||||
setJoinState({
|
||||
phase: "error",
|
||||
message: "Request timed out. The worker may be busy -- try again later.",
|
||||
});
|
||||
}, [onOpenChange]);
|
||||
|
||||
const handleSubmit = async () => {
|
||||
if (!input.trim()) return;
|
||||
|
||||
setJoinState({ phase: "submitting" });
|
||||
|
||||
try {
|
||||
const result = await joinChannelByLink(input);
|
||||
if (!result.success) {
|
||||
setJoinState({ phase: "error", message: result.error ?? "Unknown error" });
|
||||
return;
|
||||
}
|
||||
|
||||
const requestId = result.data!.requestId;
|
||||
setJoinState({ phase: "submitting", requestId });
|
||||
await pollForResult(requestId);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : "Network error";
|
||||
setJoinState({ phase: "error", message });
|
||||
}
|
||||
};
|
||||
|
||||
const isSubmitting = joinState.phase === "submitting";
|
||||
|
||||
return (
|
||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||
<DialogContent className="sm:max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Add Channel</DialogTitle>
|
||||
<DialogDescription>
|
||||
Join a Telegram channel or group by link, username, or invite link.
|
||||
The channel will be added as an active source.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="space-y-4 py-2">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="channel-input">Channel link or username</Label>
|
||||
<Input
|
||||
id="channel-input"
|
||||
placeholder="@channel, t.me/channel, or t.me/+invite"
|
||||
value={input}
|
||||
onChange={(e) => setInput(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && !isSubmitting && input.trim()) {
|
||||
handleSubmit();
|
||||
}
|
||||
}}
|
||||
disabled={isSubmitting}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Supported formats: @username, https://t.me/username, https://t.me/+invitecode
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{joinState.phase === "submitting" && (
|
||||
<div className="flex items-center gap-2 text-sm text-muted-foreground">
|
||||
<Loader2 className="h-4 w-4 animate-spin" />
|
||||
{joinState.requestId
|
||||
? "Joining channel via worker..."
|
||||
: "Sending request..."}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{joinState.phase === "error" && (
|
||||
<p className="text-sm text-destructive">{joinState.message}</p>
|
||||
)}
|
||||
|
||||
{joinState.phase === "success" && (
|
||||
<p className="text-sm text-emerald-600">
|
||||
Successfully added "{joinState.title}"
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button variant="outline" onClick={() => onOpenChange(false)}>
|
||||
{joinState.phase === "success" ? "Close" : "Cancel"}
|
||||
</Button>
|
||||
{joinState.phase !== "success" && (
|
||||
<Button
|
||||
onClick={handleSubmit}
|
||||
disabled={isSubmitting || !input.trim()}
|
||||
>
|
||||
{isSubmitting ? (
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
) : (
|
||||
<LinkIcon className="mr-2 h-4 w-4" />
|
||||
)}
|
||||
Add Channel
|
||||
</Button>
|
||||
)}
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
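The dialog above settles its outcome by polling a worker-request endpoint. A minimal sketch of the response shape it actually reads, inferred from the reads in pollForResult; the real route may return more fields:

// Inferred contract of GET /api/telegram/worker-request?requestId=...
// (not the route's full definition, which is not shown in this diff)
interface WorkerRequestStatus {
  status: "PENDING" | "IN_PROGRESS" | "COMPLETED" | "FAILED";
  result?: { title?: string }; // read when status === "COMPLETED"
  error?: string;              // read when status === "FAILED"
}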
@@ -16,6 +16,7 @@ interface TelegramAdminProps {
  ingestionStatus: IngestionAccountStatus[];
  globalDestination: GlobalDestination;
  sendHistory: SendHistoryRow[];
  workerIntervalMinutes: number;
}

export function TelegramAdmin({
@@ -24,6 +25,7 @@ export function TelegramAdmin({
  ingestionStatus,
  globalDestination,
  sendHistory,
  workerIntervalMinutes,
}: TelegramAdminProps) {
  return (
    <div className="space-y-4">
@@ -32,7 +34,7 @@ export function TelegramAdmin({
        description="Manage Telegram accounts, channels, and ingestion"
      />

      <WorkerStatusPanel initialStatus={ingestionStatus} />
      <WorkerStatusPanel initialStatus={ingestionStatus} initialIntervalMinutes={workerIntervalMinutes} />

      <Tabs defaultValue="accounts" className="space-y-4">
        <TabsList>
@@ -51,7 +53,7 @@ export function TelegramAdmin({
          <AccountsTab accounts={accounts} />
        </TabsContent>
        <TabsContent value="channels">
          <ChannelsTab channels={channels} globalDestination={globalDestination} />
          <ChannelsTab channels={channels} globalDestination={globalDestination} accounts={accounts} />
        </TabsContent>
        <TabsContent value="sends">
          <BotSendsTab history={sendHistory} />

@@ -1,6 +1,6 @@
"use client";

import { useEffect, useState, useCallback } from "react";
import { useEffect, useState, useCallback, useTransition } from "react";
import {
  Loader2,
  CheckCircle2,
@@ -14,10 +14,13 @@ import { Card, CardContent } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { toast } from "sonner";
import { triggerIngestion } from "../actions";
import type { IngestionAccountStatus } from "@/lib/telegram/types";

interface WorkerStatusPanelProps {
  initialStatus: IngestionAccountStatus[];
  initialIntervalMinutes?: number;
}

const AUTH_STATE_CONFIG: Record<
@@ -39,15 +42,28 @@ const AUTH_STATE_CONFIG: Record<
  EXPIRED: { label: "Expired", color: "text-red-500", icon: "x" },
};

export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
export function WorkerStatusPanel({ initialStatus, initialIntervalMinutes = 60 }: WorkerStatusPanelProps) {
  const [accounts, setAccounts] = useState(initialStatus);
  const [error, setError] = useState(false);
  const [nextRunCountdown, setNextRunCountdown] = useState<string | null>(null);
  const [workerIntervalMinutes, setWorkerIntervalMinutes] = useState(initialIntervalMinutes);
  const [isPending, startTransition] = useTransition();

  // Find active run
  const activeRun = accounts.find((a) => a.currentRun);
  const isRunning = !!activeRun;

  const handleSyncNow = useCallback(() => {
    startTransition(async () => {
      const result = await triggerIngestion();
      if (result.success) {
        toast.success("Sync triggered — worker will start shortly");
      } else {
        toast.error(result.error ?? "Failed to trigger sync");
      }
    });
  }, []);

  // Poll for status
  useEffect(() => {
    let timer: ReturnType<typeof setTimeout>;
@@ -60,6 +76,9 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
        const data = await res.json();
        if (mounted) {
          setAccounts(data.accounts ?? []);
          if (data.workerIntervalMinutes) {
            setWorkerIntervalMinutes(data.workerIntervalMinutes);
          }
          setError(false);
        }
      } catch {
@@ -86,7 +105,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
      return;
    }

    // Estimate next run based on last run finish time + interval (5 min + up to 5 min jitter)
    // Estimate next run based on last run finish time + configured interval + up to 5 min jitter
    const lastFinished = accounts
      .filter((a) => a.lastRun?.finishedAt)
      .map((a) => new Date(a.lastRun!.finishedAt!).getTime())
@@ -97,7 +116,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
      return;
    }

    const intervalMs = 5 * 60 * 1000; // 5 min base
    const intervalMs = workerIntervalMinutes * 60 * 1000;
    const estimatedNext = lastFinished + intervalMs;

    const tick = () => {
@@ -116,7 +135,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
    tick();
    const interval = setInterval(tick, 1_000);
    return () => clearInterval(interval);
  }, [isRunning, accounts]);
  }, [isRunning, accounts, workerIntervalMinutes]);

  if (accounts.length === 0 && !error) {
    return (
@@ -182,7 +201,12 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
      ) : isRunning && activeRun?.currentRun ? (
        <RunningStatus run={activeRun.currentRun} />
      ) : (
        <IdleStatus accounts={accounts} nextRunCountdown={nextRunCountdown} />
        <IdleStatus
          accounts={accounts}
          nextRunCountdown={nextRunCountdown}
          onSyncNow={handleSyncNow}
          isSyncing={isPending}
        />
      )}
    </CardContent>
  </Card>
@@ -233,6 +257,11 @@ function RunningStatus({
          </span>
        </span>
      )}
      {run.messagesScanned > 0 && (
        <span>
          <span className="text-foreground tabular-nums">{run.messagesScanned}</span> messages
        </span>
      )}
      {run.zipsIngested > 0 && (
        <span>
          <span className="text-foreground tabular-nums">{run.zipsIngested}</span> ingested
@@ -251,9 +280,13 @@ function RunningStatus({
function IdleStatus({
  accounts,
  nextRunCountdown,
  onSyncNow,
  isSyncing,
}: {
  accounts: IngestionAccountStatus[];
  nextRunCountdown: string | null;
  onSyncNow: () => void;
  isSyncing: boolean;
}) {
  const lastRun = accounts
    .filter((a) => a.lastRun)
@@ -316,14 +349,32 @@ function IdleStatus({
      )}
    </div>

    {nextRunCountdown && hasAuthenticated && (
      <div className="flex items-center gap-1.5 shrink-0">
        <RefreshCw className="h-3 w-3 text-muted-foreground" />
        <span className="text-xs text-muted-foreground tabular-nums">
          Next: {nextRunCountdown}
        </span>
      </div>
    )}
    <div className="flex items-center gap-2 shrink-0">
      {nextRunCountdown && hasAuthenticated && (
        <div className="flex items-center gap-1.5">
          <RefreshCw className="h-3 w-3 text-muted-foreground" />
          <span className="text-xs text-muted-foreground tabular-nums">
            Next: {nextRunCountdown}
          </span>
        </div>
      )}
      {hasAuthenticated && (
        <Button
          variant="outline"
          size="sm"
          className="h-7 text-xs px-2"
          onClick={onSyncNow}
          disabled={isSyncing}
        >
          {isSyncing ? (
            <Loader2 className="h-3 w-3 animate-spin mr-1" />
          ) : (
            <RefreshCw className="h-3 w-3 mr-1" />
          )}
          Sync Now
        </Button>
      )}
    </div>
  </div>
);
}

@@ -173,6 +173,7 @@ export async function createChannel(
      telegramId: BigInt(parsed.data.telegramId),
      title: parsed.data.title,
      type: parsed.data.type,
      isActive: false,
    },
  });
  revalidatePath(REVALIDATE_PATH);
@@ -230,11 +231,35 @@ export async function toggleChannelActive(id: string): Promise<ActionResult> {
  const existing = await prisma.telegramChannel.findUnique({ where: { id } });
  if (!existing) return { success: false, error: "Channel not found" };

  const newActive = !existing.isActive;

  try {
    await prisma.telegramChannel.update({
      where: { id },
      data: { isActive: !existing.isActive },
      data: { isActive: newActive },
    });

    // When enabling a SOURCE channel, auto-create READER links for all
    // active authenticated accounts so the worker can scan it.
    // Without this, toggling a channel active without going through the
    // channel picker leaves it with no AccountChannelMap READER link.
    if (newActive && existing.type === "SOURCE") {
      const accounts = await prisma.telegramAccount.findMany({
        where: { isActive: true, authState: "AUTHENTICATED" },
        select: { id: true },
      });

      for (const account of accounts) {
        try {
          await prisma.accountChannelMap.create({
            data: { accountId: account.id, channelId: id, role: "READER" },
          });
        } catch {
          // Already linked — ignore unique constraint violation
        }
      }
    }

    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
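The create-and-catch loop above absorbs unique-constraint violations one row at a time. On Postgres the same idempotent linking could be done in a single statement with Prisma's createMany and skipDuplicates; this is a sketch of that alternative, not what the commit does:

// Bulk-create READER links, silently skipping ones that already exist
// (skipDuplicates maps to INSERT ... ON CONFLICT DO NOTHING on Postgres).
await prisma.accountChannelMap.createMany({
  data: accounts.map((account) => ({
    accountId: account.id,
    channelId: id,
    role: "READER",
  })),
  skipDuplicates: true,
});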
@@ -258,6 +283,25 @@ export async function deleteChannel(id: string): Promise<ActionResult> {
  }
}

export async function setChannelCategory(
  id: string,
  category: string | null
): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;

  try {
    await prisma.telegramChannel.update({
      where: { id },
      data: { category: category?.trim() || null },
    });
    revalidatePath("/telegram");
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update category" };
  }
}

export async function setChannelType(
  id: string,
  type: "SOURCE" | "DESTINATION"
@@ -269,6 +313,13 @@ export async function setChannelType(
  if (!existing) return { success: false, error: "Channel not found" };

  try {
    if (type === "DESTINATION") {
      // Setting as destination: use the full global destination logic
      // so it updates the global settings key, creates WRITER links, etc.
      return await setGlobalDestination(id);
    }

    // Setting as SOURCE — just change the type
    await prisma.telegramChannel.update({
      where: { id },
      data: { type },
@@ -280,19 +331,49 @@
  }
}

export async function triggerChannelSync(): Promise<ActionResult> {
/**
 * Reset all scan progress for a channel so the worker will re-process it
 * from the very beginning on the next ingestion cycle.
 *
 * This clears:
 * - `lastProcessedMessageId` on every AccountChannelMap linked to this channel
 * - All TopicProgress records for those maps (for forum channels)
 */
export async function rescanChannel(channelId: string): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;

  const channel = await prisma.telegramChannel.findUnique({
    where: { id: channelId },
  });
  if (!channel) return { success: false, error: "Channel not found" };

  try {
    // Signal the worker to do a channel sync via pg_notify
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('channel_sync', 'requested')`
    );
    // Find all account-channel maps for this channel
    const maps = await prisma.accountChannelMap.findMany({
      where: { channelId },
      select: { id: true },
    });

    const mapIds = maps.map((m) => m.id);

    // Delete all topic progress records for these maps (forum channels)
    if (mapIds.length > 0) {
      await prisma.topicProgress.deleteMany({
        where: { accountChannelMapId: { in: mapIds } },
      });
    }

    // Reset the scan cursor so the worker re-processes from the start
    await prisma.accountChannelMap.updateMany({
      where: { channelId },
      data: { lastProcessedMessageId: null },
    });

    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to trigger channel sync" };
    return { success: false, error: "Failed to reset channel scan progress" };
  }
}

@@ -371,23 +452,12 @@ export async function triggerIngestion(
    return { success: false, error: "No eligible accounts found" };
  }

  // Create ingestion runs — the worker picks these up
  for (const account of accounts) {
    const existing = await prisma.ingestionRun.findFirst({
      where: { accountId: account.id, status: "RUNNING" },
    });
    if (!existing) {
      await prisma.ingestionRun.create({
        data: { accountId: account.id, status: "RUNNING" },
      });
    }
  }

  // pg_notify for immediate worker pickup
  // Signal the worker to run an immediate ingestion cycle via pg_notify.
  // The worker will create its own IngestionRun records with proper activity tracking.
  try {
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('ingestion_trigger', $1)`,
      accounts.map((a) => a.id).join(",")
      accounts.map((a: { id: string }) => a.id).join(",")
    );
  } catch {
    // Best-effort
@@ -417,7 +487,7 @@ export async function saveChannelSelections(
  try {
    let linked = 0;
    for (const ch of channels) {
      // Upsert the channel record
      // Upsert the channel record and activate it (user explicitly selected it)
      const channel = await prisma.telegramChannel.upsert({
        where: { telegramId: BigInt(ch.telegramId) },
        create: {
@@ -425,10 +495,12 @@
          title: ch.title,
          type: "SOURCE",
          isForum: ch.isForum,
          isActive: true,
        },
        update: {
          title: ch.title,
          isForum: ch.isForum,
          isActive: true,
        },
      });

@@ -453,6 +525,56 @@
  }
}

// ── Join channel by link/username ──

/**
 * Request the worker to join a channel by t.me link, invite link, or @username.
 * Uses ChannelFetchRequest as a generic DB-mediated request with pg_notify.
 * Returns the requestId so the UI can poll for completion.
 */
export async function joinChannelByLink(
  input: string
): Promise<ActionResult<{ requestId: string }>> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;

  const trimmed = input.trim();
  if (!trimmed) return { success: false, error: "Input is required" };

  try {
    // Need at least one authenticated account for TDLib
    const account = await prisma.telegramAccount.findFirst({
      where: { isActive: true, authState: "AUTHENTICATED" },
      select: { id: true },
    });
    if (!account) {
      return { success: false, error: "At least one authenticated account is needed" };
    }

    // Create a fetch request to track progress
    const fetchRequest = await prisma.channelFetchRequest.create({
      data: {
        accountId: account.id,
        status: "PENDING",
      },
    });

    // Signal worker via pg_notify
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('join_channel', $1)`,
      JSON.stringify({
        requestId: fetchRequest.id,
        input: trimmed,
        accountId: account.id,
      })
    );

    return { success: true, data: { requestId: fetchRequest.id } };
  } catch {
    return { success: false, error: "Failed to request channel join" };
  }
}
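Several actions in this file signal the worker through pg_notify channels ('ingestion_trigger', 'join_channel', 'rebuild_packages'). The worker side is not part of this diff; one plausible way it could consume these notifications with node-postgres, with names treated as assumptions:

import { Client } from "pg";

// Hypothetical worker-side listener (the actual worker code is not shown here).
async function listenForJoinRequests() {
  const client = new Client({ connectionString: process.env.DATABASE_URL });
  await client.connect();
  await client.query("LISTEN join_channel");

  client.on("notification", (msg) => {
    if (msg.channel !== "join_channel" || !msg.payload) return;
    // The payload format matches what joinChannelByLink publishes above.
    const { requestId, input, accountId } = JSON.parse(msg.payload);
    // ...resolve `input` via TDLib using `accountId`, then mark the
    // ChannelFetchRequest row `requestId` COMPLETED or FAILED.
  });
}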

// ── Global destination channel ──

export async function setGlobalDestination(
@@ -467,10 +589,10 @@ export async function setGlobalDestination(
  if (!channel) return { success: false, error: "Channel not found" };

  try {
    // Set the channel type to DESTINATION
    // Set the channel type to DESTINATION and ensure it's active
    await prisma.telegramChannel.update({
      where: { id: channelId },
      data: { type: "DESTINATION" },
      data: { type: "DESTINATION", isActive: true },
    });

    // Save as global destination
@@ -521,17 +643,19 @@ export async function createDestinationChannel(
  if (!admin.success) return admin;

  try {
    // Create the channel as DESTINATION
    // Create the channel as DESTINATION (active by default — needed for uploads)
    const channel = await prisma.telegramChannel.upsert({
      where: { telegramId: BigInt(telegramId) },
      create: {
        telegramId: BigInt(telegramId),
        title,
        type: "DESTINATION",
        isActive: true,
      },
      update: {
        title,
        type: "DESTINATION",
        isActive: true,
      },
    });

@@ -581,6 +705,63 @@ export async function createDestinationChannel(
  }
}

/**
 * Request the worker to rebuild the package database by scanning the
 * destination channel for uploaded archives and recreating Package records.
 * Uses ChannelFetchRequest as a generic DB-mediated request with pg_notify.
 * Returns the requestId so the UI can poll for progress.
 */
export async function rebuildPackageDatabase(): Promise<
  ActionResult<{ requestId: string }>
> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;

  try {
    // Need at least one authenticated account for TDLib
    const hasAccount = await prisma.telegramAccount.findFirst({
      where: { isActive: true, authState: "AUTHENTICATED" },
      select: { id: true },
    });
    if (!hasAccount) {
      return {
        success: false,
        error:
          "At least one authenticated account is needed to scan the destination channel",
      };
    }

    // Need a destination channel
    const destSetting = await prisma.globalSetting.findUnique({
      where: { key: "destination_channel_id" },
    });
    if (!destSetting) {
      return {
        success: false,
        error: "No destination channel configured",
      };
    }

    // Create a fetch request to track progress
    const fetchRequest = await prisma.channelFetchRequest.create({
      data: {
        accountId: hasAccount.id,
        status: "PENDING",
      },
    });

    // Signal worker via pg_notify
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('rebuild_packages', $1)`,
      fetchRequest.id
    );

    return { success: true, data: { requestId: fetchRequest.id } };
  } catch {
    return { success: false, error: "Failed to request package database rebuild" };
  }
}

/**
 * Request the worker to create a new Telegram supergroup as the destination.
 * Uses ChannelFetchRequest as a generic DB-mediated request with pg_notify.

@@ -25,7 +25,7 @@ export default async function TelegramPage() {
    }),
  ]);

  const serializedHistory = sendHistory.map((r) => ({
  const serializedHistory = sendHistory.map((r: typeof sendHistory[number]) => ({
    id: r.id,
    packageName: r.package.fileName,
    recipientName: r.telegramLink.telegramName,
@@ -42,6 +42,7 @@ export default async function TelegramPage() {
      ingestionStatus={ingestionStatus}
      globalDestination={globalDestination}
      sendHistory={serializedHistory}
      workerIntervalMinutes={parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10)}
    />
  );
}

@@ -11,6 +11,23 @@ export async function registerUser(input: unknown): Promise<ActionResult<{ id: s
    return { success: false, error: "Validation failed" };
  }

  // Validate invite code
  const invite = await prisma.inviteCode.findUnique({
    where: { code: parsed.data.inviteCode },
  });

  if (!invite) {
    return { success: false, error: "Invalid invite code. Please check the code and try again." };
  }

  if (invite.uses >= invite.maxUses) {
    return { success: false, error: "This invite code has reached its maximum number of uses" };
  }

  if (invite.expiresAt && invite.expiresAt < new Date()) {
    return { success: false, error: "This invite code has expired. Please request a new one." };
  }

  const existing = await prisma.user.findUnique({
    where: { email: parsed.data.email },
  });
@@ -21,21 +38,32 @@ export async function registerUser(input: unknown): Promise<ActionResult<{ id: s

  const hashedPassword = await bcrypt.hash(parsed.data.password, 10);

  const user = await prisma.user.create({
    data: {
      name: parsed.data.name,
      email: parsed.data.email,
      hashedPassword,
      role: "USER",
      settings: {
        create: {
          lowStockThreshold: 10,
          currency: "USD",
          theme: "dark",
          units: "metric",
  // Create user and increment invite usage in a transaction
  const user = await prisma.$transaction(async (tx) => {
    const newUser = await tx.user.create({
      data: {
        name: parsed.data.name,
        email: parsed.data.email,
        hashedPassword,
        role: "USER",
        usedInviteId: invite.id,
        settings: {
          create: {
            lowStockThreshold: 10,
            currency: "USD",
            theme: "dark",
            units: "metric",
          },
        },
      },
    });

    await tx.inviteCode.update({
      where: { id: invite.id },
      data: { uses: { increment: 1 } },
    });

    return newUser;
  });

  return { success: true, data: { id: user.id } };

@@ -1,7 +1,7 @@
"use client";

import { useState, useTransition } from "react";
import { useRouter } from "next/navigation";
import { useRouter, useSearchParams } from "next/navigation";
import Link from "next/link";
import { useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
@@ -24,12 +24,19 @@ import { APP_NAME } from "@/lib/constants";

export default function RegisterPage() {
  const router = useRouter();
  const searchParams = useSearchParams();
  const [error, setError] = useState<string | null>(null);
  const [isPending, startTransition] = useTransition();

  const form = useForm<RegisterInput>({
    resolver: zodResolver(registerSchema),
    defaultValues: { name: "", email: "", password: "", confirmPassword: "" },
    defaultValues: {
      name: "",
      email: "",
      password: "",
      confirmPassword: "",
      inviteCode: searchParams.get("code") ?? "",
    },
  });

  function onSubmit(values: RegisterInput) {
@@ -75,7 +82,7 @@ export default function RegisterPage() {
      <Card>
        <CardHeader>
          <CardTitle>Create Account</CardTitle>
          <CardDescription>Fill in your details below</CardDescription>
          <CardDescription>You need an invite code to register</CardDescription>
        </CardHeader>
        <CardContent>
          <Form {...form}>
@@ -86,6 +93,24 @@ export default function RegisterPage() {
            </div>
          )}

          <FormField
            control={form.control}
            name="inviteCode"
            render={({ field }) => (
              <FormItem>
                <FormLabel>Invite Code</FormLabel>
                <FormControl>
                  <Input
                    placeholder="Enter your invite code"
                    autoComplete="off"
                    {...field}
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="name"

src/app/api/groups/[id]/preview/route.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { NextResponse } from "next/server";
import { prisma } from "@/lib/prisma";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";

export async function GET(
  request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const authResult = await authenticateApiRequest(request);
  if ("error" in authResult) return authResult.error;

  const { id } = await params;

  const group = await prisma.packageGroup.findUnique({
    where: { id },
    select: { previewData: true },
  });

  if (!group || !group.previewData) {
    return new NextResponse(null, { status: 404 });
  }

  const buffer =
    group.previewData instanceof Buffer
      ? group.previewData
      : Buffer.from(group.previewData);

  return new NextResponse(buffer, {
    status: 200,
    headers: {
      "Content-Type": "image/jpeg",
      "Content-Length": String(buffer.length),
      "Cache-Control": "public, max-age=3600, immutable",
    },
  });
}
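Because the route returns raw JPEG bytes with long-lived caching, the preview can be consumed as an ordinary image URL. A hypothetical client-side usage; the group object here is a placeholder, not from this diff:

// Assumed usage in a component
<img src={`/api/groups/${group.id}/preview`} alt="Group preview" loading="lazy" />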
@@ -9,5 +9,9 @@ export async function GET(request: Request) {
  if ("error" in authResult) return authResult.error;

  const accounts = await getIngestionStatus();
  return NextResponse.json({ accounts });
  const workerIntervalMinutes = parseInt(
    process.env.WORKER_INTERVAL_MINUTES ?? "60",
    10
  );
  return NextResponse.json({ accounts, workerIntervalMinutes });
}

@@ -45,33 +45,20 @@ export async function POST(request: Request) {
    );
  }

  // Create ingestion runs marked as RUNNING — the worker will pick these up
  // when it next polls, or we use pg_notify for immediate pickup
  for (const account of accounts) {
    // Only create if no run is already RUNNING for this account
    const existing = await prisma.ingestionRun.findFirst({
      where: { accountId: account.id, status: "RUNNING" },
    });
    if (!existing) {
      await prisma.ingestionRun.create({
        data: { accountId: account.id, status: "RUNNING" },
      });
    }
  }

  // Send pg_notify for immediate worker pickup
  // Send pg_notify for immediate worker pickup.
  // The worker creates its own IngestionRun records with proper activity tracking.
  try {
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('ingestion_trigger', $1)`,
      accounts.map((a) => a.id).join(",")
      accounts.map((a: { id: string }) => a.id).join(",")
    );
  } catch {
    // pg_notify is best-effort — worker will pick up on next cycle anyway
    // pg_notify is best-effort — worker will pick up on next scheduled cycle anyway
  }

  return NextResponse.json({
    triggered: true,
    accountIds: accounts.map((a) => a.id),
    message: `Ingestion queued for ${accounts.length} account(s)`,
    accountIds: accounts.map((a: { id: string }) => a.id),
    message: `Ingestion triggered for ${accounts.length} account(s)`,
  });
}

src/app/api/zips/[id]/extract/[requestId]/route.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import { NextResponse } from "next/server";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

/**
 * GET /api/zips/:id/extract/:requestId
 * Get the status and/or image data for an extraction request.
 * Query param: ?image=true returns the raw image bytes if completed.
 * Otherwise returns status JSON.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ id: string; requestId: string }> }
) {
  const authResult = await authenticateApiRequest(request);
  if ("error" in authResult) return authResult.error;

  const { requestId } = await params;
  const url = new URL(request.url);
  const wantImage = url.searchParams.get("image") === "true";

  if (wantImage) {
    // Return the raw image bytes
    const req = await prisma.archiveExtractRequest.findUnique({
      where: { id: requestId },
      select: { status: true, imageData: true, contentType: true, error: true },
    });

    if (!req) {
      return new NextResponse(null, { status: 404 });
    }

    if (req.status !== "COMPLETED" || !req.imageData) {
      return NextResponse.json(
        { status: req.status, error: req.error },
        { status: req.status === "FAILED" ? 400 : 202 }
      );
    }

    const buffer =
      req.imageData instanceof Buffer
        ? req.imageData
        : Buffer.from(req.imageData);

    return new NextResponse(buffer, {
      status: 200,
      headers: {
        "Content-Type": req.contentType || "image/jpeg",
        "Content-Length": String(buffer.length),
        "Cache-Control": "public, max-age=3600, immutable",
      },
    });
  }

  // Return status JSON (without image data to avoid large payloads)
  const req = await prisma.archiveExtractRequest.findUnique({
    where: { id: requestId },
    select: { id: true, status: true, error: true, contentType: true },
  });

  if (!req) {
    return NextResponse.json({ error: "Request not found" }, { status: 404 });
  }

  return NextResponse.json({
    requestId: req.id,
    status: req.status,
    error: req.error,
    contentType: req.contentType,
  });
}
src/app/api/zips/[id]/extract/route.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
import { NextResponse } from "next/server";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

/**
 * POST /api/zips/:id/extract
 * Request extraction of an image from a package archive.
 * Body: { filePath: string }
 * Returns: { requestId: string, status: string }
 *
 * If a completed extraction already exists for this package+filePath,
 * returns it immediately.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const authResult = await authenticateApiRequest(request);
  if ("error" in authResult) return authResult.error;

  const { id } = await params;
  const body = await request.json();
  const filePath = body?.filePath;

  if (!filePath || typeof filePath !== "string") {
    return NextResponse.json(
      { error: "filePath is required" },
      { status: 400 }
    );
  }

  // Verify package exists
  const pkg = await prisma.package.findUnique({
    where: { id },
    select: { id: true, destChannelId: true, destMessageId: true, archiveType: true, isMultipart: true, partCount: true },
  });

  if (!pkg) {
    return NextResponse.json({ error: "Package not found" }, { status: 404 });
  }

  if (!pkg.destChannelId || !pkg.destMessageId) {
    return NextResponse.json(
      { error: "Package has not been uploaded to destination channel" },
      { status: 400 }
    );
  }

  if (pkg.archiveType === "DOCUMENT") {
    return NextResponse.json(
      { error: "Cannot extract images from standalone documents" },
      { status: 400 }
    );
  }

  if (pkg.isMultipart && pkg.partCount > 1) {
    return NextResponse.json(
      { error: "Image extraction is not supported for multipart archives" },
      { status: 400 }
    );
  }

  // Check for an existing completed extraction
  const existing = await prisma.archiveExtractRequest.findFirst({
    where: {
      packageId: id,
      filePath,
      status: "COMPLETED",
      imageData: { not: null },
    },
    select: { id: true, status: true },
  });

  if (existing) {
    return NextResponse.json({
      requestId: existing.id,
      status: "COMPLETED",
    });
  }

  // Check for an in-progress request
  const pending = await prisma.archiveExtractRequest.findFirst({
    where: {
      packageId: id,
      filePath,
      status: { in: ["PENDING", "IN_PROGRESS"] },
    },
    select: { id: true, status: true },
  });

  if (pending) {
    return NextResponse.json({
      requestId: pending.id,
      status: pending.status,
    });
  }

  // Create a new extraction request
  const extractRequest = await prisma.archiveExtractRequest.create({
    data: {
      packageId: id,
      filePath,
    },
  });

  // Notify the worker via pg_notify
  await prisma.$queryRawUnsafe(
    `SELECT pg_notify('archive_extract', $1)`,
    extractRequest.id
  );

  return NextResponse.json({
    requestId: extractRequest.id,
    status: "PENDING",
  });
}
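Together with the status route above, extraction is a three-step round trip: POST to enqueue, poll the status JSON, then fetch the bytes with ?image=true. A minimal client sketch of that flow against these two routes; the delay and retry counts are arbitrary:

// Sketch of the request/poll/fetch cycle
async function extractImage(pkgId: string, filePath: string): Promise<Blob | null> {
  const res = await fetch(`/api/zips/${pkgId}/extract`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ filePath }),
  });
  const { requestId } = await res.json();

  for (let i = 0; i < 30; i++) {
    await new Promise((r) => setTimeout(r, 2000));
    const poll = await fetch(`/api/zips/${pkgId}/extract/${requestId}`);
    const { status } = await poll.json();
    if (status === "COMPLETED") {
      const img = await fetch(`/api/zips/${pkgId}/extract/${requestId}?image=true`);
      return img.blob();
    }
    if (status === "FAILED") return null;
  }
  return null; // timed out
}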
src/app/api/zips/[id]/images/route.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { NextResponse } from "next/server";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";
import { prisma } from "@/lib/prisma";

export const dynamic = "force-dynamic";

const IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "bmp"];

/**
 * GET /api/zips/:id/images
 * Lists image files inside a package's archive (from PackageFile records).
 * Returns a list of image file paths that can be used as preview candidates.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const authResult = await authenticateApiRequest(request);
  if ("error" in authResult) return authResult.error;

  const { id } = await params;

  const pkg = await prisma.package.findUnique({
    where: { id },
    select: { id: true, archiveType: true },
  });

  if (!pkg) {
    return NextResponse.json({ error: "Package not found" }, { status: 404 });
  }

  const images = await prisma.packageFile.findMany({
    where: {
      packageId: id,
      extension: { in: IMAGE_EXTENSIONS },
    },
    orderBy: { path: "asc" },
    select: {
      id: true,
      path: true,
      fileName: true,
      extension: true,
      uncompressedSize: true,
    },
  });

  const mapped = images.map((img) => ({
    id: img.id,
    path: img.path,
    fileName: img.fileName,
    extension: img.extension,
    size: img.uncompressedSize.toString(),
  }));

  return NextResponse.json({ images: mapped });
}
@@ -2,6 +2,7 @@

import Link from "next/link";
import { usePathname } from "next/navigation";
import { useSession } from "next-auth/react";
import {
  LayoutDashboard,
  Cylinder,
@@ -9,35 +10,27 @@ import {
  Paintbrush,
  Gem,
  FileBox,
  Gift,
  Send,
  ClipboardList,
  Building2,
  MapPin,
  Settings,
  UserPlus,
  Flame,
} from "lucide-react";
import { cn } from "@/lib/utils";
import { APP_NAME } from "@/lib/constants";
import { APP_NAME, NAV_ITEMS } from "@/lib/constants";
import { SheetHeader, SheetTitle } from "@/components/ui/sheet";

const icons = { LayoutDashboard, Cylinder, Droplets, Paintbrush, Gem, FileBox, Send, ClipboardList, Building2, MapPin, Settings };

const navItems = [
  { label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" as const },
  { label: "Filaments", href: "/filaments", icon: "Cylinder" as const },
  { label: "Resins", href: "/resins", icon: "Droplets" as const },
  { label: "Paints", href: "/paints", icon: "Paintbrush" as const },
  { label: "Supplies", href: "/supplies", icon: "Gem" as const },
  { label: "STL Files", href: "/stls", icon: "FileBox" as const },
  { label: "Telegram", href: "/telegram", icon: "Send" as const },
  { label: "Usage", href: "/usage", icon: "ClipboardList" as const },
  { label: "Vendors", href: "/vendors", icon: "Building2" as const },
  { label: "Locations", href: "/locations", icon: "MapPin" as const },
  { label: "Settings", href: "/settings", icon: "Settings" as const },
];
const icons = { LayoutDashboard, Cylinder, Droplets, Paintbrush, Gem, FileBox, Gift, Send, ClipboardList, Building2, MapPin, Settings, UserPlus };

export function MobileSidebar() {
  const pathname = usePathname();
  const { data: session } = useSession();
  const isAdmin = session?.user?.role === "ADMIN";

  const visibleItems = NAV_ITEMS.filter((item) => !item.adminOnly || isAdmin);

  return (
    <div className="flex h-full flex-col">
@@ -48,7 +41,7 @@ export function MobileSidebar() {
        </SheetTitle>
      </SheetHeader>
      <nav className="flex-1 space-y-1 p-2">
        {navItems.map((item) => {
        {visibleItems.map((item) => {
          const Icon = icons[item.icon];
          const isActive = pathname.startsWith(item.href);


@@ -3,6 +3,7 @@
import { useState } from "react";
import Link from "next/link";
import { usePathname } from "next/navigation";
import { useSession } from "next-auth/react";
import {
  LayoutDashboard,
  Cylinder,
@@ -10,17 +11,19 @@ import {
  Paintbrush,
  Gem,
  FileBox,
  Gift,
  Send,
  ClipboardList,
  Building2,
  MapPin,
  Settings,
  UserPlus,
  Flame,
  PanelLeftClose,
  PanelLeft,
} from "lucide-react";
import { cn } from "@/lib/utils";
import { APP_NAME } from "@/lib/constants";
import { APP_NAME, NAV_ITEMS } from "@/lib/constants";
import { Button } from "@/components/ui/button";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";

@@ -31,30 +34,22 @@ const icons = {
  Paintbrush,
  Gem,
  FileBox,
  Gift,
  Send,
  ClipboardList,
  Building2,
  MapPin,
  Settings,
  UserPlus,
} as const;

const navItems = [
  { label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" as const },
  { label: "Filaments", href: "/filaments", icon: "Cylinder" as const },
  { label: "Resins", href: "/resins", icon: "Droplets" as const },
  { label: "Paints", href: "/paints", icon: "Paintbrush" as const },
  { label: "Supplies", href: "/supplies", icon: "Gem" as const },
  { label: "STL Files", href: "/stls", icon: "FileBox" as const },
  { label: "Telegram", href: "/telegram", icon: "Send" as const },
  { label: "Usage", href: "/usage", icon: "ClipboardList" as const },
  { label: "Vendors", href: "/vendors", icon: "Building2" as const },
  { label: "Locations", href: "/locations", icon: "MapPin" as const },
  { label: "Settings", href: "/settings", icon: "Settings" as const },
];

export function Sidebar() {
  const pathname = usePathname();
  const [collapsed, setCollapsed] = useState(false);
  const { data: session } = useSession();
  const isAdmin = session?.user?.role === "ADMIN";

  const visibleItems = NAV_ITEMS.filter((item) => !item.adminOnly || isAdmin);

  return (
    <aside
@@ -73,7 +68,7 @@ export function Sidebar() {

      {/* Navigation */}
      <nav className="flex-1 space-y-1 p-2">
        {navItems.map((item) => {
        {visibleItems.map((item) => {
          const Icon = icons[item.icon];
          const isActive = pathname.startsWith(item.href);


@@ -1,6 +1,6 @@
"use client";

import { type Table as TanStackTable, flexRender } from "@tanstack/react-table";
import { type Table as TanStackTable, type Row, flexRender } from "@tanstack/react-table";
import {
  Table,
  TableBody,
@@ -10,13 +10,15 @@ import {
  TableRow,
} from "@/components/ui/table";
import { EmptyState } from "./empty-state";
import { cn } from "@/lib/utils";

interface DataTableProps<TData> {
  table: TanStackTable<TData>;
  emptyMessage?: string;
  rowClassName?: (row: Row<TData>) => string;
}

export function DataTable<TData>({ table, emptyMessage }: DataTableProps<TData>) {
export function DataTable<TData>({ table, emptyMessage, rowClassName }: DataTableProps<TData>) {
  return (
    <div className="rounded-md border border-border">
      <Table>
@@ -36,7 +38,10 @@ export function DataTable<TData>({ table, emptyMessage }: DataTableProps<TData>)
        <TableBody>
          {table.getRowModel().rows?.length ? (
            table.getRowModel().rows.map((row) => (
              <TableRow key={row.id} className="h-10 border-border hover:bg-muted/50">
              <TableRow
                key={row.id}
                className={cn("h-10 border-border hover:bg-muted/50", rowClassName?.(row))}
              >
                {row.getVisibleCells().map((cell) => (
                  <TableCell key={cell.id} className="py-1.5 text-sm">
                    {flexRender(cell.column.columnDef.cell, cell.getContext())}

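A hypothetical call site for the new rowClassName prop; the isActive field is an example and does not come from this diff:

<DataTable
  table={table}
  emptyMessage="No channels found"
  rowClassName={(row) => (row.original.isActive ? "" : "opacity-50")}
/>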
@@ -18,6 +18,8 @@ interface DeleteDialogProps {
  description?: string;
  onConfirm: () => void;
  isLoading?: boolean;
  confirmLabel?: string;
  confirmLoadingLabel?: string;
}

export function DeleteDialog({
@@ -27,6 +29,8 @@ export function DeleteDialog({
  description = "This action cannot be undone.",
  onConfirm,
  isLoading,
  confirmLabel = "Delete",
  confirmLoadingLabel,
}: DeleteDialogProps) {
  return (
    <AlertDialog open={open} onOpenChange={onOpenChange}>
@@ -42,7 +46,7 @@ export function DeleteDialog({
        disabled={isLoading}
        className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
      >
        {isLoading ? "Deleting..." : "Delete"}
        {isLoading ? (confirmLoadingLabel ?? `${confirmLabel}...`) : confirmLabel}
      </AlertDialogAction>
    </AlertDialogFooter>
  </AlertDialogContent>

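With the configurable labels, the same dialog can confirm actions other than deletion. A hypothetical usage:

<DeleteDialog
  open={open}
  onOpenChange={setOpen}
  description="The channel will be unlinked from all accounts."
  onConfirm={handleRemove}
  isLoading={isPending}
  confirmLabel="Remove"
  confirmLoadingLabel="Removing..."
/>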
src/data/kickstarter.queries.ts (new file, 97 lines)
@@ -0,0 +1,97 @@
import { prisma } from "@/lib/prisma";
import { Prisma } from "@prisma/client";
import type { DataTableSearchParams } from "@/types/table.types";

interface KickstarterSearchParams extends DataTableSearchParams {
  delivery?: string;
  payment?: string;
  host?: string;
}

export async function getKickstarters(
  userId: string,
  params: KickstarterSearchParams
) {
  const page = Number(params.page) || 1;
  const perPage = Number(params.perPage) || 20;
  const skip = (page - 1) * perPage;

  const where: Prisma.KickstarterWhereInput = {
    userId,
    ...(params.search && {
      OR: [
        {
          name: {
            contains: params.search,
            mode: "insensitive" as Prisma.QueryMode,
          },
        },
        {
          notes: {
            contains: params.search,
            mode: "insensitive" as Prisma.QueryMode,
          },
        },
      ],
    }),
    ...(params.delivery && {
      deliveryStatus: params.delivery as Prisma.EnumDeliveryStatusFilter,
    }),
    ...(params.payment && {
      paymentStatus: params.payment as Prisma.EnumPaymentStatusFilter,
    }),
    ...(params.host && { hostId: params.host }),
  };

  const sortField = params.sort || "createdAt";
  const sortOrder = params.order || "desc";

  const [data, totalCount] = await Promise.all([
    prisma.kickstarter.findMany({
      where,
      orderBy: { [sortField]: sortOrder },
      skip,
      take: perPage,
      include: {
        host: { select: { id: true, name: true } },
        _count: { select: { packages: true } },
      },
    }),
    prisma.kickstarter.count({ where }),
  ]);

  return {
    data,
    pageCount: Math.ceil(totalCount / perPage),
    totalCount,
  };
}

export async function getKickstarterById(id: string, userId: string) {
  return prisma.kickstarter.findFirst({
    where: { id, userId },
    include: {
      host: { select: { id: true, name: true } },
      packages: {
        include: {
          package: {
            select: {
              id: true,
              fileName: true,
              fileSize: true,
              archiveType: true,
              creator: true,
            },
          },
        },
      },
    },
  });
}

export async function getKickstarterHosts() {
  return prisma.kickstarterHost.findMany({
    orderBy: { name: "asc" },
    include: { _count: { select: { kickstarters: true } } },
  });
}
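A sketch of calling the list query from a page; the filter values are illustrative, and the delivery status enum value is assumed rather than shown in this diff:

// e.g. in a server component: page through the current user's kickstarters
const { data, pageCount, totalCount } = await getKickstarters(userId, {
  page: "2",
  perPage: "20",
  search: "dragon",
  delivery: "SHIPPED", // assumed DeliveryStatus value
});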
@@ -18,7 +18,12 @@ export const { auth, handlers, signIn, signOut } = NextAuth({
  async jwt({ token, user }) {
    if (user) {
      token.id = user.id!;
      token.role = user.role ?? "USER";
      // Fetch the role from the database to ensure token reflects current role
      const dbUser = await prisma.user.findUnique({
        where: { id: user.id! },
        select: { role: true },
      });
      token.role = dbUser?.role ?? user.role ?? "ADMIN";
    }
    return token;
  },
@@ -33,6 +38,12 @@ export const { auth, handlers, signIn, signOut } = NextAuth({
  events: {
    async createUser({ user }) {
      if (user.id) {
        // Self-hosted: all users are admins
        await prisma.user.update({
          where: { id: user.id },
          data: { role: "ADMIN" },
        });

        await prisma.userSettings.upsert({
          where: { userId: user.id },
          update: {},

@@ -1,17 +1,19 @@
export const APP_NAME = "Dragon's Stash";

export const NAV_ITEMS = [
  { label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" },
  { label: "Filaments", href: "/filaments", icon: "Cylinder" },
  { label: "Resins", href: "/resins", icon: "Droplets" },
  { label: "Paints", href: "/paints", icon: "Paintbrush" },
  { label: "Supplies", href: "/supplies", icon: "Gem" },
  { label: "STL Files", href: "/stls", icon: "FileBox" },
  { label: "Telegram", href: "/telegram", icon: "Send" },
  { label: "Usage", href: "/usage", icon: "ClipboardList" },
  { label: "Vendors", href: "/vendors", icon: "Building2" },
  { label: "Locations", href: "/locations", icon: "MapPin" },
  { label: "Settings", href: "/settings", icon: "Settings" },
  { label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard", adminOnly: false },
  { label: "Filaments", href: "/filaments", icon: "Cylinder", adminOnly: false },
  { label: "Resins", href: "/resins", icon: "Droplets", adminOnly: false },
  { label: "Paints", href: "/paints", icon: "Paintbrush", adminOnly: false },
  { label: "Supplies", href: "/supplies", icon: "Gem", adminOnly: false },
  { label: "STL Files", href: "/stls", icon: "FileBox", adminOnly: false },
  { label: "Kickstarters", href: "/kickstarters", icon: "Gift", adminOnly: false },
  { label: "Telegram", href: "/telegram", icon: "Send", adminOnly: true },
  { label: "Invites", href: "/invites", icon: "UserPlus", adminOnly: true },
  { label: "Usage", href: "/usage", icon: "ClipboardList", adminOnly: false },
  { label: "Vendors", href: "/vendors", icon: "Building2", adminOnly: false },
  { label: "Locations", href: "/locations", icon: "MapPin", adminOnly: false },
  { label: "Settings", href: "/settings", icon: "Settings", adminOnly: false },
] as const;

export const MATERIALS = [

Some files were not shown because too many files have changed in this diff.