mirror of
https://github.com/xCyanGrizzly/DragonsStash.git
synced 2026-05-11 06:11:15 +00:00
Compare commits
57 Commits
copilot/fi
...
4a44374bb7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a44374bb7 | ||
|
|
c7eb077e0d | ||
|
|
031a4687fb | ||
|
|
30fb96b3f9 | ||
|
|
9a077a3648 | ||
|
|
2ceba66313 | ||
|
|
036dadcb21 | ||
|
|
541ae0c614 | ||
|
|
b7a76fd932 | ||
|
|
b75b0e1f91 | ||
|
|
50e7e02b2d | ||
|
|
dea419b778 | ||
|
|
053eeed6be | ||
|
|
d5725bd52e | ||
|
|
48726b9122 | ||
|
|
1b8df48768 | ||
|
|
726f55a943 | ||
|
|
b08140b4f9 | ||
|
|
761d5e0790 | ||
|
|
d7bbb7587e | ||
| 2763de2711 | |||
|
|
6926df9a2c | ||
|
|
651e9e6bdd | ||
|
|
8d508d5a86 | ||
|
|
2bb3caf7d9 | ||
|
|
8d95752106 | ||
|
|
22419106c1 | ||
|
|
e45de85c69 | ||
|
|
71a2e6a5e8 | ||
|
|
1436b630e2 | ||
|
|
43af23d3be | ||
|
|
49b82a352b | ||
|
|
2e242912af | ||
|
|
9adbdb2a77 | ||
|
|
ad71346468 | ||
|
|
e19a80897d | ||
|
|
22da4dfad2 | ||
|
|
22bcacf3bd | ||
|
|
15da57b8c0 | ||
|
|
8f1a912ccb | ||
|
|
81b65912aa | ||
|
|
5eb2cf05b9 | ||
|
|
f73d06b3d9 | ||
|
|
cac3d518e1 | ||
|
|
987167de0c | ||
|
|
4f331d5411 | ||
|
|
8088a86feb | ||
|
|
b53934ebf2 | ||
|
|
464c86b32a | ||
|
|
fc00fb6f2e | ||
|
|
0c0c9c7f23 | ||
|
|
82d5fc1812 | ||
|
|
9120f0fb5d | ||
|
|
5d88f9beb3 | ||
|
|
3704708970 | ||
|
|
0c789eabd6 | ||
|
|
9a88914f11 |
5
.claude/settings.json
Normal file
5
.claude/settings.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"enabledPlugins": {
|
||||||
|
"superpowers@superpowers-marketplace": true
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -83,7 +83,13 @@
|
|||||||
"Bash(git -C /mnt/c/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash log --oneline -10)",
|
"Bash(git -C /mnt/c/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash log --oneline -10)",
|
||||||
"Bash(git -C \"C:/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash\" status --short)",
|
"Bash(git -C \"C:/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash\" status --short)",
|
||||||
"Bash(timeout:*)",
|
"Bash(timeout:*)",
|
||||||
"mcp__Claude_Preview__preview_start"
|
"mcp__Claude_Preview__preview_start",
|
||||||
|
"Bash(cat:*)",
|
||||||
|
"Bash(grep:*)",
|
||||||
|
"Bash(wait:*)",
|
||||||
|
"WebSearch",
|
||||||
|
"Bash(SKILL_CREATOR_PATH=\"C:\\\\Users\\\\A00963355\\\\.claude\\\\plugins\\\\cache\\\\claude-plugins-official\\\\skill-creator\\\\d5c15b861cd2\\\\skills\\\\skill-creator\" && WORKSPACE=\"C:\\\\Users\\\\A00963355\\\\OneDrive - Amaris Zorggroep\\\\Documents\\\\VScodeProjects\\\\DragonsStash\\\\.claude\\\\skills\\\\tdlib-telegram-workspace\\\\iteration-1\" && python \"$SKILL_CREATOR_PATH/eval-viewer/generate_review.py\" \"$WORKSPACE\" --skill-name \"tdlib-telegram\" --benchmark \"$WORKSPACE/benchmark.json\" --static \"$WORKSPACE/review.html\" 2>&1)",
|
||||||
|
"Bash(start:*)"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,46 @@
|
|||||||
|
{
|
||||||
|
"skill_name": "tdlib-telegram",
|
||||||
|
"iteration": 1,
|
||||||
|
"configs": [
|
||||||
|
{
|
||||||
|
"name": "with_skill",
|
||||||
|
"pass_rate": {"mean": 1.0, "stddev": 0.0},
|
||||||
|
"tokens": {"mean": 53200, "stddev": 14800},
|
||||||
|
"time_seconds": {"mean": 123.5, "stddev": 16.7}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "without_skill",
|
||||||
|
"pass_rate": {"mean": 0.857, "stddev": 0.134},
|
||||||
|
"tokens": {"mean": 56467, "stddev": 12100},
|
||||||
|
"time_seconds": {"mean": 156.4, "stddev": 39.7}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"delta": {
|
||||||
|
"pass_rate": "+14.3%",
|
||||||
|
"tokens": "-5.8%",
|
||||||
|
"time": "-21.0%"
|
||||||
|
},
|
||||||
|
"evals": [
|
||||||
|
{
|
||||||
|
"name": "broadcast-to-all-users",
|
||||||
|
"with_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 35365, "time_seconds": 107.6},
|
||||||
|
"without_skill": {"pass_rate": 0.6, "passed": 3, "total": 5, "tokens": 69214, "time_seconds": 200.2}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "flood-wait-during-scan",
|
||||||
|
"with_skill": {"pass_rate": 1.0, "passed": 4, "total": 4, "tokens": 63079, "time_seconds": 140.9},
|
||||||
|
"without_skill": {"pass_rate": 1.0, "passed": 4, "total": 4, "tokens": 45601, "time_seconds": 122.3}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "download-and-reupload-file",
|
||||||
|
"with_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 61157, "time_seconds": 122.1},
|
||||||
|
"without_skill": {"pass_rate": 1.0, "passed": 5, "total": 5, "tokens": 54587, "time_seconds": 146.7}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"analyst_notes": [
|
||||||
|
"The skill's biggest impact was on Eval 1 (broadcast): the baseline MISSED both withFloodWait retry wrapping and inter-message delay — the two most critical patterns for avoiding rate limits during bulk sends. This is exactly the kind of bug the skill is designed to prevent.",
|
||||||
|
"Eval 2 (FLOOD_WAIT debugging) was a near-tie. Both versions correctly diagnosed the problem and proposed adaptive backoff. The skill version was slightly more thorough: it added pagination-level retry with sleep(waitSec) instead of just re-throwing, meaning it can survive even after withFloodWait's retries are exhausted.",
|
||||||
|
"Eval 3 (download/reupload) was also close. Both correctly composed existing primitives. The skill version was more explicit about WHY certain patterns matter (referencing the skill's documentation), which helps future maintainers understand the code.",
|
||||||
|
"The skill version was faster on average (-21% time) and used fewer tokens (-5.8%), likely because the skill front-loaded the knowledge instead of requiring the agent to discover it by reading source files."
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"eval_id": 1,
|
||||||
|
"eval_name": "broadcast-to-all-users",
|
||||||
|
"prompt": "Add a new bot command /broadcast that sends a text message to ALL users who have a TelegramLink in the database. The admin triggers it from the web app. Add it to the bot's command handler and create an API endpoint that triggers it.",
|
||||||
|
"assertions": [
|
||||||
|
{"text": "uses_sequential_queue: Messages sent one at a time, no Promise.all or concurrent sends", "type": "required"},
|
||||||
|
{"text": "uses_flood_wait_retry: Each send call wrapped in withFloodWait or equivalent FLOOD_WAIT-aware retry", "type": "required"},
|
||||||
|
{"text": "has_per_message_delay: Includes a sleep/delay between individual sends", "type": "recommended"},
|
||||||
|
{"text": "handles_per_user_errors: One failed send does not abort the entire broadcast loop", "type": "required"},
|
||||||
|
{"text": "has_api_endpoint: Creates an API route or server action to trigger the broadcast from the web app", "type": "required"}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-1-with_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "uses_sequential_queue", "passed": true, "evidence": "Uses a for...of loop iterating links one at a time. No Promise.all. Comment: '~20 msgs/sec — well under the 30 msgs/sec global bot limit'"},
|
||||||
|
{"text": "uses_flood_wait_retry", "passed": true, "evidence": "Each send wrapped in withFloodWait(): 'await withFloodWait(() => sendTextMessage(link.telegramUserId, data.message, \"textParseModeHTML\"))'. Created a new bot/src/util/flood-wait.ts with full extractFloodWaitSeconds + retry logic."},
|
||||||
|
{"text": "has_per_message_delay", "passed": true, "evidence": "await sleep(50) after each send — explicitly commented as '~20 msgs/sec'"},
|
||||||
|
{"text": "handles_per_user_errors", "passed": true, "evidence": "try/catch around each send with 'failed++' counter, continues to next user"},
|
||||||
|
{"text": "has_api_endpoint", "passed": true, "evidence": "Created src/app/api/telegram/bot/broadcast/route.ts with admin auth, 4096-char validation, pg_notify('bot_broadcast')"}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,340 @@
|
|||||||
|
# /broadcast Implementation Plan
|
||||||
|
|
||||||
|
## Approach
|
||||||
|
|
||||||
|
The broadcast feature has two parts:
|
||||||
|
|
||||||
|
1. **API endpoint** (`POST /api/telegram/bot/broadcast`) — called from the web app by an admin. It accepts a `message` string, looks up ALL `TelegramLink` records in the database, and triggers the bot to send the message to each linked user via `pg_notify`.
|
||||||
|
2. **Bot-side handler** — a new `bot_broadcast` pg_notify channel listener in `send-listener.ts` that receives the broadcast payload and sequentially sends the text message to every linked Telegram user.
|
||||||
|
|
||||||
|
The `/broadcast` bot command itself is not a user-facing Telegram command (regular users should not be able to trigger it). It is triggered exclusively through the admin API endpoint.
|
||||||
|
|
||||||
|
## Skill Patterns Applied
|
||||||
|
|
||||||
|
- **Sequential Send Queue** (from skill): Never fire concurrent sends to multiple users. The broadcast iterates users sequentially with `await sleep(50)` between sends (~20 msgs/sec, well under the 30 msgs/sec global bot limit).
|
||||||
|
- **FLOOD_WAIT handling** (from skill): Every `sendTextMessage` call is wrapped with `withFloodWait()` which extracts the wait duration from errors and retries with jitter.
|
||||||
|
- **Anti-pattern avoidance**: No `Promise.all(users.map(...))` — that would instantly hit the 30 msg/sec global limit.
|
||||||
|
- **Message text length limit**: The API endpoint validates that the broadcast message does not exceed 4,096 characters (Telegram's limit from the skill).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File 1: `bot/src/util/flood-wait.ts` (NEW)
|
||||||
|
|
||||||
|
Extracted from the skill's recommended FLOOD_WAIT pattern so it can be reused by both existing send logic and the new broadcast logic.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { childLogger } from "./logger.js";
|
||||||
|
|
||||||
|
const log = childLogger("flood-wait");
|
||||||
|
|
||||||
|
function sleep(ms: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract the mandatory wait duration (in seconds) from a Telegram
|
||||||
|
* FLOOD_WAIT error. Returns null when the error is not rate-limit related.
|
||||||
|
*/
|
||||||
|
export function extractFloodWaitSeconds(err: unknown): number | null {
|
||||||
|
const message = err instanceof Error ? err.message : String(err);
|
||||||
|
|
||||||
|
// Pattern 1: FLOOD_WAIT_30
|
||||||
|
const flood = message.match(/FLOOD_WAIT_(\d+)/i);
|
||||||
|
if (flood) return parseInt(flood[1], 10);
|
||||||
|
|
||||||
|
// Pattern 2: "retry after 30"
|
||||||
|
const retry = message.match(/retry after (\d+)/i);
|
||||||
|
if (retry) return parseInt(retry[1], 10);
|
||||||
|
|
||||||
|
// Pattern 3: HTTP 429 without explicit seconds
|
||||||
|
if (String((err as any)?.code) === "429") return 30;
|
||||||
|
|
||||||
|
return null; // Not a rate limit error
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wrap any async Telegram operation with automatic FLOOD_WAIT retry.
|
||||||
|
* Adds random jitter (1-5 s) to prevent thundering-herd retries.
|
||||||
|
*/
|
||||||
|
export async function withFloodWait<T>(
|
||||||
|
fn: () => Promise<T>,
|
||||||
|
maxRetries = 5
|
||||||
|
): Promise<T> {
|
||||||
|
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||||
|
try {
|
||||||
|
return await fn();
|
||||||
|
} catch (err) {
|
||||||
|
const wait = extractFloodWaitSeconds(err);
|
||||||
|
if (wait === null || attempt >= maxRetries) throw err;
|
||||||
|
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
log.warn(
|
||||||
|
{ wait, attempt, jitter: Math.round(jitter) },
|
||||||
|
"FLOOD_WAIT received — backing off"
|
||||||
|
);
|
||||||
|
await sleep(wait * 1000 + jitter);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new Error("Unreachable");
|
||||||
|
}
|
||||||
|
|
||||||
|
export { sleep };
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File 2: `bot/src/db/queries.ts` (MODIFIED — add one function)
|
||||||
|
|
||||||
|
Add this function at the bottom of the existing file, after the `getGlobalDestinationChannel` function:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ── Broadcast ──
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch ALL TelegramLink records (users who linked their Telegram account).
|
||||||
|
* Used by the broadcast feature to send a message to every linked user.
|
||||||
|
*/
|
||||||
|
export async function getAllTelegramLinks() {
|
||||||
|
return db.telegramLink.findMany({
|
||||||
|
select: {
|
||||||
|
telegramUserId: true,
|
||||||
|
telegramName: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File 3: `bot/src/send-listener.ts` (MODIFIED — add broadcast channel)
|
||||||
|
|
||||||
|
Add the `bot_broadcast` channel to the existing listener. The changes are:
|
||||||
|
|
||||||
|
### 3a. Add import for the new query and flood-wait utility
|
||||||
|
|
||||||
|
At the top of the file, update the imports:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
getPendingSendRequest,
|
||||||
|
updateSendRequest,
|
||||||
|
findMatchingSubscriptions,
|
||||||
|
getGlobalDestinationChannel,
|
||||||
|
getAllTelegramLinks, // ← NEW
|
||||||
|
} from "./db/queries.js";
|
||||||
|
import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
|
||||||
|
import { withFloodWait, sleep } from "./util/flood-wait.js"; // ← NEW
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3b. Subscribe to the new pg_notify channel
|
||||||
|
|
||||||
|
Inside `connectListener()`, after the existing LISTEN statements, add:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
await pgClient.query("LISTEN bot_broadcast");
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3c. Add the notification handler
|
||||||
|
|
||||||
|
Inside the `pgClient.on("notification", ...)` callback, add the new branch:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
pgClient.on("notification", (msg) => {
|
||||||
|
if (msg.channel === "bot_send" && msg.payload) {
|
||||||
|
handleBotSend(msg.payload);
|
||||||
|
} else if (msg.channel === "new_package" && msg.payload) {
|
||||||
|
handleNewPackage(msg.payload);
|
||||||
|
} else if (msg.channel === "bot_broadcast" && msg.payload) { // ← NEW
|
||||||
|
handleBroadcast(msg.payload);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Update the log message:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
log.info("Send listener started (bot_send, new_package, bot_broadcast)");
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3d. Add the broadcast handler function
|
||||||
|
|
||||||
|
Add this at the bottom of the file (before the existing `escapeHtml` helper):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ── bot_broadcast handler ──
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a broadcast request. The payload is a JSON string:
|
||||||
|
* { message: string }
|
||||||
|
*
|
||||||
|
* Sends the message to every user who has a TelegramLink.
|
||||||
|
* Uses a sequential loop with a 50 ms delay between sends (~20 msgs/sec)
|
||||||
|
* to stay well under Telegram's 30 msgs/sec global bot limit.
|
||||||
|
* Each send is wrapped with withFloodWait to automatically retry on
|
||||||
|
* rate-limit errors.
|
||||||
|
*/
|
||||||
|
async function handleBroadcast(payload: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(payload) as { message: string };
|
||||||
|
if (!data.message) {
|
||||||
|
log.warn("Broadcast payload missing message — ignoring");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const links = await getAllTelegramLinks();
|
||||||
|
if (links.length === 0) {
|
||||||
|
log.info("Broadcast requested but no linked users found");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ recipientCount: links.length },
|
||||||
|
"Starting broadcast to all linked users"
|
||||||
|
);
|
||||||
|
|
||||||
|
let sent = 0;
|
||||||
|
let failed = 0;
|
||||||
|
|
||||||
|
for (const link of links) {
|
||||||
|
try {
|
||||||
|
await withFloodWait(() =>
|
||||||
|
sendTextMessage(link.telegramUserId, data.message, "textParseModeHTML")
|
||||||
|
);
|
||||||
|
sent++;
|
||||||
|
} catch (err) {
|
||||||
|
failed++;
|
||||||
|
log.warn(
|
||||||
|
{ err, telegramUserId: link.telegramUserId.toString() },
|
||||||
|
"Broadcast send failed for user"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// ~20 msgs/sec — well under the 30 msgs/sec global bot limit
|
||||||
|
await sleep(50);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info({ sent, failed, total: links.length }, "Broadcast completed");
|
||||||
|
} catch (err) {
|
||||||
|
log.error({ err, payload }, "Failed to process broadcast");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File 4: `src/app/api/telegram/bot/broadcast/route.ts` (NEW)
|
||||||
|
|
||||||
|
This is the Next.js API endpoint that the admin triggers from the web app.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { NextResponse } from "next/server";
|
||||||
|
import { auth } from "@/lib/auth";
|
||||||
|
import { prisma } from "@/lib/prisma";
|
||||||
|
|
||||||
|
export const dynamic = "force-dynamic";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POST /api/telegram/bot/broadcast
|
||||||
|
* Send a text message to ALL users who have a linked Telegram account.
|
||||||
|
*
|
||||||
|
* Body: { message: string }
|
||||||
|
*
|
||||||
|
* Admin-only. The actual sending is done by the bot process — this endpoint
|
||||||
|
* simply validates input and fires a pg_notify('bot_broadcast', ...) signal.
|
||||||
|
*/
|
||||||
|
export async function POST(request: Request) {
|
||||||
|
const session = await auth();
|
||||||
|
if (!session?.user?.id) {
|
||||||
|
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only admins can broadcast
|
||||||
|
if (session.user.role !== "ADMIN") {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "Only admins can send broadcast messages" },
|
||||||
|
{ status: 403 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let body: { message?: string };
|
||||||
|
try {
|
||||||
|
body = await request.json();
|
||||||
|
} catch {
|
||||||
|
return NextResponse.json({ error: "Invalid JSON body" }, { status: 400 });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body.message || typeof body.message !== "string") {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "message is required and must be a string" },
|
||||||
|
{ status: 400 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const trimmed = body.message.trim();
|
||||||
|
if (trimmed.length === 0) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "message must not be empty" },
|
||||||
|
{ status: 400 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Telegram message text limit is 4096 characters
|
||||||
|
if (trimmed.length > 4096) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "message exceeds Telegram's 4096-character limit" },
|
||||||
|
{ status: 400 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count how many users will receive the broadcast
|
||||||
|
const recipientCount = await prisma.telegramLink.count();
|
||||||
|
if (recipientCount === 0) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "No users have linked Telegram accounts" },
|
||||||
|
{ status: 400 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notify the bot via pg_notify
|
||||||
|
const payload = JSON.stringify({ message: trimmed });
|
||||||
|
try {
|
||||||
|
await prisma.$queryRawUnsafe(
|
||||||
|
`SELECT pg_notify('bot_broadcast', $1)`,
|
||||||
|
payload
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: "Failed to queue broadcast — bot notification channel unavailable" },
|
||||||
|
{ status: 500 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
status: "QUEUED",
|
||||||
|
recipientCount,
|
||||||
|
message: `Broadcast queued for ${recipientCount} user(s)`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary of Changes
|
||||||
|
|
||||||
|
| File | Action | Description |
|
||||||
|
|------|--------|-------------|
|
||||||
|
| `bot/src/util/flood-wait.ts` | NEW | Reusable FLOOD_WAIT retry + sleep utilities (from skill) |
|
||||||
|
| `bot/src/db/queries.ts` | MODIFIED | Add `getAllTelegramLinks()` query |
|
||||||
|
| `bot/src/send-listener.ts` | MODIFIED | Subscribe to `bot_broadcast` channel; add `handleBroadcast()` with sequential send loop |
|
||||||
|
| `src/app/api/telegram/bot/broadcast/route.ts` | NEW | Admin-only API endpoint that validates input and fires `pg_notify` |
|
||||||
|
|
||||||
|
## Skill Patterns Used
|
||||||
|
|
||||||
|
1. **Sequential Send Queue**: The broadcast loop sends messages one at a time with `await sleep(50)` between each, keeping throughput at ~20 msgs/sec — safely under the 30 msgs/sec bot global limit documented in the skill.
|
||||||
|
|
||||||
|
2. **FLOOD_WAIT handling with `withFloodWait()`**: Every `sendTextMessage` call is wrapped so that if Telegram returns a `FLOOD_WAIT_X` error, the bot automatically waits the required duration plus random jitter (1-5s) before retrying. This prevents the entire broadcast from failing due to a temporary rate limit.
|
||||||
|
|
||||||
|
3. **Message text length validation**: The API endpoint rejects messages over 4,096 characters, which is the `message_text_length_max` limit documented in the skill.
|
||||||
|
|
||||||
|
4. **No concurrent sends (anti-pattern avoidance)**: The skill explicitly warns against `Promise.all(users.map(...))` for bulk sends. The implementation uses a sequential `for...of` loop instead.
|
||||||
|
|
||||||
|
5. **pg_notify pattern**: Follows the existing project convention (seen in `bot_send` and `new_package`) where the web app fires a `pg_notify` and the bot's `send-listener.ts` picks it up — keeping the two processes decoupled.
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 35365,
|
||||||
|
"duration_ms": 107616,
|
||||||
|
"total_duration_seconds": 107.6
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-1-without_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "uses_sequential_queue", "passed": true, "evidence": "Uses for...of loop over links, sends one at a time. No Promise.all."},
|
||||||
|
{"text": "uses_flood_wait_retry", "passed": false, "evidence": "No withFloodWait wrapper. Bare sendTextMessage calls in both send-listener handleBroadcast and commands handleBroadcastCommand. No flood-wait.ts utility created."},
|
||||||
|
{"text": "has_per_message_delay", "passed": false, "evidence": "No sleep/delay between sends in either the send-listener handler or the command handler."},
|
||||||
|
{"text": "handles_per_user_errors", "passed": true, "evidence": "try/catch per user with sent++/failed++ counters, continues to next user."},
|
||||||
|
{"text": "has_api_endpoint", "passed": true, "evidence": "Created src/app/api/telegram/bot/broadcast/route.ts with admin auth, 4096-char limit, pg_notify."}
|
||||||
|
]
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 69214,
|
||||||
|
"duration_ms": 200249,
|
||||||
|
"total_duration_seconds": 200.2
|
||||||
|
}
|
||||||
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"eval_id": 2,
|
||||||
|
"eval_name": "flood-wait-during-scan",
|
||||||
|
"prompt": "The worker keeps crashing with 'FLOOD_WAIT_35' errors when scanning a source channel that has about 10,000 messages. It happens during the getChannelMessages pagination loop. How do I fix this?",
|
||||||
|
"assertions": [
|
||||||
|
{"text": "identifies_retry_as_fix: Recommends wrapping pagination calls in FLOOD_WAIT-aware retry logic", "type": "required"},
|
||||||
|
{"text": "recommends_pagination_delay: Mentions adding or increasing sleep between pages", "type": "required"},
|
||||||
|
{"text": "respects_retry_after: Fix extracts and uses the actual wait duration from the error, not a hardcoded value", "type": "required"},
|
||||||
|
{"text": "does_not_reduce_frequency_as_primary_fix: Does not suggest scanning less often as the main solution", "type": "required"}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-2-with_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "identifies_retry_as_fix", "passed": true, "evidence": "Identifies that withFloodWait already exists but the pagination loop needs its own adaptive backoff layer. Adds extractFloodWaitSeconds at pagination level."},
|
||||||
|
{"text": "recommends_pagination_delay", "passed": true, "evidence": "Keeps existing sleep(config.apiDelayMs) and makes it adaptive: doubles on FLOOD_WAIT, relaxes by 0.8x on success."},
|
||||||
|
{"text": "respects_retry_after", "passed": true, "evidence": "Extracts actual wait duration: 'await sleep(waitSec * 1000 + jitter)' using extractFloodWaitSeconds. Explicitly cites skill rule: 'Always respect the wait duration.'"},
|
||||||
|
{"text": "does_not_reduce_frequency_as_primary_fix", "passed": true, "evidence": "Primary fix is adaptive backoff at pagination level, not reducing scan frequency. Does not suggest lowering WORKER_INTERVAL_MINUTES."}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,378 @@
|
|||||||
|
# Diagnosis: FLOOD_WAIT_35 During Channel Scanning
|
||||||
|
|
||||||
|
## Root Cause
|
||||||
|
|
||||||
|
The `getChannelMessages` function in `worker/src/tdlib/download.ts` (line 146) paginates through channel history using `getChatHistory` with a fixed 1-second delay between pages (`config.apiDelayMs = 1000`). While each individual `getChatHistory` call is wrapped in `invokeWithTimeout` which delegates to `withFloodWait` for retry logic, the pagination loop itself has a fundamental problem for large channels:
|
||||||
|
|
||||||
|
**A 10,000-message channel at 100 messages/page = 100 consecutive API calls.** With only a 1-second delay between pages, this sustained burst of ~1 req/sec for 100+ seconds eventually triggers Telegram's dynamic rate limiter. Once `FLOOD_WAIT_35` fires, `withFloodWait` does correctly sleep for 35+ seconds and retry that single call — but the loop immediately resumes its 1-second cadence, triggering another FLOOD_WAIT shortly after. After 5 retries (`config.maxRetries = 5`), the function throws and the worker crashes.
|
||||||
|
|
||||||
|
The same issue exists in `getTopicMessages` in `worker/src/tdlib/topics.ts` (line 170), which uses identical pagination logic.
|
||||||
|
|
||||||
|
## Why the Current Code Is Almost Right But Not Quite
|
||||||
|
|
||||||
|
The codebase already has the right building blocks:
|
||||||
|
|
||||||
|
1. `withFloodWait` in `worker/src/util/retry.ts` correctly extracts wait durations and adds jitter — this matches the skill's recommended FLOOD_WAIT handler pattern exactly.
|
||||||
|
2. `invokeWithTimeout` in `worker/src/tdlib/download.ts` wraps every TDLib call through `withFloodWait`.
|
||||||
|
3. There is a 1-second inter-page delay (`config.apiDelayMs`).
|
||||||
|
|
||||||
|
**The gap:** After a FLOOD_WAIT recovery, the pagination loop does not back off its inter-page delay. It goes right back to 1-second spacing, which is what triggers repeated FLOOD_WAITs until max retries is exhausted.
|
||||||
|
|
||||||
|
## The Fix
|
||||||
|
|
||||||
|
Apply **adaptive backoff** to the pagination delay: when a FLOOD_WAIT is encountered during scanning, increase the inter-page delay for subsequent pages. This prevents the "recover then immediately re-trigger" cycle.
|
||||||
|
|
||||||
|
### Fix 1: Add adaptive delay to `getChannelMessages` (`worker/src/tdlib/download.ts`)
|
||||||
|
|
||||||
|
Replace lines 146-250 with:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
/**
|
||||||
|
* Fetch messages from a channel, stopping once we've scanned past the
|
||||||
|
* last-processed boundary (with one page of lookback for multipart safety).
|
||||||
|
* Collects both archive attachments AND photo messages (for preview matching).
|
||||||
|
* Returns messages in chronological order (oldest first).
|
||||||
|
*
|
||||||
|
* When `lastProcessedMessageId` is null (first run), scans everything.
|
||||||
|
* The worker applies a post-grouping filter to skip fully-processed sets,
|
||||||
|
* and keeps `packageExistsBySourceMessage` as a safety net.
|
||||||
|
*
|
||||||
|
* Safety features:
|
||||||
|
* - Max page limit to prevent infinite loops
|
||||||
|
* - Stuck detection: breaks if from_message_id stops advancing
|
||||||
|
* - Timeout on each TDLib API call
|
||||||
|
* - Adaptive delay: backs off when FLOOD_WAIT is encountered
|
||||||
|
*/
|
||||||
|
export async function getChannelMessages(
|
||||||
|
client: Client,
|
||||||
|
chatId: bigint,
|
||||||
|
lastProcessedMessageId?: bigint | null,
|
||||||
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
|
): Promise<ChannelScanResult> {
|
||||||
|
const archives: TelegramMessage[] = [];
|
||||||
|
const photos: TelegramPhoto[] = [];
|
||||||
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
let currentDelay = config.apiDelayMs; // starts at 1000ms, adapts on FLOOD_WAIT
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for channel scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
|
let result: { messages: TdMessage[] };
|
||||||
|
try {
|
||||||
|
result = await invokeWithTimeout<{ messages: TdMessage[] }>(client, {
|
||||||
|
_: "getChatHistory",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
from_message_id: currentFromId,
|
||||||
|
offset: 0,
|
||||||
|
limit: Math.min(limit, 100),
|
||||||
|
only_local: false,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
// If invokeWithTimeout exhausted its retries on FLOOD_WAIT, check if
|
||||||
|
// we can recover at the pagination level by increasing the delay further.
|
||||||
|
const waitSec = extractFloodWaitSeconds(err);
|
||||||
|
if (waitSec !== null) {
|
||||||
|
// The retry wrapper already slept; bump the inter-page delay to
|
||||||
|
// prevent the next page from immediately re-triggering.
|
||||||
|
currentDelay = Math.min(currentDelay * 2, 30_000);
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), newDelay: currentDelay, totalScanned },
|
||||||
|
"FLOOD_WAIT persisted after retries — increasing inter-page delay and retrying"
|
||||||
|
);
|
||||||
|
// Sleep the full flood wait duration + jitter before continuing
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
await sleep(waitSec * 1000 + jitter);
|
||||||
|
continue; // retry this page with the new delay
|
||||||
|
}
|
||||||
|
throw err; // non-rate-limit error — propagate
|
||||||
|
}
|
||||||
|
|
||||||
|
// Successful call — gradually relax the delay back toward baseline
|
||||||
|
if (currentDelay > config.apiDelayMs) {
|
||||||
|
currentDelay = Math.max(config.apiDelayMs, Math.floor(currentDelay * 0.8));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
|
for (const msg of result.messages) {
|
||||||
|
// Check for archive documents
|
||||||
|
const doc = msg.content?.document;
|
||||||
|
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||||
|
archives.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
fileName: doc.file_name,
|
||||||
|
fileId: String(doc.document.id),
|
||||||
|
fileSize: BigInt(doc.document.size),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for photo messages (potential previews)
|
||||||
|
const photo = msg.content?.photo;
|
||||||
|
const caption = msg.content?.caption?.text ?? "";
|
||||||
|
if (photo?.sizes && photo.sizes.length > 0) {
|
||||||
|
const smallest = photo.sizes[0];
|
||||||
|
photos.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
caption,
|
||||||
|
fileId: String(smallest.photo.id),
|
||||||
|
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Report scanning progress after each page
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
// Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), currentFromId, totalScanned },
|
||||||
|
"Pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
||||||
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
|
// Rate limit delay — adaptive based on FLOOD_WAIT history
|
||||||
|
await sleep(currentDelay);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
|
"Channel scan complete"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Reverse to chronological order (oldest first) so worker processes old→new
|
||||||
|
return {
|
||||||
|
archives: archives.reverse(),
|
||||||
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You will also need to add the import for `extractFloodWaitSeconds` at the top of `download.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fix 2: Apply the same pattern to `getTopicMessages` (`worker/src/tdlib/topics.ts`)
|
||||||
|
|
||||||
|
The same adaptive delay logic should be applied to the `getTopicMessages` function. Add the import:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { extractFloodWaitSeconds } from "../util/retry.js";
|
||||||
|
```
|
||||||
|
|
||||||
|
Then apply the same changes to the pagination loop (the structure is identical):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export async function getTopicMessages(
|
||||||
|
client: Client,
|
||||||
|
chatId: bigint,
|
||||||
|
topicId: bigint,
|
||||||
|
lastProcessedMessageId?: bigint | null,
|
||||||
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
|
): Promise<ChannelScanResult> {
|
||||||
|
const archives: TelegramMessage[] = [];
|
||||||
|
const photos: TelegramPhoto[] = [];
|
||||||
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
let currentDelay = config.apiDelayMs;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for topic scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
|
let result: {
|
||||||
|
messages?: {
|
||||||
|
id: number;
|
||||||
|
date: number;
|
||||||
|
content: {
|
||||||
|
_: string;
|
||||||
|
document?: {
|
||||||
|
file_name?: string;
|
||||||
|
document?: {
|
||||||
|
id: number;
|
||||||
|
size: number;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
photo?: {
|
||||||
|
sizes?: {
|
||||||
|
type: string;
|
||||||
|
photo: { id: number; size: number; expected_size: number };
|
||||||
|
width: number;
|
||||||
|
height: number;
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
caption?: { text?: string };
|
||||||
|
};
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
result = await invokeWithTimeout(client, {
|
||||||
|
_: "searchChatMessages",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
query: "",
|
||||||
|
message_thread_id: Number(topicId),
|
||||||
|
from_message_id: currentFromId,
|
||||||
|
offset: 0,
|
||||||
|
limit: Math.min(limit, 100),
|
||||||
|
filter: null,
|
||||||
|
sender_id: null,
|
||||||
|
saved_messages_topic_id: 0,
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
const waitSec = extractFloodWaitSeconds(err);
|
||||||
|
if (waitSec !== null) {
|
||||||
|
currentDelay = Math.min(currentDelay * 2, 30_000);
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), newDelay: currentDelay, totalScanned },
|
||||||
|
"FLOOD_WAIT persisted after retries — increasing inter-page delay and retrying"
|
||||||
|
);
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
await sleep(waitSec * 1000 + jitter);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Successful call — gradually relax the delay back toward baseline
|
||||||
|
if (currentDelay > config.apiDelayMs) {
|
||||||
|
currentDelay = Math.max(config.apiDelayMs, Math.floor(currentDelay * 0.8));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
|
for (const msg of result.messages) {
|
||||||
|
const doc = msg.content?.document;
|
||||||
|
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||||
|
archives.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
fileName: doc.file_name,
|
||||||
|
fileId: String(doc.document.id),
|
||||||
|
fileSize: BigInt(doc.document.size),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const photo = msg.content?.photo;
|
||||||
|
const caption = msg.content?.caption?.text ?? "";
|
||||||
|
if (photo?.sizes && photo.sizes.length > 0) {
|
||||||
|
const smallest = photo.sizes[0];
|
||||||
|
photos.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
caption,
|
||||||
|
fileId: String(smallest.photo.id),
|
||||||
|
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), currentFromId, totalScanned },
|
||||||
|
"Topic pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
|
await sleep(currentDelay);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
|
"Topic scan complete"
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
archives: archives.reverse(),
|
||||||
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Skill Patterns Applied
|
||||||
|
|
||||||
|
### 1. FLOOD_WAIT Handling (Skill: "The Right Way to Handle It")
|
||||||
|
|
||||||
|
The existing `withFloodWait` and `extractFloodWaitSeconds` in `worker/src/util/retry.ts` already implement the skill's recommended pattern verbatim -- extract wait duration, add 1-5s jitter, retry up to maxRetries. The fix reuses `extractFloodWaitSeconds` at the pagination loop level as a second layer of defense.
|
||||||
|
|
||||||
|
### 2. Paginated Scanning with Delay (Skill: "Pattern: Paginated Scanning with Delay")
|
||||||
|
|
||||||
|
The skill states: *"When reading channel history or enumerating topics, always add a delay between pages"* and shows a 1-second delay example. The existing code has this (`config.apiDelayMs = 1000`). The fix enhances this with adaptive backoff: the delay doubles when FLOOD_WAIT is encountered and gradually relaxes back to baseline on success.
|
||||||
|
|
||||||
|
### 3. Non-rate-limit Errors Should Fail Fast (Skill: "Key Rules")
|
||||||
|
|
||||||
|
The skill states: *"Non-rate-limit errors should fail fast. Only retry on FLOOD_WAIT, not on other errors."* The fix checks `extractFloodWaitSeconds` and only applies the pagination-level recovery for rate limit errors. All other errors propagate immediately via `throw err`.
|
||||||
|
|
||||||
|
### 4. Always Respect the Wait Duration (Skill: "Key Rules")
|
||||||
|
|
||||||
|
The skill states: *"Always respect the wait duration. Never retry before retry_after expires."* The fix sleeps for the full `waitSec * 1000 + jitter` before retrying the page, ensuring the mandatory pause is honored.
|
||||||
|
|
||||||
|
### 5. Add Jitter (Skill: "Key Rules")
|
||||||
|
|
||||||
|
The skill states: *"Add jitter. Without it, multiple clients retry simultaneously and trigger another FLOOD_WAIT."* Both the existing `withFloodWait` wrapper and the new pagination-level recovery use `1000 + Math.random() * 4000` jitter, consistent with the skill's recommendation.
|
||||||
|
|
||||||
|
## Files Affected
|
||||||
|
|
||||||
|
- `worker/src/tdlib/download.ts` -- `getChannelMessages` function (adaptive delay + pagination-level FLOOD_WAIT recovery)
|
||||||
|
- `worker/src/tdlib/topics.ts` -- `getTopicMessages` function (same fix)
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
The crash happens because the pagination loop fires 100+ consecutive `getChatHistory` calls at 1-second intervals. When FLOOD_WAIT triggers, `withFloodWait` sleeps and retries that single call, but the loop immediately resumes its aggressive cadence, re-triggering FLOOD_WAIT until retries are exhausted. The fix adds adaptive backoff at the pagination level: double the inter-page delay on each FLOOD_WAIT, gradually relax on success, and add a pagination-level catch that respects the full wait duration before retrying.
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 63079,
|
||||||
|
"duration_ms": 140851,
|
||||||
|
"total_duration_seconds": 140.9
|
||||||
|
}
|
||||||
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-2-without_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "identifies_retry_as_fix", "passed": true, "evidence": "Correctly identifies that invokeWithTimeout already wraps calls in withFloodWait. Adds adaptive backoff at pagination level."},
|
||||||
|
{"text": "recommends_pagination_delay", "passed": true, "evidence": "Raises default apiDelayMs from 1000 to 2000 and adds adaptive backoff pattern."},
|
||||||
|
{"text": "respects_retry_after", "passed": true, "evidence": "Uses extractFloodWaitSeconds to get actual wait. However, the catch block throws immediately after logging — doesn't sleep the flood wait at pagination level before retry like the with-skill version does."},
|
||||||
|
{"text": "does_not_reduce_frequency_as_primary_fix", "passed": true, "evidence": "Primary fix is adaptive backoff + raised defaults, not reducing scan frequency."}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,406 @@
|
|||||||
|
# Diagnosis: FLOOD_WAIT_35 Crashes During Channel Message Pagination
|
||||||
|
|
||||||
|
## Root Cause
|
||||||
|
|
||||||
|
The `getChannelMessages` function in `worker/src/tdlib/download.ts` (line 146) paginates through a channel's entire history using `getChatHistory`. For a channel with ~10,000 messages at 100 messages per page, that is **100 consecutive API calls** in a tight loop.
|
||||||
|
|
||||||
|
The current rate-limit mitigation has two problems:
|
||||||
|
|
||||||
|
1. **The inter-page delay is too low.** `config.apiDelayMs` is hardcoded to **1000ms** (1 second) in `worker/src/util/config.ts:15`. Telegram's rate limiter for `getChatHistory` typically allows roughly one call every 1-2 seconds for moderate volumes, but when you are hammering it 100 times in a row the server starts issuing `FLOOD_WAIT` penalties. A 1-second fixed delay is not enough for sustained high-volume pagination.
|
||||||
|
|
||||||
|
2. **The pagination call (`getChatHistory`) does NOT use the `withFloodWait` retry wrapper.** Look at `download.ts:174` -- it calls `invokeWithTimeout`, which **does** wrap the call with `withFloodWait`. So the retry logic IS present. However, the retry wrapper in `worker/src/util/retry.ts` has `maxRetries` set to **5** (from config). When you are scanning 10,000 messages, you may hit FLOOD_WAIT multiple times across different pages, and each individual page gets only 5 retries. If Telegram escalates the wait time (e.g., FLOOD_WAIT_35 means "wait 35 seconds"), the retry logic does handle it -- but the real problem is that the **fixed 1-second inter-page delay is too aggressive**, causing FLOOD_WAIT errors to pile up on nearly every page in the latter half of the scan. Eventually one page exhausts its 5 retries and the entire scan crashes.
|
||||||
|
|
||||||
|
3. **No adaptive/exponential backoff between pages.** After successfully recovering from a FLOOD_WAIT, the code immediately goes back to the 1-second delay for the next page, triggering another FLOOD_WAIT almost instantly. There is no mechanism to slow down after being rate-limited.
|
||||||
|
|
||||||
|
## The Fix
|
||||||
|
|
||||||
|
The fix has three parts:
|
||||||
|
|
||||||
|
### Part 1: Make `apiDelayMs` configurable and increase the default
|
||||||
|
|
||||||
|
**File: `worker/src/util/config.ts`**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const config = {
|
||||||
|
databaseUrl: process.env.DATABASE_URL ?? "",
|
||||||
|
workerIntervalMinutes: parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10),
|
||||||
|
tempDir: process.env.WORKER_TEMP_DIR ?? "/tmp/zips",
|
||||||
|
tdlibStateDir: process.env.TDLIB_STATE_DIR ?? "/data/tdlib",
|
||||||
|
maxZipSizeMB: parseInt(process.env.WORKER_MAX_ZIP_SIZE_MB ?? "4096", 10),
|
||||||
|
logLevel: (process.env.LOG_LEVEL ?? "info") as "debug" | "info" | "warn" | "error",
|
||||||
|
telegramApiId: parseInt(process.env.TELEGRAM_API_ID ?? "0", 10),
|
||||||
|
telegramApiHash: process.env.TELEGRAM_API_HASH ?? "",
|
||||||
|
/** Maximum jitter added to scheduler interval (in minutes) */
|
||||||
|
jitterMinutes: 5,
|
||||||
|
/** Maximum time span for multipart archive parts (in hours). 0 = no limit. */
|
||||||
|
multipartTimeoutHours: parseInt(process.env.MULTIPART_TIMEOUT_HOURS ?? "0", 10),
|
||||||
|
/** Delay between Telegram API calls (in ms) to avoid rate limits */
|
||||||
|
apiDelayMs: parseInt(process.env.WORKER_API_DELAY_MS ?? "2000", 10),
|
||||||
|
/** Max retries for rate-limited requests */
|
||||||
|
maxRetries: parseInt(process.env.WORKER_MAX_RETRIES ?? "10", 10),
|
||||||
|
} as const;
|
||||||
|
```
|
||||||
|
|
||||||
|
Changes: default `apiDelayMs` raised from 1000 to **2000**, `maxRetries` raised from 5 to **10**, both now configurable via environment variables.
|
||||||
|
|
||||||
|
### Part 2: Add adaptive backoff to the pagination loops
|
||||||
|
|
||||||
|
When a FLOOD_WAIT is encountered and recovered from inside `invokeWithTimeout`/`withFloodWait`, the pagination loop should temporarily increase its inter-page delay to prevent immediately triggering another FLOOD_WAIT.
|
||||||
|
|
||||||
|
**File: `worker/src/tdlib/download.ts`** -- replace the `getChannelMessages` function:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
/**
|
||||||
|
* Fetch messages from a channel, stopping once we've scanned past the
|
||||||
|
* last-processed boundary (with one page of lookback for multipart safety).
|
||||||
|
* Collects both archive attachments AND photo messages (for preview matching).
|
||||||
|
* Returns messages in chronological order (oldest first).
|
||||||
|
*
|
||||||
|
* When `lastProcessedMessageId` is null (first run), scans everything.
|
||||||
|
* The worker applies a post-grouping filter to skip fully-processed sets,
|
||||||
|
* and keeps `packageExistsBySourceMessage` as a safety net.
|
||||||
|
*
|
||||||
|
* Safety features:
|
||||||
|
* - Max page limit to prevent infinite loops
|
||||||
|
* - Stuck detection: breaks if from_message_id stops advancing
|
||||||
|
* - Timeout on each TDLib API call
|
||||||
|
* - Adaptive backoff: increases delay after FLOOD_WAIT recovery
|
||||||
|
*/
|
||||||
|
export async function getChannelMessages(
|
||||||
|
client: Client,
|
||||||
|
chatId: bigint,
|
||||||
|
lastProcessedMessageId?: bigint | null,
|
||||||
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
|
): Promise<ChannelScanResult> {
|
||||||
|
const archives: TelegramMessage[] = [];
|
||||||
|
const photos: TelegramPhoto[] = [];
|
||||||
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
|
// Adaptive delay: starts at config value, increases after FLOOD_WAIT recovery
|
||||||
|
let currentDelayMs = config.apiDelayMs;
|
||||||
|
const MAX_DELAY_MS = 30_000; // Cap at 30 seconds between pages
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for channel scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
|
let result: { messages: TdMessage[] };
|
||||||
|
try {
|
||||||
|
result = await invokeWithTimeout<{ messages: TdMessage[] }>(client, {
|
||||||
|
_: "getChatHistory",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
from_message_id: currentFromId,
|
||||||
|
offset: 0,
|
||||||
|
limit: Math.min(limit, 100),
|
||||||
|
only_local: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Successful call without rate limiting — gradually reduce delay back
|
||||||
|
// toward the base value (but never below it)
|
||||||
|
if (currentDelayMs > config.apiDelayMs) {
|
||||||
|
currentDelayMs = Math.max(
|
||||||
|
config.apiDelayMs,
|
||||||
|
Math.floor(currentDelayMs * 0.8)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
// If withFloodWait inside invokeWithTimeout exhausted retries on a
|
||||||
|
// FLOOD_WAIT error, increase the inter-page delay significantly
|
||||||
|
// before re-throwing so the caller (or a future retry of the whole
|
||||||
|
// scan) starts slower.
|
||||||
|
const floodSeconds = extractFloodWaitSeconds(err);
|
||||||
|
if (floodSeconds !== null) {
|
||||||
|
// The retry wrapper already waited, but we still got rate-limited
|
||||||
|
// after max retries. Double the inter-page delay for next time.
|
||||||
|
currentDelayMs = Math.min(MAX_DELAY_MS, currentDelayMs * 2);
|
||||||
|
log.warn(
|
||||||
|
{
|
||||||
|
chatId: chatId.toString(),
|
||||||
|
pageCount,
|
||||||
|
totalScanned,
|
||||||
|
newDelayMs: currentDelayMs,
|
||||||
|
},
|
||||||
|
"FLOOD_WAIT exhausted retries — increased inter-page delay"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
|
for (const msg of result.messages) {
|
||||||
|
// Check for archive documents
|
||||||
|
const doc = msg.content?.document;
|
||||||
|
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||||
|
archives.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
fileName: doc.file_name,
|
||||||
|
fileId: String(doc.document.id),
|
||||||
|
fileSize: BigInt(doc.document.size),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for photo messages (potential previews)
|
||||||
|
const photo = msg.content?.photo;
|
||||||
|
const caption = msg.content?.caption?.text ?? "";
|
||||||
|
if (photo?.sizes && photo.sizes.length > 0) {
|
||||||
|
const smallest = photo.sizes[0];
|
||||||
|
photos.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
caption,
|
||||||
|
fileId: String(smallest.photo.id),
|
||||||
|
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Report scanning progress after each page
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
// Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), currentFromId, totalScanned },
|
||||||
|
"Pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
||||||
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
|
// Rate limit delay (adaptive)
|
||||||
|
await sleep(currentDelayMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
|
"Channel scan complete"
|
||||||
|
);
|
||||||
|
|
||||||
|
// Reverse to chronological order (oldest first) so worker processes old→new
|
||||||
|
return {
|
||||||
|
archives: archives.reverse(),
|
||||||
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You also need to add the import for `extractFloodWaitSeconds` at the top of `download.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { withFloodWait, extractFloodWaitSeconds } from "../util/retry.js";
|
||||||
|
```
|
||||||
|
|
||||||
|
### Part 3: Apply the same adaptive backoff to `getTopicMessages` in `topics.ts`
|
||||||
|
|
||||||
|
The same pattern should be applied to `worker/src/tdlib/topics.ts` in the `getTopicMessages` function (and `getForumTopicList` for good measure). The change is identical in structure: track `currentDelayMs`, reduce it on success, and note the escalation path on FLOOD_WAIT errors.
|
||||||
|
|
||||||
|
**File: `worker/src/tdlib/topics.ts`** -- add the import and apply to both pagination loops. Here is the updated `getTopicMessages`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { extractFloodWaitSeconds } from "../util/retry.js";
|
||||||
|
|
||||||
|
// ... (existing code) ...
|
||||||
|
|
||||||
|
export async function getTopicMessages(
|
||||||
|
client: Client,
|
||||||
|
chatId: bigint,
|
||||||
|
topicId: bigint,
|
||||||
|
lastProcessedMessageId?: bigint | null,
|
||||||
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
|
): Promise<ChannelScanResult> {
|
||||||
|
const archives: TelegramMessage[] = [];
|
||||||
|
const photos: TelegramPhoto[] = [];
|
||||||
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
|
// Adaptive delay
|
||||||
|
let currentDelayMs = config.apiDelayMs;
|
||||||
|
const MAX_DELAY_MS = 30_000;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for topic scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
|
let result: {
|
||||||
|
messages?: {
|
||||||
|
id: number;
|
||||||
|
date: number;
|
||||||
|
content: {
|
||||||
|
_: string;
|
||||||
|
document?: {
|
||||||
|
file_name?: string;
|
||||||
|
document?: { id: number; size: number };
|
||||||
|
};
|
||||||
|
photo?: {
|
||||||
|
sizes?: {
|
||||||
|
type: string;
|
||||||
|
photo: { id: number; size: number; expected_size: number };
|
||||||
|
width: number;
|
||||||
|
height: number;
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
caption?: { text?: string };
|
||||||
|
};
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
result = await invokeWithTimeout(client, {
|
||||||
|
_: "searchChatMessages",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
query: "",
|
||||||
|
message_thread_id: Number(topicId),
|
||||||
|
from_message_id: currentFromId,
|
||||||
|
offset: 0,
|
||||||
|
limit: Math.min(limit, 100),
|
||||||
|
filter: null,
|
||||||
|
sender_id: null,
|
||||||
|
saved_messages_topic_id: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Gradually reduce delay back toward base after success
|
||||||
|
if (currentDelayMs > config.apiDelayMs) {
|
||||||
|
currentDelayMs = Math.max(
|
||||||
|
config.apiDelayMs,
|
||||||
|
Math.floor(currentDelayMs * 0.8)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
const floodSeconds = extractFloodWaitSeconds(err);
|
||||||
|
if (floodSeconds !== null) {
|
||||||
|
currentDelayMs = Math.min(MAX_DELAY_MS, currentDelayMs * 2);
|
||||||
|
log.warn(
|
||||||
|
{
|
||||||
|
chatId: chatId.toString(),
|
||||||
|
topicId: topicId.toString(),
|
||||||
|
pageCount,
|
||||||
|
totalScanned,
|
||||||
|
newDelayMs: currentDelayMs,
|
||||||
|
},
|
||||||
|
"FLOOD_WAIT exhausted retries — increased inter-page delay"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
|
for (const msg of result.messages) {
|
||||||
|
const doc = msg.content?.document;
|
||||||
|
if (doc?.file_name && doc.document && isArchiveAttachment(doc.file_name)) {
|
||||||
|
archives.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
fileName: doc.file_name,
|
||||||
|
fileId: String(doc.document.id),
|
||||||
|
fileSize: BigInt(doc.document.size),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
});
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const photo = msg.content?.photo;
|
||||||
|
const caption = msg.content?.caption?.text ?? "";
|
||||||
|
if (photo?.sizes && photo.sizes.length > 0) {
|
||||||
|
const smallest = photo.sizes[0];
|
||||||
|
photos.push({
|
||||||
|
id: BigInt(msg.id),
|
||||||
|
date: new Date(msg.date * 1000),
|
||||||
|
caption,
|
||||||
|
fileId: String(smallest.photo.id),
|
||||||
|
fileSize: smallest.photo.size || smallest.photo.expected_size,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), currentFromId, totalScanned },
|
||||||
|
"Topic pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
|
// Adaptive rate limit delay
|
||||||
|
await sleep(currentDelayMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
|
"Topic scan complete"
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
archives: archives.reverse(),
|
||||||
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Summary of Changes
|
||||||
|
|
||||||
|
| File | Change | Why |
|
||||||
|
|---|---|---|
|
||||||
|
| `worker/src/util/config.ts` | Raise `apiDelayMs` default to 2000, `maxRetries` to 10; make both env-configurable | 1s delay is too aggressive for 100-page scans; 5 retries is too few for sustained scanning |
|
||||||
|
| `worker/src/tdlib/download.ts` | Add adaptive backoff to `getChannelMessages` loop; import `extractFloodWaitSeconds` | After FLOOD_WAIT recovery, the next page should wait longer, not immediately go back to the base delay |
|
||||||
|
| `worker/src/tdlib/topics.ts` | Same adaptive backoff in `getTopicMessages` and `getForumTopicList` | Same vulnerability exists in topic scanning |
|
||||||
|
|
||||||
|
## Approach Explanation
|
||||||
|
|
||||||
|
The core insight is that `FLOOD_WAIT_35` is Telegram telling the client "you are calling me too fast, wait 35 seconds." The existing `withFloodWait` retry wrapper correctly handles individual occurrences by sleeping and retrying. But when scanning 10,000 messages (100 pages), the **loop itself** needs to adapt its pace. A fixed 1-second delay between pages is what causes the flood of FLOOD_WAIT errors in the first place.
|
||||||
|
|
||||||
|
The adaptive backoff strategy:
|
||||||
|
- **On success:** gradually decrease the delay back toward the base value (multiply by 0.8), so scanning speeds back up once the rate limit pressure eases.
|
||||||
|
- **On FLOOD_WAIT recovery (inside withFloodWait):** the retry wrapper handles it transparently -- the loop just sees a slower successful call and reduces delay.
|
||||||
|
- **On FLOOD_WAIT exhausting retries:** double the inter-page delay (capped at 30s) before re-throwing, so if the scan is retried it starts slower.
|
||||||
|
- **Higher base delay (2s):** prevents most FLOOD_WAIT errors from occurring in the first place for typical channel sizes.
|
||||||
|
- **More retries (10):** gives the retry wrapper enough headroom to survive occasional rate limits during long scans without crashing.
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 45601,
|
||||||
|
"duration_ms": 122326,
|
||||||
|
"total_duration_seconds": 122.3
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"eval_id": 3,
|
||||||
|
"eval_name": "download-and-reupload-file",
|
||||||
|
"prompt": "Write a utility function for the worker that downloads a file from a source Telegram channel message and re-uploads it to the destination channel. It needs to handle files up to 2GB and work reliably in Docker.",
|
||||||
|
"assertions": [
|
||||||
|
{"text": "uses_flood_wait_retry: Download and upload calls wrapped in FLOOD_WAIT-aware retry logic", "type": "required"},
|
||||||
|
{"text": "has_size_scaled_timeout: Timeout scales with file size, not a fixed constant", "type": "required"},
|
||||||
|
{"text": "listener_before_action: Upload event listener attached BEFORE calling sendMessage", "type": "required"},
|
||||||
|
{"text": "has_temp_cleanup: Temporary files cleaned up in a finally block", "type": "required"},
|
||||||
|
{"text": "handles_2gb_limit: Acknowledges or handles the 2GB Telegram file size limit", "type": "recommended"}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-3-with_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "uses_flood_wait_retry", "passed": true, "evidence": "Delegates to existing downloadFile and uploadToChannel which internally use withFloodWait. Explicitly notes: 'All TDLib invocations go through withFloodWait via invokeWithTimeout'"},
|
||||||
|
{"text": "has_size_scaled_timeout", "passed": true, "evidence": "Inherits from existing primitives: 'Timeouts scale with file size (10 minutes per GB, minimum 5 minutes for download, 10 minutes for upload)'. Explicitly cited from skill."},
|
||||||
|
{"text": "listener_before_action", "passed": true, "evidence": "Explicitly noted in skill patterns table: 'Event Listener Before Action — Inherited from existing downloadFile and uploadToChannel'. The skill pattern is referenced."},
|
||||||
|
{"text": "has_temp_cleanup", "passed": true, "evidence": "finally block with 'await rm(transferDir, { recursive: true, force: true })'. Uses randomUUID for isolation."},
|
||||||
|
{"text": "handles_2gb_limit", "passed": true, "evidence": "MAX_TRANSFER_BYTES = 2n * 1024n * 1024n * 1024n. Upfront validation with clear error message."}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,287 @@
|
|||||||
|
# Download & Re-upload Utility for the Worker
|
||||||
|
|
||||||
|
## Approach
|
||||||
|
|
||||||
|
This utility function downloads a file from a source Telegram channel message and re-uploads it to a destination channel, returning the server-assigned message ID of the uploaded file. It is designed to slot into the existing worker codebase at `worker/src/tdlib/transfer.ts`.
|
||||||
|
|
||||||
|
The function composes the existing `downloadFile` (from `tdlib/download.ts`) and `uploadToChannel` (from `upload/channel.ts`) primitives, adding:
|
||||||
|
|
||||||
|
1. **Temporary file management** with guaranteed cleanup in a `finally` block
|
||||||
|
2. **Size-scaled timeouts** for files up to 2 GB (the TDLib user account limit)
|
||||||
|
3. **Cross-filesystem safety** via the existing `verifyAndMove` pattern (rename with EXDEV fallback)
|
||||||
|
4. **FLOOD_WAIT-aware retries** by delegating to the existing `withFloodWait` wrapper on every TDLib invoke
|
||||||
|
5. **Docker reliability**: uses `/tmp/zips` (the volume from the Dockerfile) for temp storage, ensures cleanup even on crash, and avoids holding large buffers in memory
|
||||||
|
|
||||||
|
## Skill Patterns Applied
|
||||||
|
|
||||||
|
| Skill Pattern | Where Applied |
|
||||||
|
|---|---|
|
||||||
|
| **FLOOD_WAIT handling** (`withFloodWait` + `extractFloodWaitSeconds`) | All TDLib invocations go through `withFloodWait` via `invokeWithTimeout` (download) and the upload internals |
|
||||||
|
| **Download/Upload Timeouts** (scale with file size: 10 min/GB, min 5-10 min) | Inherited from existing `downloadFile` and `sendAndWaitForUpload` |
|
||||||
|
| **Event Listener Before Action** | Inherited from existing `downloadFile` (subscribes to `updateFile` before calling `downloadFile` invoke) and `uploadToChannel` (subscribes before `sendMessage`) |
|
||||||
|
| **TDLib Client Lifecycle** (`finally` block for cleanup) | The caller is expected to manage the client; this utility manages temp files in its own `finally` |
|
||||||
|
| **File Size Limits** (User account TDLib: 2 GB upload/download) | Explicit 2 GB guard with clear error message |
|
||||||
|
| **Sequential Send Queue** / no concurrent sends | Single sequential download-then-upload, rate limit delay between steps |
|
||||||
|
| **BigInt Chat IDs** | Passes `Number(chatId)` to TDLib invoke calls (inherited from existing code) |
|
||||||
|
| **Docker Considerations** | Uses the mounted `/tmp/zips` volume, per-operation subdirectory, guaranteed cleanup |
|
||||||
|
| **Never bare `client.invoke()`** | All invocations go through `withFloodWait` wrappers |
|
||||||
|
|
||||||
|
## Full Implementation
|
||||||
|
|
||||||
|
File: `worker/src/tdlib/transfer.ts`
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import path from "path";
|
||||||
|
import { mkdir, rm, stat } from "fs/promises";
|
||||||
|
import { randomUUID } from "crypto";
|
||||||
|
import type { Client } from "tdl";
|
||||||
|
import { config } from "../util/config.js";
|
||||||
|
import { childLogger } from "../util/logger.js";
|
||||||
|
import { downloadFile } from "./download.js";
|
||||||
|
import type { DownloadProgress } from "./download.js";
|
||||||
|
import { uploadToChannel } from "../upload/channel.js";
|
||||||
|
|
||||||
|
const log = childLogger("transfer");
|
||||||
|
|
||||||
|
/** Maximum file size TDLib user accounts can handle (2 GB). */
|
||||||
|
const MAX_TRANSFER_BYTES = 2n * 1024n * 1024n * 1024n;
|
||||||
|
|
||||||
|
export interface TransferOptions {
|
||||||
|
/** TDLib file ID (numeric string) of the file to download. */
|
||||||
|
fileId: string;
|
||||||
|
/** Original file name from the source message. */
|
||||||
|
fileName: string;
|
||||||
|
/** Expected file size in bytes. */
|
||||||
|
fileSize: bigint;
|
||||||
|
/** Telegram chat ID of the source channel (used only for logging context). */
|
||||||
|
sourceChatId: bigint;
|
||||||
|
/** Telegram chat ID of the destination channel. */
|
||||||
|
destChatId: bigint;
|
||||||
|
/** Optional caption to attach to the uploaded message. */
|
||||||
|
caption?: string;
|
||||||
|
/** Optional callback for download progress updates. */
|
||||||
|
onDownloadProgress?: (progress: DownloadProgress) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TransferResult {
|
||||||
|
/** Server-assigned message ID of the uploaded file in the destination channel. */
|
||||||
|
destMessageId: bigint;
|
||||||
|
/** Size of the transferred file in bytes (verified on disk after download). */
|
||||||
|
transferredBytes: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Download a file from a source Telegram channel message and re-upload it
|
||||||
|
* to a destination channel.
|
||||||
|
*
|
||||||
|
* This function:
|
||||||
|
* 1. Validates the file size is within TDLib's 2 GB limit
|
||||||
|
* 2. Downloads the file to a temporary directory (with progress tracking)
|
||||||
|
* 3. Verifies the downloaded file's size matches the expected size
|
||||||
|
* 4. Uploads the file to the destination channel
|
||||||
|
* 5. Waits for Telegram's server-side upload confirmation
|
||||||
|
* 6. Cleans up the temporary file (even on failure)
|
||||||
|
*
|
||||||
|
* All TDLib calls are wrapped with FLOOD_WAIT-aware retry logic.
|
||||||
|
* Timeouts scale with file size (10 minutes per GB, minimum 5 minutes for
|
||||||
|
* download, 10 minutes for upload).
|
||||||
|
*
|
||||||
|
* Designed for Docker: uses the configured temp directory (mounted volume at
|
||||||
|
* /tmp/zips) and creates a unique subdirectory per transfer to avoid collisions
|
||||||
|
* between concurrent operations.
|
||||||
|
*
|
||||||
|
* @throws Error if file exceeds 2 GB
|
||||||
|
* @throws Error if download fails, times out, or size verification fails
|
||||||
|
* @throws Error if upload fails or times out
|
||||||
|
*/
|
||||||
|
export async function downloadAndReupload(
|
||||||
|
client: Client,
|
||||||
|
options: TransferOptions
|
||||||
|
): Promise<TransferResult> {
|
||||||
|
const {
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
fileSize,
|
||||||
|
sourceChatId,
|
||||||
|
destChatId,
|
||||||
|
caption,
|
||||||
|
onDownloadProgress,
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
// ── Validate file size ──
|
||||||
|
if (fileSize > MAX_TRANSFER_BYTES) {
|
||||||
|
throw new Error(
|
||||||
|
`File "${fileName}" is ${Number(fileSize / (1024n * 1024n))}MB, ` +
|
||||||
|
`which exceeds the TDLib user account limit of 2 GB`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Create isolated temp directory for this transfer ──
|
||||||
|
const transferId = randomUUID();
|
||||||
|
const transferDir = path.join(config.tempDir, `transfer_${transferId}`);
|
||||||
|
await mkdir(transferDir, { recursive: true });
|
||||||
|
|
||||||
|
const tempFilePath = path.join(transferDir, fileName);
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
fileSize: Number(fileSize),
|
||||||
|
sourceChatId: Number(sourceChatId),
|
||||||
|
destChatId: Number(destChatId),
|
||||||
|
transferDir,
|
||||||
|
},
|
||||||
|
"Starting download-and-reupload transfer"
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// ── Step 1: Download from source ──
|
||||||
|
log.info({ fileId, fileName }, "Downloading file from source channel");
|
||||||
|
|
||||||
|
await downloadFile(
|
||||||
|
client,
|
||||||
|
fileId,
|
||||||
|
tempFilePath,
|
||||||
|
fileSize,
|
||||||
|
fileName,
|
||||||
|
onDownloadProgress
|
||||||
|
);
|
||||||
|
|
||||||
|
// ── Step 2: Verify downloaded file ──
|
||||||
|
const fileStats = await stat(tempFilePath);
|
||||||
|
const actualBytes = fileStats.size;
|
||||||
|
|
||||||
|
if (Number(fileSize) > 0 && actualBytes !== Number(fileSize)) {
|
||||||
|
throw new Error(
|
||||||
|
`Downloaded file size mismatch for "${fileName}": ` +
|
||||||
|
`expected ${Number(fileSize)} bytes, got ${actualBytes} bytes`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ fileName, bytes: actualBytes },
|
||||||
|
"Download complete, starting upload to destination"
|
||||||
|
);
|
||||||
|
|
||||||
|
// ── Step 3: Upload to destination ──
|
||||||
|
const uploadResult = await uploadToChannel(
|
||||||
|
client,
|
||||||
|
destChatId,
|
||||||
|
[tempFilePath],
|
||||||
|
caption
|
||||||
|
);
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{
|
||||||
|
fileName,
|
||||||
|
destMessageId: Number(uploadResult.messageId),
|
||||||
|
destChatId: Number(destChatId),
|
||||||
|
bytes: actualBytes,
|
||||||
|
},
|
||||||
|
"Transfer complete: file uploaded to destination channel"
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
destMessageId: uploadResult.messageId,
|
||||||
|
transferredBytes: actualBytes,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
log.error(
|
||||||
|
{
|
||||||
|
err,
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
sourceChatId: Number(sourceChatId),
|
||||||
|
destChatId: Number(destChatId),
|
||||||
|
},
|
||||||
|
"Transfer failed"
|
||||||
|
);
|
||||||
|
throw err;
|
||||||
|
} finally {
|
||||||
|
// ── Always clean up temp files ──
|
||||||
|
await rm(transferDir, { recursive: true, force: true }).catch((cleanupErr) => {
|
||||||
|
log.warn(
|
||||||
|
{ err: cleanupErr, transferDir },
|
||||||
|
"Failed to clean up transfer temp directory"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience wrapper that extracts the file info from a TDLib message object
|
||||||
|
* and calls downloadAndReupload.
|
||||||
|
*
|
||||||
|
* Accepts the raw TDLib message shape (as returned by getChatHistory) so callers
|
||||||
|
* don't need to manually extract document fields.
|
||||||
|
*
|
||||||
|
* @throws Error if the message does not contain a document attachment
|
||||||
|
*/
|
||||||
|
export async function transferMessageFile(
|
||||||
|
client: Client,
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
message: any,
|
||||||
|
sourceChatId: bigint,
|
||||||
|
destChatId: bigint,
|
||||||
|
caption?: string,
|
||||||
|
onDownloadProgress?: (progress: DownloadProgress) => void
|
||||||
|
): Promise<TransferResult> {
|
||||||
|
const doc = message?.content?.document;
|
||||||
|
if (!doc?.document) {
|
||||||
|
throw new Error(
|
||||||
|
`Message ${message?.id} does not contain a document attachment`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return downloadAndReupload(client, {
|
||||||
|
fileId: String(doc.document.id),
|
||||||
|
fileName: doc.file_name ?? `file_${message.id}`,
|
||||||
|
fileSize: BigInt(doc.document.size),
|
||||||
|
sourceChatId,
|
||||||
|
destChatId,
|
||||||
|
caption,
|
||||||
|
onDownloadProgress,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage Example
|
||||||
|
|
||||||
|
Within the worker pipeline (e.g., in `worker.ts` or a new route handler):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
|
||||||
|
import { downloadAndReupload } from "./tdlib/transfer.js";
|
||||||
|
|
||||||
|
const client = await createTdlibClient({ id: account.id, phone: account.phone });
|
||||||
|
try {
|
||||||
|
const result = await downloadAndReupload(client, {
|
||||||
|
fileId: "12345",
|
||||||
|
fileName: "model_pack_v2.zip",
|
||||||
|
fileSize: 1_500_000_000n, // ~1.4 GB
|
||||||
|
sourceChatId: -1001234567890n,
|
||||||
|
destChatId: -1009876543210n,
|
||||||
|
caption: "Re-uploaded from source channel",
|
||||||
|
onDownloadProgress: (progress) => {
|
||||||
|
console.log(`Download: ${progress.percent}%`);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`Uploaded as message ${result.destMessageId}`);
|
||||||
|
} finally {
|
||||||
|
await closeTdlibClient(client);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Design Decisions
|
||||||
|
|
||||||
|
1. **Delegates to existing primitives**: Rather than reimplementing download/upload logic, this composes `downloadFile` and `uploadToChannel` which already have FLOOD_WAIT retries, progress tracking, size-scaled timeouts, event-listener-before-action ordering, and upload confirmation via `updateMessageSendSucceeded`.
|
||||||
|
|
||||||
|
2. **Unique temp directory per transfer**: Uses `randomUUID()` to create an isolated subdirectory under `/tmp/zips`. This prevents filename collisions when multiple transfers run concurrently and ensures `rm -rf` in the `finally` block only removes this transfer's files.
|
||||||
|
|
||||||
|
3. **No in-memory buffering**: Files stream to disk via TDLib's native download mechanism, then upload from disk. This is essential for 2 GB files in Docker containers where memory is typically constrained.
|
||||||
|
|
||||||
|
4. **Two-phase verification**: The downloaded file size is verified both inside `downloadFile` (via `verifyAndMove`) and again before upload. This double-check catches edge cases where `verifyAndMove` might succeed but the file gets corrupted during the cross-device copy fallback.
|
||||||
|
|
||||||
|
5. **`transferMessageFile` convenience wrapper**: Extracts document metadata from raw TDLib message objects, so callers working with `getChatHistory` results don't need to manually navigate the nested content structure.
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 61157,
|
||||||
|
"duration_ms": 122135,
|
||||||
|
"total_duration_seconds": 122.1
|
||||||
|
}
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"run_id": "eval-3-without_skill",
|
||||||
|
"expectations": [
|
||||||
|
{"text": "uses_flood_wait_retry", "passed": true, "evidence": "Delegates to existing downloadFile and uploadToChannel which use withFloodWait internally. Notes 'Both are wrapped with withFloodWait internally'."},
|
||||||
|
{"text": "has_size_scaled_timeout", "passed": true, "evidence": "Notes existing timeouts: '10 minutes per GB for both download and upload, with a minimum of 5 minutes for download and 10 minutes for upload'. Inherited from composed functions."},
|
||||||
|
{"text": "listener_before_action", "passed": true, "evidence": "Mentions existing patterns in downloadFile and uploadToChannel handle this. But does NOT explicitly call it out as a critical concern or verify it."},
|
||||||
|
{"text": "has_temp_cleanup", "passed": true, "evidence": "finally block with 'await unlink(tempPath)'. Uses tempSubdir for isolation."},
|
||||||
|
{"text": "handles_2gb_limit", "passed": true, "evidence": "MAX_UPLOAD_BYTES = 2n * 1024n * 1024n * 1024n. Upfront validation with error directing to byteLevelSplit."}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -0,0 +1,318 @@
|
|||||||
|
# Download and Re-upload Utility for the DragonsStash Worker
|
||||||
|
|
||||||
|
## Approach
|
||||||
|
|
||||||
|
The utility function `downloadAndReupload` bridges a source Telegram channel message to a destination channel by:
|
||||||
|
|
||||||
|
1. **Downloading** the file from the source message using TDLib's async `downloadFile` with `updateFile` progress events (the same pattern already used in `worker/src/tdlib/download.ts`).
|
||||||
|
2. **Writing** the file to the worker's temp directory (`/tmp/zips` in Docker, configured via `WORKER_TEMP_DIR`).
|
||||||
|
3. **Verifying** the downloaded file's size matches the expected size (integrity check).
|
||||||
|
4. **Re-uploading** to the destination channel using TDLib's `sendMessage` + `updateMessageSendSucceeded` pattern (the same pattern already used in `worker/src/upload/channel.ts`).
|
||||||
|
5. **Cleaning up** the temp file in a `finally` block to prevent disk exhaustion in Docker.
|
||||||
|
|
||||||
|
### Docker Considerations
|
||||||
|
|
||||||
|
- Uses the existing `/tmp/zips` volume mount (declared in the worker Dockerfile) so large files don't fill the container's overlay filesystem.
|
||||||
|
- Handles cross-device rename (`EXDEV`) when moving files between TDLib's cache directory (`/data/tdlib`) and the temp directory, since these may be on different Docker volumes.
|
||||||
|
- Timeouts scale with file size (10 minutes per GB for both download and upload), with a minimum of 5 minutes for download and 10 minutes for upload -- matching the existing patterns in the codebase.
|
||||||
|
- All temp files are cleaned up in `finally` blocks even if the operation fails partway through.
|
||||||
|
|
||||||
|
### 2GB File Support
|
||||||
|
|
||||||
|
- TDLib's `downloadFile` with `synchronous: false` handles files up to 2GB natively. The existing codebase already has this pattern working.
|
||||||
|
- Telegram's upload limit is 2GB per message. The function validates the file size upfront and rejects files exceeding this limit, directing callers to use the existing `byteLevelSplit` + `uploadToChannel` multi-part flow for oversized files.
|
||||||
|
- Uses `bigint` for file sizes throughout (matching the codebase convention) to avoid JavaScript number precision issues near the 2GB boundary.
|
||||||
|
|
||||||
|
## File Location
|
||||||
|
|
||||||
|
This utility would be placed at `worker/src/tdlib/reupload.ts`, alongside the existing `download.ts` module.
|
||||||
|
|
||||||
|
## Full Code
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// worker/src/tdlib/reupload.ts
|
||||||
|
|
||||||
|
import path from "path";
|
||||||
|
import { mkdir, unlink, stat } from "fs/promises";
|
||||||
|
import type { Client } from "tdl";
|
||||||
|
import { config } from "../util/config.js";
|
||||||
|
import { childLogger } from "../util/logger.js";
|
||||||
|
import { downloadFile } from "./download.ts";
|
||||||
|
import type { DownloadProgress } from "./download.ts";
|
||||||
|
import { uploadToChannel } from "../upload/channel.js";
|
||||||
|
import type { UploadResult } from "../upload/channel.js";
|
||||||
|
|
||||||
|
const log = childLogger("reupload");
|
||||||
|
|
||||||
|
/** Maximum file size Telegram allows for a single upload (2 GB). */
|
||||||
|
const MAX_UPLOAD_BYTES = 2n * 1024n * 1024n * 1024n;
|
||||||
|
|
||||||
|
export interface ReuploadOptions {
|
||||||
|
/** TDLib client instance (must be authenticated). */
|
||||||
|
client: Client;
|
||||||
|
/** Telegram file ID (numeric string) from the source message. */
|
||||||
|
fileId: string;
|
||||||
|
/** Original file name. */
|
||||||
|
fileName: string;
|
||||||
|
/** Expected file size in bytes. */
|
||||||
|
fileSize: bigint;
|
||||||
|
/** Telegram chat ID of the destination channel. */
|
||||||
|
destChatId: bigint;
|
||||||
|
/** Optional caption for the re-uploaded message. */
|
||||||
|
caption?: string;
|
||||||
|
/** Optional callback for download progress. */
|
||||||
|
onDownloadProgress?: (progress: DownloadProgress) => void;
|
||||||
|
/** Optional subdirectory name inside tempDir (to isolate concurrent operations). */
|
||||||
|
tempSubdir?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ReuploadResult {
|
||||||
|
/** Server-assigned message ID in the destination channel. */
|
||||||
|
destMessageId: bigint;
|
||||||
|
/** Actual file size on disk after download (for verification logging). */
|
||||||
|
actualBytes: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Download a file from a source Telegram channel message and re-upload it
|
||||||
|
* to a destination channel.
|
||||||
|
*
|
||||||
|
* Flow:
|
||||||
|
* 1. Validates file size is within Telegram's 2GB upload limit
|
||||||
|
* 2. Downloads via TDLib async download with progress tracking
|
||||||
|
* 3. Verifies file integrity (size match)
|
||||||
|
* 4. Uploads to destination channel, waiting for server confirmation
|
||||||
|
* 5. Cleans up the temp file
|
||||||
|
*
|
||||||
|
* For files larger than 2GB, callers should use the split + multi-part
|
||||||
|
* upload flow in worker.ts instead.
|
||||||
|
*
|
||||||
|
* Docker notes:
|
||||||
|
* - Uses WORKER_TEMP_DIR (/tmp/zips) which is a Docker volume, so large
|
||||||
|
* files don't fill the overlay filesystem.
|
||||||
|
* - Handles cross-device moves between TDLib's file cache (/data/tdlib)
|
||||||
|
* and the temp directory.
|
||||||
|
* - Temp files are always cleaned up, even on failure.
|
||||||
|
*
|
||||||
|
* @throws Error if fileSize exceeds 2GB (callers should split first)
|
||||||
|
* @throws Error if download fails, times out, or produces a size mismatch
|
||||||
|
* @throws Error if upload fails or times out
|
||||||
|
*/
|
||||||
|
export async function downloadAndReupload(
|
||||||
|
opts: ReuploadOptions
|
||||||
|
): Promise<ReuploadResult> {
|
||||||
|
const {
|
||||||
|
client,
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
fileSize,
|
||||||
|
destChatId,
|
||||||
|
caption,
|
||||||
|
onDownloadProgress,
|
||||||
|
tempSubdir,
|
||||||
|
} = opts;
|
||||||
|
|
||||||
|
// ── Validate: reject files that exceed Telegram's upload limit ──
|
||||||
|
if (fileSize > MAX_UPLOAD_BYTES) {
|
||||||
|
throw new Error(
|
||||||
|
`File "${fileName}" is ${Number(fileSize / (1024n * 1024n))}MB, ` +
|
||||||
|
`which exceeds Telegram's 2GB upload limit. ` +
|
||||||
|
`Use byteLevelSplit + uploadToChannel for oversized files.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Prepare temp directory ──
|
||||||
|
const workDir = tempSubdir
|
||||||
|
? path.join(config.tempDir, tempSubdir)
|
||||||
|
: config.tempDir;
|
||||||
|
await mkdir(workDir, { recursive: true });
|
||||||
|
|
||||||
|
const tempPath = path.join(workDir, fileName);
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
fileSize: Number(fileSize),
|
||||||
|
destChatId: Number(destChatId),
|
||||||
|
tempPath,
|
||||||
|
},
|
||||||
|
"Starting download-and-reupload"
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// ── Step 1: Download from source ──
|
||||||
|
//
|
||||||
|
// Uses the existing downloadFile which:
|
||||||
|
// - Starts an async TDLib download (priority 32)
|
||||||
|
// - Listens for updateFile events with progress tracking
|
||||||
|
// - Verifies file size after completion
|
||||||
|
// - Moves from TDLib cache to destPath (handles EXDEV cross-device)
|
||||||
|
// - Times out at max(5min, 10min/GB)
|
||||||
|
log.info({ fileId, fileName }, "Downloading from source channel");
|
||||||
|
|
||||||
|
await downloadFile(
|
||||||
|
client,
|
||||||
|
fileId,
|
||||||
|
tempPath,
|
||||||
|
fileSize,
|
||||||
|
fileName,
|
||||||
|
onDownloadProgress
|
||||||
|
);
|
||||||
|
|
||||||
|
// Extra safety: verify file exists and has the right size
|
||||||
|
const fileStats = await stat(tempPath);
|
||||||
|
const actualBytes = fileStats.size;
|
||||||
|
const expectedBytes = Number(fileSize);
|
||||||
|
|
||||||
|
if (expectedBytes > 0 && actualBytes !== expectedBytes) {
|
||||||
|
throw new Error(
|
||||||
|
`Downloaded file size mismatch for "${fileName}": ` +
|
||||||
|
`expected ${expectedBytes} bytes, got ${actualBytes} bytes`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{ fileId, fileName, actualBytes },
|
||||||
|
"Download complete, starting upload to destination"
|
||||||
|
);
|
||||||
|
|
||||||
|
// ── Step 2: Upload to destination channel ──
|
||||||
|
//
|
||||||
|
// Uses the existing uploadToChannel which:
|
||||||
|
// - Sends via sendMessage with inputFileLocal
|
||||||
|
// - Listens for updateMessageSendSucceeded for the real server message ID
|
||||||
|
// - Handles FLOOD_WAIT rate limits automatically
|
||||||
|
// - Times out at max(10min, 10min/GB)
|
||||||
|
const uploadResult: UploadResult = await uploadToChannel(
|
||||||
|
client,
|
||||||
|
destChatId,
|
||||||
|
[tempPath],
|
||||||
|
caption
|
||||||
|
);
|
||||||
|
|
||||||
|
log.info(
|
||||||
|
{
|
||||||
|
fileId,
|
||||||
|
fileName,
|
||||||
|
destChatId: Number(destChatId),
|
||||||
|
destMessageId: Number(uploadResult.messageId),
|
||||||
|
actualBytes,
|
||||||
|
},
|
||||||
|
"Download-and-reupload completed successfully"
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
destMessageId: uploadResult.messageId,
|
||||||
|
actualBytes,
|
||||||
|
};
|
||||||
|
} finally {
|
||||||
|
// ── Always clean up temp file ──
|
||||||
|
// Critical in Docker to prevent /tmp/zips volume from filling up,
|
||||||
|
// especially when processing many large files in sequence.
|
||||||
|
try {
|
||||||
|
await unlink(tempPath);
|
||||||
|
log.debug({ tempPath }, "Cleaned up temp file");
|
||||||
|
} catch {
|
||||||
|
// File may not exist if download failed before writing
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience wrapper that downloads and re-uploads multiple files
|
||||||
|
* (e.g., multipart archive parts) from a source channel to a destination.
|
||||||
|
*
|
||||||
|
* Each file is downloaded and uploaded sequentially with a rate-limit
|
||||||
|
* delay between operations. Returns the message ID of the first upload
|
||||||
|
* (matching the convention in uploadToChannel).
|
||||||
|
*
|
||||||
|
* For multipart sets where individual parts exceed 2GB, the caller
|
||||||
|
* should use the full repack pipeline in worker.ts (concatenate +
|
||||||
|
* byteLevelSplit) instead of this function.
|
||||||
|
*/
|
||||||
|
export async function downloadAndReuploadMultiple(
|
||||||
|
client: Client,
|
||||||
|
files: Array<{
|
||||||
|
fileId: string;
|
||||||
|
fileName: string;
|
||||||
|
fileSize: bigint;
|
||||||
|
}>,
|
||||||
|
destChatId: bigint,
|
||||||
|
caption?: string,
|
||||||
|
onDownloadProgress?: (fileIndex: number, progress: DownloadProgress) => void,
|
||||||
|
tempSubdir?: string
|
||||||
|
): Promise<ReuploadResult> {
|
||||||
|
if (files.length === 0) {
|
||||||
|
throw new Error("No files provided for download-and-reupload");
|
||||||
|
}
|
||||||
|
|
||||||
|
let firstResult: ReuploadResult | null = null;
|
||||||
|
|
||||||
|
for (let i = 0; i < files.length; i++) {
|
||||||
|
const file = files[i];
|
||||||
|
|
||||||
|
// Only set caption on the first file (matching uploadToChannel convention)
|
||||||
|
const fileCaption = i === 0 ? caption : undefined;
|
||||||
|
|
||||||
|
const result = await downloadAndReupload({
|
||||||
|
client,
|
||||||
|
fileId: file.fileId,
|
||||||
|
fileName: file.fileName,
|
||||||
|
fileSize: file.fileSize,
|
||||||
|
destChatId,
|
||||||
|
caption: fileCaption,
|
||||||
|
onDownloadProgress: onDownloadProgress
|
||||||
|
? (progress) => onDownloadProgress(i, progress)
|
||||||
|
: undefined,
|
||||||
|
tempSubdir,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (i === 0) {
|
||||||
|
firstResult = result;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rate limit delay between operations
|
||||||
|
if (i < files.length - 1) {
|
||||||
|
await new Promise((resolve) =>
|
||||||
|
setTimeout(resolve, config.apiDelayMs)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return firstResult!;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## How It Fits Into the Existing Codebase
|
||||||
|
|
||||||
|
The function composes the two existing building blocks:
|
||||||
|
|
||||||
|
- **`downloadFile`** from `worker/src/tdlib/download.ts` -- handles async download with `updateFile` events, progress tracking, timeout, size verification, and cross-device file moves (critical for Docker where `/data/tdlib` and `/tmp/zips` are separate volumes).
|
||||||
|
- **`uploadToChannel`** from `worker/src/upload/channel.ts` -- handles `sendMessage` with `updateMessageSendSucceeded` confirmation, upload progress via `updateFile`, and FLOOD_WAIT retry.
|
||||||
|
|
||||||
|
Both are wrapped with `withFloodWait` internally, so the utility automatically handles Telegram rate limiting.
|
||||||
|
|
||||||
|
The `downloadAndReupload` function is a focused utility that can be called from the worker pipeline or any other part of the system that needs to transfer a file between channels. The existing `processOneArchiveSet` in `worker.ts` does a similar download-then-upload flow but with additional steps (hashing, dedup, metadata extraction, splitting). This utility extracts the core transfer operation for simpler use cases.
|
||||||
|
|
||||||
|
## Usage Example
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { downloadAndReupload } from "./tdlib/reupload.js";
|
||||||
|
|
||||||
|
// Transfer a single file from source to destination
|
||||||
|
const result = await downloadAndReupload({
|
||||||
|
client,
|
||||||
|
fileId: "12345",
|
||||||
|
fileName: "archive.zip",
|
||||||
|
fileSize: 1_500_000_000n, // ~1.5 GB
|
||||||
|
destChatId: -1001234567890n,
|
||||||
|
caption: "Re-uploaded archive",
|
||||||
|
onDownloadProgress: (progress) => {
|
||||||
|
console.log(`Download: ${progress.percent}%`);
|
||||||
|
},
|
||||||
|
tempSubdir: `reupload_${Date.now()}`,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`Uploaded as message ${result.destMessageId}`);
|
||||||
|
```
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"total_tokens": 54587,
|
||||||
|
"duration_ms": 146695,
|
||||||
|
"total_duration_seconds": 146.7
|
||||||
|
}
|
||||||
1325
.claude/skills/tdlib-telegram-workspace/iteration-1/review.html
Normal file
1325
.claude/skills/tdlib-telegram-workspace/iteration-1/review.html
Normal file
File diff suppressed because one or more lines are too long
301
.claude/skills/tdlib-telegram/SKILL.md
Normal file
301
.claude/skills/tdlib-telegram/SKILL.md
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
---
|
||||||
|
name: tdlib-telegram
|
||||||
|
description: >
|
||||||
|
Reference guide for building Telegram integrations with TDLib (tdl/node).
|
||||||
|
Covers rate limits, FLOOD_WAIT handling, file size constraints, bot vs user account
|
||||||
|
differences, and safe code patterns. Use this skill whenever writing or modifying code
|
||||||
|
that calls Telegram APIs via TDLib, the Bot API, or any Telegram client library — including
|
||||||
|
sending messages, downloading/uploading files, scanning channels, forwarding messages,
|
||||||
|
managing subscriptions, or handling notifications. Also use when debugging 429 errors,
|
||||||
|
FLOOD_WAIT, or silent message drops.
|
||||||
|
---
|
||||||
|
|
||||||
|
# TDLib / Telegram Development Guide
|
||||||
|
|
||||||
|
This skill provides the rate limits, constraints, and patterns you need to write correct
|
||||||
|
Telegram integrations. The limits below come from official Telegram documentation and
|
||||||
|
well-established community findings (Telegram does not publish exact numbers for all limits).
|
||||||
|
|
||||||
|
## Telegram Rate Limits
|
||||||
|
|
||||||
|
These are approximate safe boundaries. Telegram's actual limits are dynamic and depend on
|
||||||
|
account age, history, and request type. The correct strategy is to respect these as guidelines
|
||||||
|
and always handle FLOOD_WAIT errors gracefully.
|
||||||
|
|
||||||
|
### Bot Accounts
|
||||||
|
|
||||||
|
| Operation | Limit | Notes |
|
||||||
|
|-----------|-------|-------|
|
||||||
|
| Messages to same chat | ~1 msg/sec | Bursts OK, sustained exceeds limit |
|
||||||
|
| Messages in a group | 20 msgs/min | Hard limit per group chat |
|
||||||
|
| Bulk notifications (different users) | ~30 msgs/sec | Global across all chats |
|
||||||
|
| Message edits in a group | ~20 edits/min | Community-observed |
|
||||||
|
| API requests (global) | ~30 req/sec | All request types combined |
|
||||||
|
| Paid broadcasts | up to 1000 msgs/sec | Requires Telegram Stars balance |
|
||||||
|
|
||||||
|
### User Accounts (TDLib)
|
||||||
|
|
||||||
|
| Operation | Limit | Notes |
|
||||||
|
|-----------|-------|-------|
|
||||||
|
| API requests (global) | ~30 req/sec | All request types combined |
|
||||||
|
| Messages in a group | ~20 msgs/min | Same as bot |
|
||||||
|
| Channel history reads | No published limit | But pagination + delay is essential |
|
||||||
|
| Joining groups | Very strict | FLOOD_WAIT often 30-300+ seconds |
|
||||||
|
|
||||||
|
### File Size Limits
|
||||||
|
|
||||||
|
| Context | Upload | Download |
|
||||||
|
|---------|--------|----------|
|
||||||
|
| Bot API (standard) | 50 MB | 20 MB |
|
||||||
|
| Bot API (local server) | 2,000 MB | 2,000 MB |
|
||||||
|
| User account (TDLib) | 2 GB | 2 GB |
|
||||||
|
| Premium user (TDLib) | 4 GB | 4 GB |
|
||||||
|
|
||||||
|
### Message & Content Limits
|
||||||
|
|
||||||
|
| Item | Limit |
|
||||||
|
|------|-------|
|
||||||
|
| Message text length | 4,096 chars |
|
||||||
|
| Media caption | 1,024 chars (4,096 premium) |
|
||||||
|
| Album / media group | 10 items max |
|
||||||
|
| Forwarded messages per request | `forwarded_message_count_max` (TDLib option) |
|
||||||
|
| Inline keyboard buttons | 100 entities |
|
||||||
|
| Formatting entities per message | 100 |
|
||||||
|
| Scheduled messages per chat | 100 |
|
||||||
|
| Bot commands | 100 max |
|
||||||
|
|
||||||
|
### Forum & Group Limits
|
||||||
|
|
||||||
|
| Item | Limit |
|
||||||
|
|------|-------|
|
||||||
|
| Topics per group | 1,000,000 |
|
||||||
|
| Topic title | 128 chars |
|
||||||
|
| Group members | 200,000 |
|
||||||
|
| Admins per group | 50 |
|
||||||
|
| Bots per group | 20 |
|
||||||
|
| Pinned topics | 5 |
|
||||||
|
|
||||||
|
## FLOOD_WAIT — How It Works
|
||||||
|
|
||||||
|
When you exceed rate limits, Telegram returns a `FLOOD_WAIT_X` error (or HTTP 429 with
|
||||||
|
`retry_after`). This is a **mandatory pause** — the value `X` is the number of seconds you
|
||||||
|
must wait before ANY request will succeed. It blocks the entire client, not just the
|
||||||
|
operation that triggered it.
|
||||||
|
|
||||||
|
### The Right Way to Handle It
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Extract the wait duration from the error
|
||||||
|
function extractFloodWaitSeconds(err: unknown): number | null {
|
||||||
|
const message = err instanceof Error ? err.message : String(err);
|
||||||
|
|
||||||
|
// Pattern 1: FLOOD_WAIT_30
|
||||||
|
const flood = message.match(/FLOOD_WAIT_(\d+)/i);
|
||||||
|
if (flood) return parseInt(flood[1], 10);
|
||||||
|
|
||||||
|
// Pattern 2: "retry after 30"
|
||||||
|
const retry = message.match(/retry after (\d+)/i);
|
||||||
|
if (retry) return parseInt(retry[1], 10);
|
||||||
|
|
||||||
|
// Pattern 3: HTTP 429 without explicit seconds
|
||||||
|
if (String((err as any)?.code) === "429") return 30;
|
||||||
|
|
||||||
|
return null; // Not a rate limit error
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wrap any TDLib call with automatic retry
|
||||||
|
async function withFloodWait<T>(fn: () => Promise<T>, maxRetries = 5): Promise<T> {
|
||||||
|
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||||
|
try {
|
||||||
|
return await fn();
|
||||||
|
} catch (err) {
|
||||||
|
const wait = extractFloodWaitSeconds(err);
|
||||||
|
if (wait === null || attempt >= maxRetries) throw err;
|
||||||
|
|
||||||
|
// Add 1-5s jitter to prevent thundering herd
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
await sleep(wait * 1000 + jitter);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new Error("Unreachable");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Rules
|
||||||
|
|
||||||
|
- **Always respect the wait duration.** Never retry before `retry_after` expires.
|
||||||
|
- **Add jitter.** Without it, multiple clients retry simultaneously and trigger another FLOOD_WAIT.
|
||||||
|
- **Non-rate-limit errors should fail fast.** Only retry on FLOOD_WAIT, not on other errors.
|
||||||
|
- **Don't artificially throttle below ~1 req/sec.** Telegram's own guidance (via grammY docs)
|
||||||
|
is to send requests as fast as you need and handle 429 errors. Fixed low-frequency throttling
|
||||||
|
wastes throughput without preventing floods.
|
||||||
|
|
||||||
|
## Code Patterns
|
||||||
|
|
||||||
|
### Pattern: Sequential Send Queue
|
||||||
|
|
||||||
|
When sending notifications to multiple users, use a sequential queue with a per-message delay.
|
||||||
|
Never fire concurrent sends — you will hit the 30 msg/sec global limit instantly.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
let sendQueue: Promise<void> = Promise.resolve();
|
||||||
|
|
||||||
|
function queueSend(chatId: bigint, text: string): void {
|
||||||
|
sendQueue = sendQueue
|
||||||
|
.then(() => withFloodWait(() => sendTextMessage(chatId, text)))
|
||||||
|
.then(() => sleep(50)) // ~20 msgs/sec, well under 30 limit
|
||||||
|
.catch((err) => log.error({ err, chatId }, "Send failed"));
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Paginated Scanning with Delay
|
||||||
|
|
||||||
|
When reading channel history or enumerating topics, always add a delay between pages:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
while (hasMorePages) {
|
||||||
|
const result = await invokeWithTimeout(client, { _: "getChatHistory", ... });
|
||||||
|
processMessages(result.messages);
|
||||||
|
|
||||||
|
if (result.messages.length < limit) break;
|
||||||
|
|
||||||
|
await sleep(1000); // 1 second between pages — prevents FLOOD_WAIT on large channels
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Event Listener Before Action
|
||||||
|
|
||||||
|
When waiting for TDLib async events (upload confirmation, download completion), always
|
||||||
|
attach the event listener BEFORE starting the operation. If you attach after, fast
|
||||||
|
operations can complete before the listener exists, causing the promise to hang forever.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// CORRECT: listener first, then action
|
||||||
|
client.on("update", handleUpdate);
|
||||||
|
const tempMsg = await client.invoke({ _: "sendMessage", ... });
|
||||||
|
tempMsgId = tempMsg.id; // handler now knows which message to match
|
||||||
|
|
||||||
|
// WRONG: action first, then listener — race condition!
|
||||||
|
const tempMsg = await client.invoke({ _: "sendMessage", ... });
|
||||||
|
client.on("update", handleUpdate); // may miss updateMessageSendSucceeded
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: Download/Upload Timeouts
|
||||||
|
|
||||||
|
Scale timeouts with file size. TDLib downloads/uploads are asynchronous — without a timeout,
|
||||||
|
a stalled transfer hangs the entire pipeline.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const timeoutMs = Math.max(
|
||||||
|
10 * 60_000, // minimum 10 minutes
|
||||||
|
(fileSizeMB / 1024) * 10 * 60_000 // 10 minutes per GB
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pattern: TDLib Client Lifecycle
|
||||||
|
|
||||||
|
Always close TDLib clients in a `finally` block. Unclosed clients leak memory and file
|
||||||
|
descriptors, and can leave TDLib's internal database locked.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const client = await createTdlibClient(account);
|
||||||
|
try {
|
||||||
|
// ... use client ...
|
||||||
|
} finally {
|
||||||
|
await closeTdlibClient(client);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Anti-Patterns
|
||||||
|
|
||||||
|
### Never: Concurrent TDLib Sends Without Queue
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// BAD: fires all sends concurrently — will trigger FLOOD_WAIT immediately
|
||||||
|
await Promise.all(users.map((u) => sendTextMessage(u.chatId, msg)));
|
||||||
|
|
||||||
|
// GOOD: sequential with delay
|
||||||
|
for (const user of users) {
|
||||||
|
await withFloodWait(() => sendTextMessage(user.chatId, msg));
|
||||||
|
await sleep(50);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Never: Bare client.invoke() Without Retry
|
||||||
|
|
||||||
|
Every `client.invoke()` call can return FLOOD_WAIT at any time. Bare calls will crash
|
||||||
|
on rate limits instead of retrying.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// BAD: crashes on FLOOD_WAIT
|
||||||
|
await client.invoke({ _: "sendMessage", ... });
|
||||||
|
|
||||||
|
// GOOD: retries automatically
|
||||||
|
await withFloodWait(() => client.invoke({ _: "sendMessage", ... }));
|
||||||
|
```
|
||||||
|
|
||||||
|
### Never: Retry Without Respecting retry_after
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// BAD: fixed 1-second retry ignores Telegram's wait requirement
|
||||||
|
catch (err) { await sleep(1000); retry(); }
|
||||||
|
|
||||||
|
// GOOD: extract and respect the actual wait time
|
||||||
|
catch (err) {
|
||||||
|
const wait = extractFloodWaitSeconds(err);
|
||||||
|
if (wait !== null) await sleep(wait * 1000 + jitter);
|
||||||
|
else throw err;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Never: Ignore FLOOD_WAIT in Bots
|
||||||
|
|
||||||
|
Bot accounts get the same FLOOD_WAIT as user accounts. The bot API's 429 response
|
||||||
|
blocks ALL operations for the specified duration — not just the chat that triggered it.
|
||||||
|
A single unhandled flood in a notification loop can make the entire bot unresponsive.
|
||||||
|
|
||||||
|
## Bot vs User Account Differences
|
||||||
|
|
||||||
|
| Capability | Bot | User (TDLib) |
|
||||||
|
|-----------|-----|-------------|
|
||||||
|
| Read channel history | No (unless admin) | Yes |
|
||||||
|
| Send to users who haven't started bot | No | N/A |
|
||||||
|
| Join groups via invite link | No (must be added) | Yes |
|
||||||
|
| Forward messages (send_copy) | Yes | Yes |
|
||||||
|
| File upload limit | 50 MB (standard API) | 2 GB |
|
||||||
|
| File download limit | 20 MB (standard API) | 2 GB |
|
||||||
|
| Auth method | Bot token | Phone + SMS code |
|
||||||
|
| Rate limit profile | Same FLOOD_WAIT | Same FLOOD_WAIT |
|
||||||
|
|
||||||
|
## TDLib-Specific Notes
|
||||||
|
|
||||||
|
### BigInt Chat IDs
|
||||||
|
|
||||||
|
TDLib uses numeric chat IDs. Supergroups and channels use negative IDs (e.g., `-1001234567890`).
|
||||||
|
When passing to `client.invoke()`, convert with `Number(chatId)` — TDLib's JSON interface
|
||||||
|
doesn't handle BigInt. Be aware that very large IDs may lose precision with `Number()`,
|
||||||
|
though current Telegram IDs are within safe integer range.
|
||||||
|
|
||||||
|
### TDLib Options (Runtime Queryable)
|
||||||
|
|
||||||
|
These are read-only values you can query at runtime via `getOption`:
|
||||||
|
- `message_text_length_max` — max message text length
|
||||||
|
- `message_caption_length_max` — max caption length
|
||||||
|
- `forwarded_message_count_max` — max forwards per request
|
||||||
|
|
||||||
|
### Session State
|
||||||
|
|
||||||
|
TDLib persists session state to disk. Each account needs its own state directory.
|
||||||
|
Running two clients on the same state directory simultaneously will corrupt the database.
|
||||||
|
Use separate directories per account, and separate volumes in Docker for worker vs bot.
|
||||||
|
|
||||||
|
## Docker Considerations
|
||||||
|
|
||||||
|
- **prebuilt-tdlib**: The `prebuilt-tdlib` npm package provides platform-specific TDLib
|
||||||
|
binaries. Container base image must match (e.g., `node:20-bookworm-slim` for Debian x64).
|
||||||
|
- **Volumes**: Mount persistent volumes for TDLib state directories — losing state forces
|
||||||
|
full re-authentication.
|
||||||
|
- **Graceful shutdown**: Wait for active operations to finish before closing DB connections.
|
||||||
|
TDLib operations in flight will fail if the database pool is closed underneath them.
|
||||||
|
- **Health checks**: TDLib services don't expose HTTP — use database connectivity as the
|
||||||
|
health signal instead.
|
||||||
23
.claude/skills/tdlib-telegram/evals/evals.json
Normal file
23
.claude/skills/tdlib-telegram/evals/evals.json
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
{
|
||||||
|
"skill_name": "tdlib-telegram",
|
||||||
|
"evals": [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"prompt": "Add a new bot command /broadcast that sends a text message to ALL users who have a TelegramLink in the database. The admin triggers it from the web app. Add it to the bot's command handler and create an API endpoint that triggers it.",
|
||||||
|
"expected_output": "Code that uses a sequential send queue with withFloodWait wrapping each sendTextMessage call, a delay between sends (~50ms), and does NOT use Promise.all or concurrent sends. Should handle errors per-user without stopping the broadcast.",
|
||||||
|
"files": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
"prompt": "The worker keeps crashing with 'FLOOD_WAIT_35' errors when scanning a source channel that has about 10,000 messages. It happens during the getChannelMessages pagination loop. How do I fix this?",
|
||||||
|
"expected_output": "Diagnosis that the apiDelayMs between pages may be too low or the retry logic isn't wrapping the pagination calls. Should recommend ensuring all getChatHistory/searchChatMessages calls go through withFloodWait/invokeWithTimeout, and that sleep(config.apiDelayMs) exists between pages. Should NOT suggest reducing scan frequency as the primary fix.",
|
||||||
|
"files": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 3,
|
||||||
|
"prompt": "Write a utility function for the worker that downloads a file from a source Telegram channel message and re-uploads it to the destination channel. It needs to handle files up to 2GB and work reliably in Docker.",
|
||||||
|
"expected_output": "Code that: (1) wraps download in withFloodWait with size-scaled timeout, (2) attaches upload event listener BEFORE calling sendMessage, (3) uses temp directory with cleanup in finally block, (4) handles the 2GB Telegram limit correctly, (5) uses try/finally for client cleanup if applicable.",
|
||||||
|
"files": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
66
.drone.yml
Normal file
66
.drone.yml
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
kind: pipeline
|
||||||
|
type: docker
|
||||||
|
name: build-and-deploy
|
||||||
|
|
||||||
|
trigger:
|
||||||
|
branch: [main]
|
||||||
|
event: [push]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: build-app
|
||||||
|
image: plugins/docker
|
||||||
|
settings:
|
||||||
|
repo: git.samagsteribbe.nl/admin/dragonsstash
|
||||||
|
registry: git.samagsteribbe.nl
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
tags:
|
||||||
|
- latest
|
||||||
|
- "${DRONE_COMMIT_SHA:0:8}"
|
||||||
|
build_args:
|
||||||
|
- NEXT_PUBLIC_APP_URL=https://dragonsstash.samagsteribbe.nl
|
||||||
|
username:
|
||||||
|
from_secret: gitea_username
|
||||||
|
password:
|
||||||
|
from_secret: gitea_password
|
||||||
|
|
||||||
|
- name: build-worker
|
||||||
|
image: plugins/docker
|
||||||
|
settings:
|
||||||
|
repo: git.samagsteribbe.nl/admin/dragonsstash-worker
|
||||||
|
registry: git.samagsteribbe.nl
|
||||||
|
dockerfile: worker/Dockerfile
|
||||||
|
tags:
|
||||||
|
- latest
|
||||||
|
- "${DRONE_COMMIT_SHA:0:8}"
|
||||||
|
username:
|
||||||
|
from_secret: gitea_username
|
||||||
|
password:
|
||||||
|
from_secret: gitea_password
|
||||||
|
|
||||||
|
- name: build-bot
|
||||||
|
image: plugins/docker
|
||||||
|
settings:
|
||||||
|
repo: git.samagsteribbe.nl/admin/dragonsstash-bot
|
||||||
|
registry: git.samagsteribbe.nl
|
||||||
|
dockerfile: bot/Dockerfile
|
||||||
|
tags:
|
||||||
|
- latest
|
||||||
|
- "${DRONE_COMMIT_SHA:0:8}"
|
||||||
|
username:
|
||||||
|
from_secret: gitea_username
|
||||||
|
password:
|
||||||
|
from_secret: gitea_password
|
||||||
|
|
||||||
|
- name: deploy
|
||||||
|
image: alpine
|
||||||
|
environment:
|
||||||
|
SSH_KEY:
|
||||||
|
from_secret: ssh_key
|
||||||
|
commands:
|
||||||
|
- apk add --no-cache openssh-client
|
||||||
|
- mkdir -p ~/.ssh
|
||||||
|
- printf "%s" "$SSH_KEY" > ~/.ssh/id_ed25519
|
||||||
|
- chmod 600 ~/.ssh/id_ed25519
|
||||||
|
- ssh-keyscan -t ed25519 192.168.68.68 > ~/.ssh/known_hosts 2>/dev/null
|
||||||
|
- ssh sam@192.168.68.68 "cd /opt/stacks/DragonsStash && docker compose pull && docker compose up -d"
|
||||||
@@ -13,6 +13,8 @@ AUTH_GITHUB_ID=""
|
|||||||
AUTH_GITHUB_SECRET=""
|
AUTH_GITHUB_SECRET=""
|
||||||
|
|
||||||
# App
|
# App
|
||||||
|
# APP_PORT controls the port the container listens on AND how it is exposed on the host.
|
||||||
|
# If you change APP_PORT, also update NEXT_PUBLIC_APP_URL to match.
|
||||||
NEXT_PUBLIC_APP_URL="http://localhost:3000"
|
NEXT_PUBLIC_APP_URL="http://localhost:3000"
|
||||||
APP_PORT=3000
|
APP_PORT=3000
|
||||||
|
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -18,6 +18,7 @@ worker/node_modules
|
|||||||
|
|
||||||
# production
|
# production
|
||||||
/build
|
/build
|
||||||
|
worker/dist
|
||||||
|
|
||||||
# misc
|
# misc
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
|||||||
102
CLAUDE.md
Normal file
102
CLAUDE.md
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
# CLAUDE.md
|
||||||
|
|
||||||
|
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
Dragon's Stash is a self-hosted inventory management system for 3D printing filament, SLA resin, miniature paints, and supplies. It includes an integrated Telegram archive worker that scans channels for ZIP/RAR archives, indexes their contents, and a bot that lets users search and receive packages via Telegram.
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
- **App**: Next.js 16 (App Router), TypeScript 5.9 (strict), Tailwind CSS 4, shadcn/ui
|
||||||
|
- **Database**: PostgreSQL 16+ via Prisma v7.4 with `@prisma/adapter-pg`
|
||||||
|
- **Auth**: Auth.js v5 (NextAuth) with credentials + optional GitHub OAuth
|
||||||
|
- **Worker**: TypeScript + TDLib (via `tdl`) for Telegram channel scanning
|
||||||
|
- **Bot**: TypeScript + TDLib for Telegram bot interface
|
||||||
|
- **Forms**: React Hook Form + Zod v4
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
### App (root package.json)
|
||||||
|
```bash
|
||||||
|
npm run dev # Next.js dev server with hot reload
|
||||||
|
npm run build # Production build (standalone output)
|
||||||
|
npm run start # Production server
|
||||||
|
npm run lint # ESLint (next/core-web-vitals + TypeScript)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database
|
||||||
|
```bash
|
||||||
|
npm run db:generate # Generate Prisma client
|
||||||
|
npm run db:migrate # Run migrations (dev mode)
|
||||||
|
npm run db:push # Push schema without migrations
|
||||||
|
npm run db:seed # Seed database with test data
|
||||||
|
npm run db:studio # Prisma Studio UI
|
||||||
|
npx prisma migrate dev --name <description> # Create new migration
|
||||||
|
```
|
||||||
|
|
||||||
|
### Worker & Bot (each in their own directory)
|
||||||
|
```bash
|
||||||
|
cd worker && npm run dev # Dev mode with tsx watch
|
||||||
|
cd worker && npm run build # TypeScript compile to dist/
|
||||||
|
cd bot && npm run dev # Dev mode with tsx watch
|
||||||
|
cd bot && npm run build # TypeScript compile to dist/
|
||||||
|
```
|
||||||
|
|
||||||
|
### Dev Environment Setup
|
||||||
|
```bash
|
||||||
|
docker compose -f docker-compose.dev.yml up -d # Start PostgreSQL + worker
|
||||||
|
npm run dev # Run app locally
|
||||||
|
```
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Three-Service Design
|
||||||
|
The project is split into three independent services sharing one PostgreSQL database:
|
||||||
|
1. **App** (root `src/`): Next.js web UI for inventory management and Telegram admin
|
||||||
|
2. **Worker** (`worker/`): Scans Telegram source channels, processes archives, uploads to destination channel
|
||||||
|
3. **Bot** (`bot/`): Telegram bot for user search, package delivery, keyword subscriptions
|
||||||
|
|
||||||
|
Services communicate asynchronously via `pg_notify` (e.g., on-demand channel fetches, bot send requests).
|
||||||
|
|
||||||
|
### App Source Layout (`src/`)
|
||||||
|
- `app/(auth)/` — Login/register pages (public)
|
||||||
|
- `app/(app)/` — Protected routes behind auth middleware (dashboard, filaments, resins, paints, supplies, vendors, locations, settings, stls, telegram, usage)
|
||||||
|
- `app/api/` — API routes (NextAuth, health check, bot endpoints)
|
||||||
|
- `data/` — Server-side Prisma query functions (`*.queries.ts`), one file per domain model
|
||||||
|
- `schemas/` — Zod validation schemas, one file per domain model
|
||||||
|
- `components/ui/` — shadcn/ui primitives
|
||||||
|
- `components/shared/` — Reusable business components (data-table, status-badge, color-swatch, stat-card, page-header)
|
||||||
|
- `components/layout/` — Sidebar and header
|
||||||
|
- `lib/` — Auth config, Prisma singleton, constants, utilities, Telegram query helpers
|
||||||
|
- `hooks/` — Custom React hooks (use-modal, use-debounce, use-current-user)
|
||||||
|
- `types/` — Shared TypeScript types
|
||||||
|
|
||||||
|
### Key Patterns
|
||||||
|
- **Server Components by default** — pages are async server components that fetch data directly. Only interactive components use `"use client"`.
|
||||||
|
- **Server Actions for mutations** — each page directory has an `actions.ts` file with create/update/delete actions.
|
||||||
|
- **Data queries centralized** — all Prisma reads go through `src/data/*.queries.ts`, not inline in components.
|
||||||
|
- **Modal-based CRUD** — add/edit forms use dialog modals, not separate pages.
|
||||||
|
- **TanStack Table** with server-side pagination for all inventory tables.
|
||||||
|
- **All Prisma PKs use `cuid()`** string IDs.
|
||||||
|
|
||||||
|
### Worker Pipeline
|
||||||
|
1. Authenticate Telegram account via TDLib (SMS code flow, managed via admin UI)
|
||||||
|
2. Scan source channels for messages since `lastProcessedMessageId`
|
||||||
|
3. Detect archives (ZIP/RAR), group multipart sets, extract file listings
|
||||||
|
4. Hash for dedup, match preview images, extract creator from filename
|
||||||
|
5. Split files >2GB, upload to destination channel, track progress
|
||||||
|
|
||||||
|
### ESLint Scope
|
||||||
|
ESLint covers `src/` only. The `worker/`, `bot/`, `scripts/`, and `prisma/seed.ts` directories are excluded from linting.
|
||||||
|
|
||||||
|
## Docker Deployment
|
||||||
|
|
||||||
|
- `docker-compose.yml` — Production: app + worker + bot + db
|
||||||
|
- `docker-compose.dev.yml` — Dev: db + worker only (app runs locally)
|
||||||
|
- `docker-entrypoint.sh` — Runs migrations, optional seeding, then starts app
|
||||||
|
- Bot service uses Docker Compose profiles (`bot` or `full`) — not started by default
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
No test framework is configured. Testing is manual.
|
||||||
15
Dockerfile
15
Dockerfile
@@ -17,6 +17,8 @@ COPY --from=deps /app/node_modules ./node_modules
|
|||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
ENV NEXT_TELEMETRY_DISABLED=1
|
ENV NEXT_TELEMETRY_DISABLED=1
|
||||||
|
ARG NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||||
|
ENV NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL}
|
||||||
RUN npm run build
|
RUN npm run build
|
||||||
|
|
||||||
# --- Production image ---
|
# --- Production image ---
|
||||||
@@ -30,19 +32,19 @@ RUN addgroup --system --gid 1001 nodejs && \
|
|||||||
adduser --system --uid 1001 nextjs
|
adduser --system --uid 1001 nextjs
|
||||||
|
|
||||||
# Copy public assets
|
# Copy public assets
|
||||||
COPY --from=builder /app/public ./public
|
COPY --from=builder --chown=nextjs:nodejs /app/public ./public
|
||||||
|
|
||||||
# Copy prisma schema + migrations for runtime migrate deploy
|
|
||||||
COPY --from=builder /app/prisma ./prisma
|
|
||||||
COPY --from=builder /app/prisma.config.ts ./prisma.config.ts
|
|
||||||
|
|
||||||
# Copy standalone build output
|
# Copy standalone build output
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
|
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||||
|
|
||||||
|
# Copy prisma schema + migrations for runtime migrate deploy
|
||||||
|
COPY --from=builder --chown=nextjs:nodejs /app/prisma ./prisma
|
||||||
|
COPY --from=builder --chown=nextjs:nodejs /app/prisma.config.ts ./prisma.config.ts
|
||||||
|
|
||||||
# Copy node_modules for prisma CLI (needed for migrate deploy at startup).
|
# Copy node_modules for prisma CLI (needed for migrate deploy at startup).
|
||||||
# Copying the full directory ensures all transitive dependencies are present.
|
# Copying the full directory ensures all transitive dependencies are present.
|
||||||
COPY --from=builder /app/node_modules ./node_modules
|
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
||||||
# Recreate the .bin/prisma symlink so Node resolves __dirname to prisma/build/,
|
# Recreate the .bin/prisma symlink so Node resolves __dirname to prisma/build/,
|
||||||
# where the WASM files live (COPY dereferences symlinks, breaking WASM resolution)
|
# where the WASM files live (COPY dereferences symlinks, breaking WASM resolution)
|
||||||
RUN mkdir -p ./node_modules/.bin && \
|
RUN mkdir -p ./node_modules/.bin && \
|
||||||
@@ -54,6 +56,7 @@ RUN chmod +x docker-entrypoint.sh
|
|||||||
|
|
||||||
USER nextjs
|
USER nextjs
|
||||||
|
|
||||||
|
# Default port — overridden at runtime by the PORT env var (set via docker-compose APP_PORT)
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
ENV PORT=3000
|
ENV PORT=3000
|
||||||
ENV HOSTNAME="0.0.0.0"
|
ENV HOSTNAME="0.0.0.0"
|
||||||
|
|||||||
13
README.md
13
README.md
@@ -125,18 +125,15 @@ docker compose up -d
|
|||||||
|
|
||||||
The app will be available at [http://localhost:3000](http://localhost:3000).
|
The app will be available at [http://localhost:3000](http://localhost:3000).
|
||||||
|
|
||||||
### Adding Telegram Services
|
### Adding the Telegram Bot
|
||||||
|
|
||||||
The worker and bot run as optional profiles so `docker compose up` works with just the app + database:
|
The worker starts by default with `docker compose up`. The bot runs as an optional profile:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# App + DB + Telegram worker (needs TELEGRAM_API_ID + TELEGRAM_API_HASH in .env)
|
|
||||||
docker compose --profile telegram up -d
|
|
||||||
|
|
||||||
# App + DB + Worker + Bot (also needs BOT_TOKEN in .env)
|
# App + DB + Worker + Bot (also needs BOT_TOKEN in .env)
|
||||||
docker compose --profile full up -d
|
docker compose --profile full up -d
|
||||||
|
|
||||||
# Or just the bot (alongside app + db)
|
# Or just the bot (alongside app + db + worker)
|
||||||
docker compose --profile bot up -d
|
docker compose --profile bot up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -297,5 +294,7 @@ curl http://localhost:3000/api/health
|
|||||||
5. Open a Pull Request
|
5. Open a Pull Request
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { config } from "./util/config.js";
|
import { config } from "./util/config.js";
|
||||||
import { logger } from "./util/logger.js";
|
import { logger } from "./util/logger.js";
|
||||||
import { db, pool } from "./db/client.js";
|
import { db, pool } from "./db/client.js";
|
||||||
import { createBotClient, closeBotClient, onBotUpdate } from "./tdlib/client.js";
|
import { createBotClient, closeBotClient, onBotUpdate, getUser } from "./tdlib/client.js";
|
||||||
import { startSendListener, stopSendListener } from "./send-listener.js";
|
import { startSendListener, stopSendListener } from "./send-listener.js";
|
||||||
import { handleMessage } from "./commands.js";
|
import { handleMessage } from "./commands.js";
|
||||||
import { mkdir } from "fs/promises";
|
import { mkdir } from "fs/promises";
|
||||||
@@ -49,14 +49,27 @@ async function main(): Promise<void> {
|
|||||||
const userId = senderId.user_id as number;
|
const userId = senderId.user_id as number;
|
||||||
|
|
||||||
if (text && userId) {
|
if (text && userId) {
|
||||||
// Get user info for display name (async but fire-and-forget for perf)
|
(async () => {
|
||||||
handleMessage({
|
let firstName = "User";
|
||||||
|
let lastName: string | undefined;
|
||||||
|
let username: string | undefined;
|
||||||
|
try {
|
||||||
|
const userInfo = await getUser(userId);
|
||||||
|
firstName = userInfo.firstName;
|
||||||
|
lastName = userInfo.lastName;
|
||||||
|
username = userInfo.username;
|
||||||
|
} catch {
|
||||||
|
// Fall back to defaults if getUser fails
|
||||||
|
}
|
||||||
|
await handleMessage({
|
||||||
chatId: BigInt(chatId),
|
chatId: BigInt(chatId),
|
||||||
userId: BigInt(userId),
|
userId: BigInt(userId),
|
||||||
text,
|
text,
|
||||||
firstName: "User", // TDLib provides this via a separate getUser call
|
firstName,
|
||||||
username: undefined,
|
lastName,
|
||||||
}).catch((err) => {
|
username,
|
||||||
|
});
|
||||||
|
})().catch((err) => {
|
||||||
log.error({ err, chatId, userId }, "Failed to handle message");
|
log.error({ err, chatId, userId }, "Failed to handle message");
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,17 +8,31 @@ import {
|
|||||||
getGlobalDestinationChannel,
|
getGlobalDestinationChannel,
|
||||||
} from "./db/queries.js";
|
} from "./db/queries.js";
|
||||||
import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
|
import { copyMessageToUser, sendTextMessage, sendPhotoMessage } from "./tdlib/client.js";
|
||||||
|
import { sleep } from "./util/flood-wait.js";
|
||||||
|
|
||||||
const log = childLogger("send-listener");
|
const log = childLogger("send-listener");
|
||||||
|
|
||||||
let pgClient: pg.PoolClient | null = null;
|
let pgClient: pg.PoolClient | null = null;
|
||||||
|
let stopped = false;
|
||||||
|
|
||||||
|
/** Delay (ms) before attempting to reconnect after a connection loss. */
|
||||||
|
const RECONNECT_DELAY_MS = 5_000;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start listening for pg_notify signals:
|
* Start listening for pg_notify signals:
|
||||||
* - `bot_send` — payload = requestId → send a package to a user
|
* - `bot_send` — payload = requestId → send a package to a user
|
||||||
* - `new_package` — payload = JSON { packageId, fileName, creator } → notify subscribers
|
* - `new_package` — payload = JSON { packageId, fileName, creator } → notify subscribers
|
||||||
|
*
|
||||||
|
* If the underlying connection is lost, the listener automatically reconnects
|
||||||
|
* so that pg_notify signals are never silently dropped.
|
||||||
*/
|
*/
|
||||||
export async function startSendListener(): Promise<void> {
|
export async function startSendListener(): Promise<void> {
|
||||||
|
stopped = false;
|
||||||
|
await connectListener();
|
||||||
|
}
|
||||||
|
|
||||||
|
async function connectListener(): Promise<void> {
|
||||||
|
try {
|
||||||
pgClient = await pool.connect();
|
pgClient = await pool.connect();
|
||||||
await pgClient.query("LISTEN bot_send");
|
await pgClient.query("LISTEN bot_send");
|
||||||
await pgClient.query("LISTEN new_package");
|
await pgClient.query("LISTEN new_package");
|
||||||
@@ -31,10 +45,46 @@ export async function startSendListener(): Promise<void> {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Reconnect automatically when the connection ends unexpectedly
|
||||||
|
pgClient.on("end", () => {
|
||||||
|
if (!stopped) {
|
||||||
|
log.warn("Send listener connection lost — reconnecting");
|
||||||
|
pgClient = null;
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
pgClient.on("error", (err) => {
|
||||||
|
log.error({ err }, "Send listener connection error");
|
||||||
|
if (!stopped && pgClient) {
|
||||||
|
try {
|
||||||
|
pgClient.release(true);
|
||||||
|
} catch (releaseErr) {
|
||||||
|
log.debug({ err: releaseErr }, "Failed to release pg client after error");
|
||||||
|
}
|
||||||
|
pgClient = null;
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
log.info("Send listener started (bot_send, new_package)");
|
log.info("Send listener started (bot_send, new_package)");
|
||||||
|
} catch (err) {
|
||||||
|
log.error({ err }, "Failed to start send listener — retrying");
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function scheduleReconnect(): void {
|
||||||
|
if (stopped) return;
|
||||||
|
setTimeout(() => {
|
||||||
|
if (!stopped) {
|
||||||
|
connectListener();
|
||||||
|
}
|
||||||
|
}, RECONNECT_DELAY_MS);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function stopSendListener(): void {
|
export function stopSendListener(): void {
|
||||||
|
stopped = true;
|
||||||
if (pgClient) {
|
if (pgClient) {
|
||||||
pgClient.release();
|
pgClient.release();
|
||||||
pgClient = null;
|
pgClient = null;
|
||||||
@@ -133,7 +183,7 @@ async function handleNewPackage(payload: string): Promise<void> {
|
|||||||
userSubs.set(key, patterns);
|
userSubs.set(key, patterns);
|
||||||
}
|
}
|
||||||
|
|
||||||
const creator = data.creator ? ` by ${data.creator}` : "";
|
const creator = data.creator ? ` by ${escapeHtml(data.creator)}` : "";
|
||||||
for (const [telegramUserId, patterns] of userSubs) {
|
for (const [telegramUserId, patterns] of userSubs) {
|
||||||
const msg = [
|
const msg = [
|
||||||
`🔔 <b>New package matching your subscriptions:</b>`,
|
`🔔 <b>New package matching your subscriptions:</b>`,
|
||||||
@@ -151,6 +201,9 @@ async function handleNewPackage(payload: string): Promise<void> {
|
|||||||
"Failed to notify subscriber"
|
"Failed to notify subscriber"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Rate limit delay between notifications (~20 msgs/sec, under 30 msgs/sec bot limit)
|
||||||
|
await sleep(50);
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
log.error({ err, payload }, "Failed to process new_package notification");
|
log.error({ err, payload }, "Failed to process new_package notification");
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import tdl from "tdl";
|
|||||||
import { getTdjson } from "prebuilt-tdlib";
|
import { getTdjson } from "prebuilt-tdlib";
|
||||||
import { config } from "../util/config.js";
|
import { config } from "../util/config.js";
|
||||||
import { childLogger } from "../util/logger.js";
|
import { childLogger } from "../util/logger.js";
|
||||||
|
import { withFloodWait } from "../util/flood-wait.js";
|
||||||
|
|
||||||
const log = childLogger("tdlib-bot");
|
const log = childLogger("tdlib-bot");
|
||||||
|
|
||||||
@@ -33,7 +34,7 @@ export async function createBotClient(): Promise<tdl.Client> {
|
|||||||
|
|
||||||
await client.login(() => ({
|
await client.login(() => ({
|
||||||
type: "bot",
|
type: "bot",
|
||||||
token: config.botToken,
|
getToken: () => Promise.resolve(config.botToken),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
log.info("Bot client authenticated successfully");
|
log.info("Bot client authenticated successfully");
|
||||||
@@ -54,7 +55,10 @@ export async function closeBotClient(): Promise<void> {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Forward a message from a channel to a user's DM.
|
* Forward a message from a channel to a user's DM.
|
||||||
* Uses copyMessage to make it appear as sent by the bot.
|
* Uses forwardMessages with send_copy to make it appear as sent by the bot.
|
||||||
|
*
|
||||||
|
* The fromChatId is the TDLib chat ID stored in the DB — already in the correct
|
||||||
|
* format (negative for supergroups/channels, e.g. -1001234567890).
|
||||||
*/
|
*/
|
||||||
export async function copyMessageToUser(
|
export async function copyMessageToUser(
|
||||||
fromChatId: bigint,
|
fromChatId: bigint,
|
||||||
@@ -62,19 +66,20 @@ export async function copyMessageToUser(
|
|||||||
toUserId: bigint
|
toUserId: bigint
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
if (!client) throw new Error("Bot client not initialized");
|
if (!client) throw new Error("Bot client not initialized");
|
||||||
|
const c = client;
|
||||||
|
|
||||||
// TDLib uses negative chat IDs for channels/supergroups
|
await withFloodWait(
|
||||||
// The telegramId from the DB is the raw Telegram ID; for channels it needs -100 prefix
|
() =>
|
||||||
const fromChatIdNum = Number(-100n * 1n) + Number(fromChatId);
|
c.invoke({
|
||||||
|
|
||||||
await client.invoke({
|
|
||||||
_: "forwardMessages",
|
_: "forwardMessages",
|
||||||
chat_id: Number(toUserId),
|
chat_id: Number(toUserId),
|
||||||
from_chat_id: Number(fromChatId) > 0 ? -Number(fromChatId) : Number(fromChatId),
|
from_chat_id: Number(fromChatId),
|
||||||
message_ids: [Number(messageId)],
|
message_ids: [Number(messageId)],
|
||||||
send_copy: true,
|
send_copy: true,
|
||||||
remove_caption: false,
|
remove_caption: false,
|
||||||
});
|
}),
|
||||||
|
"copyMessageToUser"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -86,22 +91,31 @@ export async function sendTextMessage(
|
|||||||
parseMode: "textParseModeMarkdown" | "textParseModeHTML" = "textParseModeMarkdown"
|
parseMode: "textParseModeMarkdown" | "textParseModeHTML" = "textParseModeMarkdown"
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
if (!client) throw new Error("Bot client not initialized");
|
if (!client) throw new Error("Bot client not initialized");
|
||||||
|
const c = client;
|
||||||
|
|
||||||
// Parse the text first
|
// Parse the text first
|
||||||
const parsed = await client.invoke({
|
const parsed = await withFloodWait(
|
||||||
|
() =>
|
||||||
|
c.invoke({
|
||||||
_: "parseTextEntities",
|
_: "parseTextEntities",
|
||||||
text,
|
text,
|
||||||
parse_mode: { _: parseMode, version: parseMode === "textParseModeMarkdown" ? 2 : 0 },
|
parse_mode: { _: parseMode, version: parseMode === "textParseModeMarkdown" ? 2 : 0 },
|
||||||
});
|
}),
|
||||||
|
"parseTextEntities"
|
||||||
|
);
|
||||||
|
|
||||||
await client.invoke({
|
await withFloodWait(
|
||||||
|
() =>
|
||||||
|
c.invoke({
|
||||||
_: "sendMessage",
|
_: "sendMessage",
|
||||||
chat_id: Number(chatId),
|
chat_id: Number(chatId),
|
||||||
input_message_content: {
|
input_message_content: {
|
||||||
_: "inputMessageText",
|
_: "inputMessageText",
|
||||||
text: parsed,
|
text: parsed,
|
||||||
},
|
},
|
||||||
});
|
}),
|
||||||
|
"sendTextMessage"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -113,6 +127,7 @@ export async function sendPhotoMessage(
|
|||||||
caption: string
|
caption: string
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
if (!client) throw new Error("Bot client not initialized");
|
if (!client) throw new Error("Bot client not initialized");
|
||||||
|
const c = client;
|
||||||
|
|
||||||
// Write the photo to a temp file
|
// Write the photo to a temp file
|
||||||
const { writeFile, unlink } = await import("fs/promises");
|
const { writeFile, unlink } = await import("fs/promises");
|
||||||
@@ -122,13 +137,19 @@ export async function sendPhotoMessage(
|
|||||||
try {
|
try {
|
||||||
await writeFile(tempPath, photoData);
|
await writeFile(tempPath, photoData);
|
||||||
|
|
||||||
const parsedCaption = await client.invoke({
|
const parsedCaption = await withFloodWait(
|
||||||
|
() =>
|
||||||
|
c.invoke({
|
||||||
_: "parseTextEntities",
|
_: "parseTextEntities",
|
||||||
text: caption,
|
text: caption,
|
||||||
parse_mode: { _: "textParseModeMarkdown", version: 2 },
|
parse_mode: { _: "textParseModeMarkdown", version: 2 },
|
||||||
});
|
}),
|
||||||
|
"parsePhotoCaption"
|
||||||
|
);
|
||||||
|
|
||||||
await client.invoke({
|
await withFloodWait(
|
||||||
|
() =>
|
||||||
|
c.invoke({
|
||||||
_: "sendMessage",
|
_: "sendMessage",
|
||||||
chat_id: Number(chatId),
|
chat_id: Number(chatId),
|
||||||
input_message_content: {
|
input_message_content: {
|
||||||
@@ -138,12 +159,41 @@ export async function sendPhotoMessage(
|
|||||||
width: 0,
|
width: 0,
|
||||||
height: 0,
|
height: 0,
|
||||||
},
|
},
|
||||||
});
|
}),
|
||||||
|
"sendPhotoMessage"
|
||||||
|
);
|
||||||
} finally {
|
} finally {
|
||||||
await unlink(tempPath).catch(() => {});
|
await unlink(tempPath).catch(() => {});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get basic info about a Telegram user (name, username).
|
||||||
|
*/
|
||||||
|
export async function getUser(
|
||||||
|
userId: number
|
||||||
|
): Promise<{ firstName: string; lastName?: string; username?: string }> {
|
||||||
|
if (!client) throw new Error("Bot client not initialized");
|
||||||
|
const c = client;
|
||||||
|
const user = (await withFloodWait(
|
||||||
|
() =>
|
||||||
|
c.invoke({
|
||||||
|
_: "getUser",
|
||||||
|
user_id: userId,
|
||||||
|
}),
|
||||||
|
"getUser"
|
||||||
|
)) as {
|
||||||
|
first_name?: string;
|
||||||
|
last_name?: string;
|
||||||
|
usernames?: { editable_username?: string };
|
||||||
|
};
|
||||||
|
return {
|
||||||
|
firstName: user.first_name ?? "User",
|
||||||
|
lastName: user.last_name || undefined,
|
||||||
|
username: user.usernames?.editable_username || undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get updates from TDLib. The bot listens for new messages this way.
|
* Get updates from TDLib. The bot listens for new messages this way.
|
||||||
*/
|
*/
|
||||||
|
|||||||
60
bot/src/util/flood-wait.ts
Normal file
60
bot/src/util/flood-wait.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import { childLogger } from "./logger.js";
|
||||||
|
|
||||||
|
const log = childLogger("flood-wait");
|
||||||
|
|
||||||
|
function sleep(ms: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract the mandatory wait duration (in seconds) from a Telegram
|
||||||
|
* FLOOD_WAIT error. Returns null when the error is not rate-limit related.
|
||||||
|
*/
|
||||||
|
export function extractFloodWaitSeconds(err: unknown): number | null {
|
||||||
|
const message = err instanceof Error ? err.message : String(err);
|
||||||
|
|
||||||
|
// Pattern 1: FLOOD_WAIT_30
|
||||||
|
const flood = message.match(/FLOOD_WAIT_(\d+)/i);
|
||||||
|
if (flood) return parseInt(flood[1], 10);
|
||||||
|
|
||||||
|
// Pattern 2: "retry after 30"
|
||||||
|
const retry = message.match(/retry after (\d+)/i);
|
||||||
|
if (retry) return parseInt(retry[1], 10);
|
||||||
|
|
||||||
|
// Pattern 3: HTTP 429 without explicit seconds
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
if (String((err as any)?.code) === "429") return 30;
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wrap any async Telegram operation with automatic FLOOD_WAIT retry.
|
||||||
|
* Adds random jitter (1-5s) to prevent thundering-herd retries.
|
||||||
|
*
|
||||||
|
* Non-rate-limit errors are re-thrown immediately (fail-fast).
|
||||||
|
*/
|
||||||
|
export async function withFloodWait<T>(
|
||||||
|
fn: () => Promise<T>,
|
||||||
|
context?: string,
|
||||||
|
maxRetries = 5
|
||||||
|
): Promise<T> {
|
||||||
|
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
||||||
|
try {
|
||||||
|
return await fn();
|
||||||
|
} catch (err) {
|
||||||
|
const wait = extractFloodWaitSeconds(err);
|
||||||
|
if (wait === null || attempt >= maxRetries) throw err;
|
||||||
|
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
log.warn(
|
||||||
|
{ context, wait, attempt: attempt + 1, maxRetries, jitter: Math.round(jitter) },
|
||||||
|
"FLOOD_WAIT received — backing off"
|
||||||
|
);
|
||||||
|
await sleep(wait * 1000 + jitter);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
throw new Error("Unreachable");
|
||||||
|
}
|
||||||
|
|
||||||
|
export { sleep };
|
||||||
@@ -16,7 +16,6 @@ services:
|
|||||||
retries: 5
|
retries: 5
|
||||||
|
|
||||||
worker:
|
worker:
|
||||||
profiles: ["telegram", "full"]
|
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: worker/Dockerfile
|
dockerfile: worker/Dockerfile
|
||||||
|
|||||||
@@ -5,23 +5,29 @@ services:
|
|||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
pull_policy: never
|
pull_policy: never
|
||||||
ports:
|
ports:
|
||||||
- "${APP_PORT:-3000}:3000"
|
- "${APP_PORT:-3000}:${APP_PORT:-3000}"
|
||||||
environment:
|
environment:
|
||||||
- DATABASE_URL=postgresql://${POSTGRES_USER:-dragons}:${POSTGRES_PASSWORD:-stash}@db:5432/${POSTGRES_DB:-dragonsstash}
|
- DATABASE_URL=postgresql://${POSTGRES_USER:-dragons}:${POSTGRES_PASSWORD:-stash}@db:5432/${POSTGRES_DB:-dragonsstash}
|
||||||
- AUTH_SECRET=${AUTH_SECRET:?Set AUTH_SECRET in .env}
|
- AUTH_SECRET=${AUTH_SECRET:?Set AUTH_SECRET in .env}
|
||||||
- AUTH_TRUST_HOST=true
|
- AUTH_TRUST_HOST=true
|
||||||
|
- AUTH_GITHUB_ID=${AUTH_GITHUB_ID:-}
|
||||||
|
- AUTH_GITHUB_SECRET=${AUTH_GITHUB_SECRET:-}
|
||||||
- NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000}
|
- NEXT_PUBLIC_APP_URL=${NEXT_PUBLIC_APP_URL:-http://localhost:3000}
|
||||||
- TELEGRAM_API_KEY=${TELEGRAM_API_KEY:-}
|
- TELEGRAM_API_KEY=${TELEGRAM_API_KEY:-}
|
||||||
- BOT_TOKEN=${BOT_TOKEN:-}
|
- BOT_TOKEN=${BOT_TOKEN:-}
|
||||||
|
- BOT_USERNAME=${BOT_USERNAME:-}
|
||||||
|
- LOG_LEVEL=${LOG_LEVEL:-info}
|
||||||
|
- WORKER_INTERVAL_MINUTES=${WORKER_INTERVAL_MINUTES:-60}
|
||||||
|
- PORT=${APP_PORT:-3000}
|
||||||
depends_on:
|
depends_on:
|
||||||
db:
|
db:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/api/health"]
|
test: ["CMD-SHELL", "wget -q --spider http://localhost:$$PORT/api/health || exit 1"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 3
|
retries: 3
|
||||||
start_period: 30s
|
start_period: 60s
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
@@ -31,7 +37,6 @@ services:
|
|||||||
- frontend
|
- frontend
|
||||||
|
|
||||||
worker:
|
worker:
|
||||||
profiles: ["telegram", "full"]
|
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: worker/Dockerfile
|
dockerfile: worker/Dockerfile
|
||||||
|
|||||||
@@ -10,7 +10,10 @@ if [ "$AUTH_SECRET" = "change-me-to-a-random-secret-in-production" ] || [ -z "$A
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
echo "Running database migrations..."
|
echo "Running database migrations..."
|
||||||
./node_modules/.bin/prisma migrate deploy
|
if ! ./node_modules/.bin/prisma migrate deploy; then
|
||||||
|
echo "ERROR: Database migration failed. Check DATABASE_URL and database connectivity."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
if [ "$SEED_DATABASE" = "true" ]; then
|
if [ "$SEED_DATABASE" = "true" ]; then
|
||||||
echo "Seeding database..."
|
echo "Seeding database..."
|
||||||
|
|||||||
221
install.cmd
Normal file
221
install.cmd
Normal file
@@ -0,0 +1,221 @@
|
|||||||
|
@echo off
|
||||||
|
setlocal enabledelayedexpansion
|
||||||
|
|
||||||
|
REM Claude Code Windows CMD Bootstrap Script
|
||||||
|
REM Installs Claude Code for environments where PowerShell is not available
|
||||||
|
|
||||||
|
REM Parse command line argument
|
||||||
|
set "TARGET=%~1"
|
||||||
|
if "!TARGET!"=="" set "TARGET=latest"
|
||||||
|
|
||||||
|
REM Validate target parameter
|
||||||
|
if /i "!TARGET!"=="stable" goto :target_valid
|
||||||
|
if /i "!TARGET!"=="latest" goto :target_valid
|
||||||
|
echo !TARGET! | findstr /r "^[0-9][0-9]*\.[0-9][0-9]*\.[0-9][0-9]*" >nul
|
||||||
|
if !ERRORLEVEL! equ 0 goto :target_valid
|
||||||
|
|
||||||
|
echo Usage: %0 [stable^|latest^|VERSION] >&2
|
||||||
|
echo Example: %0 1.0.58 >&2
|
||||||
|
exit /b 1
|
||||||
|
|
||||||
|
:target_valid
|
||||||
|
|
||||||
|
REM Check for 64-bit Windows
|
||||||
|
if /i "%PROCESSOR_ARCHITECTURE%"=="AMD64" goto :arch_valid
|
||||||
|
if /i "%PROCESSOR_ARCHITECTURE%"=="ARM64" goto :arch_valid
|
||||||
|
if /i "%PROCESSOR_ARCHITEW6432%"=="AMD64" goto :arch_valid
|
||||||
|
if /i "%PROCESSOR_ARCHITEW6432%"=="ARM64" goto :arch_valid
|
||||||
|
|
||||||
|
echo Claude Code does not support 32-bit Windows. Please use a 64-bit version of Windows. >&2
|
||||||
|
exit /b 1
|
||||||
|
|
||||||
|
:arch_valid
|
||||||
|
|
||||||
|
REM Set constants
|
||||||
|
set "GCS_BUCKET=https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases"
|
||||||
|
set "DOWNLOAD_DIR=%USERPROFILE%\.claude\downloads"
|
||||||
|
REM Use native ARM64 binary on ARM64 Windows, x64 otherwise
|
||||||
|
if /i "%PROCESSOR_ARCHITECTURE%"=="ARM64" (
|
||||||
|
set "PLATFORM=win32-arm64"
|
||||||
|
) else (
|
||||||
|
set "PLATFORM=win32-x64"
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Create download directory
|
||||||
|
if not exist "!DOWNLOAD_DIR!" mkdir "!DOWNLOAD_DIR!"
|
||||||
|
|
||||||
|
REM Check for curl availability
|
||||||
|
curl --version >nul 2>&1
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo curl is required but not available. Please install curl or use PowerShell installer. >&2
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Always download latest version (which has the most up-to-date installer)
|
||||||
|
call :download_file "!GCS_BUCKET!/latest" "!DOWNLOAD_DIR!\latest"
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo Failed to get latest version >&2
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Read version from file
|
||||||
|
set /p VERSION=<"!DOWNLOAD_DIR!\latest"
|
||||||
|
del "!DOWNLOAD_DIR!\latest"
|
||||||
|
|
||||||
|
REM Download manifest
|
||||||
|
call :download_file "!GCS_BUCKET!/!VERSION!/manifest.json" "!DOWNLOAD_DIR!\manifest.json"
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo Failed to get manifest >&2
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Extract checksum from manifest
|
||||||
|
call :parse_manifest "!DOWNLOAD_DIR!\manifest.json" "!PLATFORM!"
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo Platform !PLATFORM! not found in manifest >&2
|
||||||
|
del "!DOWNLOAD_DIR!\manifest.json" 2>nul
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
del "!DOWNLOAD_DIR!\manifest.json"
|
||||||
|
|
||||||
|
REM Download binary
|
||||||
|
set "BINARY_PATH=!DOWNLOAD_DIR!\claude-!VERSION!-!PLATFORM!.exe"
|
||||||
|
call :download_file "!GCS_BUCKET!/!VERSION!/!PLATFORM!/claude.exe" "!BINARY_PATH!"
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo Failed to download binary >&2
|
||||||
|
if exist "!BINARY_PATH!" del "!BINARY_PATH!"
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Verify checksum
|
||||||
|
call :verify_checksum "!BINARY_PATH!" "!EXPECTED_CHECKSUM!"
|
||||||
|
if !ERRORLEVEL! neq 0 (
|
||||||
|
echo Checksum verification failed >&2
|
||||||
|
del "!BINARY_PATH!"
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Run claude install to set up launcher and shell integration
|
||||||
|
echo Setting up Claude Code...
|
||||||
|
"!BINARY_PATH!" install "!TARGET!"
|
||||||
|
set "INSTALL_RESULT=!ERRORLEVEL!"
|
||||||
|
|
||||||
|
REM Clean up downloaded file
|
||||||
|
REM Wait a moment for any file handles to be released
|
||||||
|
timeout /t 1 /nobreak >nul 2>&1
|
||||||
|
del /f "!BINARY_PATH!" >nul 2>&1
|
||||||
|
if exist "!BINARY_PATH!" (
|
||||||
|
echo Warning: Could not remove temporary file: !BINARY_PATH!
|
||||||
|
)
|
||||||
|
|
||||||
|
if !INSTALL_RESULT! neq 0 (
|
||||||
|
echo Installation failed >&2
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
echo.
|
||||||
|
echo Installation complete^^!
|
||||||
|
echo.
|
||||||
|
exit /b 0
|
||||||
|
|
||||||
|
REM ============================================================================
|
||||||
|
REM SUBROUTINES
|
||||||
|
REM ============================================================================
|
||||||
|
|
||||||
|
:download_file
|
||||||
|
REM Downloads a file using curl
|
||||||
|
REM Args: %1=URL, %2=OutputPath
|
||||||
|
set "URL=%~1"
|
||||||
|
set "OUTPUT=%~2"
|
||||||
|
|
||||||
|
curl -fsSL "!URL!" -o "!OUTPUT!"
|
||||||
|
exit /b !ERRORLEVEL!
|
||||||
|
|
||||||
|
:parse_manifest
|
||||||
|
REM Parse JSON manifest to extract checksum for platform
|
||||||
|
REM Args: %1=ManifestPath, %2=Platform
|
||||||
|
set "MANIFEST_PATH=%~1"
|
||||||
|
set "PLATFORM_NAME=%~2"
|
||||||
|
set "EXPECTED_CHECKSUM="
|
||||||
|
|
||||||
|
REM Use findstr to find platform section, then look for checksum
|
||||||
|
set "FOUND_PLATFORM="
|
||||||
|
set "IN_PLATFORM_SECTION="
|
||||||
|
|
||||||
|
REM Read the manifest line by line
|
||||||
|
for /f "usebackq tokens=*" %%i in ("!MANIFEST_PATH!") do (
|
||||||
|
set "LINE=%%i"
|
||||||
|
|
||||||
|
REM Check if this line contains our platform
|
||||||
|
echo !LINE! | findstr /c:"\"%PLATFORM_NAME%\":" >nul
|
||||||
|
if !ERRORLEVEL! equ 0 (
|
||||||
|
set "IN_PLATFORM_SECTION=1"
|
||||||
|
)
|
||||||
|
|
||||||
|
REM If we're in the platform section, look for checksum
|
||||||
|
if defined IN_PLATFORM_SECTION (
|
||||||
|
echo !LINE! | findstr /c:"\"checksum\":" >nul
|
||||||
|
if !ERRORLEVEL! equ 0 (
|
||||||
|
REM Extract checksum value
|
||||||
|
for /f "tokens=2 delims=:" %%j in ("!LINE!") do (
|
||||||
|
set "CHECKSUM_PART=%%j"
|
||||||
|
REM Remove quotes, whitespace, and comma
|
||||||
|
set "CHECKSUM_PART=!CHECKSUM_PART: =!"
|
||||||
|
set "CHECKSUM_PART=!CHECKSUM_PART:"=!"
|
||||||
|
set "CHECKSUM_PART=!CHECKSUM_PART:,=!"
|
||||||
|
|
||||||
|
REM Check if it looks like a SHA256 (64 hex chars)
|
||||||
|
if not "!CHECKSUM_PART!"=="" (
|
||||||
|
call :check_length "!CHECKSUM_PART!" 64
|
||||||
|
if !ERRORLEVEL! equ 0 (
|
||||||
|
set "EXPECTED_CHECKSUM=!CHECKSUM_PART!"
|
||||||
|
exit /b 0
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
REM Check if we've left the platform section (closing brace)
|
||||||
|
echo !LINE! | findstr /c:"}" >nul
|
||||||
|
if !ERRORLEVEL! equ 0 set "IN_PLATFORM_SECTION="
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if "!EXPECTED_CHECKSUM!"=="" exit /b 1
|
||||||
|
exit /b 0
|
||||||
|
|
||||||
|
:check_length
|
||||||
|
REM Check if string length equals expected length
|
||||||
|
REM Args: %1=String, %2=ExpectedLength
|
||||||
|
set "STR=%~1"
|
||||||
|
set "EXPECTED_LEN=%~2"
|
||||||
|
set "LEN=0"
|
||||||
|
:count_loop
|
||||||
|
if "!STR:~%LEN%,1!"=="" goto :count_done
|
||||||
|
set /a LEN+=1
|
||||||
|
goto :count_loop
|
||||||
|
:count_done
|
||||||
|
if %LEN%==%EXPECTED_LEN% exit /b 0
|
||||||
|
exit /b 1
|
||||||
|
|
||||||
|
:verify_checksum
|
||||||
|
REM Verify file checksum using certutil
|
||||||
|
REM Args: %1=FilePath, %2=ExpectedChecksum
|
||||||
|
set "FILE_PATH=%~1"
|
||||||
|
set "EXPECTED=%~2"
|
||||||
|
|
||||||
|
for /f "skip=1 tokens=*" %%i in ('certutil -hashfile "!FILE_PATH!" SHA256') do (
|
||||||
|
set "ACTUAL=%%i"
|
||||||
|
set "ACTUAL=!ACTUAL: =!"
|
||||||
|
if "!ACTUAL!"=="CertUtil:Thecommandcompletedsuccessfully." goto :verify_done
|
||||||
|
if "!ACTUAL!" neq "" (
|
||||||
|
if /i "!ACTUAL!"=="!EXPECTED!" (
|
||||||
|
exit /b 0
|
||||||
|
) else (
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
:verify_done
|
||||||
|
exit /b 1
|
||||||
2
package-lock.json
generated
2
package-lock.json
generated
@@ -49,7 +49,7 @@
|
|||||||
"ts-node": "^10.9.2",
|
"ts-node": "^10.9.2",
|
||||||
"tsx": "^4.21.0",
|
"tsx": "^4.21.0",
|
||||||
"tw-animate-css": "^1.4.0",
|
"tw-animate-css": "^1.4.0",
|
||||||
"typescript": "^5"
|
"typescript": "5.9.3"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@alloc/quick-lru": {
|
"node_modules/@alloc/quick-lru": {
|
||||||
|
|||||||
@@ -58,6 +58,6 @@
|
|||||||
"ts-node": "^10.9.2",
|
"ts-node": "^10.9.2",
|
||||||
"tsx": "^4.21.0",
|
"tsx": "^4.21.0",
|
||||||
"tw-animate-css": "^1.4.0",
|
"tw-animate-css": "^1.4.0",
|
||||||
"typescript": "^5"
|
"typescript": "5.9.3"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,5 @@
|
|||||||
|
-- Promote all existing users to ADMIN (self-hosted: every user is an admin)
|
||||||
|
UPDATE "User" SET "role" = 'ADMIN' WHERE "role" = 'USER';
|
||||||
|
|
||||||
|
-- Change the default role for new users to ADMIN
|
||||||
|
ALTER TABLE "User" ALTER COLUMN "role" SET DEFAULT 'ADMIN';
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- Change the default for new channels to disabled (isActive = false).
|
||||||
|
-- Existing channels are not affected — admins can manually enable/disable them.
|
||||||
|
ALTER TABLE "telegram_channels" ALTER COLUMN "isActive" SET DEFAULT false;
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "invite_codes" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"code" VARCHAR(32) NOT NULL,
|
||||||
|
"maxUses" INTEGER NOT NULL DEFAULT 1,
|
||||||
|
"uses" INTEGER NOT NULL DEFAULT 0,
|
||||||
|
"expiresAt" TIMESTAMP(3),
|
||||||
|
"createdBy" TEXT NOT NULL,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
|
CONSTRAINT "invite_codes_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "invite_codes_code_key" ON "invite_codes"("code");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "invite_codes_code_idx" ON "invite_codes"("code");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "invite_codes" ADD CONSTRAINT "invite_codes_createdBy_fkey" FOREIGN KEY ("createdBy") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -22,7 +22,7 @@ model User {
|
|||||||
emailVerified DateTime?
|
emailVerified DateTime?
|
||||||
image String?
|
image String?
|
||||||
hashedPassword String?
|
hashedPassword String?
|
||||||
role Role @default(USER)
|
role Role @default(ADMIN)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
@@ -38,6 +38,7 @@ model User {
|
|||||||
tags Tag[]
|
tags Tag[]
|
||||||
settings UserSettings?
|
settings UserSettings?
|
||||||
telegramLink TelegramLink?
|
telegramLink TelegramLink?
|
||||||
|
inviteCodes InviteCode[]
|
||||||
}
|
}
|
||||||
|
|
||||||
model Account {
|
model Account {
|
||||||
@@ -417,7 +418,7 @@ model TelegramChannel {
|
|||||||
title String
|
title String
|
||||||
type ChannelType
|
type ChannelType
|
||||||
isForum Boolean @default(false)
|
isForum Boolean @default(false)
|
||||||
isActive Boolean @default(true)
|
isActive Boolean @default(false)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
@@ -554,6 +555,21 @@ model GlobalSetting {
|
|||||||
@@map("global_settings")
|
@@map("global_settings")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model InviteCode {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
code String @unique @db.VarChar(32)
|
||||||
|
maxUses Int @default(1)
|
||||||
|
uses Int @default(0)
|
||||||
|
expiresAt DateTime?
|
||||||
|
createdBy String
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
|
||||||
|
creator User @relation(fields: [createdBy], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@index([code])
|
||||||
|
@@map("invite_codes")
|
||||||
|
}
|
||||||
|
|
||||||
model ChannelFetchRequest {
|
model ChannelFetchRequest {
|
||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
accountId String
|
accountId String
|
||||||
|
|||||||
214
src/app/(app)/invites/_components/invite-manager.tsx
Normal file
214
src/app/(app)/invites/_components/invite-manager.tsx
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
"use client";
|
||||||
|
|
||||||
|
import { useState, useTransition } from "react";
|
||||||
|
import { Copy, Plus, Trash2 } from "lucide-react";
|
||||||
|
import { Button } from "@/components/ui/button";
|
||||||
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
|
||||||
|
import { Input } from "@/components/ui/input";
|
||||||
|
import { Label } from "@/components/ui/label";
|
||||||
|
import {
|
||||||
|
Table,
|
||||||
|
TableBody,
|
||||||
|
TableCell,
|
||||||
|
TableHead,
|
||||||
|
TableHeader,
|
||||||
|
TableRow,
|
||||||
|
} from "@/components/ui/table";
|
||||||
|
import { Badge } from "@/components/ui/badge";
|
||||||
|
import { createInviteCode, deleteInviteCode } from "../actions";
|
||||||
|
|
||||||
|
type InviteCode = {
|
||||||
|
id: string;
|
||||||
|
code: string;
|
||||||
|
maxUses: number;
|
||||||
|
uses: number;
|
||||||
|
expiresAt: string | null;
|
||||||
|
createdAt: string;
|
||||||
|
creator: { name: string | null };
|
||||||
|
};
|
||||||
|
|
||||||
|
export function InviteManager({
|
||||||
|
inviteCodes,
|
||||||
|
appUrl,
|
||||||
|
}: {
|
||||||
|
inviteCodes: InviteCode[];
|
||||||
|
appUrl: string;
|
||||||
|
}) {
|
||||||
|
const [maxUses, setMaxUses] = useState(1);
|
||||||
|
const [expiresInDays, setExpiresInDays] = useState(7);
|
||||||
|
const [noExpiry, setNoExpiry] = useState(false);
|
||||||
|
const [isPending, startTransition] = useTransition();
|
||||||
|
const [copiedId, setCopiedId] = useState<string | null>(null);
|
||||||
|
|
||||||
|
function handleCreate() {
|
||||||
|
startTransition(async () => {
|
||||||
|
await createInviteCode({
|
||||||
|
maxUses,
|
||||||
|
expiresInDays: noExpiry ? null : expiresInDays,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleDelete(id: string) {
|
||||||
|
startTransition(async () => {
|
||||||
|
await deleteInviteCode(id);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function copyLink(code: string, id: string) {
|
||||||
|
const url = `${appUrl}/register?code=${code}`;
|
||||||
|
navigator.clipboard.writeText(url);
|
||||||
|
setCopiedId(id);
|
||||||
|
setTimeout(() => setCopiedId(null), 2000);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getStatus(invite: InviteCode) {
|
||||||
|
if (invite.uses >= invite.maxUses) return "used";
|
||||||
|
if (invite.expiresAt && new Date(invite.expiresAt) < new Date()) return "expired";
|
||||||
|
return "active";
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="max-w-4xl space-y-6">
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>Create Invite Code</CardTitle>
|
||||||
|
<CardDescription>
|
||||||
|
Generate a new invite code to share with someone
|
||||||
|
</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="flex flex-wrap items-end gap-4">
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="maxUses">Max Uses</Label>
|
||||||
|
<Input
|
||||||
|
id="maxUses"
|
||||||
|
type="number"
|
||||||
|
min={1}
|
||||||
|
max={100}
|
||||||
|
value={maxUses}
|
||||||
|
onChange={(e) => setMaxUses(Number(e.target.value))}
|
||||||
|
className="w-24"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label htmlFor="expiresInDays">
|
||||||
|
Expires in (days)
|
||||||
|
</Label>
|
||||||
|
<Input
|
||||||
|
id="expiresInDays"
|
||||||
|
type="number"
|
||||||
|
min={1}
|
||||||
|
max={365}
|
||||||
|
value={expiresInDays}
|
||||||
|
onChange={(e) => setExpiresInDays(Number(e.target.value))}
|
||||||
|
disabled={noExpiry}
|
||||||
|
className="w-24"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2 pb-1">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
id="noExpiry"
|
||||||
|
checked={noExpiry}
|
||||||
|
onChange={(e) => setNoExpiry(e.target.checked)}
|
||||||
|
className="h-4 w-4"
|
||||||
|
/>
|
||||||
|
<Label htmlFor="noExpiry" className="text-sm">No expiry</Label>
|
||||||
|
</div>
|
||||||
|
<Button onClick={handleCreate} disabled={isPending}>
|
||||||
|
<Plus className="mr-2 h-4 w-4" />
|
||||||
|
{isPending ? "Creating..." : "Create"}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>Invite Codes</CardTitle>
|
||||||
|
<CardDescription>
|
||||||
|
{inviteCodes.length} invite code{inviteCodes.length !== 1 ? "s" : ""} created
|
||||||
|
</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
{inviteCodes.length === 0 ? (
|
||||||
|
<p className="text-sm text-muted-foreground">
|
||||||
|
No invite codes yet. Create one above.
|
||||||
|
</p>
|
||||||
|
) : (
|
||||||
|
<Table>
|
||||||
|
<TableHeader>
|
||||||
|
<TableRow>
|
||||||
|
<TableHead>Code</TableHead>
|
||||||
|
<TableHead>Status</TableHead>
|
||||||
|
<TableHead>Uses</TableHead>
|
||||||
|
<TableHead>Expires</TableHead>
|
||||||
|
<TableHead>Created</TableHead>
|
||||||
|
<TableHead className="text-right">Actions</TableHead>
|
||||||
|
</TableRow>
|
||||||
|
</TableHeader>
|
||||||
|
<TableBody>
|
||||||
|
{inviteCodes.map((invite) => {
|
||||||
|
const status = getStatus(invite);
|
||||||
|
return (
|
||||||
|
<TableRow key={invite.id}>
|
||||||
|
<TableCell className="font-mono text-sm">
|
||||||
|
{invite.code}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
<Badge
|
||||||
|
variant={
|
||||||
|
status === "active"
|
||||||
|
? "default"
|
||||||
|
: status === "used"
|
||||||
|
? "secondary"
|
||||||
|
: "destructive"
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{status}
|
||||||
|
</Badge>
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{invite.uses} / {invite.maxUses}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{invite.expiresAt
|
||||||
|
? new Date(invite.expiresAt).toLocaleDateString()
|
||||||
|
: "Never"}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{new Date(invite.createdAt).toLocaleDateString()}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell className="text-right">
|
||||||
|
<div className="flex justify-end gap-2">
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => copyLink(invite.code, invite.id)}
|
||||||
|
disabled={status !== "active"}
|
||||||
|
>
|
||||||
|
<Copy className="mr-1 h-3 w-3" />
|
||||||
|
{copiedId === invite.id ? "Copied!" : "Copy Link"}
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="destructive"
|
||||||
|
size="sm"
|
||||||
|
onClick={() => handleDelete(invite.id)}
|
||||||
|
disabled={isPending}
|
||||||
|
>
|
||||||
|
<Trash2 className="h-3 w-3" />
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</TableCell>
|
||||||
|
</TableRow>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</TableBody>
|
||||||
|
</Table>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
54
src/app/(app)/invites/actions.ts
Normal file
54
src/app/(app)/invites/actions.ts
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
"use server";
|
||||||
|
|
||||||
|
import crypto from "crypto";
|
||||||
|
import { auth } from "@/lib/auth";
|
||||||
|
import { prisma } from "@/lib/prisma";
|
||||||
|
import type { ActionResult } from "@/types/api.types";
|
||||||
|
import { revalidatePath } from "next/cache";
|
||||||
|
|
||||||
|
export async function createInviteCode(input: {
|
||||||
|
maxUses: number;
|
||||||
|
expiresInDays: number | null;
|
||||||
|
}): Promise<ActionResult<{ code: string }>> {
|
||||||
|
const session = await auth();
|
||||||
|
if (!session?.user?.id || session.user.role !== "ADMIN") {
|
||||||
|
return { success: false, error: "Unauthorized" };
|
||||||
|
}
|
||||||
|
|
||||||
|
const code = crypto.randomBytes(6).toString("hex");
|
||||||
|
const expiresAt = input.expiresInDays
|
||||||
|
? new Date(Date.now() + input.expiresInDays * 24 * 60 * 60 * 1000)
|
||||||
|
: null;
|
||||||
|
|
||||||
|
await prisma.inviteCode.create({
|
||||||
|
data: {
|
||||||
|
code,
|
||||||
|
maxUses: input.maxUses,
|
||||||
|
expiresAt,
|
||||||
|
createdBy: session.user.id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
revalidatePath("/invites");
|
||||||
|
return { success: true, data: { code } };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteInviteCode(id: string): Promise<ActionResult> {
|
||||||
|
const session = await auth();
|
||||||
|
if (!session?.user?.id || session.user.role !== "ADMIN") {
|
||||||
|
return { success: false, error: "Unauthorized" };
|
||||||
|
}
|
||||||
|
|
||||||
|
await prisma.inviteCode.delete({ where: { id } });
|
||||||
|
|
||||||
|
revalidatePath("/invites");
|
||||||
|
return { success: true, data: undefined };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getInviteCodes() {
|
||||||
|
const codes = await prisma.inviteCode.findMany({
|
||||||
|
orderBy: { createdAt: "desc" },
|
||||||
|
include: { creator: { select: { name: true } } },
|
||||||
|
});
|
||||||
|
return codes;
|
||||||
|
}
|
||||||
26
src/app/(app)/invites/page.tsx
Normal file
26
src/app/(app)/invites/page.tsx
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import { auth } from "@/lib/auth";
|
||||||
|
import { redirect } from "next/navigation";
|
||||||
|
import { PageHeader } from "@/components/shared/page-header";
|
||||||
|
import { getInviteCodes } from "./actions";
|
||||||
|
import { InviteManager } from "./_components/invite-manager";
|
||||||
|
|
||||||
|
export default async function InvitesPage() {
|
||||||
|
const session = await auth();
|
||||||
|
if (!session?.user?.id) redirect("/login");
|
||||||
|
if (session.user.role !== "ADMIN") redirect("/dashboard");
|
||||||
|
|
||||||
|
const inviteCodes = await getInviteCodes();
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
<PageHeader
|
||||||
|
title="Invite Codes"
|
||||||
|
description="Manage invite codes for new user registration"
|
||||||
|
/>
|
||||||
|
<InviteManager
|
||||||
|
inviteCodes={JSON.parse(JSON.stringify(inviteCodes))}
|
||||||
|
appUrl={process.env.NEXT_PUBLIC_APP_URL ?? ""}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -7,6 +7,7 @@ import {
|
|||||||
Power,
|
Power,
|
||||||
ArrowDownToLine,
|
ArrowDownToLine,
|
||||||
ArrowUpFromLine,
|
ArrowUpFromLine,
|
||||||
|
RefreshCcw,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
@@ -23,12 +24,14 @@ interface ChannelColumnsProps {
|
|||||||
onToggleActive: (id: string) => void;
|
onToggleActive: (id: string) => void;
|
||||||
onDelete: (id: string) => void;
|
onDelete: (id: string) => void;
|
||||||
onSetType: (id: string, type: "SOURCE" | "DESTINATION") => void;
|
onSetType: (id: string, type: "SOURCE" | "DESTINATION") => void;
|
||||||
|
onRescan: (id: string) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getChannelColumns({
|
export function getChannelColumns({
|
||||||
onToggleActive,
|
onToggleActive,
|
||||||
onDelete,
|
onDelete,
|
||||||
onSetType,
|
onSetType,
|
||||||
|
onRescan,
|
||||||
}: ChannelColumnsProps): ColumnDef<ChannelRow, unknown>[] {
|
}: ChannelColumnsProps): ColumnDef<ChannelRow, unknown>[] {
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
@@ -121,6 +124,14 @@ export function getChannelColumns({
|
|||||||
Set as Source
|
Set as Source
|
||||||
</DropdownMenuItem>
|
</DropdownMenuItem>
|
||||||
)}
|
)}
|
||||||
|
{row.original.type === "SOURCE" && (
|
||||||
|
<DropdownMenuItem
|
||||||
|
onClick={() => onRescan(row.original.id)}
|
||||||
|
>
|
||||||
|
<RefreshCcw className="mr-2 h-3.5 w-3.5" />
|
||||||
|
Rescan Channel
|
||||||
|
</DropdownMenuItem>
|
||||||
|
)}
|
||||||
<DropdownMenuItem
|
<DropdownMenuItem
|
||||||
onClick={() => onToggleActive(row.original.id)}
|
onClick={() => onToggleActive(row.original.id)}
|
||||||
>
|
>
|
||||||
|
|||||||
@@ -2,26 +2,36 @@
|
|||||||
|
|
||||||
import { useState, useTransition } from "react";
|
import { useState, useTransition } from "react";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
|
import { Download } from "lucide-react";
|
||||||
import { getChannelColumns } from "./channel-columns";
|
import { getChannelColumns } from "./channel-columns";
|
||||||
import { DestinationCard } from "./destination-card";
|
import { DestinationCard } from "./destination-card";
|
||||||
|
import { ChannelPickerDialog } from "./channel-picker-dialog";
|
||||||
import {
|
import {
|
||||||
deleteChannel,
|
deleteChannel,
|
||||||
toggleChannelActive,
|
toggleChannelActive,
|
||||||
setChannelType,
|
setChannelType,
|
||||||
|
rescanChannel,
|
||||||
} from "../actions";
|
} from "../actions";
|
||||||
import { DataTable } from "@/components/shared/data-table";
|
import { DataTable } from "@/components/shared/data-table";
|
||||||
import { DeleteDialog } from "@/components/shared/delete-dialog";
|
import { DeleteDialog } from "@/components/shared/delete-dialog";
|
||||||
import type { ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
|
import { Button } from "@/components/ui/button";
|
||||||
|
import type { AccountRow, ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
|
||||||
import { useDataTable } from "@/hooks/use-data-table";
|
import { useDataTable } from "@/hooks/use-data-table";
|
||||||
|
|
||||||
interface ChannelsTabProps {
|
interface ChannelsTabProps {
|
||||||
channels: ChannelRow[];
|
channels: ChannelRow[];
|
||||||
globalDestination: GlobalDestination;
|
globalDestination: GlobalDestination;
|
||||||
|
accounts: AccountRow[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
|
export function ChannelsTab({ channels, globalDestination, accounts }: ChannelsTabProps) {
|
||||||
const [isPending, startTransition] = useTransition();
|
const [isPending, startTransition] = useTransition();
|
||||||
const [deleteId, setDeleteId] = useState<string | null>(null);
|
const [deleteId, setDeleteId] = useState<string | null>(null);
|
||||||
|
const [rescanId, setRescanId] = useState<string | null>(null);
|
||||||
|
const [fetchChannelsAccountId, setFetchChannelsAccountId] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Find the first authenticated account for "Fetch Channels"
|
||||||
|
const authenticatedAccounts = accounts.filter((a) => a.authState === "AUTHENTICATED" && a.isActive);
|
||||||
|
|
||||||
const columns = getChannelColumns({
|
const columns = getChannelColumns({
|
||||||
onToggleActive: (id) => {
|
onToggleActive: (id) => {
|
||||||
@@ -39,6 +49,7 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
|
|||||||
else toast.error(result.error);
|
else toast.error(result.error);
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
onRescan: (id) => setRescanId(id),
|
||||||
});
|
});
|
||||||
|
|
||||||
const { table } = useDataTable({
|
const { table } = useDataTable({
|
||||||
@@ -60,19 +71,51 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleRescan = () => {
|
||||||
|
if (!rescanId) return;
|
||||||
|
startTransition(async () => {
|
||||||
|
const result = await rescanChannel(rescanId);
|
||||||
|
if (result.success) {
|
||||||
|
toast.success("Channel scan progress reset — it will be fully rescanned on the next sync");
|
||||||
|
setRescanId(null);
|
||||||
|
} else {
|
||||||
|
toast.error(result.error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleFetchChannels = () => {
|
||||||
|
if (authenticatedAccounts.length > 0) {
|
||||||
|
setFetchChannelsAccountId(authenticatedAccounts[0].id);
|
||||||
|
} else {
|
||||||
|
toast.error("No authenticated accounts available. Add and authenticate an account first.");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
<DestinationCard destination={globalDestination} />
|
<DestinationCard destination={globalDestination} channels={channels} />
|
||||||
|
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={handleFetchChannels}
|
||||||
|
disabled={authenticatedAccounts.length === 0}
|
||||||
|
>
|
||||||
|
<Download className="mr-2 h-4 w-4" />
|
||||||
|
Fetch Channels
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
|
||||||
{channels.length > 0 && (
|
{channels.length > 0 && (
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-xs text-muted-foreground">
|
||||||
Source channels are added per-account via the "Fetch Channels" button on the Accounts tab.
|
Channels discovered via "Fetch Channels" are automatically activated as sources.
|
||||||
</p>
|
</p>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<DataTable
|
<DataTable
|
||||||
table={table}
|
table={table}
|
||||||
emptyMessage="No channels yet. Use "Fetch Channels" on an account to discover and add source channels."
|
emptyMessage="No channels yet. Click "Fetch Channels" above to discover and add source channels."
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<DeleteDialog
|
<DeleteDialog
|
||||||
@@ -83,6 +126,24 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
|
|||||||
onConfirm={handleDelete}
|
onConfirm={handleDelete}
|
||||||
isLoading={isPending}
|
isLoading={isPending}
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
<DeleteDialog
|
||||||
|
open={!!rescanId}
|
||||||
|
onOpenChange={(open) => !open && setRescanId(null)}
|
||||||
|
title="Rescan Channel"
|
||||||
|
description="This will reset all scan progress for this channel. On the next sync the worker will re-process every message from the beginning. Packages that are already in the library will be skipped (deduplication by hash), but any missing files will be re-downloaded and re-uploaded. This may take a long time for large channels."
|
||||||
|
confirmLabel="Rescan"
|
||||||
|
onConfirm={handleRescan}
|
||||||
|
isLoading={isPending}
|
||||||
|
/>
|
||||||
|
|
||||||
|
<ChannelPickerDialog
|
||||||
|
accountId={fetchChannelsAccountId}
|
||||||
|
open={!!fetchChannelsAccountId}
|
||||||
|
onOpenChange={(open) => {
|
||||||
|
if (!open) setFetchChannelsAccountId(null);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
"use client";
|
"use client";
|
||||||
|
|
||||||
import { useState, useEffect, useTransition } from "react";
|
import { useState, useEffect, useTransition } from "react";
|
||||||
import { Database, AlertTriangle, Link2, Plus, Loader2 } from "lucide-react";
|
import { Database, AlertTriangle, Link2, Plus, Loader2, ArrowRight } from "lucide-react";
|
||||||
import { toast } from "sonner";
|
import { toast } from "sonner";
|
||||||
import { createDestinationViaWorker } from "../actions";
|
import { createDestinationViaWorker, setGlobalDestination } from "../actions";
|
||||||
import { Card, CardContent } from "@/components/ui/card";
|
import { Card, CardContent } from "@/components/ui/card";
|
||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
@@ -17,10 +17,19 @@ import {
|
|||||||
DialogTitle,
|
DialogTitle,
|
||||||
DialogFooter,
|
DialogFooter,
|
||||||
} from "@/components/ui/dialog";
|
} from "@/components/ui/dialog";
|
||||||
import type { GlobalDestination } from "@/lib/telegram/admin-queries";
|
import {
|
||||||
|
Select,
|
||||||
|
SelectContent,
|
||||||
|
SelectItem,
|
||||||
|
SelectTrigger,
|
||||||
|
SelectValue,
|
||||||
|
} from "@/components/ui/select";
|
||||||
|
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||||
|
import type { GlobalDestination, ChannelRow } from "@/lib/telegram/admin-queries";
|
||||||
|
|
||||||
interface DestinationCardProps {
|
interface DestinationCardProps {
|
||||||
destination: GlobalDestination;
|
destination: GlobalDestination;
|
||||||
|
channels?: ChannelRow[];
|
||||||
}
|
}
|
||||||
|
|
||||||
type CreateState =
|
type CreateState =
|
||||||
@@ -29,11 +38,17 @@ type CreateState =
|
|||||||
| { phase: "done"; title: string; telegramId: string }
|
| { phase: "done"; title: string; telegramId: string }
|
||||||
| { phase: "error"; message: string };
|
| { phase: "error"; message: string };
|
||||||
|
|
||||||
export function DestinationCard({ destination }: DestinationCardProps) {
|
export function DestinationCard({ destination, channels = [] }: DestinationCardProps) {
|
||||||
const [isPending, startTransition] = useTransition();
|
const [isPending, startTransition] = useTransition();
|
||||||
const [createOpen, setCreateOpen] = useState(false);
|
const [createOpen, setCreateOpen] = useState(false);
|
||||||
const [title, setTitle] = useState("dragonsstash db");
|
const [title, setTitle] = useState("dragonsstash db");
|
||||||
const [createState, setCreateState] = useState<CreateState>({ phase: "idle" });
|
const [createState, setCreateState] = useState<CreateState>({ phase: "idle" });
|
||||||
|
const [selectedChannelId, setSelectedChannelId] = useState<string>("");
|
||||||
|
|
||||||
|
// Channels that can be assigned as destination (SOURCE channels only, exclude current destination)
|
||||||
|
const assignableChannels = channels.filter(
|
||||||
|
(c) => c.type === "SOURCE" && c.id !== destination?.id
|
||||||
|
);
|
||||||
|
|
||||||
// Poll for worker result when creating
|
// Poll for worker result when creating
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -103,6 +118,21 @@ export function DestinationCard({ destination }: DestinationCardProps) {
|
|||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleAssignExisting = () => {
|
||||||
|
if (!selectedChannelId) return;
|
||||||
|
|
||||||
|
startTransition(async () => {
|
||||||
|
const result = await setGlobalDestination(selectedChannelId);
|
||||||
|
if (result.success) {
|
||||||
|
toast.success("Channel set as destination!");
|
||||||
|
setCreateOpen(false);
|
||||||
|
setSelectedChannelId("");
|
||||||
|
} else {
|
||||||
|
toast.error(result.error ?? "Failed to set destination");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
const handleOpenChange = (open: boolean) => {
|
const handleOpenChange = (open: boolean) => {
|
||||||
setCreateOpen(open);
|
setCreateOpen(open);
|
||||||
if (!open) {
|
if (!open) {
|
||||||
@@ -110,6 +140,7 @@ export function DestinationCard({ destination }: DestinationCardProps) {
|
|||||||
if (createState.phase !== "creating") {
|
if (createState.phase !== "creating") {
|
||||||
setCreateState({ phase: "idle" });
|
setCreateState({ phase: "idle" });
|
||||||
}
|
}
|
||||||
|
setSelectedChannelId("");
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -132,19 +163,23 @@ export function DestinationCard({ destination }: DestinationCardProps) {
|
|||||||
</div>
|
</div>
|
||||||
<Button size="sm" onClick={() => setCreateOpen(true)}>
|
<Button size="sm" onClick={() => setCreateOpen(true)}>
|
||||||
<Plus className="mr-2 h-3.5 w-3.5" />
|
<Plus className="mr-2 h-3.5 w-3.5" />
|
||||||
Create Destination
|
Set Destination
|
||||||
</Button>
|
</Button>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
||||||
<CreateDestinationDialog
|
<DestinationDialog
|
||||||
open={createOpen}
|
open={createOpen}
|
||||||
onOpenChange={handleOpenChange}
|
onOpenChange={handleOpenChange}
|
||||||
title={title}
|
title={title}
|
||||||
setTitle={setTitle}
|
setTitle={setTitle}
|
||||||
onSubmit={handleCreate}
|
onSubmitCreate={handleCreate}
|
||||||
createState={createState}
|
createState={createState}
|
||||||
isPending={isPending}
|
isPending={isPending}
|
||||||
|
assignableChannels={assignableChannels}
|
||||||
|
selectedChannelId={selectedChannelId}
|
||||||
|
setSelectedChannelId={setSelectedChannelId}
|
||||||
|
onSubmitAssign={handleAssignExisting}
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
@@ -187,46 +222,59 @@ export function DestinationCard({ destination }: DestinationCardProps) {
|
|||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
||||||
<CreateDestinationDialog
|
<DestinationDialog
|
||||||
open={createOpen}
|
open={createOpen}
|
||||||
onOpenChange={handleOpenChange}
|
onOpenChange={handleOpenChange}
|
||||||
title={title}
|
title={title}
|
||||||
setTitle={setTitle}
|
setTitle={setTitle}
|
||||||
onSubmit={handleCreate}
|
onSubmitCreate={handleCreate}
|
||||||
createState={createState}
|
createState={createState}
|
||||||
isPending={isPending}
|
isPending={isPending}
|
||||||
|
assignableChannels={assignableChannels}
|
||||||
|
selectedChannelId={selectedChannelId}
|
||||||
|
setSelectedChannelId={setSelectedChannelId}
|
||||||
|
onSubmitAssign={handleAssignExisting}
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
function CreateDestinationDialog({
|
function DestinationDialog({
|
||||||
open,
|
open,
|
||||||
onOpenChange,
|
onOpenChange,
|
||||||
title,
|
title,
|
||||||
setTitle,
|
setTitle,
|
||||||
onSubmit,
|
onSubmitCreate,
|
||||||
createState,
|
createState,
|
||||||
isPending,
|
isPending,
|
||||||
|
assignableChannels,
|
||||||
|
selectedChannelId,
|
||||||
|
setSelectedChannelId,
|
||||||
|
onSubmitAssign,
|
||||||
}: {
|
}: {
|
||||||
open: boolean;
|
open: boolean;
|
||||||
onOpenChange: (open: boolean) => void;
|
onOpenChange: (open: boolean) => void;
|
||||||
title: string;
|
title: string;
|
||||||
setTitle: (v: string) => void;
|
setTitle: (v: string) => void;
|
||||||
onSubmit: () => void;
|
onSubmitCreate: () => void;
|
||||||
createState: CreateState;
|
createState: CreateState;
|
||||||
isPending: boolean;
|
isPending: boolean;
|
||||||
|
assignableChannels: ChannelRow[];
|
||||||
|
selectedChannelId: string;
|
||||||
|
setSelectedChannelId: (v: string) => void;
|
||||||
|
onSubmitAssign: () => void;
|
||||||
}) {
|
}) {
|
||||||
const isCreating = createState.phase === "creating";
|
const isCreating = createState.phase === "creating";
|
||||||
|
const hasAssignable = assignableChannels.length > 0;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Dialog open={open} onOpenChange={onOpenChange}>
|
<Dialog open={open} onOpenChange={onOpenChange}>
|
||||||
<DialogContent className="sm:max-w-md">
|
<DialogContent className="sm:max-w-md">
|
||||||
<DialogHeader>
|
<DialogHeader>
|
||||||
<DialogTitle>Create Destination Channel</DialogTitle>
|
<DialogTitle>Set Destination Channel</DialogTitle>
|
||||||
<DialogDescription>
|
<DialogDescription>
|
||||||
A private Telegram group will be created automatically using one of
|
Choose an existing channel or create a new private group. All
|
||||||
your authenticated accounts. All accounts will write archives here.
|
accounts will write archives to this destination.
|
||||||
</DialogDescription>
|
</DialogDescription>
|
||||||
</DialogHeader>
|
</DialogHeader>
|
||||||
|
|
||||||
@@ -241,7 +289,71 @@ function CreateDestinationDialog({
|
|||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<div className="space-y-4">
|
<Tabs defaultValue={hasAssignable ? "existing" : "create"} className="w-full">
|
||||||
|
<TabsList className="grid w-full grid-cols-2">
|
||||||
|
<TabsTrigger value="existing" disabled={!hasAssignable}>
|
||||||
|
<ArrowRight className="mr-1.5 h-3.5 w-3.5" />
|
||||||
|
Use Existing
|
||||||
|
</TabsTrigger>
|
||||||
|
<TabsTrigger value="create">
|
||||||
|
<Plus className="mr-1.5 h-3.5 w-3.5" />
|
||||||
|
Create New
|
||||||
|
</TabsTrigger>
|
||||||
|
</TabsList>
|
||||||
|
|
||||||
|
<TabsContent value="existing" className="space-y-4 pt-2">
|
||||||
|
{createState.phase === "error" && (
|
||||||
|
<div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
|
||||||
|
<p className="text-sm text-destructive">{createState.message}</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className="space-y-2">
|
||||||
|
<Label>Select Channel</Label>
|
||||||
|
<Select
|
||||||
|
value={selectedChannelId}
|
||||||
|
onValueChange={setSelectedChannelId}
|
||||||
|
>
|
||||||
|
<SelectTrigger>
|
||||||
|
<SelectValue placeholder="Pick a channel..." />
|
||||||
|
</SelectTrigger>
|
||||||
|
<SelectContent>
|
||||||
|
{assignableChannels.map((ch) => (
|
||||||
|
<SelectItem key={ch.id} value={ch.id}>
|
||||||
|
{ch.title}{" "}
|
||||||
|
<span className="text-muted-foreground text-xs">
|
||||||
|
({ch.telegramId})
|
||||||
|
</span>
|
||||||
|
</SelectItem>
|
||||||
|
))}
|
||||||
|
</SelectContent>
|
||||||
|
</Select>
|
||||||
|
<p className="text-xs text-muted-foreground">
|
||||||
|
The selected channel will become the destination. All accounts
|
||||||
|
will be linked as writers automatically.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<DialogFooter>
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
onClick={() => onOpenChange(false)}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
onClick={onSubmitAssign}
|
||||||
|
disabled={isPending || !selectedChannelId}
|
||||||
|
>
|
||||||
|
{isPending && (
|
||||||
|
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||||
|
)}
|
||||||
|
Set as Destination
|
||||||
|
</Button>
|
||||||
|
</DialogFooter>
|
||||||
|
</TabsContent>
|
||||||
|
|
||||||
|
<TabsContent value="create" className="space-y-4 pt-2">
|
||||||
{createState.phase === "error" && (
|
{createState.phase === "error" && (
|
||||||
<div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
|
<div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
|
||||||
<p className="text-sm text-destructive">{createState.message}</p>
|
<p className="text-sm text-destructive">{createState.message}</p>
|
||||||
@@ -257,30 +369,31 @@ function CreateDestinationDialog({
|
|||||||
onChange={(e) => setTitle(e.target.value)}
|
onChange={(e) => setTitle(e.target.value)}
|
||||||
/>
|
/>
|
||||||
<p className="text-xs text-muted-foreground">
|
<p className="text-xs text-muted-foreground">
|
||||||
This will be the name of the Telegram group. You can rename it later in Telegram.
|
A new private Telegram group will be created using one of your
|
||||||
|
authenticated accounts. You can rename it later in Telegram.
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<DialogFooter>
|
<DialogFooter>
|
||||||
<Button
|
<Button
|
||||||
variant="outline"
|
variant="outline"
|
||||||
onClick={() => onOpenChange(false)}
|
onClick={() => onOpenChange(false)}
|
||||||
disabled={isCreating}
|
|
||||||
>
|
>
|
||||||
Cancel
|
Cancel
|
||||||
</Button>
|
</Button>
|
||||||
<Button
|
<Button
|
||||||
onClick={onSubmit}
|
onClick={onSubmitCreate}
|
||||||
disabled={isPending || isCreating || !title.trim()}
|
disabled={isPending || !title.trim()}
|
||||||
>
|
>
|
||||||
{(isPending || isCreating) && (
|
{isPending && (
|
||||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||||
)}
|
)}
|
||||||
Create Group
|
Create Group
|
||||||
</Button>
|
</Button>
|
||||||
</DialogFooter>
|
</DialogFooter>
|
||||||
|
</TabsContent>
|
||||||
|
</Tabs>
|
||||||
|
)}
|
||||||
</DialogContent>
|
</DialogContent>
|
||||||
</Dialog>
|
</Dialog>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ interface TelegramAdminProps {
|
|||||||
ingestionStatus: IngestionAccountStatus[];
|
ingestionStatus: IngestionAccountStatus[];
|
||||||
globalDestination: GlobalDestination;
|
globalDestination: GlobalDestination;
|
||||||
sendHistory: SendHistoryRow[];
|
sendHistory: SendHistoryRow[];
|
||||||
|
workerIntervalMinutes: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function TelegramAdmin({
|
export function TelegramAdmin({
|
||||||
@@ -24,6 +25,7 @@ export function TelegramAdmin({
|
|||||||
ingestionStatus,
|
ingestionStatus,
|
||||||
globalDestination,
|
globalDestination,
|
||||||
sendHistory,
|
sendHistory,
|
||||||
|
workerIntervalMinutes,
|
||||||
}: TelegramAdminProps) {
|
}: TelegramAdminProps) {
|
||||||
return (
|
return (
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
@@ -32,7 +34,7 @@ export function TelegramAdmin({
|
|||||||
description="Manage Telegram accounts, channels, and ingestion"
|
description="Manage Telegram accounts, channels, and ingestion"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<WorkerStatusPanel initialStatus={ingestionStatus} />
|
<WorkerStatusPanel initialStatus={ingestionStatus} initialIntervalMinutes={workerIntervalMinutes} />
|
||||||
|
|
||||||
<Tabs defaultValue="accounts" className="space-y-4">
|
<Tabs defaultValue="accounts" className="space-y-4">
|
||||||
<TabsList>
|
<TabsList>
|
||||||
@@ -51,7 +53,7 @@ export function TelegramAdmin({
|
|||||||
<AccountsTab accounts={accounts} />
|
<AccountsTab accounts={accounts} />
|
||||||
</TabsContent>
|
</TabsContent>
|
||||||
<TabsContent value="channels">
|
<TabsContent value="channels">
|
||||||
<ChannelsTab channels={channels} globalDestination={globalDestination} />
|
<ChannelsTab channels={channels} globalDestination={globalDestination} accounts={accounts} />
|
||||||
</TabsContent>
|
</TabsContent>
|
||||||
<TabsContent value="sends">
|
<TabsContent value="sends">
|
||||||
<BotSendsTab history={sendHistory} />
|
<BotSendsTab history={sendHistory} />
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
"use client";
|
"use client";
|
||||||
|
|
||||||
import { useEffect, useState, useCallback } from "react";
|
import { useEffect, useState, useCallback, useTransition } from "react";
|
||||||
import {
|
import {
|
||||||
Loader2,
|
Loader2,
|
||||||
CheckCircle2,
|
CheckCircle2,
|
||||||
@@ -14,10 +14,13 @@ import { Card, CardContent } from "@/components/ui/card";
|
|||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
|
import { toast } from "sonner";
|
||||||
|
import { triggerIngestion } from "../actions";
|
||||||
import type { IngestionAccountStatus } from "@/lib/telegram/types";
|
import type { IngestionAccountStatus } from "@/lib/telegram/types";
|
||||||
|
|
||||||
interface WorkerStatusPanelProps {
|
interface WorkerStatusPanelProps {
|
||||||
initialStatus: IngestionAccountStatus[];
|
initialStatus: IngestionAccountStatus[];
|
||||||
|
initialIntervalMinutes?: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
const AUTH_STATE_CONFIG: Record<
|
const AUTH_STATE_CONFIG: Record<
|
||||||
@@ -39,15 +42,28 @@ const AUTH_STATE_CONFIG: Record<
|
|||||||
EXPIRED: { label: "Expired", color: "text-red-500", icon: "x" },
|
EXPIRED: { label: "Expired", color: "text-red-500", icon: "x" },
|
||||||
};
|
};
|
||||||
|
|
||||||
export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
export function WorkerStatusPanel({ initialStatus, initialIntervalMinutes = 60 }: WorkerStatusPanelProps) {
|
||||||
const [accounts, setAccounts] = useState(initialStatus);
|
const [accounts, setAccounts] = useState(initialStatus);
|
||||||
const [error, setError] = useState(false);
|
const [error, setError] = useState(false);
|
||||||
const [nextRunCountdown, setNextRunCountdown] = useState<string | null>(null);
|
const [nextRunCountdown, setNextRunCountdown] = useState<string | null>(null);
|
||||||
|
const [workerIntervalMinutes, setWorkerIntervalMinutes] = useState(initialIntervalMinutes);
|
||||||
|
const [isPending, startTransition] = useTransition();
|
||||||
|
|
||||||
// Find active run
|
// Find active run
|
||||||
const activeRun = accounts.find((a) => a.currentRun);
|
const activeRun = accounts.find((a) => a.currentRun);
|
||||||
const isRunning = !!activeRun;
|
const isRunning = !!activeRun;
|
||||||
|
|
||||||
|
const handleSyncNow = useCallback(() => {
|
||||||
|
startTransition(async () => {
|
||||||
|
const result = await triggerIngestion();
|
||||||
|
if (result.success) {
|
||||||
|
toast.success("Sync triggered — worker will start shortly");
|
||||||
|
} else {
|
||||||
|
toast.error(result.error ?? "Failed to trigger sync");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}, []);
|
||||||
|
|
||||||
// Poll for status
|
// Poll for status
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
let timer: ReturnType<typeof setTimeout>;
|
let timer: ReturnType<typeof setTimeout>;
|
||||||
@@ -60,6 +76,9 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
|||||||
const data = await res.json();
|
const data = await res.json();
|
||||||
if (mounted) {
|
if (mounted) {
|
||||||
setAccounts(data.accounts ?? []);
|
setAccounts(data.accounts ?? []);
|
||||||
|
if (data.workerIntervalMinutes) {
|
||||||
|
setWorkerIntervalMinutes(data.workerIntervalMinutes);
|
||||||
|
}
|
||||||
setError(false);
|
setError(false);
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
@@ -86,7 +105,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Estimate next run based on last run finish time + interval (5 min + up to 5 min jitter)
|
// Estimate next run based on last run finish time + configured interval + up to 5 min jitter
|
||||||
const lastFinished = accounts
|
const lastFinished = accounts
|
||||||
.filter((a) => a.lastRun?.finishedAt)
|
.filter((a) => a.lastRun?.finishedAt)
|
||||||
.map((a) => new Date(a.lastRun!.finishedAt!).getTime())
|
.map((a) => new Date(a.lastRun!.finishedAt!).getTime())
|
||||||
@@ -97,7 +116,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const intervalMs = 5 * 60 * 1000; // 5 min base
|
const intervalMs = workerIntervalMinutes * 60 * 1000;
|
||||||
const estimatedNext = lastFinished + intervalMs;
|
const estimatedNext = lastFinished + intervalMs;
|
||||||
|
|
||||||
const tick = () => {
|
const tick = () => {
|
||||||
@@ -116,7 +135,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
|||||||
tick();
|
tick();
|
||||||
const interval = setInterval(tick, 1_000);
|
const interval = setInterval(tick, 1_000);
|
||||||
return () => clearInterval(interval);
|
return () => clearInterval(interval);
|
||||||
}, [isRunning, accounts]);
|
}, [isRunning, accounts, workerIntervalMinutes]);
|
||||||
|
|
||||||
if (accounts.length === 0 && !error) {
|
if (accounts.length === 0 && !error) {
|
||||||
return (
|
return (
|
||||||
@@ -182,7 +201,12 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
|
|||||||
) : isRunning && activeRun?.currentRun ? (
|
) : isRunning && activeRun?.currentRun ? (
|
||||||
<RunningStatus run={activeRun.currentRun} />
|
<RunningStatus run={activeRun.currentRun} />
|
||||||
) : (
|
) : (
|
||||||
<IdleStatus accounts={accounts} nextRunCountdown={nextRunCountdown} />
|
<IdleStatus
|
||||||
|
accounts={accounts}
|
||||||
|
nextRunCountdown={nextRunCountdown}
|
||||||
|
onSyncNow={handleSyncNow}
|
||||||
|
isSyncing={isPending}
|
||||||
|
/>
|
||||||
)}
|
)}
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
@@ -233,6 +257,11 @@ function RunningStatus({
|
|||||||
</span>
|
</span>
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
|
{run.messagesScanned > 0 && (
|
||||||
|
<span>
|
||||||
|
<span className="text-foreground tabular-nums">{run.messagesScanned}</span> messages
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
{run.zipsIngested > 0 && (
|
{run.zipsIngested > 0 && (
|
||||||
<span>
|
<span>
|
||||||
<span className="text-foreground tabular-nums">{run.zipsIngested}</span> ingested
|
<span className="text-foreground tabular-nums">{run.zipsIngested}</span> ingested
|
||||||
@@ -251,9 +280,13 @@ function RunningStatus({
|
|||||||
function IdleStatus({
|
function IdleStatus({
|
||||||
accounts,
|
accounts,
|
||||||
nextRunCountdown,
|
nextRunCountdown,
|
||||||
|
onSyncNow,
|
||||||
|
isSyncing,
|
||||||
}: {
|
}: {
|
||||||
accounts: IngestionAccountStatus[];
|
accounts: IngestionAccountStatus[];
|
||||||
nextRunCountdown: string | null;
|
nextRunCountdown: string | null;
|
||||||
|
onSyncNow: () => void;
|
||||||
|
isSyncing: boolean;
|
||||||
}) {
|
}) {
|
||||||
const lastRun = accounts
|
const lastRun = accounts
|
||||||
.filter((a) => a.lastRun)
|
.filter((a) => a.lastRun)
|
||||||
@@ -316,14 +349,32 @@ function IdleStatus({
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-2 shrink-0">
|
||||||
{nextRunCountdown && hasAuthenticated && (
|
{nextRunCountdown && hasAuthenticated && (
|
||||||
<div className="flex items-center gap-1.5 shrink-0">
|
<div className="flex items-center gap-1.5">
|
||||||
<RefreshCw className="h-3 w-3 text-muted-foreground" />
|
<RefreshCw className="h-3 w-3 text-muted-foreground" />
|
||||||
<span className="text-xs text-muted-foreground tabular-nums">
|
<span className="text-xs text-muted-foreground tabular-nums">
|
||||||
Next: {nextRunCountdown}
|
Next: {nextRunCountdown}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
{hasAuthenticated && (
|
||||||
|
<Button
|
||||||
|
variant="outline"
|
||||||
|
size="sm"
|
||||||
|
className="h-7 text-xs px-2"
|
||||||
|
onClick={onSyncNow}
|
||||||
|
disabled={isSyncing}
|
||||||
|
>
|
||||||
|
{isSyncing ? (
|
||||||
|
<Loader2 className="h-3 w-3 animate-spin mr-1" />
|
||||||
|
) : (
|
||||||
|
<RefreshCw className="h-3 w-3 mr-1" />
|
||||||
|
)}
|
||||||
|
Sync Now
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -173,6 +173,7 @@ export async function createChannel(
|
|||||||
telegramId: BigInt(parsed.data.telegramId),
|
telegramId: BigInt(parsed.data.telegramId),
|
||||||
title: parsed.data.title,
|
title: parsed.data.title,
|
||||||
type: parsed.data.type,
|
type: parsed.data.type,
|
||||||
|
isActive: false,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
revalidatePath(REVALIDATE_PATH);
|
revalidatePath(REVALIDATE_PATH);
|
||||||
@@ -269,6 +270,13 @@ export async function setChannelType(
|
|||||||
if (!existing) return { success: false, error: "Channel not found" };
|
if (!existing) return { success: false, error: "Channel not found" };
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
if (type === "DESTINATION") {
|
||||||
|
// Setting as destination: use the full global destination logic
|
||||||
|
// so it updates the global settings key, creates WRITER links, etc.
|
||||||
|
return await setGlobalDestination(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setting as SOURCE — just change the type
|
||||||
await prisma.telegramChannel.update({
|
await prisma.telegramChannel.update({
|
||||||
where: { id },
|
where: { id },
|
||||||
data: { type },
|
data: { type },
|
||||||
@@ -280,19 +288,49 @@ export async function setChannelType(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function triggerChannelSync(): Promise<ActionResult> {
|
/**
|
||||||
|
* Reset all scan progress for a channel so the worker will re-process it
|
||||||
|
* from the very beginning on the next ingestion cycle.
|
||||||
|
*
|
||||||
|
* This clears:
|
||||||
|
* - `lastProcessedMessageId` on every AccountChannelMap linked to this channel
|
||||||
|
* - All TopicProgress records for those maps (for forum channels)
|
||||||
|
*/
|
||||||
|
export async function rescanChannel(channelId: string): Promise<ActionResult> {
|
||||||
const admin = await requireAdmin();
|
const admin = await requireAdmin();
|
||||||
if (!admin.success) return admin;
|
if (!admin.success) return admin;
|
||||||
|
|
||||||
|
const channel = await prisma.telegramChannel.findUnique({
|
||||||
|
where: { id: channelId },
|
||||||
|
});
|
||||||
|
if (!channel) return { success: false, error: "Channel not found" };
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Signal the worker to do a channel sync via pg_notify
|
// Find all account-channel maps for this channel
|
||||||
await prisma.$queryRawUnsafe(
|
const maps = await prisma.accountChannelMap.findMany({
|
||||||
`SELECT pg_notify('channel_sync', 'requested')`
|
where: { channelId },
|
||||||
);
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const mapIds = maps.map((m) => m.id);
|
||||||
|
|
||||||
|
// Delete all topic progress records for these maps (forum channels)
|
||||||
|
if (mapIds.length > 0) {
|
||||||
|
await prisma.topicProgress.deleteMany({
|
||||||
|
where: { accountChannelMapId: { in: mapIds } },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset the scan cursor so the worker re-processes from the start
|
||||||
|
await prisma.accountChannelMap.updateMany({
|
||||||
|
where: { channelId },
|
||||||
|
data: { lastProcessedMessageId: null },
|
||||||
|
});
|
||||||
|
|
||||||
revalidatePath(REVALIDATE_PATH);
|
revalidatePath(REVALIDATE_PATH);
|
||||||
return { success: true, data: undefined };
|
return { success: true, data: undefined };
|
||||||
} catch {
|
} catch {
|
||||||
return { success: false, error: "Failed to trigger channel sync" };
|
return { success: false, error: "Failed to reset channel scan progress" };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -371,23 +409,12 @@ export async function triggerIngestion(
|
|||||||
return { success: false, error: "No eligible accounts found" };
|
return { success: false, error: "No eligible accounts found" };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create ingestion runs — the worker picks these up
|
// Signal the worker to run an immediate ingestion cycle via pg_notify.
|
||||||
for (const account of accounts) {
|
// The worker will create its own IngestionRun records with proper activity tracking.
|
||||||
const existing = await prisma.ingestionRun.findFirst({
|
|
||||||
where: { accountId: account.id, status: "RUNNING" },
|
|
||||||
});
|
|
||||||
if (!existing) {
|
|
||||||
await prisma.ingestionRun.create({
|
|
||||||
data: { accountId: account.id, status: "RUNNING" },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// pg_notify for immediate worker pickup
|
|
||||||
try {
|
try {
|
||||||
await prisma.$queryRawUnsafe(
|
await prisma.$queryRawUnsafe(
|
||||||
`SELECT pg_notify('ingestion_trigger', $1)`,
|
`SELECT pg_notify('ingestion_trigger', $1)`,
|
||||||
accounts.map((a) => a.id).join(",")
|
accounts.map((a: { id: string }) => a.id).join(",")
|
||||||
);
|
);
|
||||||
} catch {
|
} catch {
|
||||||
// Best-effort
|
// Best-effort
|
||||||
@@ -417,7 +444,7 @@ export async function saveChannelSelections(
|
|||||||
try {
|
try {
|
||||||
let linked = 0;
|
let linked = 0;
|
||||||
for (const ch of channels) {
|
for (const ch of channels) {
|
||||||
// Upsert the channel record
|
// Upsert the channel record and activate it (user explicitly selected it)
|
||||||
const channel = await prisma.telegramChannel.upsert({
|
const channel = await prisma.telegramChannel.upsert({
|
||||||
where: { telegramId: BigInt(ch.telegramId) },
|
where: { telegramId: BigInt(ch.telegramId) },
|
||||||
create: {
|
create: {
|
||||||
@@ -425,10 +452,12 @@ export async function saveChannelSelections(
|
|||||||
title: ch.title,
|
title: ch.title,
|
||||||
type: "SOURCE",
|
type: "SOURCE",
|
||||||
isForum: ch.isForum,
|
isForum: ch.isForum,
|
||||||
|
isActive: true,
|
||||||
},
|
},
|
||||||
update: {
|
update: {
|
||||||
title: ch.title,
|
title: ch.title,
|
||||||
isForum: ch.isForum,
|
isForum: ch.isForum,
|
||||||
|
isActive: true,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -467,10 +496,10 @@ export async function setGlobalDestination(
|
|||||||
if (!channel) return { success: false, error: "Channel not found" };
|
if (!channel) return { success: false, error: "Channel not found" };
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Set the channel type to DESTINATION
|
// Set the channel type to DESTINATION and ensure it's active
|
||||||
await prisma.telegramChannel.update({
|
await prisma.telegramChannel.update({
|
||||||
where: { id: channelId },
|
where: { id: channelId },
|
||||||
data: { type: "DESTINATION" },
|
data: { type: "DESTINATION", isActive: true },
|
||||||
});
|
});
|
||||||
|
|
||||||
// Save as global destination
|
// Save as global destination
|
||||||
@@ -521,17 +550,19 @@ export async function createDestinationChannel(
|
|||||||
if (!admin.success) return admin;
|
if (!admin.success) return admin;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Create the channel as DESTINATION
|
// Create the channel as DESTINATION (active by default — needed for uploads)
|
||||||
const channel = await prisma.telegramChannel.upsert({
|
const channel = await prisma.telegramChannel.upsert({
|
||||||
where: { telegramId: BigInt(telegramId) },
|
where: { telegramId: BigInt(telegramId) },
|
||||||
create: {
|
create: {
|
||||||
telegramId: BigInt(telegramId),
|
telegramId: BigInt(telegramId),
|
||||||
title,
|
title,
|
||||||
type: "DESTINATION",
|
type: "DESTINATION",
|
||||||
|
isActive: true,
|
||||||
},
|
},
|
||||||
update: {
|
update: {
|
||||||
title,
|
title,
|
||||||
type: "DESTINATION",
|
type: "DESTINATION",
|
||||||
|
isActive: true,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ export default async function TelegramPage() {
|
|||||||
}),
|
}),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const serializedHistory = sendHistory.map((r) => ({
|
const serializedHistory = sendHistory.map((r: typeof sendHistory[number]) => ({
|
||||||
id: r.id,
|
id: r.id,
|
||||||
packageName: r.package.fileName,
|
packageName: r.package.fileName,
|
||||||
recipientName: r.telegramLink.telegramName,
|
recipientName: r.telegramLink.telegramName,
|
||||||
@@ -42,6 +42,7 @@ export default async function TelegramPage() {
|
|||||||
ingestionStatus={ingestionStatus}
|
ingestionStatus={ingestionStatus}
|
||||||
globalDestination={globalDestination}
|
globalDestination={globalDestination}
|
||||||
sendHistory={serializedHistory}
|
sendHistory={serializedHistory}
|
||||||
|
workerIntervalMinutes={parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10)}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,6 +11,23 @@ export async function registerUser(input: unknown): Promise<ActionResult<{ id: s
|
|||||||
return { success: false, error: "Validation failed" };
|
return { success: false, error: "Validation failed" };
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Validate invite code
|
||||||
|
const invite = await prisma.inviteCode.findUnique({
|
||||||
|
where: { code: parsed.data.inviteCode },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!invite) {
|
||||||
|
return { success: false, error: "Invalid invite code" };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (invite.uses >= invite.maxUses) {
|
||||||
|
return { success: false, error: "This invite code has already been used" };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (invite.expiresAt && invite.expiresAt < new Date()) {
|
||||||
|
return { success: false, error: "This invite code has expired" };
|
||||||
|
}
|
||||||
|
|
||||||
const existing = await prisma.user.findUnique({
|
const existing = await prisma.user.findUnique({
|
||||||
where: { email: parsed.data.email },
|
where: { email: parsed.data.email },
|
||||||
});
|
});
|
||||||
@@ -21,7 +38,9 @@ export async function registerUser(input: unknown): Promise<ActionResult<{ id: s
|
|||||||
|
|
||||||
const hashedPassword = await bcrypt.hash(parsed.data.password, 10);
|
const hashedPassword = await bcrypt.hash(parsed.data.password, 10);
|
||||||
|
|
||||||
const user = await prisma.user.create({
|
// Create user and increment invite usage in a transaction
|
||||||
|
const user = await prisma.$transaction(async (tx) => {
|
||||||
|
const newUser = await tx.user.create({
|
||||||
data: {
|
data: {
|
||||||
name: parsed.data.name,
|
name: parsed.data.name,
|
||||||
email: parsed.data.email,
|
email: parsed.data.email,
|
||||||
@@ -38,5 +57,13 @@ export async function registerUser(input: unknown): Promise<ActionResult<{ id: s
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
await tx.inviteCode.update({
|
||||||
|
where: { id: invite.id },
|
||||||
|
data: { uses: { increment: 1 } },
|
||||||
|
});
|
||||||
|
|
||||||
|
return newUser;
|
||||||
|
});
|
||||||
|
|
||||||
return { success: true, data: { id: user.id } };
|
return { success: true, data: { id: user.id } };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
"use client";
|
"use client";
|
||||||
|
|
||||||
import { useState, useTransition } from "react";
|
import { useState, useTransition } from "react";
|
||||||
import { useRouter } from "next/navigation";
|
import { useRouter, useSearchParams } from "next/navigation";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { useForm } from "react-hook-form";
|
import { useForm } from "react-hook-form";
|
||||||
import { zodResolver } from "@hookform/resolvers/zod";
|
import { zodResolver } from "@hookform/resolvers/zod";
|
||||||
@@ -24,12 +24,19 @@ import { APP_NAME } from "@/lib/constants";
|
|||||||
|
|
||||||
export default function RegisterPage() {
|
export default function RegisterPage() {
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
|
const searchParams = useSearchParams();
|
||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
const [isPending, startTransition] = useTransition();
|
const [isPending, startTransition] = useTransition();
|
||||||
|
|
||||||
const form = useForm<RegisterInput>({
|
const form = useForm<RegisterInput>({
|
||||||
resolver: zodResolver(registerSchema),
|
resolver: zodResolver(registerSchema),
|
||||||
defaultValues: { name: "", email: "", password: "", confirmPassword: "" },
|
defaultValues: {
|
||||||
|
name: "",
|
||||||
|
email: "",
|
||||||
|
password: "",
|
||||||
|
confirmPassword: "",
|
||||||
|
inviteCode: searchParams.get("code") ?? "",
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
function onSubmit(values: RegisterInput) {
|
function onSubmit(values: RegisterInput) {
|
||||||
@@ -75,7 +82,7 @@ export default function RegisterPage() {
|
|||||||
<Card>
|
<Card>
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle>Create Account</CardTitle>
|
<CardTitle>Create Account</CardTitle>
|
||||||
<CardDescription>Fill in your details below</CardDescription>
|
<CardDescription>You need an invite code to register</CardDescription>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<Form {...form}>
|
<Form {...form}>
|
||||||
@@ -86,6 +93,20 @@ export default function RegisterPage() {
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
<FormField
|
||||||
|
control={form.control}
|
||||||
|
name="inviteCode"
|
||||||
|
render={({ field }) => (
|
||||||
|
<FormItem>
|
||||||
|
<FormLabel>Invite Code</FormLabel>
|
||||||
|
<FormControl>
|
||||||
|
<Input placeholder="Enter your invite code" {...field} />
|
||||||
|
</FormControl>
|
||||||
|
<FormMessage />
|
||||||
|
</FormItem>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
|
||||||
<FormField
|
<FormField
|
||||||
control={form.control}
|
control={form.control}
|
||||||
name="name"
|
name="name"
|
||||||
|
|||||||
@@ -9,5 +9,9 @@ export async function GET(request: Request) {
|
|||||||
if ("error" in authResult) return authResult.error;
|
if ("error" in authResult) return authResult.error;
|
||||||
|
|
||||||
const accounts = await getIngestionStatus();
|
const accounts = await getIngestionStatus();
|
||||||
return NextResponse.json({ accounts });
|
const workerIntervalMinutes = parseInt(
|
||||||
|
process.env.WORKER_INTERVAL_MINUTES ?? "60",
|
||||||
|
10
|
||||||
|
);
|
||||||
|
return NextResponse.json({ accounts, workerIntervalMinutes });
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -45,33 +45,20 @@ export async function POST(request: Request) {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create ingestion runs marked as RUNNING — the worker will pick these up
|
// Send pg_notify for immediate worker pickup.
|
||||||
// when it next polls, or we use pg_notify for immediate pickup
|
// The worker creates its own IngestionRun records with proper activity tracking.
|
||||||
for (const account of accounts) {
|
|
||||||
// Only create if no run is already RUNNING for this account
|
|
||||||
const existing = await prisma.ingestionRun.findFirst({
|
|
||||||
where: { accountId: account.id, status: "RUNNING" },
|
|
||||||
});
|
|
||||||
if (!existing) {
|
|
||||||
await prisma.ingestionRun.create({
|
|
||||||
data: { accountId: account.id, status: "RUNNING" },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Send pg_notify for immediate worker pickup
|
|
||||||
try {
|
try {
|
||||||
await prisma.$queryRawUnsafe(
|
await prisma.$queryRawUnsafe(
|
||||||
`SELECT pg_notify('ingestion_trigger', $1)`,
|
`SELECT pg_notify('ingestion_trigger', $1)`,
|
||||||
accounts.map((a) => a.id).join(",")
|
accounts.map((a: { id: string }) => a.id).join(",")
|
||||||
);
|
);
|
||||||
} catch {
|
} catch {
|
||||||
// pg_notify is best-effort — worker will pick up on next cycle anyway
|
// pg_notify is best-effort — worker will pick up on next scheduled cycle anyway
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
triggered: true,
|
triggered: true,
|
||||||
accountIds: accounts.map((a) => a.id),
|
accountIds: accounts.map((a: { id: string }) => a.id),
|
||||||
message: `Ingestion queued for ${accounts.length} account(s)`,
|
message: `Ingestion triggered for ${accounts.length} account(s)`,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { usePathname } from "next/navigation";
|
import { usePathname } from "next/navigation";
|
||||||
|
import { useSession } from "next-auth/react";
|
||||||
import {
|
import {
|
||||||
LayoutDashboard,
|
LayoutDashboard,
|
||||||
Cylinder,
|
Cylinder,
|
||||||
@@ -14,30 +15,21 @@ import {
|
|||||||
Building2,
|
Building2,
|
||||||
MapPin,
|
MapPin,
|
||||||
Settings,
|
Settings,
|
||||||
|
UserPlus,
|
||||||
Flame,
|
Flame,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
import { APP_NAME } from "@/lib/constants";
|
import { APP_NAME, NAV_ITEMS } from "@/lib/constants";
|
||||||
import { SheetHeader, SheetTitle } from "@/components/ui/sheet";
|
import { SheetHeader, SheetTitle } from "@/components/ui/sheet";
|
||||||
|
|
||||||
const icons = { LayoutDashboard, Cylinder, Droplets, Paintbrush, Gem, FileBox, Send, ClipboardList, Building2, MapPin, Settings };
|
const icons = { LayoutDashboard, Cylinder, Droplets, Paintbrush, Gem, FileBox, Send, ClipboardList, Building2, MapPin, Settings, UserPlus };
|
||||||
|
|
||||||
const navItems = [
|
|
||||||
{ label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" as const },
|
|
||||||
{ label: "Filaments", href: "/filaments", icon: "Cylinder" as const },
|
|
||||||
{ label: "Resins", href: "/resins", icon: "Droplets" as const },
|
|
||||||
{ label: "Paints", href: "/paints", icon: "Paintbrush" as const },
|
|
||||||
{ label: "Supplies", href: "/supplies", icon: "Gem" as const },
|
|
||||||
{ label: "STL Files", href: "/stls", icon: "FileBox" as const },
|
|
||||||
{ label: "Telegram", href: "/telegram", icon: "Send" as const },
|
|
||||||
{ label: "Usage", href: "/usage", icon: "ClipboardList" as const },
|
|
||||||
{ label: "Vendors", href: "/vendors", icon: "Building2" as const },
|
|
||||||
{ label: "Locations", href: "/locations", icon: "MapPin" as const },
|
|
||||||
{ label: "Settings", href: "/settings", icon: "Settings" as const },
|
|
||||||
];
|
|
||||||
|
|
||||||
export function MobileSidebar() {
|
export function MobileSidebar() {
|
||||||
const pathname = usePathname();
|
const pathname = usePathname();
|
||||||
|
const { data: session } = useSession();
|
||||||
|
const isAdmin = session?.user?.role === "ADMIN";
|
||||||
|
|
||||||
|
const visibleItems = NAV_ITEMS.filter((item) => !item.adminOnly || isAdmin);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex h-full flex-col">
|
<div className="flex h-full flex-col">
|
||||||
@@ -48,7 +40,7 @@ export function MobileSidebar() {
|
|||||||
</SheetTitle>
|
</SheetTitle>
|
||||||
</SheetHeader>
|
</SheetHeader>
|
||||||
<nav className="flex-1 space-y-1 p-2">
|
<nav className="flex-1 space-y-1 p-2">
|
||||||
{navItems.map((item) => {
|
{visibleItems.map((item) => {
|
||||||
const Icon = icons[item.icon];
|
const Icon = icons[item.icon];
|
||||||
const isActive = pathname.startsWith(item.href);
|
const isActive = pathname.startsWith(item.href);
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { usePathname } from "next/navigation";
|
import { usePathname } from "next/navigation";
|
||||||
|
import { useSession } from "next-auth/react";
|
||||||
import {
|
import {
|
||||||
LayoutDashboard,
|
LayoutDashboard,
|
||||||
Cylinder,
|
Cylinder,
|
||||||
@@ -15,12 +16,13 @@ import {
|
|||||||
Building2,
|
Building2,
|
||||||
MapPin,
|
MapPin,
|
||||||
Settings,
|
Settings,
|
||||||
|
UserPlus,
|
||||||
Flame,
|
Flame,
|
||||||
PanelLeftClose,
|
PanelLeftClose,
|
||||||
PanelLeft,
|
PanelLeft,
|
||||||
} from "lucide-react";
|
} from "lucide-react";
|
||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
import { APP_NAME } from "@/lib/constants";
|
import { APP_NAME, NAV_ITEMS } from "@/lib/constants";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
||||||
|
|
||||||
@@ -36,25 +38,16 @@ const icons = {
|
|||||||
Building2,
|
Building2,
|
||||||
MapPin,
|
MapPin,
|
||||||
Settings,
|
Settings,
|
||||||
|
UserPlus,
|
||||||
} as const;
|
} as const;
|
||||||
|
|
||||||
const navItems = [
|
|
||||||
{ label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" as const },
|
|
||||||
{ label: "Filaments", href: "/filaments", icon: "Cylinder" as const },
|
|
||||||
{ label: "Resins", href: "/resins", icon: "Droplets" as const },
|
|
||||||
{ label: "Paints", href: "/paints", icon: "Paintbrush" as const },
|
|
||||||
{ label: "Supplies", href: "/supplies", icon: "Gem" as const },
|
|
||||||
{ label: "STL Files", href: "/stls", icon: "FileBox" as const },
|
|
||||||
{ label: "Telegram", href: "/telegram", icon: "Send" as const },
|
|
||||||
{ label: "Usage", href: "/usage", icon: "ClipboardList" as const },
|
|
||||||
{ label: "Vendors", href: "/vendors", icon: "Building2" as const },
|
|
||||||
{ label: "Locations", href: "/locations", icon: "MapPin" as const },
|
|
||||||
{ label: "Settings", href: "/settings", icon: "Settings" as const },
|
|
||||||
];
|
|
||||||
|
|
||||||
export function Sidebar() {
|
export function Sidebar() {
|
||||||
const pathname = usePathname();
|
const pathname = usePathname();
|
||||||
const [collapsed, setCollapsed] = useState(false);
|
const [collapsed, setCollapsed] = useState(false);
|
||||||
|
const { data: session } = useSession();
|
||||||
|
const isAdmin = session?.user?.role === "ADMIN";
|
||||||
|
|
||||||
|
const visibleItems = NAV_ITEMS.filter((item) => !item.adminOnly || isAdmin);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<aside
|
<aside
|
||||||
@@ -73,7 +66,7 @@ export function Sidebar() {
|
|||||||
|
|
||||||
{/* Navigation */}
|
{/* Navigation */}
|
||||||
<nav className="flex-1 space-y-1 p-2">
|
<nav className="flex-1 space-y-1 p-2">
|
||||||
{navItems.map((item) => {
|
{visibleItems.map((item) => {
|
||||||
const Icon = icons[item.icon];
|
const Icon = icons[item.icon];
|
||||||
const isActive = pathname.startsWith(item.href);
|
const isActive = pathname.startsWith(item.href);
|
||||||
|
|
||||||
|
|||||||
@@ -18,6 +18,8 @@ interface DeleteDialogProps {
|
|||||||
description?: string;
|
description?: string;
|
||||||
onConfirm: () => void;
|
onConfirm: () => void;
|
||||||
isLoading?: boolean;
|
isLoading?: boolean;
|
||||||
|
confirmLabel?: string;
|
||||||
|
confirmLoadingLabel?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function DeleteDialog({
|
export function DeleteDialog({
|
||||||
@@ -27,6 +29,8 @@ export function DeleteDialog({
|
|||||||
description = "This action cannot be undone.",
|
description = "This action cannot be undone.",
|
||||||
onConfirm,
|
onConfirm,
|
||||||
isLoading,
|
isLoading,
|
||||||
|
confirmLabel = "Delete",
|
||||||
|
confirmLoadingLabel,
|
||||||
}: DeleteDialogProps) {
|
}: DeleteDialogProps) {
|
||||||
return (
|
return (
|
||||||
<AlertDialog open={open} onOpenChange={onOpenChange}>
|
<AlertDialog open={open} onOpenChange={onOpenChange}>
|
||||||
@@ -42,7 +46,7 @@ export function DeleteDialog({
|
|||||||
disabled={isLoading}
|
disabled={isLoading}
|
||||||
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
|
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
|
||||||
>
|
>
|
||||||
{isLoading ? "Deleting..." : "Delete"}
|
{isLoading ? (confirmLoadingLabel ?? `${confirmLabel}...`) : confirmLabel}
|
||||||
</AlertDialogAction>
|
</AlertDialogAction>
|
||||||
</AlertDialogFooter>
|
</AlertDialogFooter>
|
||||||
</AlertDialogContent>
|
</AlertDialogContent>
|
||||||
|
|||||||
@@ -18,7 +18,12 @@ export const { auth, handlers, signIn, signOut } = NextAuth({
|
|||||||
async jwt({ token, user }) {
|
async jwt({ token, user }) {
|
||||||
if (user) {
|
if (user) {
|
||||||
token.id = user.id!;
|
token.id = user.id!;
|
||||||
token.role = user.role ?? "USER";
|
// Fetch the role from the database to ensure token reflects current role
|
||||||
|
const dbUser = await prisma.user.findUnique({
|
||||||
|
where: { id: user.id! },
|
||||||
|
select: { role: true },
|
||||||
|
});
|
||||||
|
token.role = dbUser?.role ?? user.role ?? "ADMIN";
|
||||||
}
|
}
|
||||||
return token;
|
return token;
|
||||||
},
|
},
|
||||||
@@ -33,6 +38,12 @@ export const { auth, handlers, signIn, signOut } = NextAuth({
|
|||||||
events: {
|
events: {
|
||||||
async createUser({ user }) {
|
async createUser({ user }) {
|
||||||
if (user.id) {
|
if (user.id) {
|
||||||
|
// Self-hosted: all users are admins
|
||||||
|
await prisma.user.update({
|
||||||
|
where: { id: user.id },
|
||||||
|
data: { role: "ADMIN" },
|
||||||
|
});
|
||||||
|
|
||||||
await prisma.userSettings.upsert({
|
await prisma.userSettings.upsert({
|
||||||
where: { userId: user.id },
|
where: { userId: user.id },
|
||||||
update: {},
|
update: {},
|
||||||
|
|||||||
@@ -1,17 +1,18 @@
|
|||||||
export const APP_NAME = "Dragon's Stash";
|
export const APP_NAME = "Dragon's Stash";
|
||||||
|
|
||||||
export const NAV_ITEMS = [
|
export const NAV_ITEMS = [
|
||||||
{ label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard" },
|
{ label: "Dashboard", href: "/dashboard", icon: "LayoutDashboard", adminOnly: false },
|
||||||
{ label: "Filaments", href: "/filaments", icon: "Cylinder" },
|
{ label: "Filaments", href: "/filaments", icon: "Cylinder", adminOnly: false },
|
||||||
{ label: "Resins", href: "/resins", icon: "Droplets" },
|
{ label: "Resins", href: "/resins", icon: "Droplets", adminOnly: false },
|
||||||
{ label: "Paints", href: "/paints", icon: "Paintbrush" },
|
{ label: "Paints", href: "/paints", icon: "Paintbrush", adminOnly: false },
|
||||||
{ label: "Supplies", href: "/supplies", icon: "Gem" },
|
{ label: "Supplies", href: "/supplies", icon: "Gem", adminOnly: false },
|
||||||
{ label: "STL Files", href: "/stls", icon: "FileBox" },
|
{ label: "STL Files", href: "/stls", icon: "FileBox", adminOnly: false },
|
||||||
{ label: "Telegram", href: "/telegram", icon: "Send" },
|
{ label: "Telegram", href: "/telegram", icon: "Send", adminOnly: true },
|
||||||
{ label: "Usage", href: "/usage", icon: "ClipboardList" },
|
{ label: "Invites", href: "/invites", icon: "UserPlus", adminOnly: true },
|
||||||
{ label: "Vendors", href: "/vendors", icon: "Building2" },
|
{ label: "Usage", href: "/usage", icon: "ClipboardList", adminOnly: false },
|
||||||
{ label: "Locations", href: "/locations", icon: "MapPin" },
|
{ label: "Vendors", href: "/vendors", icon: "Building2", adminOnly: false },
|
||||||
{ label: "Settings", href: "/settings", icon: "Settings" },
|
{ label: "Locations", href: "/locations", icon: "MapPin", adminOnly: false },
|
||||||
|
{ label: "Settings", href: "/settings", icon: "Settings", adminOnly: false },
|
||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
export const MATERIALS = [
|
export const MATERIALS = [
|
||||||
|
|||||||
6
src/lib/fonts.ts
Normal file
6
src/lib/fonts.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { Inter } from "next/font/google";
|
||||||
|
|
||||||
|
export const inter = Inter({
|
||||||
|
subsets: ["latin"],
|
||||||
|
variable: "--font-inter",
|
||||||
|
});
|
||||||
@@ -11,6 +11,7 @@ export const registerSchema = z
|
|||||||
email: z.email("Invalid email address"),
|
email: z.email("Invalid email address"),
|
||||||
password: z.string().min(6, "Password must be at least 6 characters"),
|
password: z.string().min(6, "Password must be at least 6 characters"),
|
||||||
confirmPassword: z.string(),
|
confirmPassword: z.string(),
|
||||||
|
inviteCode: z.string().min(1, "Invite code is required"),
|
||||||
})
|
})
|
||||||
.refine((data) => data.password === data.confirmPassword, {
|
.refine((data) => data.password === data.confirmPassword, {
|
||||||
message: "Passwords do not match",
|
message: "Passwords do not match",
|
||||||
|
|||||||
@@ -1,8 +1,16 @@
|
|||||||
|
import type pg from "pg";
|
||||||
import { pool } from "./client.js";
|
import { pool } from "./client.js";
|
||||||
import { childLogger } from "../util/logger.js";
|
import { childLogger } from "../util/logger.js";
|
||||||
|
|
||||||
const log = childLogger("locks");
|
const log = childLogger("locks");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Holds the pooled connection for each active advisory lock.
|
||||||
|
* Session-level advisory locks are tied to the specific PostgreSQL connection,
|
||||||
|
* so we MUST keep the same connection checked out for the entire lock duration.
|
||||||
|
*/
|
||||||
|
const heldConnections = new Map<string, pg.PoolClient>();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Derive a stable 32-bit integer lock ID from an account ID string.
|
* Derive a stable 32-bit integer lock ID from an account ID string.
|
||||||
* PostgreSQL advisory locks use bigint, but we use 32-bit for safety.
|
* PostgreSQL advisory locks use bigint, but we use 32-bit for safety.
|
||||||
@@ -20,6 +28,9 @@ function hashToLockId(accountId: string): number {
|
|||||||
/**
|
/**
|
||||||
* Try to acquire a PostgreSQL advisory lock for an account.
|
* Try to acquire a PostgreSQL advisory lock for an account.
|
||||||
* Returns true if acquired, false if already held by another session.
|
* Returns true if acquired, false if already held by another session.
|
||||||
|
*
|
||||||
|
* IMPORTANT: The pooled connection is kept checked out for the duration
|
||||||
|
* of the lock. You MUST call releaseLock() when done to return it to the pool.
|
||||||
*/
|
*/
|
||||||
export async function tryAcquireLock(accountId: string): Promise<boolean> {
|
export async function tryAcquireLock(accountId: string): Promise<boolean> {
|
||||||
const lockId = hashToLockId(accountId);
|
const lockId = hashToLockId(accountId);
|
||||||
@@ -31,26 +42,40 @@ export async function tryAcquireLock(accountId: string): Promise<boolean> {
|
|||||||
);
|
);
|
||||||
const acquired = result.rows[0]?.pg_try_advisory_lock ?? false;
|
const acquired = result.rows[0]?.pg_try_advisory_lock ?? false;
|
||||||
if (acquired) {
|
if (acquired) {
|
||||||
|
// Keep the connection checked out — lock is tied to this connection
|
||||||
|
heldConnections.set(accountId, client);
|
||||||
log.debug({ accountId, lockId }, "Advisory lock acquired");
|
log.debug({ accountId, lockId }, "Advisory lock acquired");
|
||||||
|
return true;
|
||||||
} else {
|
} else {
|
||||||
log.debug({ accountId, lockId }, "Advisory lock already held");
|
// Lock not acquired — release the connection back to the pool
|
||||||
}
|
|
||||||
return acquired;
|
|
||||||
} finally {
|
|
||||||
client.release();
|
client.release();
|
||||||
|
log.debug({ accountId, lockId }, "Advisory lock already held");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
client.release();
|
||||||
|
throw err;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Release the advisory lock for an account.
|
* Release the advisory lock for an account.
|
||||||
|
* Uses the SAME connection that acquired the lock, then returns it to the pool.
|
||||||
*/
|
*/
|
||||||
export async function releaseLock(accountId: string): Promise<void> {
|
export async function releaseLock(accountId: string): Promise<void> {
|
||||||
const lockId = hashToLockId(accountId);
|
const lockId = hashToLockId(accountId);
|
||||||
const client = await pool.connect();
|
const client = heldConnections.get(accountId);
|
||||||
|
|
||||||
|
if (!client) {
|
||||||
|
log.warn({ accountId, lockId }, "No held connection for lock release — lock may have already been released");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
|
await client.query("SELECT pg_advisory_unlock($1)", [lockId]);
|
||||||
log.debug({ accountId, lockId }, "Advisory lock released");
|
log.debug({ accountId, lockId }, "Advisory lock released");
|
||||||
} finally {
|
} finally {
|
||||||
|
heldConnections.delete(accountId);
|
||||||
client.release();
|
client.release();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -302,11 +302,15 @@ export interface UpsertChannelInput {
|
|||||||
title: string;
|
title: string;
|
||||||
type: "SOURCE" | "DESTINATION";
|
type: "SOURCE" | "DESTINATION";
|
||||||
isForum: boolean;
|
isForum: boolean;
|
||||||
|
isActive?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Upsert a channel by telegramId. Returns the channel record.
|
* Upsert a channel by telegramId. Returns the channel record.
|
||||||
* If it already exists, update title and forum status.
|
* If it already exists, update title and forum status.
|
||||||
|
* New channels default to disabled (isActive: false) so the admin must
|
||||||
|
* explicitly enable them before the worker processes them.
|
||||||
|
* Pass isActive: true for DESTINATION channels that must be active immediately.
|
||||||
*/
|
*/
|
||||||
export async function upsertChannel(input: UpsertChannelInput) {
|
export async function upsertChannel(input: UpsertChannelInput) {
|
||||||
return db.telegramChannel.upsert({
|
return db.telegramChannel.upsert({
|
||||||
@@ -316,6 +320,7 @@ export async function upsertChannel(input: UpsertChannelInput) {
|
|||||||
title: input.title,
|
title: input.title,
|
||||||
type: input.type,
|
type: input.type,
|
||||||
isForum: input.isForum,
|
isForum: input.isForum,
|
||||||
|
isActive: input.isActive ?? false,
|
||||||
},
|
},
|
||||||
update: {
|
update: {
|
||||||
title: input.title,
|
title: input.title,
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { withTdlibMutex } from "./util/mutex.js";
|
|||||||
import { processFetchRequest } from "./worker.js";
|
import { processFetchRequest } from "./worker.js";
|
||||||
import { generateInviteLink, createSupergroup } from "./tdlib/chats.js";
|
import { generateInviteLink, createSupergroup } from "./tdlib/chats.js";
|
||||||
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
|
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
|
||||||
|
import { triggerImmediateCycle } from "./scheduler.js";
|
||||||
import {
|
import {
|
||||||
getGlobalDestinationChannel,
|
getGlobalDestinationChannel,
|
||||||
getGlobalSetting,
|
getGlobalSetting,
|
||||||
@@ -17,6 +18,10 @@ import {
|
|||||||
const log = childLogger("fetch-listener");
|
const log = childLogger("fetch-listener");
|
||||||
|
|
||||||
let pgClient: pg.PoolClient | null = null;
|
let pgClient: pg.PoolClient | null = null;
|
||||||
|
let stopped = false;
|
||||||
|
|
||||||
|
/** Delay (ms) before attempting to reconnect after a connection loss. */
|
||||||
|
const RECONNECT_DELAY_MS = 5_000;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Start listening for pg_notify signals from the web app.
|
* Start listening for pg_notify signals from the web app.
|
||||||
@@ -25,12 +30,23 @@ let pgClient: pg.PoolClient | null = null;
|
|||||||
* - `channel_fetch` — payload = requestId → fetch channels for an account
|
* - `channel_fetch` — payload = requestId → fetch channels for an account
|
||||||
* - `generate_invite` — payload = channelId → generate invite link for destination
|
* - `generate_invite` — payload = channelId → generate invite link for destination
|
||||||
* - `create_destination` — payload = JSON { requestId, title } → create supergroup via TDLib
|
* - `create_destination` — payload = JSON { requestId, title } → create supergroup via TDLib
|
||||||
|
* - `ingestion_trigger` — trigger an immediate ingestion cycle
|
||||||
|
*
|
||||||
|
* If the underlying connection is lost, the listener automatically reconnects
|
||||||
|
* so that pg_notify signals are never silently dropped.
|
||||||
*/
|
*/
|
||||||
export async function startFetchListener(): Promise<void> {
|
export async function startFetchListener(): Promise<void> {
|
||||||
|
stopped = false;
|
||||||
|
await connectListener();
|
||||||
|
}
|
||||||
|
|
||||||
|
async function connectListener(): Promise<void> {
|
||||||
|
try {
|
||||||
pgClient = await pool.connect();
|
pgClient = await pool.connect();
|
||||||
await pgClient.query("LISTEN channel_fetch");
|
await pgClient.query("LISTEN channel_fetch");
|
||||||
await pgClient.query("LISTEN generate_invite");
|
await pgClient.query("LISTEN generate_invite");
|
||||||
await pgClient.query("LISTEN create_destination");
|
await pgClient.query("LISTEN create_destination");
|
||||||
|
await pgClient.query("LISTEN ingestion_trigger");
|
||||||
|
|
||||||
pgClient.on("notification", (msg) => {
|
pgClient.on("notification", (msg) => {
|
||||||
if (msg.channel === "channel_fetch" && msg.payload) {
|
if (msg.channel === "channel_fetch" && msg.payload) {
|
||||||
@@ -39,13 +55,51 @@ export async function startFetchListener(): Promise<void> {
|
|||||||
handleGenerateInvite(msg.payload);
|
handleGenerateInvite(msg.payload);
|
||||||
} else if (msg.channel === "create_destination" && msg.payload) {
|
} else if (msg.channel === "create_destination" && msg.payload) {
|
||||||
handleCreateDestination(msg.payload);
|
handleCreateDestination(msg.payload);
|
||||||
|
} else if (msg.channel === "ingestion_trigger") {
|
||||||
|
handleIngestionTrigger();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
log.info("Fetch listener started (channel_fetch, generate_invite, create_destination)");
|
// Reconnect automatically when the connection ends unexpectedly
|
||||||
|
pgClient.on("end", () => {
|
||||||
|
if (!stopped) {
|
||||||
|
log.warn("Fetch listener connection lost — reconnecting");
|
||||||
|
pgClient = null;
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
pgClient.on("error", (err) => {
|
||||||
|
log.error({ err }, "Fetch listener connection error");
|
||||||
|
if (!stopped && pgClient) {
|
||||||
|
try {
|
||||||
|
pgClient.release(true);
|
||||||
|
} catch (releaseErr) {
|
||||||
|
log.debug({ err: releaseErr }, "Failed to release pg client after error");
|
||||||
|
}
|
||||||
|
pgClient = null;
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger)");
|
||||||
|
} catch (err) {
|
||||||
|
log.error({ err }, "Failed to start fetch listener — retrying");
|
||||||
|
scheduleReconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function scheduleReconnect(): void {
|
||||||
|
if (stopped) return;
|
||||||
|
setTimeout(() => {
|
||||||
|
if (!stopped) {
|
||||||
|
connectListener();
|
||||||
|
}
|
||||||
|
}, RECONNECT_DELAY_MS);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function stopFetchListener(): void {
|
export function stopFetchListener(): void {
|
||||||
|
stopped = true;
|
||||||
if (pgClient) {
|
if (pgClient) {
|
||||||
pgClient.release();
|
pgClient.release();
|
||||||
pgClient = null;
|
pgClient = null;
|
||||||
@@ -138,12 +192,13 @@ function handleCreateDestination(payload: string): void {
|
|||||||
const result = await createSupergroup(client, parsed.title);
|
const result = await createSupergroup(client, parsed.title);
|
||||||
log.info({ chatId: result.chatId.toString(), title: result.title }, "Supergroup created");
|
log.info({ chatId: result.chatId.toString(), title: result.title }, "Supergroup created");
|
||||||
|
|
||||||
// Upsert it as a DESTINATION channel in the DB
|
// Upsert it as a DESTINATION channel in the DB (active by default)
|
||||||
const channel = await upsertChannel({
|
const channel = await upsertChannel({
|
||||||
telegramId: result.chatId,
|
telegramId: result.chatId,
|
||||||
title: result.title,
|
title: result.title,
|
||||||
type: "DESTINATION",
|
type: "DESTINATION",
|
||||||
isForum: false,
|
isForum: false,
|
||||||
|
isActive: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Set as global destination
|
// Set as global destination
|
||||||
@@ -204,3 +259,16 @@ function handleCreateDestination(payload: string): void {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Ingestion trigger handler ──
|
||||||
|
|
||||||
|
function handleIngestionTrigger(): void {
|
||||||
|
fetchQueue = fetchQueue.then(async () => {
|
||||||
|
try {
|
||||||
|
log.info("Ingestion trigger received from UI");
|
||||||
|
await triggerImmediateCycle();
|
||||||
|
} catch (err) {
|
||||||
|
log.error({ err }, "Failed to trigger immediate ingestion cycle");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
@@ -36,11 +36,13 @@ async function main(): Promise<void> {
|
|||||||
// Graceful shutdown
|
// Graceful shutdown
|
||||||
function shutdown(signal: string): void {
|
function shutdown(signal: string): void {
|
||||||
log.info({ signal }, "Shutdown signal received");
|
log.info({ signal }, "Shutdown signal received");
|
||||||
stopScheduler();
|
|
||||||
|
// Stop accepting new work
|
||||||
stopFetchListener();
|
stopFetchListener();
|
||||||
|
|
||||||
// Close DB connections
|
// Wait for any active cycle to finish before closing DB
|
||||||
Promise.all([db.$disconnect(), pool.end()])
|
stopScheduler()
|
||||||
|
.then(() => Promise.all([db.$disconnect(), pool.end()]))
|
||||||
.then(() => {
|
.then(() => {
|
||||||
log.info("Shutdown complete");
|
log.info("Shutdown complete");
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
|
|||||||
@@ -9,6 +9,14 @@ const log = childLogger("scheduler");
|
|||||||
let running = false;
|
let running = false;
|
||||||
let timer: ReturnType<typeof setTimeout> | null = null;
|
let timer: ReturnType<typeof setTimeout> | null = null;
|
||||||
let cycleCount = 0;
|
let cycleCount = 0;
|
||||||
|
let activeCyclePromise: Promise<void> | null = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Maximum time for a single ingestion cycle (ms).
|
||||||
|
* After this, new accounts won't be started (in-progress work finishes).
|
||||||
|
* Default: 4 hours. Configurable via WORKER_CYCLE_TIMEOUT_MINUTES.
|
||||||
|
*/
|
||||||
|
const CYCLE_TIMEOUT_MS = (parseInt(process.env.WORKER_CYCLE_TIMEOUT_MINUTES ?? "240", 10)) * 60 * 1000;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Run one ingestion cycle:
|
* Run one ingestion cycle:
|
||||||
@@ -17,6 +25,10 @@ let cycleCount = 0;
|
|||||||
*
|
*
|
||||||
* All TDLib operations are wrapped in the mutex to ensure only one client
|
* All TDLib operations are wrapped in the mutex to ensure only one client
|
||||||
* runs at a time (also shared with the fetch listener for on-demand requests).
|
* runs at a time (also shared with the fetch listener for on-demand requests).
|
||||||
|
*
|
||||||
|
* The cycle has a configurable timeout (WORKER_CYCLE_TIMEOUT_MINUTES, default 4h).
|
||||||
|
* Once the timeout elapses, no new accounts will be started but any in-progress
|
||||||
|
* account processing is allowed to finish its current archive set.
|
||||||
*/
|
*/
|
||||||
async function runCycle(): Promise<void> {
|
async function runCycle(): Promise<void> {
|
||||||
if (running) {
|
if (running) {
|
||||||
@@ -26,7 +38,8 @@ async function runCycle(): Promise<void> {
|
|||||||
|
|
||||||
running = true;
|
running = true;
|
||||||
cycleCount++;
|
cycleCount++;
|
||||||
log.info({ cycle: cycleCount }, "Starting ingestion cycle");
|
const cycleStart = Date.now();
|
||||||
|
log.info({ cycle: cycleCount, timeoutMinutes: CYCLE_TIMEOUT_MS / 60_000 }, "Starting ingestion cycle");
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// ── Phase 1: Authenticate pending accounts ──
|
// ── Phase 1: Authenticate pending accounts ──
|
||||||
@@ -37,6 +50,10 @@ async function runCycle(): Promise<void> {
|
|||||||
"Found pending accounts, starting authentication"
|
"Found pending accounts, starting authentication"
|
||||||
);
|
);
|
||||||
for (const account of pendingAccounts) {
|
for (const account of pendingAccounts) {
|
||||||
|
if (Date.now() - cycleStart > CYCLE_TIMEOUT_MS) {
|
||||||
|
log.warn("Cycle timeout reached during authentication phase, stopping");
|
||||||
|
break;
|
||||||
|
}
|
||||||
await withTdlibMutex(`auth:${account.phone}`, () =>
|
await withTdlibMutex(`auth:${account.phone}`, () =>
|
||||||
authenticateAccount(account)
|
authenticateAccount(account)
|
||||||
);
|
);
|
||||||
@@ -54,12 +71,22 @@ async function runCycle(): Promise<void> {
|
|||||||
log.info({ accountCount: accounts.length }, "Processing accounts");
|
log.info({ accountCount: accounts.length }, "Processing accounts");
|
||||||
|
|
||||||
for (const account of accounts) {
|
for (const account of accounts) {
|
||||||
|
if (Date.now() - cycleStart > CYCLE_TIMEOUT_MS) {
|
||||||
|
log.warn(
|
||||||
|
{ elapsed: Math.round((Date.now() - cycleStart) / 60_000), timeoutMinutes: CYCLE_TIMEOUT_MS / 60_000 },
|
||||||
|
"Cycle timeout reached, skipping remaining accounts"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
await withTdlibMutex(`ingest:${account.phone}`, () =>
|
await withTdlibMutex(`ingest:${account.phone}`, () =>
|
||||||
runWorkerForAccount(account)
|
runWorkerForAccount(account)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
log.info("Ingestion cycle complete");
|
log.info(
|
||||||
|
{ elapsed: Math.round((Date.now() - cycleStart) / 1000) },
|
||||||
|
"Ingestion cycle complete"
|
||||||
|
);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
log.error({ err }, "Ingestion cycle failed");
|
log.error({ err }, "Ingestion cycle failed");
|
||||||
} finally {
|
} finally {
|
||||||
@@ -81,7 +108,9 @@ function scheduleNext(): void {
|
|||||||
);
|
);
|
||||||
|
|
||||||
timer = setTimeout(async () => {
|
timer = setTimeout(async () => {
|
||||||
await runCycle();
|
activeCyclePromise = runCycle();
|
||||||
|
await activeCyclePromise;
|
||||||
|
activeCyclePromise = null;
|
||||||
scheduleNext();
|
scheduleNext();
|
||||||
}, delay);
|
}, delay);
|
||||||
}
|
}
|
||||||
@@ -99,19 +128,44 @@ export async function startScheduler(): Promise<void> {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Run immediately on start
|
// Run immediately on start
|
||||||
await runCycle();
|
activeCyclePromise = runCycle();
|
||||||
|
await activeCyclePromise;
|
||||||
|
activeCyclePromise = null;
|
||||||
|
|
||||||
// Then schedule recurring cycles
|
// Then schedule recurring cycles
|
||||||
scheduleNext();
|
scheduleNext();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Stop the scheduler gracefully.
|
* Trigger an immediate ingestion cycle (e.g. from the admin UI).
|
||||||
|
* If a cycle is already running, this is a no-op.
|
||||||
*/
|
*/
|
||||||
export function stopScheduler(): void {
|
export async function triggerImmediateCycle(): Promise<void> {
|
||||||
|
if (running) {
|
||||||
|
log.info("Cycle already running, ignoring trigger");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
log.info("Immediate cycle triggered via UI");
|
||||||
|
await runCycle();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the scheduler gracefully.
|
||||||
|
* Returns a promise that resolves when any active cycle finishes,
|
||||||
|
* so callers can wait before closing DB connections.
|
||||||
|
*/
|
||||||
|
export function stopScheduler(): Promise<void> {
|
||||||
if (timer) {
|
if (timer) {
|
||||||
clearTimeout(timer);
|
clearTimeout(timer);
|
||||||
timer = null;
|
timer = null;
|
||||||
}
|
}
|
||||||
|
if (activeCyclePromise) {
|
||||||
|
log.info("Scheduler stopping — waiting for active cycle to finish");
|
||||||
|
return activeCyclePromise.finally(() => {
|
||||||
|
activeCyclePromise = null;
|
||||||
log.info("Scheduler stopped");
|
log.info("Scheduler stopped");
|
||||||
|
});
|
||||||
|
}
|
||||||
|
log.info("Scheduler stopped");
|
||||||
|
return Promise.resolve();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import type { Client } from "tdl";
|
import type { Client } from "tdl";
|
||||||
import { childLogger } from "../util/logger.js";
|
import { childLogger } from "../util/logger.js";
|
||||||
import { config } from "../util/config.js";
|
import { config } from "../util/config.js";
|
||||||
|
import { withFloodWait } from "../util/retry.js";
|
||||||
|
|
||||||
const log = childLogger("chats");
|
const log = childLogger("chats");
|
||||||
|
|
||||||
@@ -29,11 +30,14 @@ export async function getAccountChats(
|
|||||||
|
|
||||||
while (hasMore) {
|
while (hasMore) {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const result = (await client.invoke({
|
const result = (await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "getChats",
|
_: "getChats",
|
||||||
chat_list: { _: "chatListMain" },
|
chat_list: { _: "chatListMain" },
|
||||||
limit: 100,
|
limit: 100,
|
||||||
})) as { chat_ids: number[] };
|
}),
|
||||||
|
"getChats"
|
||||||
|
)) as { chat_ids: number[] };
|
||||||
|
|
||||||
if (!result.chat_ids || result.chat_ids.length === 0) {
|
if (!result.chat_ids || result.chat_ids.length === 0) {
|
||||||
break;
|
break;
|
||||||
@@ -42,10 +46,13 @@ export async function getAccountChats(
|
|||||||
for (const chatId of result.chat_ids) {
|
for (const chatId of result.chat_ids) {
|
||||||
try {
|
try {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const chat = (await client.invoke({
|
const chat = (await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "getChat",
|
_: "getChat",
|
||||||
chat_id: chatId,
|
chat_id: chatId,
|
||||||
})) as any;
|
}),
|
||||||
|
"getChat"
|
||||||
|
)) as any;
|
||||||
|
|
||||||
const chatType = chat.type?._;
|
const chatType = chat.type?._;
|
||||||
let type: TelegramChatInfo["type"] = "other";
|
let type: TelegramChatInfo["type"] = "other";
|
||||||
@@ -55,10 +62,13 @@ export async function getAccountChats(
|
|||||||
// Get supergroup details to check if it's a channel or group
|
// Get supergroup details to check if it's a channel or group
|
||||||
try {
|
try {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const sg = (await client.invoke({
|
const sg = (await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "getSupergroup",
|
_: "getSupergroup",
|
||||||
supergroup_id: chat.type.supergroup_id,
|
supergroup_id: chat.type.supergroup_id,
|
||||||
})) as any;
|
}),
|
||||||
|
"getSupergroup"
|
||||||
|
)) as any;
|
||||||
|
|
||||||
type = sg.is_channel ? "channel" : "supergroup";
|
type = sg.is_channel ? "channel" : "supergroup";
|
||||||
isForum = sg.is_forum ?? false;
|
isForum = sg.is_forum ?? false;
|
||||||
@@ -109,12 +119,15 @@ export async function generateInviteLink(
|
|||||||
chatId: bigint
|
chatId: bigint
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const result = (await client.invoke({
|
const result = (await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "createChatInviteLink",
|
_: "createChatInviteLink",
|
||||||
chat_id: Number(chatId),
|
chat_id: Number(chatId),
|
||||||
name: "DragonsStash Auto-Join",
|
name: "DragonsStash Auto-Join",
|
||||||
creates_join_request: false,
|
creates_join_request: false,
|
||||||
})) as any;
|
}),
|
||||||
|
"createChatInviteLink"
|
||||||
|
)) as any;
|
||||||
|
|
||||||
const link = result.invite_link as string;
|
const link = result.invite_link as string;
|
||||||
log.info({ chatId: chatId.toString(), link }, "Generated invite link");
|
log.info({ chatId: chatId.toString(), link }, "Generated invite link");
|
||||||
@@ -130,13 +143,16 @@ export async function createSupergroup(
|
|||||||
title: string
|
title: string
|
||||||
): Promise<{ chatId: bigint; title: string }> {
|
): Promise<{ chatId: bigint; title: string }> {
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const result = (await client.invoke({
|
const result = (await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "createNewSupergroupChat",
|
_: "createNewSupergroupChat",
|
||||||
title,
|
title,
|
||||||
is_forum: false,
|
is_forum: false,
|
||||||
is_channel: false,
|
is_channel: false,
|
||||||
description: "DragonsStash archive destination — all accounts write here",
|
description: "DragonsStash archive destination — all accounts write here",
|
||||||
})) as any;
|
}),
|
||||||
|
"createNewSupergroupChat"
|
||||||
|
)) as any;
|
||||||
|
|
||||||
const chatId = BigInt(result.id);
|
const chatId = BigInt(result.id);
|
||||||
log.info({ chatId: chatId.toString(), title }, "Created new supergroup");
|
log.info({ chatId: chatId.toString(), title }, "Created new supergroup");
|
||||||
@@ -150,10 +166,13 @@ export async function joinChatByInviteLink(
|
|||||||
client: Client,
|
client: Client,
|
||||||
inviteLink: string
|
inviteLink: string
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
await client.invoke({
|
await withFloodWait(
|
||||||
|
() => client.invoke({
|
||||||
_: "joinChatByInviteLink",
|
_: "joinChatByInviteLink",
|
||||||
invite_link: inviteLink,
|
invite_link: inviteLink,
|
||||||
});
|
}),
|
||||||
|
"joinChatByInviteLink"
|
||||||
|
);
|
||||||
log.info({ inviteLink }, "Joined chat by invite link");
|
log.info({ inviteLink }, "Joined chat by invite link");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2,12 +2,19 @@ import type { Client } from "tdl";
|
|||||||
import { readFile, rename, copyFile, unlink, stat } from "fs/promises";
|
import { readFile, rename, copyFile, unlink, stat } from "fs/promises";
|
||||||
import { config } from "../util/config.js";
|
import { config } from "../util/config.js";
|
||||||
import { childLogger } from "../util/logger.js";
|
import { childLogger } from "../util/logger.js";
|
||||||
|
import { withFloodWait } from "../util/retry.js";
|
||||||
import { isArchiveAttachment } from "../archive/detect.js";
|
import { isArchiveAttachment } from "../archive/detect.js";
|
||||||
import type { TelegramMessage } from "../archive/multipart.js";
|
import type { TelegramMessage } from "../archive/multipart.js";
|
||||||
import type { TelegramPhoto } from "../preview/match.js";
|
import type { TelegramPhoto } from "../preview/match.js";
|
||||||
|
|
||||||
const log = childLogger("download");
|
const log = childLogger("download");
|
||||||
|
|
||||||
|
/** Maximum number of pages to scan per channel/topic to prevent infinite loops */
|
||||||
|
export const MAX_SCAN_PAGES = 5000;
|
||||||
|
|
||||||
|
/** Timeout for a single TDLib API call (ms) */
|
||||||
|
export const INVOKE_TIMEOUT_MS = 120_000; // 2 minutes
|
||||||
|
|
||||||
interface TdPhotoSize {
|
interface TdPhotoSize {
|
||||||
type: string;
|
type: string;
|
||||||
photo: {
|
photo: {
|
||||||
@@ -66,6 +73,59 @@ interface TdFile {
|
|||||||
export interface ChannelScanResult {
|
export interface ChannelScanResult {
|
||||||
archives: TelegramMessage[];
|
archives: TelegramMessage[];
|
||||||
photos: TelegramPhoto[];
|
photos: TelegramPhoto[];
|
||||||
|
totalScanned: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ScanProgressCallback = (messagesScanned: number) => void;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Invoke a TDLib method with a timeout to prevent indefinite hangs,
|
||||||
|
* and automatic retry on FLOOD_WAIT rate-limit errors.
|
||||||
|
*
|
||||||
|
* If TDLib does not respond within the timeout, the promise rejects.
|
||||||
|
* If Telegram returns a rate limit error, sleeps for the required
|
||||||
|
* duration and retries (up to maxRetries times).
|
||||||
|
*/
|
||||||
|
export async function invokeWithTimeout<T>(
|
||||||
|
client: Client,
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
request: Record<string, any>,
|
||||||
|
timeoutMs = INVOKE_TIMEOUT_MS
|
||||||
|
): Promise<T> {
|
||||||
|
return withFloodWait(
|
||||||
|
() =>
|
||||||
|
new Promise<T>((resolve, reject) => {
|
||||||
|
let settled = false;
|
||||||
|
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
if (!settled) {
|
||||||
|
settled = true;
|
||||||
|
reject(
|
||||||
|
new Error(
|
||||||
|
`TDLib invoke timed out after ${timeoutMs}ms for ${request._}`
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}, timeoutMs);
|
||||||
|
|
||||||
|
(client.invoke(request) as Promise<T>)
|
||||||
|
.then((result) => {
|
||||||
|
if (!settled) {
|
||||||
|
settled = true;
|
||||||
|
clearTimeout(timer);
|
||||||
|
resolve(result);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
if (!settled) {
|
||||||
|
settled = true;
|
||||||
|
clearTimeout(timer);
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
`TDLib:${request._}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -77,32 +137,59 @@ export interface ChannelScanResult {
|
|||||||
* When `lastProcessedMessageId` is null (first run), scans everything.
|
* When `lastProcessedMessageId` is null (first run), scans everything.
|
||||||
* The worker applies a post-grouping filter to skip fully-processed sets,
|
* The worker applies a post-grouping filter to skip fully-processed sets,
|
||||||
* and keeps `packageExistsBySourceMessage` as a safety net.
|
* and keeps `packageExistsBySourceMessage` as a safety net.
|
||||||
|
*
|
||||||
|
* Safety features:
|
||||||
|
* - Max page limit to prevent infinite loops
|
||||||
|
* - Stuck detection: breaks if from_message_id stops advancing
|
||||||
|
* - Timeout on each TDLib API call
|
||||||
*/
|
*/
|
||||||
export async function getChannelMessages(
|
export async function getChannelMessages(
|
||||||
client: Client,
|
client: Client,
|
||||||
chatId: bigint,
|
chatId: bigint,
|
||||||
lastProcessedMessageId?: bigint | null,
|
lastProcessedMessageId?: bigint | null,
|
||||||
limit = 100
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
): Promise<ChannelScanResult> {
|
): Promise<ChannelScanResult> {
|
||||||
const archives: TelegramMessage[] = [];
|
const archives: TelegramMessage[] = [];
|
||||||
const photos: TelegramPhoto[] = [];
|
const photos: TelegramPhoto[] = [];
|
||||||
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
|
// Open the chat so TDLib loads remote messages
|
||||||
|
await invokeWithTimeout(client, {
|
||||||
|
_: "openChat",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
});
|
||||||
|
|
||||||
let currentFromId = 0;
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
while (true) {
|
while (true) {
|
||||||
const result = (await client.invoke({
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for channel scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
|
const result = await invokeWithTimeout<{ messages: TdMessage[] }>(client, {
|
||||||
_: "getChatHistory",
|
_: "getChatHistory",
|
||||||
chat_id: Number(chatId),
|
chat_id: Number(chatId),
|
||||||
from_message_id: currentFromId,
|
from_message_id: currentFromId,
|
||||||
offset: 0,
|
offset: 0,
|
||||||
limit: Math.min(limit, 100),
|
limit: Math.min(limit, 100),
|
||||||
only_local: false,
|
only_local: false,
|
||||||
})) as { messages: TdMessage[] };
|
});
|
||||||
|
|
||||||
if (!result.messages || result.messages.length === 0) break;
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
for (const msg of result.messages) {
|
for (const msg of result.messages) {
|
||||||
// Check for archive documents
|
// Check for archive documents
|
||||||
const doc = msg.content?.document;
|
const doc = msg.content?.document;
|
||||||
@@ -132,19 +219,37 @@ export async function getChannelMessages(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Report scanning progress after each page
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
currentFromId = result.messages[result.messages.length - 1].id;
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
// Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), currentFromId, totalScanned },
|
||||||
|
"Pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
||||||
if (boundary && currentFromId < boundary) break;
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
if (result.messages.length < 100) break;
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
// Rate limit delay
|
// Rate limit delay
|
||||||
await sleep(config.apiDelayMs);
|
await sleep(config.apiDelayMs);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Close the chat after scanning
|
||||||
|
await invokeWithTimeout(client, {
|
||||||
|
_: "closeChat",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
}).catch(() => {}); // Ignore close errors
|
||||||
|
|
||||||
log.info(
|
log.info(
|
||||||
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length },
|
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
"Channel scan complete"
|
"Channel scan complete"
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -152,6 +257,7 @@ export async function getChannelMessages(
|
|||||||
return {
|
return {
|
||||||
archives: archives.reverse(),
|
archives: archives.reverse(),
|
||||||
photos: photos.reverse(),
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -334,15 +440,20 @@ export async function downloadFile(
|
|||||||
client.on("update", handleUpdate);
|
client.on("update", handleUpdate);
|
||||||
|
|
||||||
// Start async download (non-blocking — progress via updateFile events)
|
// Start async download (non-blocking — progress via updateFile events)
|
||||||
client
|
// Wrapped in withFloodWait: if the initial invoke is rate-limited,
|
||||||
.invoke({
|
// it will sleep and retry before the download event loop begins.
|
||||||
|
withFloodWait(
|
||||||
|
() =>
|
||||||
|
client.invoke({
|
||||||
_: "downloadFile",
|
_: "downloadFile",
|
||||||
file_id: numericId,
|
file_id: numericId,
|
||||||
priority: 32,
|
priority: 32,
|
||||||
offset: 0,
|
offset: 0,
|
||||||
limit: 0,
|
limit: 0,
|
||||||
synchronous: false,
|
synchronous: false,
|
||||||
})
|
}),
|
||||||
|
`downloadFile:${fileName}`
|
||||||
|
)
|
||||||
.then((result: unknown) => {
|
.then((result: unknown) => {
|
||||||
// If the file was already cached locally, invoke returns immediately
|
// If the file was already cached locally, invoke returns immediately
|
||||||
const file = result as TdFile | undefined;
|
const file = result as TdFile | undefined;
|
||||||
|
|||||||
@@ -4,7 +4,8 @@ import { childLogger } from "../util/logger.js";
|
|||||||
import { isArchiveAttachment } from "../archive/detect.js";
|
import { isArchiveAttachment } from "../archive/detect.js";
|
||||||
import type { TelegramMessage } from "../archive/multipart.js";
|
import type { TelegramMessage } from "../archive/multipart.js";
|
||||||
import type { TelegramPhoto } from "../preview/match.js";
|
import type { TelegramPhoto } from "../preview/match.js";
|
||||||
import type { ChannelScanResult } from "./download.js";
|
import type { ChannelScanResult, ScanProgressCallback } from "./download.js";
|
||||||
|
import { invokeWithTimeout, MAX_SCAN_PAGES, INVOKE_TIMEOUT_MS } from "./download.js";
|
||||||
|
|
||||||
const log = childLogger("topics");
|
const log = childLogger("topics");
|
||||||
|
|
||||||
@@ -21,16 +22,16 @@ export async function isChatForum(
|
|||||||
chatId: bigint
|
chatId: bigint
|
||||||
): Promise<boolean> {
|
): Promise<boolean> {
|
||||||
try {
|
try {
|
||||||
const chat = (await client.invoke({
|
const chat = await invokeWithTimeout<{
|
||||||
_: "getChat",
|
|
||||||
chat_id: Number(chatId),
|
|
||||||
})) as {
|
|
||||||
type?: {
|
type?: {
|
||||||
_: string;
|
_: string;
|
||||||
supergroup_id?: number;
|
supergroup_id?: number;
|
||||||
is_forum?: boolean;
|
is_forum?: boolean;
|
||||||
};
|
};
|
||||||
};
|
}>(client, {
|
||||||
|
_: "getChat",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
});
|
||||||
|
|
||||||
if (chat.type?._ === "chatTypeSupergroup" && chat.type.is_forum) {
|
if (chat.type?._ === "chatTypeSupergroup" && chat.type.is_forum) {
|
||||||
return true;
|
return true;
|
||||||
@@ -38,10 +39,10 @@ export async function isChatForum(
|
|||||||
|
|
||||||
// Also check via getSupergroup for older TDLib versions
|
// Also check via getSupergroup for older TDLib versions
|
||||||
if (chat.type?._ === "chatTypeSupergroup" && chat.type.supergroup_id) {
|
if (chat.type?._ === "chatTypeSupergroup" && chat.type.supergroup_id) {
|
||||||
const sg = (await client.invoke({
|
const sg = await invokeWithTimeout<{ is_forum?: boolean }>(client, {
|
||||||
_: "getSupergroup",
|
_: "getSupergroup",
|
||||||
supergroup_id: chat.type.supergroup_id,
|
supergroup_id: chat.type.supergroup_id,
|
||||||
})) as { is_forum?: boolean };
|
});
|
||||||
return sg.is_forum === true;
|
return sg.is_forum === true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -54,6 +55,7 @@ export async function isChatForum(
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get all forum topics in a supergroup.
|
* Get all forum topics in a supergroup.
|
||||||
|
* Includes stuck detection and timeout protection on API calls.
|
||||||
*/
|
*/
|
||||||
export async function getForumTopicList(
|
export async function getForumTopicList(
|
||||||
client: Client,
|
client: Client,
|
||||||
@@ -63,18 +65,24 @@ export async function getForumTopicList(
|
|||||||
let offsetDate = 0;
|
let offsetDate = 0;
|
||||||
let offsetMessageId = 0;
|
let offsetMessageId = 0;
|
||||||
let offsetMessageThreadId = 0;
|
let offsetMessageThreadId = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
while (true) {
|
while (true) {
|
||||||
const result = (await client.invoke({
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
_: "getForumTopics",
|
log.warn(
|
||||||
chat_id: Number(chatId),
|
{ chatId: chatId.toString(), pageCount, topicCount: topics.length },
|
||||||
query: "",
|
"Hit max page limit for topic enumeration, stopping"
|
||||||
offset_date: offsetDate,
|
);
|
||||||
offset_message_id: offsetMessageId,
|
break;
|
||||||
offset_message_thread_id: offsetMessageThreadId,
|
}
|
||||||
limit: 100,
|
pageCount++;
|
||||||
})) as {
|
|
||||||
|
const prevOffsetDate = offsetDate;
|
||||||
|
const prevOffsetMessageId = offsetMessageId;
|
||||||
|
const prevOffsetMessageThreadId = offsetMessageThreadId;
|
||||||
|
|
||||||
|
const result = await invokeWithTimeout<{
|
||||||
topics?: {
|
topics?: {
|
||||||
info?: {
|
info?: {
|
||||||
message_thread_id?: number;
|
message_thread_id?: number;
|
||||||
@@ -85,18 +93,24 @@ export async function getForumTopicList(
|
|||||||
next_offset_date?: number;
|
next_offset_date?: number;
|
||||||
next_offset_message_id?: number;
|
next_offset_message_id?: number;
|
||||||
next_offset_message_thread_id?: number;
|
next_offset_message_thread_id?: number;
|
||||||
};
|
}>(client, {
|
||||||
|
_: "getForumTopics",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
query: "",
|
||||||
|
offset_date: offsetDate,
|
||||||
|
offset_message_id: offsetMessageId,
|
||||||
|
offset_message_thread_id: offsetMessageThreadId,
|
||||||
|
limit: 100,
|
||||||
|
});
|
||||||
|
|
||||||
if (!result.topics || result.topics.length === 0) break;
|
if (!result.topics || result.topics.length === 0) break;
|
||||||
|
|
||||||
for (const t of result.topics) {
|
for (const t of result.topics) {
|
||||||
if (!t.info?.message_thread_id) continue;
|
if (!t.info?.message_thread_id) continue;
|
||||||
// Skip the "General" topic — it's not creator-specific
|
|
||||||
if (t.info.is_general) continue;
|
|
||||||
|
|
||||||
topics.push({
|
topics.push({
|
||||||
topicId: BigInt(t.info.message_thread_id),
|
topicId: BigInt(t.info.message_thread_id),
|
||||||
name: t.info.name ?? "Unnamed",
|
name: t.info.is_general ? "General" : (t.info.name ?? "Unnamed"),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -113,6 +127,19 @@ export async function getForumTopicList(
|
|||||||
offsetMessageId = result.next_offset_message_id ?? 0;
|
offsetMessageId = result.next_offset_message_id ?? 0;
|
||||||
offsetMessageThreadId = result.next_offset_message_thread_id ?? 0;
|
offsetMessageThreadId = result.next_offset_message_thread_id ?? 0;
|
||||||
|
|
||||||
|
// Stuck detection: if offsets didn't advance, break
|
||||||
|
if (
|
||||||
|
offsetDate === prevOffsetDate &&
|
||||||
|
offsetMessageId === prevOffsetMessageId &&
|
||||||
|
offsetMessageThreadId === prevOffsetMessageThreadId
|
||||||
|
) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicCount: topics.length },
|
||||||
|
"Topic pagination stuck (offsets not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
await sleep(config.apiDelayMs);
|
await sleep(config.apiDelayMs);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -134,35 +161,43 @@ export async function getForumTopicList(
|
|||||||
* When `lastProcessedMessageId` is null (first run), scans everything.
|
* When `lastProcessedMessageId` is null (first run), scans everything.
|
||||||
* The worker applies a post-grouping filter to skip fully-processed sets,
|
* The worker applies a post-grouping filter to skip fully-processed sets,
|
||||||
* and keeps `packageExistsBySourceMessage` as a safety net.
|
* and keeps `packageExistsBySourceMessage` as a safety net.
|
||||||
|
*
|
||||||
|
* Safety features:
|
||||||
|
* - Max page limit to prevent infinite loops
|
||||||
|
* - Stuck detection: breaks if from_message_id stops advancing
|
||||||
|
* - Timeout on each TDLib API call
|
||||||
*/
|
*/
|
||||||
export async function getTopicMessages(
|
export async function getTopicMessages(
|
||||||
client: Client,
|
client: Client,
|
||||||
chatId: bigint,
|
chatId: bigint,
|
||||||
topicId: bigint,
|
topicId: bigint,
|
||||||
lastProcessedMessageId?: bigint | null,
|
lastProcessedMessageId?: bigint | null,
|
||||||
limit = 100
|
limit = 100,
|
||||||
|
onProgress?: ScanProgressCallback
|
||||||
): Promise<ChannelScanResult> {
|
): Promise<ChannelScanResult> {
|
||||||
const archives: TelegramMessage[] = [];
|
const archives: TelegramMessage[] = [];
|
||||||
const photos: TelegramPhoto[] = [];
|
const photos: TelegramPhoto[] = [];
|
||||||
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
|
||||||
|
|
||||||
let currentFromId = 0;
|
let currentFromId = 0;
|
||||||
|
let totalScanned = 0;
|
||||||
|
let pageCount = 0;
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
while (true) {
|
while (true) {
|
||||||
|
if (pageCount >= MAX_SCAN_PAGES) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), pageCount, totalScanned },
|
||||||
|
"Hit max page limit for topic scan, stopping"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
pageCount++;
|
||||||
|
|
||||||
|
const previousFromId = currentFromId;
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
const result = (await client.invoke({
|
const result = await invokeWithTimeout<{
|
||||||
_: "searchChatMessages",
|
|
||||||
chat_id: Number(chatId),
|
|
||||||
query: "",
|
|
||||||
message_thread_id: Number(topicId),
|
|
||||||
from_message_id: currentFromId,
|
|
||||||
offset: 0,
|
|
||||||
limit: Math.min(limit, 100),
|
|
||||||
filter: null,
|
|
||||||
sender_id: null,
|
|
||||||
saved_messages_topic_id: 0,
|
|
||||||
})) as {
|
|
||||||
messages?: {
|
messages?: {
|
||||||
id: number;
|
id: number;
|
||||||
date: number;
|
date: number;
|
||||||
@@ -186,10 +221,23 @@ export async function getTopicMessages(
|
|||||||
caption?: { text?: string };
|
caption?: { text?: string };
|
||||||
};
|
};
|
||||||
}[];
|
}[];
|
||||||
};
|
}>(client, {
|
||||||
|
_: "searchChatMessages",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
query: "",
|
||||||
|
message_thread_id: Number(topicId),
|
||||||
|
from_message_id: currentFromId,
|
||||||
|
offset: 0,
|
||||||
|
limit: Math.min(limit, 100),
|
||||||
|
filter: null,
|
||||||
|
sender_id: null,
|
||||||
|
saved_messages_topic_id: 0,
|
||||||
|
});
|
||||||
|
|
||||||
if (!result.messages || result.messages.length === 0) break;
|
if (!result.messages || result.messages.length === 0) break;
|
||||||
|
|
||||||
|
totalScanned += result.messages.length;
|
||||||
|
|
||||||
for (const msg of result.messages) {
|
for (const msg of result.messages) {
|
||||||
// Check for archive documents
|
// Check for archive documents
|
||||||
const doc = msg.content?.document;
|
const doc = msg.content?.document;
|
||||||
@@ -219,18 +267,30 @@ export async function getTopicMessages(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Report scanning progress after each page
|
||||||
|
onProgress?.(totalScanned);
|
||||||
|
|
||||||
currentFromId = result.messages[result.messages.length - 1].id;
|
currentFromId = result.messages[result.messages.length - 1].id;
|
||||||
|
|
||||||
|
// Stuck detection: if from_message_id didn't advance, break to prevent infinite loop
|
||||||
|
if (currentFromId === previousFromId) {
|
||||||
|
log.warn(
|
||||||
|
{ chatId: chatId.toString(), topicId: topicId.toString(), currentFromId, totalScanned },
|
||||||
|
"Topic pagination stuck (from_message_id not advancing), breaking"
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
// Stop scanning once we've gone past the boundary (this page is the lookback)
|
||||||
if (boundary && currentFromId < boundary) break;
|
if (boundary && currentFromId < boundary) break;
|
||||||
|
|
||||||
if (result.messages.length < 100) break;
|
if (result.messages.length < Math.min(limit, 100)) break;
|
||||||
|
|
||||||
await sleep(config.apiDelayMs);
|
await sleep(config.apiDelayMs);
|
||||||
}
|
}
|
||||||
|
|
||||||
log.info(
|
log.info(
|
||||||
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length },
|
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length, totalScanned, pages: pageCount },
|
||||||
"Topic scan complete"
|
"Topic scan complete"
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -238,6 +298,7 @@ export async function getTopicMessages(
|
|||||||
return {
|
return {
|
||||||
archives: archives.reverse(),
|
archives: archives.reverse(),
|
||||||
photos: photos.reverse(),
|
photos: photos.reverse(),
|
||||||
|
totalScanned,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { stat } from "fs/promises";
|
|||||||
import type { Client } from "tdl";
|
import type { Client } from "tdl";
|
||||||
import { config } from "../util/config.js";
|
import { config } from "../util/config.js";
|
||||||
import { childLogger } from "../util/logger.js";
|
import { childLogger } from "../util/logger.js";
|
||||||
|
import { withFloodWait } from "../util/retry.js";
|
||||||
|
|
||||||
const log = childLogger("upload");
|
const log = childLogger("upload");
|
||||||
|
|
||||||
@@ -75,6 +76,10 @@ export async function uploadToChannel(
|
|||||||
/**
|
/**
|
||||||
* Send a single file message and wait for Telegram to confirm the upload.
|
* Send a single file message and wait for Telegram to confirm the upload.
|
||||||
* Returns the final server-assigned message ID.
|
* Returns the final server-assigned message ID.
|
||||||
|
*
|
||||||
|
* IMPORTANT: The update listener is attached BEFORE sending the message to
|
||||||
|
* avoid a race where fast uploads (cached files) complete before the listener
|
||||||
|
* is registered, which would cause the promise to hang forever.
|
||||||
*/
|
*/
|
||||||
async function sendAndWaitForUpload(
|
async function sendAndWaitForUpload(
|
||||||
client: Client,
|
client: Client,
|
||||||
@@ -84,36 +89,10 @@ async function sendAndWaitForUpload(
|
|||||||
fileName: string,
|
fileName: string,
|
||||||
fileSizeMB: number
|
fileSizeMB: number
|
||||||
): Promise<bigint> {
|
): Promise<bigint> {
|
||||||
// Send the message — this returns a temporary message immediately
|
|
||||||
const tempMsg = (await client.invoke({
|
|
||||||
_: "sendMessage",
|
|
||||||
chat_id: Number(chatId),
|
|
||||||
input_message_content: {
|
|
||||||
_: "inputMessageDocument",
|
|
||||||
document: {
|
|
||||||
_: "inputFileLocal",
|
|
||||||
path: filePath,
|
|
||||||
},
|
|
||||||
caption: caption
|
|
||||||
? {
|
|
||||||
_: "formattedText",
|
|
||||||
text: caption,
|
|
||||||
}
|
|
||||||
: undefined,
|
|
||||||
},
|
|
||||||
})) as { id: number };
|
|
||||||
|
|
||||||
const tempMsgId = tempMsg.id;
|
|
||||||
|
|
||||||
log.debug(
|
|
||||||
{ fileName, tempMsgId },
|
|
||||||
"Message queued, waiting for upload confirmation"
|
|
||||||
);
|
|
||||||
|
|
||||||
// Wait for the actual upload to complete
|
|
||||||
return new Promise<bigint>((resolve, reject) => {
|
return new Promise<bigint>((resolve, reject) => {
|
||||||
let settled = false;
|
let settled = false;
|
||||||
let lastLoggedPercent = 0;
|
let lastLoggedPercent = 0;
|
||||||
|
let tempMsgId: number | null = null;
|
||||||
|
|
||||||
// Timeout: 10 minutes per GB, minimum 10 minutes
|
// Timeout: 10 minutes per GB, minimum 10 minutes
|
||||||
const timeoutMs = Math.max(
|
const timeoutMs = Math.max(
|
||||||
@@ -156,7 +135,7 @@ async function sendAndWaitForUpload(
|
|||||||
if (update?._ === "updateMessageSendSucceeded") {
|
if (update?._ === "updateMessageSendSucceeded") {
|
||||||
const msg = update.message;
|
const msg = update.message;
|
||||||
const oldMsgId = update.old_message_id;
|
const oldMsgId = update.old_message_id;
|
||||||
if (oldMsgId === tempMsgId) {
|
if (tempMsgId !== null && oldMsgId === tempMsgId) {
|
||||||
if (!settled) {
|
if (!settled) {
|
||||||
settled = true;
|
settled = true;
|
||||||
cleanup();
|
cleanup();
|
||||||
@@ -173,7 +152,7 @@ async function sendAndWaitForUpload(
|
|||||||
// Upload failed
|
// Upload failed
|
||||||
if (update?._ === "updateMessageSendFailed") {
|
if (update?._ === "updateMessageSendFailed") {
|
||||||
const oldMsgId = update.old_message_id;
|
const oldMsgId = update.old_message_id;
|
||||||
if (oldMsgId === tempMsgId) {
|
if (tempMsgId !== null && oldMsgId === tempMsgId) {
|
||||||
if (!settled) {
|
if (!settled) {
|
||||||
settled = true;
|
settled = true;
|
||||||
cleanup();
|
cleanup();
|
||||||
@@ -189,7 +168,47 @@ async function sendAndWaitForUpload(
|
|||||||
client.off("update", handleUpdate);
|
client.off("update", handleUpdate);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Attach listener BEFORE sending to avoid missing fast completions
|
||||||
client.on("update", handleUpdate);
|
client.on("update", handleUpdate);
|
||||||
|
|
||||||
|
// Send the message — this returns a temporary message immediately.
|
||||||
|
// Wrapped in withFloodWait to handle Telegram rate limits on upload.
|
||||||
|
withFloodWait(
|
||||||
|
() =>
|
||||||
|
client.invoke({
|
||||||
|
_: "sendMessage",
|
||||||
|
chat_id: Number(chatId),
|
||||||
|
input_message_content: {
|
||||||
|
_: "inputMessageDocument",
|
||||||
|
document: {
|
||||||
|
_: "inputFileLocal",
|
||||||
|
path: filePath,
|
||||||
|
},
|
||||||
|
caption: caption
|
||||||
|
? {
|
||||||
|
_: "formattedText",
|
||||||
|
text: caption,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
"sendMessage:upload"
|
||||||
|
)
|
||||||
|
.then((result) => {
|
||||||
|
const tempMsg = result as { id: number };
|
||||||
|
tempMsgId = tempMsg.id;
|
||||||
|
log.debug(
|
||||||
|
{ fileName, tempMsgId },
|
||||||
|
"Message queued, waiting for upload confirmation"
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.catch((err) => {
|
||||||
|
if (!settled) {
|
||||||
|
settled = true;
|
||||||
|
cleanup();
|
||||||
|
reject(err);
|
||||||
|
}
|
||||||
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,12 +4,21 @@ const log = childLogger("mutex");
|
|||||||
|
|
||||||
let locked = false;
|
let locked = false;
|
||||||
let holder = "";
|
let holder = "";
|
||||||
const queue: Array<{ resolve: () => void; label: string }> = [];
|
const queue: Array<{ resolve: () => void; reject: (err: Error) => void; label: string }> = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Maximum time to wait for the TDLib mutex (ms).
|
||||||
|
* If the mutex is not available within this time, the operation is rejected.
|
||||||
|
* Default: 30 minutes (long enough for large downloads, short enough to detect hangs).
|
||||||
|
*/
|
||||||
|
const MUTEX_WAIT_TIMEOUT_MS = 30 * 60 * 1000;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Ensures only one TDLib client runs at a time across the entire worker process.
|
* Ensures only one TDLib client runs at a time across the entire worker process.
|
||||||
* Both the scheduler (auth, ingestion) and the fetch listener acquire this
|
* Both the scheduler (auth, ingestion) and the fetch listener acquire this
|
||||||
* before creating any TDLib client.
|
* before creating any TDLib client.
|
||||||
|
*
|
||||||
|
* Includes a wait timeout to prevent indefinite blocking if the current holder hangs.
|
||||||
*/
|
*/
|
||||||
export async function withTdlibMutex<T>(
|
export async function withTdlibMutex<T>(
|
||||||
label: string,
|
label: string,
|
||||||
@@ -17,7 +26,28 @@ export async function withTdlibMutex<T>(
|
|||||||
): Promise<T> {
|
): Promise<T> {
|
||||||
if (locked) {
|
if (locked) {
|
||||||
log.info({ waiting: label, holder }, "Waiting for TDLib mutex");
|
log.info({ waiting: label, holder }, "Waiting for TDLib mutex");
|
||||||
await new Promise<void>((resolve) => queue.push({ resolve, label }));
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
const idx = queue.indexOf(entry);
|
||||||
|
if (idx !== -1) {
|
||||||
|
queue.splice(idx, 1);
|
||||||
|
reject(new Error(
|
||||||
|
`TDLib mutex wait timeout after ${MUTEX_WAIT_TIMEOUT_MS / 60_000}min ` +
|
||||||
|
`(waiting: ${label}, holder: ${holder})`
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}, MUTEX_WAIT_TIMEOUT_MS);
|
||||||
|
|
||||||
|
const entry = {
|
||||||
|
resolve: () => {
|
||||||
|
clearTimeout(timer);
|
||||||
|
resolve();
|
||||||
|
},
|
||||||
|
reject,
|
||||||
|
label,
|
||||||
|
};
|
||||||
|
queue.push(entry);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
locked = true;
|
locked = true;
|
||||||
|
|||||||
109
worker/src/util/retry.ts
Normal file
109
worker/src/util/retry.ts
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
import { childLogger } from "./logger.js";
|
||||||
|
import { config } from "./config.js";
|
||||||
|
|
||||||
|
const log = childLogger("retry");
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract the FLOOD_WAIT duration (in seconds) from a TDLib error.
|
||||||
|
*
|
||||||
|
* TDLib errors for rate limiting look like:
|
||||||
|
* - Error message: "Too Many Requests: retry after 30"
|
||||||
|
* - Error message: "FLOOD_WAIT_30"
|
||||||
|
* - Error code: 429
|
||||||
|
*/
|
||||||
|
export function extractFloodWaitSeconds(err: unknown): number | null {
|
||||||
|
if (!err || typeof err !== "object") return null;
|
||||||
|
|
||||||
|
const message = (err as { message?: string }).message ?? "";
|
||||||
|
const code = (err as { code?: number }).code;
|
||||||
|
|
||||||
|
// Match "FLOOD_WAIT_<seconds>" pattern
|
||||||
|
const floodMatch = message.match(/FLOOD_WAIT_(\d+)/i);
|
||||||
|
if (floodMatch) {
|
||||||
|
return parseInt(floodMatch[1], 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match "retry after <seconds>" pattern (from Telegram HTTP API style errors)
|
||||||
|
const retryMatch = message.match(/retry after (\d+)/i);
|
||||||
|
if (retryMatch) {
|
||||||
|
return parseInt(retryMatch[1], 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If error code is 429 but no explicit wait time, default to 30 seconds
|
||||||
|
if (code === 429) {
|
||||||
|
return 30;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sleep for a given number of milliseconds, with a descriptive log message.
|
||||||
|
*/
|
||||||
|
function sleepMs(ms: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps a TDLib invoke operation with FLOOD_WAIT-aware retry logic.
|
||||||
|
*
|
||||||
|
* When Telegram returns a rate limit error (FLOOD_WAIT / 429), this:
|
||||||
|
* 1. Extracts the required wait time from the error
|
||||||
|
* 2. Logs a warning with the wait duration
|
||||||
|
* 3. Sleeps for the required duration + small jitter
|
||||||
|
* 4. Retries the operation (up to maxRetries times)
|
||||||
|
*
|
||||||
|
* Non-rate-limit errors are re-thrown immediately.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* const result = await withFloodWait(() => client.invoke({ ... }));
|
||||||
|
*/
|
||||||
|
export async function withFloodWait<T>(
|
||||||
|
fn: () => Promise<T>,
|
||||||
|
context?: string,
|
||||||
|
maxRetries?: number
|
||||||
|
): Promise<T> {
|
||||||
|
const limit = maxRetries ?? config.maxRetries;
|
||||||
|
let lastError: unknown;
|
||||||
|
|
||||||
|
for (let attempt = 0; attempt <= limit; attempt++) {
|
||||||
|
try {
|
||||||
|
return await fn();
|
||||||
|
} catch (err) {
|
||||||
|
lastError = err;
|
||||||
|
const waitSeconds = extractFloodWaitSeconds(err);
|
||||||
|
|
||||||
|
if (waitSeconds === null) {
|
||||||
|
// Not a rate limit error — re-throw immediately
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (attempt >= limit) {
|
||||||
|
log.error(
|
||||||
|
{ context, attempt, waitSeconds },
|
||||||
|
"Rate limit exceeded max retries — giving up"
|
||||||
|
);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add small jitter (1–5 seconds) to avoid multiple clients retrying simultaneously
|
||||||
|
const jitter = 1000 + Math.random() * 4000;
|
||||||
|
const totalWaitMs = waitSeconds * 1000 + jitter;
|
||||||
|
|
||||||
|
log.warn(
|
||||||
|
{
|
||||||
|
context,
|
||||||
|
attempt: attempt + 1,
|
||||||
|
maxRetries: limit,
|
||||||
|
waitSeconds,
|
||||||
|
totalWaitMs: Math.round(totalWaitMs),
|
||||||
|
},
|
||||||
|
`Rate-limited by Telegram — sleeping ${waitSeconds}s before retry`
|
||||||
|
);
|
||||||
|
|
||||||
|
await sleepMs(totalWaitMs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw lastError;
|
||||||
|
}
|
||||||
@@ -349,8 +349,18 @@ export async function runWorkerForAccount(
|
|||||||
throw new Error("No global destination channel configured — set one in the admin UI");
|
throw new Error("No global destination channel configured — set one in the admin UI");
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const mapping of channelMappings) {
|
const totalChannels = channelMappings.length;
|
||||||
|
|
||||||
|
if (totalChannels === 0) {
|
||||||
|
accountLog.info("No active source channels linked to this account — nothing to ingest");
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let chIdx = 0; chIdx < channelMappings.length; chIdx++) {
|
||||||
|
const mapping = channelMappings[chIdx];
|
||||||
const channel = mapping.channel;
|
const channel = mapping.channel;
|
||||||
|
const channelLabel = totalChannels > 1
|
||||||
|
? `[${chIdx + 1}/${totalChannels}] ${channel.title}`
|
||||||
|
: channel.title;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// ── Check if channel is a forum ──
|
// ── Check if channel is a forum ──
|
||||||
@@ -380,15 +390,16 @@ export async function runWorkerForAccount(
|
|||||||
if (forum) {
|
if (forum) {
|
||||||
// ── Forum channel: scan per-topic ──
|
// ── Forum channel: scan per-topic ──
|
||||||
await updateRunActivity(activeRunId, {
|
await updateRunActivity(activeRunId, {
|
||||||
currentActivity: `Enumerating topics in "${channel.title}"`,
|
currentActivity: `Enumerating topics in "${channelLabel}"`,
|
||||||
currentStep: "scanning",
|
currentStep: "scanning",
|
||||||
currentChannel: channel.title,
|
currentChannel: channelLabel,
|
||||||
currentFile: null,
|
currentFile: null,
|
||||||
currentFileNum: null,
|
currentFileNum: null,
|
||||||
totalFiles: null,
|
totalFiles: null,
|
||||||
downloadedBytes: null,
|
downloadedBytes: null,
|
||||||
totalBytes: null,
|
totalBytes: null,
|
||||||
downloadPercent: null,
|
downloadPercent: null,
|
||||||
|
messagesScanned: counters.messagesScanned,
|
||||||
});
|
});
|
||||||
|
|
||||||
const topics = await getForumTopicList(client, channel.telegramId);
|
const topics = await getForumTopicList(client, channel.telegramId);
|
||||||
@@ -399,34 +410,53 @@ export async function runWorkerForAccount(
|
|||||||
"Scanning forum channel by topic"
|
"Scanning forum channel by topic"
|
||||||
);
|
);
|
||||||
|
|
||||||
for (const topic of topics) {
|
for (let tIdx = 0; tIdx < topics.length; tIdx++) {
|
||||||
|
const topic = topics[tIdx];
|
||||||
try {
|
try {
|
||||||
const progress = topicProgressList.find(
|
const progress = topicProgressList.find(
|
||||||
(tp) => tp.topicId === topic.topicId
|
(tp) => tp.topicId === topic.topicId
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const topicLabel = `${channel.title} › ${topic.name}`;
|
||||||
|
const topicProgress = topics.length > 1
|
||||||
|
? ` (topic ${tIdx + 1}/${topics.length})`
|
||||||
|
: "";
|
||||||
|
|
||||||
await updateRunActivity(activeRunId, {
|
await updateRunActivity(activeRunId, {
|
||||||
currentActivity: `Scanning topic "${topic.name}" in "${channel.title}"`,
|
currentActivity: `Scanning "${topicLabel}"${topicProgress}`,
|
||||||
currentStep: "scanning",
|
currentStep: "scanning",
|
||||||
currentChannel: `${channel.title} › ${topic.name}`,
|
currentChannel: channelLabel,
|
||||||
currentFile: null,
|
currentFile: null,
|
||||||
currentFileNum: null,
|
currentFileNum: null,
|
||||||
totalFiles: null,
|
totalFiles: null,
|
||||||
downloadedBytes: null,
|
downloadedBytes: null,
|
||||||
totalBytes: null,
|
totalBytes: null,
|
||||||
downloadPercent: null,
|
downloadPercent: null,
|
||||||
|
messagesScanned: counters.messagesScanned,
|
||||||
});
|
});
|
||||||
|
|
||||||
const scanResult = await getTopicMessages(
|
const scanResult = await getTopicMessages(
|
||||||
client,
|
client,
|
||||||
channel.telegramId,
|
channel.telegramId,
|
||||||
topic.topicId,
|
topic.topicId,
|
||||||
progress?.lastProcessedMessageId
|
progress?.lastProcessedMessageId,
|
||||||
|
100,
|
||||||
|
(scanned) => {
|
||||||
|
throttled.update({
|
||||||
|
currentActivity: `Scanning "${topicLabel}"${topicProgress} — ${scanned} messages scanned`,
|
||||||
|
currentStep: "scanning",
|
||||||
|
currentChannel: channelLabel,
|
||||||
|
messagesScanned: counters.messagesScanned + scanned,
|
||||||
|
});
|
||||||
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Add scanned messages to global counter
|
||||||
|
counters.messagesScanned += scanResult.totalScanned;
|
||||||
|
|
||||||
if (scanResult.archives.length === 0) {
|
if (scanResult.archives.length === 0) {
|
||||||
accountLog.debug(
|
accountLog.info(
|
||||||
{ channelId: channel.id, topic: topic.name },
|
{ channelId: channel.id, topic: topic.name, totalScanned: scanResult.totalScanned },
|
||||||
"No new archives in topic"
|
"No new archives in topic"
|
||||||
);
|
);
|
||||||
continue;
|
continue;
|
||||||
@@ -463,15 +493,16 @@ export async function runWorkerForAccount(
|
|||||||
} else {
|
} else {
|
||||||
// ── Non-forum channel: flat scan (existing behavior) ──
|
// ── Non-forum channel: flat scan (existing behavior) ──
|
||||||
await updateRunActivity(activeRunId, {
|
await updateRunActivity(activeRunId, {
|
||||||
currentActivity: `Scanning "${channel.title}" for new archives`,
|
currentActivity: `Scanning "${channelLabel}" for new archives`,
|
||||||
currentStep: "scanning",
|
currentStep: "scanning",
|
||||||
currentChannel: channel.title,
|
currentChannel: channelLabel,
|
||||||
currentFile: null,
|
currentFile: null,
|
||||||
currentFileNum: null,
|
currentFileNum: null,
|
||||||
totalFiles: null,
|
totalFiles: null,
|
||||||
downloadedBytes: null,
|
downloadedBytes: null,
|
||||||
totalBytes: null,
|
totalBytes: null,
|
||||||
downloadPercent: null,
|
downloadPercent: null,
|
||||||
|
messagesScanned: counters.messagesScanned,
|
||||||
});
|
});
|
||||||
|
|
||||||
accountLog.info(
|
accountLog.info(
|
||||||
@@ -482,11 +513,23 @@ export async function runWorkerForAccount(
|
|||||||
const scanResult = await getChannelMessages(
|
const scanResult = await getChannelMessages(
|
||||||
client,
|
client,
|
||||||
channel.telegramId,
|
channel.telegramId,
|
||||||
mapping.lastProcessedMessageId
|
mapping.lastProcessedMessageId,
|
||||||
|
100,
|
||||||
|
(scanned) => {
|
||||||
|
throttled.update({
|
||||||
|
currentActivity: `Scanning "${channelLabel}" — ${scanned} messages scanned`,
|
||||||
|
currentStep: "scanning",
|
||||||
|
currentChannel: channelLabel,
|
||||||
|
messagesScanned: counters.messagesScanned + scanned,
|
||||||
|
});
|
||||||
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Add scanned messages to global counter
|
||||||
|
counters.messagesScanned += scanResult.totalScanned;
|
||||||
|
|
||||||
if (scanResult.archives.length === 0) {
|
if (scanResult.archives.length === 0) {
|
||||||
accountLog.debug({ channelId: channel.id }, "No new archives");
|
accountLog.info({ channelId: channel.id, title: channel.title, totalScanned: scanResult.totalScanned }, "No new archives in channel");
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -516,9 +559,11 @@ export async function runWorkerForAccount(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ── Done ──
|
// ── Done ──
|
||||||
|
await throttled.flush();
|
||||||
await completeIngestionRun(activeRunId, counters);
|
await completeIngestionRun(activeRunId, counters);
|
||||||
accountLog.info({ counters }, "Ingestion run completed");
|
accountLog.info({ counters }, "Ingestion run completed");
|
||||||
} finally {
|
} finally {
|
||||||
|
await throttled.flush();
|
||||||
await closeTdlibClient(client);
|
await closeTdlibClient(client);
|
||||||
}
|
}
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -593,6 +638,7 @@ async function processArchiveSets(
|
|||||||
currentChannel: channelTitle,
|
currentChannel: channelTitle,
|
||||||
totalFiles: archiveSets.length,
|
totalFiles: archiveSets.length,
|
||||||
zipsFound: counters.zipsFound,
|
zipsFound: counters.zipsFound,
|
||||||
|
messagesScanned: counters.messagesScanned,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Track the highest message ID that was successfully processed
|
// Track the highest message ID that was successfully processed
|
||||||
@@ -646,7 +692,6 @@ async function processOneArchiveSet(
|
|||||||
throttled, counters, topicCreator, sourceTopicId, accountLog,
|
throttled, counters, topicCreator, sourceTopicId, accountLog,
|
||||||
} = ctx;
|
} = ctx;
|
||||||
|
|
||||||
counters.messagesScanned += archiveSet.parts.length;
|
|
||||||
const archiveName = archiveSet.parts[0].fileName;
|
const archiveName = archiveSet.parts[0].fileName;
|
||||||
|
|
||||||
// ── Early skip: check if this archive set was already ingested ──
|
// ── Early skip: check if this archive set was already ingested ──
|
||||||
@@ -673,6 +718,29 @@ async function processOneArchiveSet(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ── Size guard: skip archives that exceed WORKER_MAX_ZIP_SIZE_MB ──
|
||||||
|
const totalArchiveSize = archiveSet.parts.reduce((sum, p) => sum + p.fileSize, 0n);
|
||||||
|
const maxSizeBytes = BigInt(config.maxZipSizeMB) * 1024n * 1024n;
|
||||||
|
if (totalArchiveSize > maxSizeBytes) {
|
||||||
|
accountLog.warn(
|
||||||
|
{
|
||||||
|
fileName: archiveName,
|
||||||
|
totalSizeMB: Number(totalArchiveSize / (1024n * 1024n)),
|
||||||
|
maxSizeMB: config.maxZipSizeMB,
|
||||||
|
},
|
||||||
|
"Archive exceeds max size limit, skipping"
|
||||||
|
);
|
||||||
|
await updateRunActivity(runId, {
|
||||||
|
currentActivity: `Skipped ${archiveName} (exceeds ${config.maxZipSizeMB}MB limit)`,
|
||||||
|
currentStep: "skipping",
|
||||||
|
currentChannel: channelTitle,
|
||||||
|
currentFile: archiveName,
|
||||||
|
currentFileNum: setIdx + 1,
|
||||||
|
totalFiles: totalSets,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const tempPaths: string[] = [];
|
const tempPaths: string[] = [];
|
||||||
let splitPaths: string[] = [];
|
let splitPaths: string[] = [];
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user