add TG integration

This commit is contained in:
xCyanGrizzly
2026-03-02 11:57:17 +01:00
parent b427193d17
commit 4d0df6b1a4
35 changed files with 4436 additions and 242 deletions

11
.claude/launch.json Normal file
View File

@@ -0,0 +1,11 @@
{
"version": "0.0.1",
"configurations": [
{
"name": "nextjs-dev",
"runtimeExecutable": "cmd.exe",
"runtimeArgs": ["/c", "npx next dev --port 3001"],
"port": 3001
}
]
}

View File

@@ -43,7 +43,47 @@
"Bash(gh run:*)", "Bash(gh run:*)",
"Bash(npx next lint:*)", "Bash(npx next lint:*)",
"Bash(npx eslint .)", "Bash(npx eslint .)",
"Bash(echo:*)" "Bash(echo:*)",
"Bash(npx next build:*)",
"Bash(npx eslint:*)",
"Bash(git add:*)",
"Bash(git commit -m \"$\\(cat <<''EOF''\nfix: suppress remaining ESLint warnings blocking CI\n\n- Disable react-hooks/incompatible-library warnings for RHF watch\\(\\)\n and TanStack useReactTable\\(\\) — these are false positives from the\n React Compiler plugin\n- Remove unused useDebounce import and variable from vendor-table\n\nCo-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>\nEOF\n\\)\")",
"Bash(git push:*)",
"Bash(git fetch:*)",
"Bash(git commit -m \"$\\(cat <<''EOF''\nfix: track migration_lock.toml required by prisma migrate deploy\n\nThe migration_lock.toml was excluded by a gitignore pattern\n\\(prisma/migrations/**/migration_lock.toml\\) which caused\n`prisma migrate deploy` to fail in CI with no lock file present.\nPrisma requires this file to be version-controlled.\n\nCo-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>\nEOF\n\\)\")",
"Bash(git commit -m \"$\\(cat <<''EOF''\nfix: make DATABASE_URL available to all CI steps and add prisma verify\n\n- Move DATABASE_URL to workflow-level env so all steps can access it\n- Add verification step to confirm prisma generate creates output files\n- This should fix TS2307 ''Cannot find module @/generated/prisma'' in CI\n\nCo-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>\nEOF\n\\)\")",
"Bash(git commit:*)",
"Bash(cd:*)",
"Bash(find:*)",
"Bash(curl.exe:*)",
"Bash(powershell -Command \"\\(Invoke-WebRequest -Uri ''http://localhost:3000/telegram'' -UseBasicParsing\\).Content.Length\")",
"Bash(powershell -Command \"\\(Invoke-WebRequest -Uri ''http://localhost:3000/telegram'' -UseBasicParsing\\).Content\")",
"Bash(powershell -Command \"\\(\\(Invoke-WebRequest -Uri ''http://localhost:3000/telegram'' -UseBasicParsing\\).Content\\) -match ''Telegram''\")",
"Bash(powershell -Command \"$c = \\(Invoke-WebRequest -Uri ''http://localhost:3000/telegram'' -UseBasicParsing\\).Content; @\\(''Telegram'',''Manage Telegram'',''Accounts'',''Channels'',''Add Account'',''Sync All'',''Add Channel'',''No accounts configured'',''No channels configured''\\) | ForEach-Object { if \\($c -match [regex]::Escape\\($_\\) { Write-Host \"\"FOUND: $_\" } else { Write-Host \"\"MISSING: $_\" } }\")",
"Bash(node test-ux.mjs:*)",
"Bash(npx prisma db execute:*)",
"Bash(docker.exe ps:*)",
"Bash(node test-content.mjs:*)",
"Bash(node:*)",
"Bash(tee:*)",
"Bash(git ls-remote:*)",
"Bash(npm info:*)",
"Bash(npm view:*)",
"WebFetch(domain:core.telegram.org)",
"Bash(Remove-Item -Recurse -Force \"C:\\\\Users\\\\A00963355\\\\OneDrive - Amaris Zorggroep\\\\Documents\\\\VScodeProjects\\\\DragonsStash\\\\.next\")",
"Bash(Write-Output \"Done\")",
"Bash(powershell -Command \"Remove-Item -Recurse -Force ''C:\\\\Users\\\\A00963355\\\\OneDrive - Amaris Zorggroep\\\\Documents\\\\VScodeProjects\\\\DragonsStash\\\\.next''\")",
"Bash(1:*)",
"Bash(findstr:*)",
"Bash(git reset:*)",
"Bash(git rm:*)",
"Bash(taskkill /F /FI \"WINDOWTITLE eq npm*\")",
"Bash(taskkill:*)",
"Bash(wmic process where \"name=''node.exe''\" get ProcessId,CommandLine)",
"Bash(git -C /mnt/c/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash log --oneline -10)",
"Bash(git -C \"C:/Users/A00963355/OneDrive - Amaris Zorggroep/Documents/VScodeProjects/DragonsStash\" status --short)",
"Bash(timeout:*)",
"mcp__Claude_Preview__preview_start"
] ]
} }
} }

113
README.md
View File

@@ -1,9 +1,11 @@
# Dragon's Stash # Dragon's Stash
A self-hosted inventory management system for 3D printing filament, SLA resin, and miniature paints. Built with a dark, data-dense UI inspired by [Spoolman](https://github.com/Donkie/Spoolman). A self-hosted inventory management system for 3D printing filament, SLA resin, and miniature paints — with an integrated Telegram archive worker that ingests, indexes, and redistributes archive files. Built with a dark, data-dense UI inspired by [Spoolman](https://github.com/Donkie/Spoolman).
## Features ## Features
### Inventory Management
- **Filament tracking** with spool weight, material type, color swatches, and usage logging - **Filament tracking** with spool weight, material type, color swatches, and usage logging
- **SLA resin management** with bottle sizes, resin types, and remaining volume tracking - **SLA resin management** with bottle sizes, resin types, and remaining volume tracking
- **Miniature paint inventory** with product lines, finishes, and volume tracking - **Miniature paint inventory** with product lines, finishes, and volume tracking
@@ -13,7 +15,18 @@ A self-hosted inventory management system for 3D printing filament, SLA resin, a
- **Low-stock alerts** with configurable threshold percentage - **Low-stock alerts** with configurable threshold percentage
- **Dark theme** optimized for workshop environments - **Dark theme** optimized for workshop environments
- **Role-based auth** with admin and user roles - **Role-based auth** with admin and user roles
- **Docker-ready** for easy self-hosting
### Telegram Archive Worker
- **Channel scanning** — monitors configured Telegram channels (including forum topics) for archive files (ZIP, RAR, 7z)
- **Multipart detection** — automatically groups related multipart archives (`.part01.rar`, `.z01`, `.001`, etc.)
- **Content indexing** — extracts file listings from archives and stores them in the database
- **Destination upload** — re-uploads processed archives to a configured destination channel
- **Byte-level splitting** — splits files exceeding Telegram's 2GB limit into uploadable chunks
- **Full repack** — concatenates and re-splits multipart sets where any single part exceeds 2GB
- **Progress tracking** — resumes from the last successfully processed message on each run
- **Upload verification** — confirms files reached the destination before marking them complete
- **Preview matching** — associates photo messages with their corresponding archive sets
## Tech Stack ## Tech Stack
@@ -24,6 +37,8 @@ A self-hosted inventory management system for 3D printing filament, SLA resin, a
- **UI**: Tailwind CSS, shadcn/ui, Lucide icons - **UI**: Tailwind CSS, shadcn/ui, Lucide icons
- **Tables**: TanStack Table v8 with server-side pagination - **Tables**: TanStack Table v8 with server-side pagination
- **Validation**: Zod v4 + React Hook Form - **Validation**: Zod v4 + React Hook Form
- **Worker**: Node.js + TDLib (via tdl)
- **Archive handling**: unrar, zlib
## Quick Start ## Quick Start
@@ -31,6 +46,7 @@ A self-hosted inventory management system for 3D printing filament, SLA resin, a
- Node.js 20+ - Node.js 20+
- PostgreSQL 16+ (or Docker) - PostgreSQL 16+ (or Docker)
- Telegram API credentials (for the worker — get from [my.telegram.org/apps](https://my.telegram.org/apps))
### Development Setup ### Development Setup
@@ -50,7 +66,7 @@ npm install
3. Start a PostgreSQL database (using Docker): 3. Start a PostgreSQL database (using Docker):
```bash ```bash
docker compose -f docker-compose.dev.yml up -d docker compose -f docker-compose.dev.yml up -d db
``` ```
4. Copy the environment file and update values: 4. Copy the environment file and update values:
@@ -62,8 +78,8 @@ cp .env.example .env.local
5. Run database migrations and seed: 5. Run database migrations and seed:
```bash ```bash
npx prisma migrate dev npx prisma migrate dev # Run migrations
npx prisma db seed npx prisma db seed # Seed with sample data (admin/user accounts + inventory)
``` ```
6. Start the development server: 6. Start the development server:
@@ -76,20 +92,75 @@ npm run dev
- **Admin**: admin@dragonsstash.local / password123 - **Admin**: admin@dragonsstash.local / password123
- **User**: user@dragonsstash.local / password123 - **User**: user@dragonsstash.local / password123
### Docker Deployment ### Running the Worker in Development
To also run the Telegram worker alongside the dev database:
```bash ```bash
docker compose -f docker-compose.dev.yml up -d
```
This starts both the PostgreSQL database and the worker container. The worker reads `TELEGRAM_API_ID` and `TELEGRAM_API_HASH` from your `.env.local` file.
## Docker Deployment
### Full Stack (App + Worker + Database)
Run the entire application from Docker:
```bash
cp .env.example .env
# Edit .env — set TELEGRAM_API_ID, TELEGRAM_API_HASH, and a secure AUTH_SECRET
docker compose up -d docker compose up -d
``` ```
This starts both the application and PostgreSQL database. The app will be available at `http://localhost:3000`. The app will be available at [http://localhost:3000](http://localhost:3000).
To seed the database on first run: ### Seeding the Database
To seed the database with sample data on first run:
```bash ```bash
SEED_DATABASE=true docker compose up -d SEED_DATABASE=true docker compose up -d
``` ```
This creates default admin/user accounts and sample inventory data. The seed runs once during the app container's entrypoint (before the Next.js server starts). On subsequent runs without `SEED_DATABASE=true`, seeding is skipped automatically.
You can also seed manually at any time:
```bash
npx prisma db seed
```
### Development Mode (DB + Worker Only)
If you prefer to run the Next.js app locally with hot reload:
```bash
docker compose -f docker-compose.dev.yml up -d # Start DB + worker
npm run dev # Start Next.js locally
```
### Rebuilding After Code Changes
```bash
docker compose build && docker compose up -d --force-recreate
```
To rebuild only the worker:
```bash
docker compose build worker && docker compose up -d worker --force-recreate
```
### Viewing Logs
```bash
docker compose logs -f worker # Worker logs
docker compose logs -f app # App logs
docker compose logs -f db # Database logs
```
## Project Structure ## Project Structure
``` ```
@@ -116,6 +187,16 @@ src/
lib/ # Auth config, Prisma client, constants lib/ # Auth config, Prisma client, constants
schemas/ # Zod validation schemas schemas/ # Zod validation schemas
types/ # TypeScript type definitions types/ # TypeScript type definitions
worker/
src/
archive/ # Archive detection, multipart grouping, byte-level splitting
db/ # Prisma queries for packages, progress tracking
preview/ # Preview image matching
tdlib/ # TDLib client, channel scanning, topic/forum handling
upload/ # Telegram upload logic
util/ # Config, logger
worker.ts # Main processing pipeline
index.ts # Entry point + scheduler
prisma/ prisma/
schema.prisma # Database schema schema.prisma # Database schema
seed.ts # Seed data seed.ts # Seed data
@@ -125,6 +206,8 @@ prisma/
Environment variables (see `.env.example`): Environment variables (see `.env.example`):
### Application
| Variable | Description | Default | | Variable | Description | Default |
|----------|-------------|---------| |----------|-------------|---------|
| `DATABASE_URL` | PostgreSQL connection string | Required | | `DATABASE_URL` | PostgreSQL connection string | Required |
@@ -133,6 +216,20 @@ Environment variables (see `.env.example`):
| `AUTH_GITHUB_ID` | GitHub OAuth client ID | Optional | | `AUTH_GITHUB_ID` | GitHub OAuth client ID | Optional |
| `AUTH_GITHUB_SECRET` | GitHub OAuth client secret | Optional | | `AUTH_GITHUB_SECRET` | GitHub OAuth client secret | Optional |
| `NEXT_PUBLIC_APP_URL` | Public application URL | `http://localhost:3000` | | `NEXT_PUBLIC_APP_URL` | Public application URL | `http://localhost:3000` |
| `SEED_DATABASE` | Seed the database on app container start | `false` |
### Telegram Worker
| Variable | Description | Default |
|----------|-------------|---------|
| `TELEGRAM_API_ID` | Telegram API ID (from [my.telegram.org](https://my.telegram.org/apps)) | Required |
| `TELEGRAM_API_HASH` | Telegram API hash | Required |
| `WORKER_INTERVAL_MINUTES` | Scan interval in minutes | `60` |
| `WORKER_TEMP_DIR` | Temp directory for downloads | `/tmp/zips` |
| `TDLIB_STATE_DIR` | TDLib session state persistence directory | `/data/tdlib` |
| `WORKER_MAX_ZIP_SIZE_MB` | Max archive size to process (MB) | `4096` |
| `MULTIPART_TIMEOUT_HOURS` | Max time span for multipart set parts (0 = no limit) | `0` |
| `LOG_LEVEL` | Worker log level (`debug`, `info`, `warn`, `error`) | `info` |
## Health Check ## Health Check

1192
TELEGRAM_INTEGRATION_PLAN.md Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -19,6 +19,8 @@ services:
build: build:
context: . context: .
dockerfile: worker/Dockerfile dockerfile: worker/Dockerfile
env_file:
- .env.local
environment: environment:
- DATABASE_URL=postgresql://dragons:stash@db:5432/dragonsstash - DATABASE_URL=postgresql://dragons:stash@db:5432/dragonsstash
- WORKER_INTERVAL_MINUTES=5 - WORKER_INTERVAL_MINUTES=5
@@ -26,8 +28,6 @@ services:
- TDLIB_STATE_DIR=/data/tdlib - TDLIB_STATE_DIR=/data/tdlib
- WORKER_MAX_ZIP_SIZE_MB=4096 - WORKER_MAX_ZIP_SIZE_MB=4096
- LOG_LEVEL=debug - LOG_LEVEL=debug
- TELEGRAM_API_ID=${TELEGRAM_API_ID}
- TELEGRAM_API_HASH=${TELEGRAM_API_HASH}
volumes: volumes:
- tdlib_dev_state:/data/tdlib - tdlib_dev_state:/data/tdlib
- tmp_dev_zips:/tmp/zips - tmp_dev_zips:/tmp/zips

View File

@@ -0,0 +1,30 @@
-- Migration: adds key/value global settings and async channel-fetch requests.
-- Generated by Prisma Migrate; comments added for review clarity.

-- CreateEnum
-- Lifecycle of an on-demand "fetch channel list" request processed by the worker.
CREATE TYPE "FetchStatus" AS ENUM ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED');

-- CreateTable
-- Simple key/value store for app-wide settings (key is the primary key,
-- capped at 64 chars; value is free-form text).
CREATE TABLE "global_settings" (
"key" VARCHAR(64) NOT NULL,
"value" TEXT NOT NULL,
"updatedAt" TIMESTAMP(3) NOT NULL,

CONSTRAINT "global_settings_pkey" PRIMARY KEY ("key")
);

-- CreateTable
-- One row per channel-list fetch request. The UI creates a PENDING row,
-- the worker fills "resultJson" (or "error") and flips "status".
CREATE TABLE "channel_fetch_requests" (
"id" TEXT NOT NULL,
"accountId" TEXT NOT NULL,
"status" "FetchStatus" NOT NULL DEFAULT 'PENDING',
"resultJson" TEXT,
"error" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,

CONSTRAINT "channel_fetch_requests_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
-- Supports the worker's "pending requests for account X" lookup.
CREATE INDEX "channel_fetch_requests_accountId_status_idx" ON "channel_fetch_requests"("accountId", "status");

-- AddForeignKey
-- Requests are owned by an account; deleting the account removes its requests.
ALTER TABLE "channel_fetch_requests" ADD CONSTRAINT "channel_fetch_requests_accountId_fkey" FOREIGN KEY ("accountId") REFERENCES "telegram_accounts"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -384,6 +384,13 @@ enum IngestionStatus {
CANCELLED CANCELLED
} }
/// Lifecycle of an on-demand channel-list fetch request
/// (see ChannelFetchRequest.status).
enum FetchStatus {
  PENDING
  IN_PROGRESS
  COMPLETED
  FAILED
}
model TelegramAccount { model TelegramAccount {
id String @id @default(cuid()) id String @id @default(cuid())
phone String @unique phone String @unique
@@ -397,6 +404,7 @@ model TelegramAccount {
channelMaps AccountChannelMap[] channelMaps AccountChannelMap[]
ingestionRuns IngestionRun[] ingestionRuns IngestionRun[]
fetchRequests ChannelFetchRequest[]
@@index([isActive]) @@index([isActive])
@@map("telegram_accounts") @@map("telegram_accounts")
@@ -535,3 +543,26 @@ model TopicProgress {
@@index([accountChannelMapId]) @@index([accountChannelMapId])
@@map("topic_progress") @@map("topic_progress")
} }
/// App-wide key/value settings store (maps to "global_settings").
/// Keys are short identifiers (max 64 chars); values are free-form text.
model GlobalSetting {
  key       String   @id @db.VarChar(64)
  value     String   @db.Text
  updatedAt DateTime @updatedAt

  @@map("global_settings")
}
/// An asynchronous request to fetch an account's Telegram channel list.
/// Created PENDING by the web UI; the worker fills resultJson (or error)
/// and advances status. Cascade-deleted with the owning account.
model ChannelFetchRequest {
  id         String      @id @default(cuid())
  accountId  String
  status     FetchStatus @default(PENDING)
  resultJson String?     @db.Text // JSON-encoded channel list on success
  error      String?     // failure reason when status = FAILED
  createdAt  DateTime    @default(now())
  updatedAt  DateTime    @updatedAt

  account TelegramAccount @relation(fields: [accountId], references: [id], onDelete: Cascade)

  // Supports "pending requests for account X" polling by the worker.
  @@index([accountId, status])
  @@map("channel_fetch_requests")
}

View File

@@ -9,6 +9,7 @@ import {
Link2, Link2,
Play, Play,
KeyRound, KeyRound,
Download,
} from "lucide-react"; } from "lucide-react";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
@@ -36,6 +37,7 @@ interface AccountColumnsProps {
onViewLinks: (id: string) => void; onViewLinks: (id: string) => void;
onTriggerSync: (id: string) => void; onTriggerSync: (id: string) => void;
onEnterCode: (account: AccountRow) => void; onEnterCode: (account: AccountRow) => void;
onFetchChannels: (id: string) => void;
} }
export function getAccountColumns({ export function getAccountColumns({
@@ -45,6 +47,7 @@ export function getAccountColumns({
onViewLinks, onViewLinks,
onTriggerSync, onTriggerSync,
onEnterCode, onEnterCode,
onFetchChannels,
}: AccountColumnsProps): ColumnDef<AccountRow, unknown>[] { }: AccountColumnsProps): ColumnDef<AccountRow, unknown>[] {
return [ return [
{ {
@@ -157,6 +160,13 @@ export function getAccountColumns({
<Link2 className="mr-2 h-3.5 w-3.5" /> <Link2 className="mr-2 h-3.5 w-3.5" />
Manage Channels Manage Channels
</DropdownMenuItem> </DropdownMenuItem>
<DropdownMenuItem
onClick={() => onFetchChannels(row.original.id)}
disabled={row.original.authState !== "AUTHENTICATED"}
>
<Download className="mr-2 h-3.5 w-3.5" />
Fetch Channels
</DropdownMenuItem>
<DropdownMenuItem onClick={() => onTriggerSync(row.original.id)}> <DropdownMenuItem onClick={() => onTriggerSync(row.original.id)}>
<Play className="mr-2 h-3.5 w-3.5" /> <Play className="mr-2 h-3.5 w-3.5" />
Sync Now Sync Now

View File

@@ -1,12 +1,14 @@
"use client"; "use client";
import { useState, useTransition } from "react"; import { useState, useEffect, useTransition } from "react";
import { useRouter } from "next/navigation";
import { Plus, Play } from "lucide-react"; import { Plus, Play } from "lucide-react";
import { toast } from "sonner"; import { toast } from "sonner";
import { getAccountColumns } from "./account-columns"; import { getAccountColumns } from "./account-columns";
import { AccountModal } from "./account-modal"; import { AccountModal } from "./account-modal";
import { AccountLinksDrawer } from "./account-links-drawer"; import { AccountLinksDrawer } from "./account-links-drawer";
import { AuthCodeDialog } from "./auth-code-dialog"; import { AuthCodeDialog } from "./auth-code-dialog";
import { ChannelPickerDialog } from "./channel-picker-dialog";
import { deleteAccount, toggleAccountActive, triggerIngestion } from "../actions"; import { deleteAccount, toggleAccountActive, triggerIngestion } from "../actions";
import { DataTable } from "@/components/shared/data-table"; import { DataTable } from "@/components/shared/data-table";
import { DeleteDialog } from "@/components/shared/delete-dialog"; import { DeleteDialog } from "@/components/shared/delete-dialog";
@@ -19,12 +21,27 @@ interface AccountsTabProps {
} }
export function AccountsTab({ accounts }: AccountsTabProps) { export function AccountsTab({ accounts }: AccountsTabProps) {
const router = useRouter();
const [isPending, startTransition] = useTransition(); const [isPending, startTransition] = useTransition();
const [modalOpen, setModalOpen] = useState(false); const [modalOpen, setModalOpen] = useState(false);
const [editAccount, setEditAccount] = useState<AccountRow | undefined>(); const [editAccount, setEditAccount] = useState<AccountRow | undefined>();
const [deleteId, setDeleteId] = useState<string | null>(null); const [deleteId, setDeleteId] = useState<string | null>(null);
const [linksAccountId, setLinksAccountId] = useState<string | null>(null); const [linksAccountId, setLinksAccountId] = useState<string | null>(null);
const [authCodeAccount, setAuthCodeAccount] = useState<AccountRow | null>(null); const [authCodeAccount, setAuthCodeAccount] = useState<AccountRow | null>(null);
const [fetchChannelsAccountId, setFetchChannelsAccountId] = useState<string | null>(null);
// Auto-refresh when accounts are in transitional states (PENDING, AWAITING_CODE, AWAITING_PASSWORD)
const hasTransitional = accounts.some(
(a) => a.authState === "PENDING" || a.authState === "AWAITING_CODE" || a.authState === "AWAITING_PASSWORD"
);
useEffect(() => {
if (!hasTransitional) return;
const interval = setInterval(() => {
router.refresh();
}, 3_000);
return () => clearInterval(interval);
}, [hasTransitional, router]);
const columns = getAccountColumns({ const columns = getAccountColumns({
onEdit: (account) => { onEdit: (account) => {
@@ -48,6 +65,7 @@ export function AccountsTab({ accounts }: AccountsTabProps) {
else toast.error(result.error); else toast.error(result.error);
}); });
}, },
onFetchChannels: (id) => setFetchChannelsAccountId(id),
}); });
const { table } = useDataTable({ const { table } = useDataTable({
@@ -135,6 +153,14 @@ export function AccountsTab({ accounts }: AccountsTabProps) {
if (!open) setAuthCodeAccount(null); if (!open) setAuthCodeAccount(null);
}} }}
/> />
<ChannelPickerDialog
accountId={fetchChannelsAccountId}
open={!!fetchChannelsAccountId}
onOpenChange={(open) => {
if (!open) setFetchChannelsAccountId(null);
}}
/>
</div> </div>
); );
} }

View File

@@ -3,9 +3,10 @@
import { type ColumnDef } from "@tanstack/react-table"; import { type ColumnDef } from "@tanstack/react-table";
import { import {
MoreHorizontal, MoreHorizontal,
Pencil,
Trash2, Trash2,
Power, Power,
ArrowDownToLine,
ArrowUpFromLine,
} from "lucide-react"; } from "lucide-react";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
@@ -19,15 +20,15 @@ import {
import type { ChannelRow } from "@/lib/telegram/admin-queries"; import type { ChannelRow } from "@/lib/telegram/admin-queries";
interface ChannelColumnsProps { interface ChannelColumnsProps {
onEdit: (channel: ChannelRow) => void;
onToggleActive: (id: string) => void; onToggleActive: (id: string) => void;
onDelete: (id: string) => void; onDelete: (id: string) => void;
onSetType: (id: string, type: "SOURCE" | "DESTINATION") => void;
} }
export function getChannelColumns({ export function getChannelColumns({
onEdit,
onToggleActive, onToggleActive,
onDelete, onDelete,
onSetType,
}: ChannelColumnsProps): ColumnDef<ChannelRow, unknown>[] { }: ChannelColumnsProps): ColumnDef<ChannelRow, unknown>[] {
return [ return [
{ {
@@ -105,10 +106,21 @@ export function getChannelColumns({
</Button> </Button>
</DropdownMenuTrigger> </DropdownMenuTrigger>
<DropdownMenuContent align="end"> <DropdownMenuContent align="end">
<DropdownMenuItem onClick={() => onEdit(row.original)}> {row.original.type === "SOURCE" ? (
<Pencil className="mr-2 h-3.5 w-3.5" /> <DropdownMenuItem
Edit onClick={() => onSetType(row.original.id, "DESTINATION")}
</DropdownMenuItem> >
<ArrowDownToLine className="mr-2 h-3.5 w-3.5" />
Set as Destination
</DropdownMenuItem>
) : (
<DropdownMenuItem
onClick={() => onSetType(row.original.id, "SOURCE")}
>
<ArrowUpFromLine className="mr-2 h-3.5 w-3.5" />
Set as Source
</DropdownMenuItem>
)}
<DropdownMenuItem <DropdownMenuItem
onClick={() => onToggleActive(row.original.id)} onClick={() => onToggleActive(row.original.id)}
> >

View File

@@ -0,0 +1,337 @@
"use client";
import { useState, useEffect, useCallback, useTransition } from "react";
import { Loader2, Search, CheckSquare, Square, Radio } from "lucide-react";
import { toast } from "sonner";
import { saveChannelSelections } from "../actions";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogFooter,
} from "@/components/ui/dialog";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Badge } from "@/components/ui/badge";
import { Checkbox } from "@/components/ui/checkbox";
import { ScrollArea } from "@/components/ui/scroll-area";
interface FetchedChannel {
chatId: string;
title: string;
type: "channel" | "supergroup";
isForum: boolean;
memberCount: number | null;
alreadyLinked: boolean;
existingChannelId: string | null;
}
interface ChannelPickerDialogProps {
accountId: string | null;
open: boolean;
onOpenChange: (open: boolean) => void;
}
type FetchState =
| { phase: "idle" }
| { phase: "fetching"; requestId?: string }
| { phase: "loaded"; channels: FetchedChannel[] }
| { phase: "error"; message: string };
export function ChannelPickerDialog({
accountId,
open,
onOpenChange,
}: ChannelPickerDialogProps) {
const [isPending, startTransition] = useTransition();
const [fetchState, setFetchState] = useState<FetchState>({ phase: "idle" });
const [selected, setSelected] = useState<Set<string>>(new Set());
const [search, setSearch] = useState("");
// Start fetching when dialog opens
useEffect(() => {
if (!open || !accountId) {
setFetchState({ phase: "idle" });
setSelected(new Set());
setSearch("");
return;
}
let mounted = true;
const startFetch = async () => {
setFetchState({ phase: "fetching" });
try {
// POST to create a fetch request
const postRes = await fetch(
`/api/telegram/accounts/${accountId}/fetch-channels`,
{ method: "POST" }
);
if (!postRes.ok) {
let message = `Server error (${postRes.status})`;
try {
const err = await postRes.json();
message = err.error || message;
} catch {
// response wasn't JSON
}
if (mounted) setFetchState({ phase: "error", message });
return;
}
const { requestId } = await postRes.json();
if (mounted) setFetchState({ phase: "fetching", requestId });
// Poll for result
const poll = async () => {
for (let i = 0; i < 30; i++) {
await new Promise((r) => setTimeout(r, 2000));
if (!mounted) return;
const getRes = await fetch(
`/api/telegram/accounts/${accountId}/fetch-channels?requestId=${requestId}`
);
if (!getRes.ok) continue;
const data = await getRes.json();
if (data.status === "COMPLETED") {
if (mounted) {
// Filter out already-linked channels
const available = (data.channels as FetchedChannel[]).filter(
(ch) => !ch.alreadyLinked
);
setFetchState({ phase: "loaded", channels: available });
}
return;
} else if (data.status === "FAILED") {
if (mounted) {
setFetchState({
phase: "error",
message: data.error || "Fetch failed",
});
}
return;
}
}
if (mounted) {
setFetchState({ phase: "error", message: "Fetch timed out" });
}
};
await poll();
} catch (err) {
if (mounted) {
const message = err instanceof Error ? err.message : "Network error";
setFetchState({ phase: "error", message: `Network error: ${message}` });
}
}
};
startFetch();
return () => { mounted = false; };
}, [open, accountId]);
const channels =
fetchState.phase === "loaded" ? fetchState.channels : [];
const filteredChannels = channels.filter((ch) =>
ch.title.toLowerCase().includes(search.toLowerCase())
);
const toggleChannel = (chatId: string) => {
setSelected((prev) => {
const next = new Set(prev);
if (next.has(chatId)) {
next.delete(chatId);
} else {
next.add(chatId);
}
return next;
});
};
const selectAll = () => {
setSelected(new Set(filteredChannels.map((ch) => ch.chatId)));
};
const deselectAll = () => {
setSelected(new Set());
};
const handleSave = () => {
if (!accountId || selected.size === 0) return;
const selectedChannels = channels
.filter((ch) => selected.has(ch.chatId))
.map((ch) => ({
telegramId: ch.chatId,
title: ch.title,
isForum: ch.isForum,
}));
startTransition(async () => {
const result = await saveChannelSelections(accountId, selectedChannels);
if (result.success) {
toast.success(`${selectedChannels.length} channel(s) linked as source`);
onOpenChange(false);
} else {
toast.error(result.error);
}
});
};
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="sm:max-w-xl max-h-[85vh] flex flex-col">
<DialogHeader>
<DialogTitle>Select Source Channels</DialogTitle>
<DialogDescription>
Choose which channels to scan for archives. Already-linked channels
are hidden.
</DialogDescription>
</DialogHeader>
{fetchState.phase === "fetching" && (
<div className="flex flex-col items-center justify-center gap-3 py-12">
<Loader2 className="h-8 w-8 animate-spin text-primary" />
<p className="text-sm text-muted-foreground">
Fetching channels from Telegram...
</p>
<p className="text-xs text-muted-foreground">
This may take a few seconds
</p>
</div>
)}
{fetchState.phase === "error" && (
<div className="flex flex-col items-center justify-center gap-3 py-12">
<p className="text-sm text-destructive">{fetchState.message}</p>
<Button
variant="outline"
size="sm"
onClick={() => {
// Reopen to re-trigger fetch
onOpenChange(false);
setTimeout(() => onOpenChange(true), 100);
}}
>
Retry
</Button>
</div>
)}
{fetchState.phase === "loaded" && (
<>
{channels.length === 0 ? (
<div className="flex flex-col items-center justify-center gap-2 py-12">
<p className="text-sm text-muted-foreground">
All channels are already linked to this account.
</p>
</div>
) : (
<>
{/* Search + bulk actions */}
<div className="flex items-center gap-2">
<div className="relative flex-1">
<Search className="absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-muted-foreground" />
<Input
placeholder="Filter channels..."
value={search}
onChange={(e) => setSearch(e.target.value)}
className="pl-9"
/>
</div>
<Button variant="outline" size="sm" onClick={selectAll}>
All
</Button>
<Button variant="outline" size="sm" onClick={deselectAll}>
None
</Button>
</div>
<p className="text-xs text-muted-foreground">
{filteredChannels.length} channel(s) available
{selected.size > 0 && ` \u2014 ${selected.size} selected`}
</p>
{/* Channel list */}
<ScrollArea className="flex-1 max-h-[400px] -mx-2 px-2">
<div className="space-y-1">
{filteredChannels.map((ch) => (
<label
key={ch.chatId}
className="flex items-center gap-3 rounded-md border p-3 cursor-pointer hover:bg-accent/50 transition-colors"
>
<Checkbox
checked={selected.has(ch.chatId)}
onCheckedChange={() => toggleChannel(ch.chatId)}
/>
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2">
<span className="text-sm font-medium truncate">
{ch.title}
</span>
<Badge
variant="outline"
className="text-[10px] shrink-0"
>
{ch.type}
</Badge>
{ch.isForum && (
<Badge
variant="secondary"
className="text-[10px] shrink-0"
>
forum
</Badge>
)}
{!ch.existingChannelId && (
<Badge
variant="secondary"
className="text-[10px] bg-emerald-500/10 text-emerald-600 border-emerald-500/20 shrink-0"
>
new
</Badge>
)}
</div>
<span className="text-xs text-muted-foreground">
ID: {ch.chatId}
{ch.memberCount ? ` \u2022 ${ch.memberCount} members` : ""}
</span>
</div>
</label>
))}
</div>
</ScrollArea>
</>
)}
</>
)}
<DialogFooter>
<Button variant="outline" onClick={() => onOpenChange(false)}>
Cancel
</Button>
<Button
onClick={handleSave}
disabled={
isPending ||
selected.size === 0 ||
fetchState.phase !== "loaded"
}
>
{isPending ? (
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
) : null}
Link {selected.size} Channel{selected.size !== 1 ? "s" : ""}
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}

View File

@@ -1,32 +1,29 @@
"use client"; "use client";
import { useState, useTransition } from "react"; import { useState, useTransition } from "react";
import { Plus } from "lucide-react";
import { toast } from "sonner"; import { toast } from "sonner";
import { getChannelColumns } from "./channel-columns"; import { getChannelColumns } from "./channel-columns";
import { ChannelModal } from "./channel-modal"; import { DestinationCard } from "./destination-card";
import { deleteChannel, toggleChannelActive } from "../actions"; import {
deleteChannel,
toggleChannelActive,
setChannelType,
} from "../actions";
import { DataTable } from "@/components/shared/data-table"; import { DataTable } from "@/components/shared/data-table";
import { DeleteDialog } from "@/components/shared/delete-dialog"; import { DeleteDialog } from "@/components/shared/delete-dialog";
import { Button } from "@/components/ui/button"; import type { ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
import type { ChannelRow } from "@/lib/telegram/admin-queries";
import { useDataTable } from "@/hooks/use-data-table"; import { useDataTable } from "@/hooks/use-data-table";
interface ChannelsTabProps { interface ChannelsTabProps {
channels: ChannelRow[]; channels: ChannelRow[];
globalDestination: GlobalDestination;
} }
export function ChannelsTab({ channels }: ChannelsTabProps) { export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
const [isPending, startTransition] = useTransition(); const [isPending, startTransition] = useTransition();
const [modalOpen, setModalOpen] = useState(false);
const [editChannel, setEditChannel] = useState<ChannelRow | undefined>();
const [deleteId, setDeleteId] = useState<string | null>(null); const [deleteId, setDeleteId] = useState<string | null>(null);
const columns = getChannelColumns({ const columns = getChannelColumns({
onEdit: (channel) => {
setEditChannel(channel);
setModalOpen(true);
},
onToggleActive: (id) => { onToggleActive: (id) => {
startTransition(async () => { startTransition(async () => {
const result = await toggleChannelActive(id); const result = await toggleChannelActive(id);
@@ -35,6 +32,13 @@ export function ChannelsTab({ channels }: ChannelsTabProps) {
}); });
}, },
onDelete: (id) => setDeleteId(id), onDelete: (id) => setDeleteId(id),
onSetType: (id, type) => {
startTransition(async () => {
const result = await setChannelType(id, type);
if (result.success) toast.success(`Channel set as ${type.toLowerCase()}`);
else toast.error(result.error);
});
},
}); });
const { table } = useDataTable({ const { table } = useDataTable({
@@ -58,30 +62,17 @@ export function ChannelsTab({ channels }: ChannelsTabProps) {
return ( return (
<div className="space-y-4"> <div className="space-y-4">
<div className="flex items-center gap-2"> <DestinationCard destination={globalDestination} />
<Button
onClick={() => { {channels.length > 0 && (
setEditChannel(undefined); <p className="text-xs text-muted-foreground">
setModalOpen(true); Source channels are added per-account via the &quot;Fetch Channels&quot; button on the Accounts tab.
}} </p>
> )}
<Plus className="mr-2 h-4 w-4" />
Add Channel
</Button>
</div>
<DataTable <DataTable
table={table} table={table}
emptyMessage="No channels configured. Add a Telegram channel to start ingesting." emptyMessage="No channels yet. Use &quot;Fetch Channels&quot; on an account to discover and add source channels."
/>
<ChannelModal
open={modalOpen}
onOpenChange={(open) => {
setModalOpen(open);
if (!open) setEditChannel(undefined);
}}
channel={editChannel}
/> />
<DeleteDialog <DeleteDialog

View File

@@ -0,0 +1,287 @@
"use client";
import { useState, useEffect, useTransition } from "react";
import { Database, AlertTriangle, Link2, Plus, Loader2 } from "lucide-react";
import { toast } from "sonner";
import { createDestinationViaWorker } from "../actions";
import { Card, CardContent } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogFooter,
} from "@/components/ui/dialog";
import type { GlobalDestination } from "@/lib/telegram/admin-queries";
// Props for the destination banner card shown on the Channels tab.
interface DestinationCardProps {
  // The currently configured global destination channel; falsy when none is
  // set (shape declared in admin-queries).
  destination: GlobalDestination;
}
// Discriminated union tracking the "create destination group via worker"
// flow: idle -> creating (polling the worker request) -> done | error.
type CreateState =
  | { phase: "idle" }
  // requestId is optional because the server action can fail before a
  // worker-request row is created.
  | { phase: "creating"; requestId?: string }
  | { phase: "done"; title: string; telegramId: string }
  | { phase: "error"; message: string };
/**
 * Banner card for the global destination channel — the private Telegram
 * group all accounts write archives to.
 *
 * Renders one of two states:
 *  - no destination yet: dashed warning card with a "Create Destination" button;
 *  - destination configured: its title, Telegram ID, invite-link indicator,
 *    and a "Change" button.
 * Both states share the same CreateDestinationDialog.
 */
export function DestinationCard({ destination }: DestinationCardProps) {
  const [isPending, startTransition] = useTransition();
  const [createOpen, setCreateOpen] = useState(false);
  const [title, setTitle] = useState("dragonsstash db");
  const [createState, setCreateState] = useState<CreateState>({ phase: "idle" });
  // Poll for worker result when creating. The server action only enqueues a
  // request; the worker fulfils it out-of-band, so we poll the request row
  // until it reports COMPLETED or FAILED.
  useEffect(() => {
    if (createState.phase !== "creating" || !createState.requestId) return;
    let mounted = true; // cleared on teardown so late responses are ignored
    const requestId = createState.requestId;
    const poll = async () => {
      // 60 attempts x 2s sleep ≈ 2 minutes before giving up.
      for (let i = 0; i < 60; i++) {
        await new Promise((r) => setTimeout(r, 2000));
        if (!mounted) return;
        try {
          const res = await fetch(
            `/api/telegram/worker-request?requestId=${requestId}`
          );
          if (!res.ok) continue;
          const data = await res.json();
          if (data.status === "COMPLETED" && data.result) {
            if (mounted) {
              setCreateState({
                phase: "done",
                title: data.result.title,
                telegramId: data.result.telegramId,
              });
              toast.success(`Telegram group "${data.result.title}" created and set as destination!`);
              setCreateOpen(false);
              // Refresh the page to show the new destination
              window.location.reload();
            }
            return;
          } else if (data.status === "FAILED") {
            if (mounted) {
              setCreateState({
                phase: "error",
                message: data.error || "Worker failed to create the group",
              });
            }
            return;
          }
          // Any other status (e.g. still pending) falls through to the
          // next polling iteration.
        } catch {
          // Network blip — keep polling
        }
      }
      if (mounted) {
        setCreateState({ phase: "error", message: "Timed out waiting for the worker" });
      }
    };
    poll();
    return () => { mounted = false; };
  }, [createState]);
  // Kick off the server action; on success we enter the "creating" phase,
  // which starts the polling effect above.
  const handleCreate = () => {
    if (!title.trim()) return;
    startTransition(async () => {
      const result = await createDestinationViaWorker(title.trim());
      if (result.success) {
        setCreateState({ phase: "creating", requestId: result.data.requestId });
      } else {
        setCreateState({ phase: "error", message: result.error ?? "Unknown error" });
      }
    });
  };
  // Dialog open/close handler; closing discards any done/error state but
  // deliberately keeps "creating" alive so the poll can finish.
  const handleOpenChange = (open: boolean) => {
    setCreateOpen(open);
    if (!open) {
      // Reset state when closing (unless actively creating)
      if (createState.phase !== "creating") {
        setCreateState({ phase: "idle" });
      }
    }
  };
  // No destination configured yet: warning card + create flow.
  if (!destination) {
    return (
      <>
        <Card className="border-dashed border-yellow-500/40">
          <CardContent className="flex items-center justify-between gap-4 py-4">
            <div className="flex items-center gap-3">
              <AlertTriangle className="h-5 w-5 text-yellow-500 shrink-0" />
              <div>
                <p className="text-sm font-medium">
                  No destination channel configured
                </p>
                <p className="text-xs text-muted-foreground">
                  Create a private Telegram group that all accounts will write
                  archives to. Requires at least one authenticated account.
                </p>
              </div>
            </div>
            <Button size="sm" onClick={() => setCreateOpen(true)}>
              <Plus className="mr-2 h-3.5 w-3.5" />
              Create Destination
            </Button>
          </CardContent>
        </Card>
        <CreateDestinationDialog
          open={createOpen}
          onOpenChange={handleOpenChange}
          title={title}
          setTitle={setTitle}
          onSubmit={handleCreate}
          createState={createState}
          isPending={isPending}
        />
      </>
    );
  }
  // Destination exists: summary card; "Change" reuses the same create dialog.
  return (
    <>
      <Card>
        <CardContent className="flex items-center justify-between gap-4 py-4">
          <div className="flex items-center gap-3">
            <Database className="h-5 w-5 text-purple-500 shrink-0" />
            <div>
              <div className="flex items-center gap-2">
                <p className="text-sm font-medium">{destination.title}</p>
                <Badge
                  variant="outline"
                  className="bg-purple-500/10 text-purple-600 border-purple-500/20 text-[10px]"
                >
                  DESTINATION
                </Badge>
              </div>
              <div className="flex items-center gap-3 text-xs text-muted-foreground">
                <span>ID: {destination.telegramId}</span>
                {destination.inviteLink && (
                  <span className="flex items-center gap-1">
                    <Link2 className="h-3 w-3" />
                    Invite link active
                  </span>
                )}
              </div>
            </div>
          </div>
          <Button
            variant="outline"
            size="sm"
            onClick={() => setCreateOpen(true)}
          >
            Change
          </Button>
        </CardContent>
      </Card>
      <CreateDestinationDialog
        open={createOpen}
        onOpenChange={handleOpenChange}
        title={title}
        setTitle={setTitle}
        onSubmit={handleCreate}
        createState={createState}
        isPending={isPending}
      />
    </>
  );
}
function CreateDestinationDialog({
open,
onOpenChange,
title,
setTitle,
onSubmit,
createState,
isPending,
}: {
open: boolean;
onOpenChange: (open: boolean) => void;
title: string;
setTitle: (v: string) => void;
onSubmit: () => void;
createState: CreateState;
isPending: boolean;
}) {
const isCreating = createState.phase === "creating";
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="sm:max-w-md">
<DialogHeader>
<DialogTitle>Create Destination Channel</DialogTitle>
<DialogDescription>
A private Telegram group will be created automatically using one of
your authenticated accounts. All accounts will write archives here.
</DialogDescription>
</DialogHeader>
{isCreating ? (
<div className="flex flex-col items-center justify-center gap-3 py-8">
<Loader2 className="h-8 w-8 animate-spin text-primary" />
<p className="text-sm text-muted-foreground">
Creating Telegram group...
</p>
<p className="text-xs text-muted-foreground">
This may take a few seconds
</p>
</div>
) : (
<div className="space-y-4">
{createState.phase === "error" && (
<div className="rounded-md border border-destructive/50 bg-destructive/10 p-3">
<p className="text-sm text-destructive">{createState.message}</p>
</div>
)}
<div className="space-y-2">
<Label htmlFor="dest-title">Group Name</Label>
<Input
id="dest-title"
placeholder="e.g. dragonsstash db"
value={title}
onChange={(e) => setTitle(e.target.value)}
/>
<p className="text-xs text-muted-foreground">
This will be the name of the Telegram group. You can rename it later in Telegram.
</p>
</div>
</div>
)}
<DialogFooter>
<Button
variant="outline"
onClick={() => onOpenChange(false)}
disabled={isCreating}
>
Cancel
</Button>
<Button
onClick={onSubmit}
disabled={isPending || isCreating || !title.trim()}
>
{(isPending || isCreating) && (
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
)}
Create Group
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}

View File

@@ -4,14 +4,23 @@ import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import { PageHeader } from "@/components/shared/page-header"; import { PageHeader } from "@/components/shared/page-header";
import { AccountsTab } from "./accounts-tab"; import { AccountsTab } from "./accounts-tab";
import { ChannelsTab } from "./channels-tab"; import { ChannelsTab } from "./channels-tab";
import type { AccountRow, ChannelRow } from "@/lib/telegram/admin-queries"; import { WorkerStatusPanel } from "./worker-status-panel";
import type { AccountRow, ChannelRow, GlobalDestination } from "@/lib/telegram/admin-queries";
import type { IngestionAccountStatus } from "@/lib/telegram/types";
interface TelegramAdminProps { interface TelegramAdminProps {
accounts: AccountRow[]; accounts: AccountRow[];
channels: ChannelRow[]; channels: ChannelRow[];
ingestionStatus: IngestionAccountStatus[];
globalDestination: GlobalDestination;
} }
export function TelegramAdmin({ accounts, channels }: TelegramAdminProps) { export function TelegramAdmin({
accounts,
channels,
ingestionStatus,
globalDestination,
}: TelegramAdminProps) {
return ( return (
<div className="space-y-4"> <div className="space-y-4">
<PageHeader <PageHeader
@@ -19,6 +28,8 @@ export function TelegramAdmin({ accounts, channels }: TelegramAdminProps) {
description="Manage Telegram accounts, channels, and ingestion" description="Manage Telegram accounts, channels, and ingestion"
/> />
<WorkerStatusPanel initialStatus={ingestionStatus} />
<Tabs defaultValue="accounts" className="space-y-4"> <Tabs defaultValue="accounts" className="space-y-4">
<TabsList> <TabsList>
<TabsTrigger value="accounts"> <TabsTrigger value="accounts">
@@ -33,7 +44,7 @@ export function TelegramAdmin({ accounts, channels }: TelegramAdminProps) {
<AccountsTab accounts={accounts} /> <AccountsTab accounts={accounts} />
</TabsContent> </TabsContent>
<TabsContent value="channels"> <TabsContent value="channels">
<ChannelsTab channels={channels} /> <ChannelsTab channels={channels} globalDestination={globalDestination} />
</TabsContent> </TabsContent>
</Tabs> </Tabs>
</div> </div>

View File

@@ -0,0 +1,340 @@
"use client";
import { useEffect, useState, useCallback } from "react";
import {
Loader2,
CheckCircle2,
XCircle,
Clock,
Radio,
AlertTriangle,
RefreshCw,
} from "lucide-react";
import { Card, CardContent } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import type { IngestionAccountStatus } from "@/lib/telegram/types";
// Props: server-rendered status snapshot used until the first client poll.
interface WorkerStatusPanelProps {
  initialStatus: IngestionAccountStatus[];
}
// Maps an account's authState to a display label, tailwind text color, and
// an icon key ("clock" | "alert" | "check" | "x") resolved at render time.
// Unknown states fall back to PENDING where this table is consumed.
const AUTH_STATE_CONFIG: Record<
  string,
  { label: string; color: string; icon: string }
> = {
  PENDING: { label: "Pending", color: "text-yellow-500", icon: "clock" },
  AWAITING_CODE: {
    label: "Awaiting Code",
    color: "text-orange-500",
    icon: "alert",
  },
  AWAITING_PASSWORD: {
    label: "Awaiting Password",
    color: "text-orange-500",
    icon: "alert",
  },
  AUTHENTICATED: { label: "Connected", color: "text-emerald-500", icon: "check" },
  EXPIRED: { label: "Expired", color: "text-red-500", icon: "x" },
};
/**
 * Live status strip for the ingestion worker.
 *
 * Shows per-account auth state badges, then either the active run's
 * progress, an idle summary with a countdown to the next estimated run,
 * or an error row when the status endpoint is unreachable.
 *
 * Fix: the polling effect previously chose its next delay from the
 * `accounts` state captured in the effect closure (deps `[isRunning]`,
 * exhaustive-deps disabled), which was stale after the first fetch. The
 * delay is now derived from the freshly fetched payload.
 */
export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
  const [accounts, setAccounts] = useState(initialStatus);
  const [error, setError] = useState(false);
  const [nextRunCountdown, setNextRunCountdown] = useState<string | null>(null);
  // Find active run
  const activeRun = accounts.find((a) => a.currentRun);
  const isRunning = !!activeRun;
  // Poll /api/ingestion/status. Restarted when run state flips so cadence
  // changes take effect promptly.
  useEffect(() => {
    let timer: ReturnType<typeof setTimeout>;
    let mounted = true;
    const poll = async () => {
      // Slow cadence by default (also used on fetch errors); tighten to 2s
      // when the fresh payload shows an active run.
      let delay = 10_000;
      try {
        const res = await fetch("/api/ingestion/status");
        if (!res.ok) throw new Error("fetch failed");
        const data = await res.json();
        if (mounted) {
          const fresh: IngestionAccountStatus[] = data.accounts ?? [];
          setAccounts(fresh);
          setError(false);
          if (fresh.some((a) => a.currentRun)) delay = 2_000;
        }
      } catch {
        if (mounted) setError(true);
      }
      if (mounted) {
        timer = setTimeout(poll, delay);
      }
    };
    timer = setTimeout(poll, 2_000);
    return () => {
      mounted = false;
      clearTimeout(timer);
    };
  }, [isRunning]);
  // Countdown timer to next run
  useEffect(() => {
    if (isRunning) {
      setNextRunCountdown(null);
      return;
    }
    // Estimate next run based on last run finish time + interval (5 min + up to 5 min jitter)
    const lastFinished = accounts
      .filter((a) => a.lastRun?.finishedAt)
      .map((a) => new Date(a.lastRun!.finishedAt!).getTime())
      .sort((a, b) => b - a)[0];
    if (!lastFinished) {
      setNextRunCountdown(null);
      return;
    }
    const intervalMs = 5 * 60 * 1000; // 5 min base
    const estimatedNext = lastFinished + intervalMs;
    const tick = () => {
      const remaining = estimatedNext - Date.now();
      if (remaining <= 0) {
        setNextRunCountdown("any moment...");
      } else {
        const mins = Math.floor(remaining / 60_000);
        const secs = Math.floor((remaining % 60_000) / 1_000);
        setNextRunCountdown(
          mins > 0 ? `~${mins}m ${secs}s` : `~${secs}s`
        );
      }
    };
    tick();
    const interval = setInterval(tick, 1_000);
    return () => clearInterval(interval);
  }, [isRunning, accounts]);
  // Empty state: no accounts at all (and no fetch error to report).
  if (accounts.length === 0 && !error) {
    return (
      <Card>
        <CardContent className="flex items-center gap-3 py-4">
          <AlertTriangle className="h-5 w-5 text-yellow-500 shrink-0" />
          <div>
            <p className="text-sm font-medium">No accounts configured</p>
            <p className="text-xs text-muted-foreground">
              Add a Telegram account below to get started. You&apos;ll need your
              phone number and the API credentials in your .env.local file.
            </p>
          </div>
        </CardContent>
      </Card>
    );
  }
  return (
    <Card>
      <CardContent className="py-4 space-y-3">
        {/* Account status row */}
        <div className="flex items-center gap-4 flex-wrap">
          {accounts.map((account) => {
            const config = AUTH_STATE_CONFIG[account.authState] ?? AUTH_STATE_CONFIG.PENDING;
            return (
              <div key={account.id} className="flex items-center gap-2">
                {config.icon === "check" && (
                  <CheckCircle2 className={cn("h-4 w-4", config.color)} />
                )}
                {config.icon === "clock" && (
                  <Clock className={cn("h-4 w-4", config.color)} />
                )}
                {config.icon === "alert" && (
                  <AlertTriangle className={cn("h-4 w-4", config.color)} />
                )}
                {config.icon === "x" && (
                  <XCircle className={cn("h-4 w-4", config.color)} />
                )}
                <span className="text-sm font-medium">
                  {account.displayName || account.phone}
                </span>
                <Badge
                  variant="outline"
                  className={cn("text-[10px]", config.color)}
                >
                  {config.label}
                </Badge>
              </div>
            );
          })}
        </div>
        {/* Divider */}
        <div className="border-t" />
        {/* Worker activity */}
        {error ? (
          <div className="flex items-center gap-2 text-xs text-muted-foreground">
            <XCircle className="h-3.5 w-3.5" />
            <span>Could not reach worker status</span>
          </div>
        ) : isRunning && activeRun?.currentRun ? (
          <RunningStatus run={activeRun.currentRun} />
        ) : (
          <IdleStatus accounts={accounts} nextRunCountdown={nextRunCountdown} />
        )}
      </CardContent>
    </Card>
  );
}
function RunningStatus({
run,
}: {
run: NonNullable<IngestionAccountStatus["currentRun"]>;
}) {
return (
<div className="space-y-2">
<div className="flex items-center gap-2">
<Loader2 className="h-4 w-4 animate-spin text-primary shrink-0" />
<span className="text-sm font-medium text-primary truncate">
{run.currentActivity ?? "Working..."}
</span>
</div>
{/* Progress bar for downloads */}
{run.downloadPercent != null && run.downloadPercent > 0 && (
<div className="flex items-center gap-3 pl-6">
<div className="h-1.5 flex-1 max-w-[200px] rounded-full bg-primary/20">
<div
className="h-full rounded-full bg-primary transition-all duration-500"
style={{ width: `${Math.min(100, run.downloadPercent)}%` }}
/>
</div>
<span className="text-xs text-primary/70 tabular-nums">
{run.downloadPercent}%
</span>
</div>
)}
{/* Stats line */}
<div className="flex items-center gap-4 pl-6 text-xs text-muted-foreground">
{run.currentChannel && (
<span>
Channel: <span className="text-foreground">{run.currentChannel}</span>
</span>
)}
{run.totalFiles != null && run.currentFileNum != null && (
<span>
Archive{" "}
<span className="text-foreground tabular-nums">
{run.currentFileNum}/{run.totalFiles}
</span>
</span>
)}
{run.zipsIngested > 0 && (
<span>
<span className="text-foreground tabular-nums">{run.zipsIngested}</span> ingested
</span>
)}
{run.zipsDuplicate > 0 && (
<span>
<span className="text-foreground tabular-nums">{run.zipsDuplicate}</span> skipped
</span>
)}
</div>
</div>
);
}
function IdleStatus({
accounts,
nextRunCountdown,
}: {
accounts: IngestionAccountStatus[];
nextRunCountdown: string | null;
}) {
const lastRun = accounts
.filter((a) => a.lastRun)
.sort(
(a, b) =>
new Date(b.lastRun!.finishedAt ?? b.lastRun!.startedAt).getTime() -
new Date(a.lastRun!.finishedAt ?? a.lastRun!.startedAt).getTime()
)[0]?.lastRun;
const hasAuthenticated = accounts.some(
(a) => a.authState === "AUTHENTICATED"
);
return (
<div className="flex items-center justify-between gap-4">
<div className="flex items-center gap-2 min-w-0">
{lastRun ? (
<>
{lastRun.status === "FAILED" ? (
<XCircle className="h-3.5 w-3.5 text-red-500 shrink-0" />
) : (
<CheckCircle2 className="h-3.5 w-3.5 text-emerald-500 shrink-0" />
)}
<span className="text-xs text-muted-foreground truncate">
{lastRun.status === "FAILED"
? `Last sync failed ${getTimeAgo(lastRun.finishedAt ?? lastRun.startedAt)}`
: `Last sync ${getTimeAgo(lastRun.finishedAt ?? lastRun.startedAt)}${lastRun.zipsIngested} new, ${lastRun.zipsDuplicate} skipped, ${lastRun.messagesScanned} messages`}
</span>
</>
) : hasAuthenticated ? (
<>
<Clock className="h-3.5 w-3.5 text-muted-foreground shrink-0" />
<span className="text-xs text-muted-foreground">
Waiting for first sync...
</span>
</>
) : accounts.some((a) => a.authState === "PENDING") ? (
<>
<Clock className="h-3.5 w-3.5 text-yellow-500 shrink-0" />
<span className="text-xs text-muted-foreground">
Pending account detected worker will send an SMS code on the next cycle. Please wait...
</span>
</>
) : accounts.some(
(a) => a.authState === "AWAITING_CODE" || a.authState === "AWAITING_PASSWORD"
) ? (
<>
<AlertTriangle className="h-3.5 w-3.5 text-orange-500 shrink-0" />
<span className="text-xs text-muted-foreground">
Waiting for you to enter the auth code check the Accounts table below
</span>
</>
) : (
<>
<Radio className="h-3.5 w-3.5 text-muted-foreground shrink-0" />
<span className="text-xs text-muted-foreground">
Worker idle authenticate an account to start syncing
</span>
</>
)}
</div>
{nextRunCountdown && hasAuthenticated && (
<div className="flex items-center gap-1.5 shrink-0">
<RefreshCw className="h-3 w-3 text-muted-foreground" />
<span className="text-xs text-muted-foreground tabular-nums">
Next: {nextRunCountdown}
</span>
</div>
)}
</div>
);
}
/**
 * Human-friendly relative time for a parseable date string: "just now",
 * "Nm ago", "Nh ago", or "Nd ago" (all floored).
 */
function getTimeAgo(dateStr: string): string {
  const elapsedMs = Date.now() - new Date(dateStr).getTime();
  const totalMinutes = Math.floor(elapsedMs / 60_000);
  if (totalMinutes < 1) return "just now";
  const totalHours = Math.floor(totalMinutes / 60);
  if (totalHours < 1) return `${totalMinutes}m ago`;
  const totalDays = Math.floor(totalHours / 24);
  return totalDays < 1 ? `${totalHours}h ago` : `${totalDays}d ago`;
}

View File

@@ -258,6 +258,44 @@ export async function deleteChannel(id: string): Promise<ActionResult> {
} }
} }
/**
 * Admin action: flip a channel between SOURCE and DESTINATION roles.
 * Returns a not-found error for unknown ids; revalidates the admin page on
 * success.
 */
export async function setChannelType(
  id: string,
  type: "SOURCE" | "DESTINATION"
): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  // Confirm the channel exists before attempting the update.
  const channel = await prisma.telegramChannel.findUnique({ where: { id } });
  if (channel == null) {
    return { success: false, error: "Channel not found" };
  }
  try {
    await prisma.telegramChannel.update({ where: { id }, data: { type } });
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to update channel type" };
  }
}
/**
 * Admin action: ask the worker to re-sync channels. Delivery is via
 * Postgres NOTIFY on the 'channel_sync' channel; no payload data is needed.
 */
export async function triggerChannelSync(): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  const notifySql = `SELECT pg_notify('channel_sync', 'requested')`;
  try {
    // Signal the worker to do a channel sync via pg_notify
    await prisma.$queryRawUnsafe(notifySql);
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to trigger channel sync" };
  }
}
// ── Account-Channel link actions ── // ── Account-Channel link actions ──
export async function linkChannel( export async function linkChannel(
@@ -317,24 +355,42 @@ export async function triggerIngestion(
if (!admin.success) return admin; if (!admin.success) return admin;
try { try {
const res = await fetch( // Find eligible accounts
`${process.env.NEXT_PUBLIC_APP_URL || "http://localhost:3000"}/api/ingestion/trigger`, const where: { isActive: boolean; authState: "AUTHENTICATED"; id?: string } = {
{ isActive: true,
method: "POST", authState: "AUTHENTICATED",
headers: { };
"Content-Type": "application/json", if (accountId) where.id = accountId;
"X-API-Key": process.env.INGESTION_API_KEY || "",
},
body: JSON.stringify({ accountId }),
}
);
if (!res.ok) { const accounts = await prisma.telegramAccount.findMany({
const data = await res.json().catch(() => ({})); where,
return { select: { id: true },
success: false, });
error: (data as { error?: string }).error || "Failed to trigger ingestion",
}; if (accounts.length === 0) {
return { success: false, error: "No eligible accounts found" };
}
// Create ingestion runs — the worker picks these up
for (const account of accounts) {
const existing = await prisma.ingestionRun.findFirst({
where: { accountId: account.id, status: "RUNNING" },
});
if (!existing) {
await prisma.ingestionRun.create({
data: { accountId: account.id, status: "RUNNING" },
});
}
}
// pg_notify for immediate worker pickup
try {
await prisma.$queryRawUnsafe(
`SELECT pg_notify('ingestion_trigger', $1)`,
accounts.map((a) => a.id).join(",")
);
} catch {
// Best-effort
} }
revalidatePath(REVALIDATE_PATH); revalidatePath(REVALIDATE_PATH);
@@ -343,3 +399,227 @@ export async function triggerIngestion(
return { success: false, error: "Failed to trigger ingestion" }; return { success: false, error: "Failed to trigger ingestion" };
} }
} }
// ── Channel selection (from fetch results) ──
/**
 * Admin action: persist the channels selected from a fetch-channels result.
 *
 * For each selection, upserts the TelegramChannel row (refreshing title and
 * forum flag) and creates an idempotent READER link between the account and
 * the channel.
 *
 * Fix: already-linked detection previously relied only on the error message
 * containing "Unique constraint", which is fragile; it now also recognizes
 * Prisma's unique-violation error code P2002. The unused `linked` counter
 * was removed.
 */
export async function saveChannelSelections(
  accountId: string,
  channels: { telegramId: string; title: string; isForum: boolean }[]
): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  const existing = await prisma.telegramAccount.findUnique({
    where: { id: accountId },
  });
  if (!existing) return { success: false, error: "Account not found" };
  try {
    for (const ch of channels) {
      // Upsert the channel record so re-selecting a known channel just
      // refreshes its metadata.
      const channel = await prisma.telegramChannel.upsert({
        where: { telegramId: BigInt(ch.telegramId) },
        create: {
          telegramId: BigInt(ch.telegramId),
          title: ch.title,
          type: "SOURCE",
          isForum: ch.isForum,
        },
        update: {
          title: ch.title,
          isForum: ch.isForum,
        },
      });
      // Create READER link (idempotent): a unique violation means the
      // account is already linked, which is fine.
      try {
        await prisma.accountChannelMap.create({
          data: { accountId, channelId: channel.id, role: "READER" },
        });
      } catch (err: unknown) {
        if (!_isUniqueViolation(err)) throw err;
      }
    }
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to save channel selections" };
  }
}

/**
 * True when `err` is a unique-constraint violation: Prisma error code P2002,
 * with a message-text fallback for wrapped errors.
 */
function _isUniqueViolation(err: unknown): boolean {
  if (typeof err === "object" && err !== null && "code" in err) {
    if ((err as { code?: unknown }).code === "P2002") return true;
  }
  return err instanceof Error && err.message.includes("Unique constraint");
}
// ── Global destination channel ──
/**
 * Admin action: mark an existing channel as THE global destination.
 *
 * Steps (order matters — the setting should only point at a channel whose
 * type is already DESTINATION):
 *  1. flip the channel's type to DESTINATION;
 *  2. store its id under the 'destination_channel_id' global setting;
 *  3. ensure every active authenticated account has a WRITER link to it;
 *  4. best-effort NOTIFY so the worker generates an invite link.
 */
export async function setGlobalDestination(
  channelId: string
): Promise<ActionResult> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  const channel = await prisma.telegramChannel.findUnique({
    where: { id: channelId },
  });
  if (!channel) return { success: false, error: "Channel not found" };
  try {
    // Set the channel type to DESTINATION
    await prisma.telegramChannel.update({
      where: { id: channelId },
      data: { type: "DESTINATION" },
    });
    // Save as global destination
    await prisma.globalSetting.upsert({
      where: { key: "destination_channel_id" },
      create: { key: "destination_channel_id", value: channelId },
      update: { value: channelId },
    });
    // Auto-create WRITER links for all active authenticated accounts
    const accounts = await prisma.telegramAccount.findMany({
      where: { isActive: true, authState: "AUTHENTICATED" },
      select: { id: true },
    });
    for (const account of accounts) {
      try {
        await prisma.accountChannelMap.create({
          data: { accountId: account.id, channelId, role: "WRITER" },
        });
      } catch {
        // Already linked — ignore
      }
    }
    // Signal worker to generate invite link
    try {
      await prisma.$queryRawUnsafe(
        `SELECT pg_notify('generate_invite', $1)`,
        channelId
      );
    } catch {
      // Best-effort
    }
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: undefined };
  } catch {
    return { success: false, error: "Failed to set global destination" };
  }
}
/**
 * Admin action: register a Telegram channel (by raw Telegram id) as the
 * global destination, creating the DB row if needed.
 *
 * Same downstream steps as setGlobalDestination: store the id in the
 * 'destination_channel_id' global setting, WRITER-link all active
 * authenticated accounts, and best-effort NOTIFY the worker to generate an
 * invite link. Returns the DB id of the channel row.
 */
export async function createDestinationChannel(
  telegramId: string,
  title: string
): Promise<ActionResult<{ id: string }>> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  try {
    // Create the channel as DESTINATION (or retarget an existing row with
    // the same Telegram id).
    const channel = await prisma.telegramChannel.upsert({
      where: { telegramId: BigInt(telegramId) },
      create: {
        telegramId: BigInt(telegramId),
        title,
        type: "DESTINATION",
      },
      update: {
        title,
        type: "DESTINATION",
      },
    });
    // Set as global destination
    await prisma.globalSetting.upsert({
      where: { key: "destination_channel_id" },
      create: { key: "destination_channel_id", value: channel.id },
      update: { value: channel.id },
    });
    // Auto-create WRITER links for all active authenticated accounts
    const accounts = await prisma.telegramAccount.findMany({
      where: { isActive: true, authState: "AUTHENTICATED" },
      select: { id: true },
    });
    for (const account of accounts) {
      try {
        await prisma.accountChannelMap.create({
          data: { accountId: account.id, channelId: channel.id, role: "WRITER" },
        });
      } catch {
        // Already linked
      }
    }
    // Signal worker to generate invite link
    try {
      await prisma.$queryRawUnsafe(
        `SELECT pg_notify('generate_invite', $1)`,
        channel.id
      );
    } catch {
      // Best-effort
    }
    revalidatePath(REVALIDATE_PATH);
    return { success: true, data: { id: channel.id } };
  } catch (err: unknown) {
    // NOTE(review): message-text matching is fragile — Prisma code P2002
    // would be more robust; confirm the Prisma client error shape here.
    if (
      err instanceof Error &&
      err.message.includes("Unique constraint failed")
    ) {
      return { success: false, error: "A channel with this Telegram ID already exists" };
    }
    return { success: false, error: "Failed to create destination channel" };
  }
}
/**
 * Request the worker to create a new Telegram supergroup as the destination.
 *
 * Uses ChannelFetchRequest as a generic DB-mediated request row, signalled
 * via pg_notify on the 'create_destination' channel. Requires at least one
 * active, authenticated account (TDLib does the actual creation). Returns
 * the requestId so the UI can poll for completion.
 */
export async function createDestinationViaWorker(
  title: string
): Promise<ActionResult<{ requestId: string }>> {
  const admin = await requireAdmin();
  if (!admin.success) return admin;
  const trimmedTitle = title.trim();
  if (!trimmedTitle) return { success: false, error: "Title is required" };
  try {
    // Need at least one authenticated account for TDLib.
    const account = await prisma.telegramAccount.findFirst({
      where: { isActive: true, authState: "AUTHENTICATED" },
      select: { id: true },
    });
    if (account == null) {
      return { success: false, error: "At least one authenticated account is needed to create a Telegram group" };
    }
    // Track progress via a fetch-request row (reused as a generic worker request).
    const workerRequest = await prisma.channelFetchRequest.create({
      data: {
        accountId: account.id,
        status: "PENDING",
      },
    });
    // Signal worker via pg_notify with the request id and desired title.
    const payload = JSON.stringify({ requestId: workerRequest.id, title: trimmedTitle });
    await prisma.$queryRawUnsafe(
      `SELECT pg_notify('create_destination', $1)`,
      payload
    );
    return { success: true, data: { requestId: workerRequest.id } };
  } catch {
    return { success: false, error: "Failed to request destination creation" };
  }
}

View File

@@ -1,6 +1,7 @@
import { auth } from "@/lib/auth"; import { auth } from "@/lib/auth";
import { redirect } from "next/navigation"; import { redirect } from "next/navigation";
import { listAccounts, listChannels } from "@/lib/telegram/admin-queries"; import { listAccounts, listChannels, getGlobalDestination } from "@/lib/telegram/admin-queries";
import { getIngestionStatus } from "@/lib/telegram/queries";
import { TelegramAdmin } from "./_components/telegram-admin"; import { TelegramAdmin } from "./_components/telegram-admin";
export default async function TelegramPage() { export default async function TelegramPage() {
@@ -8,10 +9,19 @@ export default async function TelegramPage() {
if (!session?.user?.id) redirect("/login"); if (!session?.user?.id) redirect("/login");
if (session.user.role !== "ADMIN") redirect("/dashboard"); if (session.user.role !== "ADMIN") redirect("/dashboard");
const [accounts, channels] = await Promise.all([ const [accounts, channels, ingestionStatus, globalDestination] = await Promise.all([
listAccounts(), listAccounts(),
listChannels(), listChannels(),
getIngestionStatus(),
getGlobalDestination(),
]); ]);
return <TelegramAdmin accounts={accounts} channels={channels} />; return (
<TelegramAdmin
accounts={accounts}
channels={channels}
ingestionStatus={ingestionStatus}
globalDestination={globalDestination}
/>
);
} }

View File

@@ -0,0 +1,149 @@
import { NextResponse } from "next/server";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";
import { prisma } from "@/lib/prisma";
export const dynamic = "force-dynamic";
/**
 * POST: Create a channel fetch request for this account and signal the
 * worker via pg_notify to fetch channels from Telegram.
 *
 * Responses:
 *  - 202 { requestId } — new request created, or an existing one is still
 *    PENDING/IN_PROGRESS (deduped);
 *  - 200 { requestId } — a request COMPLETED within the last 30s, so its
 *    result is considered fresh enough to reuse;
 *  - 404 unknown account, 400 not authenticated, 500 on unexpected errors.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ accountId: string }> }
) {
  // NOTE(review): second arg `true` presumably requests elevated/admin
  // auth — confirm against api-auth.
  const authResult = await authenticateApiRequest(request, true);
  if ("error" in authResult) return authResult.error;
  const { accountId } = await params;
  try {
    // Verify account exists and is authenticated
    const account = await prisma.telegramAccount.findUnique({
      where: { id: accountId },
      select: { id: true, authState: true },
    });
    if (!account) {
      return NextResponse.json({ error: "Account not found" }, { status: 404 });
    }
    if (account.authState !== "AUTHENTICATED") {
      return NextResponse.json(
        { error: "Account must be authenticated to fetch channels" },
        { status: 400 }
      );
    }
    // Check for an existing recent request that's still pending/in-progress
    const existing = await prisma.channelFetchRequest.findFirst({
      where: {
        accountId,
        status: { in: ["PENDING", "IN_PROGRESS"] },
      },
    });
    if (existing) {
      return NextResponse.json({ requestId: existing.id }, { status: 202 });
    }
    // Also check for a recently completed request (within last 30 seconds)
    const recent = await prisma.channelFetchRequest.findFirst({
      where: {
        accountId,
        status: "COMPLETED",
        updatedAt: { gte: new Date(Date.now() - 30_000) },
      },
      orderBy: { createdAt: "desc" },
    });
    if (recent) {
      return NextResponse.json({ requestId: recent.id }, { status: 200 });
    }
    // Create a new fetch request
    const fetchRequest = await prisma.channelFetchRequest.create({
      data: { accountId, status: "PENDING" },
    });
    // Signal the worker via pg_notify
    try {
      await prisma.$queryRawUnsafe(
        `SELECT pg_notify('channel_fetch', $1)`,
        fetchRequest.id
      );
    } catch {
      // Best-effort — worker will also pick it up on next poll
    }
    return NextResponse.json({ requestId: fetchRequest.id }, { status: 202 });
  } catch (err) {
    console.error("fetch-channels POST error:", err);
    return NextResponse.json(
      { error: "Server error — try restarting the dev server if the schema changed" },
      { status: 500 }
    );
  }
}
/**
 * GET: Poll for the result of a channel fetch request.
 * Query param: ?requestId=xxx
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ accountId: string }> }
) {
  const auth = await authenticateApiRequest(request, true);
  if ("error" in auth) return auth.error;

  const { accountId } = await params;
  const requestId = new URL(request.url).searchParams.get("requestId");

  try {
    if (!requestId) {
      // No id given: report the newest completed fetch for this account.
      const latest = await prisma.channelFetchRequest.findFirst({
        where: { accountId, status: "COMPLETED" },
        orderBy: { createdAt: "desc" },
      });
      if (!latest) {
        return NextResponse.json(
          { status: "NOT_FOUND", channels: [] },
          { status: 200 }
        );
      }
      const channels = latest.resultJson ? JSON.parse(latest.resultJson) : [];
      return NextResponse.json({
        requestId: latest.id,
        status: latest.status,
        channels,
      });
    }

    const fetchRequest = await prisma.channelFetchRequest.findUnique({
      where: { id: requestId },
    });
    // Treat requests owned by other accounts as not found.
    if (!fetchRequest || fetchRequest.accountId !== accountId) {
      return NextResponse.json({ error: "Request not found" }, { status: 404 });
    }

    const channels =
      fetchRequest.status === "COMPLETED" && fetchRequest.resultJson
        ? JSON.parse(fetchRequest.resultJson)
        : [];
    return NextResponse.json({
      requestId: fetchRequest.id,
      status: fetchRequest.status,
      error: fetchRequest.error,
      channels,
    });
  } catch (err) {
    console.error("fetch-channels GET error:", err);
    return NextResponse.json(
      { error: "Server error — try restarting the dev server if the schema changed" },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,41 @@
import { NextResponse } from "next/server";
import { authenticateApiRequest } from "@/lib/telegram/api-auth";
import { prisma } from "@/lib/prisma";
export const dynamic = "force-dynamic";
/**
 * GET: Poll for the result of a worker request (ChannelFetchRequest used as generic request).
 * Query param: ?requestId=xxx
 *
 * Returns 400 without a requestId, 404 when unknown, otherwise the request's
 * status plus the parsed result payload once COMPLETED.
 */
export async function GET(request: Request) {
  const authResult = await authenticateApiRequest(request, true);
  if ("error" in authResult) return authResult.error;

  const url = new URL(request.url);
  const requestId = url.searchParams.get("requestId");
  if (!requestId) {
    return NextResponse.json(
      { error: "requestId is required" },
      { status: 400 }
    );
  }

  try {
    const fetchRequest = await prisma.channelFetchRequest.findUnique({
      where: { id: requestId },
    });
    if (!fetchRequest) {
      return NextResponse.json({ error: "Request not found" }, { status: 404 });
    }
    return NextResponse.json({
      requestId: fetchRequest.id,
      status: fetchRequest.status,
      error: fetchRequest.error,
      result: fetchRequest.status === "COMPLETED" && fetchRequest.resultJson
        ? JSON.parse(fetchRequest.resultJson)
        : null,
    });
  } catch (err) {
    // Mirror the error handling of the sibling worker-request routes so a DB
    // failure or malformed resultJson yields a controlled 500 instead of an
    // unhandled rejection.
    console.error("request-status GET error:", err);
    return NextResponse.json(
      { error: "Server error — try restarting the dev server if the schema changed" },
      { status: 500 }
    );
  }
}

View File

@@ -80,6 +80,42 @@ export type AccountChannelLinkRow = Awaited<
ReturnType<typeof listAccountChannelLinks> ReturnType<typeof listAccountChannelLinks>
>[number]; >[number];
// ── Global destination ──

/**
 * Resolve the globally-configured destination channel, or null when no
 * destination is configured, the channel row is missing, or the query fails
 * (e.g. stale Prisma client after a schema change).
 *
 * @returns Plain-object summary of the destination channel (telegramId
 *          stringified for JSON safety) with its invite link, or null.
 */
export async function getGlobalDestination() {
  try {
    const setting = await prisma.globalSetting.findUnique({
      where: { key: "destination_channel_id" },
    });
    if (!setting) return null;

    // The channel record and the invite-link setting are independent lookups —
    // run them in parallel instead of sequentially.
    const [channel, inviteSetting] = await Promise.all([
      prisma.telegramChannel.findUnique({
        where: { id: setting.value },
        select: { id: true, title: true, telegramId: true, isActive: true },
      }),
      prisma.globalSetting.findUnique({
        where: { key: "destination_invite_link" },
      }),
    ]);
    if (!channel) return null;

    return {
      id: channel.id,
      title: channel.title,
      telegramId: channel.telegramId.toString(),
      isActive: channel.isActive,
      inviteLink: inviteSetting?.value ?? null,
    };
  } catch (error) {
    console.error("Failed to fetch global destination (restart dev server if schema changed):", error);
    return null;
  }
}
export type GlobalDestination = Awaited<ReturnType<typeof getGlobalDestination>>;
export async function getUnlinkedChannels(accountId: string) { export async function getUnlinkedChannels(accountId: string) {
const linked = await prisma.accountChannelMap.findMany({ const linked = await prisma.accountChannelMap.findMany({
where: { accountId }, where: { accountId },

25
telegram_test.html Normal file

File diff suppressed because one or more lines are too long

View File

@@ -57,17 +57,19 @@ export function groupArchiveSets(messages: TelegramMessage[]): ArchiveSet[] {
// Check if any single entry is the "final part" of a legacy split // Check if any single entry is the "final part" of a legacy split
const allEntries = [...multipartEntries, ...singleEntries]; const allEntries = [...multipartEntries, ...singleEntries];
// Check time span — skip if parts span too long // Check time span — skip if parts span too long (0 = no limit)
const dates = allEntries.map((e) => e.msg.date.getTime()); if (config.multipartTimeoutHours > 0) {
const span = Math.max(...dates) - Math.min(...dates); const dates = allEntries.map((e) => e.msg.date.getTime());
const maxSpanMs = config.multipartTimeoutHours * 60 * 60 * 1000; const span = Math.max(...dates) - Math.min(...dates);
const maxSpanMs = config.multipartTimeoutHours * 60 * 60 * 1000;
if (span > maxSpanMs) { if (span > maxSpanMs) {
log.warn( log.warn(
{ baseName, format, span: span / 3600000 }, { baseName, format, span: span / 3600000 },
"Multipart set spans too long, skipping" "Multipart set spans too long, skipping"
); );
continue; continue;
}
} }
// Sort by part number (singles get a very high number so they come last — they're the final part) // Sort by part number (singles get a very high number so they come last — they're the final part)

View File

@@ -46,3 +46,35 @@ export async function byteLevelSplit(filePath: string): Promise<string[]> {
log.info({ filePath, parts: parts.length }, "File split complete"); log.info({ filePath, parts: parts.length }, "File split complete");
return parts; return parts;
} }
/**
 * Concatenate multiple files into a single output file by streaming
 * each input sequentially. Used for repacking multipart archives
 * that have oversized parts (>2GB) before re-splitting.
 *
 * @param inputPaths Ordered part files to concatenate.
 * @param outputPath Destination file (created/overwritten).
 * @throws Propagates any read/write error after destroying the output
 *         stream; a partial output file may remain for the caller to clean up.
 */
export async function concatenateFiles(
  inputPaths: string[],
  outputPath: string
): Promise<void> {
  const out = createWriteStream(outputPath);
  try {
    for (let i = 0; i < inputPaths.length; i++) {
      log.info(
        { part: i + 1, total: inputPaths.length, file: path.basename(inputPaths[i]) },
        "Concatenating part"
      );
      // end: false keeps the shared output stream open between parts.
      await pipeline(createReadStream(inputPaths[i]), out, { end: false });
    }
    // Flush and close the output stream. Register the error handler BEFORE
    // calling end() so a late write/close error cannot fire unheard.
    await new Promise<void>((resolve, reject) => {
      out.on("error", reject);
      out.end(() => resolve());
    });
  } catch (err) {
    // Release the file descriptor on failure instead of leaking the stream.
    out.destroy();
    throw err;
  }
  const stats = await stat(outputPath);
  log.info(
    { outputPath, totalBytes: stats.size, parts: inputPaths.length },
    "Concatenation complete"
  );
}

View File

@@ -1,5 +1,7 @@
import yauzl from "yauzl"; import yauzl from "yauzl";
import { open as fsOpen, stat as fsStat } from "fs/promises";
import path from "path"; import path from "path";
import { Readable } from "stream";
import { childLogger } from "../util/logger.js"; import { childLogger } from "../util/logger.js";
const log = childLogger("zip-reader"); const log = childLogger("zip-reader");
@@ -15,20 +17,28 @@ export interface FileEntry {
/** /**
* Read the central directory of a ZIP file without extracting any contents. * Read the central directory of a ZIP file without extracting any contents.
* For multipart ZIPs, pass the paths sorted by part order. * For multipart ZIPs (.zip.001, .zip.002 etc.), uses a custom random-access
* We attempt to read from the last part first (central directory is at the end). * reader that spans all parts seamlessly so yauzl can find the central
* directory at the end of the combined data.
*/ */
export async function readZipCentralDirectory( export async function readZipCentralDirectory(
filePaths: string[] filePaths: string[]
): Promise<FileEntry[]> { ): Promise<FileEntry[]> {
// The central directory lives at the end of the last file if (filePaths.length === 1) {
const targetFile = filePaths[filePaths.length - 1]; return readSingleZip(filePaths[0]);
}
return new Promise((resolve, reject) => { // Multipart: use a spanning random-access reader
return readMultipartZip(filePaths);
}
/** Read a single (non-split) ZIP file. */
function readSingleZip(targetFile: string): Promise<FileEntry[]> {
return new Promise((resolve) => {
yauzl.open(targetFile, { lazyEntries: true, autoClose: true }, (err, zipFile) => { yauzl.open(targetFile, { lazyEntries: true, autoClose: true }, (err, zipFile) => {
if (err) { if (err) {
log.warn({ err, file: targetFile }, "Failed to open ZIP for reading"); log.warn({ err, file: targetFile }, "Failed to open ZIP for reading");
resolve([]); // Fallback: return empty on error resolve([]);
return; return;
} }
@@ -36,13 +46,12 @@ export async function readZipCentralDirectory(
zipFile.readEntry(); zipFile.readEntry();
zipFile.on("entry", (entry: yauzl.Entry) => { zipFile.on("entry", (entry: yauzl.Entry) => {
// Skip directories
if (!entry.fileName.endsWith("/")) { if (!entry.fileName.endsWith("/")) {
const ext = path.extname(entry.fileName).toLowerCase(); const ext = path.extname(entry.fileName).toLowerCase();
entries.push({ entries.push({
path: entry.fileName, path: entry.fileName,
fileName: path.basename(entry.fileName), fileName: path.basename(entry.fileName),
extension: ext ? ext.slice(1) : null, // Remove leading dot extension: ext ? ext.slice(1) : null,
compressedSize: BigInt(entry.compressedSize), compressedSize: BigInt(entry.compressedSize),
uncompressedSize: BigInt(entry.uncompressedSize), uncompressedSize: BigInt(entry.uncompressedSize),
crc32: entry.crc32 !== 0 ? entry.crc32.toString(16).padStart(8, "0") : null, crc32: entry.crc32 !== 0 ? entry.crc32.toString(16).padStart(8, "0") : null,
@@ -54,8 +63,144 @@ export async function readZipCentralDirectory(
zipFile.on("end", () => resolve(entries)); zipFile.on("end", () => resolve(entries));
zipFile.on("error", (error) => { zipFile.on("error", (error) => {
log.warn({ error, file: targetFile }, "Error reading ZIP entries"); log.warn({ error, file: targetFile }, "Error reading ZIP entries");
resolve(entries); // Return whatever we got resolve(entries);
}); });
}); });
}); });
} }
/**
 * Read a multipart split ZIP using yauzl's RandomAccessReader API.
 * A virtual "file" spanning every part lets yauzl seek freely across
 * the whole archive to locate the central directory at its end.
 */
async function readMultipartZip(filePaths: string[]): Promise<FileEntry[]> {
  // Stat every part up front so the spanning reader knows the layout.
  const partSizes: number[] = [];
  for (const fp of filePaths) {
    partSizes.push((await fsStat(fp)).size);
  }
  const totalSize = partSizes.reduce((sum, size) => sum + size, 0);

  log.debug(
    { parts: filePaths.length, totalSize },
    "Reading multipart ZIP via spanning reader"
  );

  return new Promise((resolve) => {
    const reader = createMultiPartReader(filePaths, partSizes);
    const options = { lazyEntries: true, autoClose: true };

    yauzl.fromRandomAccessReader(reader, totalSize, options, (err, zipFile) => {
      if (err) {
        log.warn({ err }, "Failed to open multipart ZIP for reading");
        reader.close(() => {});
        resolve([]);
        return;
      }

      const entries: FileEntry[] = [];

      zipFile.on("entry", (entry: yauzl.Entry) => {
        // Directory entries end with "/" and carry no file data.
        if (!entry.fileName.endsWith("/")) {
          const ext = path.extname(entry.fileName).toLowerCase();
          entries.push({
            path: entry.fileName,
            fileName: path.basename(entry.fileName),
            extension: ext ? ext.slice(1) : null,
            compressedSize: BigInt(entry.compressedSize),
            uncompressedSize: BigInt(entry.uncompressedSize),
            crc32: entry.crc32 !== 0 ? entry.crc32.toString(16).padStart(8, "0") : null,
          });
        }
        zipFile.readEntry();
      });
      zipFile.on("end", () => {
        log.info({ entries: entries.length }, "Multipart ZIP entries read");
        resolve(entries);
      });
      zipFile.on("error", (error) => {
        log.warn({ error }, "Error reading multipart ZIP entries");
        resolve(entries);
      });

      // lazyEntries requires an explicit kick-off read.
      zipFile.readEntry();
    });
  });
}
/**
 * Create a yauzl RandomAccessReader that reads across multiple split part files.
 * Maps a global offset to the correct part file and local offset.
 *
 * Instantiates yauzl.RandomAccessReader at runtime (its constructor +
 * prototype is defined at runtime, not as a TS class) and overrides
 * _readStreamForRange, the hook yauzl uses for all reads.
 */
function createMultiPartReader(
  filePaths: string[],
  partSizes: number[]
): yauzl.RandomAccessReader {
  // Cumulative offset table: partOffsets[i] = global offset where part i starts.
  const partOffsets: number[] = [];
  let offset = 0;
  for (const size of partSizes) {
    partOffsets.push(offset);
    offset += size;
  }

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const reader = new (yauzl.RandomAccessReader as any)() as yauzl.RandomAccessReader;

  // Override _readStreamForRange — yauzl calls this to read [start, end) bytes.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  (reader as any)._readStreamForRange = function (start: number, end: number): Readable {
    const readable = new Readable({ read() {} });
    readRange(start, end, readable).catch((err) => {
      readable.destroy(err);
    });
    return readable;
  };

  async function readRange(start: number, end: number, readable: Readable): Promise<void> {
    let remaining = end - start;
    let globalOffset = start;
    while (remaining > 0) {
      // Find which part this global offset falls in.
      let partIdx = partOffsets.length - 1;
      for (let i = 0; i < partOffsets.length; i++) {
        if (i + 1 < partOffsets.length && globalOffset < partOffsets[i + 1]) {
          partIdx = i;
          break;
        }
      }
      const localOffset = globalOffset - partOffsets[partIdx];
      const partRemaining = partSizes[partIdx] - localOffset;
      const toRead = Math.min(remaining, partRemaining);

      const fh = await fsOpen(filePaths[partIdx], "r");
      try {
        const buf = Buffer.alloc(toRead);
        const { bytesRead } = await fh.read(buf, 0, toRead, localOffset);
        if (bytesRead === 0) {
          // Part is shorter than its stat'd size (truncated/changed on disk).
          // Without this guard the loop would spin forever at EOF.
          throw new Error(
            `Unexpected EOF in ${filePaths[partIdx]} at offset ${localOffset}`
          );
        }
        readable.push(buf.subarray(0, bytesRead));
        remaining -= bytesRead;
        globalOffset += bytesRead;
      } finally {
        await fh.close();
      }
    }
    readable.push(null); // Signal end of stream
  }

  return reader;
}

View File

@@ -1,5 +1,5 @@
import { db } from "./client.js"; import { db } from "./client.js";
import type { ArchiveType } from "@prisma/client"; import type { ArchiveType, FetchStatus } from "@prisma/client";
export async function getActiveAccounts() { export async function getActiveAccounts() {
return db.telegramAccount.findMany({ return db.telegramAccount.findMany({
@@ -7,6 +7,17 @@ export async function getActiveAccounts() {
}); });
} }
/** Accounts that are active but still awaiting Telegram authentication. */
export async function getPendingAccounts() {
  return db.telegramAccount.findMany({
    where: { authState: "PENDING", isActive: true },
  });
}
/** True when at least one Telegram channel row exists. */
export async function hasAnyChannels(): Promise<boolean> {
  return (await db.telegramChannel.count()) > 0;
}
export async function getSourceChannelMappings(accountId: string) { export async function getSourceChannelMappings(accountId: string) {
return db.accountChannelMap.findMany({ return db.accountChannelMap.findMany({
where: { where: {
@@ -18,26 +29,66 @@ export async function getSourceChannelMappings(accountId: string) {
}); });
} }
export async function getDestinationChannel(accountId: string) { // ── Global destination channel ──
const mapping = await db.accountChannelMap.findFirst({
where: { export async function getGlobalDestinationChannel() {
accountId, const setting = await db.globalSetting.findUnique({
role: "WRITER", where: { key: "destination_channel_id" },
channel: { type: "DESTINATION", isActive: true }, });
}, if (!setting) return null;
include: { channel: true }, return db.telegramChannel.findFirst({
where: { id: setting.value, type: "DESTINATION", isActive: true },
});
}
/** Read a single global setting value, or null when the key is unset. */
export async function getGlobalSetting(key: string): Promise<string | null> {
  const row = await db.globalSetting.findUnique({ where: { key } });
  if (!row) return null;
  return row.value;
}
export async function setGlobalSetting(key: string, value: string) {
return db.globalSetting.upsert({
where: { key },
create: { key, value },
update: { value },
}); });
return mapping?.channel ?? null;
} }
export async function packageExistsByHash(contentHash: string) { export async function packageExistsByHash(contentHash: string) {
const pkg = await db.package.findUnique({ const pkg = await db.package.findFirst({
where: { contentHash }, where: { contentHash, destMessageId: { not: null } },
select: { id: true }, select: { id: true },
}); });
return pkg !== null; return pkg !== null;
} }
/**
 * Check if a package already exists for a given source message ID
 * AND was successfully uploaded to the destination (destMessageId is set).
 * Used as an early skip before downloading.
 */
export async function packageExistsBySourceMessage(
  sourceChannelId: string,
  sourceMessageId: bigint
): Promise<boolean> {
  const match = await db.package.findFirst({
    select: { id: true },
    where: {
      sourceChannelId,
      sourceMessageId,
      destMessageId: { not: null },
    },
  });
  return match !== null;
}
/**
 * Remove leftover Package rows sharing this content hash whose upload never
 * finished (destMessageId still null). Run before inserting a new complete
 * record so the unique hash constraint is not violated.
 */
export async function deleteOrphanedPackageByHash(contentHash: string): Promise<void> {
  await db.package.deleteMany({
    where: { destMessageId: null, contentHash },
  });
}
export interface CreatePackageInput { export interface CreatePackageInput {
contentHash: string; contentHash: string;
fileName: string; fileName: string;
@@ -228,6 +279,57 @@ export async function getAccountAuthCode(accountId: string) {
return account; return account;
} }
// ── Channel sync (auto-discovery from Telegram) ──

export interface UpsertChannelInput {
  telegramId: bigint;
  title: string;
  type: "SOURCE" | "DESTINATION";
  isForum: boolean;
}

/**
 * Upsert a channel keyed by telegramId and return the record.
 * An existing row keeps its type; only title and forum status are refreshed.
 */
export async function upsertChannel(input: UpsertChannelInput) {
  const { telegramId, title, type, isForum } = input;
  return db.telegramChannel.upsert({
    where: { telegramId },
    create: { telegramId, title, type, isForum },
    update: { title, isForum },
  });
}
/**
 * Link an account to a channel if not already linked.
 * Idempotent: a duplicate insert hits the unique constraint and returns null.
 */
export async function ensureAccountChannelLink(
  accountId: string,
  channelId: string,
  role: "READER" | "WRITER"
) {
  try {
    return await db.accountChannelMap.create({
      data: { accountId, channelId, role },
    });
  } catch (err: unknown) {
    // Already linked — swallow the unique-constraint violation. Prefer
    // Prisma's stable error code (P2002) over matching message text, which
    // varies across Prisma versions; keep the substring check as a fallback.
    const code = (err as { code?: unknown } | null)?.code;
    if (code === "P2002") {
      return null;
    }
    if (err instanceof Error && err.message.includes("Unique constraint")) {
      return null;
    }
    throw err;
  }
}
// ── Forum / Topic progress ── // ── Forum / Topic progress ──
export async function setChannelForum(channelId: string, isForum: boolean) { export async function setChannelForum(channelId: string, isForum: boolean) {
@@ -268,3 +370,50 @@ export async function upsertTopicProgress(
}, },
}); });
} }
// ── Channel fetch requests (DB-mediated communication with web app) ──

/** Load a fetch request together with its owning account, or null. */
export async function getChannelFetchRequest(requestId: string) {
  return db.channelFetchRequest.findUnique({
    include: { account: true },
    where: { id: requestId },
  });
}
/** Update a fetch request's status, optionally attaching a result or error. */
export async function updateFetchRequestStatus(
  requestId: string,
  status: FetchStatus,
  extra?: { resultJson?: string; error?: string }
) {
  // undefined fields are left untouched by Prisma's update.
  const data = {
    status,
    resultJson: extra?.resultJson ?? undefined,
    error: extra?.error ?? undefined,
  };
  return db.channelFetchRequest.update({ where: { id: requestId }, data });
}
/** Telegram IDs (stringified) of every channel already linked to the account. */
export async function getAccountLinkedChannelIds(accountId: string): Promise<Set<string>> {
  const links = await db.accountChannelMap.findMany({
    where: { accountId },
    select: { channel: { select: { telegramId: true } } },
  });
  const ids = links.map((link) => link.channel.telegramId.toString());
  return new Set(ids);
}
/** Map of telegramId (string) → internal channel id for all known channels. */
export async function getExistingChannelsByTelegramId(): Promise<Map<string, string>> {
  const channels = await db.telegramChannel.findMany({
    select: { id: true, telegramId: true },
  });
  const pairs = channels.map((ch): [string, string] => [
    ch.telegramId.toString(),
    ch.id,
  ]);
  return new Map(pairs);
}
/** Fetch a single Telegram account row by primary key, or null. */
export async function getAccountById(accountId: string) {
  const where = { id: accountId };
  return db.telegramAccount.findUnique({ where });
}

View File

@@ -0,0 +1,206 @@
import type pg from "pg";
import { pool } from "./db/client.js";
import { childLogger } from "./util/logger.js";
import { withTdlibMutex } from "./util/mutex.js";
import { processFetchRequest } from "./worker.js";
import { generateInviteLink, createSupergroup } from "./tdlib/chats.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import {
getGlobalDestinationChannel,
getGlobalSetting,
setGlobalSetting,
getActiveAccounts,
upsertChannel,
ensureAccountChannelLink,
} from "./db/queries.js";
const log = childLogger("fetch-listener");
let pgClient: pg.PoolClient | null = null;
/**
 * Start listening for pg_notify signals from the web app.
 *
 * Channels:
 * - `channel_fetch` — payload = requestId → fetch channels for an account
 * - `generate_invite` — payload = channelId → generate invite link for destination
 * - `create_destination` — payload = JSON { requestId, title } → create supergroup via TDLib
 */
export async function startFetchListener(): Promise<void> {
  pgClient = await pool.connect();
  await pgClient.query("LISTEN channel_fetch");
  await pgClient.query("LISTEN generate_invite");
  await pgClient.query("LISTEN create_destination");

  // Without an error handler a dropped connection emits an unhandled 'error'
  // event and crashes the process. NOTE(review): LISTEN subscriptions are not
  // re-established after a connection loss — confirm whether a reconnect
  // strategy is needed, or whether the worker's polling fallback suffices.
  pgClient.on("error", (err) => {
    log.error({ err }, "Fetch listener connection error");
  });

  pgClient.on("notification", (msg) => {
    if (msg.channel === "channel_fetch" && msg.payload) {
      handleChannelFetch(msg.payload);
    } else if (msg.channel === "generate_invite" && msg.payload) {
      handleGenerateInvite(msg.payload);
    } else if (msg.channel === "create_destination" && msg.payload) {
      handleCreateDestination(msg.payload);
    }
  });

  log.info("Fetch listener started (channel_fetch, generate_invite, create_destination)");
}
export function stopFetchListener(): void {
if (pgClient) {
pgClient.release();
pgClient = null;
}
log.info("Fetch listener stopped");
}
// ── Channel fetch handler ──

// All notification handlers append to this promise chain so work runs
// strictly one at a time.
let fetchQueue: Promise<void> = Promise.resolve();

function handleChannelFetch(requestId: string): void {
  fetchQueue = fetchQueue.then(async () => {
    try {
      const run = () => processFetchRequest(requestId);
      await withTdlibMutex("fetch-channels", run);
    } catch (err) {
      log.error({ err, requestId }, "Failed to process fetch request");
    }
  });
}
// ── Invite link generation handler ──

function handleGenerateInvite(channelId: string): void {
  fetchQueue = fetchQueue.then(async () => {
    try {
      await withTdlibMutex("generate-invite", async () => {
        // Only act when the notified channel is still the configured destination.
        const destChannel = await getGlobalDestinationChannel();
        if (!destChannel || destChannel.id !== channelId) {
          log.warn({ channelId }, "Destination channel mismatch, skipping invite generation");
          return;
        }

        // Any authenticated account can mint the link; take the first one.
        const accounts = await getActiveAccounts();
        if (accounts.length === 0) {
          log.warn("No authenticated accounts to generate invite link");
          return;
        }
        const [account] = accounts;

        const client = await createTdlibClient({ id: account.id, phone: account.phone });
        try {
          const link = await generateInviteLink(client, destChannel.telegramId);
          await setGlobalSetting("destination_invite_link", link);
          log.info({ link }, "Invite link generated and saved");
        } finally {
          await closeTdlibClient(client);
        }
      });
    } catch (err) {
      log.error({ err, channelId }, "Failed to generate invite link");
    }
  });
}
// ── Create destination supergroup handler ──

// Creates a new supergroup via TDLib, records it as the global DESTINATION
// channel, generates an invite link, links all active accounts as WRITERs,
// and reports progress through the ChannelFetchRequest row named in the
// payload. Runs on the shared fetchQueue so it never overlaps other TDLib work.
// On any failure the request row is marked FAILED (best-effort).
function handleCreateDestination(payload: string): void {
  fetchQueue = fetchQueue.then(async () => {
    // Captured before the mutex body so the catch block can mark the
    // request FAILED even when JSON parsing succeeded but later steps threw.
    let requestId: string | undefined;
    try {
      const parsed = JSON.parse(payload) as { requestId: string; title: string };
      requestId = parsed.requestId;
      await withTdlibMutex("create-destination", async () => {
        // Dynamic import avoids a static circular dependency on the db client.
        const { db } = await import("./db/client.js");
        // Mark the request as in-progress
        await db.channelFetchRequest.update({
          where: { id: parsed.requestId },
          data: { status: "IN_PROGRESS" },
        });
        // Use the first available authenticated account
        const accounts = await getActiveAccounts();
        if (accounts.length === 0) {
          throw new Error("No authenticated accounts available to create the group");
        }
        const account = accounts[0];
        const client = await createTdlibClient({ id: account.id, phone: account.phone });
        try {
          // Create the supergroup via TDLib
          const result = await createSupergroup(client, parsed.title);
          log.info({ chatId: result.chatId.toString(), title: result.title }, "Supergroup created");
          // Upsert it as a DESTINATION channel in the DB
          const channel = await upsertChannel({
            telegramId: result.chatId,
            title: result.title,
            type: "DESTINATION",
            isForum: false,
          });
          // Set as global destination
          await setGlobalSetting("destination_channel_id", channel.id);
          // Generate an invite link
          const link = await generateInviteLink(client, result.chatId);
          await setGlobalSetting("destination_invite_link", link);
          log.info({ link }, "Invite link generated for new destination");
          // Link all authenticated accounts as WRITER
          // NOTE(review): only the creating account has actually joined the
          // new group at this point — confirm the other accounts join via the
          // invite link before they attempt to write.
          for (const acc of accounts) {
            try {
              await ensureAccountChannelLink(acc.id, channel.id, "WRITER");
            } catch {
              // Already linked
            }
          }
          // Mark fetch request as completed with the channel info
          await db.channelFetchRequest.update({
            where: { id: parsed.requestId },
            data: {
              status: "COMPLETED",
              resultJson: JSON.stringify({
                channelId: channel.id,
                telegramId: result.chatId.toString(),
                title: result.title,
                inviteLink: link,
              }),
            },
          });
          log.info(
            { channelId: channel.id, telegramId: result.chatId.toString() },
            "Destination channel created and configured"
          );
        } finally {
          // Always tear the TDLib client down, even when a step above threw.
          await closeTdlibClient(client);
        }
      });
    } catch (err) {
      log.error({ err, payload }, "Failed to create destination channel");
      if (requestId) {
        try {
          const { db } = await import("./db/client.js");
          await db.channelFetchRequest.update({
            where: { id: requestId },
            data: {
              status: "FAILED",
              error: err instanceof Error ? err.message : String(err),
            },
          });
        } catch {
          // Best-effort
        }
      }
    }
  });
}

View File

@@ -4,6 +4,7 @@ import { logger } from "./util/logger.js";
import { markStaleRunsAsFailed } from "./db/queries.js"; import { markStaleRunsAsFailed } from "./db/queries.js";
import { cleanupTempDir } from "./worker.js"; import { cleanupTempDir } from "./worker.js";
import { startScheduler, stopScheduler } from "./scheduler.js"; import { startScheduler, stopScheduler } from "./scheduler.js";
import { startFetchListener, stopFetchListener } from "./fetch-listener.js";
import { db, pool } from "./db/client.js"; import { db, pool } from "./db/client.js";
const log = logger.child({ module: "main" }); const log = logger.child({ module: "main" });
@@ -20,6 +21,9 @@ async function main(): Promise<void> {
await cleanupTempDir(); await cleanupTempDir();
await markStaleRunsAsFailed(); await markStaleRunsAsFailed();
// Start the fetch listener (pg_notify for on-demand channel fetching)
await startFetchListener();
// Start the scheduler // Start the scheduler
await startScheduler(); await startScheduler();
} }
@@ -28,6 +32,7 @@ async function main(): Promise<void> {
function shutdown(signal: string): void { function shutdown(signal: string): void {
log.info({ signal }, "Shutdown signal received"); log.info({ signal }, "Shutdown signal received");
stopScheduler(); stopScheduler();
stopFetchListener();
// Close DB connections // Close DB connections
Promise.all([db.$disconnect(), pool.end()]) Promise.all([db.$disconnect(), pool.end()])

View File

@@ -1,15 +1,22 @@
import { config } from "./util/config.js"; import { config } from "./util/config.js";
import { childLogger } from "./util/logger.js"; import { childLogger } from "./util/logger.js";
import { getActiveAccounts } from "./db/queries.js"; import { withTdlibMutex } from "./util/mutex.js";
import { runWorkerForAccount } from "./worker.js"; import { getActiveAccounts, getPendingAccounts } from "./db/queries.js";
import { runWorkerForAccount, authenticateAccount } from "./worker.js";
const log = childLogger("scheduler"); const log = childLogger("scheduler");
let running = false; let running = false;
let timer: ReturnType<typeof setTimeout> | null = null; let timer: ReturnType<typeof setTimeout> | null = null;
let cycleCount = 0;
/** /**
* Run one ingestion cycle: process all active, authenticated accounts sequentially. * Run one ingestion cycle:
* 1. Authenticate any PENDING accounts (triggers SMS code flow + auto-fetch channels)
* 2. Process all active AUTHENTICATED accounts for ingestion
*
* All TDLib operations are wrapped in the mutex to ensure only one client
* runs at a time (also shared with the fetch listener for on-demand requests).
*/ */
async function runCycle(): Promise<void> { async function runCycle(): Promise<void> {
if (running) { if (running) {
@@ -18,20 +25,38 @@ async function runCycle(): Promise<void> {
} }
running = true; running = true;
log.info("Starting ingestion cycle"); cycleCount++;
log.info({ cycle: cycleCount }, "Starting ingestion cycle");
try { try {
// ── Phase 1: Authenticate pending accounts ──
const pendingAccounts = await getPendingAccounts();
if (pendingAccounts.length > 0) {
log.info(
{ count: pendingAccounts.length },
"Found pending accounts, starting authentication"
);
for (const account of pendingAccounts) {
await withTdlibMutex(`auth:${account.phone}`, () =>
authenticateAccount(account)
);
}
}
// ── Phase 2: Ingest for authenticated accounts ──
const accounts = await getActiveAccounts(); const accounts = await getActiveAccounts();
if (accounts.length === 0) { if (accounts.length === 0) {
log.info("No active authenticated accounts, nothing to do"); log.info("No active authenticated accounts, nothing to ingest");
return; return;
} }
log.info({ accountCount: accounts.length }, "Processing accounts"); log.info({ accountCount: accounts.length }, "Processing accounts");
for (const account of accounts) { for (const account of accounts) {
await runWorkerForAccount(account); await withTdlibMutex(`ingest:${account.phone}`, () =>
runWorkerForAccount(account)
);
} }
log.info("Ingestion cycle complete"); log.info("Ingestion cycle complete");

162
worker/src/tdlib/chats.ts Normal file
View File

@@ -0,0 +1,162 @@
import type { Client } from "tdl";
import { childLogger } from "../util/logger.js";
import { config } from "../util/config.js";
const log = childLogger("chats");
export interface TelegramChatInfo {
chatId: bigint;
title: string;
type: "channel" | "supergroup" | "group" | "private" | "other";
isForum: boolean;
memberCount?: number;
}
/**
 * Fetch all chats the account is a member of.
 * Uses TDLib's getChats to load the main chat list, then getChat for details.
 * Filters to channels and supergroups only (groups/privates are not useful for ingestion).
 *
 * With chatListMain, newer TDLib versions return the full loaded list in a
 * single call, so no offset-based pagination is performed here (the previous
 * offsetOrder/offsetChatId cursors were never used).
 */
export async function getAccountChats(
  client: Client
): Promise<TelegramChatInfo[]> {
  const chats: TelegramChatInfo[] = [];

  const result = (await client.invoke({
    _: "getChats",
    chat_list: { _: "chatListMain" },
    limit: 100,
  })) as { chat_ids: number[] };

  const chatIds = result.chat_ids ?? [];
  for (const chatId of chatIds) {
    try {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      const chat = (await client.invoke({
        _: "getChat",
        chat_id: chatId,
      })) as any;

      const chatType = chat.type?._;
      let type: TelegramChatInfo["type"] = "other";
      let isForum = false;

      if (chatType === "chatTypeSupergroup") {
        // Get supergroup details to distinguish broadcast channels from groups
        try {
          // eslint-disable-next-line @typescript-eslint/no-explicit-any
          const sg = (await client.invoke({
            _: "getSupergroup",
            supergroup_id: chat.type.supergroup_id,
          })) as any;
          type = sg.is_channel ? "channel" : "supergroup";
          isForum = sg.is_forum ?? false;
        } catch {
          // Details unavailable — assume plain supergroup
          type = "supergroup";
        }
      } else if (chatType === "chatTypeBasicGroup") {
        type = "group";
      } else if (chatType === "chatTypePrivate" || chatType === "chatTypeSecret") {
        type = "private";
      }

      // Only include channels and supergroups
      if (type === "channel" || type === "supergroup") {
        chats.push({
          chatId: BigInt(chatId),
          title: chat.title ?? `Chat ${chatId}`,
          type,
          isForum,
        });
      }
    } catch (err) {
      log.warn({ chatId, err }, "Failed to get chat details, skipping");
    }
  }

  // Throttle after a non-empty batch, matching the original per-batch delay.
  if (chatIds.length > 0) {
    await sleep(config.apiDelayMs);
  }

  log.info(
    { total: chats.length },
    "Fetched channels/supergroups from Telegram"
  );
  return chats;
}
/**
 * Create a fresh invite link for a chat. The acting account must be an
 * admin of the chat or otherwise hold invite-link permission.
 */
export async function generateInviteLink(
  client: Client,
  chatId: bigint
): Promise<string> {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const response = (await client.invoke({
    _: "createChatInviteLink",
    chat_id: Number(chatId),
    name: "DragonsStash Auto-Join",
    creates_join_request: false,
  })) as any;

  const inviteLink: string = response.invite_link;
  log.info({ chatId: chatId.toString(), link: inviteLink }, "Generated invite link");
  return inviteLink;
}
/**
 * Create a new supergroup (private group) via TDLib and return its
 * chat ID and title.
 */
export async function createSupergroup(
  client: Client,
  title: string
): Promise<{ chatId: bigint; title: string }> {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const created = (await client.invoke({
    _: "createNewSupergroupChat",
    title,
    is_forum: false,
    is_channel: false,
    description: "DragonsStash archive destination — all accounts write here",
  })) as any;

  const chatId = BigInt(created.id);
  log.info({ chatId: chatId.toString(), title }, "Created new supergroup");

  return { chatId, title: created.title ?? title };
}
/**
 * Join a chat on behalf of this account using an invite link.
 */
export async function joinChatByInviteLink(
  client: Client,
  inviteLink: string
): Promise<void> {
  await client.invoke({ _: "joinChatByInviteLink", invite_link: inviteLink });
  log.info({ inviteLink }, "Joined chat by invite link");
}
/** Promise-based delay helper used for TDLib rate limiting. */
function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}

View File

@@ -1,5 +1,5 @@
import type { Client } from "tdl"; import type { Client } from "tdl";
import { readFile, rename, stat } from "fs/promises"; import { readFile, rename, copyFile, unlink, stat } from "fs/promises";
import { config } from "../util/config.js"; import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js"; import { childLogger } from "../util/logger.js";
import { isArchiveAttachment } from "../archive/detect.js"; import { isArchiveAttachment } from "../archive/detect.js";
@@ -69,19 +69,26 @@ export interface ChannelScanResult {
} }
/** /**
* Fetch messages from a channel since a given message ID. * Fetch messages from a channel, stopping once we've scanned past the
* last-processed boundary (with one page of lookback for multipart safety).
* Collects both archive attachments AND photo messages (for preview matching). * Collects both archive attachments AND photo messages (for preview matching).
* Returns messages in chronological order (oldest first). * Returns messages in chronological order (oldest first).
*
* When `lastProcessedMessageId` is null (first run), scans everything.
* The worker applies a post-grouping filter to skip fully-processed sets,
* and keeps `packageExistsBySourceMessage` as a safety net.
*/ */
export async function getChannelMessages( export async function getChannelMessages(
client: Client, client: Client,
chatId: bigint, chatId: bigint,
fromMessageId?: bigint | null, lastProcessedMessageId?: bigint | null,
limit = 100 limit = 100
): Promise<ChannelScanResult> { ): Promise<ChannelScanResult> {
const archives: TelegramMessage[] = []; const archives: TelegramMessage[] = [];
const photos: TelegramPhoto[] = []; const photos: TelegramPhoto[] = [];
let currentFromId = fromMessageId ? Number(fromMessageId) : 0; const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
let currentFromId = 0;
// eslint-disable-next-line no-constant-condition // eslint-disable-next-line no-constant-condition
while (true) { while (true) {
@@ -114,8 +121,6 @@ export async function getChannelMessages(
const photo = msg.content?.photo; const photo = msg.content?.photo;
const caption = msg.content?.caption?.text ?? ""; const caption = msg.content?.caption?.text ?? "";
if (photo?.sizes && photo.sizes.length > 0) { if (photo?.sizes && photo.sizes.length > 0) {
// Pick the smallest size for thumbnail (type "s" or "m")
// TDLib photo sizes are ordered from smallest to largest
const smallest = photo.sizes[0]; const smallest = photo.sizes[0];
photos.push({ photos.push({
id: BigInt(msg.id), id: BigInt(msg.id),
@@ -128,13 +133,22 @@ export async function getChannelMessages(
} }
currentFromId = result.messages[result.messages.length - 1].id; currentFromId = result.messages[result.messages.length - 1].id;
// Stop scanning once we've gone past the boundary (this page is the lookback)
if (boundary && currentFromId < boundary) break;
if (result.messages.length < 100) break; if (result.messages.length < 100) break;
// Rate limit delay // Rate limit delay
await sleep(config.apiDelayMs); await sleep(config.apiDelayMs);
} }
// Return in chronological order (oldest first) log.info(
{ chatId: chatId.toString(), archives: archives.length, photos: photos.length },
"Channel scan complete"
);
// Reverse to chronological order (oldest first) so worker processes old→new
return { return {
archives: archives.reverse(), archives: archives.reverse(),
photos: photos.reverse(), photos: photos.reverse(),
@@ -380,8 +394,23 @@ async function verifyAndMove(
"File verified and complete" "File verified and complete"
); );
// Move from TDLib's cache to our temp directory // Move from TDLib's cache to our temp directory.
await rename(localPath, destPath); // Use rename first (fast, same filesystem), fall back to copy+delete
// when source and destination are on different filesystems (EXDEV).
try {
await rename(localPath, destPath);
} catch (err: unknown) {
if ((err as NodeJS.ErrnoException).code === "EXDEV") {
log.debug(
{ fileId, fileName },
"Cross-device rename — falling back to copy + unlink"
);
await copyFile(localPath, destPath);
await unlink(localPath);
} else {
throw err;
}
}
} }
function sleep(ms: number): Promise<void> { function sleep(ms: number): Promise<void> {

View File

@@ -125,29 +125,43 @@ export async function getForumTopicList(
} }
/** /**
* Fetch messages from a specific forum topic (thread). * Fetch messages from a specific forum topic (thread), stopping once
* Uses getMessageThreadHistory to scan within a topic. * we've scanned past the last-processed boundary (with one page of lookback).
* Uses searchChatMessages with message_thread_id to scan within a topic.
*
* Returns messages in chronological order (oldest first).
*
* When `lastProcessedMessageId` is null (first run), scans everything.
* The worker applies a post-grouping filter to skip fully-processed sets,
* and keeps `packageExistsBySourceMessage` as a safety net.
*/ */
export async function getTopicMessages( export async function getTopicMessages(
client: Client, client: Client,
chatId: bigint, chatId: bigint,
topicId: bigint, topicId: bigint,
fromMessageId?: bigint | null, lastProcessedMessageId?: bigint | null,
limit = 100 limit = 100
): Promise<ChannelScanResult> { ): Promise<ChannelScanResult> {
const archives: TelegramMessage[] = []; const archives: TelegramMessage[] = [];
const photos: TelegramPhoto[] = []; const photos: TelegramPhoto[] = [];
let currentFromId = fromMessageId ? Number(fromMessageId) : 0; const boundary = lastProcessedMessageId ? Number(lastProcessedMessageId) : null;
let currentFromId = 0;
// eslint-disable-next-line no-constant-condition // eslint-disable-next-line no-constant-condition
while (true) { while (true) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const result = (await client.invoke({ const result = (await client.invoke({
_: "getMessageThreadHistory", _: "searchChatMessages",
chat_id: Number(chatId), chat_id: Number(chatId),
message_id: Number(topicId), query: "",
message_thread_id: Number(topicId),
from_message_id: currentFromId, from_message_id: currentFromId,
offset: 0, offset: 0,
limit: Math.min(limit, 100), limit: Math.min(limit, 100),
filter: null,
sender_id: null,
saved_messages_topic_id: 0,
})) as { })) as {
messages?: { messages?: {
id: number; id: number;
@@ -206,11 +220,21 @@ export async function getTopicMessages(
} }
currentFromId = result.messages[result.messages.length - 1].id; currentFromId = result.messages[result.messages.length - 1].id;
// Stop scanning once we've gone past the boundary (this page is the lookback)
if (boundary && currentFromId < boundary) break;
if (result.messages.length < 100) break; if (result.messages.length < 100) break;
await sleep(config.apiDelayMs); await sleep(config.apiDelayMs);
} }
log.info(
{ chatId: chatId.toString(), topicId: topicId.toString(), archives: archives.length, photos: photos.length },
"Topic scan complete"
);
// Reverse to chronological order (oldest first) so worker processes old→new
return { return {
archives: archives.reverse(), archives: archives.reverse(),
photos: photos.reverse(), photos: photos.reverse(),

View File

@@ -1,3 +1,5 @@
import path from "path";
import { stat } from "fs/promises";
import type { Client } from "tdl"; import type { Client } from "tdl";
import { config } from "../util/config.js"; import { config } from "../util/config.js";
import { childLogger } from "../util/logger.js"; import { childLogger } from "../util/logger.js";
@@ -11,7 +13,13 @@ export interface UploadResult {
/** /**
* Upload one or more files to a destination Telegram channel. * Upload one or more files to a destination Telegram channel.
* For multipart archives, each file is sent as a separate message. * For multipart archives, each file is sent as a separate message.
* Returns the message ID of the first uploaded message. * Returns the **final** (server-assigned) message ID of the first uploaded message.
*
* IMPORTANT: `sendMessage` returns a *temporary* message immediately.
* The actual file upload happens asynchronously in TDLib. We listen for
* `updateMessageSendSucceeded` to get the real server-side message ID and
* to make sure the upload is fully committed before we clean up temp files
* or close the TDLib client (which would cancel pending uploads).
*/ */
export async function uploadToChannel( export async function uploadToChannel(
client: Client, client: Client,
@@ -26,31 +34,24 @@ export async function uploadToChannel(
const fileCaption = const fileCaption =
i === 0 && caption ? caption : undefined; i === 0 && caption ? caption : undefined;
log.debug( const fileName = path.basename(filePath);
{ chatId: Number(chatId), filePath, part: i + 1, total: filePaths.length }, let fileSizeMB = 0;
try {
const s = await stat(filePath);
fileSizeMB = Math.round(s.size / (1024 * 1024));
} catch {
// Non-critical
}
log.info(
{ chatId: Number(chatId), fileName, sizeMB: fileSizeMB, part: i + 1, total: filePaths.length },
"Uploading file to channel" "Uploading file to channel"
); );
const result = (await client.invoke({ const serverMsgId = await sendAndWaitForUpload(client, chatId, filePath, fileCaption, fileName, fileSizeMB);
_: "sendMessage",
chat_id: Number(chatId),
input_message_content: {
_: "inputMessageDocument",
document: {
_: "inputFileLocal",
path: filePath,
},
caption: fileCaption
? {
_: "formattedText",
text: fileCaption,
}
: undefined,
},
})) as { id: number };
if (i === 0) { if (i === 0) {
firstMessageId = BigInt(result.id); firstMessageId = serverMsgId;
} }
// Rate limit delay between uploads // Rate limit delay between uploads
@@ -65,12 +66,133 @@ export async function uploadToChannel(
log.info( log.info(
{ chatId: Number(chatId), messageId: Number(firstMessageId), files: filePaths.length }, { chatId: Number(chatId), messageId: Number(firstMessageId), files: filePaths.length },
"Upload complete" "All uploads confirmed by Telegram"
); );
return { messageId: firstMessageId }; return { messageId: firstMessageId };
} }
/**
 * Send a single file message and wait for Telegram to confirm the upload.
 * Returns the final server-assigned message ID.
 *
 * sendMessage resolves immediately with a *temporary* message while TDLib
 * uploads the file in the background; the real (server-side) message ID only
 * arrives via updateMessageSendSucceeded, matched on old_message_id.
 *
 * NOTE(review): the update listener is attached only after the sendMessage
 * invoke resolves — if TDLib could emit updateMessageSendSucceeded before the
 * listener is registered (e.g. for a very small file), the promise would hang
 * until the timeout. TODO confirm TDLib's response/update ordering.
 */
async function sendAndWaitForUpload(
  client: Client,
  chatId: bigint,
  filePath: string,
  caption: string | undefined,
  fileName: string,
  fileSizeMB: number
): Promise<bigint> {
  // Send the message — this returns a temporary message immediately
  const tempMsg = (await client.invoke({
    _: "sendMessage",
    chat_id: Number(chatId),
    input_message_content: {
      _: "inputMessageDocument",
      document: {
        _: "inputFileLocal",
        path: filePath,
      },
      // Caption is only attached when provided (first part of a set).
      caption: caption
        ? {
            _: "formattedText",
            text: caption,
          }
        : undefined,
    },
  })) as { id: number };

  // Temporary client-side ID; replaced by the server ID on success.
  const tempMsgId = tempMsg.id;

  log.debug(
    { fileName, tempMsgId },
    "Message queued, waiting for upload confirmation"
  );

  // Wait for the actual upload to complete
  return new Promise<bigint>((resolve, reject) => {
    // Guards against settling (resolve/reject) more than once.
    let settled = false;
    // Last progress threshold logged, in 20% steps, to rate-limit log output.
    let lastLoggedPercent = 0;

    // Timeout: 10 minutes per GB, minimum 10 minutes
    const timeoutMs = Math.max(
      10 * 60_000,
      (fileSizeMB / 1024) * 10 * 60_000
    );

    const timer = setTimeout(() => {
      if (!settled) {
        settled = true;
        cleanup();
        reject(
          new Error(
            `Upload timed out after ${Math.round(timeoutMs / 60_000)}min for ${fileName}`
          )
        );
      }
    }, timeoutMs);

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const handleUpdate = (update: any) => {
      // Track upload progress via updateFile events
      // NOTE(review): updateFile is not filtered to this upload's file — if
      // several uploads ran concurrently, progress of another file would be
      // logged under this fileName. Harmless today (uploads are sequential),
      // but verify before parallelizing.
      if (update?._ === "updateFile") {
        const file = update.file;
        if (file?.remote?.is_uploading_active && file.expected_size > 0) {
          const uploaded = file.remote.uploaded_size ?? 0;
          const total = file.expected_size;
          const percent = Math.round((uploaded / total) * 100);
          // Log at most once per 20% step.
          if (percent >= lastLoggedPercent + 20) {
            lastLoggedPercent = percent - (percent % 20);
            log.info(
              { fileName, uploaded, total, percent: `${percent}%` },
              "Upload progress"
            );
          }
        }
      }

      // The money event: upload succeeded, we get the final server message ID
      if (update?._ === "updateMessageSendSucceeded") {
        const msg = update.message;
        const oldMsgId = update.old_message_id;
        // Match on the temporary ID so we only react to *our* message.
        if (oldMsgId === tempMsgId) {
          if (!settled) {
            settled = true;
            cleanup();
            const finalId = BigInt(msg.id);
            log.info(
              { fileName, tempMsgId, finalMsgId: Number(finalId) },
              "Upload confirmed by Telegram"
            );
            resolve(finalId);
          }
        }
      }

      // Upload failed
      if (update?._ === "updateMessageSendFailed") {
        const oldMsgId = update.old_message_id;
        if (oldMsgId === tempMsgId) {
          if (!settled) {
            settled = true;
            cleanup();
            const errorMsg = update.error?.message ?? "Unknown upload error";
            reject(new Error(`Upload failed for ${fileName}: ${errorMsg}`));
          }
        }
      }
    };

    // Detach the listener and cancel the timeout once settled, so handlers
    // don't accumulate across sequential uploads.
    const cleanup = () => {
      clearTimeout(timer);
      client.off("update", handleUpdate);
    };

    client.on("update", handleUpdate);
  });
}
function sleep(ms: number): Promise<void> { function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms)); return new Promise((resolve) => setTimeout(resolve, ms));
} }

View File

@@ -9,8 +9,8 @@ export const config = {
telegramApiHash: process.env.TELEGRAM_API_HASH ?? "", telegramApiHash: process.env.TELEGRAM_API_HASH ?? "",
/** Maximum jitter added to scheduler interval (in minutes) */ /** Maximum jitter added to scheduler interval (in minutes) */
jitterMinutes: 5, jitterMinutes: 5,
/** Maximum time between multipart archive parts (in hours) */ /** Maximum time span for multipart archive parts (in hours). 0 = no limit. */
multipartTimeoutHours: 24, multipartTimeoutHours: parseInt(process.env.MULTIPART_TIMEOUT_HOURS ?? "0", 10),
/** Delay between Telegram API calls (in ms) to avoid rate limits */ /** Delay between Telegram API calls (in ms) to avoid rate limits */
apiDelayMs: 1000, apiDelayMs: 1000,
/** Max retries for rate-limited requests */ /** Max retries for rate-limited requests */

40
worker/src/util/mutex.ts Normal file
View File

@@ -0,0 +1,40 @@
import { childLogger } from "./logger.js";
const log = childLogger("mutex");

// Module-level lock state. `locked` stays true while a holder runs AND while
// the lock is being handed directly to the next queued waiter.
let locked = false;
let holder = "";
const queue: Array<{ resolve: () => void; label: string }> = [];

/**
 * Ensures only one TDLib client runs at a time across the entire worker process.
 * Both the scheduler (auth, ingestion) and the fetch listener acquire this
 * before creating any TDLib client.
 *
 * Release performs a direct FIFO hand-off: the lock is passed to the oldest
 * waiter without ever becoming observably unlocked. Clearing `locked` before
 * waking the waiter would open a window (the waiter resumes on a microtask)
 * in which a newly arriving caller sees `locked === false`, skips the queue,
 * and runs concurrently with the woken waiter — two TDLib clients at once.
 *
 * @param label - Human-readable owner tag used in logs (e.g. "ingest:+49…").
 * @param fn - Critical section; its result/exception is passed through.
 */
export async function withTdlibMutex<T>(
  label: string,
  fn: () => Promise<T>
): Promise<T> {
  if (locked) {
    log.info({ waiting: label, holder }, "Waiting for TDLib mutex");
    await new Promise<void>((resolve) => queue.push({ resolve, label }));
    // The releasing holder kept `locked === true` for us; just take ownership.
  }
  locked = true;
  holder = label;
  log.debug({ label }, "TDLib mutex acquired");
  try {
    return await fn();
  } finally {
    const next = queue.shift();
    if (next) {
      // Hand off directly: leave `locked` set so no newcomer can acquire in
      // the gap before the woken waiter's continuation runs.
      holder = next.label;
      log.debug({ next: next.label }, "TDLib mutex releasing to next waiter");
      next.resolve();
    } else {
      locked = false;
      holder = "";
      log.debug({ label }, "TDLib mutex released");
    }
  }
}

View File

@@ -1,12 +1,13 @@
import path from "path"; import path from "path";
import { unlink, readdir } from "fs/promises"; import { unlink, readdir, mkdir, rm } from "fs/promises";
import { config } from "./util/config.js"; import { config } from "./util/config.js";
import { childLogger } from "./util/logger.js"; import { childLogger } from "./util/logger.js";
import { tryAcquireLock, releaseLock } from "./db/locks.js"; import { tryAcquireLock, releaseLock } from "./db/locks.js";
import { import {
getSourceChannelMappings, getSourceChannelMappings,
getDestinationChannel, getGlobalDestinationChannel,
packageExistsByHash, packageExistsByHash,
packageExistsBySourceMessage,
createPackageWithFiles, createPackageWithFiles,
createIngestionRun, createIngestionRun,
completeIngestionRun, completeIngestionRun,
@@ -16,9 +17,19 @@ import {
setChannelForum, setChannelForum,
getTopicProgress, getTopicProgress,
upsertTopicProgress, upsertTopicProgress,
upsertChannel,
ensureAccountChannelLink,
getGlobalSetting,
getChannelFetchRequest,
updateFetchRequestStatus,
getAccountLinkedChannelIds,
getExistingChannelsByTelegramId,
getAccountById,
deleteOrphanedPackageByHash,
} from "./db/queries.js"; } from "./db/queries.js";
import type { ActivityUpdate } from "./db/queries.js"; import type { ActivityUpdate } from "./db/queries.js";
import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js"; import { createTdlibClient, closeTdlibClient } from "./tdlib/client.js";
import { getAccountChats, joinChatByInviteLink } from "./tdlib/chats.js";
import { getChannelMessages, downloadFile, downloadPhotoThumbnail } from "./tdlib/download.js"; import { getChannelMessages, downloadFile, downloadPhotoThumbnail } from "./tdlib/download.js";
import type { DownloadProgress, ChannelScanResult } from "./tdlib/download.js"; import type { DownloadProgress, ChannelScanResult } from "./tdlib/download.js";
import { isChatForum, getForumTopicList, getTopicMessages } from "./tdlib/topics.js"; import { isChatForum, getForumTopicList, getTopicMessages } from "./tdlib/topics.js";
@@ -29,13 +40,203 @@ import { extractCreatorFromFileName } from "./archive/creator.js";
import { hashParts } from "./archive/hash.js"; import { hashParts } from "./archive/hash.js";
import { readZipCentralDirectory } from "./archive/zip-reader.js"; import { readZipCentralDirectory } from "./archive/zip-reader.js";
import { readRarContents } from "./archive/rar-reader.js"; import { readRarContents } from "./archive/rar-reader.js";
import { byteLevelSplit } from "./archive/split.js"; import { byteLevelSplit, concatenateFiles } from "./archive/split.js";
import { uploadToChannel } from "./upload/channel.js"; import { uploadToChannel } from "./upload/channel.js";
import type { TelegramAccount, TelegramChannel } from "@prisma/client"; import type { TelegramAccount, TelegramChannel } from "@prisma/client";
import type { Client } from "tdl"; import type { Client } from "tdl";
const log = childLogger("worker"); const log = childLogger("worker");
/**
* Authenticate a PENDING account by creating a TDLib client.
* TDLib will send an SMS code to the phone number, and the client.login()
* callbacks set the authState to AWAITING_CODE. Once the admin enters the
* code via the UI, pollForAuthCode picks it up and completes the login.
*
* After successful auth:
* 1. Fetches channels from Telegram and writes as a ChannelFetchRequest
* (so the admin can select sources in the UI)
* 2. Auto-joins the destination group if an invite link is configured
*/
export async function authenticateAccount(
account: TelegramAccount
): Promise<void> {
const aLog = childLogger("auth", { accountId: account.id, phone: account.phone });
aLog.info("Starting authentication flow");
let client: Client | undefined;
try {
client = await createTdlibClient({
id: account.id,
phone: account.phone,
});
aLog.info("Authentication successful");
// Auto-fetch channels and create a fetch request result
aLog.info("Fetching channels from Telegram...");
await createAutoFetchRequest(client, account.id, aLog);
// Auto-join the destination group if an invite link exists
const inviteLink = await getGlobalSetting("destination_invite_link");
if (inviteLink) {
aLog.info("Attempting to join destination group via invite link...");
try {
await joinChatByInviteLink(client, inviteLink);
// Link this account as WRITER to the destination channel
const destChannel = await getGlobalDestinationChannel();
if (destChannel) {
await ensureAccountChannelLink(account.id, destChannel.id, "WRITER");
aLog.info({ destChannel: destChannel.title }, "Joined destination group and linked as WRITER");
}
} catch (err) {
// May already be a member — that's fine
aLog.warn({ err }, "Could not join destination group (may already be a member)");
// Still try to link as WRITER
const destChannel = await getGlobalDestinationChannel();
if (destChannel) {
await ensureAccountChannelLink(account.id, destChannel.id, "WRITER");
}
}
}
} catch (err) {
aLog.error({ err }, "Authentication failed");
} finally {
if (client) {
await closeTdlibClient(client);
}
}
}
/**
* Process a ChannelFetchRequest: fetch channels from Telegram,
* enrich with DB state, and write the result JSON.
* Called by the fetch listener (pg_notify) and by authenticateAccount.
*/
export async function processFetchRequest(requestId: string): Promise<void> {
const aLog = childLogger("fetch-request", { requestId });
const request = await getChannelFetchRequest(requestId);
if (!request || request.status !== "PENDING") {
aLog.warn("Fetch request not found or not pending, skipping");
return;
}
await updateFetchRequestStatus(requestId, "IN_PROGRESS");
aLog.info({ accountId: request.accountId }, "Processing fetch request");
const client = await createTdlibClient({
id: request.account.id,
phone: request.account.phone,
});
try {
const chats = await getAccountChats(client);
// Enrich with DB state
const linkedTelegramIds = await getAccountLinkedChannelIds(request.accountId);
const existingChannels = await getExistingChannelsByTelegramId();
const enrichedChats = chats.map((chat) => {
const telegramIdStr = chat.chatId.toString();
return {
chatId: telegramIdStr,
title: chat.title,
type: chat.type,
isForum: chat.isForum,
memberCount: chat.memberCount ?? null,
alreadyLinked: linkedTelegramIds.has(telegramIdStr),
existingChannelId: existingChannels.get(telegramIdStr) ?? null,
};
});
// Also upsert channel metadata while we have the data
for (const chat of chats) {
try {
await upsertChannel({
telegramId: chat.chatId,
title: chat.title,
type: "SOURCE",
isForum: chat.isForum,
});
} catch {
// Non-critical — metadata sync can fail silently
}
}
await updateFetchRequestStatus(requestId, "COMPLETED", {
resultJson: JSON.stringify(enrichedChats),
});
aLog.info(
{ total: chats.length, linked: [...linkedTelegramIds].length },
"Fetch request completed"
);
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
aLog.error({ err }, "Fetch request failed");
await updateFetchRequestStatus(requestId, "FAILED", { error: message });
} finally {
await closeTdlibClient(client);
}
}
/**
* Internal helper called after authentication to auto-create a fetch request
* with the channel list (so the UI can show the picker immediately).
*/
async function createAutoFetchRequest(
client: Client,
accountId: string,
aLog: ReturnType<typeof childLogger>
): Promise<void> {
const chats = await getAccountChats(client);
const linkedTelegramIds = await getAccountLinkedChannelIds(accountId);
const existingChannels = await getExistingChannelsByTelegramId();
const enrichedChats = chats.map((chat) => {
const telegramIdStr = chat.chatId.toString();
return {
chatId: telegramIdStr,
title: chat.title,
type: chat.type,
isForum: chat.isForum,
memberCount: chat.memberCount ?? null,
alreadyLinked: linkedTelegramIds.has(telegramIdStr),
existingChannelId: existingChannels.get(telegramIdStr) ?? null,
};
});
// Upsert channel metadata
for (const chat of chats) {
try {
await upsertChannel({
telegramId: chat.chatId,
title: chat.title,
type: "SOURCE",
isForum: chat.isForum,
});
} catch {
// Non-critical
}
}
// Create the fetch request record with the result already filled in
const { db } = await import("./db/client.js");
await db.channelFetchRequest.create({
data: {
accountId,
status: "COMPLETED",
resultJson: JSON.stringify(enrichedChats),
},
});
aLog.info(
{ total: chats.length },
"Auto-fetch request created with channel list"
);
}
/** /**
* Throttle DB writes for download progress to avoid hammering the DB. * Throttle DB writes for download progress to avoid hammering the DB.
* Only writes if at least 2 seconds have passed since the last write. * Only writes if at least 2 seconds have passed since the last write.
@@ -140,17 +341,18 @@ export async function runWorkerForAccount(
}; };
try { try {
// 4. Get assigned source channels and destination // 4. Get assigned source channels and global destination
const channelMappings = await getSourceChannelMappings(account.id); const channelMappings = await getSourceChannelMappings(account.id);
const destChannel = await getDestinationChannel(account.id); const destChannel = await getGlobalDestinationChannel();
if (!destChannel) { if (!destChannel) {
throw new Error("No active destination channel configured"); throw new Error("No global destination channel configured — set one in the admin UI");
} }
for (const mapping of channelMappings) { for (const mapping of channelMappings) {
const channel = mapping.channel; const channel = mapping.channel;
try {
// ── Check if channel is a forum ── // ── Check if channel is a forum ──
const forum = await isChatForum(client, channel.telegramId); const forum = await isChatForum(client, channel.telegramId);
if (forum !== channel.isForum) { if (forum !== channel.isForum) {
@@ -198,61 +400,63 @@ export async function runWorkerForAccount(
); );
for (const topic of topics) { for (const topic of topics) {
const progress = topicProgressList.find( try {
(tp) => tp.topicId === topic.topicId const progress = topicProgressList.find(
); (tp) => tp.topicId === topic.topicId
await updateRunActivity(activeRunId, {
currentActivity: `Scanning topic "${topic.name}" in "${channel.title}"`,
currentStep: "scanning",
currentChannel: `${channel.title} ${topic.name}`,
currentFile: null,
currentFileNum: null,
totalFiles: null,
downloadedBytes: null,
totalBytes: null,
downloadPercent: null,
});
const scanResult = await getTopicMessages(
client,
channel.telegramId,
topic.topicId,
progress?.lastProcessedMessageId
);
if (scanResult.archives.length === 0) {
accountLog.debug(
{ channelId: channel.id, topic: topic.name },
"No new archives in topic"
); );
continue;
}
accountLog.info( await updateRunActivity(activeRunId, {
{ topic: topic.name, archives: scanResult.archives.length, photos: scanResult.photos.length }, currentActivity: `Scanning topic "${topic.name}" in "${channel.title}"`,
"Found messages in topic" currentStep: "scanning",
); currentChannel: `${channel.title} ${topic.name}`,
currentFile: null,
currentFileNum: null,
totalFiles: null,
downloadedBytes: null,
totalBytes: null,
downloadPercent: null,
});
// Process archives with topic creator const scanResult = await getTopicMessages(
pipelineCtx.topicCreator = topic.name; client,
pipelineCtx.sourceTopicId = topic.topicId; channel.telegramId,
pipelineCtx.channelTitle = `${channel.title} ${topic.name}`;
await processArchiveSets(pipelineCtx, scanResult, run.id);
// Update topic progress
const allMsgIds = [
...scanResult.archives.map((m) => m.id),
...scanResult.photos.map((p) => p.id),
];
if (allMsgIds.length > 0) {
const maxId = allMsgIds.reduce((a, b) => (a > b ? a : b));
await upsertTopicProgress(
mapping.id,
topic.topicId, topic.topicId,
topic.name, progress?.lastProcessedMessageId
maxId );
if (scanResult.archives.length === 0) {
accountLog.debug(
{ channelId: channel.id, topic: topic.name },
"No new archives in topic"
);
continue;
}
accountLog.info(
{ topic: topic.name, archives: scanResult.archives.length, photos: scanResult.photos.length },
"Found messages in topic"
);
// Process archives with topic creator
pipelineCtx.topicCreator = topic.name;
pipelineCtx.sourceTopicId = topic.topicId;
pipelineCtx.channelTitle = `${channel.title} ${topic.name}`;
const maxProcessedId = await processArchiveSets(pipelineCtx, scanResult, run.id, progress?.lastProcessedMessageId);
// Only advance progress to the highest successfully processed message
if (maxProcessedId) {
await upsertTopicProgress(
mapping.id,
topic.topicId,
topic.name,
maxProcessedId
);
}
} catch (topicErr) {
accountLog.warn(
{ err: topicErr, channelId: channel.id, topic: topic.name, topicId: topic.topicId.toString() },
"Failed to process topic, skipping"
); );
} }
} }
@@ -296,18 +500,19 @@ export async function runWorkerForAccount(
pipelineCtx.sourceTopicId = null; pipelineCtx.sourceTopicId = null;
pipelineCtx.channelTitle = channel.title; pipelineCtx.channelTitle = channel.title;
await processArchiveSets(pipelineCtx, scanResult, run.id); const maxProcessedId = await processArchiveSets(pipelineCtx, scanResult, run.id, mapping.lastProcessedMessageId);
// Update last processed message // Only advance progress to the highest successfully processed message
const allMsgIds = [ if (maxProcessedId) {
...scanResult.archives.map((m) => m.id), await updateLastProcessedMessage(mapping.id, maxProcessedId);
...scanResult.photos.map((p) => p.id),
];
if (allMsgIds.length > 0) {
const maxId = allMsgIds.reduce((a, b) => (a > b ? a : b));
await updateLastProcessedMessage(mapping.id, maxId);
} }
} }
} catch (channelErr) {
accountLog.warn(
{ err: channelErr, channelId: channel.id, title: channel.title },
"Failed to process channel, skipping to next"
);
}
} }
// ── Done ── // ── Done ──
@@ -332,16 +537,37 @@ export async function runWorkerForAccount(
/** /**
* Process a scan result through the archive pipeline: * Process a scan result through the archive pipeline:
* group → download → hash → dedup → metadata → split → upload → preview → index. * group → download → hash → dedup → metadata → split → upload → preview → index.
*
* Returns the highest message ID that was successfully processed (ingested or
* confirmed duplicate). The caller should only advance the progress boundary
* to this value — never to the max of all scanned messages.
*/ */
async function processArchiveSets( async function processArchiveSets(
ctx: PipelineContext, ctx: PipelineContext,
scanResult: ChannelScanResult, scanResult: ChannelScanResult,
ingestionRunId: string ingestionRunId: string,
): Promise<void> { lastProcessedMessageId?: bigint | null
): Promise<bigint | null> {
const { client, runId, channelTitle, channel, throttled, counters, accountLog } = ctx; const { client, runId, channelTitle, channel, throttled, counters, accountLog } = ctx;
// Group into archive sets // Group into archive sets
const archiveSets = groupArchiveSets(scanResult.archives); let archiveSets = groupArchiveSets(scanResult.archives);
// Filter out sets where ALL parts are at or below the boundary (already processed)
if (lastProcessedMessageId) {
const totalBefore = archiveSets.length;
archiveSets = archiveSets.filter((set) =>
set.parts.some((p) => p.id > lastProcessedMessageId)
);
const filtered = totalBefore - archiveSets.length;
if (filtered > 0) {
accountLog.info(
{ filtered, remaining: archiveSets.length },
"Filtered out already-processed archive sets"
);
}
}
counters.zipsFound += archiveSets.length; counters.zipsFound += archiveSets.length;
// Match preview photos to archive sets // Match preview photos to archive sets
@@ -369,16 +595,38 @@ async function processArchiveSets(
zipsFound: counters.zipsFound, zipsFound: counters.zipsFound,
}); });
// Track the highest message ID that was successfully processed
let maxProcessedId: bigint | null = null;
for (let setIdx = 0; setIdx < archiveSets.length; setIdx++) { for (let setIdx = 0; setIdx < archiveSets.length; setIdx++) {
await processOneArchiveSet( try {
ctx, await processOneArchiveSet(
archiveSets[setIdx], ctx,
setIdx, archiveSets[setIdx],
archiveSets.length, setIdx,
previewMatches, archiveSets.length,
ingestionRunId previewMatches,
); ingestionRunId
);
// Set completed (ingested or confirmed duplicate) — advance watermark
const setMaxId = archiveSets[setIdx].parts.reduce(
(max, p) => (p.id > max ? p.id : max),
0n
);
if (setMaxId > (maxProcessedId ?? 0n)) {
maxProcessedId = setMaxId;
}
} catch (setErr) {
// If a set fails, do NOT advance the watermark past it
accountLog.warn(
{ err: setErr, baseName: archiveSets[setIdx].baseName },
"Archive set failed, watermark will not advance past this set"
);
}
} }
return maxProcessedId;
} }
/** /**
@@ -400,17 +648,43 @@ async function processOneArchiveSet(
counters.messagesScanned += archiveSet.parts.length; counters.messagesScanned += archiveSet.parts.length;
const archiveName = archiveSet.parts[0].fileName; const archiveName = archiveSet.parts[0].fileName;
// ── Early skip: check if this archive set was already ingested ──
// This avoids re-downloading large archives that were processed in a prior run.
const alreadyIngested = await packageExistsBySourceMessage(
channel.id,
archiveSet.parts[0].id
);
if (alreadyIngested) {
counters.zipsDuplicate++;
accountLog.debug(
{ fileName: archiveName, sourceMessageId: Number(archiveSet.parts[0].id) },
"Archive already ingested (by source message), skipping"
);
await updateRunActivity(runId, {
currentActivity: `Skipped ${archiveName} (already ingested)`,
currentStep: "deduplicating",
currentChannel: channelTitle,
currentFile: archiveName,
currentFileNum: setIdx + 1,
totalFiles: totalSets,
zipsDuplicate: counters.zipsDuplicate,
});
return;
}
const tempPaths: string[] = []; const tempPaths: string[] = [];
let splitPaths: string[] = []; let splitPaths: string[] = [];
// Per-set subdirectory so uploaded files keep their original filenames
const setDir = path.join(config.tempDir, `${ingestionRunId}_${archiveSet.parts[0].id}`);
await mkdir(setDir, { recursive: true });
try { try {
// ── Downloading ── // ── Downloading ──
for (let partIdx = 0; partIdx < archiveSet.parts.length; partIdx++) { for (let partIdx = 0; partIdx < archiveSet.parts.length; partIdx++) {
const part = archiveSet.parts[partIdx]; const part = archiveSet.parts[partIdx];
const tempPath = path.join( const tempPath = path.join(setDir, part.fileName);
config.tempDir,
`${ingestionRunId}_${part.id}_${part.fileName}`
);
const partLabel = archiveSet.parts.length > 1 const partLabel = archiveSet.parts.length > 1
? ` (part ${partIdx + 1}/${archiveSet.parts.length})` ? ` (part ${partIdx + 1}/${archiveSet.parts.length})`
@@ -526,14 +800,33 @@ async function processOneArchiveSet(
accountLog.warn({ err, baseName: archiveSet.baseName }, "Failed to read archive metadata, ingesting without file list"); accountLog.warn({ err, baseName: archiveSet.baseName }, "Failed to read archive metadata, ingesting without file list");
} }
// ── Splitting (if needed) ── // ── Splitting / Repacking (if needed) ──
let uploadPaths = tempPaths; let uploadPaths = [...tempPaths];
const totalSize = archiveSet.parts.reduce( const totalSize = archiveSet.parts.reduce(
(sum, p) => sum + p.fileSize, (sum, p) => sum + p.fileSize,
0n 0n
); );
const MAX_UPLOAD_SIZE = 2n * 1024n * 1024n * 1024n;
const hasOversizedPart = archiveSet.parts.some((p) => p.fileSize > MAX_UPLOAD_SIZE);
if (!archiveSet.isMultipart && totalSize > 2n * 1024n * 1024n * 1024n) { if (hasOversizedPart) {
// Full repack: concatenate all parts → single file → re-split into uniform 2GB chunks
await updateRunActivity(runId, {
currentActivity: `Repacking ${archiveName} (parts >2GB, concatenating + re-splitting)`,
currentStep: "splitting",
currentChannel: channelTitle,
currentFile: archiveName,
currentFileNum: setIdx + 1,
totalFiles: totalSets,
});
const concatPath = path.join(setDir, `${archiveSet.baseName}.concat`);
await concatenateFiles(tempPaths, concatPath);
splitPaths = await byteLevelSplit(concatPath);
uploadPaths = splitPaths;
// Clean up the concat intermediate file
await unlink(concatPath).catch(() => {});
} else if (!archiveSet.isMultipart && totalSize > MAX_UPLOAD_SIZE) {
// Single file >2GB: split directly
await updateRunActivity(runId, { await updateRunActivity(runId, {
currentActivity: `Splitting ${archiveName} for upload (>2GB)`, currentActivity: `Splitting ${archiveName} for upload (>2GB)`,
currentStep: "splitting", currentStep: "splitting",
@@ -595,6 +888,9 @@ async function processOneArchiveSet(
totalFiles: totalSets, totalFiles: totalSets,
}); });
// Clean up any orphaned record (same hash but no dest upload) before creating
await deleteOrphanedPackageByHash(contentHash);
await createPackageWithFiles({ await createPackageWithFiles({
contentHash, contentHash,
fileName: archiveName, fileName: archiveName,
@@ -632,8 +928,9 @@ async function processOneArchiveSet(
"Archive ingested" "Archive ingested"
); );
} finally { } finally {
// ALWAYS delete temp files // ALWAYS delete temp files and the set directory
await deleteFiles([...tempPaths, ...splitPaths]); await deleteFiles([...tempPaths, ...splitPaths]);
await rm(setDir, { recursive: true, force: true }).catch(() => {});
} }
} }
@@ -648,16 +945,16 @@ async function deleteFiles(paths: string[]): Promise<void> {
} }
/** /**
* Clean up any leftover temp files from previous runs. * Clean up any leftover temp files/directories from previous runs.
*/ */
export async function cleanupTempDir(): Promise<void> { export async function cleanupTempDir(): Promise<void> {
try { try {
const files = await readdir(config.tempDir); const entries = await readdir(config.tempDir);
for (const file of files) { for (const entry of entries) {
await unlink(path.join(config.tempDir, file)).catch(() => {}); await rm(path.join(config.tempDir, entry), { recursive: true, force: true }).catch(() => {});
} }
if (files.length > 0) { if (entries.length > 0) {
log.info({ count: files.length }, "Cleaned up stale temp files"); log.info({ count: entries.length }, "Cleaned up stale temp files");
} }
} catch { } catch {
// Directory might not exist yet // Directory might not exist yet