@@ -32,7 +34,7 @@ export function TelegramAdmin({
description="Manage Telegram accounts, channels, and ingestion"
/>
-      <WorkerStatusPanel initialStatus={ingestionStatus} />
+      <WorkerStatusPanel initialStatus={ingestionStatus} initialIntervalMinutes={workerIntervalMinutes} />
diff --git a/src/app/(app)/telegram/_components/worker-status-panel.tsx b/src/app/(app)/telegram/_components/worker-status-panel.tsx
index ef7601d..215ecc3 100644
--- a/src/app/(app)/telegram/_components/worker-status-panel.tsx
+++ b/src/app/(app)/telegram/_components/worker-status-panel.tsx
@@ -1,6 +1,6 @@
"use client";
-import { useEffect, useState, useCallback } from "react";
+import { useEffect, useState, useCallback, useTransition } from "react";
import {
Loader2,
CheckCircle2,
@@ -14,10 +14,13 @@ import { Card, CardContent } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
+import { toast } from "sonner";
+import { triggerIngestion } from "../actions";
import type { IngestionAccountStatus } from "@/lib/telegram/types";
interface WorkerStatusPanelProps {
initialStatus: IngestionAccountStatus[];
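+  /** Worker run cadence in minutes, used to estimate the next scheduled run. Defaults to 60. */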
+ initialIntervalMinutes?: number;
}
const AUTH_STATE_CONFIG: Record<
@@ -39,15 +42,28 @@ const AUTH_STATE_CONFIG: Record<
EXPIRED: { label: "Expired", color: "text-red-500", icon: "x" },
};
-export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
+export function WorkerStatusPanel({ initialStatus, initialIntervalMinutes = 60 }: WorkerStatusPanelProps) {
const [accounts, setAccounts] = useState(initialStatus);
const [error, setError] = useState(false);
  const [nextRunCountdown, setNextRunCountdown] = useState<string | null>(null);
+ const [workerIntervalMinutes, setWorkerIntervalMinutes] = useState(initialIntervalMinutes);
+ const [isPending, startTransition] = useTransition();
// Find active run
const activeRun = accounts.find((a) => a.currentRun);
const isRunning = !!activeRun;
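+  // Run the server action inside a transition so isPending can disable
+  // the Sync button while the trigger request is in flight.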
+ const handleSyncNow = useCallback(() => {
+ startTransition(async () => {
+ const result = await triggerIngestion();
+ if (result.success) {
+ toast.success("Sync triggered — worker will start shortly");
+ } else {
+ toast.error(result.error ?? "Failed to trigger sync");
+ }
+ });
+ }, []);
+
// Poll for status
useEffect(() => {
    let timer: ReturnType<typeof setTimeout>;
@@ -60,6 +76,9 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
const data = await res.json();
if (mounted) {
setAccounts(data.accounts ?? []);
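+          // Adopt server-side interval changes so the countdown estimate stays accurate.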
+ if (data.workerIntervalMinutes) {
+ setWorkerIntervalMinutes(data.workerIntervalMinutes);
+ }
setError(false);
}
} catch {
@@ -86,7 +105,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
return;
}
- // Estimate next run based on last run finish time + interval (5 min + up to 5 min jitter)
+ // Estimate next run based on last run finish time + configured interval + up to 5 min jitter
const lastFinished = accounts
.filter((a) => a.lastRun?.finishedAt)
.map((a) => new Date(a.lastRun!.finishedAt!).getTime())
@@ -97,7 +116,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
return;
}
- const intervalMs = 5 * 60 * 1000; // 5 min base
+ const intervalMs = workerIntervalMinutes * 60 * 1000;
const estimatedNext = lastFinished + intervalMs;
const tick = () => {
@@ -116,7 +135,7 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
tick();
const interval = setInterval(tick, 1_000);
return () => clearInterval(interval);
- }, [isRunning, accounts]);
+ }, [isRunning, accounts, workerIntervalMinutes]);
if (accounts.length === 0 && !error) {
return (
@@ -182,7 +201,12 @@ export function WorkerStatusPanel({ initialStatus }: WorkerStatusPanelProps) {
      ) : isRunning && activeRun?.currentRun ? (
        <RunningStatus run={activeRun.currentRun} />
      ) : (
-        <IdleStatus accounts={accounts} nextRunCountdown={nextRunCountdown} />
+        <IdleStatus
+          accounts={accounts}
+          nextRunCountdown={nextRunCountdown}
+          onSyncNow={handleSyncNow}
+          isSyncing={isPending}
+        />
      )}
@@ -256,9 +280,13 @@ function RunningStatus({
function IdleStatus({
accounts,
nextRunCountdown,
+ onSyncNow,
+ isSyncing,
}: {
accounts: IngestionAccountStatus[];
nextRunCountdown: string | null;
+ onSyncNow: () => void;
+ isSyncing: boolean;
}) {
const lastRun = accounts
.filter((a) => a.lastRun)
@@ -321,14 +349,32 @@ function IdleStatus({
)}
-      {nextRunCountdown && hasAuthenticated && (
-        <div className="flex items-center gap-1.5 text-xs text-muted-foreground">
-          <Clock className="h-3.5 w-3.5" />
-          <span>
-            Next: {nextRunCountdown}
-          </span>
-        </div>
-      )}
+      <div className="flex items-center gap-3">
+        {nextRunCountdown && hasAuthenticated && (
+          <div className="flex items-center gap-1.5 text-xs text-muted-foreground">
+            <Clock className="h-3.5 w-3.5" />
+            <span>
+              Next: {nextRunCountdown}
+            </span>
+          </div>
+        )}
+        {hasAuthenticated && (
+          <Button
+            size="sm"
+            variant="outline"
+            onClick={onSyncNow}
+            disabled={isSyncing}
+          >
+            {isSyncing && <Loader2 className="mr-1.5 h-3.5 w-3.5 animate-spin" />}
+            Sync now
+          </Button>
+        )}
+      </div>
);
}
diff --git a/src/app/(app)/telegram/page.tsx b/src/app/(app)/telegram/page.tsx
index a0f575d..2100d40 100644
--- a/src/app/(app)/telegram/page.tsx
+++ b/src/app/(app)/telegram/page.tsx
@@ -42,6 +42,7 @@ export default async function TelegramPage() {
ingestionStatus={ingestionStatus}
globalDestination={globalDestination}
sendHistory={serializedHistory}
+ workerIntervalMinutes={parseInt(process.env.WORKER_INTERVAL_MINUTES ?? "60", 10)}
/>
);
}
diff --git a/src/app/api/ingestion/status/route.ts b/src/app/api/ingestion/status/route.ts
index dc8c76d..239fa59 100644
--- a/src/app/api/ingestion/status/route.ts
+++ b/src/app/api/ingestion/status/route.ts
@@ -9,5 +9,9 @@ export async function GET(request: Request) {
if ("error" in authResult) return authResult.error;
const accounts = await getIngestionStatus();
- return NextResponse.json({ accounts });
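+  // Expose the worker's configured interval so the client can estimate the next scheduled run.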
+ const workerIntervalMinutes = parseInt(
+ process.env.WORKER_INTERVAL_MINUTES ?? "60",
+ 10
+ );
+ return NextResponse.json({ accounts, workerIntervalMinutes });
}
diff --git a/src/app/api/ingestion/trigger/route.ts b/src/app/api/ingestion/trigger/route.ts
index ebf9562..5586b6b 100644
--- a/src/app/api/ingestion/trigger/route.ts
+++ b/src/app/api/ingestion/trigger/route.ts
@@ -45,33 +45,20 @@ export async function POST(request: Request) {
);
}
- // Create ingestion runs marked as RUNNING — the worker will pick these up
- // when it next polls, or we use pg_notify for immediate pickup
- for (const account of accounts) {
- // Only create if no run is already RUNNING for this account
- const existing = await prisma.ingestionRun.findFirst({
- where: { accountId: account.id, status: "RUNNING" },
- });
- if (!existing) {
- await prisma.ingestionRun.create({
- data: { accountId: account.id, status: "RUNNING" },
- });
- }
- }
-
- // Send pg_notify for immediate worker pickup
+ // Send pg_notify for immediate worker pickup.
+ // The worker creates its own IngestionRun records with proper activity tracking.
try {
await prisma.$queryRawUnsafe(
`SELECT pg_notify('ingestion_trigger', $1)`,
accounts.map((a) => a.id).join(",")
);
} catch {
- // pg_notify is best-effort — worker will pick up on next cycle anyway
+ // pg_notify is best-effort — worker will pick up on next scheduled cycle anyway
}
return NextResponse.json({
triggered: true,
accountIds: accounts.map((a) => a.id),
- message: `Ingestion queued for ${accounts.length} account(s)`,
+ message: `Ingestion triggered for ${accounts.length} account(s)`,
});
}
diff --git a/worker/src/fetch-listener.ts b/worker/src/fetch-listener.ts
index 681979f..3d49e38 100644
--- a/worker/src/fetch-listener.ts
+++ b/worker/src/fetch-listener.ts
@@ -18,6 +18,10 @@ import {
const log = childLogger("fetch-listener");
let pgClient: pg.PoolClient | null = null;
+let stopped = false;
+
+/** Delay (ms) before attempting to reconnect after a connection loss. */
+const RECONNECT_DELAY_MS = 5_000;
/**
* Start listening for pg_notify signals from the web app.
@@ -27,30 +31,75 @@ let pgClient: pg.PoolClient | null = null;
* - `generate_invite` — payload = channelId → generate invite link for destination
* - `create_destination` — payload = JSON { requestId, title } → create supergroup via TDLib
* - `ingestion_trigger` — trigger an immediate ingestion cycle
+ *
+ * If the underlying connection is lost, the listener automatically reconnects
+ * so that pg_notify signals are never silently dropped.
*/
export async function startFetchListener(): Promise<void> {
- pgClient = await pool.connect();
- await pgClient.query("LISTEN channel_fetch");
- await pgClient.query("LISTEN generate_invite");
- await pgClient.query("LISTEN create_destination");
- await pgClient.query("LISTEN ingestion_trigger");
+ stopped = false;
+ await connectListener();
+}
- pgClient.on("notification", (msg) => {
- if (msg.channel === "channel_fetch" && msg.payload) {
- handleChannelFetch(msg.payload);
- } else if (msg.channel === "generate_invite" && msg.payload) {
- handleGenerateInvite(msg.payload);
- } else if (msg.channel === "create_destination" && msg.payload) {
- handleCreateDestination(msg.payload);
- } else if (msg.channel === "ingestion_trigger") {
- handleIngestionTrigger();
+async function connectListener(): Promise<void> {
+ try {
+ pgClient = await pool.connect();
+ await pgClient.query("LISTEN channel_fetch");
+ await pgClient.query("LISTEN generate_invite");
+ await pgClient.query("LISTEN create_destination");
+ await pgClient.query("LISTEN ingestion_trigger");
+
+ pgClient.on("notification", (msg) => {
+ if (msg.channel === "channel_fetch" && msg.payload) {
+ handleChannelFetch(msg.payload);
+ } else if (msg.channel === "generate_invite" && msg.payload) {
+ handleGenerateInvite(msg.payload);
+ } else if (msg.channel === "create_destination" && msg.payload) {
+ handleCreateDestination(msg.payload);
+ } else if (msg.channel === "ingestion_trigger") {
+ handleIngestionTrigger();
+ }
+ });
+
+ // Reconnect automatically when the connection ends unexpectedly
+ pgClient.on("end", () => {
+ if (!stopped) {
+ log.warn("Fetch listener connection lost — reconnecting");
+ pgClient = null;
+ scheduleReconnect();
+ }
+ });
+
+ pgClient.on("error", (err) => {
+ log.error({ err }, "Fetch listener connection error");
+ if (!stopped && pgClient) {
+ try {
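+          // release(true) destroys the broken connection rather than returning it to the pool.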
+ pgClient.release(true);
+ } catch (releaseErr) {
+ log.debug({ err: releaseErr }, "Failed to release pg client after error");
+ }
+ pgClient = null;
+ scheduleReconnect();
+ }
+ });
+
+ log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger)");
+ } catch (err) {
+ log.error({ err }, "Failed to start fetch listener — retrying");
+ scheduleReconnect();
+ }
+}
+
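+/** Retry the LISTEN connection after RECONNECT_DELAY_MS unless the listener has been stopped deliberately. */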
+function scheduleReconnect(): void {
+ if (stopped) return;
+ setTimeout(() => {
+ if (!stopped) {
+ connectListener();
}
- });
-
- log.info("Fetch listener started (channel_fetch, generate_invite, create_destination, ingestion_trigger)");
+ }, RECONNECT_DELAY_MS);
}
export function stopFetchListener(): void {
+ stopped = true;
if (pgClient) {
pgClient.release();
pgClient = null;
diff --git a/worker/src/worker.ts b/worker/src/worker.ts
index e61c64c..f72955e 100644
--- a/worker/src/worker.ts
+++ b/worker/src/worker.ts
@@ -351,6 +351,10 @@ export async function runWorkerForAccount(
const totalChannels = channelMappings.length;
+ if (totalChannels === 0) {
+ accountLog.info("No active source channels linked to this account — nothing to ingest");
+ }
+
for (let chIdx = 0; chIdx < channelMappings.length; chIdx++) {
const mapping = channelMappings[chIdx];
const channel = mapping.channel;
@@ -451,8 +455,8 @@ export async function runWorkerForAccount(
counters.messagesScanned += scanResult.totalScanned;
if (scanResult.archives.length === 0) {
- accountLog.debug(
- { channelId: channel.id, topic: topic.name },
+ accountLog.info(
+ { channelId: channel.id, topic: topic.name, totalScanned: scanResult.totalScanned },
"No new archives in topic"
);
continue;
@@ -525,7 +529,7 @@ export async function runWorkerForAccount(
counters.messagesScanned += scanResult.totalScanned;
if (scanResult.archives.length === 0) {
- accountLog.debug({ channelId: channel.id }, "No new archives");
+ accountLog.info({ channelId: channel.id, title: channel.title, totalScanned: scanResult.totalScanned }, "No new archives in channel");
continue;
}
From e45de85c69e15e5bf982b3786f4bbfa44eeb8c24 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 5 Mar 2026 20:34:53 +0000
Subject: [PATCH 3/4] Add Rescan Channel option to channels tab
Co-authored-by: xCyanGrizzly <53275238+xCyanGrizzly@users.noreply.github.com>
---
package-lock.json | 2 +-
package.json | 2 +-
.../telegram/_components/channel-columns.tsx | 11 +++++
.../telegram/_components/channels-tab.tsx | 26 ++++++++++
src/app/(app)/telegram/actions.ts | 48 ++++++++++++++++++-
src/app/(app)/telegram/page.tsx | 2 +-
src/app/api/ingestion/trigger/route.ts | 4 +-
src/components/shared/delete-dialog.tsx | 6 ++-
8 files changed, 94 insertions(+), 7 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index 2aaa6a1..1e0f8ec 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -49,7 +49,7 @@
"ts-node": "^10.9.2",
"tsx": "^4.21.0",
"tw-animate-css": "^1.4.0",
- "typescript": "^5"
+ "typescript": "5.9.3"
}
},
"node_modules/@alloc/quick-lru": {
diff --git a/package.json b/package.json
index d09d995..76a0a17 100644
--- a/package.json
+++ b/package.json
@@ -58,6 +58,6 @@
"ts-node": "^10.9.2",
"tsx": "^4.21.0",
"tw-animate-css": "^1.4.0",
- "typescript": "^5"
+ "typescript": "5.9.3"
}
}
diff --git a/src/app/(app)/telegram/_components/channel-columns.tsx b/src/app/(app)/telegram/_components/channel-columns.tsx
index 98bdfca..e5f44b3 100644
--- a/src/app/(app)/telegram/_components/channel-columns.tsx
+++ b/src/app/(app)/telegram/_components/channel-columns.tsx
@@ -7,6 +7,7 @@ import {
Power,
ArrowDownToLine,
ArrowUpFromLine,
+ RefreshCcw,
} from "lucide-react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -23,12 +24,14 @@ interface ChannelColumnsProps {
onToggleActive: (id: string) => void;
onDelete: (id: string) => void;
onSetType: (id: string, type: "SOURCE" | "DESTINATION") => void;
+ onRescan: (id: string) => void;
}
export function getChannelColumns({
onToggleActive,
onDelete,
onSetType,
+ onRescan,
}: ChannelColumnsProps): ColumnDef[] {
return [
{
@@ -121,6 +124,14 @@ export function getChannelColumns({
Set as Source
)}
+        {row.original.type === "SOURCE" && (
+          <DropdownMenuItem
+            onClick={() => onRescan(row.original.id)}
+          >
+            <RefreshCcw className="mr-2 h-4 w-4" />
+            Rescan Channel
+          </DropdownMenuItem>
+        )}
          <DropdownMenuItem
            onClick={() => onToggleActive(row.original.id)}
          >
diff --git a/src/app/(app)/telegram/_components/channels-tab.tsx b/src/app/(app)/telegram/_components/channels-tab.tsx
index 0c12b23..042216a 100644
--- a/src/app/(app)/telegram/_components/channels-tab.tsx
+++ b/src/app/(app)/telegram/_components/channels-tab.tsx
@@ -8,6 +8,7 @@ import {
deleteChannel,
toggleChannelActive,
setChannelType,
+ rescanChannel,
} from "../actions";
import { DataTable } from "@/components/shared/data-table";
import { DeleteDialog } from "@/components/shared/delete-dialog";
@@ -22,6 +23,7 @@ interface ChannelsTabProps {
export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
const [isPending, startTransition] = useTransition();
  const [deleteId, setDeleteId] = useState<string | null>(null);
+  const [rescanId, setRescanId] = useState<string | null>(null);
const columns = getChannelColumns({
onToggleActive: (id) => {
@@ -39,6 +41,7 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
else toast.error(result.error);
});
},
+ onRescan: (id) => setRescanId(id),
});
const { table } = useDataTable({
@@ -60,6 +63,19 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
});
};
+ const handleRescan = () => {
+ if (!rescanId) return;
+ startTransition(async () => {
+ const result = await rescanChannel(rescanId);
+ if (result.success) {
+ toast.success("Channel scan progress reset — it will be fully rescanned on the next sync");
+ setRescanId(null);
+ } else {
+ toast.error(result.error);
+ }
+ });
+ };
+
return (
@@ -83,6 +99,16 @@ export function ChannelsTab({ channels, globalDestination }: ChannelsTabProps) {
onConfirm={handleDelete}
isLoading={isPending}
/>
+
+      <DeleteDialog
+        open={!!rescanId}
+        onOpenChange={(open) => !open && setRescanId(null)}
+        title="Rescan Channel"
+        description="This will reset all scan progress for this channel. On the next sync the worker will re-process every message from the beginning. Packages that are already in the library will be skipped (deduplication by hash), but any missing files will be re-downloaded and re-uploaded. This may take a long time for large channels."
+        confirmLabel="Rescan"
+        onConfirm={handleRescan}
+        isLoading={isPending}
+      />
);
}
diff --git a/src/app/(app)/telegram/actions.ts b/src/app/(app)/telegram/actions.ts
index 67acf2e..4e25d11 100644
--- a/src/app/(app)/telegram/actions.ts
+++ b/src/app/(app)/telegram/actions.ts
@@ -297,6 +297,52 @@ export async function triggerChannelSync(): Promise {
}
}
+/**
+ * Reset all scan progress for a channel so the worker will re-process it
+ * from the very beginning on the next ingestion cycle.
+ *
+ * This clears:
+ * - `lastProcessedMessageId` on every AccountChannelMap linked to this channel
+ * - All TopicProgress records for those maps (for forum channels)
+ */
+export async function rescanChannel(channelId: string): Promise<{ success: true; data: undefined } | { success: false; error: string }> {
+ const admin = await requireAdmin();
+ if (!admin.success) return admin;
+
+ const channel = await prisma.telegramChannel.findUnique({
+ where: { id: channelId },
+ });
+ if (!channel) return { success: false, error: "Channel not found" };
+
+ try {
+ // Find all account-channel maps for this channel
+ const maps = await prisma.accountChannelMap.findMany({
+ where: { channelId },
+ select: { id: true },
+ });
+
+ const mapIds = maps.map((m) => m.id);
+
+ // Delete all topic progress records for these maps (forum channels)
+ if (mapIds.length > 0) {
+ await prisma.topicProgress.deleteMany({
+ where: { accountChannelMapId: { in: mapIds } },
+ });
+ }
+
+ // Reset the scan cursor so the worker re-processes from the start
+ await prisma.accountChannelMap.updateMany({
+ where: { channelId },
+ data: { lastProcessedMessageId: null },
+ });
+
+ revalidatePath(REVALIDATE_PATH);
+ return { success: true, data: undefined };
+ } catch {
+ return { success: false, error: "Failed to reset channel scan progress" };
+ }
+}
+
// ── Account-Channel link actions ──
export async function linkChannel(
@@ -377,7 +423,7 @@ export async function triggerIngestion(
try {
await prisma.$queryRawUnsafe(
`SELECT pg_notify('ingestion_trigger', $1)`,
- accounts.map((a) => a.id).join(",")
+ accounts.map((a: { id: string }) => a.id).join(",")
);
} catch {
// Best-effort
diff --git a/src/app/(app)/telegram/page.tsx b/src/app/(app)/telegram/page.tsx
index 2100d40..55247c5 100644
--- a/src/app/(app)/telegram/page.tsx
+++ b/src/app/(app)/telegram/page.tsx
@@ -25,7 +25,7 @@ export default async function TelegramPage() {
}),
]);
- const serializedHistory = sendHistory.map((r) => ({
+ const serializedHistory = sendHistory.map((r: typeof sendHistory[number]) => ({
id: r.id,
packageName: r.package.fileName,
recipientName: r.telegramLink.telegramName,
diff --git a/src/app/api/ingestion/trigger/route.ts b/src/app/api/ingestion/trigger/route.ts
index 5586b6b..0cb3449 100644
--- a/src/app/api/ingestion/trigger/route.ts
+++ b/src/app/api/ingestion/trigger/route.ts
@@ -50,7 +50,7 @@ export async function POST(request: Request) {
try {
await prisma.$queryRawUnsafe(
`SELECT pg_notify('ingestion_trigger', $1)`,
- accounts.map((a) => a.id).join(",")
+ accounts.map((a: { id: string }) => a.id).join(",")
);
} catch {
// pg_notify is best-effort — worker will pick up on next scheduled cycle anyway
@@ -58,7 +58,7 @@ export async function POST(request: Request) {
return NextResponse.json({
triggered: true,
- accountIds: accounts.map((a) => a.id),
+ accountIds: accounts.map((a: { id: string }) => a.id),
message: `Ingestion triggered for ${accounts.length} account(s)`,
});
}
diff --git a/src/components/shared/delete-dialog.tsx b/src/components/shared/delete-dialog.tsx
index be146f7..4ecee82 100644
--- a/src/components/shared/delete-dialog.tsx
+++ b/src/components/shared/delete-dialog.tsx
@@ -18,6 +18,8 @@ interface DeleteDialogProps {
description?: string;
onConfirm: () => void;
isLoading?: boolean;
+ confirmLabel?: string;
+ confirmLoadingLabel?: string;
}
export function DeleteDialog({
@@ -27,6 +29,8 @@ export function DeleteDialog({
description = "This action cannot be undone.",
onConfirm,
isLoading,
+ confirmLabel = "Delete",
+ confirmLoadingLabel,
}: DeleteDialogProps) {
return (
@@ -42,7 +46,7 @@ export function DeleteDialog({
disabled={isLoading}
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
>
- {isLoading ? "Deleting..." : "Delete"}
+ {isLoading ? (confirmLoadingLabel ?? `${confirmLabel}...`) : confirmLabel}
From 22419106c138b5f4ff50806ad57598cd8fef2448 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Thu, 5 Mar 2026 20:39:26 +0000
Subject: [PATCH 4/4] Fix APP_PORT: make container listen port and healthcheck
follow APP_PORT
Co-authored-by: xCyanGrizzly <53275238+xCyanGrizzly@users.noreply.github.com>
---
.env.example | 2 ++
Dockerfile | 1 +
docker-compose.yml | 5 +++--
3 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/.env.example b/.env.example
index 119b703..bf3752b 100644
--- a/.env.example
+++ b/.env.example
@@ -13,6 +13,8 @@ AUTH_GITHUB_ID=""
AUTH_GITHUB_SECRET=""
# App
+# APP_PORT controls the port the container listens on AND how it is exposed on the host.
+# If you change APP_PORT, also update NEXT_PUBLIC_APP_URL to match.
NEXT_PUBLIC_APP_URL="http://localhost:3000"
APP_PORT=3000
diff --git a/Dockerfile b/Dockerfile
index 7ae07af..f48c101 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -54,6 +54,7 @@ RUN chmod +x docker-entrypoint.sh
USER nextjs
+# Default port — overridden at runtime by the PORT env var (set via docker-compose APP_PORT)
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"
diff --git a/docker-compose.yml b/docker-compose.yml
index 51839f5..285d36a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,7 +5,7 @@ services:
dockerfile: Dockerfile
pull_policy: never
ports:
- - "${APP_PORT:-3000}:3000"
+ - "${APP_PORT:-3000}:${APP_PORT:-3000}"
environment:
- DATABASE_URL=postgresql://${POSTGRES_USER:-dragons}:${POSTGRES_PASSWORD:-stash}@db:5432/${POSTGRES_DB:-dragonsstash}
- AUTH_SECRET=${AUTH_SECRET:?Set AUTH_SECRET in .env}
@@ -18,11 +18,12 @@ services:
- BOT_USERNAME=${BOT_USERNAME:-}
- LOG_LEVEL=${LOG_LEVEL:-info}
- WORKER_INTERVAL_MINUTES=${WORKER_INTERVAL_MINUTES:-60}
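+      # The Next.js standalone server reads PORT at startup, keeping the listen port in sync with the published port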
+ - PORT=${APP_PORT:-3000}
depends_on:
db:
condition: service_healthy
healthcheck:
- test: ["CMD", "wget", "-q", "--spider", "http://localhost:3000/api/health"]
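+    # $$ escapes compose interpolation, so the shell inside the container expands PORT at runtime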
+ test: ["CMD-SHELL", "wget -q --spider http://localhost:$$PORT/api/health || exit 1"]
interval: 30s
timeout: 5s
retries: 3