Files
dragonsstash/worker/src/archive/split.ts
xCyanGrizzly d6386209be fix: improve download/upload reliability and fix FILE_PARTS_INVALID
- Add downloadStarted flag to prevent false "stopped unexpectedly" errors
  when TDLib emits initial updateFile before download is active
- Add 5-minute stall detection for both downloads and uploads
- Reduce max split part size from 2GiB to 1950MiB to stay under
  Telegram's internal upload part count limits
- Increase timeouts from max(10min, 15min/GB) to max(15min, 20min/GB)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-25 21:40:00 +01:00

85 lines
2.5 KiB
TypeScript

import { createReadStream, createWriteStream } from "fs";
import { stat } from "fs/promises";
import path from "path";
import { pipeline } from "stream/promises";
import { childLogger } from "../util/logger.js";
// Module-scoped logger; "split" tags all log lines from this file's helpers.
const log = childLogger("split");
/**
 * Maximum size of one split part: 1950 MiB.
 * Deliberately below Telegram's 2 GB upload ceiling — at a full 2 GiB,
 * TDLib's internal 512 KB chunking can exceed Telegram's 4000-part
 * threshold and fail with FILE_PARTS_INVALID.
 */
const MAX_PART_SIZE = 1950n * 1024n ** 2n;
/**
 * Split a file into parts no larger than MAX_PART_SIZE using raw byte
 * ranges. Returns the written part paths in order; when the file already
 * fits in a single part, the original path is returned unchanged.
 */
export async function byteLevelSplit(filePath: string): Promise<string[]> {
  const { size } = await stat(filePath);
  const fileSize = BigInt(size);
  if (fileSize <= MAX_PART_SIZE) {
    return [filePath];
  }
  const dir = path.dirname(filePath);
  const baseName = path.basename(filePath);
  const chunkBytes = Number(MAX_PART_SIZE);
  const totalBytes = Number(fileSize);
  const totalParts = Math.ceil(totalBytes / chunkBytes);
  log.info({ filePath, fileSize: totalBytes, totalParts }, "Splitting file");
  const parts: string[] = [];
  let offset = 0;
  while (parts.length < totalParts) {
    // Parts are named <base>.001, <base>.002, … so they sort lexicographically.
    const suffix = String(parts.length + 1).padStart(3, "0");
    const partPath = path.join(dir, `${baseName}.${suffix}`);
    // Inclusive byte range; the final part is clamped to the end of the file.
    const end = Math.min(offset + chunkBytes - 1, totalBytes - 1);
    await pipeline(
      createReadStream(filePath, { start: offset, end }),
      createWriteStream(partPath)
    );
    parts.push(partPath);
    offset += chunkBytes;
  }
  log.info({ filePath, parts: parts.length }, "File split complete");
  return parts;
}
/**
 * Concatenate multiple files into a single output file by streaming
 * each input sequentially. Used for repacking multipart archives
 * that have oversized parts (>2GB) before re-splitting.
 *
 * @param inputPaths - Input files, appended to the output in array order.
 * @param outputPath - Destination file (created or truncated).
 * @throws Propagates the first stream error. On failure the output stream
 *         is destroyed so its file descriptor is not leaked.
 */
export async function concatenateFiles(
  inputPaths: string[],
  outputPath: string
): Promise<void> {
  const out = createWriteStream(outputPath);
  try {
    for (let i = 0; i < inputPaths.length; i++) {
      log.info(
        { part: i + 1, total: inputPaths.length, file: path.basename(inputPaths[i]) },
        "Concatenating part"
      );
      // end: false keeps `out` open so the next input can append to it.
      await pipeline(createReadStream(inputPaths[i]), out, { end: false });
    }
    // Flush and close the output stream. Register the error handler
    // BEFORE calling end() so a late write/close error still rejects.
    await new Promise<void>((resolve, reject) => {
      out.on("error", reject);
      out.end(() => resolve());
    });
  } catch (err) {
    // A pipeline rejection mid-loop would otherwise leave `out` open;
    // destroy it so the fd and partial file are released before rethrowing.
    out.destroy();
    throw err;
  }
  const stats = await stat(outputPath);
  log.info(
    { outputPath, totalBytes: stats.size, parts: inputPaths.length },
    "Concatenation complete"
  );
}