mirror of
https://github.com/xCyanGrizzly/DragonsStash.git
synced 2026-05-11 06:11:15 +00:00
- Add invokeWithTimeout wrapper for TDLib API calls (2min timeout per call) - Add stuck detection to getChannelMessages: break if from_message_id doesn't advance - Add stuck detection to getTopicMessages: same protection for topic scanning - Add stuck detection to getForumTopicList: break if pagination offsets don't advance - Add max page limit (5000) to all scanning loops to prevent infinite pagination - Add mutex wait timeout (30min) to prevent indefinite blocking when holder hangs - Add cycle timeout (4h default, configurable via WORKER_CYCLE_TIMEOUT_MINUTES) - Fix end-of-page detection to use actual limit value instead of hardcoded 100 Co-authored-by: xCyanGrizzly <53275238+xCyanGrizzly@users.noreply.github.com>
22 lines
785 B
JavaScript
22 lines
785 B
JavaScript
import { createReadStream } from "fs";
|
|
import { createHash } from "crypto";
|
|
import { pipeline } from "stream/promises";
|
|
import { PassThrough } from "stream";
|
|
/**
 * Compute SHA-256 hash of one or more files by streaming them in order.
 *
 * Memory usage: O(1) — reads in 64KB chunks regardless of total size.
 * For multipart archives, pass all parts sorted by part number.
 *
 * @param {string[]} filePaths - Files whose contents are hashed back-to-back, in array order.
 * @returns {Promise<string>} Hex-encoded SHA-256 digest of the concatenated file contents.
 */
export async function hashParts(filePaths) {
  const digest = createHash("sha256");
  for (const path of filePaths) {
    // Readable streams are async-iterable: each iteration yields one Buffer
    // chunk (default 64KB highWaterMark), so memory stays constant no matter
    // how large the files are. An I/O error rejects the iteration, which
    // destroys the stream and propagates to the caller.
    for await (const chunk of createReadStream(path)) {
      digest.update(chunk);
    }
  }
  return digest.digest("hex");
}
|
|
//# sourceMappingURL=hash.js.map
|