// WhatsApp → Google Drive media uploader bot (MCB).
const fs = require("fs");
|
|
const path = require("path");
|
|
const os = require("os");
|
|
const readline = require("readline");
|
|
const mime = require("mime-types");
|
|
const qrcode = require("qrcode-terminal");
|
|
require("dotenv").config();
|
|
|
|
const { Client, LocalAuth } = require("whatsapp-web.js");
|
|
const { google } = require("googleapis");
|
|
|
|
// ---------- Configuration (env-overridable, with defaults) ----------

// Google OAuth client secret file and cached token file.
const CREDENTIALS_PATH = process.env.GOOGLE_CREDENTIALS || "./credentials.json";
const TOKEN_PATH = process.env.GOOGLE_TOKEN || "./token.json";
// Name of the top-level Drive folder that holds all per-client folders.
const DEFAULT_ROOT_FOLDER = process.env.DEFAULT_ROOT_FOLDER || "Whatsapp-Drive";
// LocalAuth client id for the persisted WhatsApp session.
const WA_CLIENT_ID = process.env.WA_CLIENT_ID || "MCB-bot";
// Minutes of inactivity after which an open batch expires (see getBatch).
const BATCH_TTL_MIN = Number(process.env.BATCH_TTL_MIN || "30");
// Idle-restart tuning — consumed by watchdog code outside this excerpt.
const IDLE_RESTART_CHECK_MIN = Number(process.env.IDLE_RESTART_CHECK_MIN || "60");
const IDLE_RESTART_MIN = Number(process.env.IDLE_RESTART_MIN || "240");
const IDLE_RESTART_MAX_PER_DAY = Number(process.env.IDLE_RESTART_MAX_PER_DAY || "6");
// Media download timeout/retry policy and maximum accepted media size.
const DOWNLOAD_TIMEOUT_MS = Number(process.env.DOWNLOAD_TIMEOUT_MS || "900000");
const DOWNLOAD_MAX_RETRIES = Number(process.env.DOWNLOAD_MAX_RETRIES || "2");
const DOWNLOAD_RETRY_DELAY_MS = Number(process.env.DOWNLOAD_RETRY_DELAY_MS || "5000");
const MAX_MEDIA_MB = Number(process.env.MAX_MEDIA_MB || "95");
// WhatsApp client initialization retry policy and browser options.
const WA_INIT_RETRIES = Number(process.env.WA_INIT_RETRIES || "3");
const WA_INIT_RETRY_DELAY_MS = Number(process.env.WA_INIT_RETRY_DELAY_MS || "5000");
const WA_HEADLESS = process.env.WA_HEADLESS !== "false";
const WA_EXECUTABLE_PATH = process.env.WA_EXECUTABLE_PATH || "";
// When true (default) and token.json is missing, run the interactive
// OAuth flow at startup instead of failing (see ensureGoogleToken).
const AUTO_AUTH_ON_MISSING_TOKEN = process.env.AUTO_AUTH_ON_MISSING_TOKEN !== "false";
// Chat command prefix.
const CMD_PREFIX = "mcb";
const IS_WINDOWS = process.platform === "win32";
// Chromium flags: the sandbox-disabling flags are only applied off-Windows.
const PUPPETEER_ARGS = IS_WINDOWS
  ? ["--disable-gpu", "--disable-extensions"]
  : ["--no-sandbox", "--disable-setuid-sandbox", "--disable-dev-shm-usage", "--disable-gpu", "--disable-extensions", "--no-zygote"];
// Logging / analytics knobs.
const LOG_DIR = process.env.LOG_DIR || "./logs";
const HEARTBEAT_INTERVAL_SEC = Number(process.env.HEARTBEAT_INTERVAL_SEC || "60");
// Cap on a user's persistent retry-queue length.
const RETRY_MAX_ITEMS = Number(process.env.RETRY_MAX_ITEMS || "200");
// When true (default), uploads are filed under date/category subfolders.
const SMART_SUBFOLDERS = process.env.SMART_SUBFOLDERS !== "false";
// Failure-rate alerting: rolling window size (files), threshold (%),
// and minimum minutes between owner alerts.
const FAIL_ALERT_WINDOW = Number(process.env.FAIL_ALERT_WINDOW || "10");
const FAIL_ALERT_THRESHOLD_PCT = Number(process.env.FAIL_ALERT_THRESHOLD_PCT || "20");
const FAIL_ALERT_COOLDOWN_MIN = Number(process.env.FAIL_ALERT_COOLDOWN_MIN || "30");
// Per-batch metadata file stored inside each batch temp directory.
const BATCH_META_FILE = ".batch.json";

const STATE_FILE = "./state.json"; // stores per-user selected project folderId
// Base temp directory for in-flight batch downloads.
const TMP_BASE = path.join(os.tmpdir(), "MCB-batches");
const LOG_FILE = "./app.log";
const OWNER_NUMBER = process.env.OWNER_NUMBER || ""; // e.g. 15551234567 (no +)
// In-memory caches: resolved root folder id and per-chat client folder ids.
const ROOT_CACHE = {
  rootFolderId: null,
  chatFolderByChatId: new Map(),
};
// Lazily-initialized Drive API client (see getDriveClient).
let DRIVE_CLIENT = null;
|
|
|
|
// Returns the UTC calendar date (YYYY-MM-DD) for a timestamp,
// defaulting to the current time.
function getDateKey(ts = Date.now()) {
  return new Date(ts).toISOString().slice(0, 10);
}
|
|
|
|
// Normalizes a persisted state object in place, creating any missing
// containers (users, analytics buckets, alert bookkeeping) with safe
// defaults. Returns the normalized object — a fresh one when the input
// is not a plain object.
function normalizeState(state) {
  const base = state && typeof state === "object" ? state : {};
  const missing = (v) => !v || typeof v !== "object";

  if (missing(base.users)) base.users = {};
  if (missing(base.analytics)) base.analytics = {};
  if (missing(base.analytics.daily)) base.analytics.daily = {};
  if (missing(base.analytics.lifetime)) {
    base.analytics.lifetime = {
      batchesStarted: 0,
      batchesCompleted: 0,
      filesQueued: 0,
      filesUploaded: 0,
      filesFailed: 0,
      uploadBytes: 0,
      completedBatchFilesTotal: 0,
    };
  }
  if (!Array.isArray(base.analytics.uploadOutcomes)) base.analytics.uploadOutcomes = [];
  if (missing(base.analytics.alerts)) {
    base.analytics.alerts = {
      lastFailureAlertAt: 0,
      lastFailureRate: 0,
    };
  }
  return base;
}
|
|
|
|
// Returns the per-chat user record, creating it with defaults on first
// access and repairing a corrupted retryQueue (non-array) either way.
function ensureUserState(state, chatId) {
  let user = state.users[chatId];
  if (!user) {
    user = {
      projectName: null,
      folderId: null,
      lastBatchFolderName: null,
      lastBatchFolderId: null,
      retryQueue: [],
    };
    state.users[chatId] = user;
  }
  if (!Array.isArray(user.retryQueue)) user.retryQueue = [];
  return user;
}
|
|
|
|
// Returns the stats bucket for a date key (default: today, UTC),
// creating a zeroed bucket when none exists or the stored value is not
// a plain object.
function getDailyStats(state, dateKey = getDateKey()) {
  const daily = state.analytics.daily;
  const existing = daily[dateKey];
  if (!existing || typeof existing !== "object") {
    daily[dateKey] = {
      uniqueUsers: {},
      messages: 0,
      batchesStarted: 0,
      batchesCompleted: 0,
      filesQueued: 0,
      filesUploaded: 0,
      filesFailed: 0,
      uploadBytes: 0,
      completedBatchFilesTotal: 0,
    };
  }
  return daily[dateKey];
}
|
|
|
|
// Flags a chat as active today (feeds the daily-active-user count).
function markUserActive(state, chatId) {
  getDailyStats(state).uniqueUsers[chatId] = 1;
}
|
|
|
|
// Adds numeric deltas to both today's bucket and the lifetime totals.
// Non-numeric deltas are skipped; keys a bucket does not track as a
// number are ignored for that bucket.
function incrementStats(state, updates) {
  const buckets = [getDailyStats(state), state.analytics.lifetime];
  for (const [key, delta] of Object.entries(updates || {})) {
    if (typeof delta !== "number") continue;
    for (const bucket of buckets) {
      if (typeof bucket[key] === "number") bucket[key] += delta;
    }
  }
}
|
|
|
|
// Reads and parses a JSON file, returning `fallback` on any error
// (missing file, unreadable, malformed JSON).
function loadJson(file, fallback) {
  try {
    const raw = fs.readFileSync(file, "utf8");
    return JSON.parse(raw);
  } catch {
    return fallback;
  }
}
|
|
|
|
// Serializes `obj` as pretty-printed JSON and writes it synchronously.
function saveJson(file, obj) {
  const payload = JSON.stringify(obj, null, 2);
  fs.writeFileSync(file, payload);
}
|
|
|
|
// Writes one log record to three sinks: stdout, the flat app.log file,
// and a per-day JSONL file under LOG_DIR. File-sink failures are
// swallowed so logging can never crash the bot.
function logLine(level, message, meta) {
  const ts = new Date().toISOString();
  const prefix = `[${ts}] [${level}] ${message}`;
  const line = meta ? `${prefix} ${JSON.stringify(meta)}` : prefix;
  console.log(line);
  try {
    fs.mkdirSync(LOG_DIR, { recursive: true });
    fs.appendFileSync(LOG_FILE, `${line}\n`);
    const structured = JSON.stringify({ ts, level, message, ...(meta || {}) });
    fs.appendFileSync(path.join(LOG_DIR, `app-${getDateKey()}.jsonl`), `${structured}\n`);
  } catch { }
}
|
|
|
|
// Severity-level convenience wrappers over logLine.

function logInfo(message, meta) {
  logLine("INFO", message, meta);
}

function logWarn(message, meta) {
  logLine("WARN", message, meta);
}

function logError(message, meta) {
  logLine("ERROR", message, meta);
}
|
|
|
|
// WhatsApp chat id for the configured owner number, or null when no
// OWNER_NUMBER is set.
function getOwnerChatId() {
  return OWNER_NUMBER ? `${OWNER_NUMBER}@c.us` : null;
}
|
|
|
|
// Loads the OAuth client config from credentials.json, accepting either
// the "installed" (desktop) or "web" client shape. Throws when neither
// is present or the file is missing/invalid.
function loadCredentials() {
  const json = JSON.parse(fs.readFileSync(CREDENTIALS_PATH, "utf8"));
  const cfg = json.installed || json.web;
  if (!cfg) throw new Error("Invalid credentials.json. Expected installed/web client.");
  return cfg;
}
|
|
|
|
// Returns a memoized Google Drive v3 client authenticated with the
// stored OAuth token. Subscribes to token refreshes and merges the new
// tokens back into token.json so refreshed credentials survive restarts.
// Throws when token.json is missing (run `node auth.js` first).
function getDriveClient() {
  if (DRIVE_CLIENT) return DRIVE_CLIENT;
  const cfg = loadCredentials();
  const tokens = loadJson(TOKEN_PATH, null);
  if (!tokens) throw new Error("Missing token.json. Run `node auth.js` first.");

  const oAuth2Client = new google.auth.OAuth2(
    cfg.client_id,
    cfg.client_secret,
    (cfg.redirect_uris || [])[0]
  );

  oAuth2Client.setCredentials(tokens);

  // Persist refreshed tokens merged over the current file contents so
  // an existing refresh_token is not lost when only an access token
  // is renewed.
  oAuth2Client.on("tokens", (newTokens) => {
    const current = loadJson(TOKEN_PATH, {});
    saveJson(TOKEN_PATH, { ...current, ...newTokens });
  });

  DRIVE_CLIENT = google.drive({ version: "v3", auth: oAuth2Client });
  return DRIVE_CLIENT;
}
|
|
|
|
// True when token.json exists and holds either a refresh or access token.
function hasUsableToken() {
  const tokens = loadJson(TOKEN_PATH, null);
  if (!tokens) return false;
  return Boolean(tokens.refresh_token || tokens.access_token);
}
|
|
|
|
// Prompts on stdin/stdout and resolves with the trimmed answer
// (empty string when nothing was entered). The readline interface is
// closed before resolving so the process can exit cleanly.
function askInput(question) {
  return new Promise((resolve) => {
    const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
    rl.question(question, (answer) => {
      rl.close();
      resolve(String(answer || "").trim());
    });
  });
}
|
|
|
|
// Ensures a usable Google OAuth token exists on disk. When token.json
// is missing: fails fast if auto-auth is disabled or stdin is not a
// TTY; otherwise walks the user through the out-of-band OAuth code
// flow and saves the resulting tokens to TOKEN_PATH.
async function ensureGoogleToken() {
  if (hasUsableToken()) return;

  if (!AUTO_AUTH_ON_MISSING_TOKEN) {
    throw new Error("Missing token.json. Run `node auth.js` first.");
  }

  // The interactive flow needs a terminal to paste the code into.
  if (!process.stdin.isTTY) {
    throw new Error("Missing token.json and no interactive terminal. Run `node auth.js` manually.");
  }

  const cfg = loadCredentials();
  const oAuth2Client = new google.auth.OAuth2(
    cfg.client_id,
    cfg.client_secret,
    (cfg.redirect_uris || [])[0]
  );

  // drive.file scope: access limited to files this app creates.
  // prompt=consent + offline forces issuance of a refresh_token.
  const authUrl = oAuth2Client.generateAuthUrl({
    access_type: "offline",
    scope: ["https://www.googleapis.com/auth/drive.file"],
    prompt: "consent",
  });

  logWarn("token.json not found. Starting interactive Google OAuth flow.");
  console.log("\nAuthorize this app by visiting this URL:\n", authUrl, "\n");
  const code = await askInput("Paste the code from Google here: ");
  if (!code) throw new Error("No authorization code entered.");

  const { tokens } = await oAuth2Client.getToken(code);
  saveJson(TOKEN_PATH, tokens);
  logInfo("Google token saved", { tokenPath: path.resolve(TOKEN_PATH) });
}
|
|
|
|
// Finds a Drive folder by exact name (optionally under parentId) or
// creates it when absent; returns the folder id. Single quotes in the
// name are escaped as \' per the Drive API search-query syntax.
async function findOrCreateFolder(drive, folderName, parentId = null) {
  const qParts = [
    `mimeType='application/vnd.google-apps.folder'`,
    `name='${folderName.replace(/'/g, "\\'")}'`,
    `trashed=false`,
  ];
  if (parentId) qParts.push(`'${parentId}' in parents`);

  const res = await drive.files.list({
    q: qParts.join(" and "),
    fields: "files(id, name)",
    spaces: "drive",
  });

  // Drive allows duplicate-named folders; reuse the first match.
  if (res.data.files && res.data.files.length > 0) return res.data.files[0].id;

  const createRes = await drive.files.create({
    requestBody: {
      name: folderName,
      mimeType: "application/vnd.google-apps.folder",
      parents: parentId ? [parentId] : undefined,
    },
    fields: "id",
  });

  return createRes.data.id;
}
|
|
|
|
// Resolves (and memoizes) the Drive id of the configured root folder.
async function getOrCreateRootFolderId(drive) {
  if (!ROOT_CACHE.rootFolderId) {
    ROOT_CACHE.rootFolderId = await findOrCreateFolder(drive, DEFAULT_ROOT_FOLDER);
  }
  return ROOT_CACHE.rootFolderId;
}
|
|
|
|
// Resolves (and caches per chat id) the Drive folder dedicated to this
// chat, creating it under the root folder on first use.
async function getOrCreateClientFolder(drive, msg) {
  const chatId = msg.from;
  const cached = ROOT_CACHE.chatFolderByChatId.get(chatId);
  if (cached) return cached;

  const rootFolderId = await getOrCreateRootFolderId(drive);
  const name = await getClientFolderName(msg);
  const folderId = await findOrCreateFolder(drive, name, rootFolderId);
  ROOT_CACHE.chatFolderByChatId.set(chatId, folderId);
  return folderId;
}
|
|
|
|
// Picks a human-friendly folder name for the chat: the saved contact
// name (or push name) when available, otherwise the bare phone number
// taken from the chat id. Contact-lookup failures fall back silently.
async function getClientFolderName(msg) {
  try {
    const contact = await msg.getContact();
    const label = (contact?.name || contact?.pushname || "").trim();
    if (label) return label;
  } catch { }
  return String(msg.from).split("@")[0];
}
|
|
|
|
// Grants "anyone with the link" read access on a Drive folder.
async function setFolderPublicView(drive, folderId) {
  const requestBody = { type: "anyone", role: "reader" };
  await drive.permissions.create({ fileId: folderId, requestBody });
}
|
|
|
|
async function uploadFileStreamToDrive(drive, { filePath, filename, parentFolderId, mimeType }) {
|
|
const res = await drive.files.create({
|
|
requestBody: {
|
|
name: filename,
|
|
parents: [parentFolderId],
|
|
},
|
|
media: {
|
|
mimeType: mimeType || "application/octet-stream",
|
|
body: fs.createReadStream(filePath),
|
|
},
|
|
fields: "id, webViewLink, name",
|
|
});
|
|
|
|
return res.data;
|
|
}
|
|
|
|
// Resolves after the given number of milliseconds.
function sleep(ms) {
  return new Promise((res) => setTimeout(res, ms));
}
|
|
|
|
// Uploads a file with linear-backoff retries (1s, 2s, ...). Logs each
// failure and rethrows the last error once maxRetries attempts are
// exhausted. `file` is only used for log context.
async function uploadWithRetry(drive, file, opts, maxRetries = 3) {
  for (let attempt = 1; ; attempt++) {
    try {
      return await uploadFileStreamToDrive(drive, opts);
    } catch (err) {
      logWarn("Upload failed", { attempt, maxRetries, error: err?.message, file: file?.filename });
      if (attempt >= maxRetries) throw err;
      await sleep(1000 * attempt);
    }
  }
}
|
|
|
|
// ---------- Batch handling (disk-based) ----------

// Active batches keyed by chat id. File payloads live on disk under
// TMP_BASE; only descriptors/metadata are held in memory here.
const BATCH = new Map();
// Batch idle expiry, derived from the configured minutes.
const BATCH_TTL_MS = BATCH_TTL_MIN * 60 * 1000;

// Current epoch milliseconds (small indirection for readability).
function now() {
  return Date.now();
}
|
|
|
|
// Returns the chat's active batch, or null. A batch idle for longer
// than BATCH_TTL_MS is expired (deleted from disk and memory) first.
function getBatch(chatId) {
  const batch = BATCH.get(chatId);
  if (!batch) return null;

  const idleMs = now() - batch.lastActivityAt;
  if (idleMs > BATCH_TTL_MS) {
    clearBatch(chatId);
    return null;
  }
  return batch;
}
|
|
|
|
// Creates a fresh batch for a chat (discarding any previous one) with a
// dedicated temp directory for downloaded files, then persists its
// metadata so it can be restored after a process restart.
function startBatch(chatId, folderId, folderName) {
  clearBatch(chatId);
  const dir = path.join(TMP_BASE, sanitizeId(chatId), sanitizeId(folderName));
  fs.mkdirSync(dir, { recursive: true });
  BATCH.set(chatId, {
    startedAt: now(),
    lastActivityAt: now(),
    dir, // on-disk location of queued files
    files: [], // queued file descriptors
    fileSignatures: new Set(), // dedupe keys (see buildFileSignature)
    failedFiles: [],
    activeDownloads: 0, // media downloads currently in flight
    doneRequested: false, // user asked to finish while downloads pending
    uploadInProgress: false,
    pendingDoneMsg: null, // status message edited in place while waiting
    pendingDoneTimer: null, // interval driving those status refreshes
    doneTriggerMsg: null,
    folderId, // destination Drive folder id
    folderName,
  });
  persistBatch(chatId);
}
|
|
|
|
// Refreshes a batch's idle timer and persists the new timestamp.
// No-op when the chat has no batch.
function touchBatch(chatId) {
  const batch = BATCH.get(chatId);
  if (!batch) return;
  batch.lastActivityAt = now();
  persistBatch(chatId);
}
|
|
|
|
// Tears down a chat's batch: stops any pending-done status timer,
// removes its temp directory from disk, and drops it from BATCH.
// Safe to call when no batch exists.
function clearBatch(chatId) {
  const batch = BATCH.get(chatId);
  if (batch) {
    if (batch.pendingDoneTimer) clearInterval(batch.pendingDoneTimer);
    if (batch.dir && fs.existsSync(batch.dir)) {
      fs.rmSync(batch.dir, { recursive: true, force: true });
    }
  }
  BATCH.delete(chatId);
}
|
|
|
|
// Collapses anything outside [A-Za-z0-9_-] to "_" so chat ids and
// folder names are safe as directory names.
function sanitizeId(id) {
  const text = String(id);
  return text.replace(/[^a-zA-Z0-9_-]/g, "_");
}
|
|
|
|
// Replaces characters illegal on common filesystems (plus control
// chars) with "_". Falls back to a timestamped placeholder when
// nothing printable remains after trimming.
function sanitizeFileName(name) {
  const cleaned = name.replace(/[<>:"/\\|?*\x00-\x1F]/g, "_").trim();
  if (cleaned) return cleaned;
  return `file_${Date.now()}`;
}
|
|
|
|
// Builds a collision-resistant upload name: the sanitized original base
// name (or file_<index> when the media has no name) plus a compact ISO
// timestamp, keeping the original extension or deriving one from the
// mimetype.
function buildSafeFileName(media, index) {
  const stamp = new Date().toISOString().replace(/[:.]/g, "");
  const mimeExt = mime.extension(media.mimetype) || "";
  const fallbackExt = mimeExt ? `.${mimeExt}` : "";

  const original = media.filename ? path.basename(media.filename) : "";
  if (!original) {
    return `file_${index}_${stamp}${fallbackExt}`;
  }

  const parsed = path.parse(original);
  const ext = parsed.ext || fallbackExt;
  return `${sanitizeFileName(parsed.name)}_${stamp}${ext}`;
}
|
|
|
|
// Renders a fixed-width emoji progress bar. `done`/`total` may be any
// non-negative magnitudes (counts or bytes). The ratio is clamped to
// [0, 1] so overshoot (done > total, possible when callers feed live
// byte counts against an estimated total) cannot overflow the width.
function formatProgressBar(done, total, width = 12) {
  const raw = total ? done / total : 0;
  const ratio = Math.min(1, Math.max(0, raw));
  const filled = Math.round(ratio * width);
  const empty = Math.max(0, width - filled);
  return `${"🟩".repeat(filled)}${"⬜".repeat(empty)}`;
}
|
|
|
|
// Byte count rendered as megabytes with two decimals (string).
function bytesToMb(bytes) {
  const MB = 1024 * 1024;
  return (bytes / MB).toFixed(2);
}
|
|
|
|
// Formats a millisecond duration as "Xm Ys" (floored to whole seconds).
function formatElapsedSec(ms) {
  const totalSec = Math.floor(ms / 1000);
  return `${Math.floor(totalSec / 60)}m ${totalSec % 60}s`;
}
|
|
|
|
// Human-readable size: B below 1 KiB, KB below 1 MiB, otherwise MB
// with two decimals (the former bytesToMb helper is inlined here).
function bytesToDisplay(bytes) {
  const KB = 1024;
  const MB = KB * KB;
  if (bytes < KB) return `${bytes} B`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} KB`;
  return `${(bytes / MB).toFixed(2)} MB`;
}
|
|
|
|
// Cheap dedupe key for incoming media: lowercased name + byte size.
function buildFileSignature(sourceName, sizeBytes) {
  const name = String(sourceName || "").toLowerCase();
  const size = Number(sizeBytes || 0);
  return `${name}|${size}`;
}
|
|
|
|
// Stable, human-readable source name for signature/dedupe purposes:
// the sanitized original base name, or "unnamed.<ext>" derived from
// the mimetype (".bin" when unknown).
function getSourceNameFromMedia(media) {
  const rawName = media?.filename;
  if (rawName) {
    const base = path.basename(rawName);
    if (base) return sanitizeFileName(base);
  }
  const ext = mime.extension(media?.mimetype || "") || "bin";
  return `unnamed.${ext}`;
}
|
|
|
|
// Per-chat directory holding failed files awaiting retry. Lives under
// TMP_BASE/retry, outside any batch directory, so batch cleanup cannot
// remove it.
function getRetryDir(chatId) {
  return path.join(TMP_BASE, "retry", sanitizeId(chatId));
}
|
|
|
|
// Moves failed upload files from the batch directory into the per-chat
// retry directory (so clearBatch's directory removal cannot delete
// them) and returns descriptors suitable for the user's persistent
// retry queue. Files that cannot be moved are logged and skipped.
function enqueueRetryFiles(chatId, folderName, failedFiles) {
  if (!Array.isArray(failedFiles) || failedFiles.length === 0) return [];
  const retryDir = getRetryDir(chatId);
  fs.mkdirSync(retryDir, { recursive: true });
  const queued = [];

  for (const file of failedFiles) {
    try {
      // Timestamp + random suffix prevents name collisions across
      // batches and repeated retries of the same file.
      const suffix = `${Date.now()}_${Math.floor(Math.random() * 10000)}`;
      const destName = `${path.parse(file.filename).name}_${suffix}${path.extname(file.filename)}`;
      const destPath = path.join(retryDir, sanitizeFileName(destName));
      fs.renameSync(file.filePath, destPath);
      queued.push({
        filePath: destPath,
        filename: file.filename, // keep original upload name for Drive
        mimeType: file.mimeType,
        sizeBytes: Number(file.sizeBytes || 0),
        folderId: file.parentFolderId,
        folderName,
        failedAt: new Date().toISOString(),
        sourceName: file.sourceName || file.filename,
        receivedAt: Number(file.receivedAt || Date.now()),
      });
    } catch (err) {
      logWarn("Failed to enqueue retry file", { chatId, file: file?.filename, error: err?.message });
    }
  }

  return queued;
}
|
|
|
|
// Builds the owner-facing admin summary combining today's stats with
// lifetime totals (WhatsApp *bold* markdown).
function summarizeAdminStats(state) {
  const today = getDailyStats(state);
  const lifetime = state.analytics.lifetime;
  const dailyActiveUsers = Object.keys(today.uniqueUsers || {}).length;
  // Average files per completed batch, guarded against divide-by-zero.
  const avgBatchToday = today.batchesCompleted > 0
    ? (today.completedBatchFilesTotal / today.batchesCompleted).toFixed(2)
    : "0.00";
  const avgBatchLifetime = lifetime.batchesCompleted > 0
    ? (lifetime.completedBatchFilesTotal / lifetime.batchesCompleted).toFixed(2)
    : "0.00";

  return [
    `📊 Admin stats (${getDateKey()})`,
    `Daily active users: *${dailyActiveUsers}*`,
    `Files uploaded today: *${today.filesUploaded}*`,
    `Failures today: *${today.filesFailed}*`,
    `Avg batch size today: *${avgBatchToday}* files`,
    "",
    `Lifetime uploaded: *${lifetime.filesUploaded}*`,
    `Lifetime failures: *${lifetime.filesFailed}*`,
    `Avg batch size lifetime: *${avgBatchLifetime}* files`,
  ].join("\n");
}
|
|
|
|
// Admin stats summary for one specific YYYY-MM-DD day key, or a
// "no data" message when that day has no bucket.
function summarizeAdminStatsForDate(state, dateKey) {
  const day = state?.analytics?.daily?.[dateKey];
  if (!day) return `📊 Admin stats (${dateKey})\nNo data found for this date.`;

  const dailyActiveUsers = Object.keys(day.uniqueUsers || {}).length;
  const completed = day.batchesCompleted;
  const avgBatch = completed > 0
    ? (day.completedBatchFilesTotal / completed).toFixed(2)
    : "0.00";

  const lines = [
    `📊 Admin stats (${dateKey})`,
    `Daily active users: *${dailyActiveUsers}*`,
    `Files uploaded: *${day.filesUploaded || 0}*`,
    `Failures: *${day.filesFailed || 0}*`,
    `Avg batch size: *${avgBatch}* files`,
  ];
  return lines.join("\n");
}
|
|
|
|
// Aggregated admin stats over the trailing N days (clamped to 1..365),
// counting today as day 1. Days with no recorded bucket are skipped.
function summarizeAdminStatsForLastDays(state, days) {
  const safeDays = Math.max(1, Math.min(365, Number(days || 1)));
  const daily = state?.analytics?.daily || {};
  const activeUsers = new Set();
  const totals = {
    filesUploaded: 0,
    filesFailed: 0,
    batchesCompleted: 0,
    completedBatchFilesTotal: 0,
  };
  const today = new Date();

  for (let i = 0; i < safeDays; i++) {
    const d = new Date(today);
    d.setUTCDate(today.getUTCDate() - i);
    const day = daily[d.toISOString().slice(0, 10)];
    if (!day) continue;
    for (const u of Object.keys(day.uniqueUsers || {})) activeUsers.add(u);
    for (const key of Object.keys(totals)) totals[key] += Number(day[key] || 0);
  }

  const avgBatch = totals.batchesCompleted > 0
    ? (totals.completedBatchFilesTotal / totals.batchesCompleted).toFixed(2)
    : "0.00";

  return [
    `📊 Admin stats (last ${safeDays} day${safeDays > 1 ? "s" : ""})`,
    `Active users: *${activeUsers.size}*`,
    `Files uploaded: *${totals.filesUploaded}*`,
    `Failures: *${totals.filesFailed}*`,
    `Avg batch size: *${avgBatch}* files`,
  ].join("\n");
}
|
|
|
|
// Operational health snapshot for the owner: process uptime/memory,
// message recency, live batch/download/upload counts, retry backlog,
// and the recent upload failure rate used by alerting.
// NOTE(review): reads a module-level `lastMessageAt` timestamp that is
// defined elsewhere in this file — confirm it is set before first use.
function summarizeAdminHealth(state) {
  const uptimeSec = Math.round(process.uptime());
  const lastMsgAgeSec = Math.round((Date.now() - lastMessageAt) / 1000);
  const memRssMb = Math.round(process.memoryUsage().rss / (1024 * 1024));
  const activeBatches = BATCH.size;

  // Sum the persistent retry queues across all users.
  const users = state?.users || {};
  let retryQueueTotal = 0;
  for (const user of Object.values(users)) {
    retryQueueTotal += Array.isArray(user?.retryQueue) ? user.retryQueue.length : 0;
  }

  // Live in-flight work across all batches.
  let activeDownloads = 0;
  let uploadsInProgress = 0;
  for (const b of BATCH.values()) {
    activeDownloads += Number(b?.activeDownloads || 0);
    uploadsInProgress += b?.uploadInProgress ? 1 : 0;
  }

  // Failure rate over the most recent FAIL_ALERT_WINDOW outcomes
  // (1 = success, 0 = failure; see recordUploadOutcomes).
  const outcomes = state?.analytics?.uploadOutcomes || [];
  const windowSize = Math.max(1, FAIL_ALERT_WINDOW);
  const recent = outcomes.slice(-windowSize);
  const recentFails = recent.filter((x) => x === 0).length;
  const recentFailPct = recent.length ? ((recentFails / recent.length) * 100).toFixed(1) : "0.0";
  const alerts = state?.analytics?.alerts || {};
  const lastAlertAt = alerts.lastFailureAlertAt
    ? new Date(alerts.lastFailureAlertAt).toISOString()
    : "never";

  return [
    "🩺 Admin health",
    `Uptime: *${uptimeSec}s*`,
    `Last message age: *${lastMsgAgeSec}s*`,
    `Memory RSS: *${memRssMb} MB*`,
    `Active batches: *${activeBatches}*`,
    `Active downloads: *${activeDownloads}*`,
    `Uploads in progress: *${uploadsInProgress}*`,
    `Retry queue total: *${retryQueueTotal}*`,
    `Recent failure rate (${recent.length || 0}/${windowSize}): *${recentFailPct}%*`,
    `Alert threshold: *${FAIL_ALERT_THRESHOLD_PCT}%*`,
    `Last failure alert: *${lastAlertAt}*`,
  ].join("\n");
}
|
|
|
|
// Path of the metadata file stored inside a batch's temp directory.
function getBatchMetaPath(dir) {
  return path.join(dir, BATCH_META_FILE);
}
|
|
|
|
// Snapshots a batch's metadata (serializable file descriptors plus
// timestamps and Drive destination) to .batch.json inside its temp
// directory so restoreBatchesFromDisk can rebuild it after a restart.
// Failures are logged, never thrown.
function persistBatch(chatId) {
  const batch = BATCH.get(chatId);
  if (!batch || !batch.dir) return;
  try {
    // Keep only serializable fields; runtime-only state (timers,
    // in-flight counters, message handles) is intentionally dropped.
    const files = (batch.files || []).map((f) => ({
      filePath: f.filePath,
      filename: f.filename,
      mimeType: f.mimeType,
      sizeBytes: Number(f.sizeBytes || 0),
      sourceName: f.sourceName || "",
      signature: f.signature || "",
      receivedAt: Number(f.receivedAt || Date.now()),
    }));
    const meta = {
      chatId,
      startedAt: Number(batch.startedAt || Date.now()),
      lastActivityAt: Number(batch.lastActivityAt || Date.now()),
      dir: batch.dir,
      folderId: batch.folderId,
      folderName: batch.folderName,
      files,
    };
    fs.writeFileSync(getBatchMetaPath(batch.dir), JSON.stringify(meta, null, 2));
  } catch (err) {
    logWarn("Failed to persist batch meta", { chatId, error: err?.message });
  }
}
|
|
|
|
// Buckets a file into an event subfolder: financial keywords in the
// name win, then image/video mime types, then a generic "Documents".
// The two-letter keyword "po" (purchase order) is matched only on
// token boundaries — the old plain-substring match misfiled ordinary
// names containing "po" (photo, report, export) into "Invoices".
function classifyEventFolder(file) {
  const name = String(file?.sourceName || file?.filename || "").toLowerCase();
  const mimeType = String(file?.mimeType || "").toLowerCase();
  const looksFinancial =
    /(invoice|receipt|bill|payment|estimate)/.test(name) ||
    /(^|[^a-z0-9])po([^a-z0-9]|$)/.test(name);
  if (looksFinancial) return "Invoices";
  if (mimeType.startsWith("image/") || mimeType.startsWith("video/")) return "Photos";
  return "Documents";
}
|
|
|
|
// Date-based subfolder name (YYYY-MM-DD, UTC) from the file's receive
// time, falling back to "now" when absent.
function getDateFolderForFile(file) {
  return getDateKey(Number(file?.receivedAt || Date.now()));
}
|
|
|
|
// Resolves the Drive folder a file should upload into. With smart
// subfolders enabled, nests <root>/<YYYY-MM-DD>/<Invoices|Photos|Documents>,
// memoizing created folder ids in `folderCache` to avoid repeat API calls.
async function resolveUploadParentFolderId(drive, rootFolderId, file, folderCache) {
  if (!SMART_SUBFOLDERS) return rootFolderId;

  const dateFolder = getDateFolderForFile(file);
  const eventFolder = classifyEventFolder(file);
  const cacheKey = `${rootFolderId}|${dateFolder}|${eventFolder}`;
  const cached = folderCache.get(cacheKey);
  if (cached) return cached;

  const dateFolderId = await findOrCreateFolder(drive, dateFolder, rootFolderId);
  const eventFolderId = await findOrCreateFolder(drive, eventFolder, dateFolderId);
  folderCache.set(cacheKey, eventFolderId);
  return eventFolderId;
}
|
|
|
|
// Appends per-file outcomes (1 = success, 0 = failure) to the rolling
// outcome log, trimming it to a bounded tail so alert-rate checks stay
// cheap and state.json stays small.
function recordUploadOutcomes(state, successCount, failedCount) {
  const outcomes = state.analytics.uploadOutcomes;
  const wins = Number(successCount || 0);
  const losses = Number(failedCount || 0);
  for (let i = 0; i < wins; i++) outcomes.push(1);
  for (let i = 0; i < losses; i++) outcomes.push(0);

  const maxKeep = Math.max(FAIL_ALERT_WINDOW * 5, 100);
  if (outcomes.length > maxKeep) {
    state.analytics.uploadOutcomes = outcomes.slice(-maxKeep);
  }
}
|
|
|
|
// Notifies the owner when the recent upload failure rate crosses the
// configured threshold. Requires a full window of outcomes, enforces a
// cooldown between alerts, and records the alert in state (saved to
// disk) before attempting delivery.
// NOTE(review): uses the module-level WhatsApp `client`, which is
// defined elsewhere in this file.
async function maybeSendFailureAlert(state) {
  const ownerChatId = getOwnerChatId();
  if (!ownerChatId) return;

  const outcomes = state.analytics.uploadOutcomes || [];
  const windowSize = Math.max(1, FAIL_ALERT_WINDOW);
  const recent = outcomes.slice(-windowSize);
  if (recent.length < windowSize) return; // not enough data yet

  const failCount = recent.filter((x) => x === 0).length;
  const failRate = (failCount / recent.length) * 100;
  if (failRate < FAIL_ALERT_THRESHOLD_PCT) return;

  const nowTs = Date.now();
  const cooldownMs = Math.max(1, FAIL_ALERT_COOLDOWN_MIN) * 60 * 1000;
  const alerts = state.analytics.alerts || { lastFailureAlertAt: 0, lastFailureRate: 0 };
  if (alerts.lastFailureAlertAt && nowTs - alerts.lastFailureAlertAt < cooldownMs) return;

  // Record the alert before sending so a send failure cannot trigger
  // an alert storm on subsequent calls.
  alerts.lastFailureAlertAt = nowTs;
  alerts.lastFailureRate = failRate;
  state.analytics.alerts = alerts;
  saveJson(STATE_FILE, state);

  try {
    await client.sendMessage(
      ownerChatId,
      [
        "⚠️ Upload failure alert",
        `Recent window: ${recent.length} files`,
        `Failures: ${failCount} (${failRate.toFixed(1)}%)`,
        `Threshold: ${FAIL_ALERT_THRESHOLD_PCT}%`,
      ].join("\n")
    );
  } catch (err) {
    logWarn("Failed to send owner failure alert", { error: err?.message });
  }
}
|
|
|
|
// Rebuilds in-memory batches from .batch.json metadata left on disk by
// a previous process. Per chat directory, only the most recently active
// batch is restored; expired batches are deleted, and file entries
// whose payloads no longer exist on disk are dropped. The "retry"
// directory is excluded (it is not a chat directory).
function restoreBatchesFromDisk() {
  if (!fs.existsSync(TMP_BASE)) return;

  let restored = 0;
  const seenChat = new Set();
  const chatDirs = fs.readdirSync(TMP_BASE, { withFileTypes: true })
    .filter((d) => d.isDirectory() && d.name !== "retry");

  for (const chatDir of chatDirs) {
    const chatRoot = path.join(TMP_BASE, chatDir.name);
    const subdirs = fs.readdirSync(chatRoot, { withFileTypes: true }).filter((d) => d.isDirectory());
    let best = null;

    // Pick the batch with the newest lastActivityAt among valid metas.
    for (const sd of subdirs) {
      const dir = path.join(chatRoot, sd.name);
      const metaPath = getBatchMetaPath(dir);
      if (!fs.existsSync(metaPath)) continue;
      const meta = loadJson(metaPath, null);
      if (!meta || !meta.chatId || !meta.folderId || !meta.folderName) continue;
      if (!best || Number(meta.lastActivityAt || 0) > Number(best.lastActivityAt || 0)) best = meta;
    }

    if (!best) continue;
    const chatId = best.chatId;
    if (seenChat.has(chatId)) continue;

    // TTL applies across restarts: stale batches are cleaned up here.
    const expired = now() - Number(best.lastActivityAt || 0) > BATCH_TTL_MS;
    if (expired) {
      try {
        if (best.dir && fs.existsSync(best.dir)) fs.rmSync(best.dir, { recursive: true, force: true });
      } catch { }
      continue;
    }

    // Drop entries whose downloaded payloads vanished; rebuild the
    // dedupe signature set from what survived.
    const files = Array.isArray(best.files) ? best.files.filter((f) => f.filePath && fs.existsSync(f.filePath)) : [];
    const fileSignatures = new Set(files.map((f) => f.signature || buildFileSignature(f.sourceName || f.filename, f.sizeBytes || 0)));
    BATCH.set(chatId, {
      startedAt: Number(best.startedAt || now()),
      lastActivityAt: Number(best.lastActivityAt || now()),
      dir: best.dir,
      files,
      fileSignatures,
      failedFiles: [],
      activeDownloads: 0,
      doneRequested: false,
      uploadInProgress: false,
      pendingDoneMsg: null,
      pendingDoneTimer: null,
      doneTriggerMsg: null,
      folderId: best.folderId,
      folderName: best.folderName,
    });
    persistBatch(chatId);
    seenChat.add(chatId);
    restored += 1;
  }

  if (restored > 0) {
    logInfo("Restored batches from disk", { restored });
  }
}
|
|
|
|
// Prefers replying to the triggering message; falls back to a fresh
// message to the chat. Returns the sent message object, or null when
// every attempt fails.
async function sendChatOrReply(chatId, text, triggerMsg = null) {
  const attempts = [];
  if (triggerMsg) attempts.push(() => triggerMsg.reply(text));
  attempts.push(() => client.sendMessage(chatId, text));

  for (const attempt of attempts) {
    try {
      return await attempt();
    } catch { }
  }
  return null;
}
|
|
|
|
// Stops the periodic "waiting for files" status refresh for a batch
// and drops the status-message handle. Safe with a null batch or when
// no timer is running.
function clearPendingDoneStatus(batch) {
  if (!batch) return;
  const { pendingDoneTimer } = batch;
  if (pendingDoneTimer) {
    clearInterval(pendingDoneTimer);
    batch.pendingDoneTimer = null;
  }
  batch.pendingDoneMsg = null;
}
|
|
|
|
// After the user finishes a batch while downloads are still in flight:
// posts a "waiting" status message and starts a 5-second interval that
// edits it in place (or sends a new message when editing is not
// supported) until the batch is gone, done is cancelled, or the upload
// has started — at which point the timer tears itself down.
async function startPendingDoneStatus(chatId, triggerMsg = null) {
  const batch = getBatch(chatId);
  if (!batch) return;
  if (batch.pendingDoneTimer) return; // already running

  const initial = [
    "⏳ Received done command.",
    "Some files are still being received.",
    "I will auto-start upload once receiving is complete.",
  ].join("\n");
  batch.pendingDoneMsg = await sendChatOrReply(chatId, initial, triggerMsg);

  batch.pendingDoneTimer = setInterval(async () => {
    // Re-fetch each tick: the batch may have expired or moved on.
    const b = getBatch(chatId);
    if (!b || !b.doneRequested || b.uploadInProgress) {
      if (b) clearPendingDoneStatus(b);
      return;
    }

    const text = [
      "⏳ Waiting for incoming files...",
      `Receiving now: *${b.activeDownloads || 0}*`,
      `Queued: *${b.files.length}*`,
      "Refresh: every 5s. Upload starts automatically.",
    ].join("\n");

    try {
      if (b.pendingDoneMsg?.edit) {
        await b.pendingDoneMsg.edit(text);
      } else {
        await client.sendMessage(chatId, text);
      }
    } catch { }
  }, 5000);
}
|
|
|
|
async function runBatchUpload(chatId, triggerMsg = null) {
|
|
const batch = getBatch(chatId);
|
|
if (!batch) {
|
|
await sendChatOrReply(chatId, "No active batch or no files received. Type start.", triggerMsg);
|
|
return;
|
|
}
|
|
if (batch.uploadInProgress) {
|
|
await sendChatOrReply(chatId, "⏫ Upload already in progress. I will update status shortly.", triggerMsg);
|
|
return;
|
|
}
|
|
|
|
batch.uploadInProgress = true;
|
|
batch.doneRequested = false;
|
|
clearPendingDoneStatus(batch);
|
|
|
|
try {
|
|
const drive = getDriveClient();
|
|
const state = normalizeState(loadJson(STATE_FILE, { users: {} }));
|
|
const userState = ensureUserState(state, chatId);
|
|
const folderId = batch.folderId || userState.folderId;
|
|
const folderLabel = batch.folderName || userState.projectName || "Default";
|
|
|
|
if (!folderId) {
|
|
batch.uploadInProgress = false;
|
|
await sendChatOrReply(chatId, "❌ Could not resolve upload folder. Type start to create a new batch.", triggerMsg);
|
|
return;
|
|
}
|
|
if (!batch.files.length) {
|
|
batch.uploadInProgress = false;
|
|
await sendChatOrReply(chatId, "No files queued yet.", triggerMsg);
|
|
return;
|
|
}
|
|
|
|
const total = batch.files.length;
|
|
const startMs = Date.now();
|
|
let uploadedBytes = 0;
|
|
let uploadedCount = 0;
|
|
let failedCount = 0;
|
|
const failedFiles = [];
|
|
const folderCache = new Map();
|
|
|
|
const totalBytes = batch.files.reduce((sum, f) => {
|
|
try {
|
|
return sum + fs.statSync(f.filePath).size;
|
|
} catch {
|
|
return sum;
|
|
}
|
|
}, 0);
|
|
|
|
const progressMsg = await sendChatOrReply(
|
|
chatId,
|
|
`⏫ Uploading *${total}* file(s) to *${folderLabel}*...\n${formatProgressBar(0, totalBytes)}\n0/${total} | Elapsed: 0s | ETA: --\nUploaded: 0/${bytesToMb(totalBytes)} MB | Speed: 0.00 MB/s`,
|
|
triggerMsg
|
|
);
|
|
logInfo("Batch upload started", { chatId, count: total, folderLabel });
|
|
|
|
for (let i = 0; i < batch.files.length; i++) {
|
|
const f = batch.files[i];
|
|
let parentFolderId = folderId;
|
|
try {
|
|
parentFolderId = await resolveUploadParentFolderId(drive, folderId, f, folderCache);
|
|
const uploaded = await uploadWithRetry(
|
|
drive,
|
|
f,
|
|
{
|
|
filePath: f.filePath,
|
|
filename: f.filename,
|
|
parentFolderId,
|
|
mimeType: f.mimeType,
|
|
},
|
|
3
|
|
);
|
|
logInfo("File uploaded", { chatId, name: uploaded.name, link: uploaded.webViewLink });
|
|
uploadedCount += 1;
|
|
try {
|
|
const sz = fs.statSync(f.filePath).size;
|
|
uploadedBytes += sz;
|
|
} catch { }
|
|
} catch (err) {
|
|
failedCount += 1;
|
|
failedFiles.push({ ...f, parentFolderId });
|
|
logError("File upload failed", { chatId, filename: f.filename, error: err?.message });
|
|
}
|
|
|
|
const done = i + 1;
|
|
const elapsedMs = Date.now() - startMs;
|
|
const elapsedSec = Math.round(elapsedMs / 1000);
|
|
const speedMbps = elapsedMs > 0 ? Number(bytesToMb(uploadedBytes)) / (elapsedMs / 1000) : 0;
|
|
const speedBytesPerSec = elapsedMs > 0 ? uploadedBytes / (elapsedMs / 1000) : 0;
|
|
const remainingBytes = Math.max(0, totalBytes - uploadedBytes);
|
|
const etaSec = speedBytesPerSec > 0 ? Math.round(remainingBytes / speedBytesPerSec) : 0;
|
|
try {
|
|
if (progressMsg?.edit) {
|
|
await progressMsg.edit(
|
|
`⏫ Uploading *${total}* file(s) to *${folderLabel}*...\n${formatProgressBar(uploadedBytes, totalBytes)}\n${done}/${total} | Elapsed: ${elapsedSec}s | ETA: ${etaSec}s\nUploaded: ${bytesToMb(uploadedBytes)}/${bytesToMb(totalBytes)} MB | Speed: ${speedMbps.toFixed(2)} MB/s`
|
|
);
|
|
}
|
|
} catch { }
|
|
}
|
|
|
|
let folderLink = "Unavailable";
|
|
if (uploadedCount > 0) {
|
|
await setFolderPublicView(drive, folderId);
|
|
const folderMeta = await drive.files.get({
|
|
fileId: folderId,
|
|
fields: "id, name, webViewLink",
|
|
});
|
|
folderLink = folderMeta?.data?.webViewLink || folderLink;
|
|
}
|
|
|
|
const retryItems = enqueueRetryFiles(chatId, folderLabel, failedFiles);
|
|
userState.retryQueue = [...(userState.retryQueue || []), ...retryItems].slice(-RETRY_MAX_ITEMS);
|
|
incrementStats(state, {
|
|
batchesCompleted: 1,
|
|
filesUploaded: uploadedCount,
|
|
filesFailed: failedCount,
|
|
uploadBytes: uploadedBytes,
|
|
completedBatchFilesTotal: total,
|
|
});
|
|
recordUploadOutcomes(state, uploadedCount, failedCount);
|
|
saveJson(STATE_FILE, state);
|
|
await maybeSendFailureAlert(state);
|
|
|
|
clearBatch(chatId);
|
|
await sendChatOrReply(
|
|
chatId,
|
|
[
|
|
"✅ Upload complete",
|
|
`Batch: *${folderLabel}*`,
|
|
`Total files: *${total}*`,
|
|
`Uploaded: *${uploadedCount}*`,
|
|
`Failed: *${failedCount}*`,
|
|
`Retry queued: *${retryItems.length}*`,
|
|
`Total size: *${bytesToMb(totalBytes)} MB*`,
|
|
`📁 Drive link: ${folderLink}`,
|
|
"",
|
|
"Next: send more files directly, or type start | list | done",
|
|
].join("\n"),
|
|
null
|
|
);
|
|
logInfo("Batch upload complete", { chatId, folderLabel, uploadedCount, failedCount, totalBytes });
|
|
} catch (err) {
|
|
const b = BATCH.get(chatId);
|
|
if (b) {
|
|
b.uploadInProgress = false;
|
|
b.doneRequested = false;
|
|
clearPendingDoneStatus(b);
|
|
}
|
|
logError("runBatchUpload failed", { chatId, error: err?.message });
|
|
await sendChatOrReply(chatId, "❌ Upload failed unexpectedly. Please type done again.");
|
|
}
|
|
}
|
|
|
|
// Fire the deferred "done" upload for a chat, but only when it is safe:
// the user must have requested done, no upload may already be running, and
// every in-flight media download must have finished. Called from the media
// download finally-block so the upload starts as soon as receiving ends.
async function tryAutoRunDone(chatId) {
  const pending = getBatch(chatId);
  if (!pending) return;

  const stillDownloading = (pending.activeDownloads || 0) > 0;
  if (!pending.doneRequested || pending.uploadInProgress || stillDownloading) {
    return;
  }

  await runBatchUpload(chatId, pending.doneTriggerMsg || null);
}
|
|
|
|
// Race msg.downloadMedia() against a hard timeout.
//
// msg       - whatsapp-web.js Message whose attachment should be fetched.
// timeoutMs - milliseconds before the download is abandoned.
// Returns the media object on success; rejects with Error("download timeout")
// when the deadline passes first, or with the download's own error.
async function downloadMediaWithTimeout(msg, timeoutMs) {
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error("download timeout")), timeoutMs);
  });

  // Start the download once and keep a handle on it. The no-op catch means
  // a late rejection (after the timeout already won the race) can no longer
  // surface as an unhandled promise rejection; the race itself still
  // rejects with the original error when the download fails first.
  const downloadPromise = Promise.resolve(msg.downloadMedia());
  downloadPromise.catch(() => { });

  try {
    return await Promise.race([downloadPromise, timeoutPromise]);
  } finally {
    // Always cancel the timer so a fast download does not leave a pending
    // timeout keeping the event loop busy.
    clearTimeout(timer);
  }
}
|
|
|
|
// ---------- Idle restart ----------

// Epoch-ms timestamp of the most recent inbound WhatsApp message; refreshed
// by the "message" handler and read by idleRestartCheck and the heartbeat
// monitor to measure inactivity.
let lastMessageAt = Date.now();
// Latch so idleRestartCheck schedules at most one process exit, even if the
// monitor interval fires again before the delayed exit happens.
let isRestarting = false;
|
|
|
|
// Return today's restart-counter record from the persisted state, resetting
// it when the stored record is missing or belongs to a previous day.
//
// state - mutable state object (persisted to STATE_FILE by the caller);
//         state.restartStats may be replaced in place.
// Returns the current { date, count } record.
function getRestartStats(state) {
  // Capture the date key once so the staleness check and the reset can
  // never disagree if the clock rolls over to a new day mid-call (the
  // original called getDateKey() twice).
  const today = getDateKey();
  if (!state.restartStats || state.restartStats.date !== today) {
    state.restartStats = { date: today, count: 0 };
  }
  return state.restartStats;
}
|
|
|
|
// Exit the process (a supervisor is expected to relaunch it) once no
// message has arrived for IDLE_RESTART_MIN minutes, subject to a persisted
// per-day budget of IDLE_RESTART_MAX_PER_DAY restarts. Notifies the owner
// chat best-effort before exiting.
async function idleRestartCheck() {
  if (isRestarting) return;

  const minutesIdle = (Date.now() - lastMessageAt) / 60000;
  if (minutesIdle < IDLE_RESTART_MIN) return;

  // Enforce the daily restart budget recorded in the state file.
  const state = loadJson(STATE_FILE, { users: {} });
  const stats = getRestartStats(state);
  if (stats.count >= IDLE_RESTART_MAX_PER_DAY) return;

  stats.count += 1;
  saveJson(STATE_FILE, state);
  isRestarting = true;

  const roundedIdle = Math.round(minutesIdle);
  logWarn("Idle restart triggered", { idleMin: roundedIdle, count: stats.count });

  // Best-effort heads-up to the owner; a send failure must not block the
  // restart itself.
  const ownerChatId = getOwnerChatId();
  if (ownerChatId) {
    try {
      await client.sendMessage(
        ownerChatId,
        `♻️ Auto-restart (idle ${roundedIdle} min). Restart #${stats.count} today.`
      );
    } catch (err) {
      logWarn("Failed to send idle restart notice", { error: err?.message });
    }
  }

  // Give the notification a moment to flush before exiting cleanly.
  setTimeout(() => process.exit(0), 1000);
}
|
|
|
|
// Begin periodic idle-restart polling. The interval runs for the life of
// the process; each tick delegates to idleRestartCheck and only logs
// (never throws) when the check itself fails.
function startIdleMonitor() {
  const intervalMs = IDLE_RESTART_CHECK_MIN * 60_000;
  const tick = () => {
    idleRestartCheck().catch((err) =>
      logWarn("Idle restart check failed", { error: err?.message })
    );
  };
  setInterval(tick, intervalMs);
}
|
|
|
|
// Periodically log a HEARTBEAT line (uptime, message recency, batch count,
// RSS) so external log monitoring can confirm the bot is alive. The period
// is HEARTBEAT_INTERVAL_SEC clamped to a 15-second floor.
function startHealthMonitor() {
  const periodMs = Math.max(15, HEARTBEAT_INTERVAL_SEC) * 1000;
  const emitHeartbeat = () => {
    const snapshot = {
      uptimeSec: Math.round(process.uptime()),
      lastMsgAgeSec: Math.round((Date.now() - lastMessageAt) / 1000),
      activeBatches: BATCH.size,
      memoryRssMb: Math.round(process.memoryUsage().rss / (1024 * 1024)),
    };
    logInfo("HEARTBEAT", snapshot);
  };
  setInterval(emitHeartbeat, periodMs);
}
|
|
|
|
// ---------- WhatsApp ----------

// Single shared whatsapp-web.js client. LocalAuth persists the session
// under WA_CLIENT_ID so restarts do not require re-scanning the QR code.
const client = new Client({
  authStrategy: new LocalAuth({ clientId: WA_CLIENT_ID }),
  puppeteer: {
    headless: WA_HEADLESS,
    // Platform-specific Chromium flags (sandbox disabled on non-Windows).
    args: PUPPETEER_ARGS,
    // Empty string means "let puppeteer locate Chromium itself".
    executablePath: WA_EXECUTABLE_PATH || undefined,
    // Very large DevTools protocol timeout (default 9,000,000 ms = 2.5 h)
    // so slow, large media downloads are not aborted mid-transfer.
    protocolTimeout: Number(process.env.WA_PROTOCOL_TIMEOUT || "9000000"),
  },
});
|
|
|
|
// Render the pairing QR code in the terminal so the operator can link the
// bot's WhatsApp account on first run (or after the saved session is lost).
function handleQr(qr) {
  qrcode.generate(qr, { small: true });
  logInfo("QR received. Scan to login.");
}

client.on("qr", handleQr);
|
|
|
|
// Best-effort startup notice to the owner chat; never throws.
async function notifyOwnerReady() {
  const ownerChatId = getOwnerChatId();
  if (!ownerChatId) return;
  try {
    await client.sendMessage(
      ownerChatId,
      "✅ MCB bot started and ready. Logs are being written to app.log."
    );
  } catch (err) {
    logWarn("Failed to send ready message to owner", { error: err?.message });
  }
}

// Once the session is fully up: log readiness, notify the owner, then start
// the background idle-restart and heartbeat monitors.
client.on("ready", async () => {
  logInfo("WhatsApp bot is ready");
  await notifyOwnerReady();
  startIdleMonitor();
  startHealthMonitor();
});
|
|
|
|
// Session lifecycle logging (no behavior beyond the log lines).
function handleAuthenticated() {
  logInfo("WhatsApp authenticated");
}

function handleAuthFailure(msg) {
  logError("WhatsApp auth failure", { msg });
}

function handleDisconnected(reason) {
  logWarn("WhatsApp disconnected", { reason });
}

client.on("authenticated", handleAuthenticated);
client.on("auth_failure", handleAuthFailure);
client.on("disconnected", handleDisconnected);
|
|
|
|
// Central inbound-message handler. Flow:
//   1) Bookkeeping: refresh the idle timer, persist per-user state/stats.
//   2) Command dispatch: help, admin, project, and batch-control commands.
//   3) Media intake: download attachments and queue them into the chat's
//      active batch (auto-starting a batch on the first attachment).
// The whole body is wrapped in try/catch so one bad message cannot crash
// the bot.
client.on("message", async (msg) => {
  try {
    // Feed the idle-restart monitor: any inbound traffic counts as activity.
    lastMessageAt = Date.now();
    const text = (msg.body || "").trim();
    const chatId = msg.from;
    const cmd = parseCommand(text);

    logInfo("Message received", {
      from: chatId,
      hasMedia: msg.hasMedia,
      textPreview: text.slice(0, 120),
    });

    // Load, update, and persist state for every message (message counter,
    // per-user record, last-active marker).
    const drive = getDriveClient();
    const state = normalizeState(loadJson(STATE_FILE, { users: {} }));
    const userState = ensureUserState(state, chatId);
    markUserActive(state, chatId);
    incrementStats(state, { messages: 1 });
    saveJson(STATE_FILE, state);

    // Each chat maps to its own Drive folder under the bot's root folder.
    const clientFolderId = await getOrCreateClientFolder(drive, msg);
    if (cmd) {
      if (cmd.type === "help") {
        await msg.reply(
          [
            "start -> begin collecting files",
            "Send files now. When finished, type done.",
            "Use list/status/undo/rename/cancel anytime.",
          ].join("\n")
        );
        return;
      }

      // Owner-only usage statistics: no arg = overall summary, "7d" style
      // arg = last N days, ISO date arg = single day.
      if (cmd.type === "adminStats") {
        const ownerChatId = getOwnerChatId();
        // NOTE(review): if no owner chat is configured, the command is open
        // to everyone — confirm that is intended.
        if (ownerChatId && chatId !== ownerChatId) {
          await msg.reply("❌ admin-stats is restricted.");
          return;
        }
        const arg = String(cmd.arg || "").toLowerCase();
        if (!arg) {
          await msg.reply(summarizeAdminStats(state));
          return;
        }
        if (/^\d+d$/.test(arg)) {
          const days = Number(arg.slice(0, -1));
          await msg.reply(summarizeAdminStatsForLastDays(state, days));
          return;
        }
        if (/^\d{4}-\d{2}-\d{2}$/.test(arg)) {
          await msg.reply(summarizeAdminStatsForDate(state, arg));
          return;
        }
        await msg.reply("Use: admin-stats | admin-stats 7d | admin-stats YYYY-MM-DD");
        return;
      }

      // Owner-only health snapshot (same open-when-unconfigured caveat).
      if (cmd.type === "adminHealth") {
        const ownerChatId = getOwnerChatId();
        if (ownerChatId && chatId !== ownerChatId) {
          await msg.reply("❌ admin-health is restricted.");
          return;
        }
        await msg.reply(summarizeAdminHealth(state));
        return;
      }

      // Pin this chat's default project folder for future batches.
      if (cmd.type === "project") {
        if (!cmd.name) {
          await msg.reply("Send like: mcb-project MyProjectName");
          return;
        }
        const folderId = await findOrCreateFolder(drive, cmd.name, clientFolderId);
        userState.projectName = cmd.name;
        userState.folderId = folderId;
        saveJson(STATE_FILE, state);
        await msg.reply(`✅ Project set: *${cmd.name}*`);
        logInfo("Project set", { chatId, projectName: cmd.name, folderId });
        return;
      }

      // Repoint the current batch at a (possibly new) Drive folder.
      // Already-queued files stay queued and will upload to the new folder.
      if (cmd.type === "rename") {
        const batch = getBatch(chatId);
        if (!batch) {
          await msg.reply("No active batch. Type start first.");
          return;
        }
        if (!cmd.name) {
          await msg.reply("Use: rename MyFolderName");
          return;
        }
        const folderId = await findOrCreateFolder(drive, cmd.name, clientFolderId);
        batch.folderId = folderId;
        batch.folderName = cmd.name;
        touchBatch(chatId);

        // Remember the choice so the next start reuses the same folder.
        userState.lastBatchFolderName = cmd.name;
        userState.lastBatchFolderId = folderId;
        saveJson(STATE_FILE, state);
        await msg.reply(`✅ Batch folder renamed to *${cmd.name}*`);
        logInfo("Batch folder renamed", { chatId, folderName: cmd.name, folderId });
        return;
      }

      // Begin a new collection session ("start" / "hi mcb [name]").
      if (cmd.type === "start") {
        const target = await resolveBatchTarget(drive, userState, clientFolderId, cmd.name);
        startBatch(chatId, target.folderId, target.folderName);
        userState.lastBatchFolderName = target.folderName;
        userState.lastBatchFolderId = target.folderId;
        incrementStats(state, { batchesStarted: 1 });
        saveJson(STATE_FILE, state);
        await msg.reply(
          `✅ Started upload session in *${target.folderName}*.\nSend more files, then type done.`
        );
        logInfo("Batch started", { chatId, folderName: target.folderName, folderId: target.folderId });
        return;
      }

      // Show up to 15 queued files plus a count of the remainder.
      if (cmd.type === "list") {
        const batch = getBatch(chatId);
        if (!batch || batch.files.length === 0) {
          await msg.reply("No files queued yet.");
          return;
        }
        const lines = batch.files.slice(0, 15).map((f, i) => {
          const size = Number(f.sizeBytes || 0);
          return `${i + 1}. ${f.filename} (${bytesToDisplay(size)})`;
        });
        const extra = batch.files.length > 15 ? `\n...and ${batch.files.length - 15} more` : "";
        await msg.reply(
          `📋 Queued files (${batch.files.length}):\n${lines.join("\n")}${extra}`
        );
        return;
      }

      // Remove the most recently queued file: drop its dedupe signature and
      // best-effort delete its temp file from disk.
      if (cmd.type === "undo") {
        const batch = getBatch(chatId);
        if (!batch || batch.files.length === 0) {
          await msg.reply("Nothing to undo.");
          return;
        }
        const lastFile = batch.files.pop();
        if (lastFile?.signature && batch.fileSignatures) {
          batch.fileSignatures.delete(lastFile.signature);
        }
        try {
          if (lastFile?.filePath && fs.existsSync(lastFile.filePath)) {
            fs.rmSync(lastFile.filePath, { force: true });
          }
        } catch (err) {
          logWarn("Undo failed to remove local file", { chatId, file: lastFile?.filename, error: err?.message });
        }
        touchBatch(chatId);
        await msg.reply(`↩️ Removed last queued file: ${lastFile?.filename || "unknown"}`);
        return;
      }

      // Drop the whole batch (and, presumably, its temp files — see
      // clearBatch).
      if (cmd.type === "cancel") {
        clearBatch(chatId);
        await msg.reply("🗑️ Batch cancelled and cleared.");
        logInfo("Batch cancelled", { chatId });
        return;
      }

      // Report queue size and time remaining before the batch TTL expires.
      if (cmd.type === "status") {
        const batch = getBatch(chatId);
        if (!batch) {
          await msg.reply("No active batch. Type start.");
          return;
        }
        const msLeft = Math.max(0, BATCH_TTL_MS - (now() - batch.lastActivityAt));
        const minLeft = Math.ceil(msLeft / 60000);
        await msg.reply(`📦 Batch files: *${batch.files.length}*\n⏱️ Time left: *${minLeft} min*`);
        logInfo("Batch status", { chatId, count: batch.files.length, minLeft });
        return;
      }

      // Re-attempt every previously failed upload; items that fail again
      // are kept (capped at RETRY_MAX_ITEMS) for a later retry.
      if (cmd.type === "retryFailed") {
        const retryQueue = Array.isArray(userState.retryQueue) ? userState.retryQueue : [];
        if (retryQueue.length === 0) {
          await msg.reply("No failed files waiting for retry.");
          return;
        }

        const total = retryQueue.length;
        let uploadedCount = 0;
        let failedCount = 0;
        let uploadedBytes = 0;
        const kept = [];
        const folderLabel = retryQueue[0].folderName || "RetryFolder";
        const progressMsg = await msg.reply(`🔁 Retrying ${total} failed file(s)...`);

        for (const item of retryQueue) {
          try {
            await uploadWithRetry(
              drive,
              item,
              {
                filePath: item.filePath,
                filename: item.filename,
                // Fall back through item folder -> project folder -> chat root.
                parentFolderId: item.folderId || userState.folderId || clientFolderId,
                mimeType: item.mimeType,
              },
              3
            );
            uploadedCount += 1;
            uploadedBytes += Number(item.sizeBytes || 0);
            // Temp file is only deleted once the upload succeeded.
            try {
              if (item.filePath && fs.existsSync(item.filePath)) fs.rmSync(item.filePath, { force: true });
            } catch { }
          } catch (err) {
            failedCount += 1;
            kept.push(item);
            logWarn("Retry upload failed", { chatId, file: item.filename, error: err?.message });
          }
        }

        userState.retryQueue = kept.slice(0, RETRY_MAX_ITEMS);
        incrementStats(state, {
          filesUploaded: uploadedCount,
          filesFailed: failedCount,
          uploadBytes: uploadedBytes,
        });
        recordUploadOutcomes(state, uploadedCount, failedCount);
        saveJson(STATE_FILE, state);
        await maybeSendFailureAlert(state);

        try {
          await progressMsg.edit(
            `🔁 Retry done for *${folderLabel}*\nUploaded: *${uploadedCount}*\nStill failed: *${failedCount}*`
          );
        } catch { }
        return;
      }

      // Finish the batch. If downloads are still in flight, defer: mark the
      // batch doneRequested so the media finally-block auto-starts the
      // upload when the last download completes.
      if (cmd.type === "done") {
        const batch = getBatch(chatId);
        if (!batch) {
          await msg.reply("No active batch or no files received. Type start.");
          return;
        }
        if (batch.uploadInProgress) {
          await msg.reply("⏫ Upload already in progress. I will update you once done.");
          return;
        }
        if ((batch.activeDownloads || 0) > 0) {
          batch.doneRequested = true;
          // Keep the triggering message so the upload can reply to it later.
          batch.doneTriggerMsg = msg;
          touchBatch(chatId);
          await startPendingDoneStatus(chatId, msg);
          await msg.reply("⏳ Still receiving files. I will start upload automatically once receiving finishes.");
          return;
        }
        if (batch.files.length === 0) {
          await msg.reply("No active batch or no files received. Type start.");
          return;
        }
        await runBatchUpload(chatId, msg);
        return;
      }

      // "mcb-" prefixed text that matched no known command.
      if (cmd.type === "unknown") {
        await msg.reply("Unknown command. Type help.");
        logWarn("Unknown command", { chatId, text });
        return;
      }
    }

    // ---------- Non-command messages: collect files if batch is active ----------
    let batch = getBatch(chatId);
    // First attachment with no session: auto-start a batch so users can
    // just send files without typing "start".
    if (!batch && msg.hasMedia) {
      const target = await resolveBatchTarget(drive, userState, clientFolderId, "");
      startBatch(chatId, target.folderId, target.folderName);
      userState.lastBatchFolderName = target.folderName;
      userState.lastBatchFolderId = target.folderId;
      incrementStats(state, { batchesStarted: 1 });
      saveJson(STATE_FILE, state);

      await msg.reply(
        `✅ Started upload session in *${target.folderName}*.\n` +
        "Send more files, then type done."
      );
      logInfo("Batch auto-started from first media", {
        chatId,
        folderName: target.folderName,
        folderId: target.folderId,
      });
      batch = getBatch(chatId);
    }

    // Plain text with no active batch: nothing to do.
    if (!batch) return;

    if (msg.hasMedia) {
      // Count this in-flight download so "done" can wait for it; the
      // matching decrement happens in the finally block below on every
      // exit path (success, duplicate, size-block, failure).
      batch.activeDownloads = (batch.activeDownloads || 0) + 1;
      touchBatch(chatId);
      let statusMsg = null;
      let statusTimer = null;
      try {
        // Pre-download size gate. NOTE(review): reads whatsapp-web.js
        // internals (msg._data); when the size fields are absent this
        // evaluates to 0 and the limit is effectively not enforced.
        const mediaSizeBytes = Number(msg?._data?.size || msg?._data?.fileSize || 0);
        const maxBytes = MAX_MEDIA_MB > 0 ? MAX_MEDIA_MB * 1024 * 1024 : 0;
        if (maxBytes && mediaSizeBytes > maxBytes) {
          const sizeMb = bytesToMb(mediaSizeBytes);
          const maxMb = MAX_MEDIA_MB.toFixed(0);
          await msg.reply(
            `❌ File too large for WhatsApp Web download (${sizeMb} MB). Limit set to ${maxMb} MB.\n` +
            "Please share a Drive link or split the file into smaller parts."
          );
          logWarn("Media download blocked by size limit", { chatId, sizeMb, maxMb });
          return;
        }

        // Animated "Downloading..." status message, refreshed every 5 s.
        // All edits are best-effort — status UI must never fail the intake.
        const statusStart = Date.now();
        const statusFrames = ["⏳", "⌛", "🔄", "⏬"];
        let statusIndex = 0;
        try {
          statusMsg = await msg.reply("⏳ Downloading... 0m 0s");
          logInfo("Media download started", { chatId });
          statusTimer = setInterval(async () => {
            if (!statusMsg) return;
            const elapsed = formatElapsedSec(Date.now() - statusStart);
            const icon = statusFrames[statusIndex % statusFrames.length];
            statusIndex += 1;
            try {
              await statusMsg.edit(`${icon} Downloading... ${elapsed}`);
            } catch { }
            logInfo("Media download heartbeat", { chatId, elapsed });
          }, 5000);
        } catch { }

        // Download with timeout plus bounded retries (total attempts =
        // DOWNLOAD_MAX_RETRIES + 1). An empty payload counts as a failure.
        let media = null;
        let lastErr = null;
        for (let attempt = 1; attempt <= DOWNLOAD_MAX_RETRIES + 1; attempt += 1) {
          try {
            if (attempt > 1 && statusMsg) {
              try {
                await statusMsg.edit(`🔁 Retrying download (${attempt}/${DOWNLOAD_MAX_RETRIES + 1})...`);
              } catch { }
            }
            media = await downloadMediaWithTimeout(msg, DOWNLOAD_TIMEOUT_MS);
            if (media && media.data) break;
            lastErr = new Error("empty media data");
          } catch (err) {
            lastErr = err;
          }

          // Only reached on failure (success breaks out above); no sleep
          // after the final attempt.
          logWarn("Media download retry", { chatId, attempt, error: lastErr?.message });
          if (attempt <= DOWNLOAD_MAX_RETRIES) await sleep(DOWNLOAD_RETRY_DELAY_MS);
        }

        if (!media || !media.data) {
          if (statusMsg) {
            try {
              await statusMsg.edit("❌ Download failed. Try sending as document.");
            } catch { }
          }
          await msg.reply("❌ Could not download the attachment.");
          logWarn("Media download failed", { chatId, error: lastErr?.message });
          return;
        }

        // Persist the base64 payload into the batch's temp directory.
        const filename = buildSafeFileName(media, batch.files.length + 1);
        const filePath = path.join(batch.dir, filename);
        fs.writeFileSync(filePath, Buffer.from(media.data, "base64"));

        const downloadElapsedMs = Date.now() - statusStart;
        const fileSizeBytes = fs.statSync(filePath).size;
        const sourceName = getSourceNameFromMedia(media);
        // Dedupe key is source filename + byte size within this batch.
        const signature = buildFileSignature(sourceName, fileSizeBytes);
        if (batch.fileSignatures && batch.fileSignatures.has(signature)) {
          try {
            fs.rmSync(filePath, { force: true });
          } catch { }
          if (statusMsg) {
            try {
              await statusMsg.edit(`⚠️ Duplicate skipped: ${sourceName} (${bytesToDisplay(fileSizeBytes)})`);
            } catch { }
          }
          await msg.reply("⚠️ Duplicate file skipped (same filename + size in this batch).");
          logInfo("Duplicate skipped", { chatId, sourceName, sizeBytes: fileSizeBytes });
          return;
        }
        logInfo("Media download completed", {
          chatId,
          filename,
          sizeMb: bytesToMb(fileSizeBytes),
          elapsed: formatElapsedSec(downloadElapsedMs),
        });

        // NOTE(review): unguarded here (unlike the .has() check above) —
        // assumes batch.fileSignatures is always a Set on a live batch.
        batch.fileSignatures.add(signature);
        batch.files.push({
          filePath,
          filename,
          mimeType: media.mimetype,
          sizeBytes: fileSizeBytes,
          sourceName,
          signature,
          receivedAt: Date.now(),
        });
        touchBatch(chatId);
        incrementStats(state, { filesQueued: 1 });
        saveJson(STATE_FILE, state);

        if (statusMsg) {
          try {
            await statusMsg.edit(`✅ ${batch.files.length} file(s) queued`);
          } catch { }
        }
        try {
          await msg.react("✅");
        } catch { }
        logInfo("File queued", { chatId, filename, batchCount: batch.files.length });
      } finally {
        // Always stop the status animation and release this download slot,
        // then fire the deferred "done" upload if this was the last one.
        if (statusTimer) clearInterval(statusTimer);
        const b = BATCH.get(chatId);
        if (b) {
          b.activeDownloads = Math.max(0, (b.activeDownloads || 0) - 1);
          touchBatch(chatId);
          if (b.doneRequested && !b.uploadInProgress && (b.activeDownloads || 0) === 0) {
            // Deliberately not awaited: the upload runs in the background.
            tryAutoRunDone(chatId).catch((err) =>
              logWarn("Auto-run done failed", { chatId, error: err?.message })
            );
          }
        }
      }
    }
  } catch (err) {
    logError("Message handler error", { error: err?.message });
    try {
      await msg.reply("❌ Error while processing. Type help.");
    } catch { }
  }
});
|
|
|
|
// Initialize the WhatsApp client, retrying transient failures.
// Tries up to WA_INIT_RETRIES times with WA_INIT_RETRY_DELAY_MS between
// attempts; the final failure is rethrown so boot() can abort startup.
async function initializeClientWithRetry() {
  for (let tryNo = 1; tryNo <= WA_INIT_RETRIES; tryNo += 1) {
    try {
      await client.initialize();
      return;
    } catch (err) {
      logError("WhatsApp initialize failed", {
        attempt: tryNo,
        maxRetries: WA_INIT_RETRIES,
        error: err?.message,
      });
      // Out of attempts: surface the last error to the caller.
      if (tryNo >= WA_INIT_RETRIES) throw err;
      await sleep(WA_INIT_RETRY_DELAY_MS);
    }
  }
}
|
|
|
|
// Parse an incoming chat message into a bot command descriptor.
// Whitespace is collapsed and matching is case-insensitive; argument text
// keeps the sender's original casing. Returns a { type, ... } object for a
// recognized command, { type: "unknown" } for unrecognized "mcb-" prefixed
// text, and null when the message is not a command at all.
function parseCommand(rawText) {
  const normalized = String(rawText || "").trim().replace(/\s+/g, " ");
  const lower = normalized.toLowerCase();

  // Admin commands accept an optional "mcb-" prefix.
  const adminMatch = normalized.match(/^(?:mcb-)?admin-stats(?:\s+(.+))?$/i);
  if (adminMatch) {
    return { type: "adminStats", arg: String(adminMatch[1] || "").trim() };
  }
  if (/^(?:mcb-)?admin-health$/i.test(normalized)) {
    return { type: "adminHealth" };
  }

  // Exact-match commands: each usable bare or with the "mcb-" prefix,
  // plus a few conversational aliases ("hi mcb", "bye mcb", "by mcb").
  switch (lower) {
    case "help":
    case "mcb-help":
      return { type: "help" };
    case "list":
    case "mcb-list":
      return { type: "list" };
    case "undo":
    case "mcb-undo":
      return { type: "undo" };
    case "retry-failed":
    case "mcb-retry-failed":
      return { type: "retryFailed" };
    case "start":
    case "mcb-start":
    case "hi mcb":
      return { type: "start", name: "" };
    case "done":
    case "mcb-done":
    case "bye mcb":
    case "by mcb":
      return { type: "done" };
    case "cancel":
    case "mcb-cancel":
      return { type: "cancel" };
    case "status":
    case "mcb-status":
      return { type: "status" };
    default:
      break;
  }

  // Prefixed commands carrying an argument; slice the original-case text
  // so folder/project names keep the sender's capitalization.
  if (lower.startsWith("hi mcb ")) {
    return { type: "start", name: normalized.slice("hi mcb ".length).trim() };
  }
  if (lower.startsWith("rename ")) {
    return { type: "rename", name: normalized.slice("rename ".length).trim() };
  }
  if (lower.startsWith("mcb-rename ")) {
    return { type: "rename", name: normalized.slice("mcb-rename ".length).trim() };
  }
  if (lower.startsWith("mcb-project")) {
    return { type: "project", name: normalized.replace(/^mcb-project/i, "").trim() };
  }

  // Anything else with the bot prefix is an explicit-but-unknown command;
  // plain text is not a command at all.
  return lower.startsWith("mcb-") ? { type: "unknown" } : null;
}
|
|
|
|
// Produce a default batch-folder name such as "Batch_20240501" from
// today's UTC date (ISO date with the dashes removed).
function buildAutoBatchFolderName() {
  const [datePart] = new Date().toISOString().split("T");
  return `Batch_${datePart.replaceAll("-", "")}`;
}
|
|
|
|
// Decide which Drive folder an upload batch should target.
// Priority: an explicitly requested folder name, then the folder of the
// user's previous batch, then the user's configured project folder, and
// finally a fresh date-stamped auto folder under the chat's client folder.
// Returns { folderId, folderName }; may create the folder on Drive when
// resolving an explicit or auto name.
async function resolveBatchTarget(drive, userState, clientFolderId, requestedName = "") {
  const wantedName = String(requestedName || "").trim();
  if (wantedName) {
    return {
      folderId: await findOrCreateFolder(drive, wantedName, clientFolderId),
      folderName: wantedName,
    };
  }

  const { lastBatchFolderName, lastBatchFolderId, projectName, folderId } = userState;
  if (lastBatchFolderName && lastBatchFolderId) {
    return { folderId: lastBatchFolderId, folderName: lastBatchFolderName };
  }
  if (projectName && folderId) {
    return { folderId, folderName: projectName };
  }

  // No history at all: create a date-stamped default folder.
  const autoName = buildAutoBatchFolderName();
  return {
    folderId: await findOrCreateFolder(drive, autoName, clientFolderId),
    folderName: autoName,
  };
}
|
|
|
|
// Drop every in-memory batch; used during shutdown so per-batch resources
// (temp files, bookkeeping) are released via clearBatch.
function clearAllBatches() {
  const chatIds = [...BATCH.keys()];
  chatIds.forEach((chatId) => clearBatch(chatId));
}
|
|
|
|
// Shared shutdown path for termination signals: log, clear every queued
// batch, then exit cleanly.
function handleShutdown(noticeText) {
  logWarn(noticeText);
  clearAllBatches();
  process.exit(0);
}

process.on("SIGINT", () => handleShutdown("SIGINT received, cleaning up."));
process.on("SIGTERM", () => handleShutdown("SIGTERM received, cleaning up."));

// Last-resort diagnostics: log and keep running rather than crashing the
// whole bot on a stray async failure.
process.on("unhandledRejection", (reason) => {
  logError("Unhandled promise rejection", { reason: String(reason) });
});

process.on("uncaughtException", (err) => {
  logError("Uncaught exception", { error: err?.message, stack: err?.stack });
});
|
|
|
|
// Startup sequence. Order matters: queued batches are restored from disk
// before anything can touch them, the Google token is ensured before the
// Drive client is used, and WhatsApp initialization happens last.
async function boot() {
  restoreBatchesFromDisk();
  // NOTE(review): presumably may run the OAuth flow when the token file is
  // missing (see AUTO_AUTH_ON_MISSING_TOKEN) — confirm in ensureGoogleToken.
  await ensureGoogleToken();
  // Called for its side effect; the return value is ignored here so any
  // Drive-credential problem surfaces at startup, not on the first upload.
  getDriveClient();
  await initializeClientWithRetry();
}

// Top-level entry point: an unrecoverable startup error is logged and the
// process exits non-zero so a supervisor can restart it.
boot().catch((err) => {
  logError("Fatal startup error", { error: err?.message });
  process.exit(1);
});
|