351 lines
12 KiB
TypeScript
351 lines
12 KiB
TypeScript
import { Job, Queue, QueueEvents, Worker } from "bullmq";
|
|
import { Request, Response } from "express";
|
|
import fs from "fs-extra";
|
|
import path from "path";
|
|
import { logger } from "../server.js";
|
|
import MediaFile from "../util/interfaces/MediaFile.js";
|
|
import ListableChecker from "../util/listableChecker.js";
|
|
import { PathToRoBillsFolder, PathToRoFolder, PathToVendorBillsFile } from "../util/pathGenerators.js";
|
|
import { BillsRelativeFilePath, FolderPaths, JobRelativeFilePath } from "../util/serverInit.js";
|
|
|
|
const DELETE_QUEUE_NAME = "deleteQueue";
|
|
|
|
const connectionOpts = {
|
|
host: "localhost",
|
|
port: 6379,
|
|
enableReadyCheck: true,
|
|
reconnectOnError: (err: Error) => err.message.includes("READONLY")
|
|
};
|
|
|
|
// Queue of pending delete operations. Completed/failed jobs are pruned so
// Redis does not grow unbounded, and each job is retried with exponential
// backoff before being marked failed.
const deleteQueue = new Queue(DELETE_QUEUE_NAME, {
  connection: connectionOpts,
  defaultJobOptions: {
    removeOnComplete: 10, // keep only the 10 most recent completed jobs
    removeOnFail: 5, // keep only the 5 most recent failed jobs
    attempts: 3, // retry a failing delete up to 3 times
    backoff: { type: "exponential", delay: 2000 } // 2s, 4s, 8s between attempts
  }
});

// Event stream for this queue; used by the HTTP handler to await background
// completion of queued delete jobs (waitUntilFinished).
const deleteQueueEvents = new QueueEvents(DELETE_QUEUE_NAME, {
  connection: connectionOpts
});
|
|
|
|
// Worker that processes queued delete jobs. Each job payload carries the
// target job id and the base names of the files to remove.
const deleteWorker = new Worker(
  DELETE_QUEUE_NAME,
  async (job: Job<{ jobid: string; files: string[] }>) => {
    const { jobid, files } = job.data;
    logger.debug(`[DeleteWorker] Starting delete operation for job ${jobid} with ${files.length} files`);

    try {
      await job.updateProgress(5);

      // processDeleteOperation reports intermediate progress itself when a
      // BullMQ job handle is passed in.
      const result = await processDeleteOperation(jobid, files, job);

      await job.updateProgress(100);
      logger.debug(`[DeleteWorker] Completed delete operation for job ${jobid}`);
      return result;
    } catch (error) {
      // Rethrow so BullMQ marks the job failed and applies the retry policy.
      logger.error(`[DeleteWorker] Error deleting files for job ${jobid}:`, error);
      throw error;
    }
  },
  {
    connection: connectionOpts,
    concurrency: 2 // Limit concurrent delete operations
  }
);
|
|
|
|
// Worker event listeners for logging
deleteWorker.on("ready", () => {
  logger.debug("[DeleteWorker] Worker is ready");
});
deleteWorker.on("active", (job, prev) => {
  logger.debug(`[DeleteWorker] Job ${job.id} active (previous: ${prev})`);
});
deleteWorker.on("completed", async (job) => {
  logger.debug(`[DeleteWorker] Job ${job.id} completed`);
});
deleteWorker.on("failed", (job, err) => {
  // job may be undefined when the job data could not be recovered, hence ?.
  logger.error(`[DeleteWorker] Job ${job?.id} failed:`, err);
});
deleteWorker.on("stalled", (jobId) => {
  // NOTE(review): stalled = the worker lost its processing lock; BullMQ
  // re-queues such jobs, so this is logged for visibility only.
  logger.error(`[DeleteWorker] Job stalled: ${jobId}`);
});
deleteWorker.on("error", (err) => {
  logger.error("[DeleteWorker] Worker error:", err);
});

// Queue event listeners
deleteQueue.on("waiting", (job) => {
  logger.debug(`[DeleteQueue] Job waiting in queue: job ${job.data.jobid} - ${job.data.files.length} files`);
});
deleteQueue.on("error", (err) => {
  logger.error("[DeleteQueue] Queue error:", err);
});
|
|
|
|
async function processDeleteOperation(
|
|
jobid: string,
|
|
files: string[],
|
|
job?: Job
|
|
): Promise<{ deleted: number; failed: number }> {
|
|
await fs.ensureDir(PathToRoFolder(jobid));
|
|
logger.debug("Deleting media for job: " + PathToRoFolder(jobid));
|
|
|
|
try {
|
|
// Setup lists for both file locations
|
|
async function readFilteredDir(dirPath: string): Promise<fs.Dirent[]> {
|
|
const filtered: fs.Dirent[] = [];
|
|
try {
|
|
const dir = await fs.opendir(dirPath);
|
|
for await (const dirent of dir) {
|
|
if (dirent.isFile() && ListableChecker(dirent)) {
|
|
filtered.push(dirent);
|
|
}
|
|
}
|
|
} catch (err) {
|
|
logger.error(`Failed to read directory: ${dirPath}`, err);
|
|
}
|
|
return filtered;
|
|
}
|
|
|
|
const jobFileList = await readFilteredDir(PathToRoFolder(jobid));
|
|
const billFileList = await readFilteredDir(PathToRoBillsFolder(jobid));
|
|
|
|
if (job) await job.updateProgress(15);
|
|
|
|
// Helper function for safe file deletion
|
|
const safeUnlink = async (filePath: string, logPrefix: string = "[DeleteWorker] ") => {
|
|
try {
|
|
// lstat first to ensure it's a file and the handle is safe to unlink
|
|
if (await fs.pathExists(filePath)) {
|
|
const stats = await fs.lstat(filePath);
|
|
if (stats.isFile()) {
|
|
await fs.unlink(filePath);
|
|
logger.debug(`${logPrefix}Deleted: ${filePath}`);
|
|
}
|
|
}
|
|
} catch (err) {
|
|
logger.warn(`${logPrefix}Failed to delete ${filePath}: ${err}`);
|
|
}
|
|
};
|
|
|
|
// Helper to delete a file and its thumbnails
|
|
const deleteFileWithThumbs = async (mediaFile: MediaFile, logPrefix: string) => {
|
|
|
|
try {
|
|
await safeUnlink(mediaFile.path, logPrefix);
|
|
|
|
// Delete thumbnails
|
|
const thumbDir = path.dirname(mediaFile.thumbnailPath);
|
|
const baseThumb = path.basename(mediaFile.thumbnailPath, path.extname(mediaFile.thumbnailPath));
|
|
|
|
logger.debug(`${logPrefix}Deleting thumbnails from: ${thumbDir}, baseThumb: ${baseThumb}`);
|
|
|
|
for (const ext of [".jpg", ".png"]) {
|
|
const thumbPath = path.join(thumbDir, `${baseThumb}${ext}`);
|
|
await safeUnlink(thumbPath, logPrefix);
|
|
}
|
|
|
|
// Delete ConvertedOriginal file if it exists
|
|
// The ConvertedOriginal folder contains the original files with the same filename but original extension
|
|
const convertedOriginalDir = path.join(path.dirname(mediaFile.path), FolderPaths.ConvertedOriginalSubDir);
|
|
|
|
try {
|
|
if (await fs.pathExists(convertedOriginalDir)) {
|
|
const convertedOriginalFiles = await fs.readdir(convertedOriginalDir);
|
|
const currentFileName = path.basename(mediaFile.path, path.extname(mediaFile.path));
|
|
|
|
logger.debug(`Looking for ConvertedOriginal files with base name: ${currentFileName}`, {
|
|
convertedOriginalDir,
|
|
currentFileName,
|
|
availableFiles: convertedOriginalFiles
|
|
});
|
|
|
|
for (const file of convertedOriginalFiles) {
|
|
const fileBaseName = path.basename(file, path.extname(file));
|
|
// Match files that have the same base name (same filename, potentially different extension)
|
|
if (fileBaseName === currentFileName) {
|
|
const convertedOriginalPath = path.join(convertedOriginalDir, file);
|
|
await safeUnlink(convertedOriginalPath, logPrefix);
|
|
logger.debug(`Found and deleted ConvertedOriginal file: ${convertedOriginalPath}`);
|
|
}
|
|
}
|
|
}
|
|
} catch (error) {
|
|
logger.warn(`Error checking/deleting ConvertedOriginal files for ${mediaFile.path}:`, error);
|
|
}
|
|
} catch (error) {
|
|
logger.error(`${logPrefix}Error in deleteFileWithThumbs for ${mediaFile.path}:`, error);
|
|
throw error;
|
|
}
|
|
};
|
|
|
|
// Convert to MediaFile objects for better type safety
|
|
const jobMediaFiles: MediaFile[] = jobFileList.map((file) => {
|
|
const thumbName = file.name.replace(/\.[^/.]+$/, ".jpg");
|
|
const thumbPath = path.join(FolderPaths.Jobs, jobid, FolderPaths.ThumbsSubDir, thumbName);
|
|
const filePath = JobRelativeFilePath(jobid, file.name);
|
|
return {
|
|
name: file.name,
|
|
path: filePath,
|
|
thumbnailPath: thumbPath,
|
|
src: filePath,
|
|
thumbnail: thumbPath,
|
|
thumbnailHeight: 0,
|
|
thumbnailWidth: 0,
|
|
filename: file.name
|
|
};
|
|
});
|
|
|
|
// Delete job files with proper error handling
|
|
const jobDeletions = jobMediaFiles
|
|
.filter((mediaFile) => files.includes(path.basename(mediaFile.filename)))
|
|
.map((mediaFile) => deleteFileWithThumbs(mediaFile, "[DeleteWorker] "));
|
|
|
|
// Prepare bill media files
|
|
const billMediaFiles: MediaFile[] = billFileList.map((file) => {
|
|
const thumbName = file.name.replace(/\.[^/.]+$/, ".jpg");
|
|
const thumbPath = path.join(FolderPaths.Jobs, jobid, FolderPaths.BillsSubDir, FolderPaths.ThumbsSubDir, thumbName);
|
|
const filePath = BillsRelativeFilePath(jobid, file.name);
|
|
return {
|
|
name: file.name,
|
|
path: filePath,
|
|
thumbnailPath: thumbPath,
|
|
src: filePath,
|
|
thumbnail: thumbPath,
|
|
thumbnailHeight: 0,
|
|
thumbnailWidth: 0,
|
|
filename: file.name
|
|
};
|
|
});
|
|
|
|
// Delete bill files using the helper function
|
|
const billDeletions = billMediaFiles
|
|
.filter((mediaFile) => files.includes(path.basename(mediaFile.filename)))
|
|
.map((mediaFile) => deleteFileWithThumbs(mediaFile, "[DeleteWorker] Bill: "));
|
|
|
|
// Delete vendor duplicates if DUPLICATE_BILL_TO_VENDOR is enabled
|
|
const vendorDeletions: Promise<any>[] = [];
|
|
const duplicateToVendor = process.env.DUPLICATE_BILL_TO_VENDOR === "true";
|
|
|
|
if (duplicateToVendor) {
|
|
const billFilesToDelete = billMediaFiles
|
|
.filter((mediaFile) => files.includes(path.basename(mediaFile.filename)))
|
|
.map(mediaFile => path.basename(mediaFile.filename));
|
|
|
|
for (const billFile of billFilesToDelete) {
|
|
vendorDeletions.push(
|
|
(async () => {
|
|
try {
|
|
// Search for this file in all vendor directories
|
|
const vendorsDir = FolderPaths.Vendors;
|
|
if (await fs.pathExists(vendorsDir)) {
|
|
const vendors = await fs.readdir(vendorsDir, { withFileTypes: true });
|
|
|
|
for (const vendor of vendors) {
|
|
if (vendor.isDirectory()) {
|
|
const vendorFilePath = path.join(vendorsDir, vendor.name, billFile);
|
|
if (await fs.pathExists(vendorFilePath)) {
|
|
await safeUnlink(vendorFilePath, "[DeleteWorker] Vendor: ");
|
|
logger.debug(`[DeleteWorker] Deleted vendor file: ${vendorFilePath}`);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
} catch (error) {
|
|
logger.warn(`[DeleteWorker] Failed to delete vendor copies for ${billFile}:`, error);
|
|
}
|
|
})()
|
|
);
|
|
}
|
|
}
|
|
|
|
if (job) await job.updateProgress(80);
|
|
|
|
// Wait for all deletions to complete
|
|
const results = await Promise.allSettled([...jobDeletions, ...billDeletions, ...vendorDeletions]);
|
|
const failed = results.filter(r => r.status === 'rejected').length;
|
|
const deleted = results.filter(r => r.status === 'fulfilled').length;
|
|
|
|
logger.debug(`[DeleteWorker] Delete operation completed: ${deleted} successful, ${failed} failed`);
|
|
|
|
return { deleted, failed };
|
|
} catch (error) {
|
|
logger.error("[DeleteWorker] Error in processDeleteOperation:", error);
|
|
throw error;
|
|
}
|
|
}
|
|
|
|
export async function JobsDeleteMedia(req: Request, res: Response) {
|
|
const jobid: string = (req.body.jobid || "").trim();
|
|
const files: string[] = req.body.files || [];
|
|
|
|
try {
|
|
if (!files.length) {
|
|
res.status(400).json({ error: "files must be specified." });
|
|
return;
|
|
}
|
|
|
|
// For small operations (1-5 files), process synchronously for immediate feedback
|
|
if (files.length <= 5) {
|
|
logger.debug("Processing small delete operation synchronously");
|
|
await processDeleteOperation(jobid, files);
|
|
res.sendStatus(200);
|
|
return;
|
|
}
|
|
|
|
// For larger operations, use BullMQ but still return success immediately
|
|
logger.debug(`[JobsDeleteMedia] Queuing delete operation for ${files.length} files`);
|
|
const job = await deleteQueue.add("deleteMedia", { jobid, files });
|
|
|
|
// Return success immediately (optimistic response)
|
|
res.sendStatus(200);
|
|
|
|
// Process in background - if it fails, files will still be there on next refresh
|
|
job.waitUntilFinished(deleteQueueEvents)
|
|
.then(() => {
|
|
logger.debug(`[JobsDeleteMedia] Background delete completed for job ${job.id}`);
|
|
})
|
|
.catch((error) => {
|
|
logger.error(`[JobsDeleteMedia] Background delete failed for job ${job.id}:`, error);
|
|
});
|
|
|
|
} catch (error) {
|
|
logger.error("Error deleting job media.", { jobid, error });
|
|
if (!res.headersSent) res.status(500).json({ error: "Failed to delete media", details: error });
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Get the status of a delete operation job
|
|
*/
|
|
export async function JobsDeleteStatus(req: Request, res: Response) {
|
|
const { jobId } = req.params;
|
|
|
|
try {
|
|
const job = await Job.fromId(deleteQueue, jobId);
|
|
|
|
if (!job) {
|
|
res.status(404).json({ error: "Job not found" });
|
|
return;
|
|
}
|
|
|
|
const state = await job.getState();
|
|
const progress = job.progress;
|
|
|
|
res.json({
|
|
jobId,
|
|
state,
|
|
progress,
|
|
data: job.data,
|
|
finishedOn: job.finishedOn,
|
|
processedOn: job.processedOn,
|
|
failedReason: job.failedReason
|
|
});
|
|
} catch (error) {
|
|
logger.error("Error getting delete job status:", error);
|
|
res.status(500).json({ error: "Failed to get job status" });
|
|
}
|
|
}
|