IO-3151 Package Update and Optimizations

This commit is contained in:
Allan Carr
2025-03-06 08:48:48 -08:00
parent fd0d3c072b
commit c3f408f206
9 changed files with 1106 additions and 793 deletions

View File

@@ -1,4 +1,4 @@
import { Job, Queue, QueueEvents, Worker } from "bullmq";
import { Job, Queue, Worker } from "bullmq";
import dotenv from "dotenv";
import { fileTypeFromFile } from "file-type";
import { FileTypeResult } from "file-type/core";
@@ -12,7 +12,29 @@ import { FolderPaths } from "./serverInit.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const HeicQueue = new Queue("HEIC Queue", { connection: { host: "localhost", port: 6379 } });
// BullMQ queue backing HEIC→JPEG conversion; Redis is assumed local on the default port.
const HeicQueue = new Queue("HEIC Queue", {
connection: {
host: "localhost",
port: 6379,
maxRetriesPerRequest: 3,
enableReadyCheck: true,
// Reconnect when Redis reports READONLY (typically after a failover to a replica).
reconnectOnError: function (err) {
const targetError = "READONLY";
return err.message.includes(targetError);
}
},
defaultJobOptions: {
// Keep Redis tidy: drop job data once a job finishes, success or failure.
removeOnComplete: true,
removeOnFail: true,
// Retry failed conversions up to 3 times with exponential backoff (1s base delay).
attempts: 3,
backoff: {
type: "exponential",
delay: 1000
}
}
});
// Run cleanupQueue every 10 minutes to prune stale jobs and log queue health.
const cleanupINTERVAL = 1000 * 60 * 10;
setInterval(cleanupQueue, cleanupINTERVAL);
dotenv.config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
@@ -20,32 +42,49 @@ dotenv.config({
const imageMagick = gm.subClass({ imageMagick: true });
/**
 * Periodic maintenance for the HEIC queue (invoked via setInterval above).
 * Prunes old completed/failed jobs from Redis and logs current job counts.
 * Errors are caught and logged so the interval timer keeps firing.
 */
async function cleanupQueue() {
const ONE_HOUR = 1000 * 60 * 60;
const SIX_HOURS = ONE_HOUR * 6;
try {
// Clean completed jobs older than 1 hour (at most 500 per pass)
await HeicQueue.clean(ONE_HOUR, 500, "completed");
// Clean failed jobs older than 6 hours (grace period is SIX_HOURS, not 24h)
await HeicQueue.clean(SIX_HOURS, 500, "failed");
// Get queue health
const jobCounts = await HeicQueue.getJobCounts();
logger.log("debug", `Queue status: ${JSON.stringify(jobCounts)}`);
} catch (error) {
logger.log("error", `Queue cleanup error: ${error}`);
}
}
/**
 * Enqueues HEIC→JPEG conversion jobs for every valid HEIC upload, then rewrites
 * the matching entries in `files` to carry the converted JPEG name and mimetype.
 * NOTE(review): the rename happens at enqueue time, before the worker has actually
 * converted anything — callers may observe a JPEG filename/mimetype while the
 * conversion job is still pending or has failed; confirm this is intended.
 */
export async function ConvertHeicFiles(files: Express.Multer.File[]) {
const validFiles = await filterValidHeicFiles(files);
const jobs = await HeicQueue.addBulk(
validFiles.map((file) => ({
name: file.filename,
data: { convertedFileName: generateUniqueHeicFilename(file), file }
}))
);
// await Promise.all(
// validFiles.map(async (file) => {
// const convertedFileName = generateUniqueHeicFilename(file);
// await HeicQueue.add(convertedFileName, { convertedFileName, file },{removeOnComplete: true,});
// Job payload carries only the plain fields the worker needs (path/destination/name)
// rather than the whole Multer file object, so it serializes cleanly into Redis.
const jobs = validFiles.map((file) => ({
name: file.filename,
data: {
convertedFileName: generateUniqueHeicFilename(file),
fileInfo: {
path: file.path,
destination: file.destination,
originalFilename: file.filename
}
}
}));
// // try {
// // await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`);
// // logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`);
// // await handleOriginalFile(file, convertedFileName);
// // file.filename = convertedFileName;
// // file.mimetype = "image/jpeg";
// // file.path = `${file.destination}/${convertedFileName}`;
// // } catch (error) {
// // logger.log("error", `Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}`);
// // }
// })
// );
await HeicQueue.addBulk(jobs);
// Map original filenames back to their index in the caller's array so the
// converted name/mimetype can be written in place.
const fileMap = new Map(files.map((file, index) => [file.filename, index]));
jobs.forEach((job) => {
const fileIndex = fileMap.get(job.data.fileInfo.originalFilename);
if (fileIndex !== undefined) {
files[fileIndex].filename = job.data.convertedFileName;
files[fileIndex].mimetype = "image/jpeg";
// NOTE(review): unlike the earlier inline version above, file.path is NOT
// updated here — downstream readers of file.path still see the HEIC path; verify.
}
});
}
async function filterValidHeicFiles(files: Express.Multer.File[]) {
@@ -59,56 +98,81 @@ async function filterValidHeicFiles(files: Express.Multer.File[]) {
return validFiles;
}
async function handleOriginalFile(file: Express.Multer.File, convertedFileName: string) {
if (process.env.KEEP_CONVERTED_ORIGINALS) {
await fs.ensureDir(path.join(file.destination, FolderPaths.ConvertedOriginalSubDir));
await fs.move(file.path, `${path.join(file.destination, FolderPaths.ConvertedOriginalSubDir)}/${file.filename}`);
} else {
await fs.unlink(file.path);
/**
 * Disposes of the original HEIC file after conversion: either archives it into
 * the ConvertedOriginalSubDir or deletes it, depending on KEEP_CONVERTED_ORIGINALS.
 * Rethrows after logging so the worker job is marked failed.
 * NOTE(review): this is a truthiness check on an env string — setting
 * KEEP_CONVERTED_ORIGINALS="false" still archives the originals; confirm intended.
 */
async function handleOriginalFile(fileInfo: { path: string; destination: string; originalFilename: string }) {
try {
if (process.env.KEEP_CONVERTED_ORIGINALS) {
await fs.ensureDir(path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir));
await fs.move(
fileInfo.path,
`${path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir)}/${fileInfo.originalFilename}`
);
} else {
await fs.unlink(fileInfo.path);
}
} catch (error) {
logger.log("error", `Error handling original file: ${error}`);
throw error;
}
}
/**
 * Converts the HEIC at `file` to a JPEG written to `newPath` via ImageMagick,
 * streaming instead of buffering the whole file in memory.
 * Resolves with the output path once the write stream finishes.
 */
async function ConvertToJpeg(file: string, newPath: string) {
const fileOnDisk: Buffer = await fs.readFile(file);
// const fileOnDisk: Buffer = await fs.readFile(file);
// return new Promise<string>((resolve, reject) => {
// imageMagick(fileOnDisk)
// .setFormat("jpg")
// .write(newPath, (error) => {
// if (error) reject(error.message);
// resolve(newPath);
// });
// });
return new Promise<string>((resolve, reject) => {
imageMagick(fileOnDisk)
const readStream = fs.createReadStream(file);
const writeStream = fs.createWriteStream(newPath);
imageMagick(readStream)
.setFormat("jpg")
.write(newPath, (error) => {
if (error) reject(error.message);
resolve(newPath);
});
.stream()
// NOTE(review): pipe() returns the destination, so these listeners are on
// writeStream only — errors from readStream or the gm conversion stream are
// not forwarded by pipe() and may leave this promise pending; confirm/handle.
.pipe(writeStream)
.on("finish", () => resolve(newPath))
.on("error", (error) => reject(error.message));
});
}
//Previous implementation used sandboxing. It could not be set up because the imports try to launch the server again.
// const processorUrl = pathToFileURL(__dirname + "/heicQueueProcessor.ts");
// const HeicWorker = new Worker("HEIC Queue", processorUrl, {
// connection: { host: "localhost", port: 6379 }
// });
// Worker that drains the HEIC Queue: converts each file to JPEG, disposes of the
// original, and reports progress. Throwing from the handler marks the job failed,
// which (per the queue's defaultJobOptions) triggers up to 3 retries with backoff.
const HeicWorker = new Worker(
"HEIC Queue",
async (job: Job) => {
const { file, convertedFileName } = job.data;
const { fileInfo, convertedFileName } = job.data;
try {
logger.log("debug", `Attempting to Convert ${file.filename} image to JPEG from HEIC.`);
await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`);
logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`);
await handleOriginalFile(file, convertedFileName);
file.filename = convertedFileName;
file.mimetype = "image/jpeg";
file.path = `${file.destination}/${convertedFileName}`;
logger.log("debug", `Attempting to Convert ${fileInfo.originalFilename} image to JPEG from HEIC.`);
// Coarse progress milestones: 10% queued-for-convert, 50% converted, 100% done.
await job.updateProgress(10);
await ConvertToJpeg(fileInfo.path, `${fileInfo.destination}/${convertedFileName}`);
await job.updateProgress(50);
await handleOriginalFile(fileInfo);
logger.log("debug", `Converted ${fileInfo.originalFilename} image to JPEG from HEIC.`);
await job.updateProgress(100);
return true;
} catch (error) {
logger.log(
"error",
`QUEUE ERROR: Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}`
`QUEUE ERROR: Error converting ${fileInfo.originalFilename} image to JPEG from HEIC. ${JSON.stringify(error)}`
);
return false;
// Rethrow so BullMQ records the failure and applies the retry policy.
throw error;
}
},
{
connection: { host: "localhost", port: 6379 }
// NOTE(review): connection options duplicate the Queue's config above —
// consider extracting a shared constant to keep them in sync.
connection: {
host: "localhost",
port: 6379,
maxRetriesPerRequest: 3,
enableReadyCheck: true,
reconnectOnError: function (err) {
const targetError = "READONLY";
return err.message.includes(targetError);
}
},
// One conversion at a time — presumably to limit ImageMagick load; confirm.
concurrency: 1
}
);
@@ -125,8 +189,10 @@ HeicWorker.on("ready", () => {
// Lifecycle logging: job picked up by the worker.
HeicWorker.on("active", (job, prev) => {
logger.log("debug", `[BULLMQ] Job ${job.id} is now active; previous status was ${prev}`);
});
HeicWorker.on("completed", (jobId, returnvalue) => {
logger.log("debug", `[BULLMQ] ${jobId.id} has completed and returned ${returnvalue}`);
// NOTE(review): defaultJobOptions already sets removeOnComplete: true, so this
// explicit job.remove() looks redundant and may throw if the job is already gone; verify.
HeicWorker.on("completed", async (job, returnvalue) => {
logger.log("debug", `[BULLMQ] ${job.id} has completed and returned ${returnvalue}`);
await job.remove();
logger.log("debug", `Job ${job.id} removed from Redis`);
});
HeicWorker.on("failed", (jobId, failedReason) => {
logger.log("error", `[BULLMQ] ${jobId} has failed with reason ${failedReason}`);
@@ -140,17 +206,3 @@ HeicWorker.on("stalled", (error) => {
// Fires when the underlying ioredis connection closes; logged at error level for visibility.
HeicWorker.on("ioredis:close", () => {
logger.log("error", `[BULLMQ] Redis connection closed!`);
});
// const queueEvents = new QueueEvents( "HEIC Queue");
// queueEvents.on('completed', ( jobId, returnvalue ) => {
// // Called every time a job is completed by any worker.
// });
// queueEvents.on('failed', (jobId, failedReason ) => {
// // Called whenever a job is moved to failed by any worker.
// });
// queueEvents.on('progress', (jobId, data) => {
// // jobId received a progress event
// });

View File

@@ -1,7 +1,6 @@
import dotenv from "dotenv";
import { NextFunction, Request, Response } from "express";
import { resolve } from "path";
//import { logger } from "../server.ts";
dotenv.config({
path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)