IO-3151 Package Update and Optimizations

This commit is contained in:
Allan Carr
2025-03-06 08:48:48 -08:00
parent fd0d3c072b
commit c3f408f206
9 changed files with 1106 additions and 793 deletions

View File

@@ -1 +1 @@
MEDIA_PATH=~/Pictures PORT=8000

View File

@@ -1,86 +1,77 @@
FROM node:20 ### ALPINE MULTI-STAGE
# Build stage for libraries
FROM node:22-alpine AS builder
# Create app directory # Install build dependencies
RUN apk add --no-cache \
bash wget build-base autoconf automake cmake libtool pkgconf \
libjpeg-turbo-dev libpng-dev libwebp-dev tiff-dev libde265-dev \
ruby ruby-dev
# Replace source-built libde265 and libheif with installed libraries in the next release
# libheif-dev libde265-dev x265-dev
# Build libde265
WORKDIR /build/libde265
RUN wget https://github.com/strukturag/libde265/archive/v1.0.15.tar.gz \
&& tar -xvf v1.0.15.tar.gz \
&& cd libde265-1.0.15 \
&& cmake . \
&& make \
&& make install
# Build libheif
WORKDIR /build/libheif
RUN wget https://github.com/strukturag/libheif/archive/v1.19.7.tar.gz \
&& tar -xvf v1.19.7.tar.gz \
&& cd libheif-1.19.7 \
&& cmake --preset=release . \
&& make \
&& make install
# Build ImageMagick
WORKDIR /build/imagemagick
RUN wget https://download.imagemagick.org/archive/releases/ImageMagick-7.1.1-44.tar.xz \
&& tar -xvf ImageMagick-7.1.1-44.tar.xz \
&& cd ImageMagick-7.1.1-44 \
&& ./configure --with-heic=yes --with-webp=yes \
&& make \
&& make install
# Node.js application build stage
WORKDIR /usr/src/app WORKDIR /usr/src/app
# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./ COPY package*.json ./
RUN npm install -g typescript
RUN npm install RUN npm install
# If you are building your code for production
# RUN npm ci --only=production
# Bundle app source
COPY . . COPY . .
RUN apt -y update
RUN apt install -y wget
RUN apt install -y lsb-release curl gpg
RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
RUN chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg
RUN echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/redis.list
RUN apt update
RUN apt install -y redis
# PNG, JPG, TIFF & WebP support
# Consider adding more support with testing https://gist.github.com/hurricup/e14ae5bc47705fca6b1680e7a1fb6580
RUN apt install -y libjpeg-dev
RUN apt install -y libpng-dev
RUN apt install -y libtiff-dev
RUN apt install -y libwebp-dev
# Install HEIF support (the libheic-dev package does not exist on 16.04)
RUN apt -y install libde265-dev
RUN apt -y install pkg-config m4 libtool automake autoconf cmake
RUN wget https://github.com/strukturag/libde265/archive/v1.0.15.tar.gz
RUN tar -xvf v1.0.15.tar.gz
WORKDIR /usr/src/app/libde265-1.0.15/
RUN cmake .
RUN make
RUN make install
RUN ./autogen.sh
RUN ./configure
WORKDIR /usr/src/app
RUN wget https://github.com/strukturag/libheif/archive/v1.18.2.tar.gz
RUN tar -xvf v1.18.2.tar.gz
WORKDIR /usr/src/app/libheif-1.18.2/
RUN cmake --preset=release .
RUN make
RUN make install
WORKDIR /usr/src/app
# Install ruby 2.3.0 for ImageMagick
RUN apt -y install -y build-essential zlib1g-dev libssl-dev libreadline6-dev libyaml-dev
RUN apt -y install -y ruby-full && ruby -v
# Install ImageMagick
# RUN apt-get install imagemagick -y
# # Install ImageMagick with WEBP and HEIC support
RUN wget https://download.imagemagick.org/archive/releases/ImageMagick-7.1.1-37.tar.xz
RUN tar -xvf ImageMagick-7.1.1-37.tar.xz
WORKDIR /usr/src/app/ImageMagick-7.1.1-37/
RUN ./configure --with-heic=yes --with-webp=yes
RUN make
RUN make install
RUN ldconfig /usr/local/lib
RUN identify --version
RUN apt update && apt install -y ghostscript graphicsmagick \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /usr/src/app
RUN npm run build RUN npm run build
RUN npm install pm2 -g # Final stage
FROM node:22-alpine
# Install runtime dependencies only
RUN apk add --no-cache \
bash redis ghostscript graphicsmagick imagemagick \
libjpeg-turbo libpng libwebp tiff
# Copy built libraries from builder
COPY --from=builder /usr/local/lib/ /usr/local/lib/
COPY --from=builder /usr/local/bin/ /usr/local/bin/
COPY --from=builder /usr/local/include/ /usr/local/include/
# Update library cache
RUN ldconfig /usr/local/lib
RUN npm install -g pm2
WORKDIR /usr/src/app
# Copy built application from builder
COPY --from=builder /usr/src/app/dist ./dist
COPY --from=builder /usr/src/app/node_modules ./node_modules
COPY --from=builder /usr/src/app/.env.production ./.env.production
COPY --from=builder /usr/src/app/ecosystem.config.cjs ./ecosystem.config.cjs
EXPOSE 8000 EXPOSE 8000

View File

@@ -1,9 +1,14 @@
module.exports = [ module.exports = {
{ apps: [
script: "dist/server.js", {
name: "MediaServer", script: "dist/server.js",
exec_mode: "cluster", name: "MediaServer",
instances: 0, exec_mode: "cluster",
cron_restart: "30 8 * * *" instances: 0,
} cron_restart: "30 8 * * *",
]; env: {
NODE_ENV: "production"
}
}
]
};

View File

@@ -1,9 +1,11 @@
import { NextFunction, Request, Response } from "express"; import { NextFunction, Request, Response } from "express";
export default function validateJobRequest(req: Request, res: Response, next: NextFunction) { const validateJobRequest: (req: Request, res: Response, next: NextFunction) => void = (req, res, next) => {
const jobId: string = (req.body.jobid || "").trim(); const jobId: string = (req.body.jobid || "").trim();
if (jobId === "") { if (jobId === "") {
return res.status(400).json({ error: "No RO Number has been specified." }); return res.status(400).json({ error: "No RO Number has been specified." });
} }
next(); next();
} };
export default validateJobRequest;

1419
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "bodyshop-media-server", "name": "bodyshop-media-server",
"version": "1.0.12", "version": "1.0.13",
"license": "UNLICENSED", "license": "UNLICENSED",
"engines": { "engines": {
"node": ">=18.0.0" "node": ">=18.0.0"
@@ -13,40 +13,42 @@
"makeitpretty": "prettier --write \"**/*.{css,js,json,jsx,scss,ts}\"" "makeitpretty": "prettier --write \"**/*.{css,js,json,jsx,scss,ts}\""
}, },
"dependencies": { "dependencies": {
"axios": "^1.7.5", "@types/compression": "^1.7.5",
"body-parser": "^1.20.2", "axios": "^1.8.1",
"bullmq": "^5.12.12", "body-parser": "^1.20.3",
"bullmq": "^5.41.7",
"compression": "^1.8.0",
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "16.4.5", "dotenv": "16.4.7",
"express": "^4.19.2", "express": "^4.21.2",
"file-type": "^19.4.1", "file-type": "^20.4.0",
"fs-extra": "^11.2.0", "fs-extra": "^11.3.0",
"gm": "^1.25.0", "gm": "^1.25.1",
"helmet": "^7.1.0", "helmet": "^8.0.0",
"image-thumbnail": "^1.0.17", "image-thumbnail": "^1.0.17",
"jszip": "^3.10.1", "jszip": "^3.10.1",
"morgan": "^1.10.0", "morgan": "^1.10.0",
"multer": "^1.4.4", "multer": "^1.4.4",
"nocache": "^4.0.0", "nocache": "^4.0.0",
"response-time": "^2.3.2", "response-time": "^2.3.3",
"simple-thumbnail": "^1.6.5", "simple-thumbnail": "^1.6.5",
"winston": "^3.14.2", "winston": "^3.17.0",
"winston-daily-rotate-file": "^5.0.0" "winston-daily-rotate-file": "^5.0.0"
}, },
"devDependencies": { "devDependencies": {
"@types/cors": "^2.8.17", "@types/cors": "^2.8.17",
"@types/express": "^4.17.21", "@types/express": "^5.0.0",
"@types/fs-extra": "^11.0.4", "@types/fs-extra": "^11.0.4",
"@types/gm": "^1.25.4", "@types/gm": "^1.25.4",
"@types/image-thumbnail": "^1.0.4", "@types/image-thumbnail": "^1.0.4",
"@types/morgan": "^1.9.9", "@types/morgan": "^1.9.9",
"@types/multer": "^1.4.12", "@types/multer": "^1.4.12",
"@types/node": "^22.5.1", "@types/node": "^22.13.9",
"@types/response-time": "^2.3.8", "@types/response-time": "^2.3.8",
"nodemon": "^3.1.4", "nodemon": "^3.1.9",
"prettier": "^3.3.3", "prettier": "^3.5.3",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0", "tsconfig-paths": "^4.2.0",
"typescript": "^5.5.4" "typescript": "^5.8.2"
} }
} }

View File

@@ -1,4 +1,5 @@
import bodyParser from "body-parser"; import bodyParser from "body-parser";
import compression from "compression";
import cors from "cors"; import cors from "cors";
import dotenv from "dotenv"; import dotenv from "dotenv";
import express, { Express } from "express"; import express, { Express } from "express";
@@ -19,79 +20,82 @@ import { JobsListMedia } from "./jobs/jobsListMedia.js";
import { JobsMoveMedia } from "./jobs/jobsMoveMedia.js"; import { JobsMoveMedia } from "./jobs/jobsMoveMedia.js";
import { JobMediaUploadMulter, jobsUploadMedia } from "./jobs/jobsUploadMedia.js"; import { JobMediaUploadMulter, jobsUploadMedia } from "./jobs/jobsUploadMedia.js";
import InitServer, { FolderPaths } from "./util/serverInit.js"; import InitServer, { FolderPaths } from "./util/serverInit.js";
import ValidateImsToken from "./util/validateToken.js"; import ValidateImsToken from "./util/validateToken.js";
dotenv.config({ dotenv.config({
path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
}); });
const commonTransportConfig = {
maxSize: "20m",
maxFiles: 14,
tailable: true,
zippedArchive: true,
format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
datePattern: "YYYY-MM-DD"
};
const baseFormat = winston.format.combine(
winston.format.timestamp(),
winston.format.errors({ stack: true }),
winston.format.json(),
winston.format.prettyPrint()
);
const consoleTransport = new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.timestamp(), winston.format.simple())
});
export const logger = winston.createLogger({ export const logger = winston.createLogger({
format: winston.format.combine(winston.format.timestamp(), winston.format.json(), winston.format.prettyPrint()), format: baseFormat,
level: "http", level: "http",
levels: { ...winston.config.syslog.levels, http: 8 }, levels: { ...winston.config.syslog.levels, http: 8 },
exceptionHandlers: [ exceptionHandlers: [
new DailyRotateFile({ new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "exceptions-%DATE%.log"), filename: path.join(FolderPaths.Root, "logs", "exceptions-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH", ...commonTransportConfig
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
}), }),
new winston.transports.Console({ consoleTransport
format: winston.format.combine(winston.format.colorize(), winston.format.simple())
})
], ],
rejectionHandlers: [ rejectionHandlers: [
new DailyRotateFile({ new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "rejections-%DATE%.log"), filename: path.join(FolderPaths.Root, "logs", "rejections-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH", ...commonTransportConfig
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
}), }),
new winston.transports.Console({ consoleTransport
format: winston.format.combine(winston.format.colorize(), winston.format.simple())
})
], ],
transports: [ transports: [
new DailyRotateFile({ new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "errors-%DATE%.log"), filename: path.join(FolderPaths.Root, "logs", "errors-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH", level: "error",
zippedArchive: true, ...commonTransportConfig
maxSize: "20m",
maxFiles: "14",
level: "error"
}), }),
new DailyRotateFile({ new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "debug-%DATE%.log"), filename: path.join(FolderPaths.Root, "logs", "debug-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH", level: "debug",
zippedArchive: true, ...commonTransportConfig
maxSize: "20m",
maxFiles: "14",
level: "debug"
}), }),
new DailyRotateFile({ new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "ALL-%DATE%.log"), filename: path.join(FolderPaths.Root, "logs", "ALL-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH", ...commonTransportConfig
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
}) })
] ]
}); });
if (process.env.NODE_ENV !== "production") { logger.add(
logger.add( new winston.transports.Console({
new winston.transports.Console({ format: winston.format.combine(winston.format.colorize(), winston.format.timestamp(), winston.format.simple())
format: winston.format.combine(winston.format.colorize(), winston.format.simple()) })
}) );
);
}
const app: Express = express(); const app: Express = express();
const port = process.env.PORT; const port = process.env.PORT;
app.set("etag", false); app.set("etag", false);
app.use(compression());
app.use((req, res, next) => {
res.setHeader("Connection", "keep-alive");
next();
});
app.use(nocache()); app.use(nocache());
app.use(bodyParser.json({ limit: "1000mb" })); app.use(bodyParser.json({ limit: "1000mb" }));
app.use(bodyParser.urlencoded({ limit: "1000mb", extended: true })); app.use(bodyParser.urlencoded({ limit: "1000mb", extended: true }));
@@ -135,6 +139,9 @@ app.post(
app.get("/", ValidateImsToken, (req: express.Request, res: express.Response) => { app.get("/", ValidateImsToken, (req: express.Request, res: express.Response) => {
res.send("IMS running."); res.send("IMS running.");
}); });
app.get("/health", (req: express.Request, res: express.Response) => {
res.status(200).send("OK");
});
InitServer(); InitServer();
app.use(FolderPaths.StaticPath, express.static(FolderPaths.Root, { etag: false, maxAge: 30 * 1000 })); app.use(FolderPaths.StaticPath, express.static(FolderPaths.Root, { etag: false, maxAge: 30 * 1000 }));

View File

@@ -1,4 +1,4 @@
import { Job, Queue, QueueEvents, Worker } from "bullmq"; import { Job, Queue, Worker } from "bullmq";
import dotenv from "dotenv"; import dotenv from "dotenv";
import { fileTypeFromFile } from "file-type"; import { fileTypeFromFile } from "file-type";
import { FileTypeResult } from "file-type/core"; import { FileTypeResult } from "file-type/core";
@@ -12,7 +12,29 @@ import { FolderPaths } from "./serverInit.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url)); const __dirname = path.dirname(fileURLToPath(import.meta.url));
const HeicQueue = new Queue("HEIC Queue", { connection: { host: "localhost", port: 6379 } }); const HeicQueue = new Queue("HEIC Queue", {
connection: {
host: "localhost",
port: 6379,
maxRetriesPerRequest: 3,
enableReadyCheck: true,
reconnectOnError: function (err) {
const targetError = "READONLY";
return err.message.includes(targetError);
}
},
defaultJobOptions: {
removeOnComplete: true,
removeOnFail: true,
attempts: 3,
backoff: {
type: "exponential",
delay: 1000
}
}
});
const cleanupINTERVAL = 1000 * 60 * 10;
setInterval(cleanupQueue, cleanupINTERVAL);
dotenv.config({ dotenv.config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
@@ -20,32 +42,49 @@ dotenv.config({
const imageMagick = gm.subClass({ imageMagick: true }); const imageMagick = gm.subClass({ imageMagick: true });
async function cleanupQueue() {
const ONE_HOUR = 1000 * 60 * 60;
const SIX_HOURS = ONE_HOUR * 6;
try {
// Clean completed jobs older than 1 hour
await HeicQueue.clean(ONE_HOUR, 500, "completed");
// Clean failed jobs older than 24 hours
await HeicQueue.clean(SIX_HOURS, 500, "failed");
// Get queue health
const jobCounts = await HeicQueue.getJobCounts();
logger.log("debug", `Queue status: ${JSON.stringify(jobCounts)}`);
} catch (error) {
logger.log("error", `Queue cleanup error: ${error}`);
}
}
export async function ConvertHeicFiles(files: Express.Multer.File[]) { export async function ConvertHeicFiles(files: Express.Multer.File[]) {
const validFiles = await filterValidHeicFiles(files); const validFiles = await filterValidHeicFiles(files);
const jobs = await HeicQueue.addBulk( const jobs = validFiles.map((file) => ({
validFiles.map((file) => ({ name: file.filename,
name: file.filename, data: {
data: { convertedFileName: generateUniqueHeicFilename(file), file } convertedFileName: generateUniqueHeicFilename(file),
})) fileInfo: {
); path: file.path,
// await Promise.all( destination: file.destination,
// validFiles.map(async (file) => { originalFilename: file.filename
// const convertedFileName = generateUniqueHeicFilename(file); }
// await HeicQueue.add(convertedFileName, { convertedFileName, file },{removeOnComplete: true,}); }
}));
// // try { await HeicQueue.addBulk(jobs);
// // await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`);
// // logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`); const fileMap = new Map(files.map((file, index) => [file.filename, index]));
// // await handleOriginalFile(file, convertedFileName); jobs.forEach((job) => {
// // file.filename = convertedFileName; const fileIndex = fileMap.get(job.data.fileInfo.originalFilename);
// // file.mimetype = "image/jpeg"; if (fileIndex !== undefined) {
// // file.path = `${file.destination}/${convertedFileName}`; files[fileIndex].filename = job.data.convertedFileName;
// // } catch (error) { files[fileIndex].mimetype = "image/jpeg";
// // logger.log("error", `Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}`); }
// // } });
// })
// );
} }
async function filterValidHeicFiles(files: Express.Multer.File[]) { async function filterValidHeicFiles(files: Express.Multer.File[]) {
@@ -59,56 +98,81 @@ async function filterValidHeicFiles(files: Express.Multer.File[]) {
return validFiles; return validFiles;
} }
async function handleOriginalFile(file: Express.Multer.File, convertedFileName: string) { async function handleOriginalFile(fileInfo: { path: string; destination: string; originalFilename: string }) {
if (process.env.KEEP_CONVERTED_ORIGINALS) { try {
await fs.ensureDir(path.join(file.destination, FolderPaths.ConvertedOriginalSubDir)); if (process.env.KEEP_CONVERTED_ORIGINALS) {
await fs.move(file.path, `${path.join(file.destination, FolderPaths.ConvertedOriginalSubDir)}/${file.filename}`); await fs.ensureDir(path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir));
} else { await fs.move(
await fs.unlink(file.path); fileInfo.path,
`${path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir)}/${fileInfo.originalFilename}`
);
} else {
await fs.unlink(fileInfo.path);
}
} catch (error) {
logger.log("error", `Error handling original file: ${error}`);
throw error;
} }
} }
async function ConvertToJpeg(file: string, newPath: string) { async function ConvertToJpeg(file: string, newPath: string) {
const fileOnDisk: Buffer = await fs.readFile(file); // const fileOnDisk: Buffer = await fs.readFile(file);
// return new Promise<string>((resolve, reject) => {
// imageMagick(fileOnDisk)
// .setFormat("jpg")
// .write(newPath, (error) => {
// if (error) reject(error.message);
// resolve(newPath);
// });
// });
return new Promise<string>((resolve, reject) => { return new Promise<string>((resolve, reject) => {
imageMagick(fileOnDisk) const readStream = fs.createReadStream(file);
const writeStream = fs.createWriteStream(newPath);
imageMagick(readStream)
.setFormat("jpg") .setFormat("jpg")
.write(newPath, (error) => { .stream()
if (error) reject(error.message); .pipe(writeStream)
resolve(newPath); .on("finish", () => resolve(newPath))
}); .on("error", (error) => reject(error.message));
}); });
} }
// Previous implementation used sandboxing. Cannot set it up because the imports try to launch the server again.
// const processorUrl = pathToFileURL(__dirname + "/heicQueueProcessor.ts");
// const HeicWorker = new Worker("HEIC Queue", processorUrl, {
// connection: { host: "localhost", port: 6379 }
// });
const HeicWorker = new Worker( const HeicWorker = new Worker(
"HEIC Queue", "HEIC Queue",
async (job: Job) => { async (job: Job) => {
const { file, convertedFileName } = job.data; const { fileInfo, convertedFileName } = job.data;
try { try {
logger.log("debug", `Attempting to Convert ${file.filename} image to JPEG from HEIC.`); logger.log("debug", `Attempting to Convert ${fileInfo.originalFilename} image to JPEG from HEIC.`);
await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`); await job.updateProgress(10);
logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`); await ConvertToJpeg(fileInfo.path, `${fileInfo.destination}/${convertedFileName}`);
await handleOriginalFile(file, convertedFileName); await job.updateProgress(50);
file.filename = convertedFileName; await handleOriginalFile(fileInfo);
file.mimetype = "image/jpeg"; logger.log("debug", `Converted ${fileInfo.originalFilename} image to JPEG from HEIC.`);
file.path = `${file.destination}/${convertedFileName}`; await job.updateProgress(100);
return true; return true;
} catch (error) { } catch (error) {
logger.log( logger.log(
"error", "error",
`QUEUE ERROR: Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}` `QUEUE ERROR: Error converting ${fileInfo.originalFilename} image to JPEG from HEIC. ${JSON.stringify(error)}`
); );
return false; throw error;
} }
}, },
{ {
connection: { host: "localhost", port: 6379 } connection: {
host: "localhost",
port: 6379,
maxRetriesPerRequest: 3,
enableReadyCheck: true,
reconnectOnError: function (err) {
const targetError = "READONLY";
return err.message.includes(targetError);
}
},
concurrency: 1
} }
); );
@@ -125,8 +189,10 @@ HeicWorker.on("ready", () => {
HeicWorker.on("active", (job, prev) => { HeicWorker.on("active", (job, prev) => {
logger.log("debug", `[BULLMQ] Job ${job.id} is now active; previous status was ${prev}`); logger.log("debug", `[BULLMQ] Job ${job.id} is now active; previous status was ${prev}`);
}); });
HeicWorker.on("completed", (jobId, returnvalue) => { HeicWorker.on("completed", async (job, returnvalue) => {
logger.log("debug", `[BULLMQ] ${jobId.id} has completed and returned ${returnvalue}`); logger.log("debug", `[BULLMQ] ${job.id} has completed and returned ${returnvalue}`);
await job.remove();
logger.log("debug", `Job ${job.id} removed from Redis`);
}); });
HeicWorker.on("failed", (jobId, failedReason) => { HeicWorker.on("failed", (jobId, failedReason) => {
logger.log("error", `[BULLMQ] ${jobId} has failed with reason ${failedReason}`); logger.log("error", `[BULLMQ] ${jobId} has failed with reason ${failedReason}`);
@@ -140,17 +206,3 @@ HeicWorker.on("stalled", (error) => {
HeicWorker.on("ioredis:close", () => { HeicWorker.on("ioredis:close", () => {
logger.log("error", `[BULLMQ] Redis connection closed!`); logger.log("error", `[BULLMQ] Redis connection closed!`);
}); });
// const queueEvents = new QueueEvents( "HEIC Queue");
// queueEvents.on('completed', ( jobId, returnvalue ) => {
// // Called every time a job is completed by any worker.
// });
// queueEvents.on('failed', (jobId, failedReason ) => {
// // Called whenever a job is moved to failed by any worker.
// });
// queueEvents.on('progress', (jobId, data) => {
// // jobId received a progress event
// });

View File

@@ -1,7 +1,6 @@
import dotenv from "dotenv"; import dotenv from "dotenv";
import { NextFunction, Request, Response } from "express"; import { NextFunction, Request, Response } from "express";
import { resolve } from "path"; import { resolve } from "path";
//import { logger } from "../server.ts";
dotenv.config({ dotenv.config({
path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)