IO-3151 Package Update and Optimizations

This commit is contained in:
Allan Carr
2025-03-06 08:48:48 -08:00
parent fd0d3c072b
commit c3f408f206
9 changed files with 1106 additions and 793 deletions

View File

@@ -1 +1 @@
MEDIA_PATH=~/Pictures
PORT=8000

View File

@@ -1,86 +1,77 @@
FROM node:20
### ALPINE MULTI-STAGE
# Build stage for libraries
FROM node:22-alpine AS builder
# Create app directory
# Install build dependencies
RUN apk add --no-cache \
bash wget build-base autoconf automake cmake libtool pkgconf \
libjpeg-turbo-dev libpng-dev libwebp-dev tiff-dev libde265-dev \
ruby ruby-dev
# Replace source built libde265 and libheif with installed libraries in next release
# libheif-dev libde265-dev x265-dev
# Build libde265
WORKDIR /build/libde265
RUN wget https://github.com/strukturag/libde265/archive/v1.0.15.tar.gz \
&& tar -xvf v1.0.15.tar.gz \
&& cd libde265-1.0.15 \
&& cmake . \
&& make \
&& make install
# Build libheif
WORKDIR /build/libheif
RUN wget https://github.com/strukturag/libheif/archive/v1.19.7.tar.gz \
&& tar -xvf v1.19.7.tar.gz \
&& cd libheif-1.19.7 \
&& cmake --preset=release . \
&& make \
&& make install
# Build ImageMagick
WORKDIR /build/imagemagick
RUN wget https://download.imagemagick.org/archive/releases/ImageMagick-7.1.1-44.tar.xz \
&& tar -xvf ImageMagick-7.1.1-44.tar.xz \
&& cd ImageMagick-7.1.1-44 \
&& ./configure --with-heic=yes --with-webp=yes \
&& make \
&& make install
# Node.js application build stage
WORKDIR /usr/src/app
# Install app dependencies
# A wildcard is used to ensure both package.json AND package-lock.json are copied
# where available (npm@5+)
COPY package*.json ./
RUN npm install -g typescript
RUN npm install
# If you are building your code for production
# RUN npm ci --only=production
# Bundle app source
COPY . .
RUN apt -y update
RUN apt install -y wget
RUN apt install -y lsb-release curl gpg
RUN curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
RUN chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg
RUN echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/redis.list
RUN apt update
RUN apt install -y redis
# PNG, JPG, TIFF & WebP support
# Consider adding more support with testing https://gist.github.com/hurricup/e14ae5bc47705fca6b1680e7a1fb6580
RUN apt install -y libjpeg-dev
RUN apt install -y libpng-dev
RUN apt install -y libtiff-dev
RUN apt install -y libwebp-dev
# Install HEIF support (the libheif-dev package does not exist on 16.04)
RUN apt -y install libde265-dev
RUN apt -y install pkg-config m4 libtool automake autoconf cmake
RUN wget https://github.com/strukturag/libde265/archive/v1.0.15.tar.gz
RUN tar -xvf v1.0.15.tar.gz
WORKDIR /usr/src/app/libde265-1.0.15/
RUN cmake .
RUN make
RUN make install
RUN ./autogen.sh
RUN ./configure
WORKDIR /usr/src/app
RUN wget https://github.com/strukturag/libheif/archive/v1.18.2.tar.gz
RUN tar -xvf v1.18.2.tar.gz
WORKDIR /usr/src/app/libheif-1.18.2/
RUN cmake --preset=release .
RUN make
RUN make install
WORKDIR /usr/src/app
# Install ruby 2.3.0 for ImageMagick
RUN apt -y install -y build-essential zlib1g-dev libssl-dev libreadline6-dev libyaml-dev
RUN apt -y install -y ruby-full && ruby -v
# Install ImageMagick
# RUN apt-get install imagemagick -y
# # Install ImageMagick with WEBP and HEIC support
RUN wget https://download.imagemagick.org/archive/releases/ImageMagick-7.1.1-37.tar.xz
RUN tar -xvf ImageMagick-7.1.1-37.tar.xz
WORKDIR /usr/src/app/ImageMagick-7.1.1-37/
RUN ./configure --with-heic=yes --with-webp=yes
RUN make
RUN make install
RUN ldconfig /usr/local/lib
RUN identify --version
RUN apt update && apt install -y ghostscript graphicsmagick \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /usr/src/app
RUN npm run build
RUN npm install pm2 -g
# Final stage
FROM node:22-alpine
# Install runtime dependencies only
RUN apk add --no-cache \
bash redis ghostscript graphicsmagick imagemagick \
libjpeg-turbo libpng libwebp tiff
# Copy built libraries from builder
COPY --from=builder /usr/local/lib/ /usr/local/lib/
COPY --from=builder /usr/local/bin/ /usr/local/bin/
COPY --from=builder /usr/local/include/ /usr/local/include/
# Update library cache
RUN ldconfig /usr/local/lib
RUN npm install -g pm2
WORKDIR /usr/src/app
# Copy built application from builder
COPY --from=builder /usr/src/app/dist ./dist
COPY --from=builder /usr/src/app/node_modules ./node_modules
COPY --from=builder /usr/src/app/.env.production ./.env.production
COPY --from=builder /usr/src/app/ecosystem.config.cjs ./ecosystem.config.cjs
EXPOSE 8000

View File

@@ -1,9 +1,14 @@
module.exports = [
{
script: "dist/server.js",
name: "MediaServer",
exec_mode: "cluster",
instances: 0,
cron_restart: "30 8 * * *"
}
];
// PM2 ecosystem configuration for the media server process.
module.exports = {
  apps: [
    {
      script: "dist/server.js",
      name: "MediaServer",
      // Cluster mode lets PM2 load-balance requests across Node processes.
      exec_mode: "cluster",
      // PM2 treats 0 as "max": spawn one instance per available CPU core.
      instances: 0,
      // Scheduled restart daily at 08:30 (server-local time).
      cron_restart: "30 8 * * *",
      env: {
        NODE_ENV: "production"
      }
    }
  ]
};

View File

@@ -1,9 +1,11 @@
import { NextFunction, Request, Response } from "express";
export default function validateJobRequest(req: Request, res: Response, next: NextFunction) {
const validateJobRequest: (req: Request, res: Response, next: NextFunction) => void = (req, res, next) => {
const jobId: string = (req.body.jobid || "").trim();
if (jobId === "") {
return res.status(400).json({ error: "No RO Number has been specified." });
}
next();
}
};
export default validateJobRequest;

1419
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "bodyshop-media-server",
"version": "1.0.12",
"version": "1.0.13",
"license": "UNLICENSED",
"engines": {
"node": ">=18.0.0"
@@ -13,40 +13,42 @@
"makeitpretty": "prettier --write \"**/*.{css,js,json,jsx,scss,ts}\""
},
"dependencies": {
"axios": "^1.7.5",
"body-parser": "^1.20.2",
"bullmq": "^5.12.12",
"@types/compression": "^1.7.5",
"axios": "^1.8.1",
"body-parser": "^1.20.3",
"bullmq": "^5.41.7",
"compression": "^1.8.0",
"cors": "^2.8.5",
"dotenv": "16.4.5",
"express": "^4.19.2",
"file-type": "^19.4.1",
"fs-extra": "^11.2.0",
"gm": "^1.25.0",
"helmet": "^7.1.0",
"dotenv": "16.4.7",
"express": "^4.21.2",
"file-type": "^20.4.0",
"fs-extra": "^11.3.0",
"gm": "^1.25.1",
"helmet": "^8.0.0",
"image-thumbnail": "^1.0.17",
"jszip": "^3.10.1",
"morgan": "^1.10.0",
"multer": "^1.4.4",
"nocache": "^4.0.0",
"response-time": "^2.3.2",
"response-time": "^2.3.3",
"simple-thumbnail": "^1.6.5",
"winston": "^3.14.2",
"winston": "^3.17.0",
"winston-daily-rotate-file": "^5.0.0"
},
"devDependencies": {
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/express": "^5.0.0",
"@types/fs-extra": "^11.0.4",
"@types/gm": "^1.25.4",
"@types/image-thumbnail": "^1.0.4",
"@types/morgan": "^1.9.9",
"@types/multer": "^1.4.12",
"@types/node": "^22.5.1",
"@types/node": "^22.13.9",
"@types/response-time": "^2.3.8",
"nodemon": "^3.1.4",
"prettier": "^3.3.3",
"nodemon": "^3.1.9",
"prettier": "^3.5.3",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.5.4"
"typescript": "^5.8.2"
}
}

View File

@@ -1,4 +1,5 @@
import bodyParser from "body-parser";
import compression from "compression";
import cors from "cors";
import dotenv from "dotenv";
import express, { Express } from "express";
@@ -25,73 +26,76 @@ dotenv.config({
path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const commonTransportConfig = {
maxSize: "20m",
maxFiles: 14,
tailable: true,
zippedArchive: true,
format: winston.format.combine(winston.format.timestamp(), winston.format.json()),
datePattern: "YYYY-MM-DD"
};
const baseFormat = winston.format.combine(
winston.format.timestamp(),
winston.format.errors({ stack: true }),
winston.format.json(),
winston.format.prettyPrint()
);
const consoleTransport = new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.timestamp(), winston.format.simple())
});
export const logger = winston.createLogger({
format: winston.format.combine(winston.format.timestamp(), winston.format.json(), winston.format.prettyPrint()),
format: baseFormat,
level: "http",
levels: { ...winston.config.syslog.levels, http: 8 },
exceptionHandlers: [
new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "exceptions-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
...commonTransportConfig
}),
new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.simple())
})
consoleTransport
],
rejectionHandlers: [
new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "rejections-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
...commonTransportConfig
}),
new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.simple())
})
consoleTransport
],
transports: [
new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "errors-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14",
level: "error"
level: "error",
...commonTransportConfig
}),
new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "debug-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14",
level: "debug"
level: "debug",
...commonTransportConfig
}),
new DailyRotateFile({
filename: path.join(FolderPaths.Root, "logs", "ALL-%DATE%.log"),
datePattern: "YYYY-MM-DD-HH",
zippedArchive: true,
maxSize: "20m",
maxFiles: "14"
...commonTransportConfig
})
]
});
if (process.env.NODE_ENV !== "production") {
logger.add(
new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.simple())
})
);
}
logger.add(
new winston.transports.Console({
format: winston.format.combine(winston.format.colorize(), winston.format.timestamp(), winston.format.simple())
})
);
const app: Express = express();
const port = process.env.PORT;
app.set("etag", false);
app.use(compression());
app.use((req, res, next) => {
res.setHeader("Connection", "keep-alive");
next();
});
app.use(nocache());
app.use(bodyParser.json({ limit: "1000mb" }));
app.use(bodyParser.urlencoded({ limit: "1000mb", extended: true }));
@@ -135,6 +139,9 @@ app.post(
app.get("/", ValidateImsToken, (req: express.Request, res: express.Response) => {
res.send("IMS running.");
});
app.get("/health", (req: express.Request, res: express.Response) => {
res.status(200).send("OK");
});
InitServer();
app.use(FolderPaths.StaticPath, express.static(FolderPaths.Root, { etag: false, maxAge: 30 * 1000 }));

View File

@@ -1,4 +1,4 @@
import { Job, Queue, QueueEvents, Worker } from "bullmq";
import { Job, Queue, Worker } from "bullmq";
import dotenv from "dotenv";
import { fileTypeFromFile } from "file-type";
import { FileTypeResult } from "file-type/core";
@@ -12,7 +12,29 @@ import { FolderPaths } from "./serverInit.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const HeicQueue = new Queue("HEIC Queue", { connection: { host: "localhost", port: 6379 } });
// BullMQ queue for HEIC-to-JPEG conversion jobs, backed by local Redis.
const HeicQueue = new Queue("HEIC Queue", {
  connection: {
    host: "localhost",
    port: 6379,
    // Fail a Redis command after 3 retries instead of queueing forever.
    maxRetriesPerRequest: 3,
    enableReadyCheck: true,
    // Reconnect when Redis reports READONLY — presumably to recover after
    // a failover leaves the client talking to a replica. TODO confirm.
    reconnectOnError: function (err) {
      const targetError = "READONLY";
      return err.message.includes(targetError);
    }
  },
  defaultJobOptions: {
    // Drop job records from Redis as soon as they finish either way,
    // keeping the queue's memory footprint flat.
    removeOnComplete: true,
    removeOnFail: true,
    // Retry failed conversions up to 3 times: 1s, 2s, 4s delays.
    attempts: 3,
    backoff: {
      type: "exponential",
      delay: 1000
    }
  }
});
const cleanupINTERVAL = 1000 * 60 * 10;
setInterval(cleanupQueue, cleanupINTERVAL);
dotenv.config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
@@ -20,32 +42,49 @@ dotenv.config({
const imageMagick = gm.subClass({ imageMagick: true });
/**
 * Periodic maintenance pass over the HEIC queue: purges old job records
 * (up to 500 per state per run) and logs current job counts for health
 * monitoring. Errors are logged but never propagated, since this runs
 * on a timer with no caller to handle them.
 */
async function cleanupQueue() {
  const ONE_HOUR = 1000 * 60 * 60;
  const SIX_HOURS = ONE_HOUR * 6;
  try {
    // Clean completed jobs older than 1 hour
    await HeicQueue.clean(ONE_HOUR, 500, "completed");
    // Clean failed jobs older than 6 hours
    await HeicQueue.clean(SIX_HOURS, 500, "failed");
    // Get queue health
    const jobCounts = await HeicQueue.getJobCounts();
    logger.log("debug", `Queue status: ${JSON.stringify(jobCounts)}`);
  } catch (error) {
    logger.log("error", `Queue cleanup error: ${error}`);
  }
}
export async function ConvertHeicFiles(files: Express.Multer.File[]) {
const validFiles = await filterValidHeicFiles(files);
const jobs = await HeicQueue.addBulk(
validFiles.map((file) => ({
name: file.filename,
data: { convertedFileName: generateUniqueHeicFilename(file), file }
}))
);
// await Promise.all(
// validFiles.map(async (file) => {
// const convertedFileName = generateUniqueHeicFilename(file);
// await HeicQueue.add(convertedFileName, { convertedFileName, file },{removeOnComplete: true,});
const jobs = validFiles.map((file) => ({
name: file.filename,
data: {
convertedFileName: generateUniqueHeicFilename(file),
fileInfo: {
path: file.path,
destination: file.destination,
originalFilename: file.filename
}
}
}));
// // try {
// // await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`);
// // logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`);
// // await handleOriginalFile(file, convertedFileName);
// // file.filename = convertedFileName;
// // file.mimetype = "image/jpeg";
// // file.path = `${file.destination}/${convertedFileName}`;
// // } catch (error) {
// // logger.log("error", `Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}`);
// // }
// })
// );
await HeicQueue.addBulk(jobs);
const fileMap = new Map(files.map((file, index) => [file.filename, index]));
jobs.forEach((job) => {
const fileIndex = fileMap.get(job.data.fileInfo.originalFilename);
if (fileIndex !== undefined) {
files[fileIndex].filename = job.data.convertedFileName;
files[fileIndex].mimetype = "image/jpeg";
}
});
}
async function filterValidHeicFiles(files: Express.Multer.File[]) {
@@ -59,56 +98,81 @@ async function filterValidHeicFiles(files: Express.Multer.File[]) {
return validFiles;
}
async function handleOriginalFile(file: Express.Multer.File, convertedFileName: string) {
if (process.env.KEEP_CONVERTED_ORIGINALS) {
await fs.ensureDir(path.join(file.destination, FolderPaths.ConvertedOriginalSubDir));
await fs.move(file.path, `${path.join(file.destination, FolderPaths.ConvertedOriginalSubDir)}/${file.filename}`);
} else {
await fs.unlink(file.path);
async function handleOriginalFile(fileInfo: { path: string; destination: string; originalFilename: string }) {
try {
if (process.env.KEEP_CONVERTED_ORIGINALS) {
await fs.ensureDir(path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir));
await fs.move(
fileInfo.path,
`${path.join(fileInfo.destination, FolderPaths.ConvertedOriginalSubDir)}/${fileInfo.originalFilename}`
);
} else {
await fs.unlink(fileInfo.path);
}
} catch (error) {
logger.log("error", `Error handling original file: ${error}`);
throw error;
}
}
async function ConvertToJpeg(file: string, newPath: string) {
const fileOnDisk: Buffer = await fs.readFile(file);
// const fileOnDisk: Buffer = await fs.readFile(file);
// return new Promise<string>((resolve, reject) => {
// imageMagick(fileOnDisk)
// .setFormat("jpg")
// .write(newPath, (error) => {
// if (error) reject(error.message);
// resolve(newPath);
// });
// });
return new Promise<string>((resolve, reject) => {
imageMagick(fileOnDisk)
const readStream = fs.createReadStream(file);
const writeStream = fs.createWriteStream(newPath);
imageMagick(readStream)
.setFormat("jpg")
.write(newPath, (error) => {
if (error) reject(error.message);
resolve(newPath);
});
.stream()
.pipe(writeStream)
.on("finish", () => resolve(newPath))
.on("error", (error) => reject(error.message));
});
}
// Previous implementation using sandboxing. Cannot set up because the imports try to launch the server again.
// const processorUrl = pathToFileURL(__dirname + "/heicQueueProcessor.ts");
// const HeicWorker = new Worker("HEIC Queue", processorUrl, {
// connection: { host: "localhost", port: 6379 }
// });
const HeicWorker = new Worker(
"HEIC Queue",
async (job: Job) => {
const { file, convertedFileName } = job.data;
const { fileInfo, convertedFileName } = job.data;
try {
logger.log("debug", `Attempting to Convert ${file.filename} image to JPEG from HEIC.`);
await ConvertToJpeg(file.path, `${file.destination}/${convertedFileName}`);
logger.log("debug", `Converted ${file.filename} image to JPEG from HEIC.`);
await handleOriginalFile(file, convertedFileName);
file.filename = convertedFileName;
file.mimetype = "image/jpeg";
file.path = `${file.destination}/${convertedFileName}`;
logger.log("debug", `Attempting to Convert ${fileInfo.originalFilename} image to JPEG from HEIC.`);
await job.updateProgress(10);
await ConvertToJpeg(fileInfo.path, `${fileInfo.destination}/${convertedFileName}`);
await job.updateProgress(50);
await handleOriginalFile(fileInfo);
logger.log("debug", `Converted ${fileInfo.originalFilename} image to JPEG from HEIC.`);
await job.updateProgress(100);
return true;
} catch (error) {
logger.log(
"error",
`QUEUE ERROR: Error converting ${file.filename} image to JPEG from HEIC. ${JSON.stringify(error)}`
`QUEUE ERROR: Error converting ${fileInfo.originalFilename} image to JPEG from HEIC. ${JSON.stringify(error)}`
);
return false;
throw error;
}
},
{
connection: { host: "localhost", port: 6379 }
connection: {
host: "localhost",
port: 6379,
maxRetriesPerRequest: 3,
enableReadyCheck: true,
reconnectOnError: function (err) {
const targetError = "READONLY";
return err.message.includes(targetError);
}
},
concurrency: 1
}
);
@@ -125,8 +189,10 @@ HeicWorker.on("ready", () => {
HeicWorker.on("active", (job, prev) => {
logger.log("debug", `[BULLMQ] Job ${job.id} is now active; previous status was ${prev}`);
});
HeicWorker.on("completed", (jobId, returnvalue) => {
logger.log("debug", `[BULLMQ] ${jobId.id} has completed and returned ${returnvalue}`);
HeicWorker.on("completed", async (job, returnvalue) => {
logger.log("debug", `[BULLMQ] ${job.id} has completed and returned ${returnvalue}`);
await job.remove();
logger.log("debug", `Job ${job.id} removed from Redis`);
});
HeicWorker.on("failed", (jobId, failedReason) => {
logger.log("error", `[BULLMQ] ${jobId} has failed with reason ${failedReason}`);
@@ -140,17 +206,3 @@ HeicWorker.on("stalled", (error) => {
HeicWorker.on("ioredis:close", () => {
logger.log("error", `[BULLMQ] Redis connection closed!`);
});
// const queueEvents = new QueueEvents( "HEIC Queue");
// queueEvents.on('completed', ( jobId, returnvalue ) => {
// // Called every time a job is completed by any worker.
// });
// queueEvents.on('failed', (jobId, failedReason ) => {
// // Called whenever a job is moved to failed by any worker.
// });
// queueEvents.on('progress', (jobId, data) => {
// // jobId received a progress event
// });

View File

@@ -1,7 +1,6 @@
import dotenv from "dotenv";
import { NextFunction, Request, Response } from "express";
import { resolve } from "path";
//import { logger } from "../server.ts";
dotenv.config({
path: resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)