Add analysis trigger

This commit is contained in:
Patrick Fic
2025-11-05 14:10:34 -08:00
parent f4b34a956a
commit 994a35025b
3 changed files with 89 additions and 26 deletions

View File

@@ -2,7 +2,7 @@ import "source-map-support/register";
import bodyParser from "body-parser";
import compression from "compression";
import cors from "cors";
import dotenv from "dotenv";
import dotenv, { config } from "dotenv";
import express, { Express } from "express";
import helmet from "helmet";
import morgan from "morgan";
@@ -30,13 +30,13 @@ dotenv.config({
});
// Global error handlers
process.on('uncaughtException', (error) => {
console.error('Uncaught Exception:', error);
process.on("uncaughtException", (error) => {
console.error("Uncaught Exception:", error);
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
process.on("unhandledRejection", (reason, promise) => {
console.error("Unhandled Rejection at:", promise, "reason:", reason);
process.exit(1);
});
@@ -121,6 +121,46 @@ const morganMiddleware = morgan("combined", {
});
app.use(morganMiddleware);
// Track bodyshop IDs already persisted since server start so each ID is
// written to config.json at most once per process lifetime.
const loggedBodyshopIds = new Set<string>();
app.use((req, res, next) => {
  // If the request carries a `bodyshopid` header we have not seen since
  // server start, append it to config.json in the project root. The write
  // is fire-and-forget so it never blocks the request.
  const rawHeader = req.headers.bodyshopid;
  // Node surfaces repeated headers as string[]; take the first occurrence
  // instead of blindly casting with `as string`.
  const bodyshopId = Array.isArray(rawHeader) ? rawHeader[0] : rawHeader;
  if (bodyshopId && !loggedBodyshopIds.has(bodyshopId)) {
    loggedBodyshopIds.add(bodyshopId);
    // `void` marks the async write as intentionally not awaited.
    void (async () => {
      try {
        const fs = await import("fs/promises");
        const filePath = path.join(FolderPaths.Root, "config.json");
        let existingIds: string[] = [];
        try {
          const fileContent = await fs.readFile(filePath, "utf-8");
          const configFile = JSON.parse(fileContent);
          existingIds = configFile.bodyshopIds || [];
        } catch {
          // File doesn't exist or holds invalid JSON — start with an empty list.
        }
        if (!existingIds.includes(bodyshopId)) {
          existingIds.push(bodyshopId);
          await fs.writeFile(filePath, JSON.stringify({ bodyshopIds: existingIds }, null, 2));
          logger.info(`Logged new bodyshop ID: ${bodyshopId}`);
        }
      } catch (error) {
        logger.error("Failed to log bodyshop ID:", error);
      }
    })();
  }
  next();
});
// Job endpoints
app.post("/jobs/list", ValidateImsToken, validateJobRequest, JobsListMedia);
app.post("/jobs/upload", ValidateImsToken, JobMediaUploadMulter.array("file"), validateJobRequest, jobsUploadMedia);

View File

@@ -21,11 +21,11 @@ export class DailyS3Scheduler {
}
// Test S3 connection before starting scheduler
const connectionTest = await this.s3Sync.testConnection();
if (!connectionTest) {
logger.error("S3 connection test failed. S3 sync scheduler will not be started.");
return;
}
// const connectionTest = await this.s3Sync.testConnection();
// if (!connectionTest) {
// logger.error("S3 connection test failed. S3 sync scheduler will not be started.");
// return;
// }
// Cron expression for midnight PST
// Note: This uses PST timezone. During PDT (daylight time), it will still run at midnight local time
@@ -35,10 +35,11 @@ export class DailyS3Scheduler {
this.cronJob = cron.schedule(
cronExpression,
async () => {
await this.performDailySync();
//await this.performDailySync();
await this.triggerJobAnalysis();
},
{
timezone: timezone,
timezone: timezone
}
);
@@ -92,6 +93,21 @@ export class DailyS3Scheduler {
await this.performDailySync();
}
async triggerJobAnalysis(): Promise<void> {
if (!this.s3Sync) {
logger.error("S3 sync not configured");
return;
}
logger.info("Triggering jobs directory analysis...");
try {
const analysis = await this.s3Sync.analyzeJobsDirectory();
logger.info("Jobs directory analysis completed:", analysis);
} catch (error) {
logger.error("Jobs directory analysis failed:", error);
}
}
/**
* Get the next scheduled run time
*/
@@ -145,7 +161,7 @@ export class DailyS3Scheduler {
isConfigured: this.s3Sync !== null,
isRunning: this.cronJob !== null,
nextRun: this.getNextRunTime(),
syncStats,
syncStats
};
}
}

View File

@@ -236,7 +236,9 @@ export class S3Sync {
/**
* Recursively get document count and total size for a directory
*/
private async getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
private async getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
@@ -264,7 +266,7 @@ export class S3Sync {
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}
@@ -360,6 +362,9 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
logger.info(
`Jobs directory analysis complete: ${analysis.totalJobs} jobs, ${analysis.totalDocuments} documents, ${analysis.totalSizeMB} MB`
);
//Add an upload to the IO database to categorize all of this.
return analysis;
} catch (error) {
logger.error("Failed to analyze Jobs directory:", error);
@@ -389,7 +394,9 @@ async function analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFol
/**
* Recursively get document count and total size for a directory (standalone helper function)
*/
async function getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
async function getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
@@ -417,7 +424,7 @@ async function getDirectoryStats(dirPath: string): Promise<{ documentCount: numb
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}