Add analysis trigger

This commit is contained in:
Patrick Fic
2025-11-05 14:10:34 -08:00
parent f4b34a956a
commit 994a35025b
3 changed files with 89 additions and 26 deletions

View File

@@ -2,7 +2,7 @@ import "source-map-support/register";
import bodyParser from "body-parser";
import compression from "compression";
import cors from "cors";
import dotenv from "dotenv";
import dotenv, { config } from "dotenv";
import express, { Express } from "express";
import helmet from "helmet";
import morgan from "morgan";
@@ -30,13 +30,13 @@ dotenv.config({
});
// Global error handlers
process.on('uncaughtException', (error) => {
console.error('Uncaught Exception:', error);
process.on("uncaughtException", (error) => {
console.error("Uncaught Exception:", error);
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
process.on("unhandledRejection", (reason, promise) => {
console.error("Unhandled Rejection at:", promise, "reason:", reason);
process.exit(1);
});
@@ -121,6 +121,46 @@ const morganMiddleware = morgan("combined", {
});
app.use(morganMiddleware);
// Bodyshop IDs already persisted since server start; guards against
// re-writing config.json on every request carrying the same header.
const loggedBodyshopIds = new Set<string>();
app.use((req, res, next) => {
  // Check the headers for a bodyshopid. If present and not yet seen since
  // server start, append it to config.json in the root directory without
  // blocking the request.
  const bodyshopId = req.headers.bodyshopid as string;
  if (bodyshopId && !loggedBodyshopIds.has(bodyshopId)) {
    loggedBodyshopIds.add(bodyshopId);
    // Fire-and-forget persistence; `void` marks the intentionally
    // un-awaited promise (errors are handled inside).
    void (async () => {
      try {
        const fs = await import("fs/promises");
        const filePath = path.join(FolderPaths.Root, "config.json");
        let existingIds: string[] = [];
        try {
          const fileContent = await fs.readFile(filePath, "utf-8");
          const configFile = JSON.parse(fileContent);
          existingIds = configFile.bodyshopIds || [];
        } catch {
          // File doesn't exist or holds invalid JSON — start with an empty list.
        }
        if (!existingIds.includes(bodyshopId)) {
          existingIds.push(bodyshopId);
          // NOTE(review): concurrent first-time requests with different IDs can
          // interleave this read-modify-write and drop an entry — confirm this
          // best-effort behavior is acceptable for config.json.
          await fs.writeFile(filePath, JSON.stringify({ bodyshopIds: existingIds }, null, 2));
          logger.info(`Logged new bodyshop ID: ${bodyshopId}`);
        }
      } catch (error) {
        logger.error("Failed to log bodyshop ID:", error);
      }
    })();
  }
  next();
});
// Job endpoints
app.post("/jobs/list", ValidateImsToken, validateJobRequest, JobsListMedia);
app.post("/jobs/upload", ValidateImsToken, JobMediaUploadMulter.array("file"), validateJobRequest, jobsUploadMedia);

View File

@@ -21,11 +21,11 @@ export class DailyS3Scheduler {
}
// Test S3 connection before starting scheduler
const connectionTest = await this.s3Sync.testConnection();
if (!connectionTest) {
logger.error("S3 connection test failed. S3 sync scheduler will not be started.");
return;
}
// const connectionTest = await this.s3Sync.testConnection();
// if (!connectionTest) {
// logger.error("S3 connection test failed. S3 sync scheduler will not be started.");
// return;
// }
// Cron expression for midnight PST
// Note: This uses PST timezone. During PDT (daylight time), it will still run at midnight local time
@@ -35,10 +35,11 @@ export class DailyS3Scheduler {
this.cronJob = cron.schedule(
cronExpression,
async () => {
await this.performDailySync();
//await this.performDailySync();
await this.triggerJobAnalysis();
},
{
timezone: timezone,
timezone: timezone
}
);
@@ -92,6 +93,21 @@ export class DailyS3Scheduler {
await this.performDailySync();
}
async triggerJobAnalysis(): Promise<void> {
if (!this.s3Sync) {
logger.error("S3 sync not configured");
return;
}
logger.info("Triggering jobs directory analysis...");
try {
const analysis = await this.s3Sync.analyzeJobsDirectory();
logger.info("Jobs directory analysis completed:", analysis);
} catch (error) {
logger.error("Jobs directory analysis failed:", error);
}
}
/**
* Get the next scheduled run time
*/
@@ -103,7 +119,7 @@ export class DailyS3Scheduler {
// Create a date object for midnight PST today
const now = new Date();
const pstNow = new Date(now.toLocaleString("en-US", { timeZone: "America/Los_Angeles" }));
// If it's past midnight today, next run is tomorrow at midnight
const nextRun = new Date(pstNow);
if (pstNow.getHours() > 0 || pstNow.getMinutes() > 0 || pstNow.getSeconds() > 0) {
@@ -111,7 +127,7 @@ export class DailyS3Scheduler {
}
nextRun.setHours(0, 0, 0, 0);
return nextRun.toLocaleString("en-US", {
return nextRun.toLocaleString("en-US", {
timeZone: "America/Los_Angeles",
weekday: "long",
year: "numeric",
@@ -145,10 +161,10 @@ export class DailyS3Scheduler {
isConfigured: this.s3Sync !== null,
isRunning: this.cronJob !== null,
nextRun: this.getNextRunTime(),
syncStats,
syncStats
};
}
}
// Export a singleton instance
export const dailyS3Scheduler = new DailyS3Scheduler();
export const dailyS3Scheduler = new DailyS3Scheduler();

View File

@@ -187,7 +187,7 @@ export class S3Sync {
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
@@ -236,7 +236,9 @@ export class S3Sync {
/**
* Recursively get document count and total size for a directory
*/
private async getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
private async getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
@@ -253,7 +255,7 @@ export class S3Sync {
const subStats = await this.getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
@@ -262,9 +264,9 @@ export class S3Sync {
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}
@@ -340,7 +342,7 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
@@ -360,6 +362,9 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
logger.info(
`Jobs directory analysis complete: ${analysis.totalJobs} jobs, ${analysis.totalDocuments} documents, ${analysis.totalSizeMB} MB`
);
//Add an upload to the IO database to categorize all of this.
return analysis;
} catch (error) {
logger.error("Failed to analyze Jobs directory:", error);
@@ -389,7 +394,9 @@ async function analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFol
/**
* Recursively get document count and total size for a directory (standalone helper function)
*/
async function getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
async function getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
@@ -406,7 +413,7 @@ async function getDirectoryStats(dirPath: string): Promise<{ documentCount: numb
const subStats = await getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
@@ -415,9 +422,9 @@ async function getDirectoryStats(dirPath: string): Promise<{ documentCount: numb
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}