Refactor naming.

This commit is contained in:
Patrick Fic
2025-11-06 14:07:49 -08:00
parent 994a35025b
commit ebe3d8821d
3 changed files with 131 additions and 105 deletions

View File

@@ -127,29 +127,23 @@ const loggedBodyshopIds = new Set<string>();
app.use((req, res, next) => {
//Asynchronously check the headers for a bodyshopid. If it exists, write it to a json file in the root directory. Only do this if the bodyshopid has not already been logged once since server start.
const bodyshopId = req.headers.bodyshopid as string;
console.log("*** ~ loggedBodyshopIds:", loggedBodyshopIds);
if (bodyshopId && !loggedBodyshopIds.has(bodyshopId)) {
loggedBodyshopIds.add(bodyshopId);
// Asynchronously write to file without blocking the request
(async () => {
try {
const fs = await import("fs/promises");
const filePath = path.join(FolderPaths.Root, "config.json");
let existingIds: string[] = [];
try {
const fileContent = await fs.readFile(filePath, "utf-8");
const fileContent = await fs.readFile(FolderPaths.Config, "utf-8");
const configFile = JSON.parse(fileContent);
existingIds = configFile.bodyshopIds || [];
} catch {
// File doesn't exist or is invalid, start with empty array
}
if (!existingIds.includes(bodyshopId)) {
existingIds.push(bodyshopId);
await fs.writeFile(filePath, JSON.stringify({ bodyshopIds: existingIds }, null, 2));
await fs.writeFile(FolderPaths.Config, JSON.stringify({ bodyshopIds: existingIds }, null, 2));
logger.info(`Logged new bodyshop ID: ${bodyshopId}`);
}
} catch (error) {
@@ -157,7 +151,6 @@ app.use((req, res, next) => {
}
})();
}
next();
});

View File

@@ -5,6 +5,8 @@ import { readdir, stat as fsStat } from "fs/promises";
import * as path from "path";
import { logger } from "../server.js";
import { FolderPaths } from "./serverInit.js";
import axios from "axios";
import { UUID } from "crypto";
const execAsync = promisify(exec);
@@ -17,21 +19,25 @@ interface S3SyncConfig {
}
export interface JobFolderStats {
jobId: string;
relativePath: string;
documentCount: number;
totalSizeBytes: number;
totalSizeMB: number;
fileTypeStats: { [extension: string]: number };
bodyshopid: UUID;
jobid: UUID | string | null;
//relativePath: string;
document_count: number;
total_size_bytes: number;
total_size_mb: number;
file_type_stats: { [extension: string]: number };
}
export interface JobsDirectoryAnalysis {
totalJobs: number;
totalDocuments: number;
totalSizeBytes: number;
totalSizeMB: number;
fileTypeStats: { [extension: string]: number };
jobs: JobFolderStats[];
bodyshopid: UUID;
total_jobs: number;
total_documents: number;
total_size_bytes: number;
total_size_mb: number;
file_type_stats: { [extension: string]: number };
media_analytics_details: {
data: JobFolderStats[];
};
}
export class S3Sync {
@@ -162,20 +168,21 @@ export class S3Sync {
if (!(await fs.pathExists(jobsPath))) {
logger.warn(`Jobs directory does not exist: ${jobsPath}`);
return {
totalJobs: 0,
totalDocuments: 0,
totalSizeBytes: 0,
totalSizeMB: 0,
fileTypeStats: {},
jobs: []
bodyshopid,
total_jobs: 0,
total_documents: 0,
total_size_bytes: 0,
total_size_mb: 0,
file_type_stats: {},
media_analytics_details: { data: [] }
};
}
const jobFolders = await readdir(jobsPath);
const jobStats: JobFolderStats[] = [];
let totalDocuments = 0;
let totalSizeBytes = 0;
const aggregatedFileTypeStats: { [extension: string]: number } = {};
let total_documents = 0;
let total_size_bytes = 0;
const aggregated_file_type_stats: { [extension: string]: number } = {};
for (const jobFolder of jobFolders) {
const jobFolderPath = path.join(jobsPath, jobFolder);
@@ -185,27 +192,28 @@ export class S3Sync {
if (stat.isDirectory()) {
const folderStats = await this.analyzeJobFolder(jobsPath, jobFolder);
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
total_documents += folderStats.document_count;
total_size_bytes += folderStats.total_size_bytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
for (const [ext, count] of Object.entries(folderStats.file_type_stats)) {
aggregated_file_type_stats[ext] = (aggregated_file_type_stats[ext] || 0) + count;
}
}
}
const analysis: JobsDirectoryAnalysis = {
totalJobs: jobStats.length,
totalDocuments,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats: aggregatedFileTypeStats,
jobs: jobStats.sort((a, b) => a.jobId.localeCompare(b.jobId))
bodyshopid,
total_jobs: jobStats.length,
total_documents,
total_size_bytes,
total_size_mb: Math.round((total_size_bytes / (1024 * 1024)) * 100) / 100,
file_type_stats: aggregated_file_type_stats,
media_analytics_details: { data: jobStats.sort((a, b) => a.jobid?.localeCompare(b.jobid!) || 0) }
};
logger.info(
`Jobs directory analysis complete: ${analysis.totalJobs} jobs, ${analysis.totalDocuments} documents, ${analysis.totalSizeMB} MB`
`Jobs directory analysis complete: ${analysis.total_jobs} jobs, ${analysis.total_documents} documents, ${analysis.total_size_mb} MB`
);
return analysis;
} catch (error) {
@@ -217,19 +225,20 @@ export class S3Sync {
/**
* Analyze a single job folder
*/
private async analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFolderStats> {
const jobFolderPath = path.join(jobsPath, jobId);
const relativePath = path.relative(FolderPaths.Root, jobFolderPath);
private async analyzeJobFolder(jobsPath: string, jobid: string): Promise<JobFolderStats> {
const jobFolderPath = path.join(jobsPath, jobid);
// const relativePath = path.relative(FolderPaths.Root, jobFolderPath);
const { documentCount, totalSizeBytes, fileTypeStats } = await this.getDirectoryStats(jobFolderPath);
const { document_count, total_size_bytes, file_type_stats } = await this.getDirectoryStats(jobFolderPath);
return {
jobId,
relativePath,
documentCount,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats
jobid: jobid === "temporary" ? null : (jobid as UUID),
bodyshopid,
//relativePath,
document_count,
total_size_bytes,
total_size_mb: Math.round((total_size_bytes / (1024 * 1024)) * 100) / 100,
file_type_stats
};
}
@@ -238,10 +247,10 @@ export class S3Sync {
*/
private async getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
): Promise<{ document_count: number; total_size_bytes: number; file_type_stats: { [extension: string]: number } }> {
let document_count = 0;
let total_size_bytes = 0;
const file_type_stats: { [extension: string]: number } = {};
try {
const items = await readdir(dirPath);
@@ -253,28 +262,28 @@ export class S3Sync {
if (stat.isDirectory()) {
// Recursively analyze subdirectories
const subStats = await this.getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
document_count += subStats.document_count;
total_size_bytes += subStats.total_size_bytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
for (const [ext, count] of Object.entries(subStats.file_type_stats)) {
file_type_stats[ext] = (file_type_stats[ext] || 0) + count;
}
} else {
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
document_count++;
total_size_bytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
file_type_stats[ext] = (file_type_stats[ext] || 0) + 1;
}
}
} catch (error) {
logger.error(`Error analyzing directory ${dirPath}:`, error);
}
return { documentCount, totalSizeBytes, fileTypeStats };
return { document_count, total_size_bytes, file_type_stats };
}
}
@@ -310,6 +319,8 @@ export function createS3SyncFromEnv(): S3Sync | null {
export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
try {
logger.info("Starting Jobs directory analysis...");
//Read from the config.json file in the root directory to get the bodyshopid
const bodyshopid: UUID = await getBodyshopIdFromConfig();
const jobsPath = FolderPaths.Jobs;
@@ -317,20 +328,21 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
if (!(await fs.pathExists(jobsPath))) {
logger.warn(`Jobs directory does not exist: ${jobsPath}`);
return {
totalJobs: 0,
totalDocuments: 0,
totalSizeBytes: 0,
totalSizeMB: 0,
fileTypeStats: {},
jobs: []
bodyshopid,
total_jobs: 0,
total_documents: 0,
total_size_bytes: 0,
total_size_mb: 0,
file_type_stats: {},
media_analytics_details: { data: [] }
};
}
const jobFolders = await readdir(jobsPath);
const jobStats: JobFolderStats[] = [];
let totalDocuments = 0;
let totalSizeBytes = 0;
const aggregatedFileTypeStats: { [extension: string]: number } = {};
let total_documents = 0;
let total_size_bytes = 0;
const aggregated_file_type_stats: { [extension: string]: number } = {};
for (const jobFolder of jobFolders) {
const jobFolderPath = path.join(jobsPath, jobFolder);
@@ -340,34 +352,38 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
if (stat.isDirectory()) {
const folderStats = await analyzeJobFolder(jobsPath, jobFolder);
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
total_documents += folderStats.document_count;
total_size_bytes += folderStats.total_size_bytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
for (const [ext, count] of Object.entries(folderStats.file_type_stats)) {
aggregated_file_type_stats[ext] = (aggregated_file_type_stats[ext] || 0) + count;
}
}
}
const analysis: JobsDirectoryAnalysis = {
totalJobs: jobStats.length,
totalDocuments,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats: aggregatedFileTypeStats,
jobs: jobStats.sort((a, b) => a.jobId.localeCompare(b.jobId))
bodyshopid, //read from the config.json file in the root directory
total_jobs: jobStats.length,
total_documents,
total_size_bytes,
total_size_mb: Math.round((total_size_bytes / (1024 * 1024)) * 100) / 100,
file_type_stats: aggregated_file_type_stats,
media_analytics_details: { data: jobStats.sort((a, b) => a.jobid?.localeCompare(b.jobid!) || 0) }
};
logger.info(
`Jobs directory analysis complete: ${analysis.totalJobs} jobs, ${analysis.totalDocuments} documents, ${analysis.totalSizeMB} MB`
`Jobs directory analysis complete: ${analysis.total_jobs} jobs, ${analysis.total_documents} documents, ${analysis.total_size_mb} MB`
);
//Add an upload to the IO database to categorize all of this.
const apiURL = "http://host.docker.internal:4000/analytics/documents";
const result = await axios.post(apiURL, { data: analysis });
return analysis;
} catch (error) {
logger.error("Failed to analyze Jobs directory:", error);
logger.error("Failed to analyze Jobs directory:", JSON.stringify(error, null, 4));
throw error;
}
}
@@ -375,19 +391,20 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
/**
* Analyze a single job folder (standalone helper function)
*/
async function analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFolderStats> {
const jobFolderPath = path.join(jobsPath, jobId);
async function analyzeJobFolder(jobsPath: string, jobid: string): Promise<JobFolderStats> {
const jobFolderPath = path.join(jobsPath, jobid);
const relativePath = path.relative(FolderPaths.Root, jobFolderPath);
const { documentCount, totalSizeBytes, fileTypeStats } = await getDirectoryStats(jobFolderPath);
const { document_count, total_size_bytes, file_type_stats } = await getDirectoryStats(jobFolderPath);
return {
jobId,
relativePath,
documentCount,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats
bodyshopid,
jobid,
// relativePath,
document_count,
total_size_bytes,
total_size_mb: Math.round((total_size_bytes / (1024 * 1024)) * 100) / 100,
file_type_stats
};
}
@@ -396,10 +413,10 @@ async function analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFol
*/
async function getDirectoryStats(
dirPath: string
): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
): Promise<{ document_count: number; total_size_bytes: number; file_type_stats: { [extension: string]: number } }> {
let document_count = 0;
let total_size_bytes = 0;
const file_type_stats: { [extension: string]: number } = {};
try {
const items = await readdir(dirPath);
@@ -411,26 +428,41 @@ async function getDirectoryStats(
if (stat.isDirectory()) {
// Recursively analyze subdirectories
const subStats = await getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
document_count += subStats.document_count;
total_size_bytes += subStats.total_size_bytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
for (const [ext, count] of Object.entries(subStats.file_type_stats)) {
file_type_stats[ext] = (file_type_stats[ext] || 0) + count;
}
} else {
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
document_count++;
total_size_bytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || "no-extension";
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
file_type_stats[ext] = (file_type_stats[ext] || 0) + 1;
}
}
} catch (error) {
logger.error(`Error analyzing directory ${dirPath}:`, error);
}
return { documentCount, totalSizeBytes, fileTypeStats };
return { document_count, total_size_bytes, file_type_stats };
}
// Module-level cache so config.json is read at most once per process.
let bodyshopid: UUID;

/**
 * Read the bodyshop ID from config.json (the first entry of `bodyshopIds`),
 * memoizing the result in the module-level `bodyshopid`.
 *
 * @returns the cached or freshly-read bodyshop ID
 * @throws Error if config.json cannot be read, is not valid JSON, or
 *         contains no `bodyshopIds` entries
 */
const getBodyshopIdFromConfig = async (): Promise<UUID> => {
    // Fast path: a previously resolved (truthy) ID is returned without I/O.
    if (bodyshopid) return bodyshopid;
    try {
        const fs = await import("fs/promises"); //Required as fs-extra fails.
        const configData = await fs.readFile(FolderPaths.Config, "utf-8");
        const config = JSON.parse(configData);
        const ids = config.bodyshopIds;
        // Guard against a missing or empty bodyshopIds array. Without this,
        // `config.bodyshopIds[0]` yields undefined: the cache guard above
        // never becomes truthy (so the file is re-read on every call) and
        // callers silently receive undefined typed as UUID.
        if (!Array.isArray(ids) || ids.length === 0) {
            throw new Error("config.json has no bodyshopIds entries");
        }
        bodyshopid = ids[0] as UUID;
        return bodyshopid;
    } catch (error) {
        logger.error("Failed to read bodyshopid from config.json:", error, (error as Error).stack);
        throw new Error("Could not read bodyshopid from config.json");
    }
};

View File

@@ -25,6 +25,7 @@ export const FolderPaths = {
ConvertedOriginalSubDir: "ConvertedOriginal",
DamagedSubDir: "DamagedOriginal",
StaticPath: "/static",
Config: path.join(RootDirectory, "config.json"),
JobsFolder,
VendorsFolder
};