Basic directory analysis.

This commit is contained in:
Patrick Fic
2025-11-04 15:00:44 -08:00
parent 6a1f02c3cb
commit 40b2e0fdf7
9 changed files with 148 additions and 18 deletions

View File

@@ -41,6 +41,11 @@ WORKDIR /usr/src/app
# Copy built application from builder
COPY --from=builder /usr/src/app/dist ./dist
# Copy TypeScript source files for source map support
COPY --from=builder /usr/src/app/*.ts ./
COPY --from=builder /usr/src/app/util ./util
COPY --from=builder /usr/src/app/jobs ./jobs
COPY --from=builder /usr/src/app/bills ./bills
COPY ./assets /assets
COPY --from=builder /usr/src/app/node_modules ./node_modules
COPY --from=builder /usr/src/app/.env.production ./.env.production

View File

@@ -41,6 +41,11 @@ WORKDIR /usr/src/app
# Copy built application from builder
COPY --from=builder /usr/src/app/dist ./dist
# Copy TypeScript source files for source map support
COPY --from=builder /usr/src/app/*.ts ./
COPY --from=builder /usr/src/app/util ./util
COPY --from=builder /usr/src/app/jobs ./jobs
COPY --from=builder /usr/src/app/bills ./bills
COPY ./assets /assets
COPY --from=builder /usr/src/app/node_modules ./node_modules
COPY --from=builder /usr/src/app/.env.production ./.env.production

View File

@@ -6,6 +6,7 @@ module.exports = {
exec_mode: "cluster",
instances: 0,
cron_restart: "30 8 * * *",
node_args: "--enable-source-maps",
env: {
NODE_ENV: "production"
}

54
package-lock.json generated
View File

@@ -29,6 +29,7 @@
"node-cron": "^4.2.1",
"response-time": "^2.3.4",
"simple-thumbnail": "^1.6.5",
"source-map-support": "^0.5.21",
"winston": "^3.18.3",
"winston-daily-rotate-file": "^5.0.0"
},
@@ -43,6 +44,7 @@
"@types/node": "^24.9.2",
"@types/node-cron": "^3.0.11",
"@types/response-time": "^2.3.9",
"@types/source-map-support": "^0.5.10",
"nodemon": "^3.1.10",
"prettier": "^3.6.2",
"ts-node": "^10.9.2",
@@ -1250,6 +1252,16 @@
"@types/node": "*"
}
},
"node_modules/@types/source-map-support": {
"version": "0.5.10",
"resolved": "https://registry.npmjs.org/@types/source-map-support/-/source-map-support-0.5.10.tgz",
"integrity": "sha512-tgVP2H469x9zq34Z0m/fgPewGhg/MLClalNOiPIzQlXrSS2YrKu/xCdSCKnEDwkFha51VKEKB6A9wW26/ZNwzA==",
"dev": true,
"license": "MIT",
"dependencies": {
"source-map": "^0.6.0"
}
},
"node_modules/@types/triple-beam": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz",
@@ -3811,6 +3823,25 @@
"node": ">=10"
}
},
"node_modules/source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/source-map-support": {
"version": "0.5.21",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
"license": "MIT",
"dependencies": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
}
},
"node_modules/stack-trace": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
@@ -4903,6 +4934,15 @@
"@types/node": "*"
}
},
"@types/source-map-support": {
"version": "0.5.10",
"resolved": "https://registry.npmjs.org/@types/source-map-support/-/source-map-support-0.5.10.tgz",
"integrity": "sha512-tgVP2H469x9zq34Z0m/fgPewGhg/MLClalNOiPIzQlXrSS2YrKu/xCdSCKnEDwkFha51VKEKB6A9wW26/ZNwzA==",
"dev": true,
"requires": {
"source-map": "^0.6.0"
}
},
"@types/triple-beam": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz",
@@ -6654,6 +6694,20 @@
"semver": "^7.5.3"
}
},
"source-map": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
},
"source-map-support": {
"version": "0.5.21",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
"integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
"requires": {
"buffer-from": "^1.0.0",
"source-map": "^0.6.0"
}
},
"stack-trace": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",

View File

@@ -7,8 +7,8 @@
},
"type": "module",
"scripts": {
"server": "nodemon --exec \"node --import tsx\" server.ts",
"start": "node dist/server.js",
"server": "nodemon --exec \"node --enable-source-maps --import tsx\" server.ts",
"start": "node --enable-source-maps dist/server.js",
"build": "tsc -p .",
"makeitpretty": "prettier --write \"**/*.{css,js,json,jsx,scss,ts}\""
},
@@ -33,6 +33,7 @@
"node-cron": "^4.2.1",
"response-time": "^2.3.4",
"simple-thumbnail": "^1.6.5",
"source-map-support": "^0.5.21",
"winston": "^3.18.3",
"winston-daily-rotate-file": "^5.0.0"
},
@@ -47,6 +48,7 @@
"@types/node": "^24.9.2",
"@types/node-cron": "^3.0.11",
"@types/response-time": "^2.3.9",
"@types/source-map-support": "^0.5.10",
"nodemon": "^3.1.10",
"prettier": "^3.6.2",
"ts-node": "^10.9.2",

View File

@@ -1,3 +1,4 @@
import "source-map-support/register";
import bodyParser from "body-parser";
import compression from "compression";
import cors from "cors";

16
test-sourcemap.js Normal file
View File

@@ -0,0 +1,16 @@
// Sanity check that stack traces are remapped through source maps.
import "source-map-support/register";

console.log("Testing source map support...");

// Deliberately dereference null so the thrown TypeError carries a stack
// trace pointing at this exact line once source maps are applied.
const triggerNullAccess = () => {
  const obj = null;
  console.log(obj.someProperty); // throws TypeError here on purpose
};

try {
  triggerNullAccess();
} catch (error) {
  console.log("Error caught:", error.stack);
}

View File

@@ -8,6 +8,11 @@
"resolveJsonModule": true /* Enable importing .json files */,
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
/* Emit */
"sourceMap": true /* Create source map files for emitted JavaScript files. */,
"inlineSourceMap": false /* Include sourcemap files inside the emitted JavaScript. */,
"inlineSources": true /* Include source code in the sourcemaps inside the emitted JavaScript. */,
/* Interop Constraints */
"isolatedModules": true /* Ensure that each file can be safely transpiled without relying on other imports. */,
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */,

View File

@@ -1,6 +1,7 @@
import { exec } from "child_process";
import { promisify } from "util";
import * as fs from "fs-extra";
import { readdir, stat as fsStat } from "fs/promises";
import * as path from "path";
import { logger } from "../server.js";
import { FolderPaths } from "./serverInit.js";
@@ -21,6 +22,7 @@ export interface JobFolderStats {
documentCount: number;
totalSizeBytes: number;
totalSizeMB: number;
fileTypeStats: { [extension: string]: number };
}
export interface JobsDirectoryAnalysis {
@@ -28,6 +30,7 @@ export interface JobsDirectoryAnalysis {
totalDocuments: number;
totalSizeBytes: number;
totalSizeMB: number;
fileTypeStats: { [extension: string]: number };
jobs: JobFolderStats[];
}
@@ -163,18 +166,20 @@ export class S3Sync {
totalDocuments: 0,
totalSizeBytes: 0,
totalSizeMB: 0,
fileTypeStats: {},
jobs: []
};
}
const jobFolders = await fs.readdir(jobsPath);
const jobFolders = await readdir(jobsPath);
const jobStats: JobFolderStats[] = [];
let totalDocuments = 0;
let totalSizeBytes = 0;
const aggregatedFileTypeStats: { [extension: string]: number } = {};
for (const jobFolder of jobFolders) {
const jobFolderPath = path.join(jobsPath, jobFolder);
const stat = await fs.stat(jobFolderPath);
const stat = await fsStat(jobFolderPath);
// Only process directories
if (stat.isDirectory()) {
@@ -182,6 +187,11 @@ export class S3Sync {
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
}
}
}
@@ -190,6 +200,7 @@ export class S3Sync {
totalDocuments,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats: aggregatedFileTypeStats,
jobs: jobStats.sort((a, b) => a.jobId.localeCompare(b.jobId))
};
@@ -210,47 +221,58 @@ export class S3Sync {
const jobFolderPath = path.join(jobsPath, jobId);
const relativePath = path.relative(FolderPaths.Root, jobFolderPath);
const { documentCount, totalSizeBytes } = await this.getDirectoryStats(jobFolderPath);
const { documentCount, totalSizeBytes, fileTypeStats } = await this.getDirectoryStats(jobFolderPath);
return {
jobId,
relativePath,
documentCount,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats
};
}
/**
* Recursively get document count and total size for a directory
*/
private async getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number }> {
private async getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
try {
const items = await fs.readdir(dirPath);
const items = await readdir(dirPath);
for (const item of items) {
const itemPath = path.join(dirPath, item);
const stat = await fs.stat(itemPath);
const stat = await fsStat(itemPath);
if (stat.isDirectory()) {
// Recursively analyze subdirectories
const subStats = await this.getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
}
} else {
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}
} catch (error) {
logger.error(`Error analyzing directory ${dirPath}:`, error);
}
return { documentCount, totalSizeBytes };
return { documentCount, totalSizeBytes, fileTypeStats };
}
}
@@ -297,18 +319,20 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
totalDocuments: 0,
totalSizeBytes: 0,
totalSizeMB: 0,
fileTypeStats: {},
jobs: []
};
}
const jobFolders = await fs.readdir(jobsPath);
const jobFolders = await readdir(jobsPath);
const jobStats: JobFolderStats[] = [];
let totalDocuments = 0;
let totalSizeBytes = 0;
const aggregatedFileTypeStats: { [extension: string]: number } = {};
for (const jobFolder of jobFolders) {
const jobFolderPath = path.join(jobsPath, jobFolder);
const stat = await fs.stat(jobFolderPath);
const stat = await fsStat(jobFolderPath);
// Only process directories
if (stat.isDirectory()) {
@@ -316,6 +340,11 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
jobStats.push(folderStats);
totalDocuments += folderStats.documentCount;
totalSizeBytes += folderStats.totalSizeBytes;
// Aggregate file type stats
for (const [ext, count] of Object.entries(folderStats.fileTypeStats)) {
aggregatedFileTypeStats[ext] = (aggregatedFileTypeStats[ext] || 0) + count;
}
}
}
@@ -324,6 +353,7 @@ export async function analyzeJobsDirectory(): Promise<JobsDirectoryAnalysis> {
totalDocuments,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats: aggregatedFileTypeStats,
jobs: jobStats.sort((a, b) => a.jobId.localeCompare(b.jobId))
};
@@ -344,45 +374,56 @@ async function analyzeJobFolder(jobsPath: string, jobId: string): Promise<JobFol
const jobFolderPath = path.join(jobsPath, jobId);
const relativePath = path.relative(FolderPaths.Root, jobFolderPath);
const { documentCount, totalSizeBytes } = await getDirectoryStats(jobFolderPath);
const { documentCount, totalSizeBytes, fileTypeStats } = await getDirectoryStats(jobFolderPath);
return {
jobId,
relativePath,
documentCount,
totalSizeBytes,
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100
totalSizeMB: Math.round((totalSizeBytes / (1024 * 1024)) * 100) / 100,
fileTypeStats
};
}
/**
* Recursively get document count and total size for a directory (standalone helper function)
*/
async function getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number }> {
async function getDirectoryStats(dirPath: string): Promise<{ documentCount: number; totalSizeBytes: number; fileTypeStats: { [extension: string]: number } }> {
let documentCount = 0;
let totalSizeBytes = 0;
const fileTypeStats: { [extension: string]: number } = {};
try {
const items = await fs.readdir(dirPath);
const items = await readdir(dirPath);
for (const item of items) {
const itemPath = path.join(dirPath, item);
const stat = await fs.stat(itemPath);
const stat = await fsStat(itemPath);
if (stat.isDirectory()) {
// Recursively analyze subdirectories
const subStats = await getDirectoryStats(itemPath);
documentCount += subStats.documentCount;
totalSizeBytes += subStats.totalSizeBytes;
// Merge file type stats
for (const [ext, count] of Object.entries(subStats.fileTypeStats)) {
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + count;
}
} else {
// Count files as documents
documentCount++;
totalSizeBytes += stat.size;
// Track file extension
const ext = path.extname(item).toLowerCase() || 'no-extension';
fileTypeStats[ext] = (fileTypeStats[ext] || 0) + 1;
}
}
} catch (error) {
logger.error(`Error analyzing directory ${dirPath}:`, error);
}
return { documentCount, totalSizeBytes };
return { documentCount, totalSizeBytes, fileTypeStats };
}