From 38cdb1e04c7c5749682580b43d8a9d218c111c66 Mon Sep 17 00:00:00 2001 From: Patrick FIc Date: Mon, 31 Mar 2025 12:12:39 -0700 Subject: [PATCH] Add add all to dev menu, hot reloading for main, and log cleanup. --- .vscode/launch.json | 25 ++++++++++ src/main/decoder/decode-ad1.ts | 11 ++--- src/main/decoder/decode-ad2.ts | 11 ++--- src/main/decoder/decoder.ts | 89 ++++++++++++++++++++++++++++++++++ src/main/index.ts | 24 +++++++++ 5 files changed, 146 insertions(+), 14 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 0b6b9a6..44b19a0 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -25,8 +25,23 @@ "presentation": { "hidden": true } + }, + { + "name": "Debug Main Process w/ Hot Reloading", + "type": "node", + "request": "launch", + "cwd": "${workspaceRoot}", + "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite", + "windows": { + "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron-vite.cmd" + }, + "runtimeArgs": ["--sourcemap", "--watch"], + "env": { + "REMOTE_DEBUGGING_PORT": "9222" + } } ], + "compounds": [ { "name": "Debug All", @@ -34,6 +49,16 @@ "presentation": { "order": 1 } + }, + { + "name": "Debug All (Hot Reload)", + "configurations": [ + "Debug Main Process w/ Hot Reloading", + "Debug Renderer Process" + ], + "presentation": { + "order": 1 + } } ] } diff --git a/src/main/decoder/decode-ad1.ts b/src/main/decoder/decode-ad1.ts index d0a55a4..30ebb2e 100644 --- a/src/main/decoder/decode-ad1.ts +++ b/src/main/decoder/decode-ad1.ts @@ -2,7 +2,6 @@ import { DBFFile } from "dbffile"; import log from "electron-log/main"; import _ from "lodash"; import deepLowerCaseKeys from "../../util/deepLowercaseKeys"; -import errorTypeCheck from "../../util/errorTypeCheck"; import store from "../store/store"; import { DecodedAd1, OwnerRecordInterface } from "./decode-ad1.interface"; @@ -13,9 +12,9 @@ const DecodeAD1 = async ( try { dbf = await DBFFile.open(`${extensionlessFilePath}A.AD1`); } catch (error) 
{ - log.error("Error opening AD1 File.", errorTypeCheck(error)); + // log.debug("Error opening AD1 File.", errorTypeCheck(error)); dbf = await DBFFile.open(`${extensionlessFilePath}.AD1`); - log.log("Trying to find AD1 file using regular CIECA Id."); + // log.debug("Trying to find AD1 file using regular CIECA Id."); } if (!dbf) { @@ -27,9 +26,6 @@ const DecodeAD1 = async ( //AD1 will always have only 1 row. //Commented lines have been cross referenced with existing partner fields. - const d = rawDBFRecord[0].ASGN_DATE; - console.log(d); - console.log(typeof rawDBFRecord[0].ASGN_DATE); const rawAd1Data: DecodedAd1 = deepLowerCaseKeys( _.pick(rawDBFRecord[0], [ //TODO: Add typings for EMS File Formats. @@ -206,8 +202,7 @@ const DecodeAD1 = async ( shopid: store.get("app.bodyshop.id"), }; } - const s = store.get("app"); - console.log(s); + return { ...rawAd1Data, owner: { data: ownerRecord } }; }; export default DecodeAD1; diff --git a/src/main/decoder/decode-ad2.ts b/src/main/decoder/decode-ad2.ts index 2d476b1..ddfb702 100644 --- a/src/main/decoder/decode-ad2.ts +++ b/src/main/decoder/decode-ad2.ts @@ -3,7 +3,6 @@ import log from "electron-log/main"; import _ from "lodash"; import deepLowerCaseKeys from "../../util/deepLowercaseKeys"; import { DecodedAD2 } from "./decode-ad2.interface"; -import errorTypeCheck from "../../util/errorTypeCheck"; const DecodeAD2 = async ( extensionlessFilePath: string, @@ -12,9 +11,9 @@ const DecodeAD2 = async ( try { dbf = await DBFFile.open(`${extensionlessFilePath}B.AD2`); } catch (error) { - log.error("Error opening AD2 File.", errorTypeCheck(error)); + // log.error("Error opening AD2 File.", errorTypeCheck(error)); dbf = await DBFFile.open(`${extensionlessFilePath}.AD2`); - log.log("Trying to find AD2 file using regular CIECA Id."); + // log.log("Trying to find AD2 file using regular CIECA Id."); } if (!dbf) { @@ -41,10 +40,10 @@ const DecodeAD2 = async ( "CLMT_ZIP", "CLMT_CTRY", "CLMT_PH1", - "CLMT_PH1X", + //"CLMT_PH1X", 
"CLMT_PH2", - "CLMT_PH2X", - "CLMT_FAX", + //"CLMT_PH2X", + //"CLMT_FAX", //"CLMT_FAXX", "CLMT_EA", //"EST_CO_ID", diff --git a/src/main/decoder/decoder.ts b/src/main/decoder/decoder.ts index 4e273e9..cf3d071 100644 --- a/src/main/decoder/decoder.ts +++ b/src/main/decoder/decoder.ts @@ -41,6 +41,7 @@ import DecodeTtl from "./decode-ttl"; import { DecodedTtl } from "./decode-ttl.interface"; import DecodeVeh from "./decode-veh"; import { DecodedVeh } from "./decode-veh.interface"; +import { platform } from "@electron-toolkit/utils"; async function ImportJob(filepath: string): Promise { const parsedFilePath = path.parse(filepath); @@ -51,6 +52,8 @@ async function ImportJob(filepath: string): Promise { log.debug("Importing Job", extensionlessFilePath); try { + await WaitForAllFiles(extensionlessFilePath, requiredExtensions); + //The below all end up returning parts of the job object. //Some of them return additional info - e.g. owner or vehicle record data at both the job and corresponding table level. const env: DecodedEnv = await DecodeEnv(extensionlessFilePath); @@ -154,6 +157,9 @@ async function ImportJob(filepath: string): Promise { log.info(`Job data saved to: ${filePath}`); } + //Temporarily adjust the claim number to ensure we are running on the right set of claims. 
+ jobObject.clm_no = `ELECTRONAPP-${jobObject.clm_no}`; + const newAvailableJob: AvailableJobSchema = { uploaded_by: store.get("user.email"), bodyshopid: store.get("app.bodyshop.id"), @@ -251,3 +257,86 @@ export interface AvailableJobSchema { issupplement: boolean; jobid: UUID | null; } + +async function WaitForAllFiles( + baseFilePath: string, + requiredExtensions: string[], + maxRetries: number = 5, + backoffMs: number = 1000, +): Promise { + if (platform.isWindows) { + for (let attempt = 1; attempt <= maxRetries; attempt++) { + const missingFiles = requiredExtensions.filter((ext) => { + const filePath = `${baseFilePath}.${ext}`; + const filePathA = `${baseFilePath}A.${ext}`; + const filePathB = `${baseFilePath}B.${ext}`; + const filePathV = `${baseFilePath}V.${ext}`; + return !( + fs.existsSync(filePath) || + fs.existsSync(filePathA) || + fs.existsSync(filePathB) || + fs.existsSync(filePathV) + ); + }); + + if (missingFiles.length === 0) { + return; // All files are present + } + + log.debug( + `Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. Retrying in ${backoffMs}ms...`, + ); + + if (attempt < maxRetries) { + await new Promise((resolve) => setTimeout(resolve, backoffMs)); + backoffMs *= 2; // Exponential backoff + } else { + throw new Error( + `The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`, + ); + } + } + } else { + //Linux and MacOS are case sensitive + //TODO: Implement case insensitivity. + for (let attempt = 1; attempt <= maxRetries; attempt++) { + const missingFiles = requiredExtensions.filter((ext) => { + const filePath = `${baseFilePath}.${ext}`; + return !fs.existsSync(filePath); + }); + + if (missingFiles.length === 0) { + return; // All files are present + } + + log.debug( + `Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. 
Retrying in ${backoffMs}ms...`, ); + + if (attempt < maxRetries) { + await new Promise((resolve) => setTimeout(resolve, backoffMs)); + backoffMs *= 2; // Exponential backoff + } else { + throw new Error( + `The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`, + ); + } + } + } +} + +const requiredExtensions = [ + "env", + "ad1", + "ad2", + "veh", + "lin", + "pfh", + "pfl", + "pft", + "pfm", + "pfo", + "stl", + "ttl", + "pfp", +]; diff --git a/src/main/index.ts b/src/main/index.ts index 6e6cb2d..cbf1792 100644 --- a/src/main/index.ts +++ b/src/main/index.ts @@ -14,6 +14,7 @@ import ImportJob from "./decoder/decoder"; import LocalServer from "./http-server/http-server"; import { TestQB } from "./quickbooks-desktop/quickbooks-desktop"; import store from "./store/store"; +import fs from "fs"; log.initialize(); const isMac = process.platform === "darwin"; @@ -185,6 +186,29 @@ function createWindow(): void { ImportJob(`C:\\EMS\\CCC\\9ee762f4.ENV`); }, }, + { + label: "Add All Estimates in watched directories", + click: (): void => { + const directories = store.get("settings.filepaths") as string[]; + const files: string[] = []; + directories.forEach((directory) => { + try { + const envFiles = fs + .readdirSync(directory) + .filter((file: string) => file.endsWith(".env")); + envFiles.forEach((file) => { + const fullPath = path.join(directory, file); + files.push(fullPath); + ImportJob(fullPath); + }); + } catch (error) { + log.error(`Failed to read directory ${directory}:`, error); + } + }); + + log.info(`Imported ${files.length} estimate files from watched directories.`); + }, + }, ], }, ];