Add folder scan and case-insensitive file matching to the ready check.
This commit is contained in:
@@ -1,3 +1,4 @@
|
||||
import { platform } from "@electron-toolkit/utils";
|
||||
import { UUID } from "crypto";
|
||||
import { Notification } from "electron";
|
||||
import log from "electron-log/main";
|
||||
@@ -41,7 +42,6 @@ import DecodeTtl from "./decode-ttl";
|
||||
import { DecodedTtl } from "./decode-ttl.interface";
|
||||
import DecodeVeh from "./decode-veh";
|
||||
import { DecodedVeh } from "./decode-veh.interface";
|
||||
import { platform } from "@electron-toolkit/utils";
|
||||
|
||||
async function ImportJob(filepath: string): Promise<void> {
|
||||
const parsedFilePath = path.parse(filepath);
|
||||
@@ -70,7 +70,7 @@ async function ImportJob(filepath: string): Promise<void> {
|
||||
const ttl: DecodedTtl = await DecodeTtl(extensionlessFilePath);
|
||||
const pfp: DecodedPfp = await DecodePfp(extensionlessFilePath);
|
||||
|
||||
const jobObject: RawJobDataObject = {
|
||||
const jobObjectUncleaned: RawJobDataObject = {
|
||||
...env,
|
||||
...ad1,
|
||||
...ad2,
|
||||
@@ -87,56 +87,8 @@ async function ImportJob(filepath: string): Promise<void> {
|
||||
shopid: store.get("app.bodyshop.id") as UUID,
|
||||
};
|
||||
|
||||
//In some scenarios, the owner information is missing. So we use the claimant instead.
|
||||
//We pull the claimant info for this, but we don't store it in our system, so it needs to be deleted regardless.
|
||||
if (
|
||||
_.isEmpty(jobObject.ownr_ln) &&
|
||||
_.isEmpty(jobObject.ownr_fn) &&
|
||||
_.isEmpty(jobObject.ownr_co_nm)
|
||||
) {
|
||||
jobObject.ownr_ln = jobObject.clmt_ln;
|
||||
jobObject.ownr_fn = jobObject.clmt_fn;
|
||||
jobObject.ownr_title = jobObject.clmt_title;
|
||||
jobObject.ownr_co_nm = jobObject.clmt_co_nm;
|
||||
jobObject.ownr_addr1 = jobObject.clmt_addr1;
|
||||
jobObject.ownr_addr2 = jobObject.clmt_addr2;
|
||||
jobObject.ownr_city = jobObject.clmt_city;
|
||||
jobObject.ownr_st = jobObject.clmt_st;
|
||||
jobObject.ownr_zip = jobObject.clmt_zip;
|
||||
jobObject.ownr_ctry = jobObject.clmt_ctry;
|
||||
jobObject.ownr_ph1 = jobObject.clmt_ph1;
|
||||
jobObject.ownr_ph2 = jobObject.clmt_ph2;
|
||||
jobObject.ownr_ea = jobObject.clmt_ea;
|
||||
|
||||
jobObject.owner.data.ownr_ln = jobObject.clmt_ln;
|
||||
jobObject.owner.data.ownr_fn = jobObject.clmt_fn;
|
||||
jobObject.owner.data.ownr_title = jobObject.clmt_title;
|
||||
jobObject.owner.data.ownr_co_nm = jobObject.clmt_co_nm;
|
||||
jobObject.owner.data.ownr_addr1 = jobObject.clmt_addr1;
|
||||
jobObject.owner.data.ownr_addr2 = jobObject.clmt_addr2;
|
||||
jobObject.owner.data.ownr_city = jobObject.clmt_city;
|
||||
jobObject.owner.data.ownr_st = jobObject.clmt_st;
|
||||
jobObject.owner.data.ownr_zip = jobObject.clmt_zip;
|
||||
jobObject.owner.data.ownr_ctry = jobObject.clmt_ctry;
|
||||
jobObject.owner.data.ownr_ph1 = jobObject.clmt_ph1;
|
||||
jobObject.owner.data.ownr_ph2 = jobObject.clmt_ph2;
|
||||
jobObject.owner.data.ownr_ea = jobObject.clmt_ea;
|
||||
}
|
||||
|
||||
//Delete the claimant info as it's not needed.
|
||||
delete jobObject.clmt_ln;
|
||||
delete jobObject.clmt_fn;
|
||||
delete jobObject.clmt_title;
|
||||
delete jobObject.clmt_co_nm;
|
||||
delete jobObject.clmt_addr1;
|
||||
delete jobObject.clmt_addr2;
|
||||
delete jobObject.clmt_city;
|
||||
delete jobObject.clmt_st;
|
||||
delete jobObject.clmt_zip;
|
||||
delete jobObject.clmt_ctry;
|
||||
delete jobObject.clmt_ph1;
|
||||
delete jobObject.clmt_ph2;
|
||||
delete jobObject.clmt_ea;
|
||||
// Replace owner information with claimant information if necessary
|
||||
const jobObject = ReplaceOwnerInfoWithClaimant(jobObjectUncleaned);
|
||||
|
||||
if (import.meta.env.DEV) {
|
||||
// Save jobObject to a timestamped JSON file
|
||||
@@ -264,63 +216,56 @@ async function WaitForAllFiles(
|
||||
maxRetries: number = 5,
|
||||
backoffMs: number = 1000,
|
||||
): Promise<void> {
|
||||
if (platform.isWindows) {
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
const missingFiles = requiredExtensions.filter((ext) => {
|
||||
const filePath = `${baseFilePath}.${ext}`;
|
||||
const filePathA = `${baseFilePath}A.${ext}`;
|
||||
const filePathB = `${baseFilePath}B.${ext}`;
|
||||
const filePathV = `${baseFilePath}V.${ext}`;
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
//Get all files in directory if Mac.
|
||||
let filesInDir: string[] = [];
|
||||
if (platform.isMacOS) {
|
||||
const dir: string = path.dirname(baseFilePath);
|
||||
filesInDir = fs.readdirSync(dir).map((file) => file.toLowerCase());
|
||||
}
|
||||
|
||||
const missingFiles = requiredExtensions.filter((ext) => {
|
||||
const filePath: string = `${baseFilePath}.${ext}`;
|
||||
const filePathA: string = `${baseFilePath}A.${ext}`;
|
||||
const filePathB: string = `${baseFilePath}B.${ext}`;
|
||||
const filePathV: string = `${baseFilePath}V.${ext}`;
|
||||
|
||||
if (!platform.isWindows) {
|
||||
// Case-insensitive check for macOS/Linux
|
||||
const baseName: string = path.basename(baseFilePath);
|
||||
|
||||
return !(
|
||||
filesInDir.includes(`${baseName}.${ext}`.toLowerCase()) ||
|
||||
filesInDir.includes(`${baseName}A.${ext}`.toLowerCase()) ||
|
||||
filesInDir.includes(`${baseName}B.${ext}`.toLowerCase()) ||
|
||||
filesInDir.includes(`${baseName}V.${ext}`.toLowerCase())
|
||||
);
|
||||
} else {
|
||||
// Case-sensitive check for other platforms
|
||||
return !(
|
||||
fs.existsSync(filePath) ||
|
||||
fs.existsSync(filePathA) ||
|
||||
fs.existsSync(filePathB) ||
|
||||
fs.existsSync(filePathV)
|
||||
);
|
||||
});
|
||||
|
||||
if (missingFiles.length === 0) {
|
||||
return; // All files are present
|
||||
}
|
||||
});
|
||||
|
||||
log.debug(
|
||||
`Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. Retrying in ${backoffMs}ms...`,
|
||||
);
|
||||
|
||||
if (attempt < maxRetries) {
|
||||
await new Promise((resolve) => setTimeout(resolve, backoffMs));
|
||||
backoffMs *= 2; // Exponential backoff
|
||||
} else {
|
||||
throw new Error(
|
||||
`The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`,
|
||||
);
|
||||
}
|
||||
if (missingFiles.length === 0) {
|
||||
return; // All files are present
|
||||
}
|
||||
} else {
|
||||
//Linux and MacOS are case sensitive
|
||||
//TODO: Implement case insensitivity.
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
const missingFiles = requiredExtensions.filter((ext) => {
|
||||
const filePath = `${baseFilePath}.${ext}`;
|
||||
return !fs.existsSync(filePath);
|
||||
});
|
||||
|
||||
if (missingFiles.length === 0) {
|
||||
return; // All files are present
|
||||
}
|
||||
log.debug(
|
||||
`Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. Retrying in ${backoffMs}ms...`,
|
||||
);
|
||||
|
||||
log.debug(
|
||||
`Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. Retrying in ${backoffMs}ms...`,
|
||||
if (attempt < maxRetries) {
|
||||
await new Promise((resolve) => setTimeout(resolve, backoffMs));
|
||||
backoffMs *= 2; // Exponential backoff
|
||||
} else {
|
||||
throw new Error(
|
||||
`The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`,
|
||||
);
|
||||
|
||||
if (attempt < maxRetries) {
|
||||
await new Promise((resolve) => setTimeout(resolve, backoffMs));
|
||||
backoffMs *= 2; // Exponential backoff
|
||||
} else {
|
||||
throw new Error(
|
||||
`The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -340,3 +285,95 @@ const requiredExtensions = [
|
||||
"ttl",
|
||||
"pfp",
|
||||
];
|
||||
|
||||
export function ReplaceOwnerInfoWithClaimant<
|
||||
T extends Partial<
|
||||
Pick<
|
||||
RawJobDataObject,
|
||||
| "ownr_ln"
|
||||
| "ownr_fn"
|
||||
| "ownr_co_nm"
|
||||
| "ownr_title"
|
||||
| "ownr_co_nm"
|
||||
| "ownr_addr1"
|
||||
| "ownr_addr2"
|
||||
| "ownr_city"
|
||||
| "ownr_st"
|
||||
| "ownr_zip"
|
||||
| "ownr_ctry"
|
||||
| "ownr_ph1"
|
||||
| "ownr_ph2"
|
||||
| "ownr_ea"
|
||||
| "clmt_ln"
|
||||
| "clmt_fn"
|
||||
| "clmt_title"
|
||||
| "clmt_co_nm"
|
||||
| "clmt_addr1"
|
||||
| "clmt_addr2"
|
||||
| "clmt_city"
|
||||
| "clmt_st"
|
||||
| "clmt_zip"
|
||||
| "clmt_ctry"
|
||||
| "clmt_ph1"
|
||||
| "clmt_ph2"
|
||||
| "clmt_ea"
|
||||
| "owner"
|
||||
>
|
||||
>,
|
||||
>(jobObject: T): T {
|
||||
// In some scenarios, the owner information is missing. So we use the claimant instead.
|
||||
// We pull the claimant info for this, but we don't store it in our system, so it needs to be deleted regardless.
|
||||
if (
|
||||
_.isEmpty(jobObject.ownr_ln) &&
|
||||
_.isEmpty(jobObject.ownr_fn) &&
|
||||
_.isEmpty(jobObject.ownr_co_nm)
|
||||
) {
|
||||
jobObject.ownr_ln = jobObject.clmt_ln;
|
||||
jobObject.ownr_fn = jobObject.clmt_fn;
|
||||
jobObject.ownr_title = jobObject.clmt_title;
|
||||
jobObject.ownr_co_nm = jobObject.clmt_co_nm;
|
||||
jobObject.ownr_addr1 = jobObject.clmt_addr1;
|
||||
jobObject.ownr_addr2 = jobObject.clmt_addr2;
|
||||
jobObject.ownr_city = jobObject.clmt_city;
|
||||
jobObject.ownr_st = jobObject.clmt_st;
|
||||
jobObject.ownr_zip = jobObject.clmt_zip;
|
||||
jobObject.ownr_ctry = jobObject.clmt_ctry;
|
||||
jobObject.ownr_ph1 = jobObject.clmt_ph1;
|
||||
jobObject.ownr_ph2 = jobObject.clmt_ph2;
|
||||
jobObject.ownr_ea = jobObject.clmt_ea;
|
||||
|
||||
// Ensure the owner and owner.data fields exist before assigning values
|
||||
if (jobObject.owner?.data) {
|
||||
jobObject.owner.data.ownr_ln = jobObject.clmt_ln;
|
||||
jobObject.owner.data.ownr_fn = jobObject.clmt_fn;
|
||||
jobObject.owner.data.ownr_title = jobObject.clmt_title;
|
||||
jobObject.owner.data.ownr_co_nm = jobObject.clmt_co_nm;
|
||||
jobObject.owner.data.ownr_addr1 = jobObject.clmt_addr1;
|
||||
jobObject.owner.data.ownr_addr2 = jobObject.clmt_addr2;
|
||||
jobObject.owner.data.ownr_city = jobObject.clmt_city;
|
||||
jobObject.owner.data.ownr_st = jobObject.clmt_st;
|
||||
jobObject.owner.data.ownr_zip = jobObject.clmt_zip;
|
||||
jobObject.owner.data.ownr_ctry = jobObject.clmt_ctry;
|
||||
jobObject.owner.data.ownr_ph1 = jobObject.clmt_ph1;
|
||||
jobObject.owner.data.ownr_ph2 = jobObject.clmt_ph2;
|
||||
jobObject.owner.data.ownr_ea = jobObject.clmt_ea;
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the claimant info as it's not needed.
|
||||
delete jobObject.clmt_ln;
|
||||
delete jobObject.clmt_fn;
|
||||
delete jobObject.clmt_title;
|
||||
delete jobObject.clmt_co_nm;
|
||||
delete jobObject.clmt_addr1;
|
||||
delete jobObject.clmt_addr2;
|
||||
delete jobObject.clmt_city;
|
||||
delete jobObject.clmt_st;
|
||||
delete jobObject.clmt_zip;
|
||||
delete jobObject.clmt_ctry;
|
||||
delete jobObject.clmt_ph1;
|
||||
delete jobObject.clmt_ph2;
|
||||
delete jobObject.clmt_ea;
|
||||
|
||||
return jobObject;
|
||||
}
|
||||
|
||||
57
src/main/decoder/folder-scan.ts
Normal file
57
src/main/decoder/folder-scan.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import path from "path";
|
||||
import { GetAllEnvFiles } from "../watcher/watcher";
|
||||
import DecodeAD1 from "./decode-ad1";
|
||||
import DecodeAD2 from "./decode-ad2";
|
||||
import DecodeEnv from "./decode-env";
|
||||
import DecodeVeh from "./decode-veh";
|
||||
import { ReplaceOwnerInfoWithClaimant } from "./decoder";
|
||||
|
||||
const folderScan = async (): Promise<FolderScanResult[]> => {
|
||||
//Get all ENV files for watched paths.
|
||||
const allEnvFiles = GetAllEnvFiles();
|
||||
//Run a simplified decode on them
|
||||
const returnedFiles: FolderScanResult[] = [];
|
||||
|
||||
for (const filepath of allEnvFiles) {
|
||||
const parsedFilePath = path.parse(filepath);
|
||||
const extensionlessFilePath = path.join(
|
||||
parsedFilePath.dir,
|
||||
parsedFilePath.name,
|
||||
);
|
||||
|
||||
const rawJob = {
|
||||
...(await DecodeEnv(extensionlessFilePath)),
|
||||
...(await DecodeAD1(extensionlessFilePath)),
|
||||
...(await DecodeAD2(extensionlessFilePath)),
|
||||
...(await DecodeVeh(extensionlessFilePath)),
|
||||
};
|
||||
const job = ReplaceOwnerInfoWithClaimant(rawJob);
|
||||
|
||||
const scanResult: FolderScanResult = {
|
||||
id: job.ciecaid,
|
||||
filepath: filepath,
|
||||
cieca_id: job.ciecaid,
|
||||
clm_no: job.clm_no,
|
||||
owner: `${job.ownr_fn} ${job.ownr_ln} ${job.ownr_co_nm}`.trim(),
|
||||
vehicle:
|
||||
`${job.vehicle?.data.v_model_yr} ${job.vehicle?.data.v_make_desc} ${job.vehicle?.data.v_model_desc}`.trim(),
|
||||
ins_co_nm: job.ins_co_nm,
|
||||
};
|
||||
|
||||
returnedFiles.push(scanResult);
|
||||
}
|
||||
//Build up the object and return it
|
||||
return returnedFiles;
|
||||
};
|
||||
|
||||
export interface FolderScanResult {
|
||||
id?: string;
|
||||
filepath: string;
|
||||
cieca_id?: string;
|
||||
clm_no?: string;
|
||||
owner: string;
|
||||
ins_co_nm?: string;
|
||||
vehicle: string;
|
||||
}
|
||||
|
||||
export default folderScan;
|
||||
@@ -4,6 +4,8 @@ import log from "electron-log/main";
|
||||
import express from "express";
|
||||
import http from "http";
|
||||
import errorTypeCheck from "../../util/errorTypeCheck";
|
||||
import ImportJob from "../decoder/decoder";
|
||||
import folderScan from "../decoder/folder-scan";
|
||||
import { handleQuickBookRequest } from "../quickbooks-desktop/quickbooks-desktop";
|
||||
|
||||
export default class LocalServer {
|
||||
@@ -109,6 +111,38 @@ export default class LocalServer {
|
||||
});
|
||||
|
||||
this.app.post("/qb", handleQuickBookRequest);
|
||||
this.app.post("/scan", async (_req, res): Promise<void> => {
|
||||
log.debug("[HTTP Server] Scan request received");
|
||||
const files = await folderScan();
|
||||
res.status(200).json(files);
|
||||
return;
|
||||
});
|
||||
this.app.post(
|
||||
"/import",
|
||||
async (req: express.Request, res: express.Response) => {
|
||||
log.debug("[HTTP Server] Import request received");
|
||||
const { filepath } = req.body;
|
||||
if (!filepath) {
|
||||
res.status(400).json({ error: "filepath is required" });
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await ImportJob(filepath);
|
||||
res.status(200).json({ success: true });
|
||||
} catch (error) {
|
||||
log.error(
|
||||
"[HTTP Server] Error importing file",
|
||||
errorTypeCheck(error),
|
||||
);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Error importing file",
|
||||
...errorTypeCheck(error),
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Add more routes as needed
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { electronApp, is, optimizer } from "@electron-toolkit/utils";
|
||||
import Sentry from "@sentry/electron/main";
|
||||
import { app, BrowserWindow, Menu, nativeImage, shell, Tray } from "electron";
|
||||
import log from "electron-log/main";
|
||||
import { autoUpdater } from "electron-updater";
|
||||
@@ -14,8 +15,7 @@ import ImportJob from "./decoder/decoder";
|
||||
import LocalServer from "./http-server/http-server";
|
||||
import { TestQB } from "./quickbooks-desktop/quickbooks-desktop";
|
||||
import store from "./store/store";
|
||||
import fs from "fs";
|
||||
import Sentry from "@sentry/electron/main";
|
||||
import { GetAllEnvFiles } from "./watcher/watcher";
|
||||
|
||||
Sentry.init({
|
||||
dsn: "https://ba41d22656999a8c1fd63bcb7df98650@o492140.ingest.us.sentry.io/4509074139447296",
|
||||
@@ -196,24 +196,7 @@ function createWindow(): void {
|
||||
{
|
||||
label: "Add All Estimes in watched directories",
|
||||
click: (): void => {
|
||||
const directories = store.get("settings.filepaths") as string[];
|
||||
const files: string[] = [];
|
||||
directories.forEach((directory) => {
|
||||
try {
|
||||
const envFiles = fs
|
||||
.readdirSync(directory)
|
||||
.filter((file: string) => file.endsWith(".env"));
|
||||
envFiles.forEach((file) => {
|
||||
const fullPath = path.join(directory, file);
|
||||
files.push(fullPath);
|
||||
ImportJob(fullPath);
|
||||
});
|
||||
} catch (error) {
|
||||
log.error(`Failed to read directory ${directory}:`, error);
|
||||
}
|
||||
});
|
||||
|
||||
files.forEach((file) => ImportJob(file));
|
||||
GetAllEnvFiles().forEach((file) => ImportJob(file));
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import chokidar, { FSWatcher } from "chokidar";
|
||||
import { BrowserWindow, Notification } from "electron";
|
||||
import log from "electron-log/main";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import errorTypeCheck from "../../util/errorTypeCheck";
|
||||
import ipcTypes from "../../util/ipcTypes.json";
|
||||
import ImportJob from "../decoder/decoder";
|
||||
import store from "../store/store";
|
||||
|
||||
let watcher: FSWatcher | null;
|
||||
|
||||
async function StartWatcher(): Promise<boolean> {
|
||||
@@ -136,10 +136,31 @@ async function HandleNewFile(path): Promise<void> {
|
||||
await ImportJob(path);
|
||||
}
|
||||
|
||||
function GetAllEnvFiles(): string[] {
|
||||
const directories = store.get("settings.filepaths") as string[];
|
||||
const files: string[] = [];
|
||||
directories.forEach((directory) => {
|
||||
try {
|
||||
const envFiles = fs
|
||||
.readdirSync(directory)
|
||||
.filter((file: string) => file.toLowerCase().endsWith(".env"));
|
||||
envFiles.forEach((file) => {
|
||||
const fullPath = path.join(directory, file);
|
||||
files.push(fullPath);
|
||||
});
|
||||
} catch (error) {
|
||||
log.error(`Failed to read directory ${directory}:`, error);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
return files;
|
||||
}
|
||||
|
||||
export {
|
||||
addWatcherPath,
|
||||
GetAllEnvFiles,
|
||||
removeWatcherPath,
|
||||
StartWatcher,
|
||||
StopWatcher,
|
||||
watcher,
|
||||
removeWatcherPath,
|
||||
addWatcherPath,
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user