// NOTE(review): removed a non-code artifact header ("416 lines / 13 KiB / TypeScript", duplicated) that was prepended by a copy/extraction tool and would not compile.
import { platform } from "@electron-toolkit/utils";
|
|
import { UUID } from "crypto";
|
|
import { Notification, shell } from "electron";
|
|
import log from "electron-log/main";
|
|
import fs from "fs";
|
|
import _ from "lodash";
|
|
import path from "path";
|
|
import errorTypeCheck from "../../util/errorTypeCheck";
|
|
import client from "../graphql/graphql-client";
|
|
import {
|
|
INSERT_AVAILABLE_JOB_TYPED,
|
|
InsertAvailableJobResult,
|
|
QUERY_JOB_BY_CLM_NO_TYPED,
|
|
QUERY_VEHICLE_BY_VIN_TYPED,
|
|
QueryJobByClmNoResult,
|
|
VehicleQueryResult,
|
|
} from "../graphql/queries";
|
|
import store from "../store/store";
|
|
import DecodeAD1 from "./decode-ad1";
|
|
import { DecodedAd1 } from "./decode-ad1.interface";
|
|
import DecodeAD2 from "./decode-ad2";
|
|
import { DecodedAD2 } from "./decode-ad2.interface";
|
|
import DecodeEnv from "./decode-env";
|
|
import { DecodedEnv } from "./decode-env.interface";
|
|
import DecodeLin from "./decode-lin";
|
|
import { DecodedLin } from "./decode-lin.interface";
|
|
import DecodePfh from "./decode-pfh";
|
|
import { DecodedPfh } from "./decode-pfh.interface";
|
|
import DecodePfl from "./decode-pfl";
|
|
import { DecodedPfl } from "./decode-pfl.interface";
|
|
import DecodePfm from "./decode-pfm";
|
|
import { DecodedPfm } from "./decode-pfm.interface";
|
|
import DecodePfo from "./decode-pfo";
|
|
import { DecodedPfo } from "./decode-pfo.interface";
|
|
import DecodePfp from "./decode-pfp";
|
|
import { DecodedPfp } from "./decode-pfp.interface";
|
|
import DecodePft from "./decode-pft";
|
|
import { DecodedPft } from "./decode-pft.interface";
|
|
import DecodeStl from "./decode-stl";
|
|
import { DecodedStl } from "./decode-stl.interface";
|
|
import DecodeTtl from "./decode-ttl";
|
|
import { DecodedTtl } from "./decode-ttl.interface";
|
|
import DecodeVeh from "./decode-veh";
|
|
import { DecodedVeh } from "./decode-veh.interface";
|
|
import setAppProgressbar from "../util/setAppProgressBar";
|
|
import UploadEmsToS3 from "./emsbackup";
|
|
|
|
async function ImportJob(filepath: string): Promise<void> {
|
|
const parsedFilePath = path.parse(filepath);
|
|
const extensionlessFilePath = path.join(
|
|
parsedFilePath.dir,
|
|
parsedFilePath.name,
|
|
);
|
|
log.debug("Importing Job", extensionlessFilePath);
|
|
|
|
try {
|
|
await WaitForAllFiles(extensionlessFilePath, requiredExtensions);
|
|
|
|
//The below all end up returning parts of the job object.
|
|
//Some of them return additional info - e.g. owner or vehicle record data at both the job and corresponding table level.
|
|
setAppProgressbar(0.1);
|
|
const env: DecodedEnv = await DecodeEnv(extensionlessFilePath);
|
|
setAppProgressbar(0.15);
|
|
const ad1: DecodedAd1 = await DecodeAD1(extensionlessFilePath);
|
|
setAppProgressbar(0.2);
|
|
const ad2: DecodedAD2 = await DecodeAD2(extensionlessFilePath);
|
|
setAppProgressbar(0.25);
|
|
const veh: DecodedVeh = await DecodeVeh(extensionlessFilePath);
|
|
setAppProgressbar(0.3);
|
|
const lin: DecodedLin = await DecodeLin(extensionlessFilePath);
|
|
setAppProgressbar(0.35);
|
|
const pfh: DecodedPfh = await DecodePfh(extensionlessFilePath);
|
|
setAppProgressbar(0.4);
|
|
const pfl: DecodedPfl = await DecodePfl(extensionlessFilePath);
|
|
setAppProgressbar(0.45);
|
|
const pft: DecodedPft = await DecodePft(extensionlessFilePath);
|
|
setAppProgressbar(0.5);
|
|
const pfm: DecodedPfm = await DecodePfm(extensionlessFilePath);
|
|
setAppProgressbar(0.55);
|
|
const pfo: DecodedPfo = await DecodePfo(extensionlessFilePath); // TODO: This will be the `cieca_pfo` object
|
|
setAppProgressbar(0.6);
|
|
const stl: DecodedStl = await DecodeStl(extensionlessFilePath); // TODO: This will be the `cieca_stl` object
|
|
setAppProgressbar(0.65);
|
|
const ttl: DecodedTtl = await DecodeTtl(extensionlessFilePath);
|
|
setAppProgressbar(0.7);
|
|
const pfp: DecodedPfp = await DecodePfp(extensionlessFilePath);
|
|
setAppProgressbar(0.75);
|
|
|
|
const jobObjectUncleaned: RawJobDataObject = {
|
|
...env,
|
|
...ad1,
|
|
...ad2,
|
|
...veh,
|
|
...lin,
|
|
...pfh,
|
|
...pfl,
|
|
...pft,
|
|
...pfm,
|
|
...pfo,
|
|
...stl,
|
|
...ttl,
|
|
...pfp,
|
|
shopid: store.get("app.bodyshop.id") as UUID,
|
|
};
|
|
|
|
// Replace owner information with claimant information if necessary
|
|
const jobObject = ReplaceOwnerInfoWithClaimant(jobObjectUncleaned);
|
|
setAppProgressbar(0.8);
|
|
|
|
if (import.meta.env.DEV) {
|
|
// Save jobObject to a timestamped JSON file
|
|
const timestamp = new Date()
|
|
.toISOString()
|
|
.replace(/:/g, "-")
|
|
.replace(/\..+/, "");
|
|
const fileName = `job_${timestamp}_${parsedFilePath.name}.json`;
|
|
const logsDir = path.join(process.cwd(), "logs");
|
|
|
|
// Create logs directory if it doesn't exist
|
|
if (!fs.existsSync(logsDir)) {
|
|
fs.mkdirSync(logsDir, { recursive: true });
|
|
}
|
|
|
|
const filePath = path.join(logsDir, fileName);
|
|
fs.writeFileSync(filePath, JSON.stringify(jobObject, null, 2), "utf8");
|
|
log.info(`Job data saved to: ${filePath}`);
|
|
}
|
|
|
|
const newAvailableJob: AvailableJobSchema = {
|
|
uploaded_by: store.get("user.email"),
|
|
bodyshopid: store.get("app.bodyshop.id"),
|
|
cieca_id: jobObject.ciecaid,
|
|
est_data: jobObject,
|
|
ownr_name: `${jobObject.ownr_fn} ${jobObject.ownr_ln} ${jobObject.ownr_co_nm}`,
|
|
ins_co_nm: jobObject.ins_co_nm,
|
|
vehicle_info: `${jobObject.v_model_yr} ${jobObject.v_make_desc} ${jobObject.v_model_desc}`,
|
|
clm_no: jobObject.clm_no,
|
|
clm_amt: jobObject.clm_total,
|
|
// source_system: jobObject.source_system, //TODO: Add back source system if needed.
|
|
issupplement: false,
|
|
jobid: null,
|
|
};
|
|
setAppProgressbar(0.85);
|
|
|
|
const existingVehicleRecord: VehicleQueryResult = await client.request(
|
|
QUERY_VEHICLE_BY_VIN_TYPED,
|
|
{
|
|
vin: jobObject.v_vin,
|
|
},
|
|
);
|
|
|
|
if (existingVehicleRecord.vehicles.length > 0) {
|
|
delete newAvailableJob.est_data.vehicle;
|
|
newAvailableJob.est_data.vehicleid = existingVehicleRecord.vehicles[0].id;
|
|
}
|
|
|
|
console.log("Available Job record to upload;", newAvailableJob);
|
|
|
|
setAppProgressbar(0.95);
|
|
const existingJobRecord: QueryJobByClmNoResult = await client.request(
|
|
QUERY_JOB_BY_CLM_NO_TYPED,
|
|
{ clm_no: jobObject.clm_no },
|
|
);
|
|
|
|
if (existingJobRecord.jobs.length > 0) {
|
|
newAvailableJob.issupplement = true;
|
|
newAvailableJob.jobid = existingJobRecord.jobs[0].id;
|
|
}
|
|
|
|
const insertRecordResult: InsertAvailableJobResult = await client.request(
|
|
INSERT_AVAILABLE_JOB_TYPED,
|
|
{
|
|
jobInput: [newAvailableJob],
|
|
},
|
|
);
|
|
setAppProgressbar(-1);
|
|
const uploadNotification = new Notification({
|
|
title: "Job Imported",
|
|
//subtitle: `${newAvailableJob.ownr_name} - ${newAvailableJob.vehicle_info}`,
|
|
body: `${newAvailableJob.ownr_name} - ${newAvailableJob.vehicle_info}. Click to view.`,
|
|
actions: [{ text: "View Job", type: "button" }],
|
|
});
|
|
uploadNotification.on("click", () => {
|
|
shell.openExternal(
|
|
`${
|
|
store.get("app.isTest")
|
|
? import.meta.env.VITE_FE_URL_TEST
|
|
: import.meta.env.VITE_FE_URL
|
|
}/manage/available`,
|
|
);
|
|
});
|
|
uploadNotification.show();
|
|
|
|
log.debug("Job inserted", insertRecordResult);
|
|
|
|
UploadEmsToS3({
|
|
extensionlessFilePath,
|
|
bodyshopid: newAvailableJob.bodyshopid,
|
|
ciecaid: jobObject.ciecaid ?? "",
|
|
clm_no: jobObject.clm_no ?? "",
|
|
ownr_ln: jobObject.ownr_ln ?? "",
|
|
});
|
|
} catch (error) {
|
|
log.error("Error encountered while decoding job. ", errorTypeCheck(error));
|
|
const uploadNotificationFailure = new Notification({
|
|
title: "Job Upload Failure",
|
|
body: errorTypeCheck(error).message, //TODO: Remove after debug.
|
|
});
|
|
|
|
uploadNotificationFailure.show();
|
|
}
|
|
}
|
|
|
|
export default ImportJob;
|
|
|
|
/**
 * Flat merge of every decoded EMS segment plus shop identification.
 * Built in ImportJob by spreading the decoder results into one object;
 * later spreads win on key collisions.
 */
export interface RawJobDataObject
  extends DecodedEnv,
    DecodedAd1,
    DecodedAD2,
    DecodedVeh,
    DecodedLin,
    DecodedPfh,
    DecodedPfl,
    DecodedPft,
    DecodedPfm,
    DecodedPfo,
    DecodedStl,
    DecodedTtl,
    DecodedPfp {
  // Set in ImportJob when an existing vehicle record (matched by VIN) is reused.
  vehicleid?: UUID;
  // Body shop the import belongs to (read from store key "app.bodyshop.id").
  shopid: UUID;
}
|
|
|
|
/**
 * Shape of the row inserted for a newly imported job
 * (passed as `jobInput` to INSERT_AVAILABLE_JOB_TYPED).
 */
export interface AvailableJobSchema {
  // Read from store key "user.email" at import time.
  uploaded_by: string;
  bodyshopid: UUID;
  // CIECA estimate id, when the EMS data provides one.
  cieca_id?: string;
  // Full merged job payload from the decoders.
  est_data: RawJobDataObject;
  // Display string built as "<first> <last> <company>".
  ownr_name: string;
  ins_co_nm?: string;
  // Display string built as "<year> <make> <model>".
  vehicle_info: string;
  clm_no?: string;
  clm_amt: number;
  source_system?: string | null;
  // True when an existing job shares this claim number (supplement import).
  issupplement: boolean;
  // Id of the existing job a supplement attaches to; null for new jobs.
  jobid: UUID | null;
}
|
|
|
|
async function WaitForAllFiles(
|
|
baseFilePath: string,
|
|
requiredExtensions: string[],
|
|
maxRetries: number = 5,
|
|
backoffMs: number = 1000,
|
|
): Promise<void> {
|
|
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
//Get all files in directory if Mac.
|
|
let filesInDir: string[] = [];
|
|
if (platform.isMacOS) {
|
|
const dir: string = path.dirname(baseFilePath);
|
|
filesInDir = fs.readdirSync(dir).map((file) => file.toLowerCase());
|
|
}
|
|
|
|
const missingFiles = requiredExtensions.filter((ext) => {
|
|
const filePath: string = `${baseFilePath}.${ext}`;
|
|
const filePathA: string = `${baseFilePath}A.${ext}`;
|
|
const filePathB: string = `${baseFilePath}B.${ext}`;
|
|
const filePathV: string = `${baseFilePath}V.${ext}`;
|
|
|
|
if (!platform.isWindows) {
|
|
// Case-insensitive check for macOS/Linux
|
|
const baseName: string = path.basename(baseFilePath);
|
|
|
|
return !(
|
|
filesInDir.includes(`${baseName}.${ext}`.toLowerCase()) ||
|
|
filesInDir.includes(`${baseName}A.${ext}`.toLowerCase()) ||
|
|
filesInDir.includes(`${baseName}B.${ext}`.toLowerCase()) ||
|
|
filesInDir.includes(`${baseName}V.${ext}`.toLowerCase())
|
|
);
|
|
} else {
|
|
// Case-sensitive check for other platforms
|
|
return !(
|
|
fs.existsSync(filePath) ||
|
|
fs.existsSync(filePathA) ||
|
|
fs.existsSync(filePathB) ||
|
|
fs.existsSync(filePathV)
|
|
);
|
|
}
|
|
});
|
|
|
|
if (missingFiles.length === 0) {
|
|
return; // All files are present
|
|
}
|
|
|
|
log.debug(
|
|
`Attempt ${attempt}: Missing files: ${missingFiles.join(", ")}. Retrying in ${backoffMs}ms...`,
|
|
);
|
|
|
|
if (attempt < maxRetries) {
|
|
await new Promise((resolve) => setTimeout(resolve, backoffMs));
|
|
backoffMs *= 2; // Exponential backoff
|
|
} else {
|
|
throw new Error(
|
|
`The set of files is not valid. Missing files for CIECA ID ${baseFilePath}: ${missingFiles.join(", ")}`,
|
|
);
|
|
}
|
|
}
|
|
}
|
|
|
|
const requiredExtensions = [
|
|
"env",
|
|
"ad1",
|
|
"ad2",
|
|
"veh",
|
|
"lin",
|
|
"pfh",
|
|
"pfl",
|
|
"pft",
|
|
"pfm",
|
|
"pfo",
|
|
"stl",
|
|
"ttl",
|
|
"pfp",
|
|
];
|
|
|
|
export function ReplaceOwnerInfoWithClaimant<
|
|
T extends Partial<
|
|
Pick<
|
|
RawJobDataObject,
|
|
| "ownr_ln"
|
|
| "ownr_fn"
|
|
| "ownr_co_nm"
|
|
| "ownr_title"
|
|
| "ownr_co_nm"
|
|
| "ownr_addr1"
|
|
| "ownr_addr2"
|
|
| "ownr_city"
|
|
| "ownr_st"
|
|
| "ownr_zip"
|
|
| "ownr_ctry"
|
|
| "ownr_ph1"
|
|
| "ownr_ph2"
|
|
| "ownr_ea"
|
|
| "clmt_ln"
|
|
| "clmt_fn"
|
|
| "clmt_title"
|
|
| "clmt_co_nm"
|
|
| "clmt_addr1"
|
|
| "clmt_addr2"
|
|
| "clmt_city"
|
|
| "clmt_st"
|
|
| "clmt_zip"
|
|
| "clmt_ctry"
|
|
| "clmt_ph1"
|
|
| "clmt_ph2"
|
|
| "clmt_ea"
|
|
| "owner"
|
|
>
|
|
>,
|
|
>(jobObject: T): T {
|
|
// In some scenarios, the owner information is missing. So we use the claimant instead.
|
|
// We pull the claimant info for this, but we don't store it in our system, so it needs to be deleted regardless.
|
|
if (
|
|
_.isEmpty(jobObject.ownr_ln) &&
|
|
_.isEmpty(jobObject.ownr_fn) &&
|
|
_.isEmpty(jobObject.ownr_co_nm)
|
|
) {
|
|
jobObject.ownr_ln = jobObject.clmt_ln;
|
|
jobObject.ownr_fn = jobObject.clmt_fn;
|
|
jobObject.ownr_title = jobObject.clmt_title;
|
|
jobObject.ownr_co_nm = jobObject.clmt_co_nm;
|
|
jobObject.ownr_addr1 = jobObject.clmt_addr1;
|
|
jobObject.ownr_addr2 = jobObject.clmt_addr2;
|
|
jobObject.ownr_city = jobObject.clmt_city;
|
|
jobObject.ownr_st = jobObject.clmt_st;
|
|
jobObject.ownr_zip = jobObject.clmt_zip;
|
|
jobObject.ownr_ctry = jobObject.clmt_ctry;
|
|
jobObject.ownr_ph1 = jobObject.clmt_ph1;
|
|
jobObject.ownr_ph2 = jobObject.clmt_ph2;
|
|
jobObject.ownr_ea = jobObject.clmt_ea;
|
|
|
|
// Ensure the owner and owner.data fields exist before assigning values
|
|
if (jobObject.owner?.data) {
|
|
jobObject.owner.data.ownr_ln = jobObject.clmt_ln;
|
|
jobObject.owner.data.ownr_fn = jobObject.clmt_fn;
|
|
jobObject.owner.data.ownr_title = jobObject.clmt_title;
|
|
jobObject.owner.data.ownr_co_nm = jobObject.clmt_co_nm;
|
|
jobObject.owner.data.ownr_addr1 = jobObject.clmt_addr1;
|
|
jobObject.owner.data.ownr_addr2 = jobObject.clmt_addr2;
|
|
jobObject.owner.data.ownr_city = jobObject.clmt_city;
|
|
jobObject.owner.data.ownr_st = jobObject.clmt_st;
|
|
jobObject.owner.data.ownr_zip = jobObject.clmt_zip;
|
|
jobObject.owner.data.ownr_ctry = jobObject.clmt_ctry;
|
|
jobObject.owner.data.ownr_ph1 = jobObject.clmt_ph1;
|
|
jobObject.owner.data.ownr_ph2 = jobObject.clmt_ph2;
|
|
jobObject.owner.data.ownr_ea = jobObject.clmt_ea;
|
|
}
|
|
}
|
|
|
|
// Delete the claimant info as it's not needed.
|
|
delete jobObject.clmt_ln;
|
|
delete jobObject.clmt_fn;
|
|
delete jobObject.clmt_title;
|
|
delete jobObject.clmt_co_nm;
|
|
delete jobObject.clmt_addr1;
|
|
delete jobObject.clmt_addr2;
|
|
delete jobObject.clmt_city;
|
|
delete jobObject.clmt_st;
|
|
delete jobObject.clmt_zip;
|
|
delete jobObject.clmt_ctry;
|
|
delete jobObject.clmt_ph1;
|
|
delete jobObject.clmt_ph2;
|
|
delete jobObject.clmt_ea;
|
|
|
|
return jobObject;
|
|
}
|