Merge branch 'master-AIO' into feature/IO-3356-pbs-ro-posting

This commit is contained in:
Patrick Fic
2025-09-11 14:40:08 -07:00
758 changed files with 12971 additions and 8456 deletions

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
const queries = require("../../graphql-client/queries");

View File

@@ -1,8 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const IMEX_PBS_USER = process.env.IMEX_PBS_USER,
IMEX_PBS_PASSWORD = process.env.IMEX_PBS_PASSWORD;
const PBS_CREDENTIALS = {

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
const AxiosLib = require("axios").default;
const queries = require("../../graphql-client/queries");
@@ -116,6 +112,7 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte
await InsertFailedExportLog(socket, error);
}
};
// Was Successful
async function CheckForErrors(socket, response) {
if (response.WasSuccessful === undefined || response.WasSuccessful === true) {
@@ -142,7 +139,7 @@ async function QueryVehicleFromDms(socket) {
try {
if (!socket.JobData.v_vin) return null;
const { data: VehicleGetResponse, request } = await axios.post(
const { data: VehicleGetResponse } = await axios.post(
PBS_ENDPOINTS.VehicleGet,
{
SerialNumber: socket.JobData.bodyshop.pbs_serialnumber,

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const OAuthClient = require("intuit-oauth");
const logger = require("../../utils/logger");

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const OAuthClient = require("intuit-oauth");
const client = require("../../graphql-client/graphql-client").client;
@@ -13,7 +9,7 @@ const oauthClient = new OAuthClient({
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
const url = InstanceEndpoints();

View File

@@ -1,9 +1,6 @@
const urlBuilder = require("./qbo").urlBuilder;
const StandardizeName = require("./qbo").StandardizeName;
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const Dinero = require("dinero.js");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
@@ -19,7 +16,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -65,36 +62,34 @@ exports.default = async (req, res) => {
vendorRecord = await InsertVendorRecord(oauthClient, qbo_realmId, req, bill);
}
const insertResults = await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
// //No error. Mark the job exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
await client.setHeaders({ Authorization: BearerToken }).request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ billid: bill.id, success: true });
} catch (error) {
logger.log("qbo-paybles-create-error", "ERROR", req.user.email, null, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error?.authResponse && error.authResponse.body) ||
error.response?.data?.Fault?.Error.map((e) => e.Detail).join(", ") ||
(error && error.message)
error?.message
});
ret.push({
billid: bill.id,
@@ -107,7 +102,7 @@ exports.default = async (req, res) => {
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
@@ -141,7 +136,9 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
`select *
From vendor
where DisplayName = '${StandardizeName(bill.vendor.name)}'`
),
method: "POST",
headers: {
@@ -156,7 +153,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -194,7 +191,7 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
@@ -263,11 +260,11 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
@@ -280,8 +277,8 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines
};
@@ -305,7 +302,7 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
@@ -353,8 +350,8 @@ const generateBillLine = (
accountingconfig.qbo && accountingconfig.qbo_usa && region_config.includes("CA_")
? {}
: {
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
AccountRef: {
value: accounts[account.accountname]
}
@@ -373,7 +370,9 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Account where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
`select *
From Account
where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
),
method: "POST",
headers: {
@@ -387,10 +386,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: accounts.response?.status,
bodyshopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -403,9 +407,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: taxCodes.status,
bodyshopid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -418,7 +427,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: classes.status,
bodyshopid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const Dinero = require("dinero.js");
@@ -274,7 +270,13 @@ async function InsertPayment(oauthClient, qbo_realmId, req, payment, parentRef)
async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditMemo, parentTierRef, bodyshopid) {
const invoice = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Invoice where DocNumber like '${ro_number}%'`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Invoice
where DocNumber like '${ro_number}%'`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -290,7 +292,12 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
email: req.user.email
});
const paymentMethods = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From PaymentMethod`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From PaymentMethod`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -341,7 +348,12 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
if (isCreditMemo) {
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -357,7 +369,12 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, ro_number, isCreditM
email: req.user.email
});
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Item`
),
method: "POST",
headers: {
"Content-Type": "application/json"

View File

@@ -1,10 +1,6 @@
const urlBuilder = require("./qbo").urlBuilder;
const StandardizeName = require("./qbo").StandardizeName;
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const logger = require("../../utils/logger");
const apiGqlClient = require("../../graphql-client/graphql-client").client;
const queries = require("../../graphql-client/queries");
@@ -12,7 +8,6 @@ const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callba
const OAuthClient = require("intuit-oauth");
const CreateInvoiceLines = require("../qb-receivables-lines").default;
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const { generateOwnerTier } = require("../qbxml/qbxml-utils");
const { createMultiQbPayerLines } = require("../qb-receivables-lines");
@@ -21,7 +16,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -226,7 +221,10 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}'
and Active = true`
),
method: "POST",
headers: {
@@ -241,7 +239,7 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -296,7 +294,7 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -316,7 +314,10 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(ownerName)}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(ownerName)}'
and Active = true`
),
method: "POST",
headers: {
@@ -331,7 +332,7 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -358,11 +359,11 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
...(job.ownr_ea ? { PrimaryEmailAddr: { Address: job.ownr_ea.trim() } } : {}),
...(isThreeTier
? {
Job: true,
ParentRef: {
value: parentTierRef.Id
Job: true,
ParentRef: {
value: parentTierRef.Id
}
}
}
: {})
};
try {
@@ -382,7 +383,7 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -401,7 +402,10 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${job.ro_number}' and Active = true`
`select *
From Customer
where DisplayName = '${job.ro_number}'
and Active = true`
),
method: "POST",
headers: {
@@ -416,7 +420,7 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -464,7 +468,7 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -480,7 +484,13 @@ exports.InsertJob = InsertJob;
async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item where active=true maxresults 1000`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Item
where active = true maxresults 1000`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -494,10 +504,16 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, items);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode where active=true`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode
where active = true`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -511,9 +527,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -527,7 +548,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&
@@ -578,55 +599,57 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
DocNumber: job.ro_number,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -660,7 +683,7 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {
@@ -702,56 +725,58 @@ async function InsertInvoiceMultiPayerInvoice(
DocNumber: job.ro_number + suffix,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_") && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -785,7 +810,7 @@ async function InsertInvoiceMultiPayerInvoice(
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {

View File

@@ -1,8 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
function urlBuilder(realmId, object, query = null) {
return `https://${
process.env.NODE_ENV === "production" ? "" : "sandbox-"

View File

@@ -1,5 +1,3 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
@@ -9,10 +7,6 @@ const moment = require("moment-timezone");
const logger = require("../../utils/logger");
const InstanceManager = require("../../utils/instanceMgr").default;
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.default = async (req, res) => {
const { bills: billsToQuery } = req.body;

View File

@@ -1,4 +1,3 @@
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
@@ -8,10 +7,6 @@ const QbXmlUtils = require("./qbxml-utils");
const QbxmlReceivables = require("./qbxml-receivables");
const logger = require("../../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;
exports.default = async (req, res) => {

View File

@@ -1,4 +1,3 @@
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");
const Dinero = require("dinero.js");
@@ -9,10 +8,6 @@ const CreateInvoiceLines = require("../qb-receivables-lines").default;
const logger = require("../../utils/logger");
const InstanceManager = require("../../utils/instanceMgr").default;
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
Dinero.globalRoundingMode = "HALF_EVEN";
const { generateJobTier, generateOwnerTier, generateSourceTier } = QbXmlUtils;

View File

@@ -1,10 +1,12 @@
exports.addQbxmlHeader = addQbxmlHeader = (xml) => {
const addQbxmlHeader = (xml) => {
return `<?xml version="1.0" encoding="utf-8"?>
<?qbxml version="13.0"?>
${xml}
`;
};
exports.addQbxmlHeader = addQbxmlHeader;
exports.generateSourceTier = (jobs_by_pk) => {
return jobs_by_pk.ins_co_nm && jobs_by_pk.ins_co_nm.trim().replace(":", " ");
};

View File

@@ -1,11 +1,4 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
exports.createAssociation = async (req, res) => {

View File

@@ -1,14 +1,8 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const moment = require("moment-timezone");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.generatePpc = async (req, res) => {
const { jobid } = req.body;
const BearerToken = req.headers.authorization;

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
const queries = require("../graphql-client/queries");
@@ -248,7 +244,7 @@ function calculateAllocations(connectionData, job) {
// Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting),
// typeof Number.isInteger(bodyshop?.cdk_configuration?.sendmaterialscosting)
// );
if (!!bodyshop?.cdk_configuration?.sendmaterialscosting) {
if (bodyshop?.cdk_configuration?.sendmaterialscosting) {
//Manually send the percentage of the costing.
//Paint Mat

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const soap = require("soap");
const queries = require("../graphql-client/queries");

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const GraphQLClient = require("graphql-request").GraphQLClient;
const soap = require("soap");
const queries = require("../graphql-client/queries");

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const CdkBase = require("../web-sockets/web-socket");
const IMEX_CDK_USER = process.env.IMEX_CDK_USER,

View File

@@ -1,9 +1,5 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;

View File

@@ -1,9 +1,5 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;

View File

@@ -1,4 +1,3 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
@@ -6,9 +5,7 @@ const fs = require("fs");
const storage = require("node-persist");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const soap = require("soap");
const { sendServerEmail } = require("../email/sendemail");
@@ -24,7 +21,7 @@ const momentFormat = "yyyy-MM-DDTHH:mm:ss.SSS";
function pollFunc(fn, timeout, interval) {
var startTime = new Date().getTime();
(interval = interval || 1000), (canPoll = true);
((interval = interval || 1000), (canPoll = true));
(function p() {
canPoll = timeout === 0 ? true : new Date().getTime() - startTime <= timeout;

View File

@@ -1,13 +1,10 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;

View File

@@ -1,4 +1,3 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
@@ -11,9 +10,6 @@ const { sendServerEmail } = require("../email/sendemail");
const { uploadFileToS3 } = require("../utils/s3");
const crypto = require("crypto");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const AHDateFormat = "YYYY-MM-DD";
@@ -124,7 +120,7 @@ exports.default = async (req, res) => {
async function processShopData(shopsToProcess, start, end, skipUpload, ignoreDateFilter, allXMLResults, allErrors) {
for (const bodyshop of shopsToProcess) {
const shopid = bodyshop.imexshopid?.toLowerCase() || bodyshop.shopname.replace(/[^a-zA-Z0-9]/g, "").toLowerCase()
const shopid = bodyshop.imexshopid?.toLowerCase() || bodyshop.shopname.replace(/[^a-zA-Z0-9]/g, "").toLowerCase();
const erroredJobs = [];
try {
logger.log("CARFAX-start-shop-extract", "DEBUG", "api", bodyshop.id, {

View File

@@ -1,11 +1,9 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const logger = require("../utils/logger");
const fs = require("fs");
const { SecretsManagerClient, GetSecretValueCommand } = require("@aws-sdk/client-secrets-manager");
require("dotenv").config({ path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`) });
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;
@@ -77,7 +75,15 @@ exports.default = async (req, res) => {
await sendServerEmail({
subject: `Chatter Report ${moment().format("MM-DD-YY")}`,
text: `Errors:\n${JSON.stringify(allErrors, null, 2)}\n\n
Uploaded:\n${JSON.stringify({ filename: csvToUpload.filename, count: csvToUpload.count, result: csvToUpload.result }, null, 2)}`
Uploaded:\n${JSON.stringify(
{
filename: csvToUpload.filename,
count: csvToUpload.count,
result: csvToUpload.result
},
null,
2
)}`
});
logger.log("chatter-end", "DEBUG", "api", null, null);
@@ -107,7 +113,8 @@ async function processBatch(shopsToProcess, start, end, allChatterObjects, allEr
transaction_id: j.ro_number,
email: j.ownr_ea,
phone_number: j.ownr_ph1,
transaction_time: (j.actual_delivery && moment(j.actual_delivery).tz(bodyshop.timezone).format("YYYYMMDD-HHmm")) || ""
transaction_time:
(j.actual_delivery && moment(j.actual_delivery).tz(bodyshop.timezone).format("YYYYMMDD-HHmm")) || ""
};
});
allChatterObjects.push(...chatterObject);

View File

@@ -1,13 +1,10 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;

View File

@@ -6,4 +6,5 @@ exports.kaizen = require("./kaizen").default;
exports.usageReport = require("./usageReport").default;
exports.podium = require("./podium").default;
exports.emsUpload = require("./emsUpload").default;
exports.carfax = require("./carfax").default;
exports.carfax = require("./carfax").default;
exports.vehicletype = require("./vehicletype/vehicletype").default;

View File

@@ -1,13 +1,10 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
var builder = require("xmlbuilder2");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;

View File

@@ -1,12 +1,9 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const moment = require("moment-timezone");
const converter = require("json-2-csv");
const logger = require("../utils/logger");
const fs = require("fs");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
let Client = require("ssh2-sftp-client");
const client = require("../graphql-client/graphql-client").client;

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const emailer = require("../email/sendemail");
const moment = require("moment-timezone");

View File

@@ -0,0 +1,126 @@
[
"PROMASTER 1500",
"PROMASTER 2500",
"PROMASTER CITY",
"NV 1500",
"NV 200",
"NV 2500",
"NV 3500",
"NV1500",
"NV200",
"NV2500",
"NV3500",
"SPRINTER",
"E150 ECONOLINE CARGO VAN",
"E150 ECONOLINE XL",
"E250 ECONOLINE CARGO",
"E250 ECONOLINE CARGO (AMALGAM)",
"E250 ECONOLINE CARGO (INSPECT)",
"E250 ECONOLINE CARGO VAN EXT",
"E250 ECONOLINE SUPER CARGO VAN",
"E350 CUTAWAY VAN",
"E350 ECONO SD CARGO VAN EXT",
"E350 ECONOLINE CARGO VAN",
"E350 ECONOLINE CUTAWAY",
"E350 ECONOLINE SD CARGO VAN",
"E350 ECONOLINE SD XL",
"E350 ECONOLINE SD XL EXT",
"E350 ECONOLINE SD XLT",
"E350 ECONOLINE SD XLT EXT",
"E350 SD CUTAWAY",
"E450",
"E450 ECONOLINE",
"E450 ECONOLINE SD",
"E450 ECONOLINE SD CUTAWAY",
"TRANSIT 150 WB 130 CARGO VAN",
"TRANSIT 150 WB 130 XLT",
"TRANSIT 150 WB 148 CARGO VAN",
"TRANSIT 250 WB 130 CARGO VAN",
"TRANSIT 250 WB 148 CARGO VAN",
"TRANSIT 250 WB 148 EL CARGO",
"TRANSIT 350 WB 148 CARGO VAN",
"TRANSIT 350 WB 148 EL CARGO",
"TRANSIT CONNECT XL CARGO VAN",
"TRANSIT CONNECT XLT CARGO VAN",
"250 TRANSIT",
"CITY EXPRESS LS CARGO VAN",
"CITY EXPRESS LT CARGO VAN",
"EXPRESS 1500",
"EXPRESS 1500 CARGO VAN",
"EXPRESS 1500 LS",
"EXPRESS 1500 LT",
"EXPRESS 2500 CARGO VAN",
"EXPRESS 2500 CARGO VAN EXT",
"EXPRESS 2500 LS",
"EXPRESS 2500 LT",
"EXPRESS 3500",
"EXPRESS 3500 CARGO VAN",
"EXPRESS 3500 CARGO VAN EXT",
"EXPRESS 3500 EXT",
"EXPRESS 3500 LS",
"EXPRESS 3500 LS EXT",
"EXPRESS 3500 LT",
"EXPRESS 3500 LT EXT",
"G3500 EXPRESS CUTAWAY",
"SAVANA 1500 CARGO VAN",
"SAVANA 1500 SL",
"SAVANA 1500 SLE",
"SAVANA 2500",
"2500 SAVANA",
"SAVANA 2500 CARGO VAN",
"SAVANA 2500 CARGO VAN EXT",
"SAVANA 2500 LT",
"SAVANA 2500 SLE",
"SAVANA 3500",
"SAVANA 3500 CARGO VAN",
"SAVANA 3500 CARGO VAN EXT",
"SAVANA 3500 EXT",
"SAVANA 3500 LT EXT",
"SAVANA 3500 SLE EXT",
"SAVANA G3500 CUTAWAY",
"SAVANA G4500 CUTAWAY",
"EXPRESS 1500 LS CARGO VAN",
"G20 SPORTVAN",
"NV 3500 S V8 CARGO VAN",
"E-150",
"E-250",
"E-350",
"E-450",
"E150",
"E250",
"E350",
"TRANSIT",
"CITY",
"CITY EXPRESS",
"EXPRESS",
"EXPRESS 2500",
"G3500",
"SAVANA",
"SAVANA 1500",
"CHEVY EXPRESS G2500",
"CLUBWAGON E350",
"TRANSIT CONNECT",
"SPRINTER 2500",
"TRANSIT 150",
"ECONOLINE E250",
"TRANSIT 250",
"ECONOLINE E350",
"NV3500 HD",
"TRANSIT 350HD",
"ECONOLINE E150",
"E250 ECONOLINE",
"C/V",
"E350 CHSCAB",
"G1500 CHEVY EXPRESS",
"2500 SPRINTER",
"E150 ECONOLINE",
"350 TRANSIT",
"E450 CUTAWAY",
"PROMASTER 3500",
"CHEVY EXPRESS G3500",
"SAVANA G3500",
"1500 PROMASTER",
"2500 EXPRESS",
"3500 EXPRESS",
"3500 SPRINTER"
]

View File

@@ -0,0 +1,33 @@
[
"GRAND CARAVAN",
"GRANDCARAVAN",
"GRAND CARAVAN CREW",
"GRAND CARAVAN CV",
"GRAND CARAVAN CVP",
"GRAND CARAVAN SE",
"GRAND CARAVAN SXT",
"CARAVAN CV",
"SIENNA CE V6",
"SIENNA LE V6",
"SIENNA XLE V6",
"SIENNA",
"ODYSSEY",
"SEDONA",
"PACIFICA (NEW)",
"QUEST",
"CARAVAN",
"MONTANA SV6",
"FREESTAR",
"UPLANDER",
"MONTANA",
"VOYAGER",
"ENTOURAGE",
"PACIFICA",
"CARNIVAL",
"VENTURE",
"SAFARI",
"VANAGON",
"WINDSTAR",
"TOWN&COUNTRY",
"ROUTAN"
]

View File

@@ -0,0 +1,485 @@
[
"EDGE SEL",
"ESCAPE",
"ESCAPE SE",
"ESCAPE SEL",
"ESCAPE XLT V6",
"EXPEDITION",
"EXPEDITION LIMITED",
"EXPEDITION MAX",
"EXPEDITION MAX LIMITED",
"EXPLORER",
"EXCURSION",
"EXPLORER LIMITED",
"EXPLORER PLATINUM ECOBOOST",
"EXPLORER XLT",
"FLEX",
"FLEX SE",
"ECOSPORT",
"ESCAPE HYBRID",
"MUSTANG MACH-E",
"BRONCO",
"BRONCO SPORT",
"TRAILBLAZER",
"BLAZER LT",
"CHEROKEE",
"CHEROKEE CLASSIC",
"CHEROKEE COUNTRY",
"CHEROKEE LIMITED",
"CHEROKEE NORTH",
"CHEROKEE OVERLAND",
"CHEROKEE SPORT",
"CHEROKEE TRAILHAWK",
"CJ",
"CJ7",
"CJ7 RENEGADE",
"COMMANDER",
"COMMANDER LIMITED",
"COMMANDER SPORT",
"COMPASS",
"COMPASS HIGH ALTITUDE",
"COMPASS LATITUDE",
"COMPASS LIMITED",
"COMPASS NORTH",
"COMPASS SPORT",
"COMPASS TRAILHAWK",
"GLADIATOR OVERLAND",
"GLADIATOR RUBICON",
"GRAND CHEROKEE LAREDO",
"GRAND CHEROKEE LIMITED",
"GRAND CHEROKEE OVERLAND",
"GRAND CHEROKEE SE",
"GRAND CHEROKEE SRT",
"GRAND CHEROKEE SRT8",
"GRAND CHEROKEE SUMMIT",
"GRAND CHEROKEE TRACKHAWK",
"GRAND CHEROKEE TRAILHAWK",
"GRAND CHEROKEE",
"GRANDCHEROKEE",
"LIBERTY LIMITED",
"LIBERTY RENEGADE",
"LIBERTY SPORT",
"LIBERTY",
"PATRIOT",
"PATRIOT HIGH ALTITUDE",
"PATRIOT LATITUDE",
"PATRIOT LIMITED",
"PATRIOT NORTH",
"PATRIOT SPORT",
"RENEGADE LIMITED",
"RENEGADE NORTH",
"RENEGADE SPORT",
"RENEGADE TRAILHAWK",
"TJ",
"TJ RUBICON",
"TJ SAHARA",
"TJ SPORT",
"TJ UNLIMITED",
"WRANGLER",
"WRANGLER RUBICON",
"WRANGLER SAHARA",
"WRANGLER SPORT",
"WRANGLER UNLIMITED",
"WRANGLER UNLIMITED 70TH ANNIV",
"WRANGLER UNLIMITED RUBICON",
"WRANGLER UNLIMITED SAHARA",
"WRANGLER UNLIMITED SPORT",
"WRANGLER UNLIMITED X",
"WRANGLER X",
"YJ WRANGLER",
"AVIATOR",
"AVIATOR RESERVE",
"MKC",
"MKC RESERVE",
"MKC SELECT",
"MKT",
"MKT ECOBOOST",
"MKX",
"MKX RESERVE",
"NAUTILUS RESERVE",
"NAUTILUS RESERVE V6",
"NAVIGATOR",
"NAVIGATOR L",
"NAVIGATOR L RESERVE",
"NAVIGATOR L SELECT",
"NAVIGATOR RESERVE",
"PILOT",
"PILOT BLACK EDITION",
"PILOT ELITE",
"PILOT EX",
"PILOT EX-L",
"PILOT GRANITE",
"PILOT LX",
"PILOT SE",
"PILOT SE-L",
"PILOT TOURING",
"DURANGO R/T",
"DURANGO SLT PLUS",
"DURANGO SRT",
"DURANGO",
"JOURNEY",
"JOURNEY CROSSROAD",
"JOURNEY CVP",
"JOURNEY LIMITED",
"JOURNEY R/T",
"JOURNEY SXT",
"NITRO SE",
"NITRO",
"K1500 SUBURBAN",
"SUBURBAN 1500 LT",
"SUBURBAN 1500 LTZ",
"SUBURBAN 1500 PREMIER",
"SUBURBAN 2500 LS",
"TAHOE LT",
"TRAVERSE LS",
"TRAVERSE LT",
"TRAVERSE PREMIER",
"TRAX LT",
"TRAX PREMIER",
"UPLANDER LT EXT",
"SUBURBAN",
"TAHOE",
"TRAVERSE",
"TRAX",
"UPLANDER",
"YUKON",
"YUKON DENALI",
"YUKON XL",
"YUKON XL DENALI",
"EQUINOX LS",
"EQUINOX LT",
"EQUINOX PREMIER",
"EQUINOX",
"RAV4 LE",
"RAV4 XLE",
"HIGHLANDER SPORT V6",
"4RUNNER SR5 V6",
"RAV4",
"RAV4 HYBRID",
"RAV4 XLE HYBRID",
"HIGHLANDER",
"4RUNNER",
"SEQUOIA",
"PATHFINDER SE",
"PATHFINDER SL",
"PATHFINDER",
"MURANO PLATINUM",
"MURANO SV",
"MURANO",
"TUCSON",
"TERRAIN",
"SORENTO",
"EDGE",
"KICKS",
"QASHQAI",
"SANTA FE",
"ARMADA",
"TELLURIDE",
"PALISADE",
"SELTOS",
"TORRENT",
"C-HR",
"SPORTAGE",
"VENZA",
"ACADIA",
"CR-V",
"HR-V",
"CX-5",
"CX-50",
"CX-7",
"CX-9",
"CX-3",
"Q3",
"Q5",
"Q7",
"Q8",
"JUKE SV",
"JUKE",
"ROGUE",
"ROGUE SV",
"XTERRA",
"COROLLA CROSS",
"ACADIA DENALI",
"TAURUS X",
"MACAN",
"FJ CRUISER",
"BRONCO SPORT BADLANDS",
"ESCALADE",
"RX 350",
"KONA",
"MDX",
"RDX",
"COOPER COUNTRYMAN",
"V70",
"OUTLANDER",
"RIO5",
"GLC300 COUPE",
"ENCORE",
"SRX",
"SANTA FE SPORT",
"NX 300",
"WRANGLER UNLIMITE",
"WRANGLER JK UNLIM",
"RANGEROVER EVOQUE",
"CROSSTREK",
"FORESTER",
"TIGUAN",
"XV CROSSTREK",
"ENDEAVOR",
"RX 330",
"ATLAS",
"XC90",
"TOUAREG",
"STELVIO",
"RANGE ROVER SPORT",
"GLE350D",
"EX35",
"RVR",
"MONTERO",
"X-TRAIL",
"GRAND VITARA",
"TRIBUTE",
"X3",
"XC60",
"GLK250 BLUETEC",
"ENVOY",
"ML350 BLUETEC",
"ENVISION",
"FX35",
"X1",
"VENUE",
"TAOS",
"KONA ELECTRIC",
"OUTLANDER PHEV",
"PASSPORT",
"H3",
"EXPLORERSPORTTRAC",
"F-PACE",
"ML320 BLUETEC",
"REGAL SPORTBACK",
"DISCOVERY SPORT",
"RENDEZVOUS",
"XC70",
"COMPASS (NEW)",
"CUBE",
"V60 CROSS COUNTRY",
"QX70",
"X6",
"ELEMENT",
"RX 400H",
"VUE",
"RANGE ROVER VELAR",
"E-PACE",
"RAV4 PRIME",
"LX 570",
"GX 470",
"EX37",
"GLE43",
"NAUTILUS",
"XT6",
"RX 450H",
"ESCALADE ESV",
"OUTLOOK",
"CAYENNE",
"XC90 PLUG-IN",
"MODEL X",
"MODEL Y",
"GLC300",
"SANTA FE HYBRID",
"G63",
"XV CROSSTREK HYBR",
"JX35",
"JIMMY",
"TUCSON HYBRID",
"XC40 ELECTRIC",
"RX 300",
"ML320",
"WRANGLER JK UNLIMITED",
"POLICE INTERCEPTOR UTILITY",
"WRANGLER JK",
"TRIBECA",
"E-TRON SPORTBACK",
"500X",
"RX 350H",
"GL350 BLUETEC",
"WRANGLER UNLIMITED 4XE",
"GV80",
"GL550",
"Q5 E",
"H2 SUV",
"Q5 HYBRID",
"IONIQ 5",
"SQ5 SPORTBACK",
"LEVANTE",
"TONALE",
"GLE43 COUPE",
"GRAND CHEROKEE WK",
"DEFENDER",
"NX 450H+",
"ML400",
"LX 600",
"RX 450HL",
"SORENTO HYBRID",
"NX 350",
"TRACKER",
"GLE450",
"Q5 SPORTBACK",
"CR-V HYBRID",
"LX 470",
"EQS580 SUV",
"H2",
"EV9",
"SORENTO PLUG-IN",
"LYRIQ",
"GLE550",
"RX 500H",
"X1 SAV",
"E-TRON S SPORTBACK",
"ML500",
"GRAND HIGHLANDER HYBRID",
"RS Q8",
"GLS550",
"GLS580",
"IX",
"CAYENNE COUPE",
"SOLTERRA",
"PATHFINDER HYBRID",
"Q8 E-TRON",
"TX 350",
"TX 500H",
"EQUINOX EV",
"NAUTILUS HYBRID",
"TRAVERSE LIMITED",
"CX-70",
"SANTA FE XL",
"RENEGADE",
"QX50",
"ECLIPSE CROSS",
"QX80",
"X5",
"X3",
"X1",
"X4",
"ENCLAVE",
"ENCORE GX",
"CAYENNE HYBRID",
"SOUL",
"GX 460",
"UX 250H",
"XT5",
"GLE53",
"XT4",
"SQ7",
"NX 350H",
"GLK350",
"GLE350",
"NX 300H",
"NX 200T",
"RANGE ROVER EVOQUE",
"GLS450",
"TERRAIN DENALI",
"GRAND CHEROKEE L",
"GLE400",
"TUCSON PLUG-IN",
"BLAZER",
"ASCENT",
"HIGHLANDER HYBRID",
"ATLAS CROSS SPORT",
"XC40",
"VENZA HYBRID",
"GLA45",
"GLB250",
"GRAND HIGHLANDER",
"GV70",
"NIRO",
"NIRO EV",
"GLA250",
"ESCAPE PLUG-IN",
"WAGONEER",
"CX-30",
"QX60",
"GRAND CHEROKEE 4XE",
"SPORTAGE HYBRID",
"EV6",
"TONALE PLUG-IN",
"GLC43 COUPE",
"X2",
"RX 350L",
"HORNET",
"ENVISTA",
"LEVANTE S",
"SPORTAGE PLUG-IN",
"ORLANDO",
"X5 M",
"EXPLORER HYBRID",
"FREESTYLE",
"CORSAIR",
"K1500 YUKON XL",
"RANGE ROVER",
"SUV W/O LABOR",
"ID.4",
"CX-90",
"X7",
"CORSAIR PLUG-IN",
"ESCALADE EXT",
"QX55",
"DISCOVERY",
"BOLT EUV",
"C40 ELECTRIC",
"LR4",
"GRAND WAGONEER",
"XC60 PLUG-IN",
"LR2",
"EQE350 SUV",
"COROLLA CROSS HYBRID",
"SOUL EV",
"GRECALE",
"SUV W/O LABOR",
"QX30",
"SQ5",
"NIRO PLUG-IN",
"BORREGO",
"CX-90 PLUG-IN",
"XL-7",
"SUV W/O LABOR",
"SUV W/O LABOR",
"I-PACE",
"HORNET PLUG-IN",
"UX 300H",
"ML320 CDI",
"VERACRUZ",
"SQ8",
"GLE53 COUPE",
"ZDX",
"9-7X",
"ARIYA",
"ASPEN",
"AVIATOR PLUG-IN",
"B9 TRIBECA",
"BRAVADA",
"ENVOY XL",
"EQB350",
"EQB350 SUV",
"ESCALADE-V",
"E-TRON",
"FX37",
"GL320 CDI",
"GLADIATOR",
"GLC43",
"GLE450 COUPE",
"GLE63",
"GV60",
"MKT TOWN CAR",
"ML350",
"ML550",
"ML63",
"NX 250",
"Q4 E-TRON",
"Q8 E-TRON SPORTBACK",
"QX4",
"QX56",
"SANTA FE PLUG-IN",
"UX 200",
"WAGONEER L",
"XB"
]

View File

@@ -0,0 +1,567 @@
[
"MARK LT",
"F-150",
"F-250",
"F-350",
"F-450",
"F-550",
"F-650",
"F100 PICKUP",
"F150 FX2 SUPERCAB",
"F150 FX4 PICKUP",
"F150 FX4 SUPERCAB",
"F150 FX4 SUPERCREW",
"F150 HARLEY DAVIDSON SUPERCAB",
"F150 HARLEY DAVIDSON SUPERCREW",
"F150 KING RANCH SUPERCREW",
"F150 LARIAT FX4 SUPERCREW",
"F150 LARIAT HARLEY DAVIDSON SC",
"F150 LARIAT KING RANCH SUPCREW",
"F150 LARIAT LIMITED SUPERCREW",
"F150 LARIAT PICKUP",
"F150 LARIAT SUPERCAB",
"F150 LARIAT SUPERCAB (AMALGAM)",
"F150 LARIAT SUPERCREW",
"F150 LARIAT SUPERCREW (AMALGA)",
"F150 LIMITED SUPERCREW",
"F150 PICKUP",
"F150 PLATINUM SUPERCREW",
"F150 RAPTOR SUPERCAB",
"F150 RAPTOR SUPERCREW",
"F150 STX PICKUP",
"F150 STX SUPERCAB",
"F150 SUPERCAB",
"F150 SUPERCREW",
"F150 SUPERCREW (AMALGAMATED)",
"F150 SVT RAPTOR SUPERCAB",
"F150 XL PICKUP",
"F150 XL SUPERCAB",
"F150 XL SUPERCREW",
"F150 XLT LARIAT SUPERCAB",
"F150 XLT PICKUP",
"F150 XLT SUPERCAB",
"F150 XLT SUPERCREW",
"F150 XLT SUPERCREW (AMALGAMAT)",
"F150 XTR SUPERCAB",
"F250 PICKUP",
"F250 SD CREW CAB",
"F250 SD FX4 CREW CAB",
"F250 SD FX4 SUPERCAB",
"F250 SD KING RANCH CREW CAB",
"F250 SD LARIAT CREW CAB",
"F250 SD LARIAT CREW CAB (AMAL)",
"F250 SD LARIAT PICKUP",
"F250 SD LARIAT SUPERCAB",
"F250 SD LIMITED CREW CAB",
"F250 SD PLATINUM CREW CAB",
"F250 SD SUPERCAB",
"F250 SD XL CREW CAB",
"F250 SD XL PICKUP",
"F250 SD XL SUPERCAB",
"F250 SD XLT CREW CAB",
"F250 SD XLT PICKUP",
"F250 SD XLT SUPERCAB",
"F250 SUPERCAB",
"F250 XL CREW CAB",
"F350 CREW CAB",
"F350 PICKUP",
"F350 PICKUP 2WD",
"F350 SD CABELAS CREW CAB",
"F350 SD CREW CAB",
"F350 SD FX4 CREW CAB",
"F350 SD FX4 SUPERCAB",
"F350 SD HARLEY DAVIDSON",
"F350 SD KING RANCH CREW CAB",
"F350 SD LARIAT CREW CAB",
"F350 SD LARIAT CREW CAB (AMAL)",
"F350 SD LARIAT KING RANCH",
"F350 SD LARIAT SUPERCAB",
"F350 SD LIMITED CREW CAB",
"F350 SD PICKUP",
"F350 SD PLATINUM CREW CAB",
"F350 SD SUPERCAB",
"F350 SD XL CREW CAB",
"F350 SD XL PICKUP",
"F350 SD XL SUPERCAB",
"F350 SD XLT CREW CAB",
"F350 SD XLT SUPERCAB",
"F350 SUPER DUTY",
"F350 SUPER DUTY XL",
"F350 XL PICKUP",
"F450",
"F450 Pickup",
"F450 SD KING RANCH CREW CAB",
"F450 SD LARIAT CREW CAB",
"F450 SD PICKUP",
"F450 SD PLATINUM CREW CAB",
"F450 SD XL",
"F450 SD XL CREW CAB",
"F450 SD XL PICKUP",
"F450 SD XLT CREW CAB",
"F450 SUPER DUTY XLT",
"F550",
"F550 SD",
"F550 SD XL",
"F550 SD XL PICKUP",
"F550 SD XLT CREW CAB",
"F550 SD XLT SUPERCAB",
"F550 SUPER DUTY",
"F550 SUPER DUTY XL",
"F550 SUPER DUTY XLT",
"F550 SUPER DUTY XLT CREW CAB",
"F550 XL",
"F650 SD XLT SUPERCAB",
"F68",
"F750 XL",
"RANGER",
"RANGER EDGE SUPERCAB",
"RANGER FX4 SUPERCAB",
"RANGER LARIAT SUPERCREW",
"RANGER SPORT SUPERCAB",
"RANGER STX SUPERCAB",
"RANGER SUPERCAB",
"RANGER XL",
"RANGER XL SUPERCAB",
"RANGER XLT",
"RANGER XLT SUPERCAB",
"RANGER XLT SUPERCREW",
"FRONTIER LE CREW CAB V6",
"FRONTIER NISMO CREW CAB V6",
"FRONTIER NISMO KING CAB V6",
"FRONTIER PRO-4X CREW CAB V6",
"FRONTIER PRO-4X KING CAB V6",
"FRONTIER S KING CAB",
"FRONTIER SC CREW CAB V6",
"FRONTIER SC V6",
"FRONTIER SE CREW CAB V6",
"FRONTIER SE KING CAB V6",
"FRONTIER SL CREW CAB V6",
"FRONTIER SV CREW CAB V6",
"FRONTIER SV KING CAB V6",
"FRONTIER XE KING CAB",
"FRONTIER XE KING CAB V6",
"KING CAB",
"TITAN 5.6 LE CREW CAB",
"TITAN 5.6 LE KING CAB",
"TITAN 5.6 MIDNIGHT CREW CAB",
"TITAN 5.6 PLATINUM RESERVE CC",
"TITAN 5.6 PRO-4X CREW CAB",
"TITAN 5.6 PRO-4X KING CAB",
"TITAN 5.6 S CREW CAB",
"TITAN 5.6 SE CREW CAB",
"TITAN 5.6 SE KING CAB",
"TITAN 5.6 SL CREW CAB",
"TITAN 5.6 SV CREW CAB",
"TITAN 5.6 SV KING CAB",
"TITAN 5.6 XE CREW CAB",
"TITAN 5.6 XE KING CAB",
"TITAN XD PLATINUM CREW CAB",
"TITAN XD PRO-4X CREW CAB",
"TITAN XD S CREW CAB",
"TITAN XD SL CREW CAB",
"TITAN XD SV CREW CAB",
"PICKUP SR5",
"TACOMA",
"TACOMA ACCESS CAB",
"TACOMA DOUBLE CAB V6",
"TACOMA LIMITED DOUBLE CAB V6",
"TACOMA PRERUNNER DOUBLE CAB V6",
"TACOMA PRERUNNER V6 ACCESS CAB",
"TACOMA PRERUNNER XTRACAB",
"TACOMA PRERUNNER XTRACAB V6",
"TACOMA SR5 DOUBLE CAB V6",
"TACOMA SR5 V6 ACCESS CAB",
"TACOMA SR5 V6 XTRACAB",
"TACOMA V6 ACCESS CAB",
"TACOMA XTRACAB",
"TACOMA XTRACAB V6",
"TUNDRA ACCESS CAB V8",
"TUNDRA DOUBLE CAB V8",
"TUNDRA LIMITED ACCESS CAB V8",
"TUNDRA LIMITED SR5 DBLCAB V8",
"TUNDRA LIMITED V8",
"TUNDRA LIMITED V8 CREWMAX",
"TUNDRA LIMITED V8 DOUBLE CAB",
"TUNDRA PLATINUM V8 CREWMAX",
"TUNDRA SR DOUBLE CAB V8",
"TUNDRA SR V8",
"TUNDRA SR5 DOUBLE CAB V8",
"TUNDRA SR5 TRD DOUBLE CAB V8",
"TUNDRA SR5 V8 CREWMAX",
"TUNDRA V8",
"TUNDRA V8 CREWMAX",
"XTRACAB LONG BOX",
"AVALANCHE 1500",
"AVALANCHE 1500 LS",
"AVALANCHE 1500 LS Z71",
"AVALANCHE 1500 LT",
"AVALANCHE 1500 LT Z71",
"AVALANCHE 1500 LTZ",
"C/R 10/1500 4+CAB",
"C/R 10/1500 PICKUP",
"C/R 20/2500 4+CAB",
"C/R 20/2500 PICKUP",
"C3500",
"COLORADO",
"COLORADO EXT CAB",
"COLORADO LS",
"COLORADO LS CREW CAB",
"COLORADO LS EXT CAB",
"COLORADO LT",
"COLORADO LT CREW CAB",
"COLORADO LT EXT CAB",
"COLORADO WT CREW CAB",
"COLORADO WT EXT CAB",
"COLORADO Z71 CREW CAB",
"COLORADO Z71 EXT CAB",
"COLORADO ZR2 CREW CAB",
"COLORADO ZR2 EXT CAB",
"HHR LS PANEL",
"K/V 10/1500 4+CAB",
"K/V 10/1500 PICKUP",
"K/V 20/2500 4+CAB",
"K/V 20/2500 PICKUP",
"K/V 30/3500 4+CAB",
"Pickup K3500",
"Pickup Silverado C2500 HD",
"S10 4+CAB",
"S10 LS 4+CAB",
"SILVERADO 1500",
"SILVERADO 1500 CHEYENNE CREW",
"SILVERADO 1500 CREW CAB",
"SILVERADO 1500 CREW CAB (AMAL)",
"SILVERADO 1500 CUST TRAIL DC",
"SILVERADO 1500 CUSTOM CREW CAB",
"SILVERADO 1500 CUSTOM DC",
"SILVERADO 1500 CUSTOM TRAIL CC",
"SILVERADO 1500 DOUBLE (AMALGA)",
"SILVERADO 1500 EXT CAB",
"SILVERADO 1500 HD LS CREW CAB",
"SILVERADO 1500 HD LT CREW CAB",
"SILVERADO 1500 HIGH COUNTRY CC",
"SILVERADO 1500 HYBRID CREW CAB",
"SILVERADO 1500 LS",
"SILVERADO 1500 LS CREW CAB",
"SILVERADO 1500 LS DOUBLE CAB",
"SILVERADO 1500 LS EXT CAB",
"SILVERADO 1500 LT",
"SILVERADO 1500 LT CC (AMALGAM)",
"SILVERADO 1500 LT CREW CAB",
"SILVERADO 1500 LT DOUBLE CAB",
"SILVERADO 1500 LT EXT CAB",
"SILVERADO 1500 LT TRAIL CC",
"SILVERADO 1500 LT TRAIL DC",
"SILVERADO 1500 LTZ CREW CAB",
"SILVERADO 1500 LTZ DOUBLE CAB",
"SILVERADO 1500 LTZ EXT CAB",
"SILVERADO 1500 RST CREW CAB",
"SILVERADO 1500 RST DOUBLE CAB",
"SILVERADO 1500 SS EXT CAB",
"SILVERADO 1500 WT",
"SILVERADO 1500 WT CREW CAB",
"SILVERADO 1500 WT DOUBLE CAB",
"SILVERADO 1500 WT EXT CAB",
"SILVERADO 2500 EXT CAB",
"SILVERADO 2500 HD",
"SILVERADO 2500 HD CREW CAB",
"SILVERADO 2500 HD EXT CAB",
"SILVERADO 2500 HD HC CREW CAB",
"SILVERADO 2500 HD LS CREW CAB",
"SILVERADO 2500 HD LS EXT CAB",
"SILVERADO 2500 HD LT",
"SILVERADO 2500 HD LT CREW CAB",
"SILVERADO 2500 HD LT DBL CAB",
"SILVERADO 2500 HD LT EXT CAB",
"SILVERADO 2500 HD LTZ CREW CAB",
"SILVERADO 2500 HD LTZ DBL CAB",
"SILVERADO 2500 HD LTZ EXT CAB",
"SILVERADO 2500 HD WT",
"SILVERADO 2500 HD WT CREW CAB",
"SILVERADO 2500 HD WT DBL CAB",
"SILVERADO 2500 HD WT EXT CAB",
"SILVERADO 3500",
"SILVERADO 3500 CREW CAB",
"SILVERADO 3500 CREW CAB (AMAL)",
"SILVERADO 3500 EXT CAB",
"SILVERADO 3500 HC CREW CAB",
"SILVERADO 3500 HD (AMALGAMATE)",
"SILVERADO 3500 LS",
"SILVERADO 3500 LS CREW CAB",
"SILVERADO 3500 LS EXT CAB",
"SILVERADO 3500 LT CREW CAB",
"SILVERADO 3500 LT DOUBLE CAB",
"SILVERADO 3500 LT EXT CAB",
"SILVERADO 3500 LTZ CREW CAB",
"SILVERADO 3500 LTZ EXT CAB",
"SILVERADO 3500 WT CREW CAB",
"Silverado 3500HD",
"B250 SPORTSMAN",
"DAKOTA CLUB CAB",
"DAKOTA LARAMIE V8 CLUB CAB",
"DAKOTA LARAMIE V8 QUAD CAB",
"DAKOTA QUAD CAB",
"DAKOTA SLT CREW CAB",
"DAKOTA SLT EXT CAB",
"DAKOTA SLT PLUS QUAD CAB",
"DAKOTA SLT PLUS V8 CLUB CAB",
"DAKOTA SLT PLUS V8 QUAD CAB",
"DAKOTA SLT QUAD CAB",
"DAKOTA SLT V8 CLUB CAB",
"DAKOTA SLT V8 CREW CAB",
"DAKOTA SLT V8 EXT CAB",
"DAKOTA SLT V8 QUAD CAB",
"DAKOTA SPORT V8",
"DAKOTA SPORT V8 CLUB CAB",
"DAKOTA SPORT V8 QUAD CAB",
"DAKOTA ST CLUB CAB",
"DAKOTA ST QUAD CAB",
"DAKOTA ST V8 QUAD CAB",
"DAKOTA SXT CREW CAB",
"DAKOTA SXT EXT CAB",
"DAKOTA SXT V8 CREW CAB",
"DAKOTA SXT V8 EXT CAB",
"DAKOTA V8 CLUB CAB",
"DAKOTA V8 QUAD CAB",
"RAM 1500",
"RAM 1500 BIG HORN CREW CAB",
"RAM 1500 BIG HORN QUAD CAB",
"RAM 1500 CLUB CAB",
"RAM 1500 CREW CAB (AMALGAMATE)",
"RAM 1500 EXPRESS",
"RAM 1500 LARAMIE CREW (AMALGA)",
"RAM 1500 LARAMIE CREW CAB",
"RAM 1500 LARAMIE LONGHORN CREW",
"RAM 1500 LARAMIE MEGA CAB",
"RAM 1500 LARAMIE QUAD CAB",
"RAM 1500 LARAMIE SLT QUAD CAB",
"RAM 1500 LIMITED CREW CAB",
"RAM 1500 LONGHORN CREW CAB",
"RAM 1500 OUTDOORSMAN CREW CAB",
"RAM 1500 OUTDOORSMAN QC (AMAL)",
"RAM 1500 OUTDOORSMAN QUAD CAB",
"RAM 1500 QUAD CAB",
"RAM 1500 R/T",
"RAM 1500 REBEL CREW CAB",
"RAM 1500 REBEL QUAD CAB",
"RAM 1500 SLT",
"RAM 1500 SLT CREW (AMALGAMATE)",
"RAM 1500 SLT CREW CAB",
"RAM 1500 SLT MEGA CAB",
"RAM 1500 SLT QUAD (AMALGAMATE)",
"RAM 1500 SLT QUAD CAB",
"RAM 1500 SPORT",
"RAM 1500 SPORT CLUB CAB",
"RAM 1500 SPORT CREW CAB",
"RAM 1500 SPORT CREW CAB (AMAL)",
"RAM 1500 SPORT QUAD CAB",
"RAM 1500 ST",
"RAM 1500 ST CREW CAB",
"RAM 1500 ST QUAD CAB",
"RAM 1500 SXT CREW CAB",
"RAM 1500 SXT QUAD CAB",
"RAM 1500 TRADESMAN CREW CAB",
"RAM 1500 TRADESMAN QUAD CAB",
"RAM 1500 TRX QUAD CAB",
"RAM 2500",
"RAM 2500 BIG HORN CREW CAB",
"RAM 2500 BIG HORN MEGA CAB",
"RAM 2500 CLUB CAB",
"RAM 2500 LARAMIE CREW CAB",
"RAM 2500 LARAMIE LONGHORN CREW",
"RAM 2500 LARAMIE LONGHORN MEGA",
"RAM 2500 LARAMIE MEGA CAB",
"RAM 2500 LARAMIE QUAD CAB",
"RAM 2500 LARAMIE SLT",
"RAM 2500 LARAMIE SLT QUAD CAB",
"RAM 2500 LIMITED CREW CAB",
"RAM 2500 OUTDOORSMAN CREW CAB",
"RAM 2500 POWER WAGON CREW CAB",
"RAM 2500 QUAD CAB",
"RAM 2500 SLT",
"RAM 2500 SLT CREW CAB",
"RAM 2500 SLT MEGA CAB",
"RAM 2500 SLT QUAD CAB",
"RAM 2500 SLT QUAD CAB (AMALGA)",
"RAM 2500 SPORT QUAD CAB",
"RAM 2500 ST",
"RAM 2500 ST CREW CAB",
"RAM 2500 ST QUAD CAB",
"RAM 2500 SXT QUAD CAB",
"RAM 2500 TRADESMAN",
"RAM 2500 TRADESMAN CREW CAB",
"RAM 2500 TRX CREW CAB",
"RAM 2500 TRX QUAD CAB",
"RAM 3500",
"RAM 3500 4WD",
"RAM 3500 BIG HORN CREW CAB",
"RAM 3500 CREW CAB",
"RAM 3500 CREW CAB (AMALGAMATE)",
"RAM 3500 LARAMIE CREW CAB",
"RAM 3500 LARAMIE LONGHORN CREW",
"RAM 3500 LARAMIE LONGHORN MEGA",
"RAM 3500 LARAMIE MEGA CAB",
"RAM 3500 LARAMIE QUAD CAB",
"RAM 3500 LARAMIE SLT",
"RAM 3500 LARAMIE SLT QUAD CAB",
"RAM 3500 LIMITED MEGA CAB",
"RAM 3500 LONGHORN CREW CAB",
"RAM 3500 QUAD CAB",
"RAM 3500 SLT",
"RAM 3500 SLT CREW CAB",
"RAM 3500 SLT MEGA CAB",
"RAM 3500 SLT QUAD CAB",
"RAM 3500 SPORT QUAD CAB",
"RAM 3500 ST",
"RAM 3500 ST CREW CAB",
"RAM 3500 ST QUAD CAB",
"RAM 3500 TRX QUAD CAB",
"RAM 4500",
"RAM 4500 CREW CAB",
"RAM 5500",
"RAM 5500 CREW CAB",
"W250 TURBO DIESEL",
"C Series 5500",
"C/R 1500 4+CAB",
"C/R 1500 PICKUP",
"C/R 1500 SIERRA SL EXT CAB",
"C/R 3500",
"C/R 3500 PICKUP",
"CANYON ALL TERRAIN CREW CAB",
"CANYON CREW CAB",
"CANYON DENALI CREW CAB",
"CANYON EXT CAB",
"CANYON SL",
"CANYON SL EXT CAB",
"CANYON SLE",
"CANYON SLE CREW CAB",
"CANYON SLE EXT CAB",
"CANYON SLT CREW CAB",
"CANYON SLT CREW CAB (AMALGAMA)",
"K/V 1500 4+CAB",
"K/V 1500 PICKUP",
"K/V 2500 4+CAB",
"K/V 2500 PICKUP",
"K/V 3500 SIERRA SL CREW CAB",
"K/V 3500 SIERRA SLE CREW CAB",
"SIERRA 1500 AT4 CREW CAB",
"SIERRA 1500 AT4 DOUBLE CAB",
"SIERRA 1500 CREW CAB",
"SIERRA 1500 CREW CAB (AMALGAM)",
"SIERRA 1500 DENALI CREW CAB",
"SIERRA 1500 DENALI EXT CAB",
"SIERRA 1500 DOUBLE CAB",
"SIERRA 1500 ELEVATION CREW CAB",
"SIERRA 1500 ELEVATION DC",
"SIERRA 1500 EXT CAB",
"SIERRA 1500 HD CREW CAB",
"SIERRA 1500 HD SLE CREW CAB",
"SIERRA 1500 HD SLT CREW CAB",
"SIERRA 1500 NEVADA EDITION",
"SIERRA 1500 PICKUP",
"SIERRA 1500 SL CREW CAB",
"SIERRA 1500 SL EXT CAB",
"SIERRA 1500 SL PICKUP",
"SIERRA 1500 SLE CREW CAB",
"SIERRA 1500 SLE DC (AMALGAMAT)",
"SIERRA 1500 SLE DOUBLE CAB",
"SIERRA 1500 SLE EXT CAB",
"SIERRA 1500 SLE EXT CAB (AMAL)",
"SIERRA 1500 SLE PICKUP",
"SIERRA 1500 SLT CREW (AMALGAM)",
"SIERRA 1500 SLT CREW CAB",
"SIERRA 1500 SLT DOUBLE CAB",
"SIERRA 1500 SLT EXT CAB",
"SIERRA 1500 WT CREW CAB",
"SIERRA 1500 WT EXT CAB",
"SIERRA 1500 WT PICKUP",
"SIERRA 2500 EXT CAB",
"SIERRA 2500 HD AT4 CREW CAB",
"SIERRA 2500 HD CREW CAB",
"SIERRA 2500 HD DENALI CREW CAB",
"SIERRA 2500 HD DOUBLE CAB",
"SIERRA 2500 HD EXT CAB",
"SIERRA 2500 HD PICKUP",
"SIERRA 2500 HD SL EXT CAB",
"SIERRA 2500 HD SL PICKUP",
"SIERRA 2500 HD SLE CREW CAB",
"SIERRA 2500 HD SLE DOUBLE CAB",
"SIERRA 2500 HD SLE EXT CAB",
"SIERRA 2500 HD SLE PICKUP",
"SIERRA 2500 HD SLT CREW CAB",
"SIERRA 2500 HD SLT DOUBLE CAB",
"SIERRA 2500 HD SLT EXT CAB",
"SIERRA 2500 HD WT CREW CAB",
"SIERRA 2500 HD WT DOUBLE CAB",
"SIERRA 2500 HD WT EXT CAB",
"SIERRA 2500 HD WT PICKUP",
"SIERRA 2500 SLE EXT CAB",
"SIERRA 3500 AT4 CREW CAB",
"SIERRA 3500 CREW CAB",
"SIERRA 3500 DENALI CREW CAB",
"SIERRA 3500 EXT CAB",
"SIERRA 3500 PICKUP",
"SIERRA 3500 SL CREW CAB",
"SIERRA 3500 SLE",
"SIERRA 3500 SLE CREW CAB",
"SIERRA 3500 SLE EXT CAB",
"SIERRA 3500 SLT CREW CAB",
"SIERRA 3500 WT CREW CAB",
"SONOMA",
"SONOMA CREW CAB",
"SONOMA EXT CAB",
"1500",
"1500 Classic",
"Pickup 1500",
"Pickup 3500",
"ProMaster 1500",
"RIDGELINE",
"RIDGELINE BLACK EDITION",
"RIDGELINE DX",
"RIDGELINE EX-L",
"RIDGELINE LX",
"RIDGELINE RT",
"RIDGELINE RTL",
"RIDGELINE RTS",
"RIDGELINE RTX",
"RIDGELINE SE",
"RIDGELINE SPORT",
"RIDGELINE TOURING",
"RIDGELINE VP",
"TITAN",
"TACOMA",
"TUNDRA",
"AVALANCE",
"COLORADO",
"SILVERADO",
"SILVERADO 1500",
"SILVERADO 2500",
"SILVERADO 3500",
"DAKOTA",
"RAM 1500",
"RAM 2500",
"RAM 3500",
"RAM 4500",
"RAM 5500",
"CANYON",
"SIERRA 1500",
"SIERRA 2500",
"SIERRA 3500",
"SONOMA",
"1500"
]

View File

@@ -0,0 +1,39 @@
const logger = require("../../utils/logger");
const TrucksList = require("./trucks.json");
const CargoVanList = require("./cargovans.json");
const PassengerVanList = require("./passengervans.json");
const SuvList = require("./suvs.json");
// Express handler: classifies a vehicle model name into a broad body-type code.
// Expects { model: string } in the request body. Responds with
// { success: true, type, match } on success, 400 when the model is missing or
// blank (including a missing request body), and 500 with logged details on
// unexpected errors.
const vehicletype = async (req, res) => {
  try {
    // Optional chaining keeps a missing/empty body a 400, not a thrown 500.
    const model = req.body?.model?.trim();
    if (!model) {
      res.status(400).json({ success: false, error: "Please provide a model" });
    } else {
      const type = getVehicleType(model);
      res.status(200).json({ success: true, ...type });
    }
  } catch (error) {
    // Log with the requesting user's email (when available) so failures can
    // be traced back to a caller.
    logger.log("vehicletype-error", "ERROR", req?.user?.email, null, {
      error: error.message,
      stack: error.stack
    });
    res.status(500).json({ error: error.message, stack: error.stack });
  }
};
/**
 * Maps a model name to a coarse vehicle-type code using the bundled lists.
 * Lookup is case-insensitive; list precedence is trucks, passenger vans,
 * SUVs, then cargo vans (preserving the original check order).
 *
 * @param {string} model - Vehicle model name (caller is expected to trim it).
 * @returns {{ type: string, match: boolean }} Type code ("TK", "PC", "SUV",
 *   or "VN") and whether the model was found in any list; unmatched models
 *   fall back to { type: "PC", match: false }.
 */
function getVehicleType(model) {
  // Normalize once instead of re-uppercasing for every list membership check.
  const normalized = model.toUpperCase();
  if (TrucksList.includes(normalized)) return { type: "TK", match: true };
  // Passenger vans intentionally map to "PC" here — presumably classified as
  // passenger vehicles downstream; confirm with the consumer of this API.
  if (PassengerVanList.includes(normalized)) return { type: "PC", match: true };
  if (SuvList.includes(normalized)) return { type: "SUV", match: true };
  if (CargoVanList.includes(normalized)) return { type: "VN", match: true };
  return { type: "PC", match: false };
}
exports.default = vehicletype;

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const axios = require("axios");
const InstanceManager = require("../utils/instanceMgr").default;
const logger = require("../utils/logger");
@@ -101,7 +97,10 @@ const sendWelcomeEmail = async ({ to, resetLink, dateLine, features, bcc }) => {
imex: "Welcome to the ImEX Online platform.",
rome: "Welcome to the Rome Online platform."
}),
subHeader: `Your ${InstanceManager({imex: features?.allAccess ? "ImEX Online": "ImEX Lite", rome: features?.allAccess ? "RO Manager" : "RO Basic"})} shop setup has been completed, and this email will include all the information you need to begin.`,
subHeader: `Your ${InstanceManager({
imex: features?.allAccess ? "ImEX Online" : "ImEX Lite",
rome: features?.allAccess ? "RO Manager" : "RO Basic"
})} shop setup has been completed, and this email will include all the information you need to begin.`,
body: `
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To finish setting up your account, visit this link and enter your desired password. <a href=${resetLink} style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">Reset Password</a></p>
</td></tr>
@@ -110,12 +109,25 @@ const sendWelcomeEmail = async ({ to, resetLink, dateLine, features, bcc }) => {
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To access your ${InstanceManager({imex: features.allAccess ? "ImEX Online": "ImEX Lite", rome: features.allAccess ? "RO Manager" : "RO Basic"})} shop, visit <a href=${InstanceManager({imex: "https://imex.online/", rome: "https://romeonline.io/"})} style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager({imex: "imex.online", rome: "romeonline.io"})}</a>. Your username is your email, and your password is what you previously set up. Contact support for additional logins.</p>
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">To access your ${InstanceManager(
{
imex: features.allAccess ? "ImEX Online" : "ImEX Lite",
rome: features.allAccess ? "RO Manager" : "RO Basic"
}
)} shop, visit <a href=${InstanceManager({
imex: "https://imex.online/",
rome: "https://romeonline.io/"
})} style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager(
{
imex: "imex.online",
rome: "romeonline.io"
}
)}</a>. Your username is your email, and your password is what you previously set up. Contact support for additional logins.</p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
${InstanceManager({
rome: `
rome: `
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
@@ -161,7 +173,12 @@ const sendWelcomeEmail = async ({ to, resetLink, dateLine, features, bcc }) => {
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 16px; width: 734px; padding-left: 8px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">In addition to the training tour, you can also book a live one-on-one demo to see exactly how our system can help streamline the repair process at your shop, schedule by clicking this link - <a href="https://outlook.office.com/bookwithme/user/0aa3ae2c6d59497d9f93fb72479848dc@imexsystems.ca/meetingtype/Qy7CsXl5MkuUJ0NRD7B1AA2?anonymous&ep=mlink" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager({imex: "ImEX Lite", rome: "Rome Basic"})} Demo Booking</a></p>
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">In addition to the training tour, you can also book a live one-on-one demo to see exactly how our system can help streamline the repair process at your shop, schedule by clicking this link - <a href="https://outlook.office.com/bookwithme/user/0aa3ae2c6d59497d9f93fb72479848dc@imexsystems.ca/meetingtype/Qy7CsXl5MkuUJ0NRD7B1AA2?anonymous&ep=mlink" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${InstanceManager(
{
imex: "ImEX Lite",
rome: "Rome Basic"
}
)} Demo Booking</a></p>
</td></tr>
</tbody></table></th>
</tr></tbody></table>
@@ -175,7 +192,12 @@ const sendWelcomeEmail = async ({ to, resetLink, dateLine, features, bcc }) => {
<table class="row" style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; padding: 0; width: 100%; position: relative; display: table;"><tbody style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; display: table-row-group;"><tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;">
<th class="small-12 large-12 columns first last" style="word-wrap: break-word; -webkit-hyphens: auto; -moz-hyphens: auto; hyphens: auto; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; line-height: 1.2; margin: 0 auto; Margin: 0 auto; padding-bottom: 8px; width: 734px; padding-left: 0px; padding-right: 8px; border-collapse: collapse;"><table style="border-spacing: 0; border-collapse: collapse; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; width: 100%;">
<tr style="padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; vertical-align: top; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif;"><td style="word-wrap: break-word; vertical-align: top; color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; margin: 0; Margin: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 15px; word-break: keep-all; -moz-hyphens: none; -ms-hyphens: none; -webkit-hyphens: none; hyphens: none; line-height: 1.2; border-collapse: collapse;">
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">The ${InstanceManager({imex: "ImEX Online", rome: "Rome Online"})} Team</p>
<p style="color: #0a0a0a; font-weight: normal; padding-top: 0; padding-right: 0; padding-bottom: 0; padding-left: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; margin: 0 0 0 0px; Margin: 0 0 0 0px; line-height: 1.2; margin-bottom: 0px; Margin-bottom: 0px; font-size: 90%;">The ${InstanceManager(
{
imex: "ImEX Online",
rome: "Rome Online"
}
)} Team</p>
`,
dateLine
})

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const InstanceManager = require("../utils/instanceMgr").default;
const logger = require("../utils/logger");
const client = require("../graphql-client/graphql-client").client;
@@ -23,7 +19,7 @@ const tasksEmailQueueCleanup = async () => {
// Example async operation
// console.log("Performing Tasks Email Reminder process cleanup...");
await new Promise((resolve) => tasksEmailQueue.destroy(() => resolve()));
// eslint-disable-next-line no-unused-vars
// eslint-disable-next-line no-unused-vars
} catch (err) {
// console.error("Tasks Email Reminder process cleanup failed:", err);
}
@@ -264,7 +260,7 @@ const tasksRemindEmail = async (req, res) => {
<li style="font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%;">
<a href="${InstanceEndpoints()}/manage/tasks/alltasks?taskid=${task.id}" style="color: #2199e8; text-decoration: none; font-weight: normal; padding: 0; text-align: left; font-family: 'Montserrat', 'Montserrat Alternates', sans-serif; font-size: 90%; line-height: 1.2;">${task.title} - Priority: ${formatTaskPriority(task.priority)} ${task.due_date ? `${formatDate(task.due_date)}` : ""} | Bodyshop: ${task.bodyshop.shopname}</a>
</li>
`.trim()
`.trim()
)
.join("")}
</ul>`

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const Queue = require("better-queue");
const moment = require("moment");
const { client } = require("../graphql-client/graphql-client");

View File

@@ -1854,7 +1854,7 @@ exports.GET_CHATTER_SHOPS = `query GET_CHATTER_SHOPS {
}`;
exports.GET_CARFAX_SHOPS = `query GET_CARFAX_SHOPS {
bodyshops{
bodyshops(where: {external_shop_id: {_is_null: true}}){
id
shopname
imexshopid

View File

@@ -35,6 +35,11 @@
"headerMargin": "135"
},
"md_ro_statuses": {
"parts_statuses": [
"Open",
"In Progress",
"Completed"
],
"statuses": [
"Open",
"Scheduled",
@@ -54,6 +59,10 @@
"Void"
],
"default_void": "Void",
"parts_active_statuses": [
"Open",
"In Progress"
],
"active_statuses": [
"Open",
"Scheduled",
@@ -766,6 +775,7 @@
"csi:page": 11,
"jobs:void": 80,
"shop:rbac": 99,
"shop:responsibilitycenter": 99,
"bills:list": 11,
"bills:view": 11,
"csi:export": 11,

View File

@@ -0,0 +1,72 @@
const KNOWN_PART_RATE_TYPES = [
  "PAA",
  "PAC",
  "PAG",
  "PAL",
  "PAM",
  "PAN",
  "PAO",
  "PAP",
  "PAR",
  "PAS",
  "PASL",
  "CCC",
  "CCD",
  "CCF",
  "CCM",
  "CCDR"
];
/**
 * Extracts and processes parts tax rates from profile info.
 *
 * Walks every RateInfo entry on the profile, keeps only rate types listed in
 * KNOWN_PART_RATE_TYPES, and maps each to a parts-rate record whose
 * `prt_tax_rt` holds the percentage as a fraction (e.g. 5 -> 0.05).
 * Entries with no parseable percentage (neither TaxInfo.TaxTierInfo.Percentage
 * nor RateTierInfo.Rate) are skipped.
 *
 * @param {object} profile - The ProfileInfo object from XML.
 * @returns {object} The parts tax rates object, keyed by upper-cased rate type.
 */
//TODO: Major validation would be required on this - EMS files are inconsistent with things like 5% being passed as 5.0 or .05.
const extractPartsTaxRates = (profile = {}) => {
  // RateInfo may arrive as a single object or an array; normalize to an array.
  const rateInfos = Array.isArray(profile.RateInfo) ? profile.RateInfo : [profile.RateInfo || {}];
  const partsTaxRates = {};
  /**
   * In this context, r.RateType._ accesses the property named _ on the RateType object.
   * This pattern is common when handling data parsed from XML, where element values are stored under the _ key. So,
   * _ aligns to the actual value/content of the RateType field when RateType is an object (not a string).
   */
  for (const r of rateInfos) {
    // Bug fix: use optional chaining on RateType itself. `typeof null === "object"`,
    // so a null RateType previously fell into the object branch and `r.RateType._`
    // threw a TypeError; `r?.RateType?._` safely yields undefined instead.
    const rateTypeRaw =
      typeof r?.RateType === "string"
        ? r.RateType
        : typeof r?.RateType === "object" && r?.RateType?._
          ? r.RateType._
          : "";
    const rateType = (rateTypeRaw || "").toUpperCase();
    if (!KNOWN_PART_RATE_TYPES.includes(rateType)) continue;
    // Prefer the explicit tax-tier percentage; fall back to the first rate tier's Rate.
    const taxInfo = r.TaxInfo;
    const taxTier = taxInfo?.TaxTierInfo;
    let percentage = parseFloat(taxTier?.Percentage ?? "NaN");
    if (isNaN(percentage)) {
      const tierRate = Array.isArray(r.RateTierInfo) ? r.RateTierInfo[0]?.Rate : r.RateTierInfo?.Rate;
      percentage = parseFloat(tierRate ?? "NaN");
    }
    if (!isNaN(percentage)) {
      partsTaxRates[rateType] = {
        prt_discp: 0,
        prt_mktyp: false,
        prt_mkupp: 0,
        prt_tax_in: true,
        // Percentage is assumed to be on a 0-100 scale; see TODO above re: EMS inconsistency.
        prt_tax_rt: percentage / 100
      };
    }
  }
  return partsTaxRates;
};
// Expose both the extractor and the recognized rate-type list so callers
// can validate rate types without re-declaring them.
module.exports = {
  extractPartsTaxRates,
  KNOWN_PART_RATE_TYPES
};

View File

@@ -0,0 +1,197 @@
{
"OP0": {
"desc": "REMOVE / REPLACE PARTIAL",
"opcode": "OP11",
"partcode": "PAA"
},
"OP1": {
"desc": "REFINISH / REPAIR",
"opcode": "OP1",
"partcode": "PAE"
},
"OP2": {
"desc": "REMOVE / INSTALL",
"opcode": "OP2",
"partcode": "PAE"
},
"OP3": {
"desc": "ADDITIONAL LABOR",
"opcode": "OP9",
"partcode": "PAE"
},
"OP4": {
"desc": "ALIGNMENT",
"opcode": "OP4",
"partcode": "PAS"
},
"OP5": {
"desc": "OVERHAUL",
"opcode": "OP5",
"partcode": "PAE"
},
"OP6": {
"desc": "REFINISH",
"opcode": "OP6",
"partcode": "PAE"
},
"OP7": {
"desc": "INSPECT",
"opcode": "OP7",
"partcode": "PAE"
},
"OP8": {
"desc": "CHECK / ADJUST",
"opcode": "OP8",
"partcode": "PAE"
},
"OP9": {
"desc": "REPAIR",
"opcode": "OP9",
"partcode": "PAE"
},
"OP10": {
"desc": "REPAIR , PARTIAL",
"opcode": "OP9",
"partcode": "PAE"
},
"OP11": {
"desc": "REMOVE / REPLACE",
"opcode": "OP11",
"partcode": "PAN"
},
"OP12": {
"desc": "REMOVE / REPLACE PARTIAL",
"opcode": "OP11",
"partcode": "PAN"
},
"OP13": {
"desc": "ADDITIONAL COSTS",
"opcode": "OP13",
"partcode": "PAE"
},
"OP14": {
"desc": "ADDITIONAL OPERATIONS",
"opcode": "OP14",
"partcode": "PAE"
},
"OP15": {
"desc": "BLEND",
"opcode": "OP15",
"partcode": "PAE"
},
"OP16": {
"desc": "SUBLET",
"opcode": "OP16",
"partcode": "PAS"
},
"OP17": {
"desc": "POLICY LIMIT ADJUSTMENT",
"opcode": "OP9",
"partcode": "PAE"
},
"OP18": {
"desc": "APPEAR ALLOWANCE",
"opcode": "OP7",
"partcode": "PAE"
},
"OP20": {
"desc": "REMOVE AND REINSTALL",
"opcode": "OP20",
"partcode": "PAE"
},
"OP24": {
"desc": "CHIPGUARD",
"opcode": "OP6",
"partcode": "PAE"
},
"OP25": {
"desc": "TWO TONE",
"opcode": "OP6",
"partcode": "PAE"
},
"OP26": {
"desc": "PAINTLESS DENT REPAIR",
"opcode": "OP16",
"partcode": "PAE"
},
"OP100": {
"desc": "REPLACE PRE-PRICED",
"opcode": "OP11",
"partcode": "PAA"
},
"OP101": {
"desc": "REMOVE/REPLACE RECYCLED PART",
"opcode": "OP11",
"partcode": "PAL"
},
"OP103": {
"desc": "REMOVE / REPLACE PARTIAL",
"opcode": "OP11",
"partcode": "PAA"
},
"OP104": {
"desc": "REMOVE / REPLACE PARTIAL LABOUR",
"opcode": "OP11",
"partcode": "PAA"
},
"OP105": {
"desc": "!!ADJUST MANUALLY!!",
"opcode": "OP99",
"partcode": "PAE"
},
"OP106": {
"desc": "REPAIR , PARTIAL",
"opcode": "OP9",
"partcode": "PAE"
},
"OP107": {
"desc": "CHIPGUARD",
"opcode": "OP6",
"partcode": "PAE"
},
"OP108": {
"desc": "MULTI TONE",
"opcode": "OP6",
"partcode": "PAE"
},
"OP109": {
"desc": "REPLACE PRE-PRICED",
"opcode": "OP11",
"partcode": "PAA"
},
"OP110": {
"desc": "REFINISH / REPAIR",
"opcode": "OP1",
"partcode": "PAE"
},
"OP111": {
"desc": "REMOVE / REPLACE",
"opcode": "OP11",
"partcode": "PAN"
},
"OP112": {
"desc": "REMOVE / REPLACE",
"opcode": "OP11",
"partcode": "PAA"
},
"OP113": {
"desc": "REPLACE PRE-PRICED",
"opcode": "OP11",
"partcode": "PAA"
},
"OP114": {
"desc": "REPLACE PRE-PRICED",
"opcode": "OP11",
"partcode": "PAA"
},
"OP120": {
"desc": "REPAIR , PARTIAL",
"opcode": "OP9",
"partcode": "PAE"
},
"OP260": {
"desc": "SUBLET",
"opcode": "OP16",
"partcode": "PAE"
}
}

View File

@@ -0,0 +1,196 @@
const admin = require("firebase-admin");
const client = require("../../../graphql-client/graphql-client").client;
const {
DELETE_SHOP,
DELETE_VENDORS_BY_SHOP,
GET_BODYSHOP,
GET_ASSOCIATED_USERS,
DELETE_ASSOCIATIONS_BY_SHOP,
GET_USER_ASSOCIATIONS_COUNT,
DELETE_USER,
GET_VENDORS,
GET_JOBS_BY_SHOP,
DELETE_JOBLINES_BY_JOB_IDS,
DELETE_JOBS_BY_IDS,
DELETE_AUDIT_TRAIL_BY_SHOP
} = require("../partsManagement.queries");
/**
 * Removes a single user account from Firebase Auth.
 * @param {string} uid - The Firebase user ID (required).
 * @returns {Promise<void>} Resolves once Firebase has deleted the user.
 * @throws {Error} When no UID is supplied.
 */
const deleteFirebaseUser = async (uid) => {
  if (uid) {
    return admin.auth().deleteUser(uid);
  }
  throw new Error("User UID is required");
};
/**
 * Removes every vendor record belonging to the given shop.
 * @param {string} shopId - The shop ID (required).
 * @returns {Promise<void>}
 * @throws {Error} When no shop ID is supplied.
 */
const deleteVendorsByShop = async (shopId) => {
  if (!shopId) {
    throw new Error("Shop ID is required");
  }
  const variables = { shopId };
  await client.request(DELETE_VENDORS_BY_SHOP, variables);
};
/**
 * Removes the bodyshop row itself from the database.
 * @param {string} shopId - The shop ID (required).
 * @returns {Promise<void>}
 * @throws {Error} When no shop ID is supplied.
 */
const deleteBodyshop = async (shopId) => {
  if (!shopId) {
    throw new Error("Shop ID is required");
  }
  const variables = { id: shopId };
  await client.request(DELETE_SHOP, variables);
};
/**
 * Looks up the IDs of every job belonging to a shop.
 * @param {string} shopId - The shop ID (required).
 * @returns {Promise<string[]>} Job IDs; empty array when the shop has none.
 * @throws {Error} When no shop ID is supplied.
 */
const getJobIdsForShop = async (shopId) => {
  if (!shopId) {
    throw new Error("Shop ID is required");
  }
  const { jobs } = await client.request(GET_JOBS_BY_SHOP, { shopId });
  return jobs?.map((job) => job.id) || [];
};
/**
 * Removes all joblines attached to the given jobs.
 * @param {string[]} jobIds - Array of job IDs; empty/missing is a no-op.
 * @returns {Promise<number>} Count of joblines actually deleted.
 */
const deleteJoblinesForJobs = async (jobIds) => {
  // Nothing to do when no job IDs were supplied.
  if (!jobIds || jobIds.length === 0) {
    return 0;
  }
  const result = await client.request(DELETE_JOBLINES_BY_JOB_IDS, { jobIds });
  return result.delete_joblines?.affected_rows || 0;
};
/**
 * Removes the given jobs themselves (joblines should be deleted first).
 * @param {string[]} jobIds - Array of job IDs; empty/missing is a no-op.
 * @returns {Promise<number>} Count of jobs actually deleted.
 */
const deleteJobsByIds = async (jobIds) => {
  // Nothing to do when no job IDs were supplied.
  if (!jobIds || jobIds.length === 0) {
    return 0;
  }
  const result = await client.request(DELETE_JOBS_BY_IDS, { jobIds });
  return result.delete_jobs?.affected_rows || 0;
};
/**
 * Handles deprovisioning a shop for parts management.
 *
 * Deletes, in dependency order: user/shop associations, orphaned users
 * (DB + Firebase), joblines, jobs, the audit trail, vendors, and finally the
 * bodyshop row itself. Only shops provisioned externally (i.e. having an
 * external_shop_id) may be deleted, and the endpoint refuses to run in
 * production-like environments.
 *
 * Errors are thrown as plain `{ status, message }` objects (matching the
 * provisioning handler's convention) and mapped to the HTTP response in the
 * catch block below.
 *
 * @param {Object} req - Express request object; expects `req.body.shopId` and a `req.logger`.
 * @param {Object} res - Express response object.
 * @returns {Promise<*>} The JSON response summarizing what was deleted.
 */
const partsManagementDeprovisioning = async (req, res) => {
  const { logger } = req;
  const { shopId } = req.body;
  // Safety gate: never allow destructive deprovisioning in production.
  // NOTE(review): the HOSTNAME suffix check presumably identifies AWS EC2
  // instances — confirm this matches the deployment environment.
  if (process.env.NODE_ENV === "production" || process.env.HOSTNAME?.endsWith("compute.internal")) {
    return res.status(403).json({ error: "Deprovisioning not allowed in production environment." });
  }
  try {
    if (!shopId) {
      throw { status: 400, message: "shopId is required." };
    }
    // Fetch bodyshop and check external_shop_id — only externally-provisioned
    // shops are eligible for deletion.
    const shopResp = await client.request(GET_BODYSHOP, { id: shopId });
    const shop = shopResp.bodyshops_by_pk;
    if (!shop) {
      throw { status: 404, message: `Bodyshop with id ${shopId} not found.` };
    }
    if (!shop.external_shop_id) {
      throw { status: 400, message: "Cannot delete bodyshop without external_shop_id." };
    }
    logger.log("admin-delete-shop", "debug", null, null, {
      shopId,
      shopname: shop.shopname,
      ioadmin: true
    });
    // Get vendors (names recorded up-front so they can be reported after deletion)
    const vendorsResp = await client.request(GET_VENDORS, { shopId });
    const deletedVendors = vendorsResp.vendors?.map((v) => v.name) || [];
    // Get associated users before their associations are removed below.
    const assocResp = await client.request(GET_ASSOCIATED_USERS, { shopId });
    const associatedUsers =
      assocResp.associations?.map((assoc) => ({
        authId: assoc.user?.authid,
        email: assoc.user?.email
      })) || [];
    // Delete associations for the shop
    const assocDeleteResp = await client.request(DELETE_ASSOCIATIONS_BY_SHOP, { shopId });
    const associationsDeleted = assocDeleteResp.delete_associations?.affected_rows || 0;
    // Delete users with no remaining associations: a user attached to other
    // shops is kept; a per-user failure is logged and skipped, not fatal.
    const deletedUsers = [];
    for (const user of associatedUsers) {
      if (!user.email || !user.authId) continue;
      try {
        const countResp = await client.request(GET_USER_ASSOCIATIONS_COUNT, { userEmail: user.email });
        const assocCount = countResp.associations_aggregate?.aggregate?.count || 0;
        if (assocCount === 0) {
          // Remove the DB user first, then the Firebase Auth account.
          await client.request(DELETE_USER, { email: user.email });
          await deleteFirebaseUser(user.authId);
          deletedUsers.push(user.email);
        }
      } catch (userError) {
        logger.log("admin-delete-user-error", "warn", null, null, {
          email: user.email,
          error: userError.message || userError
        });
      }
    }
    // Delete jobs and joblines (joblines first — they reference jobs)
    const jobIds = await getJobIdsForShop(shopId);
    const joblinesDeleted = await deleteJoblinesForJobs(jobIds);
    const jobsDeleted = await deleteJobsByIds(jobIds);
    // Delete audit trail
    const auditResp = await client.request(DELETE_AUDIT_TRAIL_BY_SHOP, { shopId });
    const auditDeleted = auditResp.delete_audit_trail?.affected_rows || 0;
    // Delete vendors and shop (the shop row goes last, after all dependents)
    await deleteVendorsByShop(shopId);
    await deleteBodyshop(shopId);
    // Summary log
    logger.log("admin-delete-shop-summary", "info", null, null, {
      shopId,
      shopname: shop.shopname,
      associationsDeleted,
      deletedUsers,
      deletedVendors,
      joblinesDeleted,
      jobsDeleted,
      auditDeleted
    });
    return res.status(200).json({
      message: `Bodyshop ${shopId} and associated resources deleted successfully.`,
      deletedShop: { id: shopId, name: shop.shopname },
      deletedAssociationsCount: associationsDeleted,
      deletedUsers,
      deletedVendors,
      deletedJoblinesCount: joblinesDeleted,
      deletedJobsCount: jobsDeleted,
      deletedAuditTrailCount: auditDeleted
    });
  } catch (err) {
    // Both thrown `{ status, message }` objects and real Errors land here;
    // default to 500 when no explicit status was attached.
    logger.log("admin-delete-shop-error", "error", null, null, {
      message: err.message,
      detail: err.detail || err.stack || err
    });
    return res.status(err.status || 500).json({ error: err.message || "Internal server error" });
  }
};
module.exports = partsManagementDeprovisioning;

View File

@@ -1,10 +1,17 @@
const crypto = require("crypto");
const admin = require("firebase-admin");
const client = require("../../graphql-client/graphql-client").client;
const DefaultNewShop = require("./defaultNewShop.json");
const client = require("../../../graphql-client/graphql-client").client;
const DefaultNewShop = require("../defaultNewShop.json");
const {
CHECK_EXTERNAL_SHOP_ID,
CREATE_SHOP,
DELETE_VENDORS_BY_SHOP,
DELETE_SHOP,
CREATE_USER
} = require("../partsManagement.queries");
/**
* Ensures that the required fields are present in the payload.
* Checks if the required fields are present in the payload.
* @param payload
* @param fields
*/
@@ -17,7 +24,7 @@ const requireFields = (payload, fields) => {
};
/**
* Ensures that the email is not already registered in Firebase.
* Ensures that the provided email is not already registered in Firebase.
* @param email
* @returns {Promise<void>}
*/
@@ -33,16 +40,19 @@ const ensureEmailNotRegistered = async (email) => {
};
/**
* Creates a new Firebase user with the provided email.
* Creates a new Firebase user with the given email and optional password.
* @param email
* @param password
* @returns {Promise<UserRecord>}
*/
const createFirebaseUser = async (email) => {
return admin.auth().createUser({ email });
const createFirebaseUser = async (email, password = null) => {
const userData = { email };
if (password) userData.password = password;
return admin.auth().createUser(userData);
};
/**
* Deletes a Firebase user by their UID.
* Deletes a Firebase user by UID.
* @param uid
* @returns {Promise<void>}
*/
@@ -60,18 +70,12 @@ const generateResetLink = async (email) => {
};
/**
* Ensures that the external shop ID is unique in the database.
* Ensures that the provided external shop ID is unique.
* @param externalId
* @returns {Promise<void>}
*/
const ensureExternalIdUnique = async (externalId) => {
const query = `
query CHECK_KEY($key: String!) {
bodyshops(where: { external_shop_id: { _eq: $key } }) {
external_shop_id
}
}`;
const resp = await client.request(query, { key: externalId });
const resp = await client.request(CHECK_EXTERNAL_SHOP_ID, { key: externalId });
if (resp.bodyshops.length) {
throw { status: 400, message: `external_shop_id '${externalId}' is already in use.` };
}
@@ -83,40 +87,26 @@ const ensureExternalIdUnique = async (externalId) => {
* @returns {Promise<*>}
*/
const insertBodyshop = async (input) => {
const mutation = `
mutation CREATE_SHOP($bs: bodyshops_insert_input!) {
insert_bodyshops_one(object: $bs) { id }
}`;
const resp = await client.request(mutation, { bs: input });
const resp = await client.request(CREATE_SHOP, { bs: input });
return resp.insert_bodyshops_one.id;
};
/**
* Deletes all vendors associated with a specific shop ID.
* Deletes all vendors associated with a shop.
* @param shopId
* @returns {Promise<void>}
*/
const deleteVendorsByShop = async (shopId) => {
const mutation = `
mutation DELETE_VENDORS($shopId: uuid!) {
delete_vendors(where: { shopid: { _eq: $shopId } }) {
affected_rows
}
}`;
await client.request(mutation, { shopId });
await client.request(DELETE_VENDORS_BY_SHOP, { shopId });
};
/**
* Deletes a bodyshop by its ID.
* Deletes a bodyshop from the database.
* @param shopId
* @returns {Promise<void>}
*/
const deleteBodyshop = async (shopId) => {
const mutation = `
mutation DELETE_SHOP($id: uuid!) {
delete_bodyshops_by_pk(id: $id) { id }
}`;
await client.request(mutation, { id: shopId });
await client.request(DELETE_SHOP, { id: shopId });
};
/**
@@ -127,41 +117,33 @@ const deleteBodyshop = async (shopId) => {
* @returns {Promise<*>}
*/
const insertUserAssociation = async (uid, email, shopId) => {
const mutation = `
mutation CREATE_USER($u: users_insert_input!) {
insert_users_one(object: $u) {
id: authid
email
}
}`;
const vars = {
u: {
email,
authid: uid,
validemail: true,
associations: {
data: [{ shopid: shopId, authlevel: 80, active: true }]
data: [{ shopid: shopId, authlevel: 99, active: true }]
}
}
};
const resp = await client.request(mutation, vars);
const resp = await client.request(CREATE_USER, vars);
return resp.insert_users_one;
};
/**
* Handles the provisioning of a new parts management shop and user.
* Handles provisioning a new shop for parts management.
* @param req
* @param res
* @returns {Promise<*>}
*/
const partsManagementProvisioning = async (req, res) => {
const { logger } = req;
const p = { ...req.body, userEmail: req.body.userEmail?.toLowerCase() };
const body = { ...req.body, userEmail: req.body.userEmail?.toLowerCase() };
try {
// Validate inputs
await ensureEmailNotRegistered(p.userEmail);
requireFields(p, [
await ensureEmailNotRegistered(body.userEmail);
requireFields(body, [
"external_shop_id",
"shopname",
"address1",
@@ -173,35 +155,52 @@ const partsManagementProvisioning = async (req, res) => {
"phone",
"userEmail"
]);
await ensureExternalIdUnique(p.external_shop_id);
await ensureExternalIdUnique(body.external_shop_id);
logger.log("admin-create-shop-user", "debug", p.userEmail, null, {
logger.log("admin-create-shop-user", "debug", body.userEmail, null, {
request: req.body,
ioadmin: true
});
// Create shop
const shopInput = {
shopname: p.shopname,
address1: p.address1,
address2: p.address2 || null,
city: p.city,
state: p.state,
zip_post: p.zip_post,
country: p.country,
email: p.email,
external_shop_id: p.external_shop_id,
timezone: p.timezone,
phone: p.phone,
shopname: body.shopname,
address1: body.address1,
address2: body.address2 || null,
city: body.city,
state: body.state,
zip_post: body.zip_post,
country: body.country,
email: body.email,
external_shop_id: body.external_shop_id,
timezone: body.timezone || DefaultNewShop.timezone,
phone: body.phone,
logo_img_path: {
src: p.logoUrl,
src: body.logoUrl,
width: "",
height: "",
headerMargin: DefaultNewShop.logo_img_path.headerMargin
},
features: {
allAccess: false,
partsManagementOnly: true
},
md_ro_statuses: DefaultNewShop.md_ro_statuses,
md_order_statuses: DefaultNewShop.md_order_statuses,
md_responsibility_centers: DefaultNewShop.md_responsibility_centers,
md_referral_sources: DefaultNewShop.md_referral_sources,
md_messaging_presets: DefaultNewShop.md_messaging_presets,
md_rbac: DefaultNewShop.md_rbac,
md_classes: DefaultNewShop.md_classes,
md_ins_cos: DefaultNewShop.md_ins_cos,
md_categories: DefaultNewShop.md_categories,
md_labor_rates: DefaultNewShop.md_labor_rates,
md_payment_types: DefaultNewShop.md_payment_types,
md_hour_split: DefaultNewShop.md_hour_split,
md_ccc_rates: DefaultNewShop.md_ccc_rates,
appt_alt_transport: DefaultNewShop.appt_alt_transport,
md_jobline_presets: DefaultNewShop.md_jobline_presets,
vendors: {
data: p.vendors.map((v) => ({
data: body.vendors.map((v) => ({
name: v.name,
street1: v.street1 || null,
street2: v.street2 || null,
@@ -220,34 +219,40 @@ const partsManagementProvisioning = async (req, res) => {
}))
}
};
const newShopId = await insertBodyshop(shopInput);
// Create user + association
const userRecord = await createFirebaseUser(p.userEmail);
const resetLink = await generateResetLink(p.userEmail);
const createdUser = await insertUserAssociation(userRecord.uid, p.userEmail, newShopId);
const newShopId = await insertBodyshop(shopInput);
const userRecord = await createFirebaseUser(body.userEmail, body.userPassword);
let resetLink = null;
if (!body.userPassword) resetLink = await generateResetLink(body.userEmail);
const createdUser = await insertUserAssociation(userRecord.uid, body.userEmail, newShopId);
return res.status(200).json({
shop: { id: newShopId, shopname: p.shopname },
shop: { id: newShopId, shopname: body.shopname },
user: {
id: createdUser.id,
email: createdUser.email,
resetLink
resetLink: resetLink || undefined
}
});
} catch (err) {
logger.log("admin-create-shop-user-error", "error", p.userEmail, null, {
logger.log("admin-create-shop-user-error", "error", body.userEmail, null, {
message: err.message,
detail: err.detail || err
});
// Cleanup on failure
if (err.userRecord) {
await deleteFirebaseUser(err.userRecord.uid).catch(() => {});
await deleteFirebaseUser(err.userRecord.uid).catch(() => {
/* empty */
});
}
if (err.newShopId) {
await deleteVendorsByShop(err.newShopId).catch(() => {});
await deleteBodyshop(err.newShopId).catch(() => {});
await deleteVendorsByShop(err.newShopId).catch(() => {
/* empty */
});
await deleteBodyshop(err.newShopId).catch(() => {
/* empty */
});
}
return res.status(err.status || 500).json({ error: err.message || "Internal server error" });

View File

@@ -0,0 +1,634 @@
const client = require("../../../graphql-client/graphql-client").client;
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
const opCodes = require("./lib/opCodes.json");
// New imports for S3 XML archival
const { uploadFileToS3 } = require("../../../utils/s3");
const InstanceMgr = require("../../../utils/instanceMgr").default;
// GraphQL Queries and Mutations
const {
GET_BODYSHOP_STATUS,
GET_VEHICLE_BY_SHOP_VIN,
INSERT_OWNER,
INSERT_JOB_WITH_LINES
} = require("../partsManagement.queries");
const { v4: uuidv4 } = require("uuid");
// Defaults
// Used when the shop has no md_ro_statuses.default_imported configured.
const FALLBACK_DEFAULT_JOB_STATUS = "Open";
// Destination bucket for archived estimate XML payloads: a shared bucket in
// local/dev, otherwise an instance-branded bucket (ImEX vs Rome).
const ESTIMATE_XML_BUCKET =
  process.env?.NODE_ENV === "development"
    ? "parts-estimates" // local/dev shared bucket name
    : InstanceMgr({
        imex: `imex-webest-xml`,
        rome: `rome-webest-xml`
      });
/**
 * Builds the S3 object key under which a raw estimate XML payload is archived.
 * Key shape: addRequest/<shopId>/<sanitized claim #>/<timestamp>-<uuid>.xml
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {string} The S3 key for this estimate upload.
 */
const buildEstimateXmlKey = (rq) => {
  // Colons are not S3-key friendly, so strip them from the ISO timestamp.
  const stamp = new Date().toISOString().replace(/:/g, "-");
  // Claim numbers come from external systems; whitelist safe characters only.
  const claim = (rq.RefClaimNum || "no-claim").toString().replace(/[^A-Za-z0-9_-]/g, "_");
  return `addRequest/${rq.ShopID}/${claim}/${stamp}-${uuidv4()}.xml`;
};
/**
 * Looks up the shop-configured default status for imported jobs.
 * @param {string} shopId - The bodyshop UUID.
 * @param {object} logger - The logger instance.
 * @returns {Promise<string>} The shop's `md_ro_statuses.default_imported`
 *   value, or the fallback status when missing or when the lookup fails.
 */
const getDefaultJobStatus = async (shopId, logger) => {
  try {
    const resp = await client.request(GET_BODYSHOP_STATUS, { id: shopId });
    const configured = resp.bodyshop_by_pk?.md_ro_statuses?.default_imported;
    return configured || FALLBACK_DEFAULT_JOB_STATUS;
  } catch (err) {
    // Missing shop or transient GraphQL failure: log and fall back rather than abort the import.
    logger.log("parts-bodyshop-fetch-failed", "warn", shopId, null, { error: err });
    return FALLBACK_DEFAULT_JOB_STATUS;
  }
};
/**
 * Looks up a vehicle already on file for this shop + VIN combination.
 * @param {string} shopId - The bodyshop UUID.
 * @param {string} v_vin - The vehicle VIN; a falsy VIN short-circuits to null.
 * @param {object} logger - The logger instance.
 * @returns {Promise<string|null>} The first matching vehicle's ID, or null
 *   when no match exists or the lookup fails.
 */
const findExistingVehicle = async (shopId, v_vin, logger) => {
  if (!v_vin) {
    return null;
  }
  try {
    const { vehicles } = await client.request(GET_VEHICLE_BY_SHOP_VIN, { shopid: shopId, v_vin });
    if (vehicles && vehicles.length > 0) {
      const vehicleId = vehicles[0].id;
      logger.log("parts-vehicle-found", "info", vehicleId, null, { shopid: shopId, v_vin });
      return vehicleId;
    }
  } catch (err) {
    // Lookup failures are non-fatal: log and treat the vehicle as not found.
    logger.log("parts-vehicle-fetch-failed", "warn", null, null, { error: err });
  }
  return null;
};
/**
 * Extracts job-related data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Extracted job data (missing fields default to null,
 *   numeric totals to 0).
 */
const extractJobData = (rq) => {
  const documentInfo = rq.DocumentInfo || {};
  const eventInfo = rq.EventInfo || {};
  const assignment = eventInfo.AssignmentEvent || {};
  const claimInfo = rq.ClaimInfo || {};
  const repairEvent = eventInfo.RepairEvent;
  const policyInfo = claimInfo.PolicyInfo;
  return {
    driveable: Boolean(rq.VehicleInfo?.Condition?.DrivableInd),
    shopId: rq.ShopID || rq.shopId,
    // NOTE(review): ClaimInfo.ClaimStatus is intentionally not mapped yet;
    // the caller applies the shop's default status instead.
    refClaimNum: rq.RefClaimNum,
    ciecaid: rq.RqUID || null,
    // Cieca_ttl lives on ClaimInfo per the schema/sample payloads.
    cieca_ttl: parseFloat(claimInfo.Cieca_ttl || 0),
    cat_no: documentInfo.VendorCode || null,
    category: documentInfo.DocumentType || null,
    classType: documentInfo.DocumentStatus || null,
    comment: documentInfo.Comment || null,
    asgn_no: assignment.AssignmentNumber || null,
    asgn_type: assignment.AssignmentType || null,
    asgn_date: assignment.AssignmentDate || null,
    scheduled_in: repairEvent?.RequestedPickUpDateTime || null,
    scheduled_completion: repairEvent?.TargetCompletionDateTime || null,
    clm_no: claimInfo.ClaimNum || null,
    // Some feeds nest PolicyInfo twice; prefer the inner one when present.
    policy_no: policyInfo?.PolicyInfo?.PolicyNum || policyInfo?.PolicyNum || null,
    ded_amt: parseFloat(policyInfo?.CoverageInfo?.Coverage?.DeductibleInfo?.DeductibleAmt || 0)
  };
};
/**
 * Extracts owner data from the XML request, falling back to the Claimant
 * party when no Owner party is present.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @param {string} shopId - The bodyshop UUID.
 * @returns {object} Owner data for insertion and inline use; phone/email
 *   fields stay undefined when absent, other fields default to null.
 */
const extractOwnerData = (rq, shopId) => {
  const party = rq.AdminInfo?.Owner?.Party || rq.AdminInfo?.Claimant?.Party || {};
  const personInfo = party.PersonInfo || {};
  const personName = personInfo.PersonName || {};
  const address = personInfo.Communications?.Address || {};
  // ContactInfo.Communications may arrive as a single object or an array.
  const rawComms = party.ContactInfo?.Communications;
  const commList = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  // CommQualifier codes CP / WP / EM — presumably cell phone, work phone and
  // email (confirm against the feed); "AL" (alternate phone) is not mapped
  // yet. When a qualifier repeats, the last entry wins.
  let phoneCp;
  let phoneWp;
  let email;
  for (const comm of commList) {
    switch (comm.CommQualifier) {
      case "CP":
        phoneCp = comm.CommPhone;
        break;
      case "WP":
        phoneWp = comm.CommPhone;
        break;
      case "EM":
        email = comm.CommEmail;
        break;
      default:
        break;
    }
  }
  return {
    shopid: shopId,
    ownr_fn: personName.FirstName || null,
    ownr_ln: personName.LastName || null,
    ownr_co_nm: party.OrgInfo?.CompanyName || null,
    ownr_addr1: address.Address1 || null,
    ownr_addr2: address.Address2 || null,
    ownr_city: address.City || null,
    ownr_st: address.StateProvince || null,
    ownr_zip: address.PostalCode || null,
    ownr_ctry: address.Country || null,
    ownr_ph1: phoneCp,
    ownr_ph2: phoneWp,
    ownr_ea: email
    // Additional owner identifiers (ID info, preferred contact method,
    // alternate phone) exist in the schema but are not persisted yet.
  };
};
/**
 * Extracts estimator contact data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Estimator data.
 */
const extractEstimatorData = (rq) => {
  const estimator = rq.AdminInfo?.Estimator;
  const party = estimator?.Party || {};
  const personName = party.PersonInfo?.PersonName || {};
  const rawComms = party.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  const emailComm = comms.find((comm) => comm.CommQualifier === "EM");
  return {
    est_co_nm: estimator?.Affiliation || null,
    est_ct_fn: personName.FirstName || null,
    est_ct_ln: personName.LastName || null,
    est_ea: emailComm?.CommEmail || null
  };
};
/**
* Extracts adjuster data from the XML request.
* @param {object} rq - The VehicleDamageEstimateAddRq object.
* @returns {object} Adjuster data.
*/
// const extractAdjusterData = (rq) => {
// const adjParty = rq.AdminInfo?.Adjuster?.Party || {};
// const adjComms = Array.isArray(adjParty.ContactInfo?.Communications)
// ? adjParty.ContactInfo.Communications
// : [adjParty.ContactInfo?.Communications || {}];
//
// return {
// //TODO (FUTURE): I dont think we display agt_ct_* fields in app. Have they typically been sending data here?
// agt_ct_fn: adjParty.PersonInfo?.PersonName?.FirstName || null,
// agt_ct_ln: adjParty.PersonInfo?.PersonName?.LastName || null,
// agt_ct_ph: adjComms.find((c) => c.CommQualifier === "CP")?.CommPhone || null,
// agt_ea: adjComms.find((c) => c.CommQualifier === "EM")?.CommEmail || null
// };
// };
/**
* Extracts repair facility data from the XML request.
* @param {object} rq - The VehicleDamageEstimateAddRq object.
* @returns {object} Repair facility data.
*/
// const extractRepairFacilityData = (rq) => {
// const rfParty = rq.AdminInfo?.RepairFacility?.Party || {};
// const rfComms = Array.isArray(rfParty.ContactInfo?.Communications)
// ? rfParty.ContactInfo.Communications
// : [rfParty.ContactInfo?.Communications || {}];
//
// return {
// servicing_dealer: rfParty.OrgInfo?.CompanyName || null,
// // TODO (Future): The servicing dealer fields are a relic from synergy for a few folks
// // TODO (Future): I suspect RF data could be ignored since they are the RF.
// servicing_dealer_contact:
// rfComms.find((c) => c.CommQualifier === "WP" || c.CommQualifier === "FX")?.CommPhone || null
// };
// };
/**
 * Extracts loss information from the XML request.
 * Loss type code/description arrive via a producer-specific CustomElement.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {{loss_date: (string|null), loss_type: (string|null), loss_desc: (string|null)}}
 */
const extractLossInfo = (rq) => {
  const facts = rq.ClaimInfo?.LossInfo?.Facts || {};
  const customElement = rq.ClaimInfo?.CustomElement || {};
  // Future: area-of-impact, total-loss indicator and damage memo are not yet mapped.
  return {
    loss_date: facts.LossDateTime || null,
    loss_type: customElement.LossTypeCode || null,
    loss_desc: customElement.LossTypeDesc || null
  };
};
/**
 * Extracts insured-party and insurer data from the XML request.
 * Phone/fax/email fields come from the insured party's communications list;
 * the company name prefers the InsuranceCompany party when present.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Insurance fields for the job row.
 */
const extractInsuranceData = (rq) => {
  const insured = rq.AdminInfo?.Insured?.Party || {};
  const person = insured.PersonInfo || {};
  const name = person.PersonName || {};
  const address = person.Communications?.Address || {};
  const insurer = rq.AdminInfo?.InsuranceCompany?.Party || {};
  const rawComms = insured.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  // Last entry wins when a qualifier repeats (matches prior behavior).
  let insd_ph1;
  let insd_ph1x;
  let insd_ph2;
  let insd_ph2x;
  let insd_fax;
  let insd_faxx;
  let insd_ea;
  for (const comm of comms) {
    switch (comm.CommQualifier) {
      case "CP":
        insd_ph1 = comm.CommPhone;
        insd_ph1x = comm.CommPhoneExt;
        break;
      case "WP":
        insd_ph2 = comm.CommPhone;
        insd_ph2x = comm.CommPhoneExt;
        break;
      case "FX":
        insd_fax = comm.CommPhone;
        insd_faxx = comm.CommPhoneExt;
        break;
      case "EM":
        insd_ea = comm.CommEmail;
        break;
      default:
        break;
    }
  }
  return {
    insd_ln: name.LastName || null,
    insd_fn: name.FirstName || null,
    insd_title: name.Title || null,
    insd_co_nm: insurer.OrgInfo?.CompanyName || insured.OrgInfo?.CompanyName || null,
    insd_addr1: address.Address1 || null,
    insd_addr2: address.Address2 || null,
    insd_city: address.City || null,
    insd_st: address.StateProvince || null,
    insd_zip: address.PostalCode || null,
    insd_ctry: address.Country || null,
    insd_ph1,
    insd_ph1x,
    insd_ph2,
    insd_ph2x,
    insd_fax,
    insd_faxx,
    insd_ea
  };
};
/**
 * Extracts vehicle data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @param {string} shopId - The bodyshop UUID.
 * @returns {object} Vehicle data for insertion and inline use.
 */
const extractVehicleData = (rq, shopId) => {
  const vehicleInfo = rq.VehicleInfo || {};
  const desc = vehicleInfo.VehicleDesc || {};
  const exterior = vehicleInfo.Paint?.Exterior || {};
  const interior = vehicleInfo.Paint?.Interior || {};
  // Multiple paint codes are joined into one comma-separated string.
  const paintCodeInfo = exterior.PaintCodeInfo;
  const v_paint_codes = Array.isArray(paintCodeInfo)
    ? paintCodeInfo.map((entry) => entry.PaintCode).join(",")
    : exterior.PaintCode || null;
  return {
    shopid: shopId,
    // VIN may be either VINInfo.VINNum or VINInfo.VIN.VINNum depending on producer.
    v_vin: vehicleInfo.VINInfo?.VINNum || vehicleInfo.VINInfo?.VIN?.VINNum || null,
    plate_no: vehicleInfo.License?.LicensePlateNum || null,
    plate_st: vehicleInfo.License?.LicensePlateStateProvince || null,
    v_model_yr: desc.ModelYear || null,
    v_make_desc: desc.MakeDesc || null,
    v_model_desc: desc.ModelName || null,
    v_color: exterior.Color?.ColorName || null,
    v_bstyle: desc.BodyStyle || null,
    v_engine: desc.EngineDesc || null,
    v_type: desc.FuelType || null,
    v_cond: vehicleInfo.Condition?.DrivableInd,
    v_trimcode: desc.TrimCode || null,
    v_tone: exterior.Tone || null,
    v_stage: exterior.RefinishStage || vehicleInfo.Paint?.RefinishStage || null,
    v_prod_dt: desc.ProductionDate || null,
    v_paint_codes,
    v_mldgcode: desc.MldgCode || null,
    v_makecode: desc.MakeCode || null,
    trim_color: interior.ColorName || desc.TrimColor || null,
    db_v_code: desc.DatabaseCode || null
  };
};
/**
 * Extracts job lines from the XML request.
 * Each DamageLineInfo becomes one output jobline; part or sublet data,
 * primary labor, and refinish labor (when present) are all recorded on
 * the same output line rather than split into separate lines.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object[]} Array of job line objects.
 */
const extractJobLines = (rq) => {
  // Normalize to array without lodash toArray (which flattens object values incorrectly)
  const dl = rq.DamageLineInfo;
  const damageLines = Array.isArray(dl) ? dl : dl ? [dl] : [];
  if (damageLines.length === 0) {
    return [];
  }
  const out = [];
  for (const line of damageLines) {
    const partInfo = line.PartInfo || {};
    const laborInfo = line.LaborInfo || {};
    const refinishInfo = line.RefinishLaborInfo || {};
    const subletInfo = line.SubletInfo || {};
    // Identity/status fields common to every line shape.
    const base = {
      line_no: parseInt(line.LineNum || 0, 10),
      unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
      status: line.LineStatusCode || null,
      line_desc: line.LineDesc || null,
      notes: line.LineMemo || null
    };
    const lineOut = { ...base };
    // Manual line flag coercion
    // if (line.ManualLineInd !== undefined) {
    //   lineOut.manual_line =
    //     line.ManualLineInd === true ||
    //     line.ManualLineInd === 1 ||
    //     line.ManualLineInd === "1" ||
    //     // TODO (FUTURE): manual line tracks manual in IO or not, this woudl presumably always be false
    //     (typeof line.ManualLineInd === "string" && line.ManualLineInd.toUpperCase() === "Y");
    // } else {
    //   lineOut.manual_line = null;
    // }
    // Is set to false because anything coming from the DMS is considered not a manual line, it becomes
    // a manual line once it is edited in OUR system.
    lineOut.manual_line = false;
    // Parts (preferred) or Sublet (fallback when no PartInfo)
    const hasPart = Object.keys(partInfo).length > 0;
    const hasSublet = Object.keys(subletInfo).length > 0;
    if (hasPart) {
      // NOTE(review): "partInfo.PartType || null ? a : b" parses as
      // "(partInfo.PartType || null) ? a : b", so the "|| null" is redundant;
      // the effective behavior is: uppercase PartType when truthy, else null.
      lineOut.part_type = partInfo.PartType || null ? String(partInfo.PartType).toUpperCase() : null;
      // Quantity defaults to 1 when missing, zero, or non-numeric.
      lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1;
      lineOut.oem_partno = partInfo.OEMPartNum;
      lineOut.alt_partno = partInfo?.NonOEM?.NonOEMPartNum;
      // THIS NEEDS TO BE CHANGED IN CHANGE REQUEST
      lineOut.act_price = parseFloat(partInfo?.PartPrice || 0);
      lineOut.db_price = parseFloat(partInfo?.OEMPartPrice || 0);
      // Tax flag from PartInfo.TaxableInd when provided
      if (
        partInfo.TaxableInd !== undefined &&
        (typeof partInfo.TaxableInd === "string" ||
          typeof partInfo.TaxableInd === "number" ||
          typeof partInfo.TaxableInd === "boolean")
      ) {
        // Accepts true / 1 / "1" / "y"/"Y" as taxable; anything else is not taxable.
        lineOut.tax_part =
          partInfo.TaxableInd === true ||
          partInfo.TaxableInd === 1 ||
          partInfo.TaxableInd === "1" ||
          (typeof partInfo.TaxableInd === "string" && partInfo.TaxableInd.toUpperCase() === "Y");
      }
    }
    //TODO (FUTURE): Some nuance here. Usually a part and sublet amount shouldnt be on the same line, but they theoretically
    // could. May require additional discussion.
    // EMS - > Misc Amount, calibration for example, painting, etc
    else if (hasSublet) {
      const amt = parseFloat(subletInfo.SubletAmount || 0);
      lineOut.part_type = "PAS"; // Sublet as parts-as-service
      lineOut.part_qty = 1;
      lineOut.act_price = isNaN(amt) ? 0 : amt;
    }
    // Primary labor (if present) recorded on the same line
    const hrs = parseFloat(laborInfo.LaborHours || 0);
    const amt = parseFloat(laborInfo.LaborAmt || 0);
    const hasLabor =
      (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
      (!isNaN(hrs) && hrs !== 0) ||
      (!isNaN(amt) && amt !== 0);
    if (hasLabor) {
      lineOut.mod_lbr_ty = laborInfo.LaborType || null;
      lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs;
      // Normalize the operation code and map it to a human-readable description.
      const opCodeKey =
        typeof laborInfo.LaborOperation === "string" ? laborInfo.LaborOperation.trim().toUpperCase() : null;
      lineOut.op_code_desc = opCodes?.[opCodeKey]?.desc || null;
      lineOut.lbr_amt = isNaN(amt) ? 0 : amt;
    }
    //TODO (FUTURE): what's the BMS logic for this? Body and refinish operations can often happen to the same part,
    // but most systems output a second line for the refinish labor.
    //TODO (FUTURE): 2nd line may include a duplicate of the part price, but that can be removed. This is the case for CCC.
    // Refinish labor (if present) recorded on the same line using secondary labor fields
    // NOTE(review): parseFloat(x || 0) is NaN only for non-numeric truthy strings, so
    // "!isNaN(rHrs)" is nearly always true — hasRefinish effectively reduces to
    // "refinishInfo is non-empty". Also rHrs is never persisted; the ChgRq handler
    // stores it in lbr_hrs_j. Confirm both points against the intended schema.
    const rHrs = parseFloat(refinishInfo.LaborHours || 0);
    const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
    const hasRefinish =
      Object.keys(refinishInfo).length > 0 &&
      ((refinishInfo.LaborType && String(refinishInfo.LaborType).length > 0) ||
        !isNaN(rHrs) ||
        !isNaN(rAmt) ||
        !!refinishInfo.LaborOperation);
    if (hasRefinish) {
      // NOTE(review): here the *_j fields hold boolean judgment indicators, while the
      // ChgRq handler writes the labor type/hours/operation values into the same
      // columns — confirm which shape the schema expects and align the handlers.
      lineOut.lbr_typ_j = !!refinishInfo?.LaborAmtJudgmentInd;
      lineOut.lbr_hrs_j = !!refinishInfo?.LaborHoursJudgmentInd;
      lineOut.lbr_op_j = !!refinishInfo.LaborOperationJudgmentInd;
      // Aggregate refinish labor amount into the total labor amount for the line
      if (!isNaN(rAmt)) {
        lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
      }
      if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum;
      if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum;
    }
    out.push(lineOut);
  }
  return out;
};
// Helper to extract a GRAND TOTAL amount from RepairTotalsInfo
// const extractGrandTotal = (rq) => {
// const rti = rq.RepairTotalsInfo;
// const groups = Array.isArray(rti) ? rti : rti ? [rti] : [];
// for (const grp of groups) {
// const sums = Array.isArray(grp.SummaryTotalsInfo)
// ? grp.SummaryTotalsInfo
// : grp.SummaryTotalsInfo
// ? [grp.SummaryTotalsInfo]
// : [];
// for (const s of sums) {
// const type = (s.TotalType || "").toString().toUpperCase();
// const desc = (s.TotalTypeDesc || "").toString().toUpperCase();
// if (type.includes("GRAND") || type === "TOTAL" || desc.includes("GRAND")) {
// const amt = parseFloat(s.TotalAmt ?? "NaN");
// if (!isNaN(amt)) return amt;
// }
// }
// }
// return null;
// };
/**
 * Inserts an owner row and returns its id.
 * @param {object} ownerInput - The owner data to insert.
 * @param {object} logger - The logger instance.
 * @returns {Promise<string|null>} The new owner id, or null when insertion fails.
 */
const insertOwner = async (ownerInput, logger) => {
  try {
    const result = await client.request(INSERT_OWNER, { owner: ownerInput });
    return result.insert_owners_one?.id;
  } catch (err) {
    // Non-fatal: the job is simply created without an owner link.
    logger.log("parts-owner-insert-failed", "warn", null, null, { error: err });
    return null;
  }
};
// Fallback: compute a naive total from joblines (parts + sublet + labor amounts)
// const computeLinesTotal = (joblines = []) => {
// let parts = 0;
// let labor = 0;
// for (const jl of joblines) {
// if (jl?.part_type) {
// const qty = Number.isFinite(jl.part_qty) ? jl.part_qty : 1;
// const price = Number.isFinite(jl.act_price) ? jl.act_price : 0;
// parts += price * (qty || 1);
// } else if (!jl.part_type && Number.isFinite(jl.act_price)) {
// parts += jl.act_price;
// }
// if (Number.isFinite(jl.lbr_amt)) {
// labor += jl.lbr_amt;
// }
// }
// const total = parts + labor;
//
// //TODO (FUTURE): clm_total is the 100% full amount of the repair including deductible, betterment and taxes. Typically provided by the source system.
// return Number.isFinite(total) && total > 0 ? total : 0;
// //TODO (FUTURE): clm_total is the 100% full amount of the repair including deductible,
// // betterment and taxes. Typically provided by the source system.
/**
 * Handles the VehicleDamageEstimateAddRq XML request from parts management.
 * Flow: parse + normalize the XML, extract job/owner/estimator/vehicle/loss/
 * insurance/line data, insert the job (with nested joblines and, for a new
 * VIN, a nested vehicle), archive the raw XML to S3 in the background, and
 * respond with the new job id.
 * @param {object} req - The HTTP request object.
 * @param {object} res - The HTTP response object.
 * @returns {Promise<void>}
 */
const vehicleDamageEstimateAddRq = async (req, res) => {
  const { logger } = req;
  // Preserve the raw XML exactly as received (string or Buffer) for S3 archival.
  const rawXml = typeof req.body === "string" ? req.body : Buffer.isBuffer(req.body) ? req.body.toString("utf8") : "";
  try {
    const payload = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(payload.VehicleDamageEstimateAddRq);
    // NOTE(review): date_exported is destructured below but extractJobData never
    // returns it, so it is always undefined here — confirm its intended source.
    const {
      shopId,
      refClaimNum,
      ciecaid,
      cieca_ttl,
      cat_no,
      category,
      classType,
      comment,
      date_exported,
      asgn_no,
      asgn_type,
      asgn_date,
      scheduled_in,
      scheduled_completion,
      clm_no,
      policy_no,
      ded_amt,
      driveable
    } = extractJobData(rq);
    const defaultStatus = await getDefaultJobStatus(shopId, logger);
    const parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
    const ownerData = extractOwnerData(rq, shopId);
    const estimatorData = extractEstimatorData(rq);
    const vehicleData = extractVehicleData(rq, shopId);
    const lossInfo = extractLossInfo(rq);
    const joblinesData = extractJobLines(rq);
    const insuranceData = extractInsuranceData(rq);
    // Owner is inserted first so the job can reference it; an existing vehicle
    // is reused by VIN when possible.
    const ownerid = await insertOwner(ownerData, logger);
    const vehicleid = await findExistingVehicle(shopId, vehicleData.v_vin, logger);
    const jobInput = {
      shopid: shopId,
      driveable,
      converted: true,
      ownerid,
      ro_number: refClaimNum,
      ciecaid,
      cieca_ttl,
      cat_no,
      category,
      class: classType,
      parts_tax_rates,
      clm_no,
      status: defaultStatus,
      clm_total: 0,
      policy_no,
      ded_amt,
      comment,
      date_exported,
      asgn_no,
      asgn_type,
      asgn_date,
      scheduled_in,
      scheduled_completion,
      ...insuranceData,
      ...lossInfo,
      ...ownerData,
      ...estimatorData,
      // Denormalized vehicle summary fields stored directly on the job row.
      v_vin: vehicleData.v_vin,
      v_model_yr: vehicleData.v_model_yr,
      v_model_desc: vehicleData.v_model_desc,
      v_make_desc: vehicleData.v_make_desc,
      v_color: vehicleData.v_color,
      plate_no: vehicleData.plate_no,
      plate_st: vehicleData.plate_st,
      // Link the existing vehicle when found; otherwise nested-insert a new one.
      ...(vehicleid ? { vehicleid } : { vehicle: { data: vehicleData } }),
      joblines: { data: joblinesData }
    };
    const { insert_jobs_one: newJob } = await client.request(INSERT_JOB_WITH_LINES, { job: jobInput });
    // Upload AFTER job creation to include job id in filename
    // NOTE(review): fire-and-forget by design — the HTTP response is not delayed by
    // S3 and upload failures are only logged. The key is built from rq alone; verify
    // buildEstimateXmlKey actually incorporates the job id as the comment claims.
    (async () => {
      try {
        const key = buildEstimateXmlKey(rq);
        await uploadFileToS3({
          bucketName: ESTIMATE_XML_BUCKET,
          key,
          content: rawXml || "",
          contentType: "application/xml"
        });
        logger.log("parts-estimate-xml-uploaded", "info", shopId, newJob.id, { key, bytes: rawXml?.length || 0 });
      } catch (e) {
        logger.log("parts-estimate-xml-upload-failed", "warn", shopId, null, { error: e?.message });
      }
    })();
    return res.status(200).json({ success: true, jobId: newJob.id });
  } catch (err) {
    logger.log("parts-route-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = vehicleDamageEstimateAddRq;

View File

@@ -0,0 +1,366 @@
// no-dd-sa:javascript-code-style/assignment-name
// Handler for VehicleDamageEstimateChgRq
const client = require("../../../graphql-client/graphql-client").client;
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const opCodes = require("./lib/opCodes.json");
const { uploadFileToS3 } = require("../../../utils/s3");
const InstanceMgr = require("../../../utils/instanceMgr").default;
const {
GET_JOB_BY_ID,
UPDATE_JOB_BY_ID,
SOFT_DELETE_JOBLINES_BY_IDS,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
UPDATE_JOBLINE_BY_PK,
INSERT_JOBLINES
} = require("../partsManagement.queries");
/**
 * Finds a job by shop ID and job ID.
 * @param {string} shopId - The bodyshop UUID.
 * @param {string} jobId - The job UUID.
 * @param {object} logger - Logger instance.
 * @returns {Promise<object|null>} The job row, or null when missing or on lookup failure.
 */
const findJob = async (shopId, jobId, logger) => {
  try {
    const response = await client.request(GET_JOB_BY_ID, { shopid: shopId, jobid: jobId });
    const [job] = response.jobs ?? [];
    return job || null;
  } catch (err) {
    logger.log("parts-job-lookup-failed", "error", null, null, { error: err });
    return null;
  }
};
/**
 * Extracts updated job-level fields from the change request payload.
 * parts_tax_rates and driveable are only included when present in the
 * request, mirroring AddRq behavior.
 * @param {object} rq - The VehicleDamageEstimateChgRq object.
 * @returns {object} jobs_set_input fields to apply.
 */
const extractUpdatedJobData = (rq) => {
  const documentInfo = rq.DocumentInfo || {};
  const claimInfo = rq.ClaimInfo || {};
  const updates = {
    comment: documentInfo.Comment || null,
    clm_no: claimInfo.ClaimNum || null,
    // Status is deliberately not updated here to avoid overwriting with 'Auth Cust'.
    policy_no: claimInfo.PolicyInfo?.PolicyInfo?.PolicyNum || claimInfo.PolicyInfo?.PolicyNum || null
  };
  if (rq.ProfileInfo) {
    updates.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
  }
  const drivable = rq.VehicleInfo?.Condition?.DrivableInd;
  if (drivable !== undefined) {
    updates.driveable = Boolean(drivable);
  }
  return updates;
};
/**
 * Build jobline payloads for updates/inserts (no split between parts & labor).
 * - Refinish labor is recorded in the lbr_* secondary fields and its amount is
 *   aggregated into lbr_amt.
 * - SUBLET-only lines become a PAS line with act_price = SubletAmount.
 * - Notes are merged with the current DB value, keyed by unq_seq.
 * @param {object} addsChgs - The AddsChgs node from the change request.
 * @param {string} jobId - The job UUID the lines belong to.
 * @param {object} currentJobLineNotes - Map of unq_seq -> existing DB notes.
 * @returns {object[]} Array of jobline payloads.
 */
const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {}) => {
  const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}];
  const out = [];
  for (const line of linesIn) {
    if (!line || Object.keys(line).length === 0) continue;
    const partInfo = line.PartInfo || {};
    const laborInfo = line.LaborInfo || {};
    const refinishInfo = line.RefinishLaborInfo || {};
    const subletInfo = line.SubletInfo || {};
    const lineOut = {
      jobid: jobId,
      line_no: parseInt(line.LineNum || 0, 10),
      unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
      status: line.LineStatusCode || null,
      line_desc: line.LineDesc || null,
      // Lines arriving from the DMS are never manual; a line only becomes manual
      // once it is edited inside our system (ManualLineInd is intentionally ignored).
      manual_line: false
    };
    // --- Notes merge: keep existing DB notes, append the new memo when distinct ---
    const unqSeq = lineOut.unq_seq;
    const currentNotes = currentJobLineNotes?.[unqSeq] || null;
    const newNotes = line.LineMemo || null;
    if (newNotes && currentNotes) {
      if (currentNotes === newNotes || currentNotes.includes(newNotes)) lineOut.notes = currentNotes;
      else lineOut.notes = `${currentNotes} | ${newNotes}`;
    } else if (newNotes) lineOut.notes = newNotes;
    else if (currentNotes) lineOut.notes = currentNotes;
    else lineOut.notes = null;
    // --- end notes merge ---
    // Parts (preferred) or sublet (fallback when no PartInfo)
    const hasPart = Object.keys(partInfo).length > 0;
    const hasSublet = Object.keys(subletInfo).length > 0;
    if (hasPart) {
      // Quantity defaults to 1 when missing, zero, or non-numeric.
      lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1;
      lineOut.oem_partno = partInfo.OEMPartNum;
      lineOut.alt_partno = partInfo?.NonOEM?.NonOEMPartNum;
      lineOut.part_type = partInfo.PartType ? String(partInfo.PartType).toUpperCase() : null;
      lineOut.act_price = parseFloat(partInfo?.PartPrice || 0);
      lineOut.db_price = parseFloat(partInfo?.OEMPartPrice || 0);
      if (partInfo.TaxableInd !== undefined) {
        // Accepts true / 1 / "1" / "y"/"Y" as taxable.
        const t = partInfo.TaxableInd;
        lineOut.tax_part = t === true || t === 1 || t === "1" || (typeof t === "string" && t.toUpperCase() === "Y");
      }
    } else if (hasSublet) {
      const amt = parseFloat(subletInfo.SubletAmount || 0);
      lineOut.part_type = "PAS"; // sublet recorded as parts-as-service
      lineOut.part_qty = 1;
      lineOut.act_price = isNaN(amt) ? 0 : amt;
    }
    // Primary labor
    const hrs = parseFloat(laborInfo.LaborHours || 0);
    const amt = parseFloat(laborInfo.LaborAmt || 0);
    const hasLabor =
      (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
      (!isNaN(hrs) && hrs !== 0) ||
      (!isNaN(amt) && amt !== 0);
    if (hasLabor) {
      lineOut.mod_lbr_ty = laborInfo.LaborType || null;
      lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs;
      const opCodeKey =
        typeof laborInfo.LaborOperation === "string" ? laborInfo.LaborOperation.trim().toUpperCase() : null;
      lineOut.op_code_desc = opCodeKey && opCodes?.[opCodeKey]?.desc ? opCodes[opCodeKey].desc : null;
      lineOut.lbr_amt = isNaN(amt) ? 0 : amt;
    }
    // Refinish (secondary fields; amount aggregated into lbr_amt)
    const rHrs = parseFloat(refinishInfo.LaborHours || 0);
    const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
    const hasRefinish =
      Object.keys(refinishInfo).length > 0 &&
      ((refinishInfo.LaborType && String(refinishInfo.LaborType).length > 0) ||
        !isNaN(rHrs) ||
        !isNaN(rAmt) ||
        !!refinishInfo.LaborOperation);
    if (hasRefinish) {
      // NOTE(review): this handler stores labor type/hours/operation VALUES in the
      // *_j columns, while AddRq stores boolean judgment indicators in the same
      // columns — confirm which shape the schema expects and align the handlers.
      lineOut.lbr_typ_j = refinishInfo.LaborType || "LAR";
      lineOut.lbr_hrs_j = isNaN(rHrs) ? 0 : rHrs;
      lineOut.lbr_op_j = refinishInfo.LaborOperation || null;
      if (!isNaN(rAmt)) lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
      if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum;
      if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum;
    }
    out.push(lineOut);
  }
  return out;
};
/**
 * Expand deletion IDs to include derived labor/refinish offsets.
 * Each incoming UniqueSequenceNum also removes its derived +400000 and
 * +500000 counterparts; the result is de-duplicated.
 * @param {object} deletions - The Deletions node from the change request.
 * @returns {number[]} Unique sequence numbers to soft-delete.
 */
const extractDeletions = (deletions = {}) => {
  const raw = deletions.DamageLineInfo;
  const entries = Array.isArray(raw) ? raw : [raw || {}];
  const seqs = entries
    .map((entry) => parseInt(entry.UniqueSequenceNum, 10))
    .filter(Number.isInteger)
    .flatMap((seq) => [seq, seq + 400000, seq + 500000]);
  return [...new Set(seqs)];
};
// S3 bucket + key builder (mirrors AddRq but with changeRequest prefix)
// In development XML is archived to a fixed dev bucket; otherwise the bucket
// name is resolved per deployment instance (imex vs rome) via InstanceMgr.
const ESTIMATE_XML_BUCKET =
  process.env?.NODE_ENV === "development"
    ? "parts-estimates"
    : InstanceMgr({
        imex: `imex-webest-xml`,
        rome: `rome-webest-xml`
      });
/**
 * Builds the S3 object key for archiving a change-request XML payload.
 * Colons in the ISO timestamp are replaced so the key is S3-friendly.
 * @param {object} rq - The VehicleDamageEstimateChgRq object.
 * @returns {string} Key of the form changeRequest/<shopId>/<jobId>/<timestamp>.xml
 */
const buildEstimateXmlKey = (rq) => {
  const { ShopID: shopId, JobID: jobId } = rq;
  const timestamp = new Date().toISOString().replaceAll(":", "-");
  return ["changeRequest", shopId, jobId, `${timestamp}.xml`].join("/");
};
/**
 * Convert a full jobline object into a jobs_set_input for update_by_pk.
 * The immutable identity fields (jobid, unq_seq) are omitted; every mutable
 * field is copied through, keeping keys present even when undefined.
 */
const JOBLINE_SET_FIELDS = [
  "line_no",
  "status",
  "line_desc",
  "manual_line",
  "notes",
  "part_qty",
  "oem_partno",
  "alt_partno",
  "part_type",
  "act_price",
  "db_price",
  "tax_part",
  "mod_lbr_ty",
  "mod_lb_hrs",
  "op_code_desc",
  "lbr_amt",
  "lbr_typ_j",
  "lbr_hrs_j",
  "lbr_op_j",
  "paint_stg",
  "paint_tone"
];
const toJoblineSetInput = (jl) => Object.fromEntries(JOBLINE_SET_FIELDS.map((field) => [field, jl[field]]));
/**
 * Handles VehicleDamageEstimateChgRq requests:
 * - Update core job fields
 * - For lines: update by PK if existing; otherwise bulk insert
 * - Soft-delete only explicit deletions (exclude any updated seqs)
 * @param {object} req - The HTTP request object.
 * @param {object} res - The HTTP response object.
 * @returns {Promise<void>}
 */
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
  const { logger } = req;
  // Preserve the raw XML exactly as received (string or Buffer) for S3 archival.
  const rawXml = typeof req.body === "string" ? req.body : Buffer.isBuffer(req.body) ? req.body.toString("utf8") : "";
  try {
    const payload = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq);
    const jobId = rq.JobID;
    const shopId = rq.ShopID;
    // Fire-and-forget archival on valid request
    // (the response is not delayed by S3; upload failures are only logged)
    (async () => {
      try {
        const key = buildEstimateXmlKey(rq);
        await uploadFileToS3({
          bucketName: ESTIMATE_XML_BUCKET,
          key,
          content: rawXml || "",
          contentType: "application/xml"
        });
        logger.log("parts-estimate-xml-uploaded", "info", jobId, null, { key, bytes: rawXml?.length || 0 });
      } catch (e) {
        logger.log("parts-estimate-xml-upload-failed", "warn", jobId, null, { error: e?.message });
      }
    })();
    const job = await findJob(shopId, jobId, logger);
    if (!job) return res.status(404).send("Job not found");
    // --- Updated seqs from incoming changes ---
    const linesIn = Array.isArray(rq.AddsChgs?.DamageLineInfo)
      ? rq.AddsChgs.DamageLineInfo
      : [rq.AddsChgs?.DamageLineInfo || {}];
    const updatedSeqs = Array.from(
      new Set((linesIn || []).map((l) => parseInt(l?.UniqueSequenceNum || 0, 10)).filter((v) => Number.isInteger(v)))
    );
    // --- Fetch current notes for merge ---
    // (extractUpdatedJobLines appends new memos to the existing DB notes)
    let currentJobLineNotes = {};
    if (updatedSeqs.length > 0) {
      const resp = await client.request(GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
      if (resp?.joblines) {
        for (const jl of resp.joblines) currentJobLineNotes[jl.unq_seq] = jl.notes;
      }
    }
    const updatedJobData = extractUpdatedJobData(rq);
    const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id, currentJobLineNotes);
    // --- Look up existing rows (by natural key) to decide update vs insert ---
    let existingIdByUnqSeq = {};
    if (updatedSeqs.length > 0) {
      const existing = await client.request(GET_JOBLINE_IDS_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
      if (existing?.joblines) {
        for (const row of existing.joblines) existingIdByUnqSeq[row.unq_seq] = row.id;
      }
    }
    // Partition incoming lines: known unq_seq -> per-PK update, unknown -> bulk insert.
    const toUpdate = [];
    const toInsert = [];
    for (const jl of updatedLines) {
      const id = existingIdByUnqSeq[jl.unq_seq];
      if (id) toUpdate.push({ id, _set: toJoblineSetInput(jl) });
      else toInsert.push(jl);
    }
    // Build deletions list and exclude any seqs we are updating (avoid accidental removal)
    const deletedLineIdsAll = extractDeletions(rq.Deletions);
    const deletionSeqs = deletedLineIdsAll.filter((u) => !updatedSeqs.includes(u));
    // Mutations:
    const updateJobPromise = client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
    const softDeletePromise = deletionSeqs.length
      ? client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: deletionSeqs })
      : Promise.resolve({});
    // Update each existing row by primary key (parallelized)
    const perRowUpdatesPromise =
      toUpdate.length > 0
        ? Promise.all(toUpdate.map(({ id, _set }) => client.request(UPDATE_JOBLINE_BY_PK, { id, jl: _set })))
        : Promise.resolve([]);
    // Insert brand-new rows in bulk
    const insertPromise =
      toInsert.length > 0 ? client.request(INSERT_JOBLINES, { joblines: toInsert }) : Promise.resolve({});
    // All mutations run concurrently; any rejection falls through to the catch below.
    await Promise.all([updateJobPromise, softDeletePromise, perRowUpdatesPromise, insertPromise]);
    logger.log("parts-job-changed", "info", job.id, null);
    return res.status(200).json({ success: true, jobId: job.id });
  } catch (err) {
    logger.log("parts-chgrq-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = partsManagementVehicleDamageEstimateChgRq;

View File

@@ -0,0 +1,333 @@
// GraphQL Queries and Mutations
// Fetches a shop's configured RO status list (used to pick a default job status).
const GET_BODYSHOP_STATUS = `
  query GetBodyshopStatus($id: uuid!) {
    bodyshops_by_pk(id: $id) {
      md_ro_statuses
    }
  }
`;
// Finds an existing vehicle for a shop by exact VIN match (first hit only).
const GET_VEHICLE_BY_SHOP_VIN = `
  query GetVehicleByShopVin($shopid: uuid!, $v_vin: String!) {
    vehicles(where: { shopid: { _eq: $shopid }, v_vin: { _eq: $v_vin } }, limit: 1) {
      id
    }
  }
`;
// Inserts a single owner row and returns its id.
const INSERT_OWNER = `
  mutation InsertOwner($owner: owners_insert_input!) {
    insert_owners_one(object: $owner) {
      id
    }
  }
`;
// Inserts a job along with its nested joblines (and optionally a nested vehicle).
const INSERT_JOB_WITH_LINES = `
  mutation InsertJob($job: jobs_insert_input!) {
    insert_jobs_one(object: $job) {
      id
      joblines { id unq_seq }
    }
  }
`;
// Finds the most recently created job for a shop by claim number.
const GET_JOB_BY_CLAIM = `
  query GetJobByClaim($shopid: uuid!, $clm_no: String!) {
    jobs(
      where: { shopid: { _eq: $shopid }, clm_no: { _eq: $clm_no } }
      order_by: { created_at: desc }
      limit: 1
    ) {
      id
    }
  }
`;
// Fetches a job by id, scoped to a shop.
const GET_JOB_BY_ID = `
  query GetJobByID($shopid: uuid!, $jobid: uuid!) {
    jobs(
      where: { shopid: { _eq: $shopid }, id: { _eq: $jobid } }
      order_by: { created_at: desc }
      limit: 1
    ) {
      id
    }
  }
`;
// Applies a jobs_set_input patch to a single job by primary key.
const UPDATE_JOB_BY_ID = `
  mutation UpdateJobById($id: uuid!, $job: jobs_set_input!) {
    update_jobs_by_pk(pk_columns: { id: $id }, _set: $job) {
      id
    }
  }
`;
// Soft delete joblines by marking removed=true instead of hard-deleting
const SOFT_DELETE_JOBLINES_BY_IDS = `
  mutation SoftDeleteJoblinesByIds($jobid: uuid!, $unqSeqs: [Int!]!) {
    update_joblines(
      where: { jobid: { _eq: $jobid }, unq_seq: { _in: $unqSeqs } },
      _set: { removed: true }
    ) {
      affected_rows
    }
  }
`;
// Bulk-inserts new jobline rows.
const INSERT_JOBLINES = `
  mutation InsertJoblines($joblines: [joblines_insert_input!]!) {
    insert_joblines(objects: $joblines) {
      affected_rows
    }
  }
`;
// Checks whether an external shop id is already registered to a bodyshop.
const CHECK_EXTERNAL_SHOP_ID = `
  query CHECK_KEY($key: String!) {
    bodyshops(where: { external_shop_id: { _eq: $key } }) {
      external_shop_id
    }
  }
`;
// Creates a new bodyshop and returns its id.
const CREATE_SHOP = `
  mutation CREATE_SHOP($bs: bodyshops_insert_input!) {
    insert_bodyshops_one(object: $bs) { id }
  }
`;
// Removes every vendor attached to a shop (used during shop teardown).
const DELETE_VENDORS_BY_SHOP = `
  mutation DELETE_VENDORS($shopId: uuid!) {
    delete_vendors(where: { bodyshopid: { _eq: $shopId } }) {
      affected_rows
    }
  }
`;
// Hard-deletes a bodyshop by primary key.
const DELETE_SHOP = `
  mutation DELETE_SHOP($id: uuid!) {
    delete_bodyshops_by_pk(id: $id) { id }
  }
`;
// Creates a user row; returns the auth id (aliased as id) and email.
const CREATE_USER = `
  mutation CREATE_USER($u: users_insert_input!) {
    insert_users_one(object: $u) {
      id: authid
      email
    }
  }
`;
// Fetches a bodyshop's external id and display name.
const GET_BODYSHOP = `
  query GetBodyshop($id: uuid!) {
    bodyshops_by_pk(id: $id) {
      external_shop_id
      shopname
    }
  }
`;
// Lists the users associated with a shop.
const GET_ASSOCIATED_USERS = `
  query GetAssociatedUsers($shopId: uuid!) {
    associations(where: {shopid: {_eq: $shopId}}) {
      user {
        authid
        email
      }
    }
  }
`;
const DELETE_ASSOCIATIONS_BY_SHOP = `
mutation DeleteAssociationsByShop($shopId: uuid!) {
delete_associations(where: {shopid: {_eq: $shopId}}) {
affected_rows
}
}
`;
const GET_USER_ASSOCIATIONS_COUNT = `
query GetUserAssociationsCount($userEmail: String!) {
associations_aggregate(where: {useremail: {_eq: $userEmail}}) {
aggregate {
count
}
}
}
`;
const DELETE_USER = `
mutation DeleteUser($email: String!) {
delete_users(where: {email: {_eq: $email}}) {
affected_rows
}
}
`;
const GET_VENDORS = `
query GetVendors($shopId: uuid!) {
vendors(where: {bodyshopid: {_eq: $shopId}}) {
name
}
}
`;
const GET_JOBS_BY_SHOP = `
query GetJobsByShop($shopId: uuid!) {
jobs(where: {shopid: {_eq: $shopId}}) {
id
}
}
`;
// Hard-delete every jobline belonging to any of the given jobs.
const DELETE_JOBLINES_BY_JOB_IDS = `
  mutation DeleteJoblinesByJobIds($jobIds: [uuid!]!) {
    delete_joblines(where: {jobid: {_in: $jobIds}}) {
      affected_rows
    }
  }
`;
// Hard-delete the given jobs by ID.
const DELETE_JOBS_BY_IDS = `
  mutation DeleteJobsByIds($jobIds: [uuid!]!) {
    delete_jobs(where: {id: {_in: $jobIds}}) {
      affected_rows
    }
  }
`;
// Purge all audit-trail rows recorded against a shop.
const DELETE_AUDIT_TRAIL_BY_SHOP = `
  mutation DeleteAuditTrailByShop($shopId: uuid!) {
    delete_audit_trail(where: {bodyshopid: {_eq: $shopId}}) {
      affected_rows
    }
  }
`;
// Read existing notes for the given (jobid, unq_seq) lines, skipping soft-deleted rows.
const GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ = `
  query GetJoblinesNotesByJobIdUnqSeq($jobid: uuid!, $unqSeqs: [Int!]!) {
    joblines(where: { jobid: { _eq: $jobid }, unq_seq: { _in: $unqSeqs }, removed: { _neq: true } }) {
      unq_seq
      notes
    }
  }
`;
// Clear task links to parts orders for all jobs in a shop to avoid FK violations when deleting parts orders
const CLEAR_TASKS_PARTSORDER_LINKS_BY_JOBIDS = `
  mutation ClearTasksPartsOrderLinks($jobIds: [uuid!]!) {
    update_tasks(
      where: { parts_order: { jobid: { _in: $jobIds } } },
      _set: { partsorderid: null }
    ) {
      affected_rows
    }
  }
`;
// Delete parts order lines where the parent order belongs to any of the provided job IDs
const DELETE_PARTS_ORDER_LINES_BY_JOB_IDS = `
  mutation DeletePartsOrderLinesByJobIds($jobIds: [uuid!]!) {
    delete_parts_order_lines(where: { parts_order: { jobid: { _in: $jobIds } } }) {
      affected_rows
    }
  }
`;
// Delete parts orders for the given job IDs
const DELETE_PARTS_ORDERS_BY_JOB_IDS = `
  mutation DeletePartsOrdersByJobIds($jobIds: [uuid!]!) {
    delete_parts_orders(where: { jobid: { _in: $jobIds } }) {
      affected_rows
    }
  }
`;
// Upsert joblines keyed on the joblines_jobid_unq_seq_key constraint:
// new rows are inserted; on conflict only the listed columns are overwritten
// (notably, `removed` is NOT in update_columns, so soft-delete flags survive).
const UPSERT_JOBLINES = `
  mutation UpsertJoblines($joblines: [joblines_insert_input!]!) {
    insert_joblines(
      objects: $joblines,
      on_conflict: {
        constraint: joblines_jobid_unq_seq_key,
        update_columns: [
          status,
          line_desc,
          notes,
          manual_line,
          part_qty,
          oem_partno,
          alt_partno,
          part_type,
          act_price,
          db_price,
          tax_part,
          mod_lbr_ty,
          mod_lb_hrs,
          op_code_desc,
          lbr_amt,
          lbr_typ_j,
          lbr_hrs_j,
          lbr_op_j,
          paint_stg,
          paint_tone
        ]
      }
    ) {
      affected_rows
    }
  }
`;
// Get jobline IDs for the incoming unq_seq values (only non-removed)
const GET_JOBLINE_IDS_BY_JOBID_UNQSEQ = `
  query GetJoblineIdsByJobIdUnqSeq($jobid: uuid!, $unqSeqs: [Int!]!) {
    joblines(where: { jobid: { _eq: $jobid }, unq_seq: { _in: $unqSeqs }, removed: { _neq: true } }) {
      id
      unq_seq
    }
  }
`;
// Update a single jobline by primary key
const UPDATE_JOBLINE_BY_PK = `
  mutation UpdateJoblineByPk($id: uuid!, $jl: joblines_set_input!) {
    update_joblines_by_pk(pk_columns: { id: $id }, _set: $jl) { id }
  }
`;
// Public export of the raw GraphQL query/mutation strings defined above.
// Each value is a plain string; callers pass them to a GraphQL client
// together with their variables. Key order is kept as-is.
module.exports = {
  GET_BODYSHOP_STATUS,
  GET_VEHICLE_BY_SHOP_VIN,
  INSERT_OWNER,
  INSERT_JOB_WITH_LINES,
  GET_JOB_BY_CLAIM,
  UPDATE_JOB_BY_ID,
  SOFT_DELETE_JOBLINES_BY_IDS,
  INSERT_JOBLINES,
  CHECK_EXTERNAL_SHOP_ID,
  CREATE_SHOP,
  DELETE_VENDORS_BY_SHOP,
  DELETE_SHOP,
  CREATE_USER,
  GET_BODYSHOP,
  GET_ASSOCIATED_USERS,
  DELETE_ASSOCIATIONS_BY_SHOP,
  GET_USER_ASSOCIATIONS_COUNT,
  DELETE_USER,
  GET_VENDORS,
  GET_JOBS_BY_SHOP,
  DELETE_JOBLINES_BY_JOB_IDS,
  DELETE_JOBS_BY_IDS,
  DELETE_AUDIT_TRAIL_BY_SHOP,
  GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
  GET_JOB_BY_ID,
  CLEAR_TASKS_PARTSORDER_LINKS_BY_JOBIDS,
  DELETE_PARTS_ORDER_LINES_BY_JOB_IDS,
  DELETE_PARTS_ORDERS_BY_JOB_IDS,
  UPSERT_JOBLINES,
  GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
  UPDATE_JOBLINE_BY_PK
};

View File

@@ -0,0 +1,54 @@
const xml2js = require("xml2js");
/**
 * Parses an XML string into a JavaScript object.
 *
 * Namespace prefixes are stripped from both tag and attribute names, and
 * single children are not wrapped in arrays (explicitArray: false).
 *
 * @param {string} xml - The XML string to parse.
 * @param {object} logger - Logger instance exposing `log(tag, level, a, b, meta)`.
 * @returns {Promise<object>} The parsed XML object.
 * @throws {Error} "Invalid XML" when parsing fails; the original xml2js
 *   error is chained on `error.cause` so the root cause is not lost.
 */
const parseXml = async (xml, logger) => {
  try {
    return await xml2js.parseStringPromise(xml, {
      explicitArray: false,
      tagNameProcessors: [xml2js.processors.stripPrefix],
      attrNameProcessors: [xml2js.processors.stripPrefix]
    });
  } catch (err) {
    logger.log("parts-xml-parse-error", "error", null, null, { error: err });
    // Keep the stable "Invalid XML" message callers may match on, but chain
    // the underlying parser error for diagnostics (the `cause` option is
    // silently ignored on Node versions that predate it).
    throw new Error("Invalid XML", { cause: err });
  }
};
/**
 * Recursively collapse xml2js-style wrapper nodes into plain values.
 * A node of shape { _: "value" } or { _: "value", $: {...attrs} } is reduced
 * to just "value" (attributes are intentionally discarded); arrays and other
 * objects are normalized element by element.
 * @param {*} node - Parsed XML value (object, array, or primitive)
 * @returns {*} The same structure with wrapper nodes flattened
 */
const normalizeXmlObject = (node) => {
  if (Array.isArray(node)) {
    return node.map((item) => normalizeXmlObject(item));
  }
  if (node === null || typeof node !== "object") {
    // Primitives (strings, numbers, booleans, null) pass through untouched.
    return node;
  }
  const keyCount = Object.keys(node).length;
  const isTextOnly = keyCount === 1 && "_" in node;
  const isTextWithAttrs = keyCount === 2 && "_" in node && "$" in node;
  if (isTextOnly || isTextWithAttrs) {
    // Unwrap {_:"value"} / {_:"value",$:{...}} down to the text value.
    return normalizeXmlObject(node._);
  }
  const normalized = {};
  for (const [key, value] of Object.entries(node)) {
    normalized[key] = normalizeXmlObject(value);
  }
  return normalized;
};
// Public surface of this XML-parsing utility module.
module.exports = {
  parseXml,
  normalizeXmlObject
};

View File

@@ -0,0 +1,340 @@
<?xml version="1.0" encoding="UTF-8"?>
<VehicleDamageEstimateAddRq xmlns="http://www.cieca.com/BMS">
<!-- Shop identifier -->
<ShopID>71f8494c-89f0-43e0-8eb2-820b52d723bc</ShopID>
<!-- Request & Claim -->
<RqUID>17e5ccc4-cdfb-4cf3-a08d-ecfa8d145d6f</RqUID>
<RefClaimNum>CLM123</RefClaimNum>
<!-- Document metadata -->
<DocumentInfo>
<DocumentVer>
<DocumentVerCode>SV</DocumentVerCode>
<DocumentVerNum>1</DocumentVerNum>
</DocumentVer>
<DocumentVer>
<DocumentVerCode>VN</DocumentVerCode>
<DocumentVerNum>1</DocumentVerNum>
</DocumentVer>
<ReferenceInfo>
<OtherReferenceInfo>
<OtherReferenceName>RO Number</OtherReferenceName>
<OtherRefNum>RO-987</OtherRefNum>
</OtherReferenceInfo>
<OtherReferenceInfo>
<OtherReferenceName>Job UUID</OtherReferenceName>
<OtherRefNum>abcde-12345-uuid</OtherRefNum>
</OtherReferenceInfo>
</ReferenceInfo>
<Comment>Include OEM where possible</Comment>
<TransmitDateTime>2025-06-18T12:00:00Z</TransmitDateTime>
</DocumentInfo>
<!-- Event classification -->
<EventInfo>
<AssignmentEvent>
<AssignmentNumber>1</AssignmentNumber>
<AssignmentType>Estimate</AssignmentType>
<AssignmentDate>2025-06-18T11:30:00Z</AssignmentDate>
<CreateDateTime>2025-06-18T11:29:00Z</CreateDateTime>
</AssignmentEvent>
<RepairEvent>
<TargetCompletionDateTime>2025-06-25T17:00:00Z</TargetCompletionDateTime>
<RequestedPickUpDateTime>2025-06-22T09:00:00Z</RequestedPickUpDateTime>
</RepairEvent>
</EventInfo>
<!-- Claim & Policy -->
<ClaimInfo>
<ClaimNum>CLM123</ClaimNum>
<ClaimStatus>Open</ClaimStatus>
<PolicyInfo>
<PolicyNum>POL456</PolicyNum>
<CoverageInfo>
<Coverage>
<DeductibleInfo>
<DeductibleAmt>500.00</DeductibleAmt>
</DeductibleInfo>
</Coverage>
</CoverageInfo>
</PolicyInfo>
<Cieca_ttl>1500.50</Cieca_ttl>
</ClaimInfo>
<!-- Administrative Parties -->
<AdminInfo>
<!-- Owner -->
<Owner>
<Party>
<PersonInfo>
<PersonName>
<FirstName>John</FirstName>
<LastName>Doe</LastName>
</PersonName>
<Communications>
<CommQualifier>AL</CommQualifier>
<Address>
<Address1>100 Main St</Address1>
<City>Metropolis</City>
<StateProvince>NY</StateProvince>
<PostalCode>10001</PostalCode>
<Country>USA</Country>
</Address>
</Communications>
</PersonInfo>
<ContactInfo>
<Communications>
<CommQualifier>CP</CommQualifier>
<CommPhone>5551234567</CommPhone>
</Communications>
<Communications>
<CommQualifier>EM</CommQualifier>
<CommEmail>john.doe@example.com</CommEmail>
</Communications>
</ContactInfo>
</Party>
</Owner>
<!-- Estimator -->
<Estimator>
<Party>
<PersonInfo>
<PersonName>
<FirstName>Jane</FirstName>
<LastName>Smith</LastName>
</PersonName>
</PersonInfo>
<ContactInfo>
<Communications>
<CommQualifier>EM</CommQualifier>
<CommEmail>jane.smith@example.com</CommEmail>
</Communications>
</ContactInfo>
</Party>
<Affiliation>EST001</Affiliation>
</Estimator>
<!-- Repair Facility -->
<RepairFacility>
<Party>
<OrgInfo>
<CompanyName>AutoFix</CompanyName>
<Communications>
<CommQualifier>AL</CommQualifier>
<Address>
<Address1>200 Repair Rd</Address1>
<City>Mechanicsburg</City>
<StateProvince>PA</StateProvince>
<PostalCode>17055</PostalCode>
<Country>USA</Country>
</Address>
</Communications>
</OrgInfo>
<ContactInfo>
<Communications>
<CommQualifier>WP</CommQualifier>
<CommPhone>5559876543</CommPhone>
</Communications>
<Communications>
<CommQualifier>FX</CommQualifier>
<CommPhone>5559876544</CommPhone>
</Communications>
</ContactInfo>
</Party>
</RepairFacility>
<!-- Adjuster -->
<Adjuster>
<Party>
<PersonInfo>
<PersonName>
<FirstName>Alice</FirstName>
<LastName>Johnson</LastName>
</PersonName>
</PersonInfo>
</Party>
</Adjuster>
<!-- Supplier -->
<Supplier>
<Party>
<OrgInfo>
<CompanyName>PartsRUs</CompanyName>
</OrgInfo>
</Party>
</Supplier>
<!-- Sender -->
<Sender>
<Party>
<OrgInfo>
<CompanyName>XmlSender</CompanyName>
</OrgInfo>
</Party>
</Sender>
<!-- Other Admin Party -->
<OtherParty>
<Party>
<OrgInfo>
<CompanyName>ThirdPartyAdmin</CompanyName>
</OrgInfo>
</Party>
<AdminType>TPA</AdminType>
</OtherParty>
</AdminInfo>
<!-- (Optional) Rates -->
<ProfileInfo>
<RateInfo>
<RateType>LABOR</RateType>
<Rate>100.0</Rate>
<RateTierInfo>
<TierNum>1</TierNum>
<Percentage>50.0</Percentage>
</RateTierInfo>
</RateInfo>
</ProfileInfo>
<!-- Vehicle details -->
<VehicleInfo>
<VINInfo>
<VINNum>1HGCM82633A004352</VINNum>
</VINInfo>
<License>
<LicensePlateNum>ABC1234</LicensePlateNum>
<LicensePlateStateProvince>CA</LicensePlateStateProvince>
</License>
<VehicleDesc>
<ModelYear>2020</ModelYear>
<MakeDesc>Honda</MakeDesc>
<ModelName>Accord</ModelName>
<BodyStyle>Sedan</BodyStyle>
<EngineDesc>2.0L</EngineDesc>
<ProductionDate>2019-10-10</ProductionDate>
<SubModelDesc>Sport</SubModelDesc>
<FuelType>Gasoline</FuelType>
</VehicleDesc>
<Paint>
<Exterior>
<ColorName>Blue</ColorName>
</Exterior>
</Paint>
<Condition>
<DrivableInd>Y</DrivableInd>
</Condition>
</VehicleInfo>
<!-- Damage line with non-OEM part -->
<DamageLineInfo>
<LineNum>1</LineNum>
<UniqueSequenceNum>1001</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Front Bumper</LineDesc>
<PartInfo>
<PartType>PAA</PartType>
<Quantity>1</Quantity>
<PartPrice>200.00</PartPrice>
<OEMPartNum>OEM123</OEMPartNum>
<NonOEM>
<NonOEMPartNum>NONOEM123</NonOEMPartNum>
<NonOEMPartPrice>180.00</NonOEMPartPrice>
<SupplierRefNum>4c2ff2c4-af2b-4a5f-970e-3e026f0bbf9f</SupplierRefNum>
<PartSelectedInd>1</PartSelectedInd>
</NonOEM>
<TaxableInd>1</TaxableInd>
<AfterMarketUsage>OV</AfterMarketUsage>
<CertificationType>C</CertificationType>
<PriceJudgmentInd>0</PriceJudgmentInd>
<GlassPartInd>0</GlassPartInd>
<PriceInclInd>0</PriceInclInd>
<OrderByApplicationInd>false</OrderByApplicationInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP1</LaborOperation>
<LaborHours>2.5</LaborHours>
<LaborAmt>250.00</LaborAmt>
</LaborInfo>
<LineMemo>Replace bumper</LineMemo>
</DamageLineInfo>
<!-- Damage line with glass part
<DamageLineInfo>
<LineNum>2</LineNum>
<UniqueSequenceNum>1002</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>0</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Windshield</LineDesc>
<PartInfo>
<PartType>PAG</PartType>
<Quantity>1</Quantity>
<PartPrice>572.06</PartPrice>
<OEMPartNum>5610104082</OEMPartNum>
<NonOEM>
<NonOEMPartNum>5610104082</NonOEMPartNum>
<NonOEMPartPrice>572.06</NonOEMPartPrice>
<SupplierRefNum>VEND2</SupplierRefNum>
<PartSelectedInd>1</PartSelectedInd>
</NonOEM>
<TaxableInd>1</TaxableInd>
<AfterMarketUsage>NU</AfterMarketUsage>
<GlassPartInd>1</GlassPartInd>
<PriceJudgmentInd>0</PriceJudgmentInd>
<PriceInclInd>0</PriceInclInd>
<OrderByApplicationInd>false</OrderByApplicationInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP11</LaborOperation>
<LaborHours>3.7</LaborHours>
<LaborAmt>370.00</LaborAmt>
</LaborInfo>
<LineMemo>Replace windshield</LineMemo>
</DamageLineInfo> -->
<!-- Damage line with sublet info -->
<DamageLineInfo>
<LineNum>3</LineNum>
<UniqueSequenceNum>1003</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Sublet Upholstery Repair</LineDesc>
<SubletInfo>
<SubletVendorName>UpholsteryCo</SubletVendorName>
<SubletAmount>200.00</SubletAmount>
<SubletLaborHours>2.0</SubletLaborHours>
</SubletInfo>
<LineMemo>Stitching match required</LineMemo>
</DamageLineInfo>
<!-- Damage line with labor-only work -->
<DamageLineInfo>
<LineNum>4</LineNum>
<UniqueSequenceNum>1004</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Dent Repair Door</LineDesc>
<LaborInfo>
<LaborType>LAD</LaborType>
<LaborOperation>OP3</LaborOperation>
<LaborHours>1.5</LaborHours>
<LaborAmt>150.00</LaborAmt>
</LaborInfo>
<LineMemo>Requires touch-up</LineMemo>
</DamageLineInfo>
</VehicleDamageEstimateAddRq>

View File

@@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<VehicleDamageEstimateChgRq xmlns="http://www.cieca.com/BMS">
<ShopID>71f8494c-89f0-43e0-8eb2-820b52d723bc</ShopID>
<RqUID>chg-0001-uuid</RqUID>
<ClaimInfo>
<ClaimNum>CLM123</ClaimNum>
<ClaimStatus>In Progress</ClaimStatus>
<PolicyInfo>
<PolicyNum>POL456</PolicyNum>
</PolicyInfo>
</ClaimInfo>
<DocumentInfo>
<Comment>Revised bumper labor hours and added notes</Comment>
</DocumentInfo>
<AddsChgs>
<DamageLineInfo>
<LineNum>1</LineNum>
<UniqueSequenceNum>1001</UniqueSequenceNum>
<ManualLineInd>0</ManualLineInd>
<LineStatusCode>Updated</LineStatusCode>
<LineDesc>Front Bumper</LineDesc>
<PartInfo>
<PartType>PAA</PartType>
<Quantity>1</Quantity>
<PartPrice>200.00</PartPrice>
<OEMPartNum>OEM123</OEMPartNum>
<TaxableInd>1</TaxableInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP1</LaborOperation>
<LaborHours>3.0</LaborHours> <!-- Modified -->
<LaborAmt>300.00</LaborAmt> <!-- Modified -->
</LaborInfo>
<LineMemo>Increased time due to hidden damage</LineMemo> <!-- Added -->
</DamageLineInfo>
<DamageLineInfo>
<LineNum>4</LineNum>
<UniqueSequenceNum>1004</UniqueSequenceNum>
<ManualLineInd>0</ManualLineInd>
<LineStatusCode>Updated</LineStatusCode>
<LineDesc>Dent Repair Door</LineDesc>
<LaborInfo>
<LaborType>LAD</LaborType>
<LaborOperation>OP3</LaborOperation>
<LaborHours>2.0</LaborHours> <!-- Modified -->
<LaborAmt>200.00</LaborAmt> <!-- Modified -->
</LaborInfo>
<LineMemo>Increased scope of repair</LineMemo> <!-- Added -->
</DamageLineInfo>
</AddsChgs>
<Deletions>
<DamageLineInfo>
<UniqueSequenceNum>1003</UniqueSequenceNum> <!-- Sublet line -->
</DamageLineInfo>
</Deletions>
</VehicleDamageEstimateChgRq>

View File

@@ -0,0 +1,431 @@
Awesome — thanks for the dumps. I pulled the structures directly from the XSDs you uploaded and
focused on **`VehicleDamageEstimateAddRq`** and the graph of types it depends on. Below is a
developer-grade map you can hand to a coding agent.
---
# What it is & where it lives
* **Global element**: `VehicleDamageEstimateAddRq`
* **Namespace**: `http://www.cieca.com/BMS` (default ns in your files)
* **Defined in**: `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* **Type**: `EstimateRqType` (from `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`)
* **Service group** (where this message is accepted): `EstimateService` in
`BMSEstimateService_2024R1_V6.9.0.xsd`
Includes: `PropertyDamageEstimateAddRq/Rs`, `VehicleDamageEstimateAddRq/Rs`,
`VehicleDamageEstimateChgRq/Rs`, `VehicleDamagePhotoEstimateAddRq/Rs`.
---
# Top-level schema (for `VehicleDamageEstimateAddRq` → `EstimateRqType`)
`EstimateRqType` **extends** `MessageHeaderType` (from `BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`) and
then adds the following sequence. I've marked **required** vs *optional* and multiplicity:
**Header (inherited from `MessageHeaderType`):**
* **`RqUID`** (UUID) — **required**
* `AsyncRqUID` (UUID) — *optional*
* `PartnerKey` (Identifier) — *optional*
**Body (from `EstimateRqType`):**
* `SvcProviderName` (Identifier) — *optional*
* `RefClaimNum` (Char\_50) — *optional*
* **`DocumentInfo`** (`DocumentInfoType`) — **required, 1**
* **`ApplicationInfo`** (`ApplicationInfoType`) — **required, 1..**\*
* `EventInfo` (`EventInfoType`) — *optional*
* **`AdminInfo`** (`AdminInfoType`) — **required**
* **`EstimatorIDs`** (`EstimatorIDsTypeType`) — **required**
* `ClaimInfo` (`ClaimInfoType`) — *optional*
* **`VehicleInfo`** (`VehicleInfoType`) **OR** `PropertyInfo` (`PropertyInfoType`) — **choice**
for vehicle, use **`VehicleInfo`**
* **`ProfileInfo`** (`ProfileInfoType`) — **required**
* **`DamageLineInfo`** (`DamageLineInfoType`) — **required, 1..**\* (line items)
* `CalibrationInfo` (`CalibrationInfoType`) — *optional, 0..*
* `ScanInfo` (`ScanInfoType`) — *optional, 0..*
* `FileAttachment` (`FileAttachmentType`) — *optional*
* `NonNewOEMPartInd` (Boolean) — *optional*
* `StorageDuration` (Integer\_Range\_0-999) — *optional*
* **`RepairTotalsInfo`** (`RepairTotalsInfoType`) — **required, 1..**\*
* `RepairTotalsHistory` (`RepairTotalsHistoryType`) — *optional, 0..*
* `PaymentInfo` (`PaymentInfoType`) — *optional*
* `EstimateMemo` (C) — *optional*
* `AdministrativeMemo` (C) — *optional*
* `Disclaimers` (C) — *optional*
* `CustomMemo` (C) — *optional*
* `CustomPrintImage` (C) — *optional*
* `OtherMemos` (`OtherMemosType`) — *optional, 0..*
**Files involved:**
`BMSEstimateMessages_2024R1_V6.9.0.xsd`, `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`,
`BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`, `BMSSimpleTypes_2024R1_V6.9.0.xsd` + code lists XSDs for
enums.
---
# Key dependent types (immediate children youll actually populate)
Below are the **first-level** structures you'll typically use. I've trimmed to the practical fields;
each type has many optional parties and details you can ignore for a minimal AddRq.
## `DocumentInfoType` (BMSCommonGlobalTypes)
Typical header metadata:
* **`BMSVer`** (`BMSVersionClosedEnumType`) — e.g. **`6.9.0`**
* **`DocumentType`** (`DocumentTypeClosedEnumType`) — code for message family (e.g. `E` for
estimate; codelists provide the letter codes)
* `DocumentSubType` (`DocumentSubTypeClosedEnumType`) — e.g. “Original Estimate”, “Copy”, etc.
* `DocumentID` (Char\_50) — your ID
* `VendorCode` (VendorCodeOpenEnumType) — optional
* `DocumentVer` (`DocumentVerType`) — versioning container (0..\*)
* **`CreateDateTime`** (DateTime)
* `TransmitDateTime` (DateTime)
* `ReferenceInfo` (`RefInfoType`) — links to prior docs
* `CountryCode`, `CurrencyInfo`, `CultureCode` — optional locale bits
## `ApplicationInfoType` (BMSCommonGlobalTypes) **(1..\*)**
* **`ApplicationType`** (`ApplicationTypeClosedEnumType`) — e.g., Estimator, Shop Mgmt, etc.
* **`ApplicationName`** (Char\_30)
* **`ApplicationVer`** (Char\_12)
* `DatabaseVer` (Char\_12)
* `DatabaseDateTime` (DateTime)
## `AdminInfoType` (BMSCommonGlobalTypes)
Large party/role roster; **all child elements are optional**, but the container itself is required.
Common ones:
* `InsuranceCompany` (`InsuranceCompanyType`)
* `PolicyHolder` (`PolicyHolderType`)
* `Insured` / `Owner` / `Customer` (`GenericPartyType`)
* `Claimant` (`ClaimantType`)
* `Estimator` (0..\*) (`EstimatorType`)
* `RepairFacility` (`RepairFacilityType`)
* `RentalProvider`, `TowCompany`, `Lender`, `Lienholder` (0..\*), etc.
(You can send `<AdminInfo/>` if you don't need parties; it validates.)
## `EstimatorIDsTypeType` (BMSEstimateCommonTypes)
* `OriginalEstimatorID` (Char\_40) — optional
* `EstimatorHistory` (0..\*) → `EstimatorHistoryType` ⇒ (`DocumentVerCode`, `DocumentVerNum`)
## `ClaimInfoType` (BMSCommonGlobalTypes) *(optional)*
* `ClaimNum` (Char\_50)
* `PolicyInfo` (0..\*) (`PolicyInfoType`)
* `LossInfo` (`LossInfoType`) — details on loss/time/location/coverage
* `AdditionalIDInfo` (0..\*) (`IDInfoType`)
* `ClaimStatus`, `PreviousPaymentAmt`, `ClaimMemo`, etc.
## `VehicleInfoType` (BMSCommonGlobalTypes) *(choose this over PropertyInfo)*
* `VINInfo` (0..\*) (`VINInfoType`) → **choice** of `VINAvailabilityCode` or one or more `VIN` (
`VINType`)
* `License` (`LicenseType`)
* `VehicleDesc` (`VehicleDescType`) — **ModelYear**, **MakeDesc/MakeCode**, **ModelName/ModelNum**,
`VehicleType`, etc.
* `Paint`, `Body`, `Powertrain`, `Condition`, `Valuation`, `VehicleMemo`
* `PolicyVehicleNum`, `LossVehicleNum`
* `FileAttachment` (`FileAttachmentType`)
* `CustomElement` (0..\*)
* `UnitNum` (Char\_20)
> Note: `VINType` is referenced but its concrete restriction is provided elsewhere in BMS; you can
> treat it as a VIN string (17-char typical) and your validator will enforce the real facet.
## `ProfileInfoType` (BMSEstimateCommonTypes) **required**
Controls rates, tax, and rules used to compute totals:
* `ProfileName` (Char\_40)
* **`RateInfo`** (1..\*) (`RateInfoType`)
* `RateType` (`RateTypeClosedEnumType`) — e.g., BODY\_LABOR, PAINT\_LABOR, MECHANICAL\_LABOR,
MATERIAL, etc.
* `RateTierInfo` / `RateTierHistory` (0..\*)
* `TaxableInd`, `TaxRate`, `AdjustmentInfo` (0..*), `TaxInfo` (0..*)
* `MaterialCalcSettings` (optional)
* `AlternatePartInfo` (0..*), `PartCertification` (0..*), `RefinishCalcSettings`,
`PreTaxDiscountRate`, `TaxExemptInfo` (0..\*), `CanadianTax` (for CA specifics)
## `DamageLineInfoType` (BMSEstimateCommonTypes) **1..**\*
One per estimate line. Core children:
* `LineNum`, `UniqueSequenceNum`, `ParentLineNum` (hierarchy)
* `ManualLineInd`, `AutomatedEntry`, `LineStatusCode`
* `LineDesc`, `LineDescCode`
* `SubletInfo` (`SubletInfoType`)
* `PartInfo` (0..\*) (`PartInfoType`)
* `LaborInfo` (`LaborInfoType`)
* `RefinishLaborInfo` (`LaborInfoType`)
* `MaterialType`, `OtherChargesInfo`, `WhoPays`
* `LineAdjustment`, `AppliedAdjustment`
* `PDRInfo`, `LineType`, `LineMemo`, `VendorRefNum` (0..\*)
**`PartInfoType`** highlights:
* `PartMaterialCode`, `PartType`, `LineItemCategoryCode`
* `PartDesc`, `PartNum`, `OEMPartNum`
* `NonOEM` (0..\*) (`NonOEMType`) — alternate sources/quality
* `ListPrice`, `PartPrice`, `UnitPartPrice`, `TotalPartPrice`, `OEMPartPrice`
* `PriceAdjustment` (0..\*) (`PriceAdjustmentType`)
* `TaxableInd`, `AppliedTaxes`
* `CertificationType` (0..\*), `AlternatePartInd`, `GlassPartInd`
* `Quantity`, `PartStatus`, `Dimensions`, `Glass*`, `QuotedPartList`
**`LaborInfoType`** highlights:
* **`LaborType`** (`LaborTypeClosedEnumType`) — **required**
* `LaborOperation`, `LaborHours`, `LaborHourlyRate`, `LaborAmt`
* `DatabaseLaborType/Hours/Amt`
* `LaborAdjustment` (0..\*)
* Judgment/flags (e.g., `LaborAmtJudgmentInd`, `OverlapInd`)
* Paint-specific fields (`PaintStagesNum`, `PaintTonesNum`)
* `AssemblyLaborCode`
## `CalibrationInfoType` / `ScanInfoType` (BMSEstimateCommonTypes)
* **`ScanInfoType`**: `ScanDetailsList` (optional), `FileAttachment` (optional), `ScanTool`,
`ScanDateTime` (**required**), flags `CleanScanInd`, `FollowUpInd`, plus `Technician`.
* **`CalibrationInfoType`**: optional lists for details & technicians, plus process flags (
`PrerequisitesMetInd`, `ProceduresFollowedInd`, `ADASReviewedWithOwnerInd`).
## `FileAttachmentType` (BMSCommonGlobalTypes)
* `DocAttachment` (0..\*) (`DocAttachmentType`)
* `AttachmentType` (open enum)
* `AttachmentTitle` **or** `AttachmentMemo`
* `AttachmentFileType`, `AttachmentFileName`, `AttachmentLength`
* **One of:** `AttachmentURI` **or** `EmbeddedAttachmentType`
* `EmbeddedAttachmentType` — a **choice**: `EmbeddedAttachment` (Binary) **or**
`EmbeddedAttachmentText` (C)
* `AttachmentIntegrity` (0..\*) (optionally includes Binary integrity blobs)
* `AttachmentStatusCode` (open enum)
## `RepairTotalsInfoType` (BMSEstimateCommonTypes) **1..**\*
* `LaborTotalsInfo` (0..\*) (`TotalsInfoType`)
* `PartsTotalsInfo` (0..\*) (`TotalsInfoType`)
* `OtherChargesTotalsInfo` (0..\*) (`TotalsInfoType`)
* `NumOfDamageLines` (optional)
* **`SummaryTotalsInfo`** (1..\*) (`TotalsInfoType`) — your rolled-up totals
* `RepairTotalsType` (`LineTypeClosedEnumType`) — optional (e.g., gross vs. customer-pay segments)
**`TotalsInfoType`** (BMSCommonGlobalTypes) highlights:
* **`TotalType`** (`TotalTypeOpenEnumType`) — category (e.g., LABOR, PARTS, TAX, GRAND\_TOTAL,…)
* `TotalSubType` (open enum)
* **`TotalTypeDesc`** (Char\_30)
* Hours quantities & units, item quantity, unit price
* Detailed `TotalTaxInfo` / `TotalAdjustmentInfo` (0..\*)
* Amounts: `NonTaxableAmt`, `TaxableAmt`, `TaxTotalAmt`, `OtherCharges*`, **`TotalAmt`**,
`TotalPct`, `TotalCost`
* `AmtDueInfo` (0..\*)
## `RepairTotalsHistoryType` (BMSEstimateCommonTypes)
* Version stamp and one or more `HistoryTotalsInfo` entries.
## `PaymentInfoType` (BMSCommonGlobalTypes) *(optional)*
* `PayerType`, `PaymentType`
* `Payee`/`PayerInfo`/`PayeeInfo`
* `PaymentDateTime`, **`PaymentAmt`**
* `PaymentID`, `PaymentMemo`, `PaymentAmtType`
## `OtherMemosType` (BMSCommonGlobalTypes)
* `OtherMemoRef` (open enum), `OtherMemo` (C)
---
# Minimal, schema-valid XML skeleton (vehicle path)
> Uses only **required** containers/fields; values shown as **PLACEHOLDER**.
> You must add at least one **DamageLineInfo** and one **SummaryTotalsInfo** item, and at least one
**RateInfo** inside **ProfileInfo**.
> Enumerations are *code lists*; use valid codes from your system.
```xml
<VehicleDamageEstimateAddRq xmlns="http://www.cieca.com/BMS">
<!-- MessageHeaderType -->
<RqUID>00000000-0000-0000-0000-000000000000</RqUID>
<!-- EstimateRqType sequence -->
<DocumentInfo>
<BMSVer>6.9.0</BMSVer>
<DocumentType>E</DocumentType>
<CreateDateTime>2025-08-14T12:00:00Z</CreateDateTime>
</DocumentInfo>
<ApplicationInfo>
<ApplicationType>INSERT_APP_TYPE</ApplicationType>
<ApplicationName>INSERT_APP_NAME</ApplicationName>
<ApplicationVer>INSERT_APP_VER</ApplicationVer>
</ApplicationInfo>
<AdminInfo/> <!-- container required; children optional -->
<EstimatorIDs/> <!-- container required; children optional -->
<!-- choice: VehicleInfo OR PropertyInfo -->
<VehicleInfo>
<!-- minimally empty is allowed; typical payload would include VIN and Year/Make/Model -->
<!-- Example:
<VINInfo>
<VIN>1HGBH41JXMN109186</VIN>
</VINInfo>
<VehicleDesc>
<ModelYear>2020</ModelYear>
<MakeDesc>Honda</MakeDesc>
<ModelName>Civic</ModelName>
</VehicleDesc>
-->
</VehicleInfo>
<ProfileInfo>
<!-- at least one RateInfo required -->
<RateInfo>
<RateType>INSERT_RATE_TYPE</RateType>
<!-- optional: <RateDesc>Body Labor</RateDesc> <TaxRate>13.00</TaxRate> etc. -->
</RateInfo>
</ProfileInfo>
<!-- at least one DamageLineInfo -->
<DamageLineInfo>
<!-- minimal: include a LaborInfo with required LaborType -->
<LaborInfo>
<LaborType>INSERT_LABOR_TYPE</LaborType>
<!-- optional: <LaborHours>1.0</LaborHours> <LaborHourlyRate>85.00</LaborHourlyRate> -->
</LaborInfo>
</DamageLineInfo>
<!-- at least one RepairTotalsInfo with at least one SummaryTotalsInfo -->
<RepairTotalsInfo>
<SummaryTotalsInfo>
<TotalType>INSERT_TOTAL_TYPE</TotalType>
<TotalTypeDesc>Grand Total</TotalTypeDesc>
<TotalAmt>0.00</TotalAmt>
</SummaryTotalsInfo>
</RepairTotalsInfo>
</VehicleDamageEstimateAddRq>
```
---
# Implementation notes & gotchas (important)
1. **Required containers vs. required content**
* `AdminInfo` and `EstimatorIDs` are **required containers** but their **children are optional**.
Empty elements validate.
* `ProfileInfo` is required and must include **≥1 `RateInfo`** with a `RateType`.
* You must include the **choice** of **`VehicleInfo`** (for this message) instead of `PropertyInfo`.
* Include **≥1 `DamageLineInfo`** and **≥1 `RepairTotalsInfo`**, each containing **≥1 `SummaryTotalsInfo`**.
2. **Header**
* `RqUID` is required; use a real UUID.
3. **Enumerations / code lists**
* Many fields are `ClosedEnumType`/`OpenEnumType` and validated against the BMS code list XSDs you
included (e.g., `BMSCodeLists_*.xsd`). Use the exact code values your trading partner expects (
e.g., `DocumentType` = `E` for estimates).
* `BMSVer` supports `6.9.0`.
4. **Line hierarchy**
* For nested kits/assemblies, use `ParentLineNum`; `UniqueSequenceNum` helps ordering. `LineType`
can label grouping (e.g., Sublet, Labor, Part, etc.).
5. **Attachments**
* You can embed binary (`EmbeddedAttachmentType/EmbeddedAttachment`) **or** provide a URI (
`AttachmentURI`). Provide `AttachmentFileType` and `AttachmentFileName` either way.
6. **Scans & calibrations**
* If you include `ScanInfo`, it **requires** `ScanTool` and `ScanDateTime`. Calibrations are
optional but provide strong ADAS traceability.
7. **Totals integrity**
* `RepairTotalsInfo/SummaryTotalsInfo` acts as your roll-up. Ensure it reconciles with the sum of
`DamageLineInfo` components and the profile's rates/taxes so consumers don't reject on mismatches.
8. **Currency / numeric facets**
* Monetary fields use `Currency`. Hours/rates/quantities have explicit facets (e.g.,
`Decimal_Range_-999.9-999.9`). Stay within ranges.
9. **Canada specifics**
* `DocumentInfo/CountryCode` = `CA`, and `ProfileInfo/CanadianTax` is available for PST/HST/GST
modeling if you need to encode tax policy explicitly.
---
# Quick field checklist for a typical *valid* “vehicle add” you'll generate
* **Header**
* `RqUID`
* **Doc header**
* `DocumentInfo/BMSVer` = `6.9.0`
* `DocumentInfo/DocumentType` = `E`
* `DocumentInfo/CreateDateTime`
* **App**
* `ApplicationInfo[1..*]/(ApplicationType, ApplicationName, ApplicationVer)`
* **Admin**
* `<AdminInfo/>` (or populate parties) ✅
* **EstimatorIDs**
* `<EstimatorIDs/>` (or add contents) ✅
* **Vehicle**
* `VehicleInfo` (VIN + YMM recommended) ✅
* **Profile & rates**
* `ProfileInfo/RateInfo[1..*]/RateType`
* **Lines**
* `DamageLineInfo[1..*]` with at least one `LaborInfo/LaborType` or `PartInfo`
* **Totals**
* `RepairTotalsInfo[1..*]/SummaryTotalsInfo[1..*]/(TotalType, TotalTypeDesc, TotalAmt)`
---
# Pointers to definitions in your bundle (for traceability)
* `VehicleDamageEstimateAddRq` element → `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* `EstimateRqType``BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* `MessageHeaderType`, `DocumentInfoType`, `VehicleInfoType`, `FileAttachmentType`,
`PaymentInfoType`, etc. → `BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`
* Rates/lines/totals/calibration/scan subtypes → mostly `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* Enums/code lists → `BMSCodeLists_ClassicCode_2024R1_V6.9.0.xsd`,
`BMSCodeLists_CodeExt_2024R1_V6.9.0.xsd`
* Service wrapper (which messages are valid to send/receive) →
`BMSEstimateService_2024R1_V6.9.0.xsd`
---

View File

@@ -0,0 +1,250 @@
You got it—here's the same style of breakdown for **`VehicleDamageEstimateChgRq`** (the *change
request* variant). I pulled this straight from your XSD set and focused on what differs from
`…AddRq`, what's required vs optional, and what a minimal-but-valid payload looks like.
---
# What it is & where it lives
* **Global element**: `VehicleDamageEstimateChgRq`
* **Namespace**: `http://www.cieca.com/BMS`
* **Defined in**: `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* **Type**: `EstimateChgRqType` (declared in `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`)
* **Service group**: `EstimateService` from `BMSEstimateService_2024R1_V6.9.0.xsd`
Group includes: `PropertyDamageEstimateAddRq/Rs`, `VehicleDamageEstimateAddRq/Rs`,
**`VehicleDamageEstimateChgRq/Rs`**, `VehicleDamagePhotoEstimateAddRq/Rs`.
---
# Top-level schema (for `VehicleDamageEstimateChgRq` → `EstimateChgRqType`)
`EstimateChgRqType` **extends** `MessageHeaderType` (same header as `…AddRq`) but the **body is
almost entirely optional** (intended to send only what's changing). Only **`DocumentInfo`** is
required.
**Header (inherited from `MessageHeaderType`):**
* **`RqUID`** (UUID) — **required**
* `AsyncRqUID` (UUID) — *optional*
* `PartnerKey` (Identifier) — *optional*
**Body (from `EstimateChgRqType`):**
* `SvcProviderName` (Identifier) — *optional*
* `RefClaimNum` (Char\_50) — *optional*
* **`DocumentInfo`** (`DocumentInfoType`) — **required**
* `ApplicationInfo` (`ApplicationInfoType`) — *optional, 0..\**
* `EventInfo` (`EventInfoType`) — *optional*
* `AdminInfo` (`AdminInfoType`) — *optional*
* `EstimatorIDs` (`EstimatorIDsTypeType`) — *optional*
* `ClaimInfo` (`ClaimInfoType`) — *optional*
* **Choice** — *both optional*:
* `VehicleInfo` (`VehicleInfoType`) — *optional*
* `PropertyInfo` (`PropertyInfoType`) — *optional*
* `ProfileInfo` (`ProfileInfoType`) — *optional*
* `DamageLineInfo` (`DamageLineInfoType`) — *optional, 0..\** (send only changed/affected lines)
* `NonNewOEMPartInd` (Boolean) — *optional*
* `StorageDuration` (Integer\_Range\_0-999) — *optional*
* `RepairTotalsInfo` (`RepairTotalsInfoType`) — *optional, 0..\**
* `RepairTotalsHistory` (`RepairTotalsHistoryType`) — *optional, 0..\**
* `PaymentInfo` (`PaymentInfoType`) — *optional*
* `EstimateMemo` (C) — *optional*
* `AdministrativeMemo` (C) — *optional*
* `Disclaimers` (C) — *optional*
* `CustomMemo` (C) — *optional*
* `CustomPrintImage` (C) — *optional*
* `OtherMemos` (`OtherMemosType`) — *optional, 0..\**
**Key deltas vs `VehicleDamageEstimateAddRq`:**
* `…AddRq` *requires* several containers (`AdminInfo`, `EstimatorIDs`, `ProfileInfo`,
`DamageLineInfo`, `RepairTotalsInfo` with `SummaryTotalsInfo`, etc.).
* `…ChgRq` **only requires** `MessageHeaderType/RqUID` and **`DocumentInfo`**; everything else is
optional so you can send *just what changed*.
* `CalibrationInfo` and `ScanInfo` (present in `…AddRq`) are **not** present in `EstimateChgRqType`.
* Because almost everything is optional, **the burden is on you** to correctly identify the target
document/version in `DocumentInfo` (or via `ReferenceInfo`) and to include all fields necessary
for the receiver to apply your changes.
---
# Important dependent types (same as Add, but optional here)
* **`DocumentInfoType`** (BMSCommonGlobalTypes) — **required**
* Use this to identify *which* estimate you're changing. Typical:
* **`BMSVer`** = `6.9.0`
* **`DocumentType`** = `E` (estimate)
* `DocumentID` — your estimate ID
* `CreateDateTime` — when you formed this change message
* `ReferenceInfo` — link back to the prior/authoritative doc (e.g., original `DocumentID`/
`DocumentVer`), if your workflow uses references
* `DocumentVer` — version info list, if you lifecycle versions
* **`ApplicationInfoType`** — software fingerprint (optional, 0..\*)
* **`AdminInfoType`** — parties/roles (optional)
* **`EstimatorIDsTypeType`** — supplemental estimator IDs/history (optional)
* **`ClaimInfoType`** — claim-level data (optional)
* **`VehicleInfoType`** (or `PropertyInfoType`) — vehicle path stays under `VehicleInfo` (optional)
* **`ProfileInfoType`** — rates/taxes/rules (optional)
* **`DamageLineInfoType`** — **send changed/added/removed lines only** (your trading partner may
require specific flags/LineStatusCode or use `ParentLineNum`+`UniqueSequenceNum` to identify
updates)
* **`RepairTotalsInfoType`** — updated totals (optional; some partners expect totals to reconcile
with changed lines)
* **`PaymentInfoType`**, memos, custom print/image & `OtherMemos` — all optional
> Because `ChgRq` is sparse by design, **schema validation won't catch semantic issues** (e.g., you
> remove a part but don't update totals). Make sure your payload is self-consistent per partner
> rules.
---
# Minimal, schema-valid XML skeleton (change request)
> This represents the *absolute floor* to validate: **Header/RqUID** + **DocumentInfo** with basic
> fields. In practice, include `DocumentID` and some way to reference the prior document/version so
> the receiver can apply changes.
```xml
<VehicleDamageEstimateChgRq xmlns="http://www.cieca.com/BMS">
<!-- MessageHeaderType -->
<RqUID>00000000-0000-0000-0000-000000000000</RqUID>
<!-- EstimateChgRqType sequence -->
<DocumentInfo>
<BMSVer>6.9.0</BMSVer>
<DocumentType>E</DocumentType>
<CreateDateTime>2025-08-14T12:00:00Z</CreateDateTime>
<!-- Strongly recommended for change requests: -->
<!-- <DocumentID>EST-12345</DocumentID> -->
<!-- <DocumentVer>
<DocumentVerCode>REV</DocumentVerCode>
<DocumentVerNum>2</DocumentVerNum>
</DocumentVer>
<ReferenceInfo>
<RefDocumentID>EST-12345</RefDocumentID>
<RefDocumentVerNum>1</RefDocumentVerNum>
</ReferenceInfo> -->
</DocumentInfo>
<!-- Add only what changed. Examples: -->
<!-- Update a rate -->
<!--
<ProfileInfo>
<RateInfo>
<RateType>BODY_LABOR</RateType>
<TaxableInd>true</TaxableInd>
<TaxRate>13.00</TaxRate>
</RateInfo>
</ProfileInfo>
-->
<!-- Add/update a labor line -->
<!--
<DamageLineInfo>
<LineNum>10</LineNum>
<LaborInfo>
<LaborType>BODY</LaborType>
<LaborHours>1.5</LaborHours>
<LaborHourlyRate>85.00</LaborHourlyRate>
</LaborInfo>
</DamageLineInfo>
-->
<!-- Sync totals if your partner requires it with each change -->
<!--
<RepairTotalsInfo>
<SummaryTotalsInfo>
<TotalType>GRAND_TOTAL</TotalType>
<TotalTypeDesc>Grand Total</TotalTypeDesc>
<TotalAmt>1234.56</TotalAmt>
</SummaryTotalsInfo>
</RepairTotalsInfo>
-->
</VehicleDamageEstimateChgRq>
```
---
# Practical guidance & gotchas
1. **Targeting the right document/version**
* `DocumentInfo/DocumentID` + `DocumentVer` and/or `ReferenceInfo` should point unambiguously to the
estimate being changed. This is essential because the schema does **not** include a separate
“ChangeTarget” field—partners expect this info in `DocumentInfo`/`ReferenceInfo`.
2. **Sparsity vs completeness**
* You can send just the changed sections (e.g., one `DamageLineInfo`, one `RateInfo`).
* Some receivers require you to **also** include reconciled `RepairTotalsInfo/SummaryTotalsInfo`.
Check partner specs.
3. **Line identity**
* If you're updating an existing line, keep its identity stable using `LineNum` and/or
`UniqueSequenceNum`.
* For nested structures, preserve `ParentLineNum`. Use `LineStatusCode` if your partner requires
explicit “added/changed/deleted” flags.
4. **Profile impacts**
* If a change affects pricing (rates, taxes, discounts), update `ProfileInfo` (and possibly totals).
Omitting totals may be acceptable for some partners; others will reject mismatches.
5. **What's *not* in ChgRq vs AddRq**
* `CalibrationInfo` and `ScanInfo` do not appear in `EstimateChgRqType`. If you need to change those
data, partner workflows may expect a re-send under Add/PhotoAdd or a separate message
family—confirm externally.
6. **Header is still mandatory**
* `RqUID` must be a real UUID.
7. **Code lists**
* Enumerations (e.g., `DocumentType`, `RateType`, `TotalType`, `LaborType`) are validated against
your code list XSDs. Use exact codes.
---
# Quick field checklist for a *solid* ChgRq
* **Header**
* `RqUID`
* **Doc identity**
* `DocumentInfo/BMSVer` = `6.9.0`
* `DocumentInfo/DocumentType` = `E`
* `DocumentInfo/CreateDateTime`
* `DocumentInfo/DocumentID` (recommended) ✅
* `DocumentInfo/DocumentVer` and/or `ReferenceInfo` (recommended) ✅
* **Changed data only**
* `ProfileInfo/RateInfo` (if rates/taxes changed)
* `DamageLineInfo[0..*]` (added/updated/removed lines)
* `RepairTotalsInfo/SummaryTotalsInfo` (if required by partner)
* Any updated `AdminInfo`, `ClaimInfo`, `VehicleInfo` fragments as needed
---
# Pointers to definitions in your bundle
* `VehicleDamageEstimateChgRq` element → `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* `EstimateChgRqType``BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* `MessageHeaderType`, `DocumentInfoType`, `VehicleInfoType`, `TotalsInfoType`, etc. →
`BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`
* Code lists → `BMSCodeLists_ClassicCode_2024R1_V6.9.0.xsd`,
`BMSCodeLists_CodeExt_2024R1_V6.9.0.xsd`
* Service wrapper → `BMSEstimateService_2024R1_V6.9.0.xsd` (group `EstimateService` contains the
ChgRq/Rs)
---

View File

@@ -54,6 +54,10 @@ paths:
userEmail:
type: string
format: email
userPassword:
type: string
description: Optional password for the new user. If provided, the password is set directly, and no password reset link is sent. Must be at least 6 characters.
nullable: true
logoUrl:
type: string
format: uri
@@ -140,6 +144,8 @@ paths:
resetLink:
type: string
format: uri
nullable: true
description: Password reset link for the user. Only included if userPassword is not provided in the request.
'400':
description: Bad request (missing or invalid fields)
content:

View File

@@ -252,35 +252,27 @@ const generatePaymentUrl = async (req, res) => {
* @returns {Promise<void>}
*/
const checkFee = async (req, res) => {
const logResponseMeta = {
bodyshop: {
id: req.body?.bodyshop?.id,
imexshopid: req.body?.bodyshop?.imexshopid,
name: req.body?.bodyshop?.shopname,
state: req.body?.bodyshop?.state
},
amount: req.body?.amount
};
const { bodyshop = {}, amount } = req.body || {};
const { id, imexshopid, shopname, state } = bodyshop;
const logResponseMeta = { bodyshop: { id, imexshopid, name: shopname, state }, amount };
logger.log("intellipay-checkfee-request-received", "DEBUG", req.user?.email, null, logResponseMeta);
if (!isNumber(req.body?.amount) || req.body?.amount <= 0) {
if (!isNumber(amount) || amount <= 0) {
logger.log("intellipay-checkfee-skip", "DEBUG", req.user?.email, null, {
message: "Amount is zero or undefined, skipping fee check.",
...logResponseMeta
});
return res.json({ fee: 0 });
}
const shopCredentials = await getShopCredentials(req.body.bodyshop);
const shopCredentials = await getShopCredentials(bodyshop);
if (shopCredentials?.error) {
logger.log("intellipay-checkfee-credentials-error", "ERROR", req.user?.email, null, {
message: shopCredentials.error?.message,
...logResponseMeta
});
return res.status(400).json({ error: shopCredentials.error?.message, ...logResponseMeta });
}
@@ -292,13 +284,10 @@ const checkFee = async (req, res) => {
{
method: "fee",
...shopCredentials,
amount: req.body.amount,
paymenttype: `CC`,
amount: String(amount), // Type cast to string as required by API
paymenttype: "CC",
cardnum: "4111111111111111", // Required for compatibility with API
state:
req.body.bodyshop?.state && req.body.bodyshop.state.length === 2
? req.body.bodyshop.state.toUpperCase()
: "ZZ"
state: state?.toUpperCase() || "ZZ"
},
{ sort: false } // Ensure query string order is preserved
),
@@ -310,46 +299,24 @@ const checkFee = async (req, res) => {
...logResponseMeta
});
const response = await axios(options);
const { data } = await axios(options);
if (response.data?.error) {
logger.log("intellipay-checkfee-api-error", "ERROR", req.user?.email, null, {
message: response.data?.error,
...logResponseMeta
});
return res.status(400).json({
error: response.data?.error,
type: "intellipay-checkfee-api-error",
...logResponseMeta
});
if (data?.error || data < 0) {
const errorType = data?.error ? "intellipay-checkfee-api-error" : "intellipay-checkfee-negative-fee";
const errorMessage = data?.error
? data?.error
: "Fee amount negative. Check API credentials & account configuration.";
logger.log(errorType, "ERROR", req.user?.email, null, { message: errorMessage, data, ...logResponseMeta });
return res.status(400).json({ error: errorMessage, type: errorType, data, ...logResponseMeta });
}
if (response.data < 0) {
logger.log("intellipay-checkfee-negative-fee", "ERROR", req.user?.email, null, {
message: "Fee amount returned is negative.",
...logResponseMeta
});
return res.json({
error: "Fee amount negative. Check API credentials & account configuration.",
...logResponseMeta,
type: "intellipay-checkfee-negative-fee"
});
}
logger.log("intellipay-checkfee-success", "DEBUG", req.user?.email, null, {
fee: response.data,
...logResponseMeta
});
return res.json({ fee: response.data, ...logResponseMeta });
logger.log("intellipay-checkfee-success", "DEBUG", req.user?.email, null, { fee: data, ...logResponseMeta });
return res.json({ fee: data, ...logResponseMeta });
} catch (error) {
logger.log("intellipay-checkfee-error", "ERROR", req.user?.email, null, {
message: error?.message,
...logResponseMeta
});
return res.status(500).json({ error: error?.message, logResponseMeta });
}
};

View File

@@ -6,7 +6,7 @@
const decodeComment = (comment) => {
try {
return comment ? JSON.parse(Buffer.from(comment, "base64").toString()) : null;
} catch (error) {
} catch {
return null; // Handle malformed base64 string gracefully
}
};

View File

@@ -1,40 +1,14 @@
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
// Emit this to bodyshop room
exports.default = async (req, res) => {
const { useremail, bodyshopid, operationName, variables, env, time, dbevent, user } = req.body;
const {
ioRedis,
ioHelpers: { getBodyshopRoom }
} = req;
try {
// await client.request(queries.INSERT_IOEVENT, {
// event: {
// operationname: operationName,
// time,
// dbevent,
// env,
// variables,
// bodyshopid,
// useremail
// }
// });
// ioRedis.to(getBodyshopRoom(bodyshopid)).emit("bodyshop-message", {
// operationName,
// useremail
// });
res.sendStatus(200);
} catch (error) {
} catch {
logger.log("ioevent-error", "silly", user, null, {
operationname: operationName,
time,

View File

@@ -1,24 +1,18 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
//const client = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
const path = require("path");
const client = require("../graphql-client/graphql-client").client;
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
async function StatusTransition(req, res) {
const { id: jobid, status: value, shopid: bodyshopid } = req.body.event.data.new;
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record,
// create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {

View File

@@ -1,10 +1,7 @@
const path = require("path");
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
@@ -47,13 +44,34 @@ const generateSignedUploadUrls = async (req, res) => {
for (const filename of filenames) {
const key = filename;
const client = new S3Client({ region: InstanceRegion() });
const command = new PutObjectCommand({
// Check if filename indicates PDF and set content type accordingly
const isPdf = filename.toLowerCase().endsWith('.pdf');
const commandParams = {
Bucket: imgproxyDestinationBucket,
Key: key,
StorageClass: "INTELLIGENT_TIERING"
};
if (isPdf) {
commandParams.ContentType = "application/pdf";
}
const command = new PutObjectCommand(commandParams);
// For PDFs, we need to add conditions to the presigned URL to enforce content type
const presignedUrlOptions = { expiresIn: 360 };
if (isPdf) {
presignedUrlOptions.signableHeaders = new Set(['content-type']);
}
const presignedUrl = await getSignedUrl(client, command, presignedUrlOptions);
signedUrls.push({
filename,
presignedUrl,
key,
...(isPdf && { contentType: "application/pdf" })
});
const presignedUrl = await getSignedUrl(client, command, { expiresIn: 360 });
signedUrls.push({ filename, presignedUrl, key });
}
logger.log("imgproxy-upload-success", "DEBUG", req.user?.email, jobid, { signedUrls });
@@ -91,11 +109,11 @@ const getThumbnailUrls = async (req, res) => {
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//If there's no jobid and no billid, we're in temporary documents.
const data = await (
billid ? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid }) :
jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const data = await (billid
? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid })
: jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
const s3client = new S3Client({ region: InstanceRegion() });
@@ -106,7 +124,7 @@ const getThumbnailUrls = async (req, res) => {
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key = keyStandardize(document)
let key = keyStandardize(document);
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
@@ -193,7 +211,10 @@ const downloadFiles = async (req, res) => {
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, {
message: err.message,
stack: err.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
@@ -202,7 +223,7 @@ const downloadFiles = async (req, res) => {
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let key = keyStandardize(doc);
let response;
try {
response = await s3client.send(
@@ -212,13 +233,21 @@ const downloadFiles = async (req, res) => {
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
@@ -393,7 +422,6 @@ const keyStandardize = (doc) => {
}
};
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,

View File

@@ -1,5 +1,3 @@
const path = require("path");
/**
* Checks if the event secret is correct
* It adds the following properties to the request object:

View File

@@ -1,13 +1,8 @@
const path = require("path");
const _ = require("lodash");
const xml2js = require("xml2js");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.mixdataUpload = async (req, res) => {
const client = req.userGraphQLClient;

View File

@@ -280,7 +280,7 @@ const getQueue = () => {
/**
* Dispatches notifications to the `addQueue` for processing.
*/
const dispatchAppsToQueue = async ({ appsToDispatch, logger }) => {
const dispatchAppsToQueue = async ({ appsToDispatch }) => {
const appQueue = getQueue();
for (const app of appsToDispatch) {

View File

@@ -1,7 +1,3 @@
require("dotenv").config({
path: require("path").resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
//const client = require("../graphql-client/graphql-client").client;
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
@@ -8,6 +7,12 @@ const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require(".
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
const get = (obj, key) => {
return key.split(".").reduce((o, x) => {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
};
exports.calculatelabor = async function (req, res) {
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
@@ -99,9 +104,3 @@ exports.calculatelabor = async function (req, res) {
res.status(503).send();
}
};
get = function (obj, key) {
return key.split(".").reduce(function (o, x) {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
};

View File

@@ -1,8 +1,7 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
const { CalculateExpectedHoursForJob } = require("./pay-all");
const moment = require("moment");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const _ = require("lodash");
const rdiff = require("recursive-diff");
const logger = require("../utils/logger");

View File

@@ -1,5 +1,6 @@
const express = require("express");
const router = express.Router();
const logger = require("../../server/utils/logger");
// Pull secrets from env
const { VSSTA_INTEGRATION_SECRET, PARTS_MANAGEMENT_INTEGRATION_SECRET } = process.env;
@@ -11,17 +12,51 @@ if (typeof VSSTA_INTEGRATION_SECRET === "string" && VSSTA_INTEGRATION_SECRET.len
router.post("/vssta", vsstaMiddleware, vsstaIntegration);
} else {
console.warn("VSSTA_INTEGRATION_SECRET is not set — skipping /vssta integration route");
logger.logger.warn("VSSTA_INTEGRATION_SECRET is not set — skipping /vssta integration route");
}
// Only load Parts Management routes if that secret is set
if (typeof PARTS_MANAGEMENT_INTEGRATION_SECRET === "string" && PARTS_MANAGEMENT_INTEGRATION_SECRET.length > 0) {
const partsManagementProvisioning = require("../integrations/partsManagement/partsManagementProvisioning");
const partsManagementIntegrationMiddleware = require("../middleware/partsManagementIntegrationMiddleware");
const XML_BODY_LIMIT = "10mb"; // Set a limit for XML body size
const partsManagementProvisioning = require("../integrations/partsManagement/endpoints/partsManagementProvisioning");
const partsManagementDeprovisioning = require("../integrations/partsManagement/endpoints/partsManagementDeprovisioning");
const partsManagementIntegrationMiddleware = require("../middleware/partsManagementIntegrationMiddleware");
const partsManagementVehicleDamageEstimateAddRq = require("../integrations/partsManagement/endpoints/vehicleDamageEstimateAddRq");
const partsManagementVehicleDamageEstimateChqRq = require("../integrations/partsManagement/endpoints/vehicleDamageEstimateChgRq");
/**
* Route to handle Vehicle Damage Estimate Add Request
*/
router.post(
"/parts-management/VehicleDamageEstimateAddRq",
express.raw({ type: "application/xml", limit: XML_BODY_LIMIT }), // Parse XML body
partsManagementIntegrationMiddleware,
partsManagementVehicleDamageEstimateAddRq
);
/**
* Route to handle Vehicle Damage Estimate Change Request
*/
router.post(
"/parts-management/VehicleDamageEstimateChgRq",
express.raw({ type: "application/xml", limit: XML_BODY_LIMIT }), // Parse XML body
partsManagementIntegrationMiddleware,
partsManagementVehicleDamageEstimateChqRq
);
// Deprovisioning route
if (process.env.NODE_ENV !== "production" && !process.env.HOSTNAME?.endsWith("compute.internal")) {
logger.logger.warn("Parts Management Deprovisioning route has been loaded.");
router.post("/parts-management/deprovision", partsManagementIntegrationMiddleware, partsManagementDeprovisioning);
}
/**
* Route to handle Parts Management Provisioning
*/
router.post("/parts-management/provision", partsManagementIntegrationMiddleware, partsManagementProvisioning);
} else {
console.warn("PARTS_MANAGEMENT_INTEGRATION_SECRET is not set — skipping /parts-management/provision route");
logger.logger.warn("PARTS_MANAGEMENT_INTEGRATION_SECRET is not set — skipping /parts-management/provision route");
}
module.exports = router;

View File

@@ -144,6 +144,9 @@ router.post("/emsupload", validateFirebaseIdTokenMiddleware, data.emsUpload);
// Redis Cache Routes
router.post("/bodyshop-cache", eventAuthorizationMiddleware, updateBodyshopCache);
// Estimate Scrubber Vehicle Type
router.post("/es/vehicletype", data.vehicletype);
// Health Check for docker-compose-cluster load balancer, only available in development
if (process.env.NODE_ENV === "development") {
router.get("/health", (req, res) => {

View File

@@ -1,13 +1,7 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const _ = require("lodash");
const { filter } = require("lodash");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.job = async (req, res) => {
const { jobId } = req.body;
@@ -26,12 +20,10 @@ exports.job = async (req, res) => {
});
const { jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs } = result;
const { ssbuckets, workingdays, timezone, ss_configuration } = result.jobs_by_pk.bodyshop;
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const { ssbuckets, workingdays, timezone, ss_configuration } = jobs_by_pk.bodyshop;
const jobHrs = jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const JobBucket = ssbuckets.filter(
(bucket) => bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
)[0];
const JobBucket = ssbuckets.filter((bucket) => bucket.gte <= jobHrs && (bucket.lt ? bucket.lt > jobHrs : true))[0];
const load = {
productionTotal: {},
productionHours: 0
@@ -73,7 +65,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.format("yyyy-MM-DD");
if (isSameBucket) {
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursIn = (load[itemDate].hoursIn || 0) + AddJobForSchedulingCalc ? jobHours : 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsIn.push(item);
} else {
@@ -97,7 +89,6 @@ exports.job = async (req, res) => {
});
//Get the completing jobs.
let problemJobs = [];
const filteredCompJobs = compJobs.filter((j) => JobBucket.id === CheckJobBucket(ssbuckets, j));
filteredCompJobs.forEach((item) => {
@@ -109,7 +100,7 @@ exports.job = async (req, res) => {
const itemDate = moment(item.actual_completion || item.scheduled_completion)
.tz(timezone)
.format("yyyy-MM-DD");
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursOut =
(load[itemDate].hoursOut || 0) + AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs
@@ -143,7 +134,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.add(day - 1, "days")
.format("yyyy-MM-DD");
if (!!!load[current]) {
if (!load[current]) {
load[current] = {};
}
if (day === 0) {

View File

@@ -291,7 +291,13 @@ const receive = async (req, res) => {
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE", logger);
handleError({
req,
res,
logger,
error: e,
context: "RECEIVE_MESSAGE"
});
}
};
@@ -301,11 +307,11 @@ const receive = async (req, res) => {
* @param logger
* @returns {null|*[]}
*/
const generateMediaArray = (body, logger) => {
const generateMediaArray = (body) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
if (parseInt(NumMedia, 10) > 0) {
const ret = [];
for (let i = 0; i < parseInt(NumMedia); i++) {
for (let i = 0; i < parseInt(NumMedia, 10); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
@@ -322,7 +328,7 @@ const generateMediaArray = (body, logger) => {
* @param context
* @param logger
*/
const handleError = (req, error, res, context, logger) => {
const handleError = ({ req, error, res, context, logger }) => {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,

View File

@@ -11,7 +11,7 @@ const gqlClient = require("../graphql-client/graphql-client").client;
* @returns {Promise<void>}
*/
const send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const { to, messagingServiceSid, body, conversationid, selectedMedia } = req.body;
const {
ioRedis,
logger,

View File

@@ -139,7 +139,7 @@ const status = async (req, res) => {
const markConversationRead = async (req, res) => {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
ioHelpers: { getBodyshopRoom }
} = req;
const { conversation, imexshopid, bodyshopid } = req.body;

View File

@@ -3,12 +3,6 @@
* If required, remember to re-install stripe 14.19.0
*/
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY);
const processor = async (req, res) => {

View File

@@ -1,7 +1,3 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const client = require("../graphql-client/graphql-client").client;
const emailer = require("../email/sendemail");
const moment = require("moment-timezone");
@@ -77,9 +73,6 @@ function functionMapper(f, timezone) {
return moment().tz(timezone).add(3, "day");
case "date.7daysfromnowtz":
return moment().tz(timezone).add(7, "day");
case "date.now":
return moment().tz(timezone);
default:
return f;
}

View File

@@ -1,10 +1,6 @@
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
exports.techLogin = async (req, res) => {
const { shopid, employeeid, pin } = req.body;

View File

@@ -1,4 +1,4 @@
import { describe, it, expect } from "vitest";
import { describe, expect, it } from "vitest";
function add(a, b) {
return a + b;

View File

@@ -9,7 +9,7 @@ const calculateStatusDuration = (transitions, statuses) => {
let totalCurrentStatusDuration = null;
let summations = [];
transitions.forEach((transition, index) => {
transitions.forEach((transition) => {
let duration = transition.duration;
totalDuration += duration;
if (transition.start && !transition.end) {

View File

@@ -1,9 +1,4 @@
// Load environment variables THIS MUST BE AT THE TOP
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const { networkInterfaces, hostname } = require("node:os");
const getHostNameOrIP = () => {

View File

@@ -6,7 +6,7 @@
* @param logger
* @returns {{getBodyshopRoom: (function(*): string), getBodyshopConversationRoom: (function({bodyshopId: *, conversationId: *}): string)}}
*/
const applyIOHelpers = ({ app, api, io, logger }) => {
const applyIOHelpers = ({ app }) => {
// Global Bodyshop Room
const getBodyshopRoom = (bodyshopId) => `bodyshop-broadcast-room:${bodyshopId}`;

View File

@@ -1,9 +1,4 @@
// Load environment variables THIS MUST BE AT THE TOP
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const InstanceManager = require("../utils/instanceMgr").default;
const winston = require("winston");
const WinstonCloudWatch = require("winston-cloudwatch");
@@ -15,7 +10,6 @@ const getHostNameOrIP = require("./getHostNameOrIP");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
warn: { level: 1, name: "warn" },
@@ -38,7 +32,7 @@ const region = InstanceRegion();
const estimateLogSize = (logEntry) => {
let estimatedSize = 0;
for (const key in logEntry) {
if (logEntry.hasOwnProperty(key)) {
if (Object.prototype.hasOwnProperty.call(logEntry, key)) {
const value = logEntry[key];
if (value === undefined || value === null) {
estimatedSize += key.length; // Only count the key length if value is undefined or null
@@ -102,11 +96,13 @@ const createLogger = () => {
const labelColor = "\x1b[33m"; // Yellow
const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${meta
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${
user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${
meta
? `\n${separatorColor} ${labelColor}meta:\x1b[0m ${JSON.stringify(meta, null, 2)} ${separatorColor}`
: ""
}`;
}`;
})
)
})
@@ -195,9 +191,19 @@ const createLogger = () => {
winstonLogger.log(logEntry);
};
const LogIntegrationCall = async ({ platform, method, name, jobid, paymentid, billid, status, bodyshopid, email }) => {
const LogIntegrationCall = async ({
platform,
method,
name,
jobid,
paymentid,
billid,
status,
bodyshopid,
email
}) => {
try {
//Insert the record.
//Insert the record.
await client.request(queries.INSERT_INTEGRATION_LOG, {
log: {
platform,
@@ -211,7 +217,6 @@ const createLogger = () => {
email
}
});
} catch (error) {
console.trace("Stack", error?.stack);
log("integration-log-error", "ERROR", email, null, {

View File

@@ -2,6 +2,7 @@
const path = require("path");
const getHostNameOrIP = require("./getHostNameOrIP");
const logger = require("./logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});

View File

@@ -182,7 +182,6 @@ const redisSocketEvents = ({
const registerSyncEvents = (socket) => {
socket.on("sync-notification-read", async ({ email, bodyshopId, notificationId }) => {
try {
const userEmail = socket.user.email;
const socketMapping = await getUserSocketMappingByBodyshop(email, bodyshopId);
const timestamp = new Date().toISOString();

View File

@@ -1,7 +1,4 @@
const path = require("path");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
const { io } = require("../../server");
const { admin } = require("../firebase/firebase-handler");