Merge branch 'feature/IO-3255-simplified-part-management' into feature/IO-2776-cdk-fortellis

# Conflicts:
#	client/package-lock.json
#	client/src/components/dms-cdk-makes/dms-cdk-makes.component.jsx
#	client/src/components/dms-customer-selector/dms-customer-selector.component.jsx
#	client/src/pages/dms/dms.container.jsx
#	package-lock.json
#	package.json
This commit is contained in:
Dave
2025-08-21 11:13:27 -04:00
698 changed files with 7509 additions and 3049 deletions

View File

@@ -112,6 +112,7 @@ exports.PbsSelectedCustomer = async function PbsSelectedCustomer(socket, selecte
await InsertFailedExportLog(socket, error);
}
};
// Was Successful
async function CheckForErrors(socket, response) {
if (response.WasSuccessful === undefined || response.WasSuccessful === true) {
@@ -138,7 +139,7 @@ async function QueryVehicleFromDms(socket) {
try {
if (!socket.JobData.v_vin) return null;
const { data: VehicleGetResponse, request } = await axios.post(
const { data: VehicleGetResponse } = await axios.post(
PBS_ENDPOINTS.VehicleGet,
{
SerialNumber: socket.JobData.bodyshop.pbs_serialnumber,

View File

@@ -19,7 +19,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -65,36 +65,34 @@ exports.default = async (req, res) => {
vendorRecord = await InsertVendorRecord(oauthClient, qbo_realmId, req, bill);
}
const insertResults = await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
await InsertBill(oauthClient, qbo_realmId, req, bill, vendorRecord, bodyshop);
// //No error. Mark the job exported & insert export log.
if (elgen) {
const result = await client
.setHeaders({ Authorization: BearerToken })
.request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
await client.setHeaders({ Authorization: BearerToken }).request(queries.QBO_MARK_BILL_EXPORTED, {
billId: bill.id,
bill: {
exported: true,
exported_at: moment().tz(bodyshop.timezone)
},
logs: [
{
bodyshopid: bodyshop.id,
billid: bill.id,
successful: true,
useremail: req.user.email
}
]
});
}
ret.push({ billid: bill.id, success: true });
} catch (error) {
logger.log("qbo-paybles-create-error", "ERROR", req.user.email, null, {
error:
(error && error.authResponse && error.authResponse.body) ||
(error?.authResponse && error.authResponse.body) ||
error.response?.data?.Fault?.Error.map((e) => e.Detail).join(", ") ||
(error && error.message)
error?.message
});
ret.push({
billid: bill.id,
@@ -107,7 +105,7 @@ exports.default = async (req, res) => {
//Add the export log error.
if (elgen) {
const result = await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
await client.setHeaders({ Authorization: BearerToken }).request(queries.INSERT_EXPORT_LOG, {
logs: [
{
bodyshopid: bodyshop.id,
@@ -141,7 +139,9 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From vendor where DisplayName = '${StandardizeName(bill.vendor.name)}'`
`select *
From vendor
where DisplayName = '${StandardizeName(bill.vendor.name)}'`
),
method: "POST",
headers: {
@@ -156,7 +156,7 @@ async function QueryVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -194,7 +194,7 @@ async function InsertVendorRecord(oauthClient, qbo_realmId, req, bill) {
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Vendor;
} catch (error) {
@@ -263,11 +263,11 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
.format("YYYY-MM-DD"),
...(!bill.is_credit_memo &&
bill.vendor.due_date && {
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DueDate: moment(bill.date)
//.tz(bill.job.bodyshop.timezone)
.add(bill.vendor.due_date, "days")
.format("YYYY-MM-DD")
}),
DocNumber: bill.invoice_number,
//...(bill.job.class ? { ClassRef: { Id: classes[bill.job.class] } } : {}),
...(!(
@@ -280,8 +280,8 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
: {}),
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
PrivateNote: `RO ${bill.job.ro_number || ""}`,
Line: lines
};
@@ -305,7 +305,7 @@ async function InsertBill(oauthClient, qbo_realmId, req, bill, vendor, bodyshop)
status: result.response?.status,
bodyshopid: bill.job.shopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Bill;
} catch (error) {
@@ -353,8 +353,8 @@ const generateBillLine = (
accountingconfig.qbo && accountingconfig.qbo_usa && region_config.includes("CA_")
? {}
: {
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
value: taxCodes[findTaxCode(billLine.applicable_taxes, ioSalesTaxCodes)]
},
AccountRef: {
value: accounts[account.accountname]
}
@@ -373,7 +373,9 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Account where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
`select *
From Account
where AccountType in ('Cost of Goods Sold', 'Other Current Liability')`
),
method: "POST",
headers: {
@@ -387,10 +389,15 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: accounts.response?.status,
bodyshopid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, accounts);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -403,9 +410,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: taxCodes.status,
bodyshopid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -418,7 +430,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid) {
status: classes.status,
bodyshopid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&

View File

@@ -12,7 +12,6 @@ const { refresh: refreshOauthToken, setNewRefreshToken } = require("./qbo-callba
const OAuthClient = require("intuit-oauth");
const CreateInvoiceLines = require("../qb-receivables-lines").default;
const moment = require("moment-timezone");
const GraphQLClient = require("graphql-request").GraphQLClient;
const { generateOwnerTier } = require("../qbxml/qbxml-utils");
const { createMultiQbPayerLines } = require("../qb-receivables-lines");
@@ -21,7 +20,7 @@ exports.default = async (req, res) => {
clientId: process.env.QBO_CLIENT_ID,
clientSecret: process.env.QBO_SECRET,
environment: process.env.NODE_ENV === "production" ? "production" : "sandbox",
redirectUri: process.env.QBO_REDIRECT_URI,
redirectUri: process.env.QBO_REDIRECT_URI
});
try {
@@ -226,7 +225,10 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(job.ins_co_nm.trim())}'
and Active = true`
),
method: "POST",
headers: {
@@ -241,7 +243,7 @@ async function QueryInsuranceCo(oauthClient, qbo_realmId, req, job) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -296,7 +298,7 @@ async function InsertInsuranceCo(oauthClient, qbo_realmId, req, job, bodyshop) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -316,7 +318,10 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${StandardizeName(ownerName)}' and Active = true`
`select *
From Customer
where DisplayName = '${StandardizeName(ownerName)}'
and Active = true`
),
method: "POST",
headers: {
@@ -331,7 +336,7 @@ async function QueryOwner(oauthClient, qbo_realmId, req, job, isThreeTier, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -358,11 +363,11 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
...(job.ownr_ea ? { PrimaryEmailAddr: { Address: job.ownr_ea.trim() } } : {}),
...(isThreeTier
? {
Job: true,
ParentRef: {
value: parentTierRef.Id
Job: true,
ParentRef: {
value: parentTierRef.Id
}
}
}
: {})
};
try {
@@ -382,7 +387,7 @@ async function InsertOwner(oauthClient, qbo_realmId, req, job, isThreeTier, pare
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -401,7 +406,10 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
url: urlBuilder(
qbo_realmId,
"query",
`select * From Customer where DisplayName = '${job.ro_number}' and Active = true`
`select *
From Customer
where DisplayName = '${job.ro_number}'
and Active = true`
),
method: "POST",
headers: {
@@ -416,7 +424,7 @@ async function QueryJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return (
result.json &&
@@ -464,7 +472,7 @@ async function InsertJob(oauthClient, qbo_realmId, req, job, parentTierRef) {
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json.Customer;
} catch (error) {
@@ -480,7 +488,13 @@ exports.InsertJob = InsertJob;
async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
const items = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Item where active=true maxresults 1000`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Item
where active = true maxresults 1000`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -494,10 +508,16 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, items);
const taxCodes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From TaxCode where active=true`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From TaxCode
where active = true`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -511,9 +531,14 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const classes = await oauthClient.makeApiCall({
url: urlBuilder(qbo_realmId, "query", `select * From Class`),
url: urlBuilder(
qbo_realmId,
"query",
`select *
From Class`
),
method: "POST",
headers: {
"Content-Type": "application/json"
@@ -527,7 +552,7 @@ async function QueryMetaData(oauthClient, qbo_realmId, req, bodyshopid, jobid) {
bodyshopid,
jobid: jobid,
email: req.user.email
})
});
const taxCodeMapping = {};
taxCodes.json &&
@@ -578,55 +603,57 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
DocNumber: job.ro_number,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -660,7 +687,7 @@ async function InsertInvoice(oauthClient, qbo_realmId, req, job, bodyshop, paren
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {
@@ -702,56 +729,58 @@ async function InsertInvoiceMultiPayerInvoice(
DocNumber: job.ro_number + suffix,
...(job.class ? { ClassRef: { value: classes[job.class] } } : {}),
CustomerMemo: {
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
value: `${job.clm_no ? `Claim No: ${job.clm_no}` : ``}${
job.po_number ? `PO No: ${job.po_number}` : ``
} Vehicle:${job.v_model_yr || ""} ${job.v_make_desc || ""} ${
job.v_model_desc || ""
} ${job.v_vin || ""} ${job.plate_no || ""} `.trim()
},
CustomerRef: {
value: parentTierRef.Id
},
...(bodyshop.accountingconfig.qbo_departmentid &&
bodyshop.accountingconfig.qbo_departmentid.trim() !== "" && {
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
DepartmentRef: { value: bodyshop.accountingconfig.qbo_departmentid }
}),
CustomField: [
...(bodyshop.accountingconfig.ReceivableCustomField1
? [
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
{
DefinitionId: "1",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField1],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField2
? [
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
{
DefinitionId: "2",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField2],
Type: "StringType"
}
]
: []),
...(bodyshop.accountingconfig.ReceivableCustomField3
? [
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
{
DefinitionId: "3",
StringValue: job[bodyshop.accountingconfig.ReceivableCustomField3],
Type: "StringType"
}
]
: [])
],
...(bodyshop.accountingconfig &&
bodyshop.accountingconfig.qbo &&
bodyshop.accountingconfig.qbo_usa &&
bodyshop.region_config.includes("CA_") && {
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
TxnTaxDetail: {
TxnTaxCodeRef: {
value: taxCodes[bodyshop.md_responsibility_centers.taxes.state.accountitem]
}
}
}
}),
}),
...(bodyshop.accountingconfig.printlater ? { PrintStatus: "NeedToPrint" } : {}),
...(bodyshop.accountingconfig.emaillater && job.ownr_ea ? { EmailStatus: "NeedToSend" } : {}),
@@ -785,7 +814,7 @@ async function InsertInvoiceMultiPayerInvoice(
bodyshopid: job.shopid,
jobid: job.id,
email: req.user.email
})
});
setNewRefreshToken(req.user.email, result);
return result && result.json && result.json.Invoice;
} catch (error) {

View File

@@ -1,4 +1,3 @@
const GraphQLClient = require("graphql-request").GraphQLClient;
const path = require("path");
const DineroQbFormat = require("../accounting-constants").DineroQbFormat;
const queries = require("../../graphql-client/queries");

View File

@@ -1,10 +1,12 @@
exports.addQbxmlHeader = addQbxmlHeader = (xml) => {
/**
 * Wraps a qbXML payload with the standard XML + qbXML (v13.0) processing
 * instructions required at the top of every QuickBooks Desktop request.
 * A trailing newline is appended after the payload.
 */
const addQbxmlHeader = (xml) =>
  `<?xml version="1.0" encoding="utf-8"?>\n<?qbxml version="13.0"?>\n${xml}\n`;
exports.addQbxmlHeader = addQbxmlHeader;
exports.generateSourceTier = (jobs_by_pk) => {
return jobs_by_pk.ins_co_nm && jobs_by_pk.ins_co_nm.trim().replace(":", " ");
};

View File

@@ -1,6 +1,5 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)

View File

@@ -1,5 +1,4 @@
const path = require("path");
const _ = require("lodash");
const logger = require("../utils/logger");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;

View File

@@ -0,0 +1,66 @@
// PBS rate-type codes that represent part rates and may carry parts tax info.
const KNOWN_PART_RATE_TYPES = [
  "PAA",
  "PAC",
  "PAG",
  "PAL",
  "PAM",
  "PAN",
  "PAO",
  "PAP",
  "PAR",
  "PAS",
  "PASL",
  "CCC",
  "CCD",
  "CCF",
  "CCM",
  "CCDR"
];
// Set view of the list above for O(1) membership checks inside the loop.
// The exported value stays an array for backward compatibility.
const KNOWN_PART_RATE_TYPE_SET = new Set(KNOWN_PART_RATE_TYPES);
/**
 * Extracts and processes parts tax rates from profile info.
 *
 * For each RateInfo entry whose RateType is a known part rate type, the tax
 * percentage is read from TaxInfo.TaxTierInfo.Percentage, falling back to the
 * (first) RateTierInfo.Rate when the tier percentage is missing or
 * non-numeric. Percentages are stored as fractions (value / 100).
 *
 * @param {object} profile - The ProfileInfo object from XML.
 * @returns {object} Map of upper-cased rate type -> parts tax rate settings.
 */
const extractPartsTaxRates = (profile = {}) => {
  // RateInfo may be a single object or an array depending on the XML shape.
  const rateInfos = Array.isArray(profile.RateInfo) ? profile.RateInfo : [profile.RateInfo || {}];
  const partsTaxRates = {};
  for (const r of rateInfos) {
    // RateType may arrive as a plain string or as an XML text node ({ _: "..." }).
    const rateTypeRaw =
      typeof r?.RateType === "string"
        ? r.RateType
        : typeof r?.RateType === "object" && r?.RateType._
        ? r.RateType._
        : "";
    const rateType = rateTypeRaw.toUpperCase();
    if (!KNOWN_PART_RATE_TYPE_SET.has(rateType)) continue;
    // Primary source: tax tier percentage. Number.parseFloat(undefined) is NaN.
    let percentage = Number.parseFloat(r.TaxInfo?.TaxTierInfo?.Percentage);
    if (Number.isNaN(percentage)) {
      // Fallback: the rate of the first rate tier, when present.
      const tierRate = Array.isArray(r.RateTierInfo) ? r.RateTierInfo[0]?.Rate : r.RateTierInfo?.Rate;
      percentage = Number.parseFloat(tierRate);
    }
    if (!Number.isNaN(percentage)) {
      partsTaxRates[rateType] = {
        prt_discp: 0,
        prt_mktyp: false,
        prt_mkupp: 0,
        prt_tax_in: true,
        prt_tax_rt: percentage / 100
      };
    }
  }
  return partsTaxRates;
};
module.exports = {
extractPartsTaxRates,
KNOWN_PART_RATE_TYPES
};

View File

@@ -0,0 +1,187 @@
const admin = require("firebase-admin");
const client = require("../../../graphql-client/graphql-client").client;
const {
DELETE_SHOP,
DELETE_VENDORS_BY_SHOP,
GET_BODYSHOP,
GET_ASSOCIATED_USERS,
DELETE_ASSOCIATIONS_BY_SHOP,
GET_USER_ASSOCIATIONS_COUNT,
DELETE_USER,
GET_VENDORS,
GET_JOBS_BY_SHOP,
DELETE_JOBLINES_BY_JOB_IDS,
DELETE_JOBS_BY_IDS,
DELETE_AUDIT_TRAIL_BY_SHOP
} = require("../partsManagement.queries");
/**
 * Removes the Firebase authentication account for the given UID.
 * @param uid - Firebase auth UID of the user to remove.
 * @returns {Promise<void>} resolves once the account has been deleted.
 */
const deleteFirebaseUser = async (uid) => admin.auth().deleteUser(uid);
/**
 * Removes every vendor record belonging to the given shop.
 * @param shopId - UUID of the bodyshop whose vendors are purged.
 * @returns {Promise<void>}
 */
const deleteVendorsByShop = async (shopId) => {
  const variables = { shopId };
  await client.request(DELETE_VENDORS_BY_SHOP, variables);
};
/**
 * Removes the bodyshop row itself from the database.
 * @param shopId - UUID of the bodyshop to delete.
 * @returns {Promise<void>}
 */
const deleteBodyshop = async (shopId) => {
  const variables = { id: shopId };
  await client.request(DELETE_SHOP, variables);
};
/**
 * Looks up the ids of every job belonging to a shop.
 * @param shopId - UUID of the bodyshop.
 * @returns {Promise<string[]>} the job ids (possibly empty).
 */
const getJobIdsForShop = async (shopId) => {
  const { jobs } = await client.request(GET_JOBS_BY_SHOP, { shopId });
  return jobs.map(({ id }) => id);
};
/**
 * Removes the joblines attached to the given jobs.
 * @param jobIds {string[]} - ids of the jobs whose lines are purged.
 * @returns {Promise<number>} number of jobline rows deleted (0 for empty input).
 */
const deleteJoblinesForJobs = async (jobIds) => {
  if (jobIds.length === 0) return 0;
  const { delete_joblines } = await client.request(DELETE_JOBLINES_BY_JOB_IDS, { jobIds });
  return delete_joblines.affected_rows;
};
/**
 * Removes the given jobs themselves (call after their joblines are gone).
 * @param jobIds {string[]} - ids of the jobs to delete.
 * @returns {Promise<number>} number of job rows deleted (0 for empty input).
 */
const deleteJobsByIds = async (jobIds) => {
  if (jobIds.length === 0) return 0;
  const { delete_jobs } = await client.request(DELETE_JOBS_BY_IDS, { jobIds });
  return delete_jobs.affected_rows;
};
/**
 * Handles deprovisioning a shop for parts management.
 *
 * Deletes the bodyshop identified by `req.body.shopId` together with its
 * dependent records, in FK-safe order: associations -> orphaned users
 * (DB row + Firebase account) -> joblines -> jobs -> audit trail -> vendors
 * -> the shop itself. Refuses to run when NODE_ENV is "production".
 *
 * @param req - Express request; reads `body.shopId` and `req.logger`.
 * @param res - Express response.
 * @returns {Promise<*>} 200 with a deletion summary, or an error payload
 *   (403 in production, 400/404 for validation failures, 500 otherwise).
 */
const partsManagementDeprovisioning = async (req, res) => {
  const { logger } = req;
  const p = req.body;
  // Hard safety gate: this performs destructive bulk deletes and must never
  // run against production data.
  if (process.env.NODE_ENV === "production") {
    return res.status(403).json({ error: "Deprovisioning not allowed in production environment." });
  }
  try {
    if (!p.shopId) {
      // Thrown plain objects carry {status, message}; the catch below maps
      // them onto the HTTP response (file-wide convention).
      throw { status: 400, message: "shopId is required." };
    }
    // Fetch bodyshop and check external_shop_id
    const shopResp = await client.request(GET_BODYSHOP, { id: p.shopId });
    const shop = shopResp.bodyshops_by_pk;
    if (!shop) {
      throw { status: 404, message: `Bodyshop with id ${p.shopId} not found.` };
    }
    // Only externally-provisioned shops (those carrying an external_shop_id)
    // may be removed through this endpoint.
    if (!shop.external_shop_id) {
      throw { status: 400, message: "Cannot delete bodyshop without external_shop_id." };
    }
    logger.log("admin-delete-shop", "debug", null, null, {
      shopId: p.shopId,
      shopname: shop.shopname,
      ioadmin: true
    });
    // Get vendors (names are captured up front so they can be reported in the
    // summary after the rows are gone)
    const vendorsResp = await client.request(GET_VENDORS, { shopId: p.shopId });
    const deletedVendors = vendorsResp.vendors.map((v) => v.name);
    // Get associated users
    const assocResp = await client.request(GET_ASSOCIATED_USERS, { shopId: p.shopId });
    const associatedUsers = assocResp.associations.map((assoc) => ({
      authId: assoc.user.authid,
      email: assoc.user.email
    }));
    // Delete associations for the shop
    const assocDeleteResp = await client.request(DELETE_ASSOCIATIONS_BY_SHOP, { shopId: p.shopId });
    const associationsDeleted = assocDeleteResp.delete_associations.affected_rows;
    // For each user, check if they have remaining associations; if not, delete user and Firebase account.
    // NOTE(review): the DB user row is removed before the Firebase account; if
    // deleteFirebaseUser fails, the auth account is orphaned and the loop
    // aborts mid-way — presumably acceptable for non-production tooling, confirm.
    const deletedUsers = [];
    for (const user of associatedUsers) {
      const countResp = await client.request(GET_USER_ASSOCIATIONS_COUNT, { userEmail: user.email });
      const assocCount = countResp.associations_aggregate.aggregate.count;
      if (assocCount === 0) {
        await client.request(DELETE_USER, { email: user.email });
        await deleteFirebaseUser(user.authId);
        deletedUsers.push(user.email);
      }
    }
    // Get all job ids for this shop, then delete joblines and jobs (joblines first)
    const jobIds = await getJobIdsForShop(p.shopId);
    const joblinesDeleted = await deleteJoblinesForJobs(jobIds);
    const jobsDeleted = await deleteJobsByIds(jobIds);
    // Delete any audit trail entries tied to this bodyshop to avoid FK violations
    const auditResp = await client.request(DELETE_AUDIT_TRAIL_BY_SHOP, { shopId: p.shopId });
    const auditDeleted = auditResp.delete_audit_trail.affected_rows;
    // Delete vendors
    await deleteVendorsByShop(p.shopId);
    // Delete shop (last — everything referencing it is gone by now)
    await deleteBodyshop(p.shopId);
    // Summary log
    logger.log("admin-delete-shop-summary", "info", null, null, {
      shopId: p.shopId,
      shopname: shop.shopname,
      associationsDeleted,
      deletedUsers,
      deletedVendors,
      joblinesDeleted,
      jobsDeleted,
      auditDeleted
    });
    return res.status(200).json({
      message: `Bodyshop ${p.shopId} and associated resources deleted successfully.`,
      deletedShop: { id: p.shopId, name: shop.shopname },
      deletedAssociationsCount: associationsDeleted,
      deletedUsers: deletedUsers,
      deletedVendors: deletedVendors,
      deletedJoblinesCount: joblinesDeleted,
      deletedJobsCount: jobsDeleted,
      deletedAuditTrailCount: auditDeleted
    });
  } catch (err) {
    logger.log("admin-delete-shop-error", "error", null, null, {
      message: err.message,
      detail: err.detail || err
    });
    return res.status(err.status || 500).json({ error: err.message || "Internal server error" });
  }
};
module.exports = partsManagementDeprovisioning;

View File

@@ -1,10 +1,17 @@
const crypto = require("crypto");
const admin = require("firebase-admin");
const client = require("../../graphql-client/graphql-client").client;
const DefaultNewShop = require("./defaultNewShop.json");
const client = require("../../../graphql-client/graphql-client").client;
const DefaultNewShop = require("../defaultNewShop.json");
const {
CHECK_EXTERNAL_SHOP_ID,
CREATE_SHOP,
DELETE_VENDORS_BY_SHOP,
DELETE_SHOP,
CREATE_USER
} = require("../partsManagement.queries");
/**
* Ensures that the required fields are present in the payload.
* Checks if the required fields are present in the payload.
* @param payload
* @param fields
*/
@@ -17,7 +24,7 @@ const requireFields = (payload, fields) => {
};
/**
* Ensures that the email is not already registered in Firebase.
* Ensures that the provided email is not already registered in Firebase.
* @param email
* @returns {Promise<void>}
*/
@@ -33,16 +40,19 @@ const ensureEmailNotRegistered = async (email) => {
};
/**
* Creates a new Firebase user with the provided email.
* Creates a new Firebase user with the given email and optional password.
* @param email
* @param password
* @returns {Promise<UserRecord>}
*/
const createFirebaseUser = async (email) => {
return admin.auth().createUser({ email });
const createFirebaseUser = async (email, password = null) => {
const userData = { email };
if (password) userData.password = password;
return admin.auth().createUser(userData);
};
/**
* Deletes a Firebase user by their UID.
* Deletes a Firebase user by UID.
* @param uid
* @returns {Promise<void>}
*/
@@ -60,18 +70,12 @@ const generateResetLink = async (email) => {
};
/**
* Ensures that the external shop ID is unique in the database.
* Ensures that the provided external shop ID is unique.
* @param externalId
* @returns {Promise<void>}
*/
const ensureExternalIdUnique = async (externalId) => {
const query = `
query CHECK_KEY($key: String!) {
bodyshops(where: { external_shop_id: { _eq: $key } }) {
external_shop_id
}
}`;
const resp = await client.request(query, { key: externalId });
const resp = await client.request(CHECK_EXTERNAL_SHOP_ID, { key: externalId });
if (resp.bodyshops.length) {
throw { status: 400, message: `external_shop_id '${externalId}' is already in use.` };
}
@@ -83,40 +87,26 @@ const ensureExternalIdUnique = async (externalId) => {
* @returns {Promise<*>}
*/
const insertBodyshop = async (input) => {
const mutation = `
mutation CREATE_SHOP($bs: bodyshops_insert_input!) {
insert_bodyshops_one(object: $bs) { id }
}`;
const resp = await client.request(mutation, { bs: input });
const resp = await client.request(CREATE_SHOP, { bs: input });
return resp.insert_bodyshops_one.id;
};
/**
* Deletes all vendors associated with a specific shop ID.
* Deletes all vendors associated with a shop.
* @param shopId
* @returns {Promise<void>}
*/
const deleteVendorsByShop = async (shopId) => {
const mutation = `
mutation DELETE_VENDORS($shopId: uuid!) {
delete_vendors(where: { shopid: { _eq: $shopId } }) {
affected_rows
}
}`;
await client.request(mutation, { shopId });
await client.request(DELETE_VENDORS_BY_SHOP, { shopId });
};
/**
* Deletes a bodyshop by its ID.
* Deletes a bodyshop from the database.
* @param shopId
* @returns {Promise<void>}
*/
const deleteBodyshop = async (shopId) => {
const mutation = `
mutation DELETE_SHOP($id: uuid!) {
delete_bodyshops_by_pk(id: $id) { id }
}`;
await client.request(mutation, { id: shopId });
await client.request(DELETE_SHOP, { id: shopId });
};
/**
@@ -127,13 +117,6 @@ const deleteBodyshop = async (shopId) => {
* @returns {Promise<*>}
*/
const insertUserAssociation = async (uid, email, shopId) => {
const mutation = `
mutation CREATE_USER($u: users_insert_input!) {
insert_users_one(object: $u) {
id: authid
email
}
}`;
const vars = {
u: {
email,
@@ -144,12 +127,12 @@ const insertUserAssociation = async (uid, email, shopId) => {
}
}
};
const resp = await client.request(mutation, vars);
const resp = await client.request(CREATE_USER, vars);
return resp.insert_users_one;
};
/**
* Handles the provisioning of a new parts management shop and user.
* Handles provisioning a new shop for parts management.
* @param req
* @param res
* @returns {Promise<*>}
@@ -159,7 +142,6 @@ const partsManagementProvisioning = async (req, res) => {
const p = { ...req.body, userEmail: req.body.userEmail?.toLowerCase() };
try {
// Validate inputs
await ensureEmailNotRegistered(p.userEmail);
requireFields(p, [
"external_shop_id",
@@ -180,7 +162,6 @@ const partsManagementProvisioning = async (req, res) => {
ioadmin: true
});
// Create shop
const shopInput = {
shopname: p.shopname,
address1: p.address1,
@@ -191,7 +172,7 @@ const partsManagementProvisioning = async (req, res) => {
country: p.country,
email: p.email,
external_shop_id: p.external_shop_id,
timezone: p.timezone,
timezone: p.timezone || DefaultNewShop.timezone,
phone: p.phone,
logo_img_path: {
src: p.logoUrl,
@@ -199,7 +180,25 @@ const partsManagementProvisioning = async (req, res) => {
height: "",
headerMargin: DefaultNewShop.logo_img_path.headerMargin
},
features: {
allAccess: true, // TODO: should be false?
partsManagementOnly: true
},
md_ro_statuses: DefaultNewShop.md_ro_statuses,
md_order_statuses: DefaultNewShop.md_order_statuses,
md_responsibility_centers: DefaultNewShop.md_responsibility_centers,
md_referral_sources: DefaultNewShop.md_referral_sources,
md_messaging_presets: DefaultNewShop.md_messaging_presets,
md_rbac: DefaultNewShop.md_rbac,
md_classes: DefaultNewShop.md_classes,
md_ins_cos: DefaultNewShop.md_ins_cos, // TODO need?
md_categories: DefaultNewShop.md_categories, // TODO need?
md_labor_rates: DefaultNewShop.md_labor_rates, // TODO need?
md_payment_types: DefaultNewShop.md_payment_types, // TODO need?
md_hour_split: DefaultNewShop.md_hour_split, // TODO need?
md_ccc_rates: DefaultNewShop.md_ccc_rates, // TODO need?
appt_alt_transport: DefaultNewShop.appt_alt_transport, // TODO need?
md_jobline_presets: DefaultNewShop.md_jobline_presets, // TODO need?
vendors: {
data: p.vendors.map((v) => ({
name: v.name,
@@ -220,11 +219,12 @@ const partsManagementProvisioning = async (req, res) => {
}))
}
};
const newShopId = await insertBodyshop(shopInput);
// Create user + association
const userRecord = await createFirebaseUser(p.userEmail);
const resetLink = await generateResetLink(p.userEmail);
const newShopId = await insertBodyshop(shopInput);
const userRecord = await createFirebaseUser(p.userEmail, p.userPassword);
let resetLink = null;
if (!p.userPassword) resetLink = await generateResetLink(p.userEmail);
const createdUser = await insertUserAssociation(userRecord.uid, p.userEmail, newShopId);
return res.status(200).json({
@@ -232,7 +232,7 @@ const partsManagementProvisioning = async (req, res) => {
user: {
id: createdUser.id,
email: createdUser.email,
resetLink
resetLink: resetLink || undefined
}
});
} catch (err) {
@@ -241,13 +241,18 @@ const partsManagementProvisioning = async (req, res) => {
detail: err.detail || err
});
// Cleanup on failure
if (err.userRecord) {
await deleteFirebaseUser(err.userRecord.uid).catch(() => {});
await deleteFirebaseUser(err.userRecord.uid).catch(() => {
/* empty */
});
}
if (err.newShopId) {
await deleteVendorsByShop(err.newShopId).catch(() => {});
await deleteBodyshop(err.newShopId).catch(() => {});
await deleteVendorsByShop(err.newShopId).catch(() => {
/* empty */
});
await deleteBodyshop(err.newShopId).catch(() => {
/* empty */
});
}
return res.status(err.status || 500).json({ error: err.message || "Internal server error" });

View File

@@ -0,0 +1,654 @@
// no-dd-sa:javascript-code-style/assignment-name
// CamelCase is used for GraphQL and database fields.
const client = require("../../../graphql-client/graphql-client").client;
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
// GraphQL Queries and Mutations
const {
GET_BODYSHOP_STATUS,
GET_VEHICLE_BY_SHOP_VIN,
INSERT_OWNER,
INSERT_JOB_WITH_LINES
} = require("../partsManagement.queries");
// Status used when the shop's md_order_statuses.default_open is unavailable or the lookup fails.
const FALLBACK_DEFAULT_ORDER_STATUS = "OPEN";
// Config: include labor lines and labor in totals (default true).
const INCLUDE_LABOR = true;
/**
 * Fetches the default order status for a bodyshop.
 * @param {string} shopId - The bodyshop UUID.
 * @param {object} logger - The logger instance.
 * @returns {Promise<string>} The shop's configured default open status, or the fallback.
 */
const getDefaultOrderStatus = async (shopId, logger) => {
  try {
    // BUG FIX: GET_BODYSHOP_STATUS queries `bodyshops_by_pk` (plural), but this
    // previously destructured `bodyshop_by_pk` (singular), which was always
    // undefined — so the shop's configured default_open was never used.
    const { bodyshops_by_pk } = await client.request(GET_BODYSHOP_STATUS, { id: shopId });
    return bodyshops_by_pk?.md_order_statuses?.default_open || FALLBACK_DEFAULT_ORDER_STATUS;
  } catch (err) {
    // Best-effort: a failed lookup must not block job creation.
    logger.log("parts-bodyshop-fetch-failed", "warn", shopId, null, { error: err });
    return FALLBACK_DEFAULT_ORDER_STATUS;
  }
};
/**
 * Looks up an existing vehicle for a shop by VIN.
 * @param {string} shopId - The bodyshop UUID.
 * @param {string} v_vin - The vehicle VIN (lookup is skipped when falsy).
 * @param {object} logger - The logger instance.
 * @returns {Promise<string|null>} The vehicle ID, or null when not found / on error.
 */
const findExistingVehicle = async (shopId, v_vin, logger) => {
  if (!v_vin) return null;
  try {
    const result = await client.request(GET_VEHICLE_BY_SHOP_VIN, { shopid: shopId, v_vin });
    const match = result.vehicles?.[0];
    if (match) {
      logger.log("parts-vehicle-found", "info", match.id, null, { shopid: shopId, v_vin });
      return match.id;
    }
  } catch (err) {
    // Best-effort: fall through and let the caller create the vehicle inline.
    logger.log("parts-vehicle-fetch-failed", "warn", null, null, { error: err });
  }
  return null;
};
/**
 * Pulls job-level fields out of the parsed VehicleDamageEstimateAddRq.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Flat job fields ready for the jobs insert.
 */
const extractJobData = (rq) => {
  const documentInfo = rq.DocumentInfo || {};
  const eventInfo = rq.EventInfo || {};
  const assignment = eventInfo.AssignmentEvent || {};
  const claimInfo = rq.ClaimInfo || {};
  const repairEvent = eventInfo.RepairEvent;
  return {
    shopId: rq.ShopID || rq.shopId,
    refClaimNum: rq.RefClaimNum,
    ciecaid: rq.RqUID || null,
    // Cieca_ttl lives on ClaimInfo per the schema/sample payloads.
    cieca_ttl: parseFloat(claimInfo.Cieca_ttl || 0),
    cat_no: documentInfo.VendorCode || null,
    category: documentInfo.DocumentType || null,
    classType: documentInfo.DocumentStatus || null,
    comment: documentInfo.Comment || null,
    // date_exported (doc.TransmitDateTime) is intentionally omitted:
    // setting it makes the job read-only in the UI.
    asgn_no: assignment.AssignmentNumber || null,
    asgn_type: assignment.AssignmentType || null,
    asgn_date: assignment.AssignmentDate || null,
    scheduled_in: repairEvent?.RequestedPickUpDateTime || null,
    scheduled_completion: repairEvent?.TargetCompletionDateTime || null,
    clm_no: claimInfo.ClaimNum || null,
    status: claimInfo.ClaimStatus || null,
    // Producers nest PolicyInfo either one or two levels deep.
    policy_no: claimInfo.PolicyInfo?.PolicyInfo?.PolicyNum || claimInfo.PolicyInfo?.PolicyNum || null,
    ded_amt: parseFloat(claimInfo.PolicyInfo?.CoverageInfo?.Coverage?.DeductibleInfo?.DeductibleAmt || 0)
  };
};
/**
 * Extracts owner data from the XML request.
 * Falls back to Claimant when Owner is missing.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @param {string} shopId - The bodyshop UUID.
 * @returns {object} Owner data for insertion and inline use.
 */
const extractOwnerData = (rq, shopId) => {
  const party = rq.AdminInfo?.Owner?.Party || rq.AdminInfo?.Claimant?.Party || {};
  const name = party.PersonInfo?.PersonName || {};
  // NOTE: address comes from PersonInfo.Communications, while phones/email
  // come from ContactInfo.Communications — assumed per sample payloads.
  const address = party.PersonInfo?.Communications?.Address || {};
  const rawComms = party.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  let ownr_ph1, ownr_ph2, ownr_ea, ownr_alt_ph;
  for (const entry of comms) {
    if (entry.CommQualifier === "CP") ownr_ph1 = entry.CommPhone;
    else if (entry.CommQualifier === "WP") ownr_ph2 = entry.CommPhone;
    else if (entry.CommQualifier === "EM") ownr_ea = entry.CommEmail;
    else if (entry.CommQualifier === "AL") ownr_alt_ph = entry.CommPhone;
  }
  return {
    shopid: shopId,
    ownr_fn: name.FirstName || null,
    ownr_ln: name.LastName || null,
    ownr_co_nm: party.OrgInfo?.CompanyName || null,
    ownr_addr1: address.Address1 || null,
    ownr_addr2: address.Address2 || null,
    ownr_city: address.City || null,
    ownr_st: address.StateProvince || null,
    ownr_zip: address.PostalCode || null,
    ownr_ctry: address.Country || null,
    ownr_ph1,
    ownr_ph2,
    ownr_ea,
    ownr_alt_ph
    // Candidate future fields: IDInfo qualifier/number, PreferredContactMethod.
  };
};
/**
 * Extracts estimator data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Estimator company, name, and email fields.
 */
const extractEstimatorData = (rq) => {
  const estimator = rq.AdminInfo?.Estimator;
  const party = estimator?.Party || {};
  const rawComms = party.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  const email = comms.find((c) => c.CommQualifier === "EM")?.CommEmail || null;
  return {
    est_co_nm: estimator?.Affiliation || null,
    est_ct_fn: party.PersonInfo?.PersonName?.FirstName || null,
    est_ct_ln: party.PersonInfo?.PersonName?.LastName || null,
    est_ea: email
  };
};
/**
 * Extracts adjuster contact details from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Adjuster name, cell phone, and email fields.
 */
const extractAdjusterData = (rq) => {
  const party = rq.AdminInfo?.Adjuster?.Party || {};
  const rawComms = party.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  const byQualifier = (qualifier) => comms.find((c) => c.CommQualifier === qualifier);
  return {
    agt_ct_fn: party.PersonInfo?.PersonName?.FirstName || null,
    agt_ct_ln: party.PersonInfo?.PersonName?.LastName || null,
    agt_ct_ph: byQualifier("CP")?.CommPhone || null,
    agt_ea: byQualifier("EM")?.CommEmail || null
  };
};
/**
 * Extracts repair facility data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} Servicing dealer name and contact phone (work phone or fax).
 */
const extractRepairFacilityData = (rq) => {
  const party = rq.AdminInfo?.RepairFacility?.Party || {};
  const rawComms = party.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  const workOrFax = comms.find((c) => c.CommQualifier === "WP" || c.CommQualifier === "FX");
  return {
    servicing_dealer: party.OrgInfo?.CompanyName || null,
    servicing_dealer_contact: workOrFax?.CommPhone || null
  };
};
/**
 * Extracts loss information from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {{loss_date: (string|null), loss_type: (string|null), loss_desc: (string|null)}}
 */
const extractLossInfo = (rq) => {
  const facts = rq.ClaimInfo?.LossInfo?.Facts || {};
  const customElement = rq.ClaimInfo?.CustomElement || {};
  return {
    loss_date: facts.LossDateTime || null,
    loss_type: customElement.LossTypeCode || null,
    loss_desc: customElement.LossTypeDesc || null
    // Candidate future fields: PrimaryPOI/SecondaryPOI codes, DamageMemo
    // (maybe ins_memo), TotalLossInd.
  };
};
/**
 * Extracts insured-party and insurer data from the XML request.
 * Phone/fax/email fields stay undefined when no matching qualifier exists.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object} insd_* fields for the jobs insert.
 */
const extractInsuranceData = (rq) => {
  const insuredParty = rq.AdminInfo?.Insured?.Party || {};
  const person = insuredParty.PersonInfo || {};
  const rawComms = insuredParty.ContactInfo?.Communications;
  const comms = Array.isArray(rawComms) ? rawComms : [rawComms || {}];
  const address = person.Communications?.Address || {};
  const insurerParty = rq.AdminInfo?.InsuranceCompany?.Party || {};
  let insd_ph1, insd_ph1x, insd_ph2, insd_ph2x, insd_fax, insd_faxx, insd_ea;
  for (const entry of comms) {
    switch (entry.CommQualifier) {
      case "CP":
        insd_ph1 = entry.CommPhone;
        insd_ph1x = entry.CommPhoneExt;
        break;
      case "WP":
        insd_ph2 = entry.CommPhone;
        insd_ph2x = entry.CommPhoneExt;
        break;
      case "FX":
        insd_fax = entry.CommPhone;
        insd_faxx = entry.CommPhoneExt;
        break;
      case "EM":
        insd_ea = entry.CommEmail;
        break;
      default:
        break;
    }
  }
  return {
    insd_ln: person.PersonName?.LastName || null,
    insd_fn: person.PersonName?.FirstName || null,
    insd_title: person.PersonName?.Title || null,
    // Prefer the insurance company's name; fall back to the insured party's org.
    insd_co_nm: insurerParty.OrgInfo?.CompanyName || insuredParty.OrgInfo?.CompanyName || null,
    insd_addr1: address.Address1 || null,
    insd_addr2: address.Address2 || null,
    insd_city: address.City || null,
    insd_st: address.StateProvince || null,
    insd_zip: address.PostalCode || null,
    insd_ctry: address.Country || null,
    insd_ph1,
    insd_ph1x,
    insd_ph2,
    insd_ph2x,
    insd_fax,
    insd_faxx,
    insd_ea
  };
};
/**
 * Extracts vehicle data from the XML request.
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @param {string} shopId - The bodyshop UUID.
 * @returns {object} Vehicle data for insertion and inline use.
 */
const extractVehicleData = (rq, shopId) => {
  const vehicleInfo = rq.VehicleInfo;
  const desc = vehicleInfo?.VehicleDesc || {};
  const exterior = vehicleInfo?.Paint?.Exterior || {};
  const interior = vehicleInfo?.Paint?.Interior || {};
  // Paint codes may arrive as a list of PaintCodeInfo entries or a single PaintCode.
  const paintCodes = Array.isArray(exterior.PaintCodeInfo)
    ? exterior.PaintCodeInfo.map((p) => p.PaintCode).join(",")
    : exterior.PaintCode || null;
  return {
    shopid: shopId,
    // VIN may be either VINInfo.VINNum or VINInfo.VIN.VINNum depending on producer.
    v_vin: vehicleInfo?.VINInfo?.VINNum || vehicleInfo?.VINInfo?.VIN?.VINNum || null,
    plate_no: vehicleInfo?.License?.LicensePlateNum || null,
    plate_st: vehicleInfo?.License?.LicensePlateStateProvince || null,
    v_model_yr: desc.ModelYear || null,
    v_make_desc: desc.MakeDesc || null,
    v_model_desc: desc.ModelName || null,
    v_color: exterior.Color?.ColorName || null,
    v_bstyle: desc.BodyStyle || null,
    v_engine: desc.EngineDesc || null,
    v_options: desc.SubModelDesc || null,
    v_type: desc.FuelType || null,
    // Drivable indicator is passed through as-is (may be undefined).
    v_cond: vehicleInfo?.Condition?.DrivableInd,
    v_trimcode: desc.TrimCode || null,
    v_tone: exterior.Tone || null,
    v_stage: exterior.RefinishStage || vehicleInfo?.Paint?.RefinishStage || null,
    v_prod_dt: desc.ProductionDate || null,
    v_paint_codes: paintCodes,
    v_mldgcode: desc.MldgCode || null,
    v_makecode: desc.MakeCode || null,
    trim_color: interior.ColorName || desc.TrimColor || null,
    db_v_code: desc.DatabaseCode || null
  };
};
/**
 * Extracts job lines from the XML request.
 *
 * Splitting rules:
 *  - PART lines carry only part pricing/fields; any attached LaborInfo is
 *    split into a derived LABOR line at unq_seq + 400000.
 *  - SUBLET lines become part_type "PAS" with act_price = SubletAmount.
 *  - RefinishLaborInfo becomes a separate "LAR" line at unq_seq + 500000.
 * Labor-derived lines are only emitted when INCLUDE_LABOR is enabled.
 * TODO: Verify with Patrick that splitting part-attached labor is desired.
 *
 * @param {object} rq - The VehicleDamageEstimateAddRq object.
 * @returns {object[]} Array of jobline insert objects.
 */
const extractJobLines = (rq) => {
  // Coerce XML-ish truthy flags (true / 1 / "1" / "Y") to a boolean.
  const coerceFlag = (val) =>
    val === true || val === 1 || val === "1" || (typeof val === "string" && val.toUpperCase() === "Y");
  // manual_line is present on every emitted line: coerced when the indicator
  // was supplied, otherwise explicitly null. (Replaces five duplicated spreads;
  // mirrors coerceManual in the ChgRq handler.)
  const manualLineField = (line) =>
    line.ManualLineInd !== undefined ? { manual_line: coerceFlag(line.ManualLineInd) } : { manual_line: null };
  // Normalize to array without lodash toArray (which flattens object values incorrectly).
  const dl = rq.DamageLineInfo;
  const damageLines = Array.isArray(dl) ? dl : dl ? [dl] : [];
  const out = [];
  for (const line of damageLines) {
    const partInfo = line.PartInfo || {};
    const laborInfo = line.LaborInfo || {};
    const refinishInfo = line.RefinishLaborInfo || {};
    const subletInfo = line.SubletInfo || {};
    // Classify: sublet wins over labor-only; default is part.
    let jobLineType = "PART";
    if (Object.keys(subletInfo).length > 0) jobLineType = "SUBLET";
    else if (Object.keys(laborInfo).length > 0 && Object.keys(partInfo).length === 0) jobLineType = "LABOR";
    const base = {
      line_no: parseInt(line.LineNum || 0, 10),
      unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
      status: line.LineStatusCode || null,
      line_desc: line.LineDesc || null,
      notes: line.LineMemo || null
    };
    if (jobLineType === "PART") {
      const price = parseFloat(partInfo.PartPrice || partInfo.ListPrice || 0);
      // Push the part line with ONLY part pricing/fields.
      out.push({
        ...base,
        // FIX: was `partInfo.PartType || null ? … : null` — the `|| null` was a
        // redundant precedence leftover; the condition is simply PartType's truthiness.
        part_type: partInfo.PartType ? String(partInfo.PartType).toUpperCase() : null,
        part_qty: parseFloat(partInfo.Quantity || 0) || 1,
        oem_partno: partInfo.OEMPartNum || partInfo.PartNum || null,
        db_price: price,
        act_price: price,
        // Tax flag from PartInfo.TaxableInd only when it is a primitive value.
        ...(partInfo.TaxableInd !== undefined &&
        (typeof partInfo.TaxableInd === "string" ||
          typeof partInfo.TaxableInd === "number" ||
          typeof partInfo.TaxableInd === "boolean")
          ? { tax_part: coerceFlag(partInfo.TaxableInd) }
          : {}),
        ...manualLineField(line)
      });
      // If labor is present on the same damage line, split it to a separate LABOR jobline.
      if (INCLUDE_LABOR) {
        const hrs = parseFloat(laborInfo.LaborHours || 0);
        const amt = parseFloat(laborInfo.LaborAmt || 0);
        const hasLabor =
          (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
          (!isNaN(hrs) && hrs !== 0) ||
          (!isNaN(amt) && amt !== 0);
        if (hasLabor) {
          out.push({
            ...base,
            // Offset unq_seq to avoid collisions in later upserts.
            unq_seq: (parseInt(line.UniqueSequenceNum || 0, 10) || 0) + 400000,
            mod_lbr_ty: laborInfo.LaborType || null,
            mod_lb_hrs: isNaN(hrs) ? 0 : hrs,
            lbr_op: laborInfo.LaborOperation || null,
            lbr_amt: isNaN(amt) ? 0 : amt,
            ...manualLineField(line)
          });
        }
      }
    } else if (jobLineType === "SUBLET") {
      out.push({
        ...base,
        part_type: "PAS",
        part_qty: 1,
        act_price: parseFloat(subletInfo.SubletAmount || 0),
        ...manualLineField(line)
      });
    } else if (INCLUDE_LABOR) {
      // Labor-only line (only when labor lines are enabled).
      out.push({
        ...base,
        mod_lbr_ty: laborInfo.LaborType || null,
        mod_lb_hrs: parseFloat(laborInfo.LaborHours || 0),
        lbr_op: laborInfo.LaborOperation || null,
        lbr_amt: parseFloat(laborInfo.LaborAmt || 0),
        ...manualLineField(line)
      });
    }
    // Add a separate refinish labor line if present and enabled.
    if (INCLUDE_LABOR && Object.keys(refinishInfo).length > 0) {
      const hrs = parseFloat(refinishInfo.LaborHours || 0);
      const amt = parseFloat(refinishInfo.LaborAmt || 0);
      if (!isNaN(hrs) || !isNaN(amt)) {
        out.push({
          ...base,
          // Offset unq_seq to avoid collisions in later upserts.
          unq_seq: (parseInt(line.UniqueSequenceNum || 0, 10) || 0) + 500000,
          line_desc: base.line_desc || "Refinish",
          mod_lbr_ty: "LAR",
          mod_lb_hrs: isNaN(hrs) ? 0 : hrs,
          lbr_op: refinishInfo.LaborOperation || null,
          lbr_amt: isNaN(amt) ? 0 : amt,
          ...manualLineField(line)
        });
      }
    }
  }
  return out;
};
// Extracts a GRAND TOTAL amount from RepairTotalsInfo, or null when no
// summary entry qualifies (matched by TotalType/TotalTypeDesc, case-insensitive).
const extractGrandTotal = (rq) => {
  const asArray = (value) => (Array.isArray(value) ? value : value ? [value] : []);
  for (const group of asArray(rq.RepairTotalsInfo)) {
    for (const summary of asArray(group.SummaryTotalsInfo)) {
      const totalType = String(summary.TotalType || "").toUpperCase();
      const totalDesc = String(summary.TotalTypeDesc || "").toUpperCase();
      const isGrand = totalType.includes("GRAND") || totalType === "TOTAL" || totalDesc.includes("GRAND");
      if (!isGrand) continue;
      const amount = parseFloat(summary.TotalAmt ?? "NaN");
      if (!Number.isNaN(amount)) return amount;
    }
  }
  return null;
};
/**
 * Inserts an owner row and returns the new owner ID.
 * @param {object} ownerInput - The owner data to insert.
 * @param {object} logger - The logger instance.
 * @returns {Promise<string|null>} The owner ID, or null when insertion fails.
 */
const insertOwner = async (ownerInput, logger) => {
  try {
    const result = await client.request(INSERT_OWNER, { owner: ownerInput });
    return result.insert_owners_one?.id;
  } catch (err) {
    // Best-effort: the job is still created with a null ownerid.
    logger.log("parts-owner-insert-failed", "warn", null, null, { error: err });
    return null;
  }
};
// Fallback: compute a naive total from joblines (parts + sublet + labor amounts).
// Parts/sublet contribute act_price (times part_qty for part lines); labor
// contributes lbr_amt when INCLUDE_LABOR is on. Returns 0 when nothing usable.
const computeLinesTotal = (joblines = []) => {
  let parts = 0;
  let labor = 0;
  for (const jl of joblines) {
    // FIX: a null/undefined entry previously passed the `jl && jl.part_type`
    // guard as false and then crashed dereferencing `jl.part_type` in the
    // else-if branch. Skip such entries outright.
    if (!jl) continue;
    if (jl.part_type) {
      const qty = Number.isFinite(jl.part_qty) ? jl.part_qty : 1;
      const price = Number.isFinite(jl.act_price) ? jl.act_price : 0;
      parts += price * (qty || 1);
    } else if (Number.isFinite(jl.act_price)) {
      // Non-part line carrying a price (defensive; normally parts/sublet only).
      parts += jl.act_price;
    }
    if (INCLUDE_LABOR && Number.isFinite(jl.lbr_amt)) {
      labor += jl.lbr_amt;
    }
  }
  const total = parts + labor;
  return Number.isFinite(total) && total > 0 ? total : 0;
};
/**
 * Handles the VehicleDamageEstimateAddRq XML request from parts management.
 * Parses the CIECA XML payload, extracts job/owner/vehicle/jobline data, and
 * inserts a new job (with nested joblines) via GraphQL.
 * @param {object} req - The HTTP request object (expects req.body XML and req.logger).
 * @param {object} res - The HTTP response object.
 * @returns {Promise<void>} Sends 200 with { success, jobId }, or an error status.
 */
const vehicleDamageEstimateAddRq = async (req, res) => {
  const { logger } = req;
  try {
    // Parse XML into a normalized JS object; reject when the root element is absent.
    const payload = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(payload.VehicleDamageEstimateAddRq);
    if (!rq) {
      logger.log("parts-missing-root", "error");
      return res.status(400).send("Missing <VehicleDamageEstimateAddRq>");
    }
    // Extract job data.
    // NOTE(review): date_exported is destructured here but extractJobData does not
    // return it (the mapping is commented out there), so it is always undefined and
    // is stripped during JSON serialization of the GraphQL variables — confirm intended.
    const {
      shopId,
      refClaimNum,
      ciecaid,
      cieca_ttl,
      cat_no,
      category,
      classType,
      comment,
      date_exported,
      asgn_no,
      asgn_type,
      asgn_date,
      scheduled_in,
      scheduled_completion,
      clm_no,
      status,
      policy_no,
      ded_amt
    } = extractJobData(rq);
    if (!shopId) {
      // Thrown object (not Error) is handled by the catch below via err.status/err.message.
      throw { status: 400, message: "Missing <ShopID> in XML" };
    }
    // Get default status (used only when the payload carries no ClaimStatus).
    const defaultStatus = await getDefaultOrderStatus(shopId, logger);
    // Extract additional data from the payload.
    const parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
    const ownerData = extractOwnerData(rq, shopId);
    const estimatorData = extractEstimatorData(rq);
    const adjusterData = extractAdjusterData(rq);
    const repairFacilityData = extractRepairFacilityData(rq);
    const vehicleData = extractVehicleData(rq, shopId);
    const lossInfo = extractLossInfo(rq);
    const joblinesData = extractJobLines(rq);
    const insuranceData = extractInsuranceData(rq);
    // Derive clm_total: prefer RepairTotalsInfo SummaryTotals GRAND TOTAL; else sum from lines.
    const grandTotal = extractGrandTotal(rq);
    const computedTotal = grandTotal ?? computeLinesTotal(joblinesData);
    // Find or create relationships (both are best-effort and may yield null).
    const ownerid = await insertOwner(ownerData, logger);
    const vehicleid = await findExistingVehicle(shopId, vehicleData.v_vin, logger);
    // Build job input for the nested insert.
    const jobInput = {
      shopid: shopId,
      converted: true,
      ownerid,
      ro_number: refClaimNum,
      ciecaid,
      cieca_ttl,
      cat_no,
      category,
      class: classType,
      parts_tax_rates,
      clm_no,
      status: status || defaultStatus,
      clm_total: computedTotal || null,
      policy_no,
      ded_amt,
      comment,
      date_exported,
      asgn_no,
      asgn_type,
      asgn_date,
      scheduled_in,
      scheduled_completion,
      // Inline insurance/loss/contacts (spread order matters: later spreads win).
      ...insuranceData,
      ...lossInfo,
      ...ownerData,
      ...estimatorData,
      ...adjusterData,
      ...repairFacilityData,
      // Inline vehicle data duplicated on the job row for quick access.
      v_vin: vehicleData.v_vin,
      v_model_yr: vehicleData.v_model_yr,
      v_model_desc: vehicleData.v_model_desc,
      v_make_desc: vehicleData.v_make_desc,
      v_color: vehicleData.v_color,
      plate_no: vehicleData.plate_no,
      plate_st: vehicleData.plate_st,
      // Link the existing vehicle by id, or create one inline from the extracted data.
      ...(vehicleid ? { vehicleid } : { vehicle: { data: vehicleData } }),
      joblines: { data: joblinesData }
    };
    // Insert job together with its joblines in a single mutation.
    const { insert_jobs_one: newJob } = await client.request(INSERT_JOB_WITH_LINES, { job: jobInput });
    return res.status(200).json({ success: true, jobId: newJob.id });
  } catch (err) {
    logger.log("parts-route-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = vehicleDamageEstimateAddRq;

View File

@@ -0,0 +1,235 @@
// no-dd-sa:javascript-code-style/assignment-name
// Handler for VehicleDamageEstimateChgRq
const client = require("../../../graphql-client/graphql-client").client;
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const {
GET_JOB_BY_CLAIM,
UPDATE_JOB_BY_ID,
DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES
} = require("../partsManagement.queries");
/**
 * Finds the most recent job for a shop + claim number.
 * @param {string} shopId - The bodyshop UUID.
 * @param {string} claimNum - The claim number to match.
 * @param {object} logger - The logger instance.
 * @returns {Promise<object|null>} The job row, or null when not found / on error.
 */
const findJob = async (shopId, claimNum, logger) => {
  try {
    const result = await client.request(GET_JOB_BY_CLAIM, { shopid: shopId, clm_no: claimNum });
    return result.jobs?.[0] ?? null;
  } catch (err) {
    logger.log("parts-job-lookup-failed", "error", null, null, { error: err });
    return null;
  }
};
/**
 * Extracts the job-level fields a ChgRq may update.
 * @param {object} rq - The VehicleDamageEstimateChgRq object.
 * @returns {object} Partial jobs_set_input payload (comment, clm_no, status,
 *   policy_no, and parts_tax_rates when ProfileInfo is supplied).
 */
const extractUpdatedJobData = (rq) => {
  const documentInfo = rq.DocumentInfo || {};
  const claimInfo = rq.ClaimInfo || {};
  const updates = {
    comment: documentInfo.Comment || null,
    clm_no: claimInfo.ClaimNum || null,
    status: claimInfo.ClaimStatus || null,
    // Producers nest PolicyInfo either one or two levels deep.
    policy_no: claimInfo.PolicyInfo?.PolicyInfo?.PolicyNum || claimInfo.PolicyInfo?.PolicyNum || null
  };
  // Keep parts_tax_rates in sync with the AddRq behavior when ProfileInfo is provided.
  if (rq.ProfileInfo) {
    updates.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
  }
  return updates;
};
/**
 * Extracts updated job lines from the request payload, mirroring the AddRq splitting rules:
 * - PART lines carry only part pricing (act_price) and related fields
 * - If LaborInfo exists on a part line, add a separate LABOR line at unq_seq + 400000
 * - If RefinishLaborInfo exists, add a separate LABOR line at unq_seq + 500000 with mod_lbr_ty=LAR
 * - SUBLET lines become PAS part_type with act_price=SubletAmount
 * @param {object} addsChgs - The AddsChgs element of the ChgRq payload.
 * @param {string} jobId - The existing job UUID the lines belong to.
 * @returns {object[]} Array of jobline insert objects (each carries jobid).
 */
const extractUpdatedJobLines = (addsChgs = {}, jobId) => {
  const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}];
  // Coerce XML-ish truthy flags (true / 1 / "1" / "Y") to a boolean.
  const coerceManual = (val) =>
    val === true || val === 1 || val === "1" || (typeof val === "string" && val.toUpperCase() === "Y");
  const out = [];
  for (const line of linesIn) {
    // Skip empty placeholders produced by the single-element normalization above.
    if (!line || Object.keys(line).length === 0) continue;
    const partInfo = line.PartInfo || {};
    const laborInfo = line.LaborInfo || {};
    const refinishInfo = line.RefinishLaborInfo || {};
    const subletInfo = line.SubletInfo || {};
    const base = {
      jobid: jobId,
      line_no: parseInt(line.LineNum || 0, 10),
      unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
      status: line.LineStatusCode || null,
      line_desc: line.LineDesc || null,
      notes: line.LineMemo || null,
      manual_line: line.ManualLineInd !== undefined ? coerceManual(line.ManualLineInd) : null
    };
    const hasPart = Object.keys(partInfo).length > 0;
    const hasLaborOnly = Object.keys(laborInfo).length > 0 && !hasPart && Object.keys(subletInfo).length === 0;
    const hasSublet = Object.keys(subletInfo).length > 0;
    if (hasPart) {
      const price = parseFloat(partInfo.PartPrice || partInfo.ListPrice || 0);
      out.push({
        ...base,
        part_type: partInfo.PartType ? String(partInfo.PartType).toUpperCase() : null,
        part_qty: parseFloat(partInfo.Quantity || 0) || 1,
        oem_partno: partInfo.OEMPartNum || partInfo.PartNum || null,
        db_price: isNaN(price) ? 0 : price,
        act_price: isNaN(price) ? 0 : price
      });
      // Split any attached labor on the part line into a derived labor jobline.
      const hrs = parseFloat(laborInfo.LaborHours || 0);
      const amt = parseFloat(laborInfo.LaborAmt || 0);
      const hasLabor =
        (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
        (!isNaN(hrs) && hrs !== 0) ||
        (!isNaN(amt) && amt !== 0);
      if (hasLabor) {
        out.push({
          ...base,
          // Offset keeps the derived labor line from colliding with the part line's unq_seq.
          unq_seq: (parseInt(line.UniqueSequenceNum || 0, 10) || 0) + 400000,
          mod_lbr_ty: laborInfo.LaborType || null,
          mod_lb_hrs: isNaN(hrs) ? 0 : hrs,
          lbr_op: laborInfo.LaborOperation || null,
          lbr_amt: isNaN(amt) ? 0 : amt
        });
      }
    } else if (hasSublet) {
      out.push({
        ...base,
        part_type: "PAS",
        part_qty: 1,
        act_price: parseFloat(subletInfo.SubletAmount || 0) || 0
      });
    }
    // Labor-only line (no PartInfo): still upsert as a labor entry.
    if (hasLaborOnly) {
      out.push({
        ...base,
        mod_lbr_ty: laborInfo.LaborType || null,
        mod_lb_hrs: parseFloat(laborInfo.LaborHours || 0) || 0,
        lbr_op: laborInfo.LaborOperation || null,
        lbr_amt: parseFloat(laborInfo.LaborAmt || 0) || 0
      });
    }
    // Separate refinish labor line.
    if (Object.keys(refinishInfo).length > 0) {
      const rHrs = parseFloat(refinishInfo.LaborHours || 0);
      const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
      if (!isNaN(rHrs) || !isNaN(rAmt)) {
        out.push({
          ...base,
          // Refinish lines use their own offset band, distinct from derived labor.
          unq_seq: (parseInt(line.UniqueSequenceNum || 0, 10) || 0) + 500000,
          line_desc: base.line_desc || "Refinish",
          mod_lbr_ty: "LAR",
          mod_lb_hrs: isNaN(rHrs) ? 0 : rHrs,
          lbr_op: refinishInfo.LaborOperation || null,
          lbr_amt: isNaN(rAmt) ? 0 : rAmt
        });
      }
    }
  }
  return out;
};
/**
 * Extracts deletion unq_seq values from the Deletions element, also removing any
 * derived labor/refinish lines by including the offset bands (+400000, +500000).
 * @param {object} [deletions] - The Deletions element of the ChgRq payload.
 * @returns {number[]} De-duplicated unq_seq values to delete.
 */
const extractDeletions = (deletions = {}) => {
  const raw = deletions.DamageLineInfo;
  const items = Array.isArray(raw) ? raw : [raw || {}];
  const seqs = new Set();
  for (const line of items) {
    const seq = parseInt(line.UniqueSequenceNum, 10);
    if (!Number.isInteger(seq)) continue;
    seqs.add(seq);
    seqs.add(seq + 400000);
    seqs.add(seq + 500000);
  }
  return [...seqs];
};
/**
 * Handles VehicleDamageEstimateChgRq requests: locates the existing job by
 * shop + claim number, applies job-level field updates, then replaces any
 * changed/deleted joblines using delete-then-insert (avoids relying on a
 * unique constraint for upserts).
 * @param {object} req - The HTTP request object (expects req.body XML and req.logger).
 * @param {object} res - The HTTP response object.
 * @returns {Promise<*>} Sends 200 with { success, jobId }, or an error status.
 */
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
  const { logger } = req;
  try {
    const payload = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq);
    if (!rq) return res.status(400).send("Missing <VehicleDamageEstimateChgRq>");
    const shopId = rq.ShopID;
    const claimNum = rq.ClaimInfo?.ClaimNum;
    if (!shopId || !claimNum) return res.status(400).send("Missing ShopID or ClaimNum");
    // The change request can only target a job previously created by an AddRq.
    const job = await findJob(shopId, claimNum, logger);
    if (!job) return res.status(404).send("Job not found");
    const updatedJobData = extractUpdatedJobData(rq);
    const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id);
    const deletedLineIds = extractDeletions(rq.Deletions);
    await client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
    // Build a set of unq_seq that will be updated (replaced). We delete them first to avoid duplicates.
    const updatedSeqs = Array.from(
      new Set((updatedLines || []).map((l) => l && l.unq_seq).filter((v) => Number.isInteger(v)))
    );
    if ((deletedLineIds && deletedLineIds.length) || (updatedSeqs && updatedSeqs.length)) {
      // Merge explicit deletions with the seqs being replaced; dedupe before the delete call.
      const allToDelete = Array.from(new Set([...(deletedLineIds || []), ...(updatedSeqs || [])]));
      if (allToDelete.length) {
        await client.request(DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: allToDelete });
      }
    }
    if (updatedLines.length > 0) {
      // Insert fresh versions after deletion so we don't depend on a unique constraint.
      await client.request(INSERT_JOBLINES, {
        joblines: updatedLines
      });
    }
    logger.log("parts-job-changed", "info", job.id, null);
    return res.status(200).json({ success: true, jobId: job.id });
  } catch (err) {
    logger.log("parts-chgrq-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = partsManagementVehicleDamageEstimateChgRq;

View File

@@ -0,0 +1,262 @@
// GraphQL Queries and Mutations
// Fetches a shop's order-status metadata (used for the default open status).
// NOTE(review): the response root field is `bodyshops_by_pk` (plural table name);
// callers must destructure exactly that name.
const GET_BODYSHOP_STATUS = `
  query GetBodyshopStatus($id: uuid!) {
    bodyshops_by_pk(id: $id) {
      md_order_statuses
    }
  }
`;
// Looks up an existing vehicle for a shop by VIN (first match only).
const GET_VEHICLE_BY_SHOP_VIN = `
  query GetVehicleByShopVin($shopid: uuid!, $v_vin: String!) {
    vehicles(where: { shopid: { _eq: $shopid }, v_vin: { _eq: $v_vin } }, limit: 1) {
      id
    }
  }
`;
// Inserts a single owner row and returns its id.
const INSERT_OWNER = `
  mutation InsertOwner($owner: owners_insert_input!) {
    insert_owners_one(object: $owner) {
      id
    }
  }
`;
// Inserts a job together with its nested joblines in one mutation.
const INSERT_JOB_WITH_LINES = `
  mutation InsertJob($job: jobs_insert_input!) {
    insert_jobs_one(object: $job) {
      id
      joblines { id unq_seq }
    }
  }
`;
// Finds the most recent job for a shop + claim number (used by the ChgRq handler).
const GET_JOB_BY_CLAIM = `
  query GetJobByClaim($shopid: uuid!, $clm_no: String!) {
    jobs(
      where: { shopid: { _eq: $shopid }, clm_no: { _eq: $clm_no } }
      order_by: { created_at: desc }
      limit: 1
    ) {
      id
    }
  }
`;
// Applies partial job-level updates by primary key.
const UPDATE_JOB_BY_ID = `
  mutation UpdateJobById($id: uuid!, $job: jobs_set_input!) {
    update_jobs_by_pk(pk_columns: { id: $id }, _set: $job) {
      id
    }
  }
`;
// Upserts joblines on the primary-key constraint.
// NOTE(review): not referenced by the Add/Chg handlers in this module (they
// delete-then-insert instead) — confirm it is still needed elsewhere.
const UPSERT_JOBLINES = `
  mutation UpsertJoblines($joblines: [joblines_insert_input!]!) {
    insert_joblines(
      objects: $joblines
      on_conflict: {
        constraint: joblines_pkey
        update_columns: [
          jobid
          status
          line_desc
          part_type
          part_qty
          oem_partno
          db_price
          act_price
          mod_lbr_ty
          mod_lb_hrs
          lbr_op
          lbr_amt
          notes
          manual_line
        ]
      }
    ) {
      affected_rows
    }
  }
`;
// Deletes every jobline belonging to a job.
const DELETE_JOBLINES_BY_JOBID = `
  mutation DeleteJoblinesByJobId($jobid: uuid!) {
    delete_joblines(where: { jobid: { _eq: $jobid } }) {
      affected_rows
    }
  }
`;
// Deletes specific joblines of a job by their unq_seq values.
const DELETE_JOBLINES_BY_IDS = `
  mutation DeleteJoblinesByIds($jobid: uuid!, $unqSeqs: [Int!]!) {
    delete_joblines(
      where: {
        jobid: { _eq: $jobid },
        unq_seq: { _in: $unqSeqs }
      }
    ) {
      affected_rows
    }
  }
`;
// Plain insert of joblines (used after targeted deletion to replace changed lines).
const INSERT_JOBLINES = `
  mutation InsertJoblines($joblines: [joblines_insert_input!]!) {
    insert_joblines(objects: $joblines) {
      affected_rows
    }
  }
`;
const CHECK_EXTERNAL_SHOP_ID = `
query CHECK_KEY($key: String!) {
bodyshops(where: { external_shop_id: { _eq: $key } }) {
external_shop_id
}
}
`;
const CREATE_SHOP = `
mutation CREATE_SHOP($bs: bodyshops_insert_input!) {
insert_bodyshops_one(object: $bs) { id }
}
`;
const DELETE_VENDORS_BY_SHOP = `
mutation DELETE_VENDORS($shopId: uuid!) {
delete_vendors(where: { bodyshopid: { _eq: $shopId } }) {
affected_rows
}
}
`;
const DELETE_SHOP = `
mutation DELETE_SHOP($id: uuid!) {
delete_bodyshops_by_pk(id: $id) { id }
}
`;
const CREATE_USER = `
mutation CREATE_USER($u: users_insert_input!) {
insert_users_one(object: $u) {
id: authid
email
}
}
`;
// Fetches a bodyshop's external_shop_id and shopname by primary key.
const GET_BODYSHOP = `
query GetBodyshop($id: uuid!) {
bodyshops_by_pk(id: $id) {
external_shop_id
shopname
}
}
`;
// Lists users (authid + email) linked to a shop through the associations table.
const GET_ASSOCIATED_USERS = `
query GetAssociatedUsers($shopId: uuid!) {
associations(where: {shopid: {_eq: $shopId}}) {
user {
authid
email
}
}
}
`;
// Removes every user<->shop association for a shop.
const DELETE_ASSOCIATIONS_BY_SHOP = `
mutation DeleteAssociationsByShop($shopId: uuid!) {
delete_associations(where: {shopid: {_eq: $shopId}}) {
affected_rows
}
}
`;
// Counts how many shop associations a user (matched by email) still has —
// presumably to decide whether the user itself can be deleted; see DELETE_USER.
const GET_USER_ASSOCIATIONS_COUNT = `
query GetUserAssociationsCount($userEmail: String!) {
associations_aggregate(where: {useremail: {_eq: $userEmail}}) {
aggregate {
count
}
}
}
`;
// Deletes users matching an email address.
const DELETE_USER = `
mutation DeleteUser($email: String!) {
delete_users(where: {email: {_eq: $email}}) {
affected_rows
}
}
`;
// Lists vendor names for a shop.
const GET_VENDORS = `
query GetVendors($shopId: uuid!) {
vendors(where: {bodyshopid: {_eq: $shopId}}) {
name
}
}
`;
// Lists job ids belonging to a shop.
const GET_JOBS_BY_SHOP = `
query GetJobsByShop($shopId: uuid!) {
jobs(where: {shopid: {_eq: $shopId}}) {
id
}
}
`;
// Bulk-deletes joblines across a set of job ids.
const DELETE_JOBLINES_BY_JOB_IDS = `
mutation DeleteJoblinesByJobIds($jobIds: [uuid!]!) {
delete_joblines(where: {jobid: {_in: $jobIds}}) {
affected_rows
}
}
`;
// Bulk-deletes jobs by id.
const DELETE_JOBS_BY_IDS = `
mutation DeleteJobsByIds($jobIds: [uuid!]!) {
delete_jobs(where: {id: {_in: $jobIds}}) {
affected_rows
}
}
`;
// Deletes the audit trail rows recorded for a shop.
const DELETE_AUDIT_TRAIL_BY_SHOP = `
mutation DeleteAuditTrailByShop($shopId: uuid!) {
delete_audit_trail(where: {bodyshopid: {_eq: $shopId}}) {
affected_rows
}
}
`;
// Public GraphQL documents. GET_BODYSHOP_STATUS, GET_VEHICLE_BY_SHOP_VIN,
// INSERT_OWNER and INSERT_JOB_WITH_LINES are defined earlier in this file.
module.exports = {
GET_BODYSHOP_STATUS,
GET_VEHICLE_BY_SHOP_VIN,
INSERT_OWNER,
INSERT_JOB_WITH_LINES,
GET_JOB_BY_CLAIM,
UPDATE_JOB_BY_ID,
UPSERT_JOBLINES,
DELETE_JOBLINES_BY_JOBID,
DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES,
CHECK_EXTERNAL_SHOP_ID,
CREATE_SHOP,
DELETE_VENDORS_BY_SHOP,
DELETE_SHOP,
CREATE_USER,
GET_BODYSHOP,
GET_ASSOCIATED_USERS,
DELETE_ASSOCIATIONS_BY_SHOP,
GET_USER_ASSOCIATIONS_COUNT,
DELETE_USER,
GET_VENDORS,
GET_JOBS_BY_SHOP,
DELETE_JOBLINES_BY_JOB_IDS,
DELETE_JOBS_BY_IDS,
DELETE_AUDIT_TRAIL_BY_SHOP
};

View File

@@ -0,0 +1,54 @@
const xml2js = require("xml2js");
/**
 * Parses an XML string into a JavaScript object.
 *
 * Namespace prefixes are stripped from both tag and attribute names, and
 * single-child elements are not wrapped in arrays (explicitArray: false).
 *
 * @param {string} xml - The XML string to parse.
 * @param {object} logger - Logger exposing log(event, level, ...) — signature per the call below.
 * @returns {Promise<object>} The parsed XML object.
 * @throws {Error} "Invalid XML", with the underlying parser error attached as `cause`.
 */
const parseXml = async (xml, logger) => {
  try {
    return await xml2js.parseStringPromise(xml, {
      explicitArray: false,
      tagNameProcessors: [xml2js.processors.stripPrefix],
      attrNameProcessors: [xml2js.processors.stripPrefix]
    });
  } catch (err) {
    logger.log("parts-xml-parse-error", "error", null, null, { error: err });
    // Preserve the original parser error for upstream diagnostics instead of
    // discarding it when rethrowing.
    throw new Error("Invalid XML", { cause: err });
  }
};
/**
 * Recursively collapses xml2js text-node wrappers into plain values.
 *
 * xml2js represents an element that has attributes as { _: "text", $: {...} }
 * and a bare text element as { _: "text" }; both are reduced to just the text.
 * Arrays and nested objects are walked recursively; primitives pass through.
 *
 * @param {*} node - Parsed XML object (or any fragment of it).
 * @returns {*} Normalized value with wrapper nodes flattened away.
 */
const normalizeXmlObject = (node) => {
  if (Array.isArray(node)) {
    return node.map((item) => normalizeXmlObject(item));
  }
  if (node === null || typeof node !== "object") {
    return node;
  }
  const keys = Object.keys(node);
  const textWithAttrs = keys.length === 2 && "_" in node && "$" in node;
  const textOnly = keys.length === 1 && "_" in node;
  if (textWithAttrs || textOnly) {
    // Drop the wrapper (and any attributes) and keep only the text payload.
    return normalizeXmlObject(node._);
  }
  const result = {};
  for (const key of keys) {
    result[key] = normalizeXmlObject(node[key]);
  }
  return result;
};
// XML helpers shared by the parts-import handlers.
module.exports = {
parseXml,
normalizeXmlObject
};

View File

@@ -0,0 +1,340 @@
<?xml version="1.0" encoding="UTF-8"?>
<VehicleDamageEstimateAddRq xmlns="http://www.cieca.com/BMS">
<!-- Shop identifier -->
<ShopID>71f8494c-89f0-43e0-8eb2-820b52d723bc</ShopID>
<!-- Request & Claim -->
<RqUID>17e5ccc4-cdfb-4cf3-a08d-ecfa8d145d6f</RqUID>
<RefClaimNum>CLM123</RefClaimNum>
<!-- Document metadata -->
<DocumentInfo>
<DocumentVer>
<DocumentVerCode>SV</DocumentVerCode>
<DocumentVerNum>1</DocumentVerNum>
</DocumentVer>
<DocumentVer>
<DocumentVerCode>VN</DocumentVerCode>
<DocumentVerNum>1</DocumentVerNum>
</DocumentVer>
<ReferenceInfo>
<OtherReferenceInfo>
<OtherReferenceName>RO Number</OtherReferenceName>
<OtherRefNum>RO-987</OtherRefNum>
</OtherReferenceInfo>
<OtherReferenceInfo>
<OtherReferenceName>Job UUID</OtherReferenceName>
<OtherRefNum>abcde-12345-uuid</OtherRefNum>
</OtherReferenceInfo>
</ReferenceInfo>
<Comment>Include OEM where possible</Comment>
<TransmitDateTime>2025-06-18T12:00:00Z</TransmitDateTime>
</DocumentInfo>
<!-- Event classification -->
<EventInfo>
<AssignmentEvent>
<AssignmentNumber>1</AssignmentNumber>
<AssignmentType>Estimate</AssignmentType>
<AssignmentDate>2025-06-18T11:30:00Z</AssignmentDate>
<CreateDateTime>2025-06-18T11:29:00Z</CreateDateTime>
</AssignmentEvent>
<RepairEvent>
<TargetCompletionDateTime>2025-06-25T17:00:00Z</TargetCompletionDateTime>
<RequestedPickUpDateTime>2025-06-22T09:00:00Z</RequestedPickUpDateTime>
</RepairEvent>
</EventInfo>
<!-- Claim & Policy -->
<ClaimInfo>
<ClaimNum>CLM123</ClaimNum>
<ClaimStatus>Open</ClaimStatus>
<PolicyInfo>
<PolicyNum>POL456</PolicyNum>
<CoverageInfo>
<Coverage>
<DeductibleInfo>
<DeductibleAmt>500.00</DeductibleAmt>
</DeductibleInfo>
</Coverage>
</CoverageInfo>
</PolicyInfo>
<Cieca_ttl>1500.50</Cieca_ttl>
</ClaimInfo>
<!-- Administrative Parties -->
<AdminInfo>
<!-- Owner -->
<Owner>
<Party>
<PersonInfo>
<PersonName>
<FirstName>John</FirstName>
<LastName>Doe</LastName>
</PersonName>
<Communications>
<CommQualifier>AL</CommQualifier>
<Address>
<Address1>100 Main St</Address1>
<City>Metropolis</City>
<StateProvince>NY</StateProvince>
<PostalCode>10001</PostalCode>
<Country>USA</Country>
</Address>
</Communications>
</PersonInfo>
<ContactInfo>
<Communications>
<CommQualifier>CP</CommQualifier>
<CommPhone>5551234567</CommPhone>
</Communications>
<Communications>
<CommQualifier>EM</CommQualifier>
<CommEmail>john.doe@example.com</CommEmail>
</Communications>
</ContactInfo>
</Party>
</Owner>
<!-- Estimator -->
<Estimator>
<Party>
<PersonInfo>
<PersonName>
<FirstName>Jane</FirstName>
<LastName>Smith</LastName>
</PersonName>
</PersonInfo>
<ContactInfo>
<Communications>
<CommQualifier>EM</CommQualifier>
<CommEmail>jane.smith@example.com</CommEmail>
</Communications>
</ContactInfo>
</Party>
<Affiliation>EST001</Affiliation>
</Estimator>
<!-- Repair Facility -->
<RepairFacility>
<Party>
<OrgInfo>
<CompanyName>AutoFix</CompanyName>
<Communications>
<CommQualifier>AL</CommQualifier>
<Address>
<Address1>200 Repair Rd</Address1>
<City>Mechanicsburg</City>
<StateProvince>PA</StateProvince>
<PostalCode>17055</PostalCode>
<Country>USA</Country>
</Address>
</Communications>
</OrgInfo>
<ContactInfo>
<Communications>
<CommQualifier>WP</CommQualifier>
<CommPhone>5559876543</CommPhone>
</Communications>
<Communications>
<CommQualifier>FX</CommQualifier>
<CommPhone>5559876544</CommPhone>
</Communications>
</ContactInfo>
</Party>
</RepairFacility>
<!-- Adjuster -->
<Adjuster>
<Party>
<PersonInfo>
<PersonName>
<FirstName>Alice</FirstName>
<LastName>Johnson</LastName>
</PersonName>
</PersonInfo>
</Party>
</Adjuster>
<!-- Supplier -->
<Supplier>
<Party>
<OrgInfo>
<CompanyName>PartsRUs</CompanyName>
</OrgInfo>
</Party>
</Supplier>
<!-- Sender -->
<Sender>
<Party>
<OrgInfo>
<CompanyName>XmlSender</CompanyName>
</OrgInfo>
</Party>
</Sender>
<!-- Other Admin Party -->
<OtherParty>
<Party>
<OrgInfo>
<CompanyName>ThirdPartyAdmin</CompanyName>
</OrgInfo>
</Party>
<AdminType>TPA</AdminType>
</OtherParty>
</AdminInfo>
<!-- (Optional) Rates -->
<ProfileInfo>
<RateInfo>
<RateType>LABOR</RateType>
<Rate>100.0</Rate>
<RateTierInfo>
<TierNum>1</TierNum>
<Percentage>50.0</Percentage>
</RateTierInfo>
</RateInfo>
</ProfileInfo>
<!-- Vehicle details -->
<VehicleInfo>
<VINInfo>
<VINNum>1HGCM82633A004352</VINNum>
</VINInfo>
<License>
<LicensePlateNum>ABC1234</LicensePlateNum>
<LicensePlateStateProvince>CA</LicensePlateStateProvince>
</License>
<VehicleDesc>
<ModelYear>2020</ModelYear>
<MakeDesc>Honda</MakeDesc>
<ModelName>Accord</ModelName>
<BodyStyle>Sedan</BodyStyle>
<EngineDesc>2.0L</EngineDesc>
<ProductionDate>2019-10-10</ProductionDate>
<SubModelDesc>Sport</SubModelDesc>
<FuelType>Gasoline</FuelType>
</VehicleDesc>
<Paint>
<Exterior>
<ColorName>Blue</ColorName>
</Exterior>
</Paint>
<Condition>
<DrivableInd>Y</DrivableInd>
</Condition>
</VehicleInfo>
<!-- Damage line with non-OEM part -->
<DamageLineInfo>
<LineNum>1</LineNum>
<UniqueSequenceNum>1001</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Front Bumper</LineDesc>
<PartInfo>
<PartType>PAA</PartType>
<Quantity>1</Quantity>
<PartPrice>200.00</PartPrice>
<OEMPartNum>OEM123</OEMPartNum>
<NonOEM>
<NonOEMPartNum>NONOEM123</NonOEMPartNum>
<NonOEMPartPrice>180.00</NonOEMPartPrice>
<SupplierRefNum>4c2ff2c4-af2b-4a5f-970e-3e026f0bbf9f</SupplierRefNum>
<PartSelectedInd>1</PartSelectedInd>
</NonOEM>
<TaxableInd>1</TaxableInd>
<AfterMarketUsage>OV</AfterMarketUsage>
<CertificationType>C</CertificationType>
<PriceJudgmentInd>0</PriceJudgmentInd>
<GlassPartInd>0</GlassPartInd>
<PriceInclInd>0</PriceInclInd>
<OrderByApplicationInd>false</OrderByApplicationInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP1</LaborOperation>
<LaborHours>2.5</LaborHours>
<LaborAmt>250.00</LaborAmt>
</LaborInfo>
<LineMemo>Replace bumper</LineMemo>
</DamageLineInfo>
<!-- Damage line with glass part
<DamageLineInfo>
<LineNum>2</LineNum>
<UniqueSequenceNum>1002</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>0</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Windshield</LineDesc>
<PartInfo>
<PartType>PAG</PartType>
<Quantity>1</Quantity>
<PartPrice>572.06</PartPrice>
<OEMPartNum>5610104082</OEMPartNum>
<NonOEM>
<NonOEMPartNum>5610104082</NonOEMPartNum>
<NonOEMPartPrice>572.06</NonOEMPartPrice>
<SupplierRefNum>VEND2</SupplierRefNum>
<PartSelectedInd>1</PartSelectedInd>
</NonOEM>
<TaxableInd>1</TaxableInd>
<AfterMarketUsage>NU</AfterMarketUsage>
<GlassPartInd>1</GlassPartInd>
<PriceJudgmentInd>0</PriceJudgmentInd>
<PriceInclInd>0</PriceInclInd>
<OrderByApplicationInd>false</OrderByApplicationInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP11</LaborOperation>
<LaborHours>3.7</LaborHours>
<LaborAmt>370.00</LaborAmt>
</LaborInfo>
<LineMemo>Replace windshield</LineMemo>
</DamageLineInfo> -->
<!-- Damage line with sublet info -->
<DamageLineInfo>
<LineNum>3</LineNum>
<UniqueSequenceNum>1003</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Sublet Upholstery Repair</LineDesc>
<SubletInfo>
<SubletVendorName>UpholsteryCo</SubletVendorName>
<SubletAmount>200.00</SubletAmount>
<SubletLaborHours>2.0</SubletLaborHours>
</SubletInfo>
<LineMemo>Stitching match required</LineMemo>
</DamageLineInfo>
<!-- Damage line with labor-only work -->
<DamageLineInfo>
<LineNum>4</LineNum>
<UniqueSequenceNum>1004</UniqueSequenceNum>
<ParentLineNum>0</ParentLineNum>
<ManualLineInd>0</ManualLineInd>
<AutomatedEntry>1</AutomatedEntry>
<DescJudgmentInd>0</DescJudgmentInd>
<LineStatusCode>Draft</LineStatusCode>
<LineDesc>Dent Repair Door</LineDesc>
<LaborInfo>
<LaborType>LAD</LaborType>
<LaborOperation>OP3</LaborOperation>
<LaborHours>1.5</LaborHours>
<LaborAmt>150.00</LaborAmt>
</LaborInfo>
<LineMemo>Requires touch-up</LineMemo>
</DamageLineInfo>
</VehicleDamageEstimateAddRq>

View File

@@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<VehicleDamageEstimateChgRq xmlns="http://www.cieca.com/BMS">
<ShopID>71f8494c-89f0-43e0-8eb2-820b52d723bc</ShopID>
<RqUID>chg-0001-uuid</RqUID>
<ClaimInfo>
<ClaimNum>CLM123</ClaimNum>
<ClaimStatus>In Progress</ClaimStatus>
<PolicyInfo>
<PolicyNum>POL456</PolicyNum>
</PolicyInfo>
</ClaimInfo>
<DocumentInfo>
<Comment>Revised bumper labor hours and added notes</Comment>
</DocumentInfo>
<AddsChgs>
<DamageLineInfo>
<LineNum>1</LineNum>
<UniqueSequenceNum>1001</UniqueSequenceNum>
<ManualLineInd>0</ManualLineInd>
<LineStatusCode>Updated</LineStatusCode>
<LineDesc>Front Bumper</LineDesc>
<PartInfo>
<PartType>PAA</PartType>
<Quantity>1</Quantity>
<PartPrice>200.00</PartPrice>
<OEMPartNum>OEM123</OEMPartNum>
<TaxableInd>1</TaxableInd>
</PartInfo>
<LaborInfo>
<LaborType>LAB</LaborType>
<LaborOperation>OP1</LaborOperation>
<LaborHours>3.0</LaborHours> <!-- Modified -->
<LaborAmt>300.00</LaborAmt> <!-- Modified -->
</LaborInfo>
<LineMemo>Increased time due to hidden damage</LineMemo> <!-- Added -->
</DamageLineInfo>
<DamageLineInfo>
<LineNum>4</LineNum>
<UniqueSequenceNum>1004</UniqueSequenceNum>
<ManualLineInd>0</ManualLineInd>
<LineStatusCode>Updated</LineStatusCode>
<LineDesc>Dent Repair Door</LineDesc>
<LaborInfo>
<LaborType>LAD</LaborType>
<LaborOperation>OP3</LaborOperation>
<LaborHours>2.0</LaborHours> <!-- Modified -->
<LaborAmt>200.00</LaborAmt> <!-- Modified -->
</LaborInfo>
<LineMemo>Increased scope of repair</LineMemo> <!-- Added -->
</DamageLineInfo>
</AddsChgs>
<Deletions>
<DamageLineInfo>
<UniqueSequenceNum>1003</UniqueSequenceNum> <!-- Sublet line -->
</DamageLineInfo>
</Deletions>
</VehicleDamageEstimateChgRq>

View File

@@ -0,0 +1,431 @@
Awesome — thanks for the dumps. I pulled the structures directly from the XSDs you uploaded and
focused on **`VehicleDamageEstimateAddRq`** and the graph of types it depends on. Below is a
developer-grade map you can hand to a coding agent.
---
# What it is & where it lives
* **Global element**: `VehicleDamageEstimateAddRq`
* **Namespace**: `http://www.cieca.com/BMS` (default ns in your files)
* **Defined in**: `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* **Type**: `EstimateRqType` (from `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`)
* **Service group** (where this message is accepted): `EstimateService` in
`BMSEstimateService_2024R1_V6.9.0.xsd`
Includes: `PropertyDamageEstimateAddRq/Rs`, `VehicleDamageEstimateAddRq/Rs`,
`VehicleDamageEstimateChgRq/Rs`, `VehicleDamagePhotoEstimateAddRq/Rs`.
---
# Top-level schema (for `VehicleDamageEstimateAddRq` → `EstimateRqType`)
`EstimateRqType` **extends** `MessageHeaderType` (from `BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`) and
then adds the following sequence. I've marked **required** vs *optional* and multiplicity:
**Header (inherited from `MessageHeaderType`):**
* **`RqUID`** (UUID) — **required**
* `AsyncRqUID` (UUID) — *optional*
* `PartnerKey` (Identifier) — *optional*
**Body (from `EstimateRqType`):**
* `SvcProviderName` (Identifier) — *optional*
* `RefClaimNum` (Char\_50) — *optional*
* **`DocumentInfo`** (`DocumentInfoType`) — **required, 1**
* **`ApplicationInfo`** (`ApplicationInfoType`) — **required, 1..**\*
* `EventInfo` (`EventInfoType`) — *optional*
* **`AdminInfo`** (`AdminInfoType`) — **required**
* **`EstimatorIDs`** (`EstimatorIDsTypeType`) — **required**
* `ClaimInfo` (`ClaimInfoType`) — *optional*
* **`VehicleInfo`** (`VehicleInfoType`) **OR** `PropertyInfo` (`PropertyInfoType`) — **choice**
for vehicle, use **`VehicleInfo`**
* **`ProfileInfo`** (`ProfileInfoType`) — **required**
* **`DamageLineInfo`** (`DamageLineInfoType`) — **required, 1..**\* (line items)
* `CalibrationInfo` (`CalibrationInfoType`) — *optional, 0..*
* `ScanInfo` (`ScanInfoType`) — *optional, 0..*
* `FileAttachment` (`FileAttachmentType`) — *optional*
* `NonNewOEMPartInd` (Boolean) — *optional*
* `StorageDuration` (Integer\_Range\_0-999) — *optional*
* **`RepairTotalsInfo`** (`RepairTotalsInfoType`) — **required, 1..**\*
* `RepairTotalsHistory` (`RepairTotalsHistoryType`) — *optional, 0..*
* `PaymentInfo` (`PaymentInfoType`) — *optional*
* `EstimateMemo` (C) — *optional*
* `AdministrativeMemo` (C) — *optional*
* `Disclaimers` (C) — *optional*
* `CustomMemo` (C) — *optional*
* `CustomPrintImage` (C) — *optional*
* `OtherMemos` (`OtherMemosType`) — *optional, 0..*
**Files involved:**
`BMSEstimateMessages_2024R1_V6.9.0.xsd`, `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`,
`BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`, `BMSSimpleTypes_2024R1_V6.9.0.xsd` + code lists XSDs for
enums.
---
# Key dependent types (immediate children you'll actually populate)
Below are the **first-level** structures you'll typically use. I've trimmed to the practical fields;
each type has many optional parties and details you can ignore for a minimal AddRq.
## `DocumentInfoType` (BMSCommonGlobalTypes)
Typical header metadata:
* **`BMSVer`** (`BMSVersionClosedEnumType`) — e.g. **`6.9.0`**
* **`DocumentType`** (`DocumentTypeClosedEnumType`) — code for message family (e.g. `E` for
estimate; codelists provide the letter codes)
* `DocumentSubType` (`DocumentSubTypeClosedEnumType`) — e.g. “Original Estimate”, “Copy”, etc.
* `DocumentID` (Char\_50) — your ID
* `VendorCode` (VendorCodeOpenEnumType) — optional
* `DocumentVer` (`DocumentVerType`) — versioning container (0..\*)
* **`CreateDateTime`** (DateTime)
* `TransmitDateTime` (DateTime)
* `ReferenceInfo` (`RefInfoType`) — links to prior docs
* `CountryCode`, `CurrencyInfo`, `CultureCode` — optional locale bits
## `ApplicationInfoType` (BMSCommonGlobalTypes) **(1..\*)**
* **`ApplicationType`** (`ApplicationTypeClosedEnumType`) — e.g., Estimator, Shop Mgmt, etc.
* **`ApplicationName`** (Char\_30)
* **`ApplicationVer`** (Char\_12)
* `DatabaseVer` (Char\_12)
* `DatabaseDateTime` (DateTime)
## `AdminInfoType` (BMSCommonGlobalTypes)
Large party/role roster; **all child elements are optional**, but the container itself is required.
Common ones:
* `InsuranceCompany` (`InsuranceCompanyType`)
* `PolicyHolder` (`PolicyHolderType`)
* `Insured` / `Owner` / `Customer` (`GenericPartyType`)
* `Claimant` (`ClaimantType`)
* `Estimator` (0..\*) (`EstimatorType`)
* `RepairFacility` (`RepairFacilityType`)
* `RentalProvider`, `TowCompany`, `Lender`, `Lienholder` (0..\*), etc.
(You can send `<AdminInfo/>` if you don't need parties; it validates.)
## `EstimatorIDsTypeType` (BMSEstimateCommonTypes)
* `OriginalEstimatorID` (Char\_40) — optional
* `EstimatorHistory` (0..\*) → `EstimatorHistoryType` ⇒ (`DocumentVerCode`, `DocumentVerNum`)
## `ClaimInfoType` (BMSCommonGlobalTypes) *(optional)*
* `ClaimNum` (Char\_50)
* `PolicyInfo` (0..\*) (`PolicyInfoType`)
* `LossInfo` (`LossInfoType`) — details on loss/time/location/coverage
* `AdditionalIDInfo` (0..\*) (`IDInfoType`)
* `ClaimStatus`, `PreviousPaymentAmt`, `ClaimMemo`, etc.
## `VehicleInfoType` (BMSCommonGlobalTypes) *(choose this over PropertyInfo)*
* `VINInfo` (0..\*) (`VINInfoType`) → **choice** of `VINAvailabilityCode` or one or more `VIN` (
`VINType`)
* `License` (`LicenseType`)
* `VehicleDesc` (`VehicleDescType`) — **ModelYear**, **MakeDesc/MakeCode**, **ModelName/ModelNum**,
`VehicleType`, etc.
* `Paint`, `Body`, `Powertrain`, `Condition`, `Valuation`, `VehicleMemo`
* `PolicyVehicleNum`, `LossVehicleNum`
* `FileAttachment` (`FileAttachmentType`)
* `CustomElement` (0..\*)
* `UnitNum` (Char\_20)
> Note: `VINType` is referenced but its concrete restriction is provided elsewhere in BMS; you can
> treat it as a VIN string (17-char typical) and your validator will enforce the real facet.
## `ProfileInfoType` (BMSEstimateCommonTypes) **required**
Controls rates, tax, and rules used to compute totals:
* `ProfileName` (Char\_40)
* **`RateInfo`** (1..\*) (`RateInfoType`)
* `RateType` (`RateTypeClosedEnumType`) — e.g., BODY\_LABOR, PAINT\_LABOR, MECHANICAL\_LABOR,
MATERIAL, etc.
* `RateTierInfo` / `RateTierHistory` (0..\*)
* `TaxableInd`, `TaxRate`, `AdjustmentInfo` (0..*), `TaxInfo` (0..*)
* `MaterialCalcSettings` (optional)
* `AlternatePartInfo` (0..*), `PartCertification` (0..*), `RefinishCalcSettings`,
`PreTaxDiscountRate`, `TaxExemptInfo` (0..\*), `CanadianTax` (for CA specifics)
## `DamageLineInfoType` (BMSEstimateCommonTypes) **1..**\*
One per estimate line. Core children:
* `LineNum`, `UniqueSequenceNum`, `ParentLineNum` (hierarchy)
* `ManualLineInd`, `AutomatedEntry`, `LineStatusCode`
* `LineDesc`, `LineDescCode`
* `SubletInfo` (`SubletInfoType`)
* `PartInfo` (0..\*) (`PartInfoType`)
* `LaborInfo` (`LaborInfoType`)
* `RefinishLaborInfo` (`LaborInfoType`)
* `MaterialType`, `OtherChargesInfo`, `WhoPays`
* `LineAdjustment`, `AppliedAdjustment`
* `PDRInfo`, `LineType`, `LineMemo`, `VendorRefNum` (0..\*)
**`PartInfoType`** highlights:
* `PartMaterialCode`, `PartType`, `LineItemCategoryCode`
* `PartDesc`, `PartNum`, `OEMPartNum`
* `NonOEM` (0..\*) (`NonOEMType`) — alternate sources/quality
* `ListPrice`, `PartPrice`, `UnitPartPrice`, `TotalPartPrice`, `OEMPartPrice`
* `PriceAdjustment` (0..\*) (`PriceAdjustmentType`)
* `TaxableInd`, `AppliedTaxes`
* `CertificationType` (0..\*), `AlternatePartInd`, `GlassPartInd`
* `Quantity`, `PartStatus`, `Dimensions`, `Glass*`, `QuotedPartList`
**`LaborInfoType`** highlights:
* **`LaborType`** (`LaborTypeClosedEnumType`) — **required**
* `LaborOperation`, `LaborHours`, `LaborHourlyRate`, `LaborAmt`
* `DatabaseLaborType/Hours/Amt`
* `LaborAdjustment` (0..\*)
* Judgment/flags (e.g., `LaborAmtJudgmentInd`, `OverlapInd`)
* Paint-specific fields (`PaintStagesNum`, `PaintTonesNum`)
* `AssemblyLaborCode`
## `CalibrationInfoType` / `ScanInfoType` (BMSEstimateCommonTypes)
* **`ScanInfoType`**: `ScanDetailsList` (optional), `FileAttachment` (optional), `ScanTool`,
`ScanDateTime` (**required**), flags `CleanScanInd`, `FollowUpInd`, plus `Technician`.
* **`CalibrationInfoType`**: optional lists for details & technicians, plus process flags (
`PrerequisitesMetInd`, `ProceduresFollowedInd`, `ADASReviewedWithOwnerInd`).
## `FileAttachmentType` (BMSCommonGlobalTypes)
* `DocAttachment` (0..\*) (`DocAttachmentType`)
* `AttachmentType` (open enum)
* `AttachmentTitle` **or** `AttachmentMemo`
* `AttachmentFileType`, `AttachmentFileName`, `AttachmentLength`
* **One of:** `AttachmentURI` **or** `EmbeddedAttachmentType`
* `EmbeddedAttachmentType` — **choice**: `EmbeddedAttachment` (Binary) **or**
`EmbeddedAttachmentText` (C)
* `AttachmentIntegrity` (0..\*) (optionally includes Binary integrity blobs)
* `AttachmentStatusCode` (open enum)
## `RepairTotalsInfoType` (BMSEstimateCommonTypes) **1..**\*
* `LaborTotalsInfo` (0..\*) (`TotalsInfoType`)
* `PartsTotalsInfo` (0..\*) (`TotalsInfoType`)
* `OtherChargesTotalsInfo` (0..\*) (`TotalsInfoType`)
* `NumOfDamageLines` (optional)
* **`SummaryTotalsInfo`** (1..\*) (`TotalsInfoType`) — your rolled-up totals
* `RepairTotalsType` (`LineTypeClosedEnumType`) — optional (e.g., gross vs. customer-pay segments)
**`TotalsInfoType`** (BMSCommonGlobalTypes) highlights:
* **`TotalType`** (`TotalTypeOpenEnumType`) — category (e.g., LABOR, PARTS, TAX, GRAND\_TOTAL,…)
* `TotalSubType` (open enum)
* **`TotalTypeDesc`** (Char\_30)
* Hours quantities & units, item quantity, unit price
* Detailed `TotalTaxInfo` / `TotalAdjustmentInfo` (0..\*)
* Amounts: `NonTaxableAmt`, `TaxableAmt`, `TaxTotalAmt`, `OtherCharges*`, **`TotalAmt`**,
`TotalPct`, `TotalCost`
* `AmtDueInfo` (0..\*)
## `RepairTotalsHistoryType` (BMSEstimateCommonTypes)
* Version stamp and one or more `HistoryTotalsInfo` entries.
## `PaymentInfoType` (BMSCommonGlobalTypes) *(optional)*
* `PayerType`, `PaymentType`
* `Payee`/`PayerInfo`/`PayeeInfo`
* `PaymentDateTime`, **`PaymentAmt`**
* `PaymentID`, `PaymentMemo`, `PaymentAmtType`
## `OtherMemosType` (BMSCommonGlobalTypes)
* `OtherMemoRef` (open enum), `OtherMemo` (C)
---
# Minimal, schema-valid XML skeleton (vehicle path)
> Uses only **required** containers/fields; values shown as **PLACEHOLDER**.
> You must add at least one **DamageLineInfo** and one **SummaryTotalsInfo** item, and at least one
**RateInfo** inside **ProfileInfo**.
> Enumerations are *code lists*; use valid codes from your system.
```xml
<VehicleDamageEstimateAddRq xmlns="http://www.cieca.com/BMS">
<!-- MessageHeaderType -->
<RqUID>00000000-0000-0000-0000-000000000000</RqUID>
<!-- EstimateRqType sequence -->
<DocumentInfo>
<BMSVer>6.9.0</BMSVer>
<DocumentType>E</DocumentType>
<CreateDateTime>2025-08-14T12:00:00Z</CreateDateTime>
</DocumentInfo>
<ApplicationInfo>
<ApplicationType>INSERT_APP_TYPE</ApplicationType>
<ApplicationName>INSERT_APP_NAME</ApplicationName>
<ApplicationVer>INSERT_APP_VER</ApplicationVer>
</ApplicationInfo>
<AdminInfo/> <!-- container required; children optional -->
<EstimatorIDs/> <!-- container required; children optional -->
<!-- choice: VehicleInfo OR PropertyInfo -->
<VehicleInfo>
<!-- minimally empty is allowed; typical payload would include VIN and Year/Make/Model -->
<!-- Example:
<VINInfo>
<VIN>1HGBH41JXMN109186</VIN>
</VINInfo>
<VehicleDesc>
<ModelYear>2020</ModelYear>
<MakeDesc>Honda</MakeDesc>
<ModelName>Civic</ModelName>
</VehicleDesc>
-->
</VehicleInfo>
<ProfileInfo>
<!-- at least one RateInfo required -->
<RateInfo>
<RateType>INSERT_RATE_TYPE</RateType>
<!-- optional: <RateDesc>Body Labor</RateDesc> <TaxRate>13.00</TaxRate> etc. -->
</RateInfo>
</ProfileInfo>
<!-- at least one DamageLineInfo -->
<DamageLineInfo>
<!-- minimal: include a LaborInfo with required LaborType -->
<LaborInfo>
<LaborType>INSERT_LABOR_TYPE</LaborType>
<!-- optional: <LaborHours>1.0</LaborHours> <LaborHourlyRate>85.00</LaborHourlyRate> -->
</LaborInfo>
</DamageLineInfo>
<!-- at least one RepairTotalsInfo with at least one SummaryTotalsInfo -->
<RepairTotalsInfo>
<SummaryTotalsInfo>
<TotalType>INSERT_TOTAL_TYPE</TotalType>
<TotalTypeDesc>Grand Total</TotalTypeDesc>
<TotalAmt>0.00</TotalAmt>
</SummaryTotalsInfo>
</RepairTotalsInfo>
</VehicleDamageEstimateAddRq>
```
---
# Implementation notes & gotchas (important)
1. **Required containers vs. required content**
* `AdminInfo` and `EstimatorIDs` are **required containers** but their **children are optional**.
Empty elements validate.
* `ProfileInfo` is required and must include **≥1 `RateInfo`** with a `RateType`.
* You must include the **choice** of **`VehicleInfo`** (for this message) instead of `PropertyInfo`.
* Include **≥1 `DamageLineInfo`** and **≥1 `RepairTotalsInfo`** each containing *
*≥1 `SummaryTotalsInfo`**.
2. **Header**
* `RqUID` is required; use a real UUID.
3. **Enumerations / code lists**
* Many fields are `ClosedEnumType`/`OpenEnumType` and validated against the BMS code list XSDs you
included (e.g., `BMSCodeLists_*.xsd`). Use the exact code values your trading partner expects (
e.g., `DocumentType` = `E` for estimates).
* `BMSVer` supports `6.9.0`.
4. **Line hierarchy**
* For nested kits/assemblies, use `ParentLineNum`; `UniqueSequenceNum` helps ordering. `LineType`
can label grouping (e.g., Sublet, Labor, Part, etc.).
5. **Attachments**
* You can embed binary (`EmbeddedAttachmentType/EmbeddedAttachment`) **or** provide a URI (
`AttachmentURI`). Provide `AttachmentFileType` and `AttachmentFileName` either way.
6. **Scans & calibrations**
* If you include `ScanInfo`, it **requires** `ScanTool` and `ScanDateTime`. Calibrations are
optional but provide strong ADAS traceability.
7. **Totals integrity**
* `RepairTotalsInfo/SummaryTotalsInfo` acts as your roll-up. Ensure it reconciles with the sum of
`DamageLineInfo` components and the profile's rates/taxes so consumers don't reject on mismatches.
8. **Currency / numeric facets**
* Monetary fields use `Currency`. Hours/rates/quantities have explicit facets (e.g.,
`Decimal_Range_-999.9-999.9`). Stay within ranges.
9. **Canada specifics**
* `DocumentInfo/CountryCode` = `CA`, and `ProfileInfo/CanadianTax` is available for PST/HST/GST
modeling if you need to encode tax policy explicitly.
---
# Quick field checklist for a typical *valid* “vehicle add” you'll generate
* **Header**
* `RqUID`
* **Doc header**
* `DocumentInfo/BMSVer` = `6.9.0`
* `DocumentInfo/DocumentType` = `E`
* `DocumentInfo/CreateDateTime`
* **App**
* `ApplicationInfo[1..*]/(ApplicationType, ApplicationName, ApplicationVer)`
* **Admin**
* `<AdminInfo/>` (or populate parties) ✅
* **EstimatorIDs**
* `<EstimatorIDs/>` (or add contents) ✅
* **Vehicle**
* `VehicleInfo` (VIN + YMM recommended) ✅
* **Profile & rates**
* `ProfileInfo/RateInfo[1..*]/RateType`
* **Lines**
* `DamageLineInfo[1..*]` with at least one `LaborInfo/LaborType` or `PartInfo`
* **Totals**
* `RepairTotalsInfo[1..*]/SummaryTotalsInfo[1..*]/(TotalType, TotalTypeDesc, TotalAmt)`
---
# Pointers to definitions in your bundle (for traceability)
* `VehicleDamageEstimateAddRq` element → `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* `EstimateRqType``BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* `MessageHeaderType`, `DocumentInfoType`, `VehicleInfoType`, `FileAttachmentType`,
`PaymentInfoType`, etc. → `BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`
* Rates/lines/totals/calibration/scan subtypes → mostly `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* Enums/code lists → `BMSCodeLists_ClassicCode_2024R1_V6.9.0.xsd`,
`BMSCodeLists_CodeExt_2024R1_V6.9.0.xsd`
* Service wrapper (which messages are valid to send/receive) →
`BMSEstimateService_2024R1_V6.9.0.xsd`
---

View File

@@ -0,0 +1,250 @@
You got it—heres the same style of breakdown for **`VehicleDamageEstimateChgRq`** (the *change
request* variant). I pulled this straight from your XSD set and focused on what differs from
`…AddRq`, whats required vs optional, and what a minimal-but-valid payload looks like.
---
# What it is & where it lives
* **Global element**: `VehicleDamageEstimateChgRq`
* **Namespace**: `http://www.cieca.com/BMS`
* **Defined in**: `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* **Type**: `EstimateChgRqType` (declared in `BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`)
* **Service group**: `EstimateService` from `BMSEstimateService_2024R1_V6.9.0.xsd`
Group includes: `PropertyDamageEstimateAddRq/Rs`, `VehicleDamageEstimateAddRq/Rs`,
**`VehicleDamageEstimateChgRq/Rs`**, `VehicleDamagePhotoEstimateAddRq/Rs`.
---
# Top-level schema (for `VehicleDamageEstimateChgRq` → `EstimateChgRqType`)
`EstimateChgRqType` **extends** `MessageHeaderType` (same header as `…AddRq`) but the **body is
almost entirely optional** (intended to send only whats changing). Only **`DocumentInfo`** is
required.
**Header (inherited from `MessageHeaderType`):**
* **`RqUID`** (UUID) — **required**
* `AsyncRqUID` (UUID) — *optional*
* `PartnerKey` (Identifier) — *optional*
**Body (from `EstimateChgRqType`):**
* `SvcProviderName` (Identifier) — *optional*
* `RefClaimNum` (Char\_50) — *optional*
* **`DocumentInfo`** (`DocumentInfoType`) — **required**
* `ApplicationInfo` (`ApplicationInfoType`) — *optional, 0..\**
* `EventInfo` (`EventInfoType`) — *optional*
* `AdminInfo` (`AdminInfoType`) — *optional*
* `EstimatorIDs` (`EstimatorIDsTypeType`) — *optional*
* `ClaimInfo` (`ClaimInfoType`) — *optional*
* **Choice** — *both optional*:
* `VehicleInfo` (`VehicleInfoType`) — *optional*
* `PropertyInfo` (`PropertyInfoType`) — *optional*
* `ProfileInfo` (`ProfileInfoType`) — *optional*
* `DamageLineInfo` (`DamageLineInfoType`) — *optional, 0..\** (send only changed/affected lines)
* `NonNewOEMPartInd` (Boolean) — *optional*
* `StorageDuration` (Integer\_Range\_0-999) — *optional*
* `RepairTotalsInfo` (`RepairTotalsInfoType`) — *optional, 0..\**
* `RepairTotalsHistory` (`RepairTotalsHistoryType`) — *optional, 0..\**
* `PaymentInfo` (`PaymentInfoType`) — *optional*
* `EstimateMemo` (C) — *optional*
* `AdministrativeMemo` (C) — *optional*
* `Disclaimers` (C) — *optional*
* `CustomMemo` (C) — *optional*
* `CustomPrintImage` (C) — *optional*
* `OtherMemos` (`OtherMemosType`) — *optional, 0..\**
**Key deltas vs `VehicleDamageEstimateAddRq`:**
* `…AddRq` *requires* several containers (`AdminInfo`, `EstimatorIDs`, `ProfileInfo`,
`DamageLineInfo`, `RepairTotalsInfo` with `SummaryTotalsInfo`, etc.).
* `…ChgRq` **only requires** `MessageHeaderType/RqUID` and **`DocumentInfo`**; everything else is
optional so you can send *just what changed*.
* `CalibrationInfo` and `ScanInfo` (present in `…AddRq`) are **not** present in `EstimateChgRqType`.
* Because almost everything is optional, **the burden is on you** to correctly identify the target
document/version in `DocumentInfo` (or via `ReferenceInfo`) and to include all fields necessary
for the receiver to apply your changes.
---
# Important dependent types (same as Add, but optional here)
* **`DocumentInfoType`** (BMSCommonGlobalTypes) — **required**
* Use this to identify *which* estimate youre changing. Typical:
* **`BMSVer`** = `6.9.0`
* **`DocumentType`** = `E` (estimate)
* `DocumentID` — your estimate ID
* `CreateDateTime` — when you formed this change message
* `ReferenceInfo` — link back to the prior/authoritative doc (e.g., original `DocumentID`/
`DocumentVer`), if your workflow uses references
* `DocumentVer` — version info list, if you lifecycle versions
* **`ApplicationInfoType`** — software fingerprint (optional, 0..\*)
* **`AdminInfoType`** — parties/roles (optional)
* **`EstimatorIDsTypeType`** — supplemental estimator IDs/history (optional)
* **`ClaimInfoType`** — claim-level data (optional)
* **`VehicleInfoType`** (or `PropertyInfoType`) — vehicle path stays under `VehicleInfo` (optional)
* **`ProfileInfoType`** — rates/taxes/rules (optional)
* **`DamageLineInfoType`** — **send changed/added/removed lines only** (your trading partner may
require specific flags/LineStatusCode or use `ParentLineNum`+`UniqueSequenceNum` to identify
updates)
* **`RepairTotalsInfoType`** — updated totals (optional; some partners expect totals to reconcile
with changed lines)
* **`PaymentInfoType`**, memos, custom print/image & `OtherMemos` — all optional
> Because `ChgRq` is sparse by design, **schema validation wont catch semantic issues** (e.g., you
> remove a part but dont update totals). Make sure your payload is self-consistent per partner
> rules.
---
# Minimal, schema-valid XML skeleton (change request)
> This represents the *absolute floor* to validate: **Header/RqUID** + **DocumentInfo** with basic
> fields. In practice, include `DocumentID` and some way to reference the prior document/version so
> the receiver can apply changes.
```xml
<VehicleDamageEstimateChgRq xmlns="http://www.cieca.com/BMS">
<!-- MessageHeaderType -->
<RqUID>00000000-0000-0000-0000-000000000000</RqUID>
<!-- EstimateChgRqType sequence -->
<DocumentInfo>
<BMSVer>6.9.0</BMSVer>
<DocumentType>E</DocumentType>
<CreateDateTime>2025-08-14T12:00:00Z</CreateDateTime>
<!-- Strongly recommended for change requests: -->
<!-- <DocumentID>EST-12345</DocumentID> -->
<!-- <DocumentVer>
<DocumentVerCode>REV</DocumentVerCode>
<DocumentVerNum>2</DocumentVerNum>
</DocumentVer>
<ReferenceInfo>
<RefDocumentID>EST-12345</RefDocumentID>
<RefDocumentVerNum>1</RefDocumentVerNum>
</ReferenceInfo> -->
</DocumentInfo>
<!-- Add only what changed. Examples: -->
<!-- Update a rate -->
<!--
<ProfileInfo>
<RateInfo>
<RateType>BODY_LABOR</RateType>
<TaxableInd>true</TaxableInd>
<TaxRate>13.00</TaxRate>
</RateInfo>
</ProfileInfo>
-->
<!-- Add/update a labor line -->
<!--
<DamageLineInfo>
<LineNum>10</LineNum>
<LaborInfo>
<LaborType>BODY</LaborType>
<LaborHours>1.5</LaborHours>
<LaborHourlyRate>85.00</LaborHourlyRate>
</LaborInfo>
</DamageLineInfo>
-->
<!-- Sync totals if your partner requires it with each change -->
<!--
<RepairTotalsInfo>
<SummaryTotalsInfo>
<TotalType>GRAND_TOTAL</TotalType>
<TotalTypeDesc>Grand Total</TotalTypeDesc>
<TotalAmt>1234.56</TotalAmt>
</SummaryTotalsInfo>
</RepairTotalsInfo>
-->
</VehicleDamageEstimateChgRq>
```
---
# Practical guidance & gotchas
1. **Targeting the right document/version**
* `DocumentInfo/DocumentID` + `DocumentVer` and/or `ReferenceInfo` should point unambiguously to the
estimate being changed. This is essential because the schema does **not** include a separate
“ChangeTarget” field—partners expect this info in `DocumentInfo`/`ReferenceInfo`.
2. **Sparsity vs completeness**
* You can send just the changed sections (e.g., one `DamageLineInfo`, one `RateInfo`).
* Some receivers require you to **also** include reconciled `RepairTotalsInfo/SummaryTotalsInfo`.
Check partner specs.
3. **Line identity**
* If youre updating an existing line, keep its identity stable using `LineNum` and/or
`UniqueSequenceNum`.
* For nested structures, preserve `ParentLineNum`. Use `LineStatusCode` if your partner requires
explicit “added/changed/deleted” flags.
4. **Profile impacts**
* If a change affects pricing (rates, taxes, discounts), update `ProfileInfo` (and possibly totals).
Omitting totals may be acceptable for some partners; others will reject mismatches.
5. **Whats *not* in ChgRq vs AddRq**
* `CalibrationInfo` and `ScanInfo` do not appear in `EstimateChgRqType`. If you need to change those
data, partner workflows may expect a re-send under Add/PhotoAdd or a separate message
family—confirm externally.
6. **Header is still mandatory**
* `RqUID` must be a real UUID.
7. **Code lists**
* Enumerations (e.g., `DocumentType`, `RateType`, `TotalType`, `LaborType`) are validated against
your code list XSDs. Use exact codes.
---
# Quick field checklist for a *solid* ChgRq
* **Header**
* `RqUID`
* **Doc identity**
* `DocumentInfo/BMSVer` = `6.9.0`
* `DocumentInfo/DocumentType` = `E`
* `DocumentInfo/CreateDateTime`
* `DocumentInfo/DocumentID` (recommended) ✅
* `DocumentInfo/DocumentVer` and/or `ReferenceInfo` (recommended) ✅
* **Changed data only**
* `ProfileInfo/RateInfo` (if rates/taxes changed)
* `DamageLineInfo[0..*]` (added/updated/removed lines)
* `RepairTotalsInfo/SummaryTotalsInfo` (if required by partner)
* Any updated `AdminInfo`, `ClaimInfo`, `VehicleInfo` fragments as needed
---
# Pointers to definitions in your bundle
* `VehicleDamageEstimateChgRq` element → `BMSEstimateMessages_2024R1_V6.9.0.xsd`
* `EstimateChgRqType``BMSEstimateCommonTypes_2024R1_V6.9.0.xsd`
* `MessageHeaderType`, `DocumentInfoType`, `VehicleInfoType`, `TotalsInfoType`, etc. →
`BMSCommonGlobalTypes_2024R1_V6.9.0.xsd`
* Code lists → `BMSCodeLists_ClassicCode_2024R1_V6.9.0.xsd`,
`BMSCodeLists_CodeExt_2024R1_V6.9.0.xsd`
* Service wrapper → `BMSEstimateService_2024R1_V6.9.0.xsd` (group `EstimateService` contains the
ChgRq/Rs)
---

View File

@@ -54,6 +54,10 @@ paths:
userEmail:
type: string
format: email
userPassword:
type: string
description: Optional password for the new user. If provided, the password is set directly, and no password reset link is sent. Must be at least 6 characters.
nullable: true
logoUrl:
type: string
format: uri
@@ -140,6 +144,8 @@ paths:
resetLink:
type: string
format: uri
nullable: true
description: Password reset link for the user. Only included if userPassword is not provided in the request.
'400':
description: Bad request (missing or invalid fields)
content:

View File

@@ -6,7 +6,7 @@
const decodeComment = (comment) => {
try {
return comment ? JSON.parse(Buffer.from(comment, "base64").toString()) : null;
} catch (error) {
} catch {
return null; // Handle malformed base64 string gracefully
}
};

View File

@@ -1,5 +1,3 @@
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const path = require("path");
const logger = require("../utils/logger");
@@ -11,30 +9,10 @@ require("dotenv").config({
exports.default = async (req, res) => {
const { useremail, bodyshopid, operationName, variables, env, time, dbevent, user } = req.body;
const {
ioRedis,
ioHelpers: { getBodyshopRoom }
} = req;
try {
// await client.request(queries.INSERT_IOEVENT, {
// event: {
// operationname: operationName,
// time,
// dbevent,
// env,
// variables,
// bodyshopid,
// useremail
// }
// });
// ioRedis.to(getBodyshopRoom(bodyshopid)).emit("bodyshop-message", {
// operationName,
// useremail
// });
res.sendStatus(200);
} catch (error) {
} catch {
logger.log("ioevent-error", "silly", user, null, {
operationname: operationName,
time,

View File

@@ -1,8 +1,6 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
//const client = require("../graphql-client/graphql-client").client;
const _ = require("lodash");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";
@@ -18,7 +16,8 @@ async function StatusTransition(req, res) {
const { id: jobid, status: value, shopid: bodyshopid } = req.body.event.data.new;
// Create record OPEN on new item, enter state
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record, create a new record saying we came from previous status going to previous status
// If change to SCHEDULE, update the last record and create a new record (update status and end time on old record,
// create a new record saying we came from previous status going to previous status
// (Timeline)
// Final status is exported, there is no end date as there is no further transition (has no end date)
try {

View File

@@ -1,10 +1,7 @@
const path = require("path");
const logger = require("../utils/logger");
const { Upload } = require("@aws-sdk/lib-storage");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { InstanceRegion } = require("../utils/instanceMgr");
const archiver = require("archiver");
const stream = require("node:stream");
const base64UrlEncode = require("./util/base64UrlEncode");
const createHmacSha256 = require("./util/createHmacSha256");
const {
@@ -91,11 +88,11 @@ const getThumbnailUrls = async (req, res) => {
//Delayed as the key structure may change slightly from what it is currently and will require evaluating mobile components.
const client = req.userGraphQLClient;
//If there's no jobid and no billid, we're in temporary documents.
const data = await (
billid ? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid }) :
jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const data = await (billid
? client.request(GET_DOCUMENTS_BY_BILL, { billId: billid })
: jobid
? client.request(GET_DOCUMENTS_BY_JOB, { jobId: jobid })
: client.request(QUERY_TEMPORARY_DOCS));
const thumbResizeParams = `rs:fill:250:250:1/g:ce`;
const s3client = new S3Client({ region: InstanceRegion() });
@@ -106,7 +103,7 @@ const getThumbnailUrls = async (req, res) => {
//<Cloudfront_to_lambda>/<hmac with SHA of entire request URI path (with base64 encoded URL if needed), beginning with un-encoded/un-hashed Salt>/<remainder of url - resize params >/< base 64 URL encoded to image path>
//When working with documents from Cloudinary, the URL does not include the extension.
let key = keyStandardize(document)
let key = keyStandardize(document);
// Build the S3 path to the object.
const fullS3Path = `s3://${imgproxyDestinationBucket}/${key}`;
const base64UrlEncodedKeyString = base64UrlEncode(fullS3Path);
@@ -193,7 +190,10 @@ const downloadFiles = async (req, res) => {
// Handle zipfile stream errors
zipfile.outputStream.on("error", (err) => {
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, { message: err.message, stack: err.stack });
logger.log("imgproxy-download-zipstream-error", "ERROR", req.user?.email, jobId, {
message: err.message,
stack: err.stack
});
// Cannot send another response here, just destroy the connection
res.destroy(err);
});
@@ -202,7 +202,7 @@ const downloadFiles = async (req, res) => {
try {
for (const doc of data.documents) {
let key = keyStandardize(doc)
let key = keyStandardize(doc);
let response;
try {
response = await s3client.send(
@@ -212,13 +212,21 @@ const downloadFiles = async (req, res) => {
})
);
} catch (err) {
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
// Optionally, skip this file or add a placeholder file in the zip
continue;
}
// Attach error handler to S3 stream
response.Body.on("error", (err) => {
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, { key, message: err.message, stack: err.stack });
logger.log("imgproxy-download-s3stream-error", "ERROR", req.user?.email, jobId, {
key,
message: err.message,
stack: err.stack
});
res.destroy(err);
});
zipfile.addReadStream(response.Body, path.basename(key));
@@ -393,7 +401,6 @@ const keyStandardize = (doc) => {
}
};
module.exports = {
generateSignedUploadUrls,
getThumbnailUrls,

View File

@@ -1,5 +1,3 @@
const path = require("path");
/**
* Checks if the event secret is correct
* It adds the following properties to the request object:

View File

@@ -1,5 +1,4 @@
const path = require("path");
const _ = require("lodash");
const xml2js = require("xml2js");
const queries = require("../graphql-client/queries");
const logger = require("../utils/logger");

View File

@@ -280,7 +280,7 @@ const getQueue = () => {
/**
* Dispatches notifications to the `addQueue` for processing.
*/
const dispatchAppsToQueue = async ({ appsToDispatch, logger }) => {
const dispatchAppsToQueue = async ({ appsToDispatch }) => {
const appQueue = getQueue();
for (const app of appsToDispatch) {

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
@@ -8,6 +7,12 @@ const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require(".
// Dinero.globalLocale = "en-CA";
Dinero.globalRoundingMode = "HALF_EVEN";
/**
 * Safely resolves a dot-delimited path against an object (lodash-style `get`).
 * Walks `obj` one path segment at a time; if any intermediate value is
 * null or undefined, that value is returned immediately as-is.
 *
 * @param {Object} obj - Root object to read from.
 * @param {string} key - Dot-delimited path, e.g. "a.b.c".
 * @returns {*} The value at the path, or the first null/undefined value
 *   encountered while walking it (null stays null, undefined stays undefined).
 */
const get = (obj, key) => {
  return key.split(".").reduce((acc, segment) => {
    // Stop descending once we hit null/undefined and preserve that exact
    // value — optional chaining (acc?.[segment]) would collapse null to
    // undefined, changing what callers observe. `acc === undefined` is the
    // idiomatic equivalent of the original `typeof acc == "undefined"`
    // since `acc` is always a declared binding here.
    return acc === undefined || acc === null ? acc : acc[segment];
  }, obj);
};
exports.calculatelabor = async function (req, res) {
const { jobid, calculateOnly } = req.body;
logger.log("job-payroll-calculate-labor", "DEBUG", req.user.email, jobid, null);
@@ -99,9 +104,3 @@ exports.calculatelabor = async function (req, res) {
res.status(503).send();
}
};
get = function (obj, key) {
return key.split(".").reduce(function (o, x) {
return typeof o == "undefined" || o === null ? o : o[x];
}, obj);
};

View File

@@ -1,8 +1,7 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const GraphQLClient = require("graphql-request").GraphQLClient;
const logger = require("../utils/logger");
const { CalculateExpectedHoursForJob, CalculateTicketsHoursForJob } = require("./pay-all");
const { CalculateExpectedHoursForJob } = require("./pay-all");
const moment = require("moment");
// Dinero.defaultCurrency = "USD";
// Dinero.globalLocale = "en-CA";

View File

@@ -1,6 +1,5 @@
const Dinero = require("dinero.js");
const queries = require("../graphql-client/queries");
const _ = require("lodash");
const rdiff = require("recursive-diff");
const logger = require("../utils/logger");

View File

@@ -16,9 +16,40 @@ if (typeof VSSTA_INTEGRATION_SECRET === "string" && VSSTA_INTEGRATION_SECRET.len
// Only load Parts Management routes if that secret is set
if (typeof PARTS_MANAGEMENT_INTEGRATION_SECRET === "string" && PARTS_MANAGEMENT_INTEGRATION_SECRET.length > 0) {
const partsManagementProvisioning = require("../integrations/partsManagement/partsManagementProvisioning");
const partsManagementIntegrationMiddleware = require("../middleware/partsManagementIntegrationMiddleware");
const XML_BODY_LIMIT = "10mb"; // Set a limit for XML body size
const partsManagementProvisioning = require("../integrations/partsManagement/endpoints/partsManagementProvisioning");
const partsManagementDeprovisioning = require("../integrations/partsManagement/endpoints/partsManagementDeprovisioning");
const partsManagementIntegrationMiddleware = require("../middleware/partsManagementIntegrationMiddleware");
const partsManagementVehicleDamageEstimateAddRq = require("../integrations/partsManagement/endpoints/vehicleDamageEstimateAddRq");
const partsManagementVehicleDamageEstimateChqRq = require("../integrations/partsManagement/endpoints/vehicleDamageEstimateChgRq");
/**
* Route to handle Vehicle Damage Estimate Add Request
*/
router.post(
"/parts-management/VehicleDamageEstimateAddRq",
express.raw({ type: "application/xml", limit: XML_BODY_LIMIT }), // Parse XML body
partsManagementIntegrationMiddleware,
partsManagementVehicleDamageEstimateAddRq
);
/**
* Route to handle Vehicle Damage Estimate Change Request
*/
router.post(
"/parts-management/VehicleDamageEstimateChqRq",
express.raw({ type: "application/xml", limit: XML_BODY_LIMIT }), // Parse XML body
partsManagementIntegrationMiddleware,
partsManagementVehicleDamageEstimateChqRq
);
// Deprovisioning route
router.post("/parts-management/deprovision", partsManagementIntegrationMiddleware, partsManagementDeprovisioning);
/**
* Route to handle Parts Management Provisioning
*/
router.post("/parts-management/provision", partsManagementIntegrationMiddleware, partsManagementProvisioning);
} else {
console.warn("PARTS_MANAGEMENT_INTEGRATION_SECRET is not set — skipping /parts-management/provision route");

View File

@@ -1,10 +1,9 @@
const path = require("path");
const queries = require("../graphql-client/queries");
const Dinero = require("dinero.js");
const moment = require("moment-timezone");
const logger = require("../utils/logger");
const _ = require("lodash");
const { filter } = require("lodash");
require("dotenv").config({
path: path.resolve(process.cwd(), `.env.${process.env.NODE_ENV || "development"}`)
});
@@ -26,12 +25,10 @@ exports.job = async (req, res) => {
});
const { jobs_by_pk, blockedDays, prodJobs, arrJobs, compJobs } = result;
const { ssbuckets, workingdays, timezone, ss_configuration } = result.jobs_by_pk.bodyshop;
const jobHrs = result.jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const { ssbuckets, workingdays, timezone, ss_configuration } = jobs_by_pk.bodyshop;
const jobHrs = jobs_by_pk.jobhrs.aggregate.sum.mod_lb_hrs;
const JobBucket = ssbuckets.filter(
(bucket) => bucket.gte <= jobHrs && (!!bucket.lt ? bucket.lt > jobHrs : true)
)[0];
const JobBucket = ssbuckets.filter((bucket) => bucket.gte <= jobHrs && (bucket.lt ? bucket.lt > jobHrs : true))[0];
const load = {
productionTotal: {},
productionHours: 0
@@ -73,7 +70,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.format("yyyy-MM-DD");
if (isSameBucket) {
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursIn = (load[itemDate].hoursIn || 0) + AddJobForSchedulingCalc ? jobHours : 0;
if (AddJobForSchedulingCalc) load[itemDate].jobsIn.push(item);
} else {
@@ -97,7 +94,6 @@ exports.job = async (req, res) => {
});
//Get the completing jobs.
let problemJobs = [];
const filteredCompJobs = compJobs.filter((j) => JobBucket.id === CheckJobBucket(ssbuckets, j));
filteredCompJobs.forEach((item) => {
@@ -109,7 +105,7 @@ exports.job = async (req, res) => {
const itemDate = moment(item.actual_completion || item.scheduled_completion)
.tz(timezone)
.format("yyyy-MM-DD");
if (!!load[itemDate]) {
if (load[itemDate]) {
load[itemDate].hoursOut =
(load[itemDate].hoursOut || 0) + AddJobForSchedulingCalc
? item.labhrs.aggregate.sum.mod_lb_hrs + item.larhrs.aggregate.sum.mod_lb_hrs
@@ -143,7 +139,7 @@ exports.job = async (req, res) => {
.tz(timezone)
.add(day - 1, "days")
.format("yyyy-MM-DD");
if (!!!load[current]) {
if (!load[current]) {
load[current] = {};
}
if (day === 0) {

View File

@@ -291,7 +291,13 @@ const receive = async (req, res) => {
res.status(200).send("");
} catch (e) {
handleError(req, e, res, "RECEIVE_MESSAGE", logger);
handleError({
req,
res,
logger,
error: e,
context: "RECEIVE_MESSAGE"
});
}
};
@@ -301,11 +307,11 @@ const receive = async (req, res) => {
* @param logger
* @returns {null|*[]}
*/
const generateMediaArray = (body, logger) => {
const generateMediaArray = (body) => {
const { NumMedia } = body;
if (parseInt(NumMedia) > 0) {
if (parseInt(NumMedia, 10) > 0) {
const ret = [];
for (let i = 0; i < parseInt(NumMedia); i++) {
for (let i = 0; i < parseInt(NumMedia, 10); i++) {
ret.push(body[`MediaUrl${i}`]);
}
return ret;
@@ -322,7 +328,7 @@ const generateMediaArray = (body, logger) => {
* @param context
* @param logger
*/
const handleError = (req, error, res, context, logger) => {
const handleError = ({ req, error, res, context, logger }) => {
logger.log("sms-inbound-error", "ERROR", "api", null, {
msid: req.body.SmsMessageSid,
text: req.body.Body,

View File

@@ -11,7 +11,7 @@ const gqlClient = require("../graphql-client/graphql-client").client;
* @returns {Promise<void>}
*/
const send = async (req, res) => {
const { to, messagingServiceSid, body, conversationid, selectedMedia, imexshopid } = req.body;
const { to, messagingServiceSid, body, conversationid, selectedMedia } = req.body;
const {
ioRedis,
logger,

View File

@@ -139,7 +139,7 @@ const status = async (req, res) => {
const markConversationRead = async (req, res) => {
const {
ioRedis,
ioHelpers: { getBodyshopRoom, getBodyshopConversationRoom }
ioHelpers: { getBodyshopRoom }
} = req;
const { conversation, imexshopid, bodyshopid } = req.body;

View File

@@ -77,9 +77,6 @@ function functionMapper(f, timezone) {
return moment().tz(timezone).add(3, "day");
case "date.7daysfromnowtz":
return moment().tz(timezone).add(7, "day");
case "date.now":
return moment().tz(timezone);
default:
return f;
}

View File

@@ -1,4 +1,4 @@
import { describe, it, expect } from "vitest";
import { describe, expect, it } from "vitest";
function add(a, b) {
return a + b;

View File

@@ -9,7 +9,7 @@ const calculateStatusDuration = (transitions, statuses) => {
let totalCurrentStatusDuration = null;
let summations = [];
transitions.forEach((transition, index) => {
transitions.forEach((transition) => {
let duration = transition.duration;
totalDuration += duration;
if (transition.start && !transition.end) {

View File

@@ -6,7 +6,7 @@
* @param logger
* @returns {{getBodyshopRoom: (function(*): string), getBodyshopConversationRoom: (function({bodyshopId: *, conversationId: *}): string)}}
*/
const applyIOHelpers = ({ app, api, io, logger }) => {
const applyIOHelpers = ({ app }) => {
// Global Bodyshop Room
const getBodyshopRoom = (bodyshopId) => `bodyshop-broadcast-room:${bodyshopId}`;

View File

@@ -15,7 +15,6 @@ const getHostNameOrIP = require("./getHostNameOrIP");
const client = require("../graphql-client/graphql-client").client;
const queries = require("../graphql-client/queries");
const LOG_LEVELS = {
error: { level: 0, name: "error" },
warn: { level: 1, name: "warn" },
@@ -38,7 +37,7 @@ const region = InstanceRegion();
const estimateLogSize = (logEntry) => {
let estimatedSize = 0;
for (const key in logEntry) {
if (logEntry.hasOwnProperty(key)) {
if (Object.prototype.hasOwnProperty.call(logEntry, key)) {
const value = logEntry[key];
if (value === undefined || value === null) {
estimatedSize += key.length; // Only count the key length if value is undefined or null
@@ -102,11 +101,13 @@ const createLogger = () => {
const labelColor = "\x1b[33m"; // Yellow
const separatorColor = "\x1b[35m|\x1b[0m"; // Magenta for separators
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${meta
return `${timestampColor} [${hostnameColor}] [${level}]: ${message} ${
user ? `${separatorColor} ${labelColor}user:\x1b[0m ${JSON.stringify(user)}` : ""
} ${record ? `${separatorColor} ${labelColor}record:\x1b[0m ${JSON.stringify(record)}` : ""}${
meta
? `\n${separatorColor} ${labelColor}meta:\x1b[0m ${JSON.stringify(meta, null, 2)} ${separatorColor}`
: ""
}`;
}`;
})
)
})
@@ -195,9 +196,19 @@ const createLogger = () => {
winstonLogger.log(logEntry);
};
const LogIntegrationCall = async ({ platform, method, name, jobid, paymentid, billid, status, bodyshopid, email }) => {
const LogIntegrationCall = async ({
platform,
method,
name,
jobid,
paymentid,
billid,
status,
bodyshopid,
email
}) => {
try {
//Insert the record.
//Insert the record.
await client.request(queries.INSERT_INTEGRATION_LOG, {
log: {
platform,
@@ -211,7 +222,6 @@ const createLogger = () => {
email
}
});
} catch (error) {
console.trace("Stack", error?.stack);
log("integration-log-error", "ERROR", email, null, {