Files
bodyshop/server/integrations/partsManagement/endpoints/vehicleDamageEstimateChgRq.js

236 lines
8.3 KiB
JavaScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
// no-dd-sa:javascript-code-style/assignment-name
// Handler for VehicleDamageEstimateChgRq
const client = require("../../../graphql-client/graphql-client").client;
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const {
GET_JOB_BY_CLAIM,
UPDATE_JOB_BY_ID,
DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES
} = require("../partsManagement.queries");
/**
 * Looks up an existing job for a shop/claim pair.
 * @param shopId - shop identifier from the request
 * @param claimNum - claim number to match
 * @param logger - request-scoped logger
 * @returns {Promise<*|null>} first matching job, or null on miss or lookup error
 */
const findJob = async (shopId, claimNum, logger) => {
  let jobs;
  try {
    ({ jobs } = await client.request(GET_JOB_BY_CLAIM, { shopid: shopId, clm_no: claimNum }));
  } catch (err) {
    // Lookup failures are logged and treated as "not found" rather than thrown
    logger.log("parts-job-lookup-failed", "error", null, null, { error: err });
    return null;
  }
  return jobs?.[0] || null;
};
/**
 * Extracts updated job fields from the request payload.
 * @param {object} rq - normalized <VehicleDamageEstimateChgRq> payload
 * @returns {{comment: (string|null), clm_no: (string|null), status: (string|null),
 *   policy_no: (string|null), parts_tax_rates: (*|undefined)}} job patch object;
 *   `parts_tax_rates` is present only when the request carries ProfileInfo
 */
const extractUpdatedJobData = (rq) => {
  const doc = rq.DocumentInfo || {};
  const claim = rq.ClaimInfo || {};
  // PolicyNum may arrive at either nesting depth depending on the sender's XML shape
  const policyNo = claim.PolicyInfo?.PolicyInfo?.PolicyNum || claim.PolicyInfo?.PolicyNum || null;
  const out = {
    comment: doc.Comment || null,
    clm_no: claim.ClaimNum || null,
    status: claim.ClaimStatus || null,
    policy_no: policyNo
  };
  // If ProfileInfo provided in ChangeRq, update parts_tax_rates to stay in sync with AddRq behavior
  if (rq.ProfileInfo) {
    out.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
  }
  return out;
};
/**
 * Builds upsert-ready joblines from the AddsChgs section, mirroring the AddRq
 * splitting rules:
 * - PART lines carry only part pricing (act_price) and related fields
 * - A part line's attached LaborInfo becomes a separate LABOR line at unq_seq + 400000
 * - RefinishLaborInfo becomes a separate LABOR line at unq_seq + 500000 with mod_lbr_ty=LAR
 * - SUBLET lines become PAS part_type with act_price=SubletAmount
 * @param {object} [addsChgs] - AddsChgs node of the parsed request
 * @param {*} jobId - id of the job the lines belong to
 * @returns {object[]} flat list of jobline records
 */
const extractUpdatedJobLines = (addsChgs = {}, jobId) => {
  const rawLines = Array.isArray(addsChgs.DamageLineInfo)
    ? addsChgs.DamageLineInfo
    : [addsChgs.DamageLineInfo || {}];
  // true / 1 / "1" / "y" / "Y" all count as a manual-line flag
  const isManualFlag = (val) => {
    if (val === true || val === 1 || val === "1") return true;
    return typeof val === "string" && val.toUpperCase() === "Y";
  };
  const result = [];
  for (const entry of rawLines) {
    if (!entry) continue;
    if (Object.keys(entry).length === 0) continue;
    const partInfo = entry.PartInfo || {};
    const laborInfo = entry.LaborInfo || {};
    const refinishInfo = entry.RefinishLaborInfo || {};
    const subletInfo = entry.SubletInfo || {};
    // Fields shared by every jobline spawned from this damage line
    const base = {
      jobid: jobId,
      line_no: parseInt(entry.LineNum || 0, 10),
      unq_seq: parseInt(entry.UniqueSequenceNum || 0, 10),
      status: entry.LineStatusCode || null,
      line_desc: entry.LineDesc || null,
      notes: entry.LineMemo || null,
      manual_line: entry.ManualLineInd !== undefined ? isManualFlag(entry.ManualLineInd) : null
    };
    // NaN-safe sequence used for the derived +400000/+500000 lines
    const safeSeq = parseInt(entry.UniqueSequenceNum || 0, 10) || 0;
    const hasPart = Object.keys(partInfo).length > 0;
    const hasSublet = Object.keys(subletInfo).length > 0;
    const laborOnly = !hasPart && !hasSublet && Object.keys(laborInfo).length > 0;
    if (hasPart) {
      const rawPrice = parseFloat(partInfo.PartPrice || partInfo.ListPrice || 0);
      const price = isNaN(rawPrice) ? 0 : rawPrice;
      result.push({
        ...base,
        part_type: partInfo.PartType ? String(partInfo.PartType).toUpperCase() : null,
        part_qty: parseFloat(partInfo.Quantity || 0) || 1,
        oem_partno: partInfo.OEMPartNum || partInfo.PartNum || null,
        db_price: price,
        act_price: price
      });
      // Split any labor attached to the part line into its own derived jobline
      const hrs = parseFloat(laborInfo.LaborHours || 0);
      const amt = parseFloat(laborInfo.LaborAmt || 0);
      const attachedLabor =
        (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
        (!isNaN(hrs) && hrs !== 0) ||
        (!isNaN(amt) && amt !== 0);
      if (attachedLabor) {
        result.push({
          ...base,
          unq_seq: safeSeq + 400000,
          mod_lbr_ty: laborInfo.LaborType || null,
          mod_lb_hrs: isNaN(hrs) ? 0 : hrs,
          lbr_op: laborInfo.LaborOperation || null,
          lbr_amt: isNaN(amt) ? 0 : amt
        });
      }
    } else if (hasSublet) {
      result.push({
        ...base,
        part_type: "PAS",
        part_qty: 1,
        act_price: parseFloat(subletInfo.SubletAmount || 0) || 0
      });
    }
    // Labor-only line (no PartInfo, no SubletInfo): still upsert as a labor entry
    if (laborOnly) {
      result.push({
        ...base,
        mod_lbr_ty: laborInfo.LaborType || null,
        mod_lb_hrs: parseFloat(laborInfo.LaborHours || 0) || 0,
        lbr_op: laborInfo.LaborOperation || null,
        lbr_amt: parseFloat(laborInfo.LaborAmt || 0) || 0
      });
    }
    // Separate refinish labor line, tagged LAR
    if (Object.keys(refinishInfo).length > 0) {
      const rHrs = parseFloat(refinishInfo.LaborHours || 0);
      const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
      if (!isNaN(rHrs) || !isNaN(rAmt)) {
        result.push({
          ...base,
          unq_seq: safeSeq + 500000,
          line_desc: base.line_desc || "Refinish",
          mod_lbr_ty: "LAR",
          mod_lb_hrs: isNaN(rHrs) ? 0 : rHrs,
          lbr_op: refinishInfo.LaborOperation || null,
          lbr_amt: isNaN(rAmt) ? 0 : rAmt
        });
      }
    }
  }
  return result;
};
/**
 * Collects unq_seq values to delete from the Deletions node. For every base
 * sequence it also includes the derived labor (+400000) and refinish (+500000)
 * offsets so split lines are removed with their parent.
 * @param {object} [deletions] - Deletions node of the parsed request
 * @returns {number[]} de-duplicated sequence numbers
 */
const extractDeletions = (deletions = {}) => {
  const rows = Array.isArray(deletions.DamageLineInfo)
    ? deletions.DamageLineInfo
    : [deletions.DamageLineInfo || {}];
  const seqs = rows
    .map((row) => parseInt(row.UniqueSequenceNum, 10))
    .filter(Number.isInteger)
    .flatMap((seq) => [seq, seq + 400000, seq + 500000]);
  return [...new Set(seqs)];
};
/**
 * Express handler for VehicleDamageEstimateChgRq.
 * Parses the XML body, locates the target job by ShopID/ClaimNum, patches the
 * job record, then replaces changed joblines (delete-first, then insert) and
 * removes explicitly deleted lines together with their derived offsets.
 * @param req - Express request carrying the XML body and a request-scoped logger
 * @param res - Express response
 * @returns {Promise<*>} the sent response
 */
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
  const { logger } = req;
  try {
    const parsed = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(parsed.VehicleDamageEstimateChgRq);
    if (!rq) {
      return res.status(400).send("Missing <VehicleDamageEstimateChgRq>");
    }
    const shopId = rq.ShopID;
    const claimNum = rq.ClaimInfo?.ClaimNum;
    if (!shopId || !claimNum) {
      return res.status(400).send("Missing ShopID or ClaimNum");
    }
    const job = await findJob(shopId, claimNum, logger);
    if (!job) {
      return res.status(404).send("Job not found");
    }
    const jobPatch = extractUpdatedJobData(rq);
    const freshLines = extractUpdatedJobLines(rq.AddsChgs, job.id);
    const removedSeqs = extractDeletions(rq.Deletions);
    await client.request(UPDATE_JOB_BY_ID, { id: job.id, job: jobPatch });
    // Every unq_seq being replaced is deleted first so the re-insert below
    // cannot create duplicates (no reliance on a unique constraint).
    const replacedSeqs = (freshLines || [])
      .map((l) => l && l.unq_seq)
      .filter((v) => Number.isInteger(v));
    const seqsToDelete = [...new Set([...removedSeqs, ...replacedSeqs])];
    if (seqsToDelete.length > 0) {
      await client.request(DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: seqsToDelete });
    }
    if (freshLines.length > 0) {
      await client.request(INSERT_JOBLINES, { joblines: freshLines });
    }
    logger.log("parts-job-changed", "info", job.id, null);
    return res.status(200).json({ success: true, jobId: job.id });
  } catch (err) {
    logger.log("parts-chgrq-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = partsManagementVehicleDamageEstimateChgRq;