Files
bodyshop/server/integrations/partsManagement/endpoints/vehicleDamageEstimateChgRq.js

272 lines
9.6 KiB
JavaScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
// no-dd-sa:javascript-code-style/assignment-name
// Handler for VehicleDamageEstimateChgRq
const client = require("../../../graphql-client/graphql-client").client;
const { parseXml, normalizeXmlObject } = require("../partsManagementUtils");
const { extractPartsTaxRates } = require("./lib/extractPartsTaxRates");
const {
GET_JOB_BY_ID,
UPDATE_JOB_BY_ID,
SOFT_DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ
} = require("../partsManagement.queries");
/**
 * Looks up a single job by shop ID and job ID.
 * Failures are logged and reported as a null result rather than thrown,
 * so the caller can translate "not found" into an HTTP 404.
 * @param shopId - shop identifier used to scope the query
 * @param jobId - job identifier from the request
 * @param logger - request-scoped logger
 * @returns {Promise<*|null>} the first matching job row, or null
 */
const findJob = async (shopId, jobId, logger) => {
  let found = null;
  try {
    const response = await client.request(GET_JOB_BY_ID, { shopid: shopId, jobid: jobId });
    found = response.jobs?.[0] || null;
  } catch (err) {
    // Lookup errors are non-fatal here; the handler responds 404 on null.
    logger.log("parts-job-lookup-failed", "error", null, null, { error: err });
  }
  return found;
};
/**
 * Builds the job-update payload from a normalized ChangeRq object.
 * @param rq - normalized <VehicleDamageEstimateChgRq> object
 * @returns {{comment: *, clm_no: *, status: *, policy_no: *}} job fields to persist;
 *          also carries parts_tax_rates when ProfileInfo is present in the request.
 */
const extractUpdatedJobData = (rq) => {
  const documentInfo = rq.DocumentInfo || {};
  const claimInfo = rq.ClaimInfo || {};
  // PolicyNum may arrive nested one level deeper depending on the sender's schema.
  const policyInfo = claimInfo.PolicyInfo;
  const policyNum = policyInfo?.PolicyInfo?.PolicyNum || policyInfo?.PolicyNum || null;
  const jobData = {
    comment: documentInfo.Comment || null,
    clm_no: claimInfo.ClaimNum || null,
    status: claimInfo.ClaimStatus || null,
    policy_no: policyNum
  };
  // If ProfileInfo provided in ChangeRq, update parts_tax_rates to stay in sync with AddRq behavior
  if (rq.ProfileInfo) {
    jobData.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
  }
  return jobData;
};
/**
 * Extracts updated job lines from the request payload without splitting parts and labor:
 * - Keep part and labor on the same jobline
 * - Aggregate RefinishLabor into secondary labor fields and add its amount to lbr_amt
 * - SUBLET-only lines become PAS part_type with act_price = SubletAmount
 * Accepts currentJobLineNotes map for notes merging.
 * @param addsChgs - <AddsChgs> section of the request (single line object or array)
 * @param jobId - internal job id stamped onto every produced row
 * @param currentJobLineNotes - map of unq_seq -> existing notes, merged with incoming LineMemo
 * @returns {Object[]} jobline rows ready for insertion
 */
const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {}) => {
  // XML parsing yields an object for a single element and an array for repeats; normalize to an array.
  const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}];
  // Accepts true / 1 / "1" / "Y" (case-insensitive) as truthy for the manual-line indicator.
  const coerceManual = (val) =>
    val === true || val === 1 || val === "1" || (typeof val === "string" && val.toUpperCase() === "Y");
  const out = [];
  for (const line of linesIn) {
    // Skip empty placeholder entries produced by the normalization above.
    if (!line || Object.keys(line).length === 0) continue;
    const partInfo = line.PartInfo || {};
    const laborInfo = line.LaborInfo || {};
    const refinishInfo = line.RefinishLaborInfo || {};
    const subletInfo = line.SubletInfo || {};
    const base = {
      jobid: jobId,
      line_no: parseInt(line.LineNum || 0, 10),
      unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
      status: line.LineStatusCode || null,
      line_desc: line.LineDesc || null,
      // notes will be set below
      manual_line: line.ManualLineInd !== undefined ? coerceManual(line.ManualLineInd) : null
    };
    const lineOut = { ...base };
    // --- Notes merge logic ---
    // Merge the incoming LineMemo with notes already stored for this unq_seq:
    // keep the existing notes when the memo is identical or already contained in them,
    // otherwise append the memo with a " | " separator so nothing is lost.
    const unqSeq = lineOut.unq_seq;
    const currentNotes = currentJobLineNotes?.[unqSeq] || null;
    const newNotes = line.LineMemo || null;
    if (newNotes && currentNotes) {
      if (currentNotes === newNotes) {
        lineOut.notes = currentNotes;
      } else if (currentNotes.includes(newNotes)) {
        lineOut.notes = currentNotes;
      } else {
        lineOut.notes = `${currentNotes} | ${newNotes}`;
      }
    } else if (newNotes) {
      lineOut.notes = newNotes;
    } else if (currentNotes) {
      lineOut.notes = currentNotes;
    } else {
      lineOut.notes = null;
    }
    // --- End notes merge logic ---
    const hasPart = Object.keys(partInfo).length > 0;
    const hasSublet = Object.keys(subletInfo).length > 0;
    if (hasPart) {
      // Prefer PartPrice, fall back to ListPrice; db_price and act_price start out equal.
      const price = parseFloat(partInfo.PartPrice || partInfo.ListPrice || 0);
      lineOut.part_type = partInfo.PartType ? String(partInfo.PartType).toUpperCase() : null;
      // Quantity of 0 or absent defaults to 1 via the trailing "|| 1".
      lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1;
      lineOut.oem_partno = partInfo.OEMPartNum || partInfo.PartNum || null;
      lineOut.db_price = isNaN(price) ? 0 : price;
      lineOut.act_price = isNaN(price) ? 0 : price;
      // Optional: taxability flag for parts
      if (
        partInfo.TaxableInd !== undefined &&
        (typeof partInfo.TaxableInd === "string" ||
          typeof partInfo.TaxableInd === "number" ||
          typeof partInfo.TaxableInd === "boolean")
      ) {
        // Same truthy set as coerceManual: true / 1 / "1" / "Y".
        lineOut.tax_part =
          partInfo.TaxableInd === true ||
          partInfo.TaxableInd === 1 ||
          partInfo.TaxableInd === "1" ||
          (typeof partInfo.TaxableInd === "string" && partInfo.TaxableInd.toUpperCase() === "Y");
      }
    } else if (hasSublet) {
      // SUBLET-only line: represented as a PAS part priced at the sublet amount.
      const amt = parseFloat(subletInfo.SubletAmount || 0);
      lineOut.part_type = "PAS";
      lineOut.part_qty = 1;
      lineOut.act_price = isNaN(amt) ? 0 : amt;
    }
    // Primary labor on same line
    const hrs = parseFloat(laborInfo.LaborHours || 0);
    const amt = parseFloat(laborInfo.LaborAmt || 0);
    // Labor counts as present when a type is given or hours/amount is a non-zero number.
    const hasLabor =
      (!!laborInfo.LaborType && String(laborInfo.LaborType).length > 0) ||
      (!isNaN(hrs) && hrs !== 0) ||
      (!isNaN(amt) && amt !== 0);
    if (hasLabor) {
      lineOut.mod_lbr_ty = laborInfo.LaborType || null;
      lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs;
      lineOut.lbr_op = laborInfo.LaborOperation || null;
      lineOut.lbr_amt = isNaN(amt) ? 0 : amt;
    }
    // Refinish labor on same line using secondary fields; aggregate amount into lbr_amt
    const rHrs = parseFloat(refinishInfo.LaborHours || 0);
    const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
    // NOTE(review): rHrs/rAmt fall back to 0 (never NaN) when fields are absent, so in practice
    // any non-empty RefinishLaborInfo passes this check — confirm that is intended (hasLabor
    // above additionally requires a non-zero value).
    const hasRefinish =
      Object.keys(refinishInfo).length > 0 &&
      ((refinishInfo.LaborType && String(refinishInfo.LaborType).length > 0) ||
        !isNaN(rHrs) ||
        !isNaN(rAmt) ||
        !!refinishInfo.LaborOperation);
    if (hasRefinish) {
      lineOut.lbr_typ_j = refinishInfo.LaborType || "LAR";
      lineOut.lbr_hrs_j = isNaN(rHrs) ? 0 : rHrs;
      lineOut.lbr_op_j = refinishInfo.LaborOperation || null;
      if (!isNaN(rAmt)) {
        // Add the refinish amount on top of any primary labor amount already set.
        lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
      }
      if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum;
      if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum;
    }
    out.push(lineOut);
  }
  return out;
};
/**
 * Extracts deletion IDs from the deletions object, also removing any derived labor/refinish lines
 * by including offsets (base + 400000, base + 500000).
 * @param deletions - <Deletions> section of the request (single line object or array)
 * @returns {number[]} de-duplicated unique-sequence IDs to soft-delete
 */
const extractDeletions = (deletions = {}) => {
  const raw = deletions.DamageLineInfo;
  // Normalize single-element XML parse output to an array.
  const lines = Array.isArray(raw) ? raw : [raw || {}];
  const allSeqs = lines
    .map((line) => parseInt(line.UniqueSequenceNum, 10))
    .filter((seq) => Number.isInteger(seq))
    .flatMap((seq) => [seq, seq + 400000, seq + 500000]);
  // De-dup while preserving first-seen order.
  return [...new Set(allSeqs)];
};
/**
 * Handles VehicleDamageEstimateChgRq requests.
 * Flow: parse XML -> locate job -> fetch existing notes for changed lines ->
 * update job header -> soft-delete changed/removed lines -> re-insert changed lines.
 * @param req - Express request; body is the raw XML payload, req.logger is request-scoped
 * @param res - Express response
 * @returns {Promise<*>} 200 with { success, jobId }, 400/404 on bad input, 500 on failure
 */
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
  const { logger } = req;
  try {
    const payload = await parseXml(req.body, logger);
    const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq);
    if (!rq) return res.status(400).send("Missing <VehicleDamageEstimateChgRq>");
    const shopId = rq.ShopID;
    const jobId = rq.JobID;
    if (!shopId || !jobId) return res.status(400).send("Missing ShopID or JobID");
    const job = await findJob(shopId, jobId, logger);
    if (!job) return res.status(404).send("Job not found");
    // --- Get updated lines and their unq_seq ---
    // Normalize DamageLineInfo (object for one line, array for many) and collect the
    // distinct UniqueSequenceNum values being changed in this request.
    const linesIn = Array.isArray(rq.AddsChgs?.DamageLineInfo)
      ? rq.AddsChgs.DamageLineInfo
      : [rq.AddsChgs?.DamageLineInfo || {}];
    const updatedSeqs = Array.from(
      new Set((linesIn || []).map((l) => parseInt(l?.UniqueSequenceNum || 0, 10)).filter((v) => Number.isInteger(v)))
    );
    // Fetch notes currently stored for those lines so incoming memos can be merged
    // instead of overwriting existing notes.
    let currentJobLineNotes = {};
    if (updatedSeqs.length > 0) {
      const resp = await client.request(GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
      if (resp?.joblines) {
        for (const jl of resp.joblines) {
          currentJobLineNotes[jl.unq_seq] = jl.notes;
        }
      }
    }
    // --- End fetch current notes ---
    const updatedJobData = extractUpdatedJobData(rq);
    const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id, currentJobLineNotes);
    const deletedLineIds = extractDeletions(rq.Deletions);
    await client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
    // Soft-delete both explicitly deleted lines and the lines being replaced by this change.
    if (deletedLineIds?.length || updatedSeqs?.length) {
      const allToDelete = Array.from(new Set([...(deletedLineIds || []), ...(updatedSeqs || [])]));
      if (allToDelete.length) {
        await client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: allToDelete });
      }
    }
    if (updatedLines.length > 0) {
      // Insert fresh versions after deletion so we don't depend on a unique constraint
      await client.request(INSERT_JOBLINES, {
        joblines: updatedLines
      });
    }
    logger.log("parts-job-changed", "info", job.id, null);
    return res.status(200).json({ success: true, jobId: job.id });
  } catch (err) {
    logger.log("parts-chgrq-error", "error", null, null, { error: err });
    return res.status(err.status || 500).json({ error: err.message || "Internal error" });
  }
};
module.exports = partsManagementVehicleDamageEstimateChgRq;