feature/IO-3255-simplified-parts-management - Checkpoint

This commit is contained in:
Dave
2025-09-05 12:16:25 -04:00
parent 82195a0584
commit 771a239773
2 changed files with 191 additions and 92 deletions

View File

@@ -12,12 +12,14 @@ const {
GET_JOB_BY_ID,
UPDATE_JOB_BY_ID,
SOFT_DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
UPDATE_JOBLINE_BY_PK,
INSERT_JOBLINES
} = require("../partsManagement.queries");
/**
* Finds a job by shop ID and claim number.
* Finds a job by shop ID and job ID.
* @param shopId
* @param jobId
* @param logger
@@ -35,30 +37,25 @@ const findJob = async (shopId, jobId, logger) => {
/**
* Extracts updated job data from the request payload.
* Mirrors AddRq for parts_tax_rates + driveable when present.
* @param rq
* @returns {{comment: (number|((comment: Comment, helper: postcss.Helpers) => (Promise<void> | void))|string|null), clm_no: null, status: (*|null), policy_no: (*|null)}}
*/
const extractUpdatedJobData = (rq) => {
const doc = rq.DocumentInfo || {};
const claim = rq.ClaimInfo || {};
//TODO: In the full BMS world, much more can change, this will need to be expanded
// before it can be considered a generic BMS importer; currently it is bespoke to webest
const policyNo = claim.PolicyInfo?.PolicyInfo?.PolicyNum || claim.PolicyInfo?.PolicyNum || null;
const out = {
comment: doc.Comment || null,
clm_no: claim.ClaimNum || null,
// TODO: Commented out so they do not blow over with 'Auth Cust'
// status: claim.ClaimStatus || null,
// TODO (future): status omitted intentionally to avoid overwriting with 'Auth Cust'
policy_no: policyNo
};
// If ProfileInfo provided in ChangeRq, update parts_tax_rates to stay in sync with AddRq behavior
if (rq.ProfileInfo) {
out.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
}
// Mirror AddRq: update driveable if present
if (rq.VehicleInfo?.Condition?.DrivableInd !== undefined) {
out.driveable = !!rq.VehicleInfo.Condition.DrivableInd;
}
@@ -67,11 +64,10 @@ const extractUpdatedJobData = (rq) => {
};
/**
* Extracts updated job lines from the request payload without splitting parts and labor:
* - Keep part and labor on the same jobline
* - Aggregate RefinishLabor into secondary labor fields and add its amount to lbr_amt
* - SUBLET-only lines become PAS part_type with act_price = SubletAmount
* Accepts currentJobLineNotes map for notes merging.
* Build jobline payloads for updates/inserts (no split between parts & labor).
* - Refinish labor aggregated into lbr_* secondary fields and lbr_amt.
* - SUBLET-only -> PAS line with act_price = SubletAmount.
* - Notes merged with current DB value by unq_seq.
*/
const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {}) => {
const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}];
@@ -95,59 +91,38 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
status: line.LineStatusCode || null,
line_desc: line.LineDesc || null,
// notes will be set below
manual_line: line.ManualLineInd !== undefined ? coerceManual(line.ManualLineInd) : null
};
const lineOut = { ...base };
// --- Notes merge logic ---
// --- Notes merge ---
const unqSeq = lineOut.unq_seq;
const currentNotes = currentJobLineNotes?.[unqSeq] || null;
const newNotes = line.LineMemo || null;
if (newNotes && currentNotes) {
if (currentNotes === newNotes) {
lineOut.notes = currentNotes;
} else if (currentNotes.includes(newNotes)) {
lineOut.notes = currentNotes;
} else {
lineOut.notes = `${currentNotes} | ${newNotes}`;
}
} else if (newNotes) {
lineOut.notes = newNotes;
} else if (currentNotes) {
lineOut.notes = currentNotes;
} else {
lineOut.notes = null;
}
// --- End notes merge logic ---
if (currentNotes === newNotes || currentNotes.includes(newNotes)) lineOut.notes = currentNotes;
else lineOut.notes = `${currentNotes} | ${newNotes}`;
} else if (newNotes) lineOut.notes = newNotes;
else if (currentNotes) lineOut.notes = currentNotes;
else lineOut.notes = null;
// --- end notes merge ---
const hasPart = Object.keys(partInfo).length > 0;
const hasSublet = Object.keys(subletInfo).length > 0;
if (hasPart) {
// Mirror AddRq behavior: use OEMPartNum fields and parse prices directly
lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1;
lineOut.oem_partno = partInfo.OEMPartNum;
lineOut.alt_partno = partInfo?.NonOEM?.NonOEMPartNum;
lineOut.part_type = partInfo.PartType || null ? String(partInfo.PartType).toUpperCase() : null;
// Pricing: act_price from PartPrice, db_price from OEMPartPrice
lineOut.act_price = parseFloat(partInfo?.PartPrice || 0);
lineOut.db_price = parseFloat(partInfo?.OEMPartPrice || 0);
// Optional: taxability flag for parts
if (
partInfo.TaxableInd !== undefined &&
(typeof partInfo.TaxableInd === "string" ||
typeof partInfo.TaxableInd === "number" ||
typeof partInfo.TaxableInd === "boolean")
) {
lineOut.tax_part =
partInfo.TaxableInd === true ||
partInfo.TaxableInd === 1 ||
partInfo.TaxableInd === "1" ||
(typeof partInfo.TaxableInd === "string" && partInfo.TaxableInd.toUpperCase() === "Y");
if (partInfo.TaxableInd !== undefined) {
const t = partInfo.TaxableInd;
lineOut.tax_part = t === true || t === 1 || t === "1" || (typeof t === "string" && t.toUpperCase() === "Y");
}
} else if (hasSublet) {
const amt = parseFloat(subletInfo.SubletAmount || 0);
@@ -156,7 +131,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.act_price = isNaN(amt) ? 0 : amt;
}
// Primary labor on same line
// Primary labor
const hrs = parseFloat(laborInfo.LaborHours || 0);
const amt = parseFloat(laborInfo.LaborAmt || 0);
const hasLabor =
@@ -167,7 +142,6 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.mod_lbr_ty = laborInfo.LaborType || null;
lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs;
// Map operation code description from opCodes.json (case-insensitive)
const opCodeKey =
typeof laborInfo.LaborOperation === "string" ? laborInfo.LaborOperation.trim().toUpperCase() : null;
lineOut.op_code_desc = opCodeKey && opCodes?.[opCodeKey]?.desc ? opCodes[opCodeKey].desc : null;
@@ -175,7 +149,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.lbr_amt = isNaN(amt) ? 0 : amt;
}
// Refinish labor on same line using secondary fields; aggregate amount into lbr_amt
// Refinish (secondary fields, add amount)
const rHrs = parseFloat(refinishInfo.LaborHours || 0);
const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
const hasRefinish =
@@ -188,9 +162,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.lbr_typ_j = refinishInfo.LaborType || "LAR";
lineOut.lbr_hrs_j = isNaN(rHrs) ? 0 : rHrs;
lineOut.lbr_op_j = refinishInfo.LaborOperation || null;
if (!isNaN(rAmt)) {
lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
}
if (!isNaN(rAmt)) lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum;
if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum;
}
@@ -202,18 +174,14 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
};
/**
* Extracts deletion IDs from the deletions object, also removing any derived labor/refinish lines
* by including offsets (base + 400000, base + 500000).
* Expand deletion IDs to include derived labor/refinish offsets.
*/
const extractDeletions = (deletions = {}) => {
const items = Array.isArray(deletions.DamageLineInfo) ? deletions.DamageLineInfo : [deletions.DamageLineInfo || {}];
const baseSeqs = items.map((line) => parseInt(line.UniqueSequenceNum, 10)).filter((id) => Number.isInteger(id));
const allSeqs = [];
for (const u of baseSeqs) {
allSeqs.push(u, u + 400000, u + 500000);
}
// De-dup
for (const u of baseSeqs) allSeqs.push(u, u + 400000, u + 500000);
return Array.from(new Set(allSeqs));
};
@@ -234,10 +202,67 @@ const buildEstimateXmlKey = (rq) => {
};
/**
* Handles VehicleDamageEstimateChgRq requests.
* @param req
* @param res
* @returns {Promise<*>}
* Convert a full jobline object into a jobs_set_input for update_by_pk (omit immutable fields).
*/
/**
 * Convert a full jobline object into a joblines_set_input for update_by_pk.
 * The immutable natural-key fields (jobid, unq_seq) are intentionally omitted
 * so an update can never re-key a row.
 * @param {Object} jl - full jobline payload as built by extractUpdatedJobLines
 * @returns {Object} object containing only the mutable columns (keys are always
 *   present; a field absent on `jl` comes through as `undefined`, matching the
 *   previous destructure-and-rebuild behavior)
 */
const toJoblineSetInput = (jl) => {
  // Single source of truth for the mutable columns. Keep in sync with the
  // update_columns list in UPSERT_JOBLINES / the $jl payload of UPDATE_JOBLINE_BY_PK.
  const MUTABLE_FIELDS = [
    "line_no",
    "status",
    "line_desc",
    "manual_line",
    "notes",
    "part_qty",
    "oem_partno",
    "alt_partno",
    "part_type",
    "act_price",
    "db_price",
    "tax_part",
    "mod_lbr_ty",
    "mod_lb_hrs",
    "op_code_desc",
    "lbr_amt",
    "lbr_typ_j",
    "lbr_hrs_j",
    "lbr_op_j",
    "paint_stg",
    "paint_tone"
  ];
  return Object.fromEntries(MUTABLE_FIELDS.map((key) => [key, jl[key]]));
};
/**
* Handles VehicleDamageEstimateChgRq requests:
* - Update core job fields
* - For lines: update by PK if existing; otherwise bulk insert
* - Soft-delete only explicit deletions (exclude any updated seqs)
*/
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
const { logger } = req;
@@ -245,15 +270,20 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
try {
const payload = await parseXml(req.body, logger);
const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq);
const jobId = rq.JobID;
const shopId = rq.ShopID;
// Fire-and-forget archival on valid request
(async () => {
try {
const key = buildEstimateXmlKey(rq);
await uploadFileToS3({ bucketName: ESTIMATE_XML_BUCKET, key, content: rawXml || "", contentType: "application/xml" });
await uploadFileToS3({
bucketName: ESTIMATE_XML_BUCKET,
key,
content: rawXml || "",
contentType: "application/xml"
});
logger.log("parts-estimate-xml-uploaded", "info", jobId, null, { key, bytes: rawXml?.length || 0 });
} catch (e) {
logger.log("parts-estimate-xml-upload-failed", "warn", jobId, null, { error: e?.message });
@@ -263,50 +293,67 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
const job = await findJob(shopId, jobId, logger);
if (!job) return res.status(404).send("Job not found");
// --- Get updated lines and their unq_seq ---
// --- Updated seqs from incoming changes ---
const linesIn = Array.isArray(rq.AddsChgs?.DamageLineInfo)
? rq.AddsChgs.DamageLineInfo
: [rq.AddsChgs?.DamageLineInfo || {}];
const updatedSeqs = Array.from(
new Set((linesIn || []).map((l) => parseInt(l?.UniqueSequenceNum || 0, 10)).filter((v) => Number.isInteger(v)))
);
// --- Fetch current notes for merge ---
let currentJobLineNotes = {};
if (updatedSeqs.length > 0) {
const resp = await client.request(GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
if (resp?.joblines) {
for (const jl of resp.joblines) {
currentJobLineNotes[jl.unq_seq] = jl.notes;
}
for (const jl of resp.joblines) currentJobLineNotes[jl.unq_seq] = jl.notes;
}
}
// --- End fetch current notes ---
const updatedJobData = extractUpdatedJobData(rq);
const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id, currentJobLineNotes);
const deletedLineIds = extractDeletions(rq.Deletions);
await client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
//TODO: for changed lines, are they deleted and then reinserted?
//TODO: Updated lines should get an upsert to update things like desc, price, etc.
// Updated Seqs should not be soft deleted
// logic in available jobs container
// Encapsulate so it is not multiple queries
if (deletedLineIds?.length || updatedSeqs?.length) {
const allToDelete = Array.from(new Set([...(deletedLineIds || []), ...(updatedSeqs || [])]));
if (allToDelete.length) {
await client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: allToDelete });
//TODO: appears to soft delete updated lines as well.
// --- Look up existing rows (by natural key) to decide update vs insert ---
let existingIdByUnqSeq = {};
if (updatedSeqs.length > 0) {
const existing = await client.request(GET_JOBLINE_IDS_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
if (existing?.joblines) {
for (const row of existing.joblines) existingIdByUnqSeq[row.unq_seq] = row.id;
}
}
if (updatedLines.length > 0) {
// Insert fresh versions after deletion so we don't depend on a unique constraint
await client.request(INSERT_JOBLINES, {
joblines: updatedLines
});
const toUpdate = [];
const toInsert = [];
for (const jl of updatedLines) {
const id = existingIdByUnqSeq[jl.unq_seq];
if (id) toUpdate.push({ id, _set: toJoblineSetInput(jl) });
else toInsert.push(jl);
}
// Build deletions list and exclude any seqs we are updating (avoid accidental removal)
const deletedLineIdsAll = extractDeletions(rq.Deletions);
const deletionSeqs = deletedLineIdsAll.filter((u) => !updatedSeqs.includes(u));
// Mutations:
const updateJobPromise = client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
const softDeletePromise = deletionSeqs.length
? client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: deletionSeqs })
: Promise.resolve({});
// Update each existing row by primary key (parallelized)
const perRowUpdatesPromise =
toUpdate.length > 0
? Promise.all(toUpdate.map(({ id, _set }) => client.request(UPDATE_JOBLINE_BY_PK, { id, jl: _set })))
: Promise.resolve([]);
// Insert brand-new rows in bulk
const insertPromise =
toInsert.length > 0 ? client.request(INSERT_JOBLINES, { joblines: toInsert }) : Promise.resolve({});
await Promise.all([updateJobPromise, softDeletePromise, perRowUpdatesPromise, insertPromise]);
logger.log("parts-job-changed", "info", job.id, null);
return res.status(200).json({ success: true, jobId: job.id });
} catch (err) {
@@ -316,5 +363,3 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
};
module.exports = partsManagementVehicleDamageEstimateChgRq;
// Remove any duplicate S3 constants that might have been appended previously (none expected now)

View File

@@ -246,6 +246,58 @@ const DELETE_PARTS_ORDERS_BY_JOB_IDS = `
}
`;
// Bulk upsert of joblines keyed by the (jobid, unq_seq) unique constraint.
// On conflict, the mutable line columns listed in update_columns are
// overwritten with the incoming values.
// NOTE(review): line_no is absent from update_columns, so an upsert preserves
// the existing row's line number — confirm that is intentional.
// NOTE(review): exported but the ChgRq handler in this commit uses
// UPDATE_JOBLINE_BY_PK + INSERT_JOBLINES instead — verify this is still needed.
const UPSERT_JOBLINES = `
  mutation UpsertJoblines($joblines: [joblines_insert_input!]!) {
    insert_joblines(
      objects: $joblines,
      on_conflict: {
        constraint: joblines_jobid_unq_seq_key,
        update_columns: [
          status,
          line_desc,
          notes,
          manual_line,
          part_qty,
          oem_partno,
          alt_partno,
          part_type,
          act_price,
          db_price,
          tax_part,
          mod_lbr_ty,
          mod_lb_hrs,
          op_code_desc,
          lbr_amt,
          lbr_typ_j,
          lbr_hrs_j,
          lbr_op_j,
          paint_stg,
          paint_tone
        ]
      }
    ) {
      affected_rows
    }
  }
`;
// Get jobline IDs for the incoming unq_seq values (only non-removed)
// Used by the ChgRq handler to decide, per incoming line, whether to update
// an existing row by primary key or bulk-insert a new one.
const GET_JOBLINE_IDS_BY_JOBID_UNQSEQ = `
  query GetJoblineIdsByJobIdUnqSeq($jobid: uuid!, $unqSeqs: [Int!]!) {
    joblines(where: { jobid: { _eq: $jobid }, unq_seq: { _in: $unqSeqs }, removed: { _neq: true } }) {
      id
      unq_seq
    }
  }
`;
// Update a single jobline by primary key
const UPDATE_JOBLINE_BY_PK = `
mutation UpdateJoblineByPk($id: uuid!, $jl: joblines_set_input!) {
update_joblines_by_pk(pk_columns: { id: $id }, _set: $jl) { id }
}
`;
module.exports = {
GET_BODYSHOP_STATUS,
GET_VEHICLE_BY_SHOP_VIN,
@@ -272,8 +324,10 @@ module.exports = {
DELETE_AUDIT_TRAIL_BY_SHOP,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
GET_JOB_BY_ID,
// newly added exports
CLEAR_TASKS_PARTSORDER_LINKS_BY_JOBIDS,
DELETE_PARTS_ORDER_LINES_BY_JOB_IDS,
DELETE_PARTS_ORDERS_BY_JOB_IDS
DELETE_PARTS_ORDERS_BY_JOB_IDS,
UPSERT_JOBLINES,
GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
UPDATE_JOBLINE_BY_PK
};