feature/IO-3255-simplified-parts-management - Checkpoint

This commit is contained in:
Dave
2025-09-05 12:16:25 -04:00
parent 82195a0584
commit 771a239773
2 changed files with 191 additions and 92 deletions

View File

@@ -12,12 +12,14 @@ const {
GET_JOB_BY_ID, GET_JOB_BY_ID,
UPDATE_JOB_BY_ID, UPDATE_JOB_BY_ID,
SOFT_DELETE_JOBLINES_BY_IDS, SOFT_DELETE_JOBLINES_BY_IDS,
INSERT_JOBLINES, GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
UPDATE_JOBLINE_BY_PK,
INSERT_JOBLINES
} = require("../partsManagement.queries"); } = require("../partsManagement.queries");
/** /**
* Finds a job by shop ID and claim number. * Finds a job by shop ID and job ID.
* @param shopId * @param shopId
* @param jobId * @param jobId
* @param logger * @param logger
@@ -35,30 +37,25 @@ const findJob = async (shopId, jobId, logger) => {
/** /**
* Extracts updated job data from the request payload. * Extracts updated job data from the request payload.
* Mirrors AddRq for parts_tax_rates + driveable when present.
* @param rq * @param rq
* @returns {{comment: (number|((comment: Comment, helper: postcss.Helpers) => (Promise<void> | void))|string|null), clm_no: null, status: (*|null), policy_no: (*|null)}}
*/ */
const extractUpdatedJobData = (rq) => { const extractUpdatedJobData = (rq) => {
const doc = rq.DocumentInfo || {}; const doc = rq.DocumentInfo || {};
const claim = rq.ClaimInfo || {}; const claim = rq.ClaimInfo || {};
//TODO: In the full BMS world, much more can change, this will need to be expanded
// before it can be considered a generic BMS importer; currently it is bespoke to webest
const policyNo = claim.PolicyInfo?.PolicyInfo?.PolicyNum || claim.PolicyInfo?.PolicyNum || null; const policyNo = claim.PolicyInfo?.PolicyInfo?.PolicyNum || claim.PolicyInfo?.PolicyNum || null;
const out = { const out = {
comment: doc.Comment || null, comment: doc.Comment || null,
clm_no: claim.ClaimNum || null, clm_no: claim.ClaimNum || null,
// TODO: Commented out so they do not blow over with 'Auth Cust' // TODO (future): status omitted intentionally to avoid overwriting with 'Auth Cust'
// status: claim.ClaimStatus || null,
policy_no: policyNo policy_no: policyNo
}; };
// If ProfileInfo provided in ChangeRq, update parts_tax_rates to stay in sync with AddRq behavior
if (rq.ProfileInfo) { if (rq.ProfileInfo) {
out.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo); out.parts_tax_rates = extractPartsTaxRates(rq.ProfileInfo);
} }
// Mirror AddRq: update driveable if present
if (rq.VehicleInfo?.Condition?.DrivableInd !== undefined) { if (rq.VehicleInfo?.Condition?.DrivableInd !== undefined) {
out.driveable = !!rq.VehicleInfo.Condition.DrivableInd; out.driveable = !!rq.VehicleInfo.Condition.DrivableInd;
} }
@@ -67,11 +64,10 @@ const extractUpdatedJobData = (rq) => {
}; };
/** /**
* Extracts updated job lines from the request payload without splitting parts and labor: * Build jobline payloads for updates/inserts (no split between parts & labor).
* - Keep part and labor on the same jobline * - Refinish labor aggregated into lbr_* secondary fields and lbr_amt.
* - Aggregate RefinishLabor into secondary labor fields and add its amount to lbr_amt * - SUBLET-only -> PAS line with act_price = SubletAmount.
* - SUBLET-only lines become PAS part_type with act_price = SubletAmount * - Notes merged with current DB value by unq_seq.
* Accepts currentJobLineNotes map for notes merging.
*/ */
const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {}) => { const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {}) => {
const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}]; const linesIn = Array.isArray(addsChgs.DamageLineInfo) ? addsChgs.DamageLineInfo : [addsChgs.DamageLineInfo || {}];
@@ -95,59 +91,38 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
unq_seq: parseInt(line.UniqueSequenceNum || 0, 10), unq_seq: parseInt(line.UniqueSequenceNum || 0, 10),
status: line.LineStatusCode || null, status: line.LineStatusCode || null,
line_desc: line.LineDesc || null, line_desc: line.LineDesc || null,
// notes will be set below
manual_line: line.ManualLineInd !== undefined ? coerceManual(line.ManualLineInd) : null manual_line: line.ManualLineInd !== undefined ? coerceManual(line.ManualLineInd) : null
}; };
const lineOut = { ...base }; const lineOut = { ...base };
// --- Notes merge logic --- // --- Notes merge ---
const unqSeq = lineOut.unq_seq; const unqSeq = lineOut.unq_seq;
const currentNotes = currentJobLineNotes?.[unqSeq] || null; const currentNotes = currentJobLineNotes?.[unqSeq] || null;
const newNotes = line.LineMemo || null; const newNotes = line.LineMemo || null;
if (newNotes && currentNotes) { if (newNotes && currentNotes) {
if (currentNotes === newNotes) { if (currentNotes === newNotes || currentNotes.includes(newNotes)) lineOut.notes = currentNotes;
lineOut.notes = currentNotes; else lineOut.notes = `${currentNotes} | ${newNotes}`;
} else if (currentNotes.includes(newNotes)) { } else if (newNotes) lineOut.notes = newNotes;
lineOut.notes = currentNotes; else if (currentNotes) lineOut.notes = currentNotes;
} else { else lineOut.notes = null;
lineOut.notes = `${currentNotes} | ${newNotes}`; // --- end notes merge ---
}
} else if (newNotes) {
lineOut.notes = newNotes;
} else if (currentNotes) {
lineOut.notes = currentNotes;
} else {
lineOut.notes = null;
}
// --- End notes merge logic ---
const hasPart = Object.keys(partInfo).length > 0; const hasPart = Object.keys(partInfo).length > 0;
const hasSublet = Object.keys(subletInfo).length > 0; const hasSublet = Object.keys(subletInfo).length > 0;
if (hasPart) { if (hasPart) {
// Mirror AddRq behavior: use OEMPartNum fields and parse prices directly
lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1; lineOut.part_qty = parseFloat(partInfo.Quantity || 0) || 1;
lineOut.oem_partno = partInfo.OEMPartNum; lineOut.oem_partno = partInfo.OEMPartNum;
lineOut.alt_partno = partInfo?.NonOEM?.NonOEMPartNum; lineOut.alt_partno = partInfo?.NonOEM?.NonOEMPartNum;
lineOut.part_type = partInfo.PartType || null ? String(partInfo.PartType).toUpperCase() : null; lineOut.part_type = partInfo.PartType || null ? String(partInfo.PartType).toUpperCase() : null;
// Pricing: act_price from PartPrice, db_price from OEMPartPrice
lineOut.act_price = parseFloat(partInfo?.PartPrice || 0); lineOut.act_price = parseFloat(partInfo?.PartPrice || 0);
lineOut.db_price = parseFloat(partInfo?.OEMPartPrice || 0); lineOut.db_price = parseFloat(partInfo?.OEMPartPrice || 0);
// Optional: taxability flag for parts if (partInfo.TaxableInd !== undefined) {
if ( const t = partInfo.TaxableInd;
partInfo.TaxableInd !== undefined && lineOut.tax_part = t === true || t === 1 || t === "1" || (typeof t === "string" && t.toUpperCase() === "Y");
(typeof partInfo.TaxableInd === "string" ||
typeof partInfo.TaxableInd === "number" ||
typeof partInfo.TaxableInd === "boolean")
) {
lineOut.tax_part =
partInfo.TaxableInd === true ||
partInfo.TaxableInd === 1 ||
partInfo.TaxableInd === "1" ||
(typeof partInfo.TaxableInd === "string" && partInfo.TaxableInd.toUpperCase() === "Y");
} }
} else if (hasSublet) { } else if (hasSublet) {
const amt = parseFloat(subletInfo.SubletAmount || 0); const amt = parseFloat(subletInfo.SubletAmount || 0);
@@ -156,7 +131,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.act_price = isNaN(amt) ? 0 : amt; lineOut.act_price = isNaN(amt) ? 0 : amt;
} }
// Primary labor on same line // Primary labor
const hrs = parseFloat(laborInfo.LaborHours || 0); const hrs = parseFloat(laborInfo.LaborHours || 0);
const amt = parseFloat(laborInfo.LaborAmt || 0); const amt = parseFloat(laborInfo.LaborAmt || 0);
const hasLabor = const hasLabor =
@@ -167,7 +142,6 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.mod_lbr_ty = laborInfo.LaborType || null; lineOut.mod_lbr_ty = laborInfo.LaborType || null;
lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs; lineOut.mod_lb_hrs = isNaN(hrs) ? 0 : hrs;
// Map operation code description from opCodes.json (case-insensitive)
const opCodeKey = const opCodeKey =
typeof laborInfo.LaborOperation === "string" ? laborInfo.LaborOperation.trim().toUpperCase() : null; typeof laborInfo.LaborOperation === "string" ? laborInfo.LaborOperation.trim().toUpperCase() : null;
lineOut.op_code_desc = opCodeKey && opCodes?.[opCodeKey]?.desc ? opCodes[opCodeKey].desc : null; lineOut.op_code_desc = opCodeKey && opCodes?.[opCodeKey]?.desc ? opCodes[opCodeKey].desc : null;
@@ -175,7 +149,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.lbr_amt = isNaN(amt) ? 0 : amt; lineOut.lbr_amt = isNaN(amt) ? 0 : amt;
} }
// Refinish labor on same line using secondary fields; aggregate amount into lbr_amt // Refinish (secondary fields, add amount)
const rHrs = parseFloat(refinishInfo.LaborHours || 0); const rHrs = parseFloat(refinishInfo.LaborHours || 0);
const rAmt = parseFloat(refinishInfo.LaborAmt || 0); const rAmt = parseFloat(refinishInfo.LaborAmt || 0);
const hasRefinish = const hasRefinish =
@@ -188,9 +162,7 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
lineOut.lbr_typ_j = refinishInfo.LaborType || "LAR"; lineOut.lbr_typ_j = refinishInfo.LaborType || "LAR";
lineOut.lbr_hrs_j = isNaN(rHrs) ? 0 : rHrs; lineOut.lbr_hrs_j = isNaN(rHrs) ? 0 : rHrs;
lineOut.lbr_op_j = refinishInfo.LaborOperation || null; lineOut.lbr_op_j = refinishInfo.LaborOperation || null;
if (!isNaN(rAmt)) { if (!isNaN(rAmt)) lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
lineOut.lbr_amt = (Number.isFinite(lineOut.lbr_amt) ? lineOut.lbr_amt : 0) + rAmt;
}
if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum; if (refinishInfo.PaintStagesNum !== undefined) lineOut.paint_stg = refinishInfo.PaintStagesNum;
if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum; if (refinishInfo.PaintTonesNum !== undefined) lineOut.paint_tone = refinishInfo.PaintTonesNum;
} }
@@ -202,18 +174,14 @@ const extractUpdatedJobLines = (addsChgs = {}, jobId, currentJobLineNotes = {})
}; };
/** /**
* Extracts deletion IDs from the deletions object, also removing any derived labor/refinish lines * Expand deletion IDs to include derived labor/refinish offsets.
* by including offsets (base + 400000, base + 500000).
*/ */
const extractDeletions = (deletions = {}) => { const extractDeletions = (deletions = {}) => {
const items = Array.isArray(deletions.DamageLineInfo) ? deletions.DamageLineInfo : [deletions.DamageLineInfo || {}]; const items = Array.isArray(deletions.DamageLineInfo) ? deletions.DamageLineInfo : [deletions.DamageLineInfo || {}];
const baseSeqs = items.map((line) => parseInt(line.UniqueSequenceNum, 10)).filter((id) => Number.isInteger(id)); const baseSeqs = items.map((line) => parseInt(line.UniqueSequenceNum, 10)).filter((id) => Number.isInteger(id));
const allSeqs = []; const allSeqs = [];
for (const u of baseSeqs) { for (const u of baseSeqs) allSeqs.push(u, u + 400000, u + 500000);
allSeqs.push(u, u + 400000, u + 500000);
}
// De-dup
return Array.from(new Set(allSeqs)); return Array.from(new Set(allSeqs));
}; };
@@ -234,10 +202,67 @@ const buildEstimateXmlKey = (rq) => {
}; };
/**
 * Columns of a jobline that a ChgRq is allowed to update.
 * Single source of truth — used to build the _set payload below, so the
 * updatable-column list cannot drift between a destructure and a literal.
 * Identity fields (jobid, unq_seq) are intentionally excluded so an update
 * can never move a row to a different job or sequence.
 */
const JOBLINE_SET_FIELDS = [
  "line_no",
  "status",
  "line_desc",
  "manual_line",
  "notes",
  "part_qty",
  "oem_partno",
  "alt_partno",
  "part_type",
  "act_price",
  "db_price",
  "tax_part",
  "mod_lbr_ty",
  "mod_lb_hrs",
  "op_code_desc",
  "lbr_amt",
  "lbr_typ_j",
  "lbr_hrs_j",
  "lbr_op_j",
  "paint_stg",
  "paint_tone"
];
/**
 * Convert a full jobline object into a joblines_set_input for update_by_pk,
 * keeping only the mutable columns (immutable identity fields are omitted).
 * Fields absent on `jl` come through as `undefined`, matching the previous
 * destructuring behavior (JSON serialization drops them before the request).
 * @param {Object} jl - jobline payload produced by extractUpdatedJobLines
 * @returns {Object} joblines_set_input with exactly the whitelisted keys
 */
const toJoblineSetInput = (jl) => Object.fromEntries(JOBLINE_SET_FIELDS.map((field) => [field, jl[field]]));
/**
* Handles VehicleDamageEstimateChgRq requests:
* - Update core job fields
* - For lines: update by PK if existing; otherwise bulk insert
* - Soft-delete only explicit deletions (exclude any updated seqs)
*/ */
const partsManagementVehicleDamageEstimateChgRq = async (req, res) => { const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
const { logger } = req; const { logger } = req;
@@ -245,15 +270,20 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
try { try {
const payload = await parseXml(req.body, logger); const payload = await parseXml(req.body, logger);
const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq); const rq = normalizeXmlObject(payload.VehicleDamageEstimateChgRq);
const jobId = rq.JobID; const jobId = rq.JobID;
const shopId = rq.ShopID; const shopId = rq.ShopID;
// Fire-and-forget archival on valid request // Fire-and-forget archival on valid request
(async () => { (async () => {
try { try {
const key = buildEstimateXmlKey(rq); const key = buildEstimateXmlKey(rq);
await uploadFileToS3({ bucketName: ESTIMATE_XML_BUCKET, key, content: rawXml || "", contentType: "application/xml" }); await uploadFileToS3({
bucketName: ESTIMATE_XML_BUCKET,
key,
content: rawXml || "",
contentType: "application/xml"
});
logger.log("parts-estimate-xml-uploaded", "info", jobId, null, { key, bytes: rawXml?.length || 0 }); logger.log("parts-estimate-xml-uploaded", "info", jobId, null, { key, bytes: rawXml?.length || 0 });
} catch (e) { } catch (e) {
logger.log("parts-estimate-xml-upload-failed", "warn", jobId, null, { error: e?.message }); logger.log("parts-estimate-xml-upload-failed", "warn", jobId, null, { error: e?.message });
@@ -263,50 +293,67 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
const job = await findJob(shopId, jobId, logger); const job = await findJob(shopId, jobId, logger);
if (!job) return res.status(404).send("Job not found"); if (!job) return res.status(404).send("Job not found");
// --- Get updated lines and their unq_seq --- // --- Updated seqs from incoming changes ---
const linesIn = Array.isArray(rq.AddsChgs?.DamageLineInfo) const linesIn = Array.isArray(rq.AddsChgs?.DamageLineInfo)
? rq.AddsChgs.DamageLineInfo ? rq.AddsChgs.DamageLineInfo
: [rq.AddsChgs?.DamageLineInfo || {}]; : [rq.AddsChgs?.DamageLineInfo || {}];
const updatedSeqs = Array.from( const updatedSeqs = Array.from(
new Set((linesIn || []).map((l) => parseInt(l?.UniqueSequenceNum || 0, 10)).filter((v) => Number.isInteger(v))) new Set((linesIn || []).map((l) => parseInt(l?.UniqueSequenceNum || 0, 10)).filter((v) => Number.isInteger(v)))
); );
// --- Fetch current notes for merge ---
let currentJobLineNotes = {}; let currentJobLineNotes = {};
if (updatedSeqs.length > 0) { if (updatedSeqs.length > 0) {
const resp = await client.request(GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs }); const resp = await client.request(GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
if (resp?.joblines) { if (resp?.joblines) {
for (const jl of resp.joblines) { for (const jl of resp.joblines) currentJobLineNotes[jl.unq_seq] = jl.notes;
currentJobLineNotes[jl.unq_seq] = jl.notes;
}
} }
} }
// --- End fetch current notes ---
const updatedJobData = extractUpdatedJobData(rq); const updatedJobData = extractUpdatedJobData(rq);
const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id, currentJobLineNotes); const updatedLines = extractUpdatedJobLines(rq.AddsChgs, job.id, currentJobLineNotes);
const deletedLineIds = extractDeletions(rq.Deletions);
await client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData }); // --- Look up existing rows (by natural key) to decide update vs insert ---
let existingIdByUnqSeq = {};
//TODO: for changed lines, are they deleted and then reinserted? if (updatedSeqs.length > 0) {
//TODO: Updated lines should get an upsert to update things like desc, price, etc. const existing = await client.request(GET_JOBLINE_IDS_BY_JOBID_UNQSEQ, { jobid: job.id, unqSeqs: updatedSeqs });
// Updated Seqs should not be soft deleted if (existing?.joblines) {
// logic in available jobs container for (const row of existing.joblines) existingIdByUnqSeq[row.unq_seq] = row.id;
// Encapsulate so it is not multiple queries
if (deletedLineIds?.length || updatedSeqs?.length) {
const allToDelete = Array.from(new Set([...(deletedLineIds || []), ...(updatedSeqs || [])]));
if (allToDelete.length) {
await client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: allToDelete });
//TODO: appears to soft delete updated lines as well.
} }
} }
if (updatedLines.length > 0) { const toUpdate = [];
// Insert fresh versions after deletion so we dont depend on a unique constraint const toInsert = [];
await client.request(INSERT_JOBLINES, { for (const jl of updatedLines) {
joblines: updatedLines const id = existingIdByUnqSeq[jl.unq_seq];
}); if (id) toUpdate.push({ id, _set: toJoblineSetInput(jl) });
else toInsert.push(jl);
} }
// Build deletions list and exclude any seqs we are updating (avoid accidental removal)
const deletedLineIdsAll = extractDeletions(rq.Deletions);
const deletionSeqs = deletedLineIdsAll.filter((u) => !updatedSeqs.includes(u));
// Mutations:
const updateJobPromise = client.request(UPDATE_JOB_BY_ID, { id: job.id, job: updatedJobData });
const softDeletePromise = deletionSeqs.length
? client.request(SOFT_DELETE_JOBLINES_BY_IDS, { jobid: job.id, unqSeqs: deletionSeqs })
: Promise.resolve({});
// Update each existing row by primary key (parallelized)
const perRowUpdatesPromise =
toUpdate.length > 0
? Promise.all(toUpdate.map(({ id, _set }) => client.request(UPDATE_JOBLINE_BY_PK, { id, jl: _set })))
: Promise.resolve([]);
// Insert brand-new rows in bulk
const insertPromise =
toInsert.length > 0 ? client.request(INSERT_JOBLINES, { joblines: toInsert }) : Promise.resolve({});
await Promise.all([updateJobPromise, softDeletePromise, perRowUpdatesPromise, insertPromise]);
logger.log("parts-job-changed", "info", job.id, null); logger.log("parts-job-changed", "info", job.id, null);
return res.status(200).json({ success: true, jobId: job.id }); return res.status(200).json({ success: true, jobId: job.id });
} catch (err) { } catch (err) {
@@ -316,5 +363,3 @@ const partsManagementVehicleDamageEstimateChgRq = async (req, res) => {
}; };
module.exports = partsManagementVehicleDamageEstimateChgRq; module.exports = partsManagementVehicleDamageEstimateChgRq;
// Remove any duplicate S3 constants that might have been appended previously (none expected now)

View File

@@ -246,6 +246,58 @@ const DELETE_PARTS_ORDERS_BY_JOB_IDS = `
} }
`; `;
// Bulk upsert of joblines keyed on the (jobid, unq_seq) unique constraint;
// on conflict the listed mutable columns are overwritten, identity columns are not.
// NOTE(review): the ChgRq handler in this changeset uses UPDATE_JOBLINE_BY_PK +
// INSERT_JOBLINES rather than this mutation — confirm it still has a caller.
const UPSERT_JOBLINES = `
mutation UpsertJoblines($joblines: [joblines_insert_input!]!) {
insert_joblines(
objects: $joblines,
on_conflict: {
constraint: joblines_jobid_unq_seq_key,
update_columns: [
status,
line_desc,
notes,
manual_line,
part_qty,
oem_partno,
alt_partno,
part_type,
act_price,
db_price,
tax_part,
mod_lbr_ty,
mod_lb_hrs,
op_code_desc,
lbr_amt,
lbr_typ_j,
lbr_hrs_j,
lbr_op_j,
paint_stg,
paint_tone
]
}
) {
affected_rows
}
}
`;
// Look up surviving jobline primary keys (removed !== true) for a job by their
// unq_seq natural keys; the handler uses the result to decide update-by-pk vs
// bulk insert for each incoming line.
const GET_JOBLINE_IDS_BY_JOBID_UNQSEQ = `
query GetJoblineIdsByJobIdUnqSeq($jobid: uuid!, $unqSeqs: [Int!]!) {
joblines(where: { jobid: { _eq: $jobid }, unq_seq: { _in: $unqSeqs }, removed: { _neq: true } }) {
id
unq_seq
}
}
`;
// Apply a joblines_set_input patch to a single jobline by primary key (uuid);
// selects only the id so the caller can tell whether the row was found.
const UPDATE_JOBLINE_BY_PK = `
mutation UpdateJoblineByPk($id: uuid!, $jl: joblines_set_input!) {
update_joblines_by_pk(pk_columns: { id: $id }, _set: $jl) { id }
}
`;
module.exports = { module.exports = {
GET_BODYSHOP_STATUS, GET_BODYSHOP_STATUS,
GET_VEHICLE_BY_SHOP_VIN, GET_VEHICLE_BY_SHOP_VIN,
@@ -272,8 +324,10 @@ module.exports = {
DELETE_AUDIT_TRAIL_BY_SHOP, DELETE_AUDIT_TRAIL_BY_SHOP,
GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ, GET_JOBLINES_NOTES_BY_JOBID_UNQSEQ,
GET_JOB_BY_ID, GET_JOB_BY_ID,
// newly added exports
CLEAR_TASKS_PARTSORDER_LINKS_BY_JOBIDS, CLEAR_TASKS_PARTSORDER_LINKS_BY_JOBIDS,
DELETE_PARTS_ORDER_LINES_BY_JOB_IDS, DELETE_PARTS_ORDER_LINES_BY_JOB_IDS,
DELETE_PARTS_ORDERS_BY_JOB_IDS DELETE_PARTS_ORDERS_BY_JOB_IDS,
UPSERT_JOBLINES,
GET_JOBLINE_IDS_BY_JOBID_UNQSEQ,
UPDATE_JOBLINE_BY_PK
}; };